Diffstat (limited to 'gnu/packages/patches')
-rw-r--r--  gnu/packages/patches/ark-skip-xar-test.patch                        44
-rw-r--r--  gnu/packages/patches/connman-CVE-2021-33833.patch                   74
-rw-r--r--  gnu/packages/patches/emacs-telega-patch-server-functions.patch      31
-rw-r--r--  gnu/packages/patches/emacs-telega-path-placeholder.patch            44
-rw-r--r--  gnu/packages/patches/go-fix-script-tests.patch                      18
-rw-r--r--  gnu/packages/patches/libgrss-CVE-2016-2001.patch                   101
-rw-r--r--  gnu/packages/patches/oneko-remove-nonfree-characters.patch         307
-rw-r--r--  gnu/packages/patches/proot-test-fhs.patch                            98
-rw-r--r--  gnu/packages/patches/python-seaborn-kde-test.patch                   36
-rw-r--r--  gnu/packages/patches/tor-fix-build-with-gcc-7.patch                  30
-rw-r--r--  gnu/packages/patches/transmission-remote-gtk-fix-appstream.patch     61
-rw-r--r--  gnu/packages/patches/upower-builddir.patch                           24
-rw-r--r--  gnu/packages/patches/vtk-8-fix-freetypetools-build-failure.patch     36
-rw-r--r--  gnu/packages/patches/ytfzf-programs.patch                           643
-rw-r--r--  gnu/packages/patches/ytfzf-updates.patch                             44
15 files changed, 1352 insertions, 239 deletions
diff --git a/gnu/packages/patches/ark-skip-xar-test.patch b/gnu/packages/patches/ark-skip-xar-test.patch
new file mode 100644
index 0000000000..525201997b
--- /dev/null
+++ b/gnu/packages/patches/ark-skip-xar-test.patch
@@ -0,0 +1,44 @@
+The libarchive package used in Guix does not support xar, so skip the xar tests.
+
+--- ark-20.04.1.orig/autotests/kerfuffle/loadtest.cpp 2020-12-23 08:46:15.780782601 +0800
++++ ark-20.04.1/autotests/kerfuffle/loadtest.cpp 2020-12-23 11:13:17.101724042 +0800
+@@ -181,13 +181,6 @@
+ qDebug() << "lz4 executable not found in path. Skipping lz4 test.";
+ }
+
+- QTest::newRow("xar archive")
+- << QFINDTESTDATA("data/simplearchive.xar")
+- << QStringLiteral("simplearchive")
+- << true << false << false << false << false << 0 << Archive::Unencrypted
+- << QStringLiteral("simplearchive")
+- << QString();
+-
+ QTest::newRow("mimetype child of application/zip")
+ << QFINDTESTDATA("data/test.odt")
+ << QStringLiteral("test")
+--- ark-20.04.1.orig/autotests/kerfuffle/extracttest.cpp 2020-12-23 08:46:15.780782601 +0800
++++ ark-20.04.1/autotests/kerfuffle/extracttest.cpp 2020-12-23 11:14:02.801809620 +0800
+@@ -350,23 +350,6 @@
+ qDebug() << "lz4 executable not found in path. Skipping lz4 test.";
+ }
+
+- archivePath = QFINDTESTDATA("data/simplearchive.xar");
+- QTest::newRow("extract selected entries from a xar archive without path")
+- << archivePath
+- << QVector<Archive::Entry*> {
+- new Archive::Entry(this, QStringLiteral("dir1/file11.txt"), QString()),
+- new Archive::Entry(this, QStringLiteral("file4.txt"), QString())
+- }
+- << optionsNoPaths
+- << 2;
+-
+- archivePath = QFINDTESTDATA("data/simplearchive.xar");
+- QTest::newRow("extract all entries from a xar archive with path")
+- << archivePath
+- << QVector<Archive::Entry*>()
+- << optionsPreservePaths
+- << 6;
+-
+ archivePath = QFINDTESTDATA("data/hello-1.0-x86_64.AppImage");
+ QTest::newRow("extract all entries from an AppImage with path")
+ << archivePath
diff --git a/gnu/packages/patches/connman-CVE-2021-33833.patch b/gnu/packages/patches/connman-CVE-2021-33833.patch
deleted file mode 100644
index 3e1a19d961..0000000000
--- a/gnu/packages/patches/connman-CVE-2021-33833.patch
+++ /dev/null
@@ -1,74 +0,0 @@
-Fix CVE-2021-33833:
-
-https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2021-33833
-
-Patch copied from upstream source repository:
-
-https://git.kernel.org/pub/scm/network/connman/connman.git/commit/?id=eceb2e8d2341c041df55a5e2f047d9a8c491463c
-
-From eceb2e8d2341c041df55a5e2f047d9a8c491463c Mon Sep 17 00:00:00 2001
-From: Valery Kashcheev <v.kascheev@omp.ru>
-Date: Mon, 7 Jun 2021 18:58:24 +0200
-Subject: [PATCH] dnsproxy: Check the length of buffers before memcpy
-
-Fix using a stack-based buffer overflow attack by checking the length of
-the ptr and uptr buffers.
-
-Fix debug message output.
-
-Fixes: CVE-2021-33833
----
- src/dnsproxy.c | 20 +++++++++++---------
- 1 file changed, 11 insertions(+), 9 deletions(-)
-
-diff --git a/src/dnsproxy.c b/src/dnsproxy.c
-index de52df5a..38dbdd71 100644
---- a/src/dnsproxy.c
-+++ b/src/dnsproxy.c
-@@ -1788,17 +1788,15 @@ static char *uncompress(int16_t field_count, char *start, char *end,
- * tmp buffer.
- */
-
-- debug("pos %d ulen %d left %d name %s", pos, ulen,
-- (int)(uncomp_len - (uptr - uncompressed)), uptr);
--
-- ulen = strlen(name);
-- if ((uptr + ulen + 1) > uncomp_end) {
-+ ulen = strlen(name) + 1;
-+ if ((uptr + ulen) > uncomp_end)
- goto out;
-- }
-- strncpy(uptr, name, uncomp_len - (uptr - uncompressed));
-+ strncpy(uptr, name, ulen);
-+
-+ debug("pos %d ulen %d left %d name %s", pos, ulen,
-+ (int)(uncomp_end - (uptr + ulen)), uptr);
-
- uptr += ulen;
-- *uptr++ = '\0';
-
- ptr += pos;
-
-@@ -1841,7 +1839,7 @@ static char *uncompress(int16_t field_count, char *start, char *end,
- } else if (dns_type == ns_t_a || dns_type == ns_t_aaaa) {
- dlen = uptr[-2] << 8 | uptr[-1];
-
-- if (ptr + dlen > end) {
-+ if ((ptr + dlen) > end || (uptr + dlen) > uncomp_end) {
- debug("data len %d too long", dlen);
- goto out;
- }
-@@ -1880,6 +1878,10 @@ static char *uncompress(int16_t field_count, char *start, char *end,
- * refresh interval, retry interval, expiration
- * limit and minimum ttl). They are 20 bytes long.
- */
-+ if ((uptr + 20) > uncomp_end || (ptr + 20) > end) {
-+ debug("soa record too long");
-+ goto out;
-+ }
- memcpy(uptr, ptr, 20);
- uptr += 20;
- ptr += 20;
---
-2.32.0
-
diff --git a/gnu/packages/patches/emacs-telega-patch-server-functions.patch b/gnu/packages/patches/emacs-telega-patch-server-functions.patch
deleted file mode 100644
index e3d49278d0..0000000000
--- a/gnu/packages/patches/emacs-telega-patch-server-functions.patch
+++ /dev/null
@@ -1,31 +0,0 @@
-Remove interactive build for telega-server, as it fails on Guix.
-Modify the `telega-server--find-bin' function to only use the version
-of telega-server installed by Guix.
-
-Created by Brett Gilio <brettg@gnu.org>
-
---- a/telega-server.el
-+++ b/telega-server.el
-@@ -113,7 +113,6 @@ If already deferring, then just executes the BODY."
- If BUILD-FLAGS is specified, then rebuild server without any
- queries using this flags for building, could be empty string.
- Otherwise query user about building flags."
-- (interactive)
- (telega-test-env 'quiet)
- (when (or build-flags
- (y-or-n-p "Build `telega-server'? "))
-@@ -137,11 +136,8 @@ Otherwise query user about building flags."
- (defun telega-server--find-bin ()
- "Find telega-server executable.
- Raise error if not found."
-- (let ((exec-path (cons telega-directory exec-path)))
-- (or (executable-find "telega-server")
-- (progn (telega-server-build)
-- (executable-find "telega-server"))
-- (error "`telega-server' not found in exec-path"))))
-+ (or (executable-find "telega-server")
-+ (error "`telega-server' not found in exec-path")))
-
- (defun telega-server-version ()
- "Return telega-server version."
-
diff --git a/gnu/packages/patches/emacs-telega-path-placeholder.patch b/gnu/packages/patches/emacs-telega-path-placeholder.patch
new file mode 100644
index 0000000000..c20be36712
--- /dev/null
+++ b/gnu/packages/patches/emacs-telega-path-placeholder.patch
@@ -0,0 +1,44 @@
+From 865b8c553722a971c68742c2e849e41eb0e2360c Mon Sep 17 00:00:00 2001
+From: Zhu Zihao <all_but_last@163.com>
+Date: Thu, 24 Jun 2021 23:43:50 +0800
+Subject: [PATCH] Replace code that search path with placeholder for
+ configuration.
+
+---
+ telega-server.el | 6 +-----
+ telega-util.el | 2 +-
+ 2 files changed, 2 insertions(+), 6 deletions(-)
+
+diff --git a/telega-server.el b/telega-server.el
+index 999125d..0fa0817 100644
+--- a/telega-server.el
++++ b/telega-server.el
+@@ -142,11 +142,7 @@ Otherwise query user about building flags."
+ (defun telega-server--find-bin ()
+ "Find telega-server executable.
+ Raise error if not found."
+- (let ((exec-path (cons telega-directory exec-path)))
+- (or (executable-find "telega-server")
+- (progn (telega-server-build)
+- (executable-find "telega-server"))
+- (error "`telega-server' not found in exec-path"))))
++ "@TELEGA_SERVER_BIN@")
+
+ (defun telega-server-version ()
+ "Return telega-server version."
+diff --git a/telega-util.el b/telega-util.el
+index 73a46b1..f53e20a 100644
+--- a/telega-util.el
++++ b/telega-util.el
+@@ -464,7 +464,7 @@ N can't be 0."
+
+ (defun telega-etc-file (filename)
+ "Return absolute path to FILENAME from etc/ directory in telega."
+- (expand-file-name (concat "etc/" filename) telega--lib-directory))
++ (concat "@TELEGA_SHARE@" "/" filename))
+
+ (defun telega-link-props (link-type link-to &optional face)
+ "Generate props for link button openable with `telega-link--button-action'."
+--
+2.32.0
+
diff --git a/gnu/packages/patches/go-fix-script-tests.patch b/gnu/packages/patches/go-fix-script-tests.patch
new file mode 100644
index 0000000000..b29e83fef0
--- /dev/null
+++ b/gnu/packages/patches/go-fix-script-tests.patch
@@ -0,0 +1,18 @@
+Make library/header paths visible to cmd/go script tests, which is necessary for
+cgo/gccgo tests to work correctly.
+
+diff --git a/src/cmd/go/script_test.go b/src/cmd/go/script_test.go
+index dfaa40548e..9d0f0e9bcd 100644
+--- a/src/cmd/go/script_test.go
++++ b/src/cmd/go/script_test.go
+@@ -100,6 +100,10 @@ const (
+ var extraEnvKeys = []string{
+ "SYSTEMROOT", // must be preserved on Windows to find DLLs; golang.org/issue/25210
+ "WINDIR", // must be preserved on Windows to be able to run PowerShell command; golang.org/issue/30711
++ "CPATH",
++ "C_INCLUDE_PATH",
++ "CPLUS_INCLUDE_PATH",
++ "LIBRARY_PATH",
+ "LD_LIBRARY_PATH", // must be preserved on Unix systems to find shared libraries
+ "CC", // don't lose user settings when invoking cgo
+ "GO_TESTING_GOTOOLS", // for gccgo testing
diff --git a/gnu/packages/patches/libgrss-CVE-2016-2001.patch b/gnu/packages/patches/libgrss-CVE-2016-2001.patch
new file mode 100644
index 0000000000..b7de681475
--- /dev/null
+++ b/gnu/packages/patches/libgrss-CVE-2016-2001.patch
@@ -0,0 +1,101 @@
+From 2c6ea642663e2a44efc8583fae7c54b7b98f72b3 Mon Sep 17 00:00:00 2001
+From: Ariadne Conill <ariadne@dereferenced.org>
+Date: Mon, 7 Jun 2021 18:51:07 -0600
+Subject: [PATCH] Ensure the ssl-use-system-ca-file property is set to true on
+ all SoupSessions.
+
+The default SoupSessionSync and SoupSessionAsync behaviour does not perform any
+TLS certificate validation, unless the ssl-use-system-ca-file property is set
+to true.
+
+This mitigates CVE-2016-20011.
+---
+ src/feed-channel.c | 2 ++
+ src/feed-enclosure.c | 4 ++++
+ src/feeds-pool.c | 1 +
+ src/feeds-publisher.c | 4 +++-
+ src/feeds-subscriber.c | 4 +++-
+ 5 files changed, 13 insertions(+), 2 deletions(-)
+
+diff --git a/src/feed-channel.c b/src/feed-channel.c
+index 19ca7b2..d2d51b9 100644
+--- a/src/feed-channel.c
++++ b/src/feed-channel.c
+@@ -973,6 +973,8 @@ quick_and_dirty_parse (GrssFeedChannel *channel, SoupMessage *msg, GList **save_
+ static void
+ init_soup_session (SoupSession *session, GrssFeedChannel *channel)
+ {
++ g_object_set (G_OBJECT (session), "ssl-use-system-ca-file", TRUE, NULL);
++
+ if (channel->priv->jar != NULL)
+ soup_session_add_feature (session, SOUP_SESSION_FEATURE (channel->priv->jar));
+ if (channel->priv->gzip == TRUE)
+diff --git a/src/feed-enclosure.c b/src/feed-enclosure.c
+index 68ebbfe..2cd8f9e 100644
+--- a/src/feed-enclosure.c
++++ b/src/feed-enclosure.c
+@@ -220,6 +220,8 @@ grss_feed_enclosure_fetch (GrssFeedEnclosure *enclosure, GError **error)
+ url = grss_feed_enclosure_get_url (enclosure);
+
+ session = soup_session_sync_new ();
++ g_object_set (G_OBJECT (session), "ssl-use-system-ca-file", TRUE, NULL);
++
+ msg = soup_message_new ("GET", url);
+ status = soup_session_send_message (session, msg);
+
+@@ -282,6 +284,8 @@ grss_feed_enclosure_fetch_async (GrssFeedEnclosure *enclosure, GAsyncReadyCallba
+
+ task = g_task_new (enclosure, NULL, callback, user_data);
+ session = soup_session_async_new ();
++ g_object_set (G_OBJECT (session), "ssl-use-system-ca-file", TRUE, NULL);
++
+ msg = soup_message_new ("GET", grss_feed_enclosure_get_url (enclosure));
+ soup_session_queue_message (session, msg, enclosure_downloaded, task);
+ }
+diff --git a/src/feeds-pool.c b/src/feeds-pool.c
+index f18f3cd..7b33956 100644
+--- a/src/feeds-pool.c
++++ b/src/feeds-pool.c
+@@ -178,6 +178,7 @@ grss_feeds_pool_init (GrssFeedsPool *node)
+ memset (node->priv, 0, sizeof (GrssFeedsPoolPrivate));
+ node->priv->parser = grss_feed_parser_new ();
+ node->priv->soupsession = soup_session_async_new ();
++ g_object_set (G_OBJECT (node->priv->soupsession), "ssl-use-system-ca-file", TRUE, NULL);
+ }
+
+ /**
+diff --git a/src/feeds-publisher.c b/src/feeds-publisher.c
+index 427a54f..500cd96 100644
+--- a/src/feeds-publisher.c
++++ b/src/feeds-publisher.c
+@@ -888,8 +888,10 @@ create_and_run_server (GrssFeedsPublisher *pub)
+ {
+ SoupAddress *soup_addr;
+
+- if (pub->priv->soupsession == NULL)
++ if (pub->priv->soupsession == NULL) {
+ pub->priv->soupsession = soup_session_async_new ();
++ g_object_set (G_OBJECT (pub->priv->soupsession), "ssl-use-system-ca-file", TRUE, NULL);
++ }
+
+ soup_addr = soup_address_new_any (SOUP_ADDRESS_FAMILY_IPV4, pub->priv->port);
+ pub->priv->server = soup_server_new ("port", pub->priv->port, "interface", soup_addr, NULL);
+diff --git a/src/feeds-subscriber.c b/src/feeds-subscriber.c
+index 259f891..0f63f83 100644
+--- a/src/feeds-subscriber.c
++++ b/src/feeds-subscriber.c
+@@ -513,8 +513,10 @@ init_run_server (GrssFeedsSubscriber *sub)
+ {
+ GInetAddress *addr;
+
+- if (sub->priv->soupsession == NULL)
++ if (sub->priv->soupsession == NULL) {
+ sub->priv->soupsession = soup_session_async_new ();
++ g_object_set (G_OBJECT (sub->priv->soupsession), "ssl-use-system-ca-file", TRUE, NULL);
++ }
+
+ /*
+ Flow:
+--
+GitLab
+
diff --git a/gnu/packages/patches/oneko-remove-nonfree-characters.patch b/gnu/packages/patches/oneko-remove-nonfree-characters.patch
new file mode 100644
index 0000000000..4f80e53995
--- /dev/null
+++ b/gnu/packages/patches/oneko-remove-nonfree-characters.patch
@@ -0,0 +1,307 @@
+Remove options and code to use bitmaps of the BSD daemon and other characters
+with copyright issues. The bitmaps themselves are deleted in a source snippet.
+diff --git a/cursors/cursor.include b/cursors/cursor.include
+index ef96d72..1dc3dc2 100644
+--- a/cursors/cursor.include
++++ b/cursors/cursor.include
+@@ -2,9 +2,3 @@
+ #include "mouse_cursor_mask.xbm"
+ #include "bone_cursor.xbm"
+ #include "bone_cursor_mask.xbm"
+-#include "bsd_cursor.xbm"
+-#include "bsd_cursor_mask.xbm"
+-#include "card_cursor.xbm"
+-#include "card_cursor_mask.xbm"
+-#include "petal_cursor.xbm"
+-#include "petal_cursor_mask.xbm"
+diff --git a/oneko.c b/oneko.c
+index d2b81fe..04fa59f 100644
+--- a/oneko.c
++++ b/oneko.c
+@@ -33,7 +33,7 @@ XColor theBackgroundColor; /* $@?'(J ($@%P%C%/%0%i%&%s%I(J) */
+
+ int Synchronous = False;
+ /* Types of animals */
+-#define BITMAPTYPES 6
++#define BITMAPTYPES 3
+ typedef struct _AnimalDefaults {
+ char *name;
+ int speed, idle, bitmap_width, bitmap_height;
+@@ -51,12 +51,6 @@ AnimalDefaultsData AnimalDefaultsDataTable[] =
+ mouse_cursor_width,mouse_cursor_height, mouse_cursor_x_hot,mouse_cursor_y_hot },
+ { "dog" , 10, 6, 32, 32, 125000L, 0, 0, bone_cursor_bits,bone_cursor_mask_bits,
+ bone_cursor_width,bone_cursor_height, bone_cursor_x_hot,bone_cursor_y_hot },
+- { "bsd_daemon" , 16, 6, 32, 32, 300000L, 22, 20, bsd_cursor_bits,bsd_cursor_mask_bits,
+- bsd_cursor_width,bsd_cursor_height, bsd_cursor_x_hot,bsd_cursor_y_hot },
+- { "sakura" , 13, 6, 32, 32, 125000L, 0, 0, card_cursor_bits,card_cursor_mask_bits,
+- card_cursor_width,card_cursor_height, card_cursor_x_hot,card_cursor_y_hot },
+- { "tomoyo" , 10, 6, 32, 32, 125000L, 32, 32, petal_cursor_bits,petal_cursor_mask_bits,
+- petal_cursor_width,petal_cursor_height, petal_cursor_x_hot,petal_cursor_y_hot },
+ };
+
+ /*
+@@ -154,70 +148,70 @@ typedef struct {
+
+ BitmapGCData BitmapGCDataTable[] =
+ {
+- { &Mati2GC, &Mati2Xbm, mati2_bits, mati2_tora_bits, mati2_dog_bits, mati2_bsd_bits, mati2_sakura_bits, mati2_tomoyo_bits,
+- &Mati2Msk, mati2_mask_bits, mati2_mask_bits, mati2_dog_mask_bits, mati2_bsd_mask_bits, mati2_sakura_mask_bits, mati2_tomoyo_mask_bits },
+- { &Jare2GC, &Jare2Xbm, jare2_bits, jare2_tora_bits, jare2_dog_bits, jare2_bsd_bits, jare2_sakura_bits, jare2_tomoyo_bits,
+- &Jare2Msk, jare2_mask_bits, jare2_mask_bits, jare2_dog_mask_bits, jare2_bsd_mask_bits, jare2_sakura_mask_bits, jare2_tomoyo_mask_bits },
+- { &Kaki1GC, &Kaki1Xbm, kaki1_bits, kaki1_tora_bits, kaki1_dog_bits, kaki1_bsd_bits, kaki1_sakura_bits, kaki1_tomoyo_bits,
+- &Kaki1Msk, kaki1_mask_bits, kaki1_mask_bits, kaki1_dog_mask_bits, kaki1_bsd_mask_bits, kaki1_sakura_mask_bits, kaki1_tomoyo_mask_bits },
+- { &Kaki2GC, &Kaki2Xbm, kaki2_bits, kaki2_tora_bits, kaki2_dog_bits, kaki2_bsd_bits, kaki2_sakura_bits, kaki2_tomoyo_bits,
+- &Kaki2Msk, kaki2_mask_bits, kaki2_mask_bits, kaki2_dog_mask_bits, kaki2_bsd_mask_bits, kaki2_sakura_mask_bits, kaki2_tomoyo_mask_bits },
+- { &Mati3GC, &Mati3Xbm, mati3_bits, mati3_tora_bits, mati3_dog_bits, mati3_bsd_bits, mati3_sakura_bits, mati3_tomoyo_bits,
+- &Mati3Msk, mati3_mask_bits, mati3_mask_bits, mati3_dog_mask_bits, mati3_bsd_mask_bits, mati3_sakura_mask_bits, mati3_tomoyo_mask_bits },
+- { &Sleep1GC, &Sleep1Xbm, sleep1_bits, sleep1_tora_bits, sleep1_dog_bits, sleep1_bsd_bits, sleep1_sakura_bits, sleep1_tomoyo_bits,
+- &Sleep1Msk, sleep1_mask_bits, sleep1_mask_bits, sleep1_dog_mask_bits, sleep1_bsd_mask_bits, sleep1_sakura_mask_bits, sleep1_tomoyo_mask_bits },
+- { &Sleep2GC, &Sleep2Xbm, sleep2_bits, sleep2_tora_bits, sleep2_dog_bits, sleep2_bsd_bits, sleep2_sakura_bits, sleep2_tomoyo_bits,
+- &Sleep2Msk, sleep2_mask_bits, sleep2_mask_bits, sleep2_dog_mask_bits, sleep2_bsd_mask_bits, sleep2_sakura_mask_bits, sleep2_tomoyo_mask_bits },
+- { &AwakeGC, &AwakeXbm, awake_bits, awake_tora_bits, awake_dog_bits, awake_bsd_bits, awake_sakura_bits, awake_tomoyo_bits,
+- &AwakeMsk, awake_mask_bits, awake_mask_bits, awake_dog_mask_bits, awake_bsd_mask_bits, awake_sakura_mask_bits, awake_tomoyo_mask_bits },
+- { &Up1GC, &Up1Xbm, up1_bits, up1_tora_bits, up1_dog_bits, up1_bsd_bits, up1_sakura_bits, up1_tomoyo_bits,
+- &Up1Msk, up1_mask_bits, up1_mask_bits, up1_dog_mask_bits, up1_bsd_mask_bits, up1_sakura_mask_bits, up1_tomoyo_mask_bits },
+- { &Up2GC, &Up2Xbm, up2_bits, up2_tora_bits, up2_dog_bits, up2_bsd_bits, up2_sakura_bits, up2_tomoyo_bits,
+- &Up2Msk, up2_mask_bits, up2_mask_bits, up2_dog_mask_bits, up2_bsd_mask_bits, up2_sakura_mask_bits, up2_tomoyo_mask_bits },
+- { &Down1GC, &Down1Xbm, down1_bits, down1_tora_bits, down1_dog_bits, down1_bsd_bits, down1_sakura_bits, down1_tomoyo_bits,
+- &Down1Msk, down1_mask_bits, down1_mask_bits, down1_dog_mask_bits, down1_bsd_mask_bits, down1_sakura_mask_bits, down1_tomoyo_mask_bits },
+- { &Down2GC, &Down2Xbm, down2_bits, down2_tora_bits, down2_dog_bits, down2_bsd_bits, down2_sakura_bits, down2_tomoyo_bits,
+- &Down2Msk, down2_mask_bits, down2_mask_bits, down2_dog_mask_bits, down2_bsd_mask_bits, down2_sakura_mask_bits, down2_tomoyo_mask_bits },
+- { &Left1GC, &Left1Xbm, left1_bits, left1_tora_bits, left1_dog_bits, left1_bsd_bits, left1_sakura_bits, left1_tomoyo_bits,
+- &Left1Msk, left1_mask_bits, left1_mask_bits, left1_dog_mask_bits, left1_bsd_mask_bits, left1_sakura_mask_bits, left1_tomoyo_mask_bits },
+- { &Left2GC, &Left2Xbm, left2_bits, left2_tora_bits, left2_dog_bits, left2_bsd_bits, left2_sakura_bits, left2_tomoyo_bits,
+- &Left2Msk, left2_mask_bits, left2_mask_bits, left2_dog_mask_bits, left2_bsd_mask_bits, left2_sakura_mask_bits, left2_tomoyo_mask_bits },
+- { &Right1GC, &Right1Xbm, right1_bits, right1_tora_bits, right1_dog_bits, right1_bsd_bits, right1_sakura_bits, right1_tomoyo_bits,
+- &Right1Msk, right1_mask_bits, right1_mask_bits,right1_dog_mask_bits, right1_bsd_mask_bits, right1_sakura_mask_bits, right1_tomoyo_mask_bits },
+- { &Right2GC, &Right2Xbm, right2_bits, right2_tora_bits, right2_dog_bits, right2_bsd_bits, right2_sakura_bits, right2_tomoyo_bits,
+- &Right2Msk, right2_mask_bits, right2_mask_bits, right2_dog_mask_bits, right2_bsd_mask_bits, right2_sakura_mask_bits, right2_tomoyo_mask_bits },
+- { &UpLeft1GC, &UpLeft1Xbm, upleft1_bits, upleft1_tora_bits, upleft1_dog_bits, upleft1_bsd_bits, upleft1_sakura_bits, upleft1_tomoyo_bits,
+- &UpLeft1Msk, upleft1_mask_bits, upleft1_mask_bits, upleft1_dog_mask_bits, upleft1_bsd_mask_bits, upleft1_sakura_mask_bits, upleft1_tomoyo_mask_bits },
+- { &UpLeft2GC, &UpLeft2Xbm, upleft2_bits, upleft2_tora_bits, upleft2_dog_bits, upleft2_bsd_bits, upleft2_sakura_bits, upleft2_tomoyo_bits,
+- &UpLeft2Msk, upleft2_mask_bits, upleft2_mask_bits,upleft2_dog_mask_bits, upleft2_bsd_mask_bits, upleft2_sakura_mask_bits, upleft2_tomoyo_mask_bits },
+- { &UpRight1GC, &UpRight1Xbm, upright1_bits, upright1_tora_bits, upright1_dog_bits, upright1_bsd_bits, upright1_sakura_bits, upright1_tomoyo_bits,
+- &UpRight1Msk, upright1_mask_bits, upright1_mask_bits,upright1_dog_mask_bits, upright1_bsd_mask_bits, upright1_sakura_mask_bits, upright1_tomoyo_mask_bits },
+- { &UpRight2GC, &UpRight2Xbm, upright2_bits, upright2_tora_bits, upright2_dog_bits, upright2_bsd_bits, upright2_sakura_bits, upright2_tomoyo_bits,
+- &UpRight2Msk, upright2_mask_bits, upright2_mask_bits,upright2_dog_mask_bits, upright2_bsd_mask_bits, upright2_sakura_mask_bits, upright2_tomoyo_mask_bits },
+- { &DownLeft1GC, &DownLeft1Xbm, dwleft1_bits, dwleft1_tora_bits, dwleft1_dog_bits, dwleft1_bsd_bits, dwleft1_sakura_bits, dwleft1_tomoyo_bits,
+- &DownLeft1Msk, dwleft1_mask_bits, dwleft1_mask_bits, dwleft1_dog_mask_bits, dwleft1_bsd_mask_bits, dwleft1_sakura_mask_bits, dwleft1_tomoyo_mask_bits },
+- { &DownLeft2GC, &DownLeft2Xbm, dwleft2_bits, dwleft2_tora_bits, dwleft2_dog_bits, dwleft2_bsd_bits, dwleft2_sakura_bits, dwleft2_tomoyo_bits,
+- &DownLeft2Msk, dwleft2_mask_bits, dwleft2_mask_bits, dwleft2_dog_mask_bits, dwleft2_bsd_mask_bits, dwleft2_sakura_mask_bits, dwleft2_tomoyo_mask_bits },
+- { &DownRight1GC, &DownRight1Xbm, dwright1_bits, dwright1_tora_bits, dwright1_dog_bits, dwright1_bsd_bits, dwright1_sakura_bits, dwright1_tomoyo_bits,
+- &DownRight1Msk, dwright1_mask_bits, dwright1_mask_bits, dwright1_dog_mask_bits, dwright1_bsd_mask_bits, dwright1_sakura_mask_bits, dwright1_tomoyo_mask_bits },
+- { &DownRight2GC, &DownRight2Xbm, dwright2_bits, dwright2_tora_bits, dwright2_dog_bits, dwright2_bsd_bits, dwright2_sakura_bits, dwright2_tomoyo_bits,
+- &DownRight2Msk, dwright2_mask_bits, dwright2_mask_bits, dwright2_dog_mask_bits, dwright2_bsd_mask_bits, dwright2_sakura_mask_bits, dwright2_tomoyo_mask_bits },
+- { &UpTogi1GC, &UpTogi1Xbm, utogi1_bits, utogi1_tora_bits, utogi1_dog_bits, utogi1_bsd_bits, utogi1_sakura_bits, utogi1_tomoyo_bits,
+- &UpTogi1Msk, utogi1_mask_bits, utogi1_mask_bits, utogi1_dog_mask_bits, utogi1_bsd_mask_bits, utogi1_sakura_mask_bits, utogi1_tomoyo_mask_bits },
+- { &UpTogi2GC, &UpTogi2Xbm, utogi2_bits, utogi2_tora_bits, utogi2_dog_bits, utogi2_bsd_bits, utogi2_sakura_bits, utogi2_tomoyo_bits,
+- &UpTogi2Msk, utogi2_mask_bits, utogi2_mask_bits, utogi2_dog_mask_bits, utogi2_bsd_mask_bits, utogi2_sakura_mask_bits, utogi2_tomoyo_mask_bits },
+- { &DownTogi1GC, &DownTogi1Xbm, dtogi1_bits, dtogi1_tora_bits, dtogi1_dog_bits, dtogi1_bsd_bits, dtogi1_sakura_bits, dtogi1_tomoyo_bits,
+- &DownTogi1Msk, dtogi1_mask_bits, dtogi1_mask_bits, dtogi1_dog_mask_bits, dtogi1_bsd_mask_bits, dtogi1_sakura_mask_bits, dtogi1_tomoyo_mask_bits },
+- { &DownTogi2GC, &DownTogi2Xbm, dtogi2_bits, dtogi2_tora_bits, dtogi2_dog_bits, dtogi2_bsd_bits, dtogi2_sakura_bits, dtogi2_tomoyo_bits,
+- &DownTogi2Msk, dtogi2_mask_bits, dtogi2_mask_bits, dtogi2_dog_mask_bits, dtogi2_bsd_mask_bits, dtogi2_sakura_mask_bits, dtogi2_tomoyo_mask_bits },
+- { &LeftTogi1GC, &LeftTogi1Xbm, ltogi1_bits, ltogi1_tora_bits, ltogi1_dog_bits, ltogi1_bsd_bits, ltogi1_sakura_bits, ltogi1_tomoyo_bits,
+- &LeftTogi1Msk, ltogi1_mask_bits, ltogi1_mask_bits,ltogi1_dog_mask_bits, ltogi1_bsd_mask_bits, ltogi1_sakura_mask_bits, ltogi1_tomoyo_mask_bits },
+- { &LeftTogi2GC, &LeftTogi2Xbm, ltogi2_bits, ltogi2_tora_bits, ltogi2_dog_bits, ltogi2_bsd_bits, ltogi2_sakura_bits, ltogi2_tomoyo_bits,
+- &LeftTogi2Msk, ltogi2_mask_bits, ltogi2_mask_bits,ltogi2_dog_mask_bits, ltogi2_bsd_mask_bits, ltogi2_sakura_mask_bits, ltogi2_tomoyo_mask_bits },
+- { &RightTogi1GC, &RightTogi1Xbm, rtogi1_bits, rtogi1_tora_bits, rtogi1_dog_bits, rtogi1_bsd_bits, rtogi1_sakura_bits, rtogi1_tomoyo_bits,
+- &RightTogi1Msk, rtogi1_mask_bits, rtogi1_mask_bits,rtogi1_dog_mask_bits, rtogi1_bsd_mask_bits, rtogi1_sakura_mask_bits, rtogi1_tomoyo_mask_bits },
+- { &RightTogi2GC, &RightTogi2Xbm, rtogi2_bits, rtogi2_tora_bits, rtogi2_dog_bits, rtogi2_bsd_bits, rtogi2_sakura_bits, rtogi2_tomoyo_bits,
+- &RightTogi2Msk, rtogi2_mask_bits, rtogi2_mask_bits,rtogi2_dog_mask_bits, rtogi2_bsd_mask_bits, rtogi2_sakura_mask_bits, rtogi2_tomoyo_mask_bits },
++ { &Mati2GC, &Mati2Xbm, mati2_bits, mati2_tora_bits, mati2_dog_bits,
++ &Mati2Msk, mati2_mask_bits, mati2_mask_bits, mati2_dog_mask_bits },
++ { &Jare2GC, &Jare2Xbm, jare2_bits, jare2_tora_bits, jare2_dog_bits,
++ &Jare2Msk, jare2_mask_bits, jare2_mask_bits, jare2_dog_mask_bits },
++ { &Kaki1GC, &Kaki1Xbm, kaki1_bits, kaki1_tora_bits, kaki1_dog_bits,
++ &Kaki1Msk, kaki1_mask_bits, kaki1_mask_bits, kaki1_dog_mask_bits },
++ { &Kaki2GC, &Kaki2Xbm, kaki2_bits, kaki2_tora_bits, kaki2_dog_bits,
++ &Kaki2Msk, kaki2_mask_bits, kaki2_mask_bits, kaki2_dog_mask_bits },
++ { &Mati3GC, &Mati3Xbm, mati3_bits, mati3_tora_bits, mati3_dog_bits,
++ &Mati3Msk, mati3_mask_bits, mati3_mask_bits, mati3_dog_mask_bits },
++ { &Sleep1GC, &Sleep1Xbm, sleep1_bits, sleep1_tora_bits, sleep1_dog_bits,
++ &Sleep1Msk, sleep1_mask_bits, sleep1_mask_bits, sleep1_dog_mask_bits },
++ { &Sleep2GC, &Sleep2Xbm, sleep2_bits, sleep2_tora_bits, sleep2_dog_bits,
++ &Sleep2Msk, sleep2_mask_bits, sleep2_mask_bits, sleep2_dog_mask_bits },
++ { &AwakeGC, &AwakeXbm, awake_bits, awake_tora_bits, awake_dog_bits,
++ &AwakeMsk, awake_mask_bits, awake_mask_bits, awake_dog_mask_bits },
++ { &Up1GC, &Up1Xbm, up1_bits, up1_tora_bits, up1_dog_bits,
++ &Up1Msk, up1_mask_bits, up1_mask_bits, up1_dog_mask_bits },
++ { &Up2GC, &Up2Xbm, up2_bits, up2_tora_bits, up2_dog_bits,
++ &Up2Msk, up2_mask_bits, up2_mask_bits, up2_dog_mask_bits },
++ { &Down1GC, &Down1Xbm, down1_bits, down1_tora_bits, down1_dog_bits,
++ &Down1Msk, down1_mask_bits, down1_mask_bits, down1_dog_mask_bits },
++ { &Down2GC, &Down2Xbm, down2_bits, down2_tora_bits, down2_dog_bits,
++ &Down2Msk, down2_mask_bits, down2_mask_bits, down2_dog_mask_bits },
++ { &Left1GC, &Left1Xbm, left1_bits, left1_tora_bits, left1_dog_bits,
++ &Left1Msk, left1_mask_bits, left1_mask_bits, left1_dog_mask_bits },
++ { &Left2GC, &Left2Xbm, left2_bits, left2_tora_bits, left2_dog_bits,
++ &Left2Msk, left2_mask_bits, left2_mask_bits, left2_dog_mask_bits },
++ { &Right1GC, &Right1Xbm, right1_bits, right1_tora_bits, right1_dog_bits,
++ &Right1Msk, right1_mask_bits, right1_mask_bits,right1_dog_mask_bits },
++ { &Right2GC, &Right2Xbm, right2_bits, right2_tora_bits, right2_dog_bits,
++ &Right2Msk, right2_mask_bits, right2_mask_bits, right2_dog_mask_bits },
++ { &UpLeft1GC, &UpLeft1Xbm, upleft1_bits, upleft1_tora_bits, upleft1_dog_bits,
++ &UpLeft1Msk, upleft1_mask_bits, upleft1_mask_bits, upleft1_dog_mask_bits },
++ { &UpLeft2GC, &UpLeft2Xbm, upleft2_bits, upleft2_tora_bits, upleft2_dog_bits,
++ &UpLeft2Msk, upleft2_mask_bits, upleft2_mask_bits,upleft2_dog_mask_bits },
++ { &UpRight1GC, &UpRight1Xbm, upright1_bits, upright1_tora_bits, upright1_dog_bits,
++ &UpRight1Msk, upright1_mask_bits, upright1_mask_bits,upright1_dog_mask_bits },
++ { &UpRight2GC, &UpRight2Xbm, upright2_bits, upright2_tora_bits, upright2_dog_bits,
++ &UpRight2Msk, upright2_mask_bits, upright2_mask_bits,upright2_dog_mask_bits },
++ { &DownLeft1GC, &DownLeft1Xbm, dwleft1_bits, dwleft1_tora_bits, dwleft1_dog_bits,
++ &DownLeft1Msk, dwleft1_mask_bits, dwleft1_mask_bits, dwleft1_dog_mask_bits },
++ { &DownLeft2GC, &DownLeft2Xbm, dwleft2_bits, dwleft2_tora_bits, dwleft2_dog_bits,
++ &DownLeft2Msk, dwleft2_mask_bits, dwleft2_mask_bits, dwleft2_dog_mask_bits },
++ { &DownRight1GC, &DownRight1Xbm, dwright1_bits, dwright1_tora_bits, dwright1_dog_bits,
++ &DownRight1Msk, dwright1_mask_bits, dwright1_mask_bits, dwright1_dog_mask_bits },
++ { &DownRight2GC, &DownRight2Xbm, dwright2_bits, dwright2_tora_bits, dwright2_dog_bits,
++ &DownRight2Msk, dwright2_mask_bits, dwright2_mask_bits, dwright2_dog_mask_bits },
++ { &UpTogi1GC, &UpTogi1Xbm, utogi1_bits, utogi1_tora_bits, utogi1_dog_bits,
++ &UpTogi1Msk, utogi1_mask_bits, utogi1_mask_bits, utogi1_dog_mask_bits },
++ { &UpTogi2GC, &UpTogi2Xbm, utogi2_bits, utogi2_tora_bits, utogi2_dog_bits,
++ &UpTogi2Msk, utogi2_mask_bits, utogi2_mask_bits, utogi2_dog_mask_bits },
++ { &DownTogi1GC, &DownTogi1Xbm, dtogi1_bits, dtogi1_tora_bits, dtogi1_dog_bits,
++ &DownTogi1Msk, dtogi1_mask_bits, dtogi1_mask_bits, dtogi1_dog_mask_bits },
++ { &DownTogi2GC, &DownTogi2Xbm, dtogi2_bits, dtogi2_tora_bits, dtogi2_dog_bits,
++ &DownTogi2Msk, dtogi2_mask_bits, dtogi2_mask_bits, dtogi2_dog_mask_bits },
++ { &LeftTogi1GC, &LeftTogi1Xbm, ltogi1_bits, ltogi1_tora_bits, ltogi1_dog_bits,
++ &LeftTogi1Msk, ltogi1_mask_bits, ltogi1_mask_bits,ltogi1_dog_mask_bits },
++ { &LeftTogi2GC, &LeftTogi2Xbm, ltogi2_bits, ltogi2_tora_bits, ltogi2_dog_bits,
++ &LeftTogi2Msk, ltogi2_mask_bits, ltogi2_mask_bits,ltogi2_dog_mask_bits },
++ { &RightTogi1GC, &RightTogi1Xbm, rtogi1_bits, rtogi1_tora_bits, rtogi1_dog_bits,
++ &RightTogi1Msk, rtogi1_mask_bits, rtogi1_mask_bits,rtogi1_dog_mask_bits },
++ { &RightTogi2GC, &RightTogi2Xbm, rtogi2_bits, rtogi2_tora_bits, rtogi2_dog_bits,
++ &RightTogi2Msk, rtogi2_mask_bits, rtogi2_mask_bits,rtogi2_dog_mask_bits },
+ { NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL }
+ };
+
+@@ -1512,8 +1506,6 @@ GetArguments(argc, argv, theDisplayName)
+ }
+ else {
+ char *av = argv[ArgCounter] + 1;
+- if (strcmp(av, "bsd") == 0)
+- av = "bsd_daemon";
+ for (loop=0;loop<BITMAPTYPES;loop++) {
+ if (strcmp(av,AnimalDefaultsDataTable[loop].name)==0)
+ {NekoMoyou = loop;found=1;}
+diff --git a/oneko.h b/oneko.h
+index 414e12f..38281fd 100644
+--- a/oneko.h
++++ b/oneko.h
+@@ -36,17 +36,11 @@
+ #include "bitmaps/neko/neko.include"
+ #include "bitmaps/tora/tora.include"
+ #include "bitmaps/dog/dog.include"
+-#include "bitmaps/bsd/bsd.include"
+-#include "bitmaps/sakura/sakura.include"
+-#include "bitmaps/tomoyo/tomoyo.include"
+
+ /* These are the bitmasks that allow the use of the shape extension. */
+
+ #include "bitmasks/neko/neko.mask.include"
+ #include "bitmasks/dog/dog.mask.include"
+-#include "bitmasks/bsd/bsd.mask.include"
+-#include "bitmasks/sakura/sakura.mask.include"
+-#include "bitmasks/tomoyo/tomoyo.mask.include"
+
+ /*
+ * $@Dj?tDj5A(J
+diff --git a/oneko.man b/oneko.man
+index d6d2b40..0e9b09d 100644
+--- a/oneko.man
++++ b/oneko.man
+@@ -3,9 +3,7 @@
+ The program oneko creates a cute cat chasing around your mouse cursor.
+ .SH SYNOPSIS
+ .B oneko
+-[\fB-help\fP] [\fB-tora\fP]
+-[\fB-dog\fP] [\fB-bsd_daemon\fP] [\fB-bsd\fP]
+-[\fB-sakura\fP] [\fB-tomoyo\fP]
++[\fB-help\fP] [\fB-tora\fP] [\fB-dog\fP]
+ [\fB-time\fP \fIn\fP] [\fB-speed\fP \fIn\fP] [\fB-idle\fP \fIn\fP]
+ [\fB-name\fP \fIname\fP] [\fB-towindow\fP] [\fB-toname\fP \fIname\fP]
+ [\fB-tofocus\fP]
+@@ -28,19 +26,6 @@ I don't know how to say it in English.
+ .B -dog
+ Runs a dog instead of a cat.
+ .TP
+-.B -bsd_daemon
+-Runs a 4.3BSD daemon instead of a cat.
+-.TP
+-.B -bsd
+-Same as
+-.B -bsd_daemon.
+-.TP
+-.B -sakura
+-Runs Sakura Kinomoto instead of a cat.
+-.TP
+-.B -tomoyo
+-Runs Tomoyo Daidouji instead of a cat.
+-.TP
+ .BI \-time " interval"
+ Sets interval timer which determine intervals for cat animation.
+ Default value is 125000 and unit is micro-second. Smaller value makes cat
+@@ -129,12 +114,6 @@ Background color.
+ While this program uses XGetDefault, be sure to use "neko.resouce" form.
+ If you run this program as "tora", by hard of soft link, the \fB-tora\fP
+ option is enabled by default.
+-.PP
+-BSD Daemon Copyright 1988 by Marshall Kirk McKusick. All Rights Reserved.
+-.PP
+-Sakura Kinomoto and Tomoyo Daidouji are characters in a comic strip
+-"CARDCAPTOR SAKURA" (CLAMP, Kodansha), with the sanction indicated in
+-CLAMP SCHOOL WEB CAMPUS (http://www.clamp.f-2.co.jp/).
+ .SH AUTHOR
+ Original
+ .I xneko
+diff --git a/oneko.man.jp b/oneko.man.jp
+index 96f9e3a..9a885df 100644
+--- a/oneko.man.jp
++++ b/oneko.man.jp
+@@ -3,9 +3,7 @@
+ $@$+$o$$$$G-$,%^%&%9%+!<%=%k$rDI$$$+$1$k%W%m%0%i%`(J
+ .SH $@=q<0(J
+ .B oneko
+-[\fB-help\fP] [\fB-tora\fP]
+-[\fB-dog\fP] [\fB-bsd_daemon\fP] [\fB-bsd\fP]
+-[\fB-sakura\fP] [\fB-tomoyo\fP]
++[\fB-help\fP] [\fB-tora\fP] [\fB-dog\fP]
+ [\fB-time\fP \fIn\fP] [\fB-speed\fP \fIn\fP] [\fB-idle\fP \fIn\fP]
+ [\fB-name\fP \fIname\fP] [\fB-towindow\fP] [\fB-toname\fP \fIname\fP]
+ [\fB-tofocus\fP]
+@@ -27,19 +25,6 @@
+ .B -dog
+ $@G-$KBe$o$j8$$,Av$j$^$9!#(J
+ .TP
+-.B -bsd_daemon
+-$@G-$KBe$o$j(J 4.3BSD $@%G!<%b%s$,Av$j$^$9!#(J
+-.TP
+-.B -bsd
+-.B -bsd_daemon
+-$@$KF1$8!#(J
+-.TP
+-.B -sakura
+-$@G-$KBe$o$jLZG7K\:y$,Av$j$^$9!#(J
+-.TP
+-.B -tomoyo
+-$@G-$KBe$o$jBgF;;{CN@$$,Av$j$^$9!#(J
+-.TP
+ .BI \-time " interval"
+ $@G-%"%K%a!<%7%g%s$N4V3V$r;XDj$7$^$9!#(J
+ $@%G%U%)%k%HCM$O(J 125000 $@$GC10L$O%^%$%/%mIC$G$9!#(J
+@@ -127,15 +112,6 @@ SHAPE extension $@$r;H$$$?$/$J$$>l9g(J True $@$H$7$^$9!#(J
+ $@$r;H$&$3$H$KN10U$7$F$/$@$5$$!#(J
+ $@%O!<%I%j%s%/$"$k$$$O%=%U%H%j%s%/$K$h$C$F!"$3$N%W%m%0%i%`$r(J tora $@$H$7$F(J
+ $@5/F0$7$?>l9g$K$O!"%G%U%)%k%H$G(J \fB-tora\fP $@%*%W%7%g%s$,M-8z$H$J$j$^$9!#(J
+-.PP
+-BSD $@%G!<%b%s$O(J Marshall Kirk McKusick $@;a$NCx:nJ*$G$9!#$9$Y$F$N8"Mx$O(J
+-$@J];}$5$l$F$$$^$9!#(J
+-BSD Daemon Copyright 1988 by Marshall Kirk McKusick. All Rights Reserved.
+-.PP
+-$@LZG7K\:y$*$h$SBgF;;{CN@$$OL!2h!X%+!<%I%-%c%W%?!<$5$/$i!Y!J(JCLAMP, $@9VCL(J
+-$@<R!K$N%-%c%i%/%?!<$G$"$j!"(JCLAMP $@3X1`EE;RJ,9;(J
+-$@!J(Jhttp://www.clamp.f-2.co.jp/$@!K$K<($5$l$F$$$kMFG'$N$b$H$KMxMQ$5$l$F$$(J
+-$@$^$9!#(J
+ .SH $@:n<T(J
+ $@%*%j%8%J%k$N(J
+ .I xneko
diff --git a/gnu/packages/patches/proot-test-fhs.patch b/gnu/packages/patches/proot-test-fhs.patch
deleted file mode 100644
index d3896addd6..0000000000
--- a/gnu/packages/patches/proot-test-fhs.patch
+++ /dev/null
@@ -1,98 +0,0 @@
-The test suite of PRoot makes many FHS assumptions, such as assuming
-that /bin, /bin/true, and /usr exist. This patch fixes these assumptions.
-
---- source/tests/GNUmakefile 2017-05-11 15:26:36.899115484 +0200
-+++ source/tests/GNUmakefile 2017-05-11 15:26:46.143063166 +0200
-@@ -121,7 +121,7 @@ $(ROOTFS_DIR):
- setup: $(ROOTFS_BIN)
-
- $(ROOTFS)/bin/abs-true:
-- @ln -fs /bin/true $@
-+ @ln -fs `which true` $@
-
- $(ROOTFS)/bin/rel-true:
- @ln -fs ./true $@
-
---- source/tests/test-d2175fc3.sh 2017-05-11 15:36:53.727617010 +0200
-+++ source/tests/test-d2175fc3.sh 2017-05-11 15:37:10.155523637 +0200
-@@ -2,8 +2,8 @@ if [ ! -x ${ROOTFS}/bin/readlink ] || [
- exit 125;
- fi
-
--${PROOT} -r ${ROOTFS} /bin/readlink /bin/abs-true | grep '^/bin/true$'
-+${PROOT} -r ${ROOTFS} /bin/readlink /bin/abs-true | grep "`which true`"
- ${PROOT} -r ${ROOTFS} /bin/readlink /bin/rel-true | grep '^\./true$'
-
--${PROOT} -b /:/host-rootfs -r ${ROOTFS} /bin/readlink /bin/abs-true | grep '^/bin/true$'
-+${PROOT} -b /:/host-rootfs -r ${ROOTFS} /bin/readlink /bin/abs-true | grep "`which true`"
- ${PROOT} -b /:/host-rootfs -r ${ROOTFS} /bin/readlink /bin/rel-true | grep '^./true$'
-
---- source/tests/test-d1be631a.sh 2017-05-11 15:41:36.458008715 +0200
-+++ source/tests/test-d1be631a.sh 2017-05-11 15:41:38.921994686 +0200
-@@ -1,4 +1,4 @@
--if [ -z `which mknod`] || [ `id -u` -eq 0 ]; then
-+if [ -z `which mknod` ] || [ `id -u` -eq 0 ]; then
- exit 125;
- fi
-
---- source/tests/test-5bed7141.c 2017-05-11 15:34:23.088472743 +0200
-+++ source/tests/test-5bed7141.c 2017-05-11 15:34:27.052450235 +0200
-@@ -80,7 +80,7 @@ int main(int argc, char *argv[])
- exit(EXIT_FAILURE);
-
- case 0: /* child */
-- status = chdir("/usr");
-+ status = chdir("/gnu");
- if (status < 0) {
- perror("chdir");
- exit(EXIT_FAILURE);
-
---- a/tests/test-092c5e26.sh
-+++ b/tests/test-092c5e26.sh
-@@ -24,7 +24,7 @@ fi
-
- unset LD_LIBRARY_PATH
-
--env PROOT_FORCE_FOREIGN_BINARY=1 PATH=/tmp:/bin:/usr/bin ${PROOT} -r ${ROOTFS} -q echo ${TMP} | grep "^-U LD_LIBRARY_PATH ${EXTRA}-0 /bin/argv0 /bin/argv0 ${TMP_ABS}$"
-+env PROOT_FORCE_FOREIGN_BINARY=1 PATH=/tmp:/bin:/usr/bin:$(dirname $(which echo)) ${PROOT} -r ${ROOTFS} -q echo ${TMP} | grep "^-U LD_LIBRARY_PATH ${EXTRA}-0 /bin/argv0 /bin/argv0 ${TMP_ABS}$"
- env PROOT_FORCE_FOREIGN_BINARY=1 ${PROOT} -r ${ROOTFS} -q echo ${TMP_ABS} | grep "^-U LD_LIBRARY_PATH ${EXTRA}-0 /bin/argv0 /bin/argv0 ${TMP_ABS}$"
-
- cat > ${ROOTFS}/${TMP_ABS} <<EOF
-@@ -34,7 +34,7 @@ chmod +x ${ROOTFS}/${TMP_ABS}
-
- # Valgrind prepends "/bin/sh" in front of foreign binaries.
- if ! $(echo ${PROOT} | grep -q valgrind); then
-- env PATH=/tmp:/bin:/usr/bin ${PROOT} -r ${ROOTFS} -q echo ${TMP} | grep "^-U LD_LIBRARY_PATH -0 ${TMP} ${TMP_ABS}$"
-+ env PATH=/tmp:/bin:/usr/bin:$(dirname $(which echo)) ${PROOT} -r ${ROOTFS} -q echo ${TMP} | grep "^-U LD_LIBRARY_PATH -0 ${TMP} ${TMP_ABS}$"
- ${PROOT} -r ${ROOTFS} -q echo ${TMP_ABS} | grep "^-U LD_LIBRARY_PATH -0 ${TMP_ABS} ${TMP_ABS}$"
- fi
-
-diff --git a/tests/test-5467b986.sh b/tests/test-5467b986.sh
-index c6ac71a..f616f1e 100644
---- a/tests/test-5467b986.sh
-+++ b/tests/test-5467b986.sh
-@@ -30,8 +30,8 @@ ${PROOT} -v -1 -b /tmp:/b -b /tmp:/a -r ${ROOTFS} fchdir_getcwd /b | grep '^/[ab
- ! ${PROOT} -w /bin -r ${ROOTFS} fchdir_getcwd true
- [ $? -eq 0 ]
-
--${PROOT} -v -1 -w /usr -r / ${ROOTFS}/bin/chdir_getcwd share | grep '^/usr/share$'
--${PROOT} -v -1 -w /usr -r / ${ROOTFS}/bin/fchdir_getcwd share | grep '^/usr/share$'
-+${PROOT} -v -1 -w /gnu -r / ${ROOTFS}/bin/chdir_getcwd store | grep '^/gnu/store$'
-+${PROOT} -v -1 -w /gnu -r / ${ROOTFS}/bin/fchdir_getcwd store | grep '^/gnu/store$'
-
--(cd /; ${PROOT} -v -1 -w usr -r / ${ROOTFS}/bin/chdir_getcwd share | grep '^/usr/share$')
--(cd /; ${PROOT} -v -1 -w usr -r / ${ROOTFS}/bin/fchdir_getcwd share | grep '^/usr/share$')
-+(cd /; ${PROOT} -v -1 -w gnu -r / ${ROOTFS}/bin/chdir_getcwd store | grep '^/gnu/store$')
-+(cd /; ${PROOT} -v -1 -w gnu -r / ${ROOTFS}/bin/fchdir_getcwd store | grep '^/gnu/store$')
-
---- a/tests/test-c15999f9.sh
-+++ b/tests/test-c15999f9.sh
-@@ -5,7 +5,7 @@ fi
- TMP=/tmp/$(mcookie)
- mkdir ${TMP}
-
--${PROOT} -b /bin/true:${TMP}/true /bin/true
-+${PROOT} -b `which true`:${TMP}/true `which true`
- ! test -e ${TMP}/true
- [ $? -eq 0 ]
-
diff --git a/gnu/packages/patches/python-seaborn-kde-test.patch b/gnu/packages/patches/python-seaborn-kde-test.patch
new file mode 100644
index 0000000000..f300dffc6f
--- /dev/null
+++ b/gnu/packages/patches/python-seaborn-kde-test.patch
@@ -0,0 +1,36 @@
+This patch is an excerpt of this upstream commit:
+
+ commit 0a24478a550132f1882e5be5f5dbc0fc446a8a6c
+ Author: Michael Waskom <mwaskom@users.noreply.github.com>
+ Date: Mon Dec 21 18:44:58 2020 -0500
+
+ Raise minimal supported Python to 3.7 and bump requirements (#2396)
+
+It fixes the failure of 'test_weights'.
+
+--- a/seaborn/tests/test_distributions.py
++++ b/seaborn/tests/test_distributions.py
+@@ -709,21 +708,17 @@ class TestKDEPlotUnivariate:
+ integral = integrate.trapz(ydata, np.log10(xdata))
+ assert integral == pytest.approx(1)
+
+- @pytest.mark.skipif(
+- LooseVersion(scipy.__version__) < "1.2.0",
+- reason="Weights require scipy >= 1.2.0"
+- )
+ def test_weights(self):
+
+ x = [1, 2]
+ weights = [2, 1]
+
+- ax = kdeplot(x=x, weights=weights)
++ ax = kdeplot(x=x, weights=weights, bw_method=.1)
+
+ xdata, ydata = ax.lines[0].get_xydata().T
+
+- y1 = ydata[np.argwhere(np.abs(xdata - 1).min())]
+- y2 = ydata[np.argwhere(np.abs(xdata - 2).min())]
++ y1 = ydata[np.abs(xdata - 1).argmin()]
++ y2 = ydata[np.abs(xdata - 2).argmin()]
+
+ assert y1 == pytest.approx(2 * y2)
diff --git a/gnu/packages/patches/tor-fix-build-with-gcc-7.patch b/gnu/packages/patches/tor-fix-build-with-gcc-7.patch
deleted file mode 100644
index 5481695a63..0000000000
--- a/gnu/packages/patches/tor-fix-build-with-gcc-7.patch
+++ /dev/null
@@ -1,30 +0,0 @@
-From 810c0616d59809b89f5144d4afdbf70391df7a7f Mon Sep 17 00:00:00 2001
-From: =?UTF-8?q?Alexander=20F=C3=A6r=C3=B8y?= <ahf@torproject.org>
-Date: Thu, 10 Jun 2021 20:04:13 +0000
-Subject: [PATCH] Fix compilation on OpenSUSE.
-
-This patch fixes a build error with GCC 7.x which doesn't seem to accept
-const int's as constants in macro initialization.
-
-See: tpo/core/tor#40410
----
- src/feature/dirclient/dirclient.c | 3 +--
- 1 file changed, 1 insertion(+), 2 deletions(-)
-
-diff --git a/src/feature/dirclient/dirclient.c b/src/feature/dirclient/dirclient.c
-index 74c6452061..a2b20621a0 100644
---- a/src/feature/dirclient/dirclient.c
-+++ b/src/feature/dirclient/dirclient.c
-@@ -1907,8 +1907,7 @@ dir_client_decompress_response_body(char **bodyp, size_t *bodylenp,
- /* If we're pretty sure that we have a compressed directory, and
- * we didn't manage to uncompress it, then warn and bail. */
- if (!plausible && !new_body) {
-- const int LOG_INTERVAL = 3600;
-- static ratelim_t warning_limit = RATELIM_INIT(LOG_INTERVAL);
-+ static ratelim_t warning_limit = RATELIM_INIT(60 * 60);
- log_fn_ratelim(&warning_limit, LOG_WARN, LD_HTTP,
- "Unable to decompress HTTP body (tried %s%s%s, on %s).",
- description1,
---
-GitLab
-
diff --git a/gnu/packages/patches/transmission-remote-gtk-fix-appstream.patch b/gnu/packages/patches/transmission-remote-gtk-fix-appstream.patch
new file mode 100644
index 0000000000..e7d4c8bb86
--- /dev/null
+++ b/gnu/packages/patches/transmission-remote-gtk-fix-appstream.patch
@@ -0,0 +1,61 @@
+Fix a bug related to AppStream:
+
+https://github.com/transmission-remote-gtk/transmission-remote-gtk/issues/95
+
+Patch adapted from upstream source repository:
+
+https://github.com/transmission-remote-gtk/transmission-remote-gtk/commit/89259ff90c703c7fe6768b5317803b9aa5b5ab8c
+
+From 89259ff90c703c7fe6768b5317803b9aa5b5ab8c Mon Sep 17 00:00:00 2001
+From: Alan <alan-github@eth0.org.uk>
+Date: Tue, 17 Nov 2020 01:26:22 +0000
+Subject: [PATCH] fix screenshot and add OARS rating so appdata validates
+
+---
+ README.md | 5 ++---
+ data/io.github.TransmissionRemoteGtk.appdata.xml.in | 5 +++--
+ 2 files changed, 5 insertions(+), 5 deletions(-)
+
+diff --git a/README.md b/README.md
+index 4306cc7..d38314b 100644
+--- a/README.md
++++ b/README.md
+@@ -6,13 +6,12 @@ the Transmission BitTorrent client, using its HTTP RPC protocol.
+
+ # DEPENDENCIES
+
+-The following packages are required dependencies:
++The following packages are required dependencies (debian/ubuntu).
+
+ ```bash
+-autoconf-archive appstream-glib intltool
++libgtk-3-dev automake autoconf gcc libgeoip-dev gettext autoconf-archive libappstream-dev appstream-util libcurl4-openssl-dev libjson-glib-dev
+ ```
+
+-
+ # BUILDING
+
+ Optionally install `libgeoip` and its headers to see the country
+diff --git a/data/io.github.TransmissionRemoteGtk.appdata.xml.in b/data/io.github.TransmissionRemoteGtk.appdata.xml.in
+index fc35a8f..1476c2a 100644
+--- a/data/io.github.TransmissionRemoteGtk.appdata.xml.in
++++ b/data/io.github.TransmissionRemoteGtk.appdata.xml.in
+@@ -7,6 +7,7 @@
+ <translation type="gettext">transmission-remote-gtk</translation>
+ <name>Transmission Remote</name>
+ <developer_name>Transmission Remote Gtk Team</developer_name>
++ <content_rating type="oars-1.1" />
+ <summary>Remotely manage the Transmission BitTorrent client</summary>
+ <url type="homepage">https://github.com/transmission-remote-gtk/transmission-remote-gtk</url>
+ <url type="bugtracker">https://github.com/transmission-remote-gtk/transmission-remote-gtk/issues</url>
+@@ -23,8 +24,8 @@
+ </ul>
+ </description>
+ <screenshots>
+- <screenshot height="576" width="1024" type="default">
+- <image>http://eth0.org.uk/~alan/transmission-remote-gtk-1.1.1.png</image>
++ <screenshot height="512" width="973" type="default">
++ <image>https://community.linuxmint.com/img/screenshots/transmission-remote-gtk.png</image>
+ </screenshot>
+ </screenshots>
+ <releases>
diff --git a/gnu/packages/patches/upower-builddir.patch b/gnu/packages/patches/upower-builddir.patch
index 51295f2076..a61d387faf 100644
--- a/gnu/packages/patches/upower-builddir.patch
+++ b/gnu/packages/patches/upower-builddir.patch
@@ -1,3 +1,8 @@
+From 27ae011b31d831752b97eb209bc2b2206fcf40f7 Mon Sep 17 00:00:00 2001
+From: Tobias Geerinckx-Rice <me@tobias.gr>
+Date: Mon, 28 Jun 2021 11:58:47 +0200
+Subject: [PATCH] gnu: upower: Fix build directory.
+
Remove explicit set of UPOWER_CONF_FILE_NAME in up-self-test.c;
instead the harness should set it. In Guix we set it explicitly; the
right thing is to use AM_TEST_ENVIRONMENT and regenerate the
@@ -6,13 +11,17 @@ some things, so we patch the Makefile.in instead.
Also fix to not try to create /var/lib/upower if /var isn't writable.
-Patch by Andy Wingo <wingo@igalia.com>
-Reduced to upower 0.99.10 by Tobias Geerinckx-Rice <me@tobias.gr>
+Based on a patch by Andy Wingo <wingo@igalia.com>
+---
+ src/Makefile.am | 2 +-
+ 1 file changed, 1 insertion(+), 1 deletion(-)
---- upower-0.99.2.orig/src/Makefile.in 2014-12-18 10:32:01.000000000 +0100
-+++ upower-0.99.2/src/Makefile.in 2015-04-04 19:49:28.020843678 +0200
-@@ -1789,7 +1790,7 @@
- @HAVE_SYSTEMDSYSTEMUNITDIR_TRUE@ @sed -e "s|\@libexecdir\@|$(libexecdir)|" $< > $@
+diff --git a/src/Makefile.am b/src/Makefile.am
+index 3400139..f51ee89 100644
+--- a/src/Makefile.am
++++ b/src/Makefile.am
+@@ -181,7 +181,7 @@ $(systemdservice_DATA): $(systemdservice_in_files) Makefile
+ endif
install-data-hook:
- if test -w $(DESTDIR)$(prefix)/; then \
@@ -20,3 +29,6 @@ Reduced to upower 0.99.10 by Tobias Geerinckx-Rice <me@tobias.gr>
mkdir -p $(DESTDIR)$(historydir); \
fi
+--
+2.32.0
+
diff --git a/gnu/packages/patches/vtk-8-fix-freetypetools-build-failure.patch b/gnu/packages/patches/vtk-8-fix-freetypetools-build-failure.patch
new file mode 100644
index 0000000000..6988e65872
--- /dev/null
+++ b/gnu/packages/patches/vtk-8-fix-freetypetools-build-failure.patch
@@ -0,0 +1,36 @@
+This fixes a build failure in VTK when building against recent versions
+of freetype.
+
+ https://gitlab.kitware.com/vtk/vtk/-/merge_requests/7432
+
+Patch by Ben Boeckel <ben.boeckel@kitware.com>
+
+Subject: [PATCH] vtkFreeTypeTools: avoid using an internal macro
+
+This macro has been removed upstream as it was always intended to be
+private.
+---
+ Rendering/FreeType/vtkFreeTypeTools.cxx | 7 ++-----
+ 1 file changed, 2 insertions(+), 5 deletions(-)
+
+diff --git a/Rendering/FreeType/vtkFreeTypeTools.cxx b/Rendering/FreeType/vtkFreeTypeTools.cxx
+index c54289dc60..03b899c4da 100644
+--- a/Rendering/FreeType/vtkFreeTypeTools.cxx
++++ b/Rendering/FreeType/vtkFreeTypeTools.cxx
+@@ -387,11 +387,8 @@ FTC_CMapCache* vtkFreeTypeTools::GetCMapCache()
+ }
+
+ //----------------------------------------------------------------------------
+-FT_CALLBACK_DEF(FT_Error)
+-vtkFreeTypeToolsFaceRequester(FTC_FaceID face_id,
+- FT_Library lib,
+- FT_Pointer request_data,
+- FT_Face* face)
++static FT_Error vtkFreeTypeToolsFaceRequester(
++ FTC_FaceID face_id, FT_Library lib, FT_Pointer request_data, FT_Face* face)
+ {
+ #if VTK_FTFC_DEBUG_CD
+ printf("vtkFreeTypeToolsFaceRequester()\n");
+--
+2.30.1
+
diff --git a/gnu/packages/patches/ytfzf-programs.patch b/gnu/packages/patches/ytfzf-programs.patch
new file mode 100644
index 0000000000..005ce2cf99
--- /dev/null
+++ b/gnu/packages/patches/ytfzf-programs.patch
@@ -0,0 +1,643 @@
+From 3f1eaf5a1645b28ca18cfa028417dc225b7a557f Mon Sep 17 00:00:00 2001
+From: Raghav Gururajan <rg@raghavgururajan.name>
+Date: Mon, 5 Jul 2021 06:45:49 -0400
+Subject: [PATCH] Modify the strings of referenced programs.
+
+Pattern the strings of referenced programs, so that they can be easily
+substituted with absolute paths using a custom-phase.
+
+Co-authored-by: jgart <jgart@dismail.de>
+---
+ ytfzf | 198 +++++++++++++++++++++++++++++-----------------------------
+ 1 file changed, 99 insertions(+), 99 deletions(-)
+
+diff --git a/ytfzf b/ytfzf
+index f4d2e0d..e8bb60b 100755
+--- a/ytfzf
++++ b/ytfzf
+@@ -49,17 +49,17 @@ cache_dir=${YTFZF_CACHE-${cache_dir-$HOME/.cache/ytfzf}}
+ #video type preference (mp4/1080p, mp4/720p, etc..)
+ video_pref=${YTFZF_PREF-${video_pref-}}
+ #the menu to use instead of fzf when -D is specified
+-external_menu=${YTFZF_EXTMENU-${external_menu-dmenu -i -l 30 -p Search:}}
++external_menu=${YTFZF_EXTMENU-${external_menu-@dmenu@ -i -l 30 -p Search:}}
+ #number of columns (characters on a line) the external menu can have
+ #necessary for formatting text for external menus
+ external_menu_len=${YTFZF_EXTMENU_LEN-${external_menu_len-220}}
+ #player settings (players need to support streaming with youtube-dl)
+ #player to use for watching the video
+-video_player=${YTFZF_PLAYER-${video_player-mpv}}
++video_player=${YTFZF_PLAYER-${video_player-@mpv@}}
+ #if YTFZF_PREF is specified, use this player instead
+-video_player_format=${YTFZF_PLAYER_FORMAT-${video_player_format-mpv --ytdl-format=}}
++video_player_format=${YTFZF_PLAYER_FORMAT-${video_player_format-@mpv@ --ytdl-format=}}
+ #player to use for audio only
+-audio_player=${YTFZF_AUDIO_PLAYER-${audio_player-mpv --no-video}}
++audio_player=${YTFZF_AUDIO_PLAYER-${audio_player-@mpv@ --no-video}}
+ #the command to use for displaying thumbnails
+ thumb_disp_method=${YTFZF_THUMB_DISP_METHOD-${thumb_disp_method-ueberzug}}
+ #Storing the argument and location for autogenerated subtitles
+@@ -85,8 +85,8 @@ subscriptions_file=${subscriptions_file-$config_dir/subscriptions}
+ #> stores the pid of running ytfzf sessions
+ pid_file="$cache_dir/.pid"
+ #> make folders that don't exist
+-[ -d "$cache_dir" ] || mkdir -p "$cache_dir"
+-[ -d "$thumb_dir" ] || mkdir -p "$thumb_dir"
++[ -d "$cache_dir" ] || @mkdir@ -p "$cache_dir"
++[ -d "$thumb_dir" ] || @mkdir@ -p "$thumb_dir"
+
+ #> config settings
+ #list of shortcuts to use in fzf
+@@ -177,12 +177,12 @@ dep_ck () {
+ done
+ unset Dep
+ }
+-dep_ck "jq" "youtube-dl" "curl"
++dep_ck "@jq@" "@youtube-dl@" "@curl@"
+
+
+ #only check for mpv if $YTFZF_PLAYER is set to it
+ #don't check $YTFZF_PLAYER as it could be multiple commands
+-[ "$video_player" = "mpv" ] && dep_ck "mpv"
++[ "$video_player" = "@mpv@" ] && dep_ck "@mpv@"
+
+ ############################
+ # Help Texts #
+@@ -326,8 +326,8 @@ print_info () {
+ }
+
+ print_error () {
+- [ $ext_menu_notifs -eq 1 ] && notify-send "error" "$*" || printf "\033[31m$*\033[0m" >&2
+- [ $ext_menu_notifs -eq 1 ] && notify-send "Check for new versions and report at: https://github.com/pystardust/ytfzf\n" || printf "Check for new versions and report at: https://github.com/pystardust/ytfzf\n" >&2
++ [ $ext_menu_notifs -eq 1 ] && @notify-send@ "error" "$*" || printf "\033[31m$*\033[0m" >&2
++ [ $ext_menu_notifs -eq 1 ] && @notify-send@ "Check for new versions and report at: https://github.com/pystardust/ytfzf\n" || printf "Check for new versions and report at: https://github.com/pystardust/ytfzf\n" >&2
+ }
+
+ ############################
+@@ -398,12 +398,12 @@ format_fzf () {
+ format_menu () {
+ if [ "$is_ext_menu" -eq 0 ]; then
+ #dep_ck fzf here because it is only necessary to use here
+- dep_ck "fzf"
+- menu_command='column -t -s "$tab_space" | fzf -m --bind change:top --tabstop=1 --layout=reverse --delimiter="$tab_space" --nth=1,2 --expect="$shortcuts" $FZF_DEFAULT_OPTS'
++ dep_ck "@fzf@"
++ menu_command='@column@ -t -s "$tab_space" | @fzf@ -m --bind change:top --tabstop=1 --layout=reverse --delimiter="$tab_space" --nth=1,2 --expect="$shortcuts" $FZF_DEFAULT_OPTS'
+ format_fzf
+ else
+ # Dmenu doesn't render tabs so removing it
+- menu_command='tr -d "$tab_space" | '"$external_menu"
++ menu_command='@tr@ -d "$tab_space" | '"$external_menu"
+ format_ext_menu
+ fi
+ }
+@@ -461,13 +461,13 @@ ID="ytfzf-ueberzug"
+ WIDTH=$FZF_PREVIEW_COLUMNS
+ HEIGHT=$FZF_PREVIEW_LINES
+ start_ueberzug () {
+- [ -e $FIFO ] || { mkfifo "$FIFO" || exit 1 ; }
+- ueberzug layer --parser json --silent < "$FIFO" &
++ [ -e $FIFO ] || { @mkfifo@ "$FIFO" || exit 1 ; }
++ @ueberzug@ layer --parser json --silent < "$FIFO" &
+ exec 3>"$FIFO"
+ }
+ stop_ueberzug () {
+ exec 3>&-
+- rm "$FIFO" > /dev/null 2>&1
++ @rm@ "$FIFO" > /dev/null 2>&1
+ }
+
+ preview_img () {
+@@ -476,12 +476,12 @@ preview_img () {
+ shorturl=${args##*${tab_space}|}
+ shorturl="${shorturl%% *}"
+
+- json_obj=$(printf "%s" "$videos_json" | jq '.[]|select( .videoID == "'"$shorturl"'")')
++ json_obj=$(printf "%s" "$videos_json" | @jq@ '.[]|select( .videoID == "'"$shorturl"'")')
+
+
+ IFS=$tab_space read -r title channel duration views date description <<-EOF
+ $(
+- printf "%s" "$json_obj" | jq -r \
++ printf "%s" "$json_obj" | @jq@ -r \
+ '
+ [.title,.channel,.duration,.views,.date,.description]|@tsv
+ '
+@@ -530,31 +530,31 @@ preview_img () {
+ } > "$FIFO" ;;
+ catimg)
+ printf "\n"
+- catimg -w "$((thumb_width * 2))" "$IMAGE" ;;
++ @catimg@ -w "$((thumb_width * 2))" "$IMAGE" ;;
+ jp2a)
+ printf "\n"
+- jp2a --size="${thumb_width}x$((thumb_height / 2))" --colors --color-depth=24 "$IMAGE" ;;
++ @jp2a@ --size="${thumb_width}x$((thumb_height / 2))" --colors --color-depth=24 "$IMAGE" ;;
+ jp2a-8)
+ printf "\n"
+- jp2a --size="${thumb_width}x$((thumb_height / 2))" --colors --color-depth=8 "$IMAGE" ;;
++ @jp2a@ --size="${thumb_width}x$((thumb_height / 2))" --colors --color-depth=8 "$IMAGE" ;;
+ jp2a-4)
+ printf "\n"
+- jp2a --size="${thumb_width}x$((thumb_height / 2))" --colors --color-depth=4 "$IMAGE" ;;
++ @jp2a@ --size="${thumb_width}x$((thumb_height / 2))" --colors --color-depth=4 "$IMAGE" ;;
+ jp2a-gray|jp2a-grey)
+ printf "\n"
+- jp2a --size="${thumb_width}x$((thumb_height / 2))" "$IMAGE" ;;
++ @jp2a@ --size="${thumb_width}x$((thumb_height / 2))" "$IMAGE" ;;
+ chafa)
+ printf "\n"
+- chafa --size="${thumb_width}x${thumb_height}" "$IMAGE" ;;
++ @chafa@ --size="${thumb_width}x${thumb_height}" "$IMAGE" ;;
+ chafa-gray|chafa-grey)
+ printf "\n"
+- chafa --size="${thumb_width}x${thumb_height}" --colors=2 "$IMAGE" ;;
++ @chafa@ --size="${thumb_width}x${thumb_height}" --colors=2 "$IMAGE" ;;
+ chafa-4)
+ printf "\n"
+- chafa --size="${thumb_width}x${thumb_height}" --colors=16 "$IMAGE" ;;
++ @chafa@ --size="${thumb_width}x${thumb_height}" --colors=16 "$IMAGE" ;;
+ chafa-8)
+ printf "\n"
+- chafa --size="${thumb_width}x${thumb_height}" --colors=256 "$IMAGE" ;;
++ @chafa@ --size="${thumb_width}x${thumb_height}" --colors=256 "$IMAGE" ;;
+ custom)
+ if ! function_exists "handle_display_img"; then
+ printf "\033[031mERROR[#07]: \033[0m\033[1mhandle_display_img\033[0m is not defined" >&2
+@@ -585,20 +585,20 @@ download_thumbnails () {
+ if [ "$thumbnail_quality" -eq 1 ]; then
+ image_download () {
+ # higher quality images
+- curl -s "$Url" -G --data-urlencode "sqp=" > "$thumb_dir/$Name.png"
++ @curl@ -s "$Url" -G --data-urlencode "sqp=" > "$thumb_dir/$Name.png"
+ }
+ else
+ image_download () {
+- curl -s "$Url" > "$thumb_dir/$Name.png"
++ @curl@ -s "$Url" > "$thumb_dir/$Name.png"
+ }
+ fi
+
+ print_info "Downloading Thumbnails...\n"
+ thumb_urls=$(printf "%s" "$*" |\
+- jq -r '.[]|[.thumbs,.videoID]|@tsv' )
++ @jq@ -r '.[]|[.thumbs,.videoID]|@tsv' )
+
+ while IFS=$tab_space read -r Url Name; do
+- sleep 0.001
++ @sleep@ 0.001
+ {
+ image_download
+ } &
+@@ -628,7 +628,7 @@ get_sp_filter () {
+ #another example is sort by filter + upload date filter only changes one character as well
+ if [ -n "$filter_id" ]; then
+ #gets the character in the filter_id that needs to be replaced if upload_date_filter is also given
+- upload_date_character=$(printf "%s" "$filter_id" | awk '{print substr($1, 8, 1)}')
++ upload_date_character=$(printf "%s" "$filter_id" | @awk@ '{print substr($1, 8, 1)}')
+ fi
+
+ #For each of these, if upload_date_character is unset, the filter_id should be the normal filter
+@@ -650,7 +650,7 @@ get_sp_filter () {
+ if [ -n "$upload_date_character" ]; then
+ #replaces the 8th character in the filter_id with the appropriate character
+ #the 8th character specifies the upload_date_filter
+- sp=$(printf "%s" "$filter_id" | sed 's/\(.\{7\}\)./\1'"$upload_date_character"'/')
++ sp=$(printf "%s" "$filter_id" | @sed@ 's/\(.\{7\}\)./\1'"$upload_date_character"'/')
+ #otherwise set it to the filter_id
+ else
+ sp=$filter_id
+@@ -660,15 +660,15 @@ get_sp_filter () {
+
+ get_yt_json () {
+ # scrapes the json embedded in the youtube html page
+- printf "%s" "$*" | sed -n '/var *ytInitialData/,$p' | tr -d '\n' |\
+- sed -E ' s_^.*var ytInitialData ?=__ ; s_;</script>.*__ ;'
++ printf "%s" "$*" | @sed@ -n '/var *ytInitialData/,$p' | @tr@ -d '\n' |\
++ @sed@ -E ' s_^.*var ytInitialData ?=__ ; s_;</script>.*__ ;'
+ }
+
+ get_yt_html () {
+ link=$1
+ query=$2
+ printf "%s" "$(
+- curl "$link" -s \
++ @curl@ "$link" -s \
+ -G --data-urlencode "search_query=$query" \
+ -G --data-urlencode "sp=$sp" \
+ -H 'Authority: www.youtube.com' \
+@@ -684,7 +684,7 @@ get_video_data () {
+ # outputs tab and pipe separated fields: title, channel, view count, video length, video upload date, and the video id/url
+ # from the videos_json
+ printf "%s" "$*" |\
+- jq -r '.[]| "\(.title)'"$tab_space"'|\(.channel)'"$tab_space"'|\(.views)'"$tab_space"'|\(.duration)'"$tab_space"'|\(.date)'"$tab_space"'|\(.videoID)"'
++ @jq@ -r '.[]| "\(.title)'"$tab_space"'|\(.channel)'"$tab_space"'|\(.views)'"$tab_space"'|\(.duration)'"$tab_space"'|\(.date)'"$tab_space"'|\(.videoID)"'
+ }
+
+ scrape_channel () {
+@@ -694,7 +694,7 @@ scrape_channel () {
+ channel_url=$*
+
+ # Converting channel title page url to channel video url
+- if ! printf "%s" "$channel_url" | grep -q '/videos *$'; then
++ if ! printf "%s" "$channel_url" | @grep@ -q '/videos *$'; then
+ channel_url=${channel_url%/featured}/videos
+ fi
+
+@@ -706,8 +706,8 @@ scrape_channel () {
+ fi
+
+ #gets the channel name from title of page
+- channel_name=$(printf "%s" "$yt_html" | grep -o '<title>.*</title>' |
+- sed \
++ channel_name=$(printf "%s" "$yt_html" | @grep@ -o '<title>.*</title>' |
++ @sed@ \
+ -e 's/ - YouTube//' \
+ -e 's/<\/\?title>//g' \
+ -e "s/&apos;/'/g" \
+@@ -723,7 +723,7 @@ scrape_channel () {
+
+ #gets a list of videos
+ videos_json=$(printf "%s" "$yt_json" |\
+- jq '[ .contents | ..|.gridVideoRenderer? |
++ @jq@ '[ .contents | ..|.gridVideoRenderer? |
+ select(. !=null) |
+ {
+ title: .title.runs[0].text,
+@@ -736,7 +736,7 @@ scrape_channel () {
+ }
+ ]')
+
+- videos_json=$(printf "%s" "$videos_json" | jq '.[0:'$sub_link_count']')
++ videos_json=$(printf "%s" "$videos_json" | @jq@ '.[0:'$sub_link_count']')
+ printf "%s\n" "$videos_json" >> "$tmp_video_json_file"
+ #checks if it's empty in case it was defined in a config function eg: on_get_search
+ [ -z "$videos_data" ] && videos_data=$(get_video_data "$videos_json")
+@@ -768,11 +768,11 @@ get_trending_url_data () {
+ scrape_pt () {
+ #gets a list of videos
+ pt_json=$(
+- curl \
++ @curl@ \
+ -s "https://sepiasearch.org/api/v1/search/videos" \
+ -G --data-urlencode "search=$*")
+ videos_json=$(printf "%s" "$pt_json" |\
+- jq '[ .data | .[] |
++ @jq@ '[ .data | .[] |
+ {
+ title: .name,
+ channel: .channel.displayName,
+@@ -829,7 +829,7 @@ scrape_yt () {
+ fi
+
+ #gets a list of videos
+- videos_json=$(printf "%s" "$yt_json" | jq '[ .contents|
++ videos_json=$(printf "%s" "$yt_json" | @jq@ '[ .contents|
+ ..|.videoRenderer? |
+ select(. !=null) |
+ {
+@@ -844,7 +844,7 @@ scrape_yt () {
+ }
+ ]')
+
+- playlist_json=$(printf "%s" "$yt_json" | jq '[ .contents|
++ playlist_json=$(printf "%s" "$yt_json" | @jq@ '[ .contents|
+ ..|.playlistRenderer? |
+ select(. !=null) |
+ {
+@@ -904,28 +904,28 @@ get_search_query () {
+ #> To select videos from videos_data
+ user_selection () {
+ #remove subscription separators
+- videos_data_clean=$(printf "%s" "$videos_data" | sed "/.*$tab_space$/d")
++ videos_data_clean=$(printf "%s" "$videos_data" | @sed@ "/.*$tab_space$/d")
+
+ #$selected_data is the video the user picked
+ #picks the first n videos
+ if [ "$select_all" -eq 1 ] ; then
+ selected_data=$videos_data_clean
+ elif [ "$auto_select" -eq 1 ] ; then
+- selected_data=$(printf "%s\n" "$videos_data_clean" | sed "${link_count}"q )
++ selected_data=$(printf "%s\n" "$videos_data_clean" | @sed@ "${link_count}"q )
+ #picks n random videos
+ elif [ "$random_select" -eq 1 ] ; then
+- selected_data=$(printf "%s\n" "$videos_data_clean" | posix_shuf | head -n${link_count})
++ selected_data=$(printf "%s\n" "$videos_data_clean" | posix_shuf | @head@ -n${link_count})
+ #posix_shuf, pick the first $link_count videos
+
+ #show thumbnail menu
+ elif [ "$show_thumbnails" -eq 1 ] ; then
+- dep_ck "ueberzug" "fzf"
++ dep_ck "@ueberzug@" "@fzf@"
+ export YTFZF_THUMB_DISP_METHOD="$thumb_disp_method"
+ [ "$thumb_disp_method" = "ueberzug" ] && start_ueberzug
+ #thumbnails only work in fzf, use fzf
+- menu_command="fzf -m --tabstop=1 --bind change:top --delimiter=\"$tab_space\" \
++ menu_command="@fzf@ -m --tabstop=1 --bind change:top --delimiter=\"$tab_space\" \
+ --nth=1,2 --expect='$shortcuts' $FZF_DEFAULT_OPTS \
+- --layout=reverse --preview \"sh $0 -U {}\" \
++ --layout=reverse --preview \"@sh@ $0 -U {}\" \
+ --preview-window \"$PREVIEW_SIDE:50%:noborder:wrap\""
+ selected_data=$( title_len=200 video_menu "$videos_data" )
+ [ "$thumb_disp_method" = "ueberzug" ] && stop_ueberzug
+@@ -951,10 +951,10 @@ handle_shortcuts () {
+ case $selected_key in
+ "$urls_shortcut") printf "%s\n" $selected_urls; return 1 ;;
+ "$title_shortcut")
+- printf "%s\n" "$selected_data" | awk -F " " '{print $1}'; return 1 ;;
++ printf "%s\n" "$selected_data" | @awk@ -F " " '{print $1}'; return 1 ;;
+ "$open_browser_shortcut")
+ for url in $selected_urls; do
+- nohup $BROWSER "$url" >/dev/null 2>&1
++ @nohup@ $BROWSER "$url" >/dev/null 2>&1
+ done
+ return 1 ;;
+ "$watch_shortcut") is_download=0; is_audio_only=0; return 0;;
+@@ -988,10 +988,10 @@ format_user_selection () {
+ 11) selected_urls=$selected_urls$new_line'https://www.youtube.com/watch?v='$surl ;;
+ 34) selected_urls=$selected_urls$new_line'https://www.youtube.com/playlist?list='$surl ;;
+ 36)
+- selected_urls=$selected_urls$new_line"$(printf "%s" "$videos_json" | jq '.[].url' | grep -F "$surl" | tr -d '"')" ;;
++ selected_urls=$selected_urls$new_line"$(printf "%s" "$videos_json" | @jq@ '.[].url' | @grep@ -F "$surl" | @tr@ -d '"')" ;;
+ *) continue ;;
+ esac
+- refined_selected_data=$refined_selected_data$new_line$(printf '%s' "$videos_data" | grep "|$surl" )
++ refined_selected_data=$refined_selected_data$new_line$(printf '%s' "$videos_data" | @grep@ "|$surl" )
+ done<<-EOF
+ $selected_data
+ EOF
+@@ -1014,9 +1014,9 @@ print_data () {
+ get_video_format () {
+ # select format if flag given
+ [ $show_format -eq 0 ] && return
+- formats=$(youtube-dl -F "$(printf "$selected_urls")")
+- line_number=$(printf "$formats" | grep -n '.*extension resolution.*' | cut -d: -f1)
+- quality=$(printf "$formats \n1 2 xAudio" | awk -v lineno=$line_number 'FNR > lineno {print $3}' | sort -n | awk -F"x" '{print $2 "p"}' | uniq | sed -e "s/Audiop/Audio/" -e "/^p$/d" | eval "$menu_command" | sed "s/p//g")
++ formats=$(@youtube-dl@ -F "$(printf "$selected_urls")")
++ line_number=$(printf "$formats" | @grep@ -n '.*extension resolution.*' | @cut@ -d: -f1)
++ quality=$(printf "$formats \n1 2 xAudio" | @awk@ -v lineno=$line_number 'FNR > lineno {print $3}' | @sort@ -n | @awk@ -F"x" '{print $2 "p"}' | @uniq@ | @sed@ -e "s/Audiop/Audio/" -e "/^p$/d" | eval "$menu_command" | @sed@ "s/p//g")
+ [ -z "$quality" ] && exit;
+ [ $quality = "Audio" ] && video_pref= && video_player="$audio_player" || video_pref="bestvideo[height=?$quality][vcodec!=?vp9]+bestaudio/best"
+
+@@ -1026,9 +1026,9 @@ get_video_format () {
+ get_sub_lang () {
+ if [ $auto_caption -eq 1 ]; then
+ #Gets the auto generated subs and stores them in a file
+- sub_list=$(youtube-dl --list-subs --write-auto-sub "$selected_urls" | sed '/Available subtitles/,$d' | awk '{print $1}' | sed '1d;2d;3d')
++ sub_list=$(@youtube-dl@ --list-subs --write-auto-sub "$selected_urls" | @sed@ '/Available subtitles/,$d' | @awk@ '{print $1}' | @sed@ '1d;2d;3d')
+ if [ -n "$sub_list" ]; then
+- [ -n "$selected_sub" ] || selected_sub=$(printf "$sub_list" | eval "$menu_command") && youtube-dl --sub-lang $selected_sub --write-auto-sub --skip-download "$selected_urls" -o /tmp/ytfzf && YTFZF_SUBT_NAME="--sub-file=/tmp/ytfzf.$selected_sub.vtt" || printf "Auto generated subs not available."
++ [ -n "$selected_sub" ] || selected_sub=$(printf "$sub_list" | eval "$menu_command") && @youtube-dl@ --sub-lang $selected_sub --write-auto-sub --skip-download "$selected_urls" -o /tmp/ytfzf && YTFZF_SUBT_NAME="--sub-file=/tmp/ytfzf.$selected_sub.vtt" || printf "Auto generated subs not available."
+ fi
+ unset sub_list
+ fi
+@@ -1046,10 +1046,10 @@ open_player () {
+ if [ $detach_player -eq 1 ]; then
+ if [ -z "$video_pref" ] || [ $is_audio_only -eq 1 ]; then
+ printf "Opening Player: %s\n" "$video_player $*"
+- setsid -f $video_player "$@" $YTFZF_SUBT_NAME >/dev/null 2>&1
++ @setsid@ -f $video_player "$@" $YTFZF_SUBT_NAME >/dev/null 2>&1
+ else
+ printf "Opening Player: %s\n" "$video_player_format$video_pref $*"
+- setsid -f $video_player_format"$video_pref" "$@" $YTFZF_SUBT_NAME >/dev/null 2>&1
++ @setsid@ -f $video_player_format"$video_pref" "$@" $YTFZF_SUBT_NAME >/dev/null 2>&1
+ fi
+ return
+ fi
+@@ -1064,9 +1064,9 @@ open_player () {
+ fi
+ elif [ $is_download -eq 1 ]; then
+ if [ -z "$video_pref" ]; then
+- youtube-dl "$@" "$YTFZF_SUBT_NAME"
++ @youtube-dl@ "$@" "$YTFZF_SUBT_NAME"
+ else
+- youtube-dl -f "$video_pref" "$@" $YTFZF_SUBT_NAME || video_pref= open_player "$@"
++ @youtube-dl@ -f "$video_pref" "$@" $YTFZF_SUBT_NAME || video_pref= open_player "$@"
+ fi
+ fi
+ }
+@@ -1087,7 +1087,7 @@ play_url () {
+ fi
+
+ #Delete the temp auto-gen subtitle file
+- [ $auto_caption -eq 1 ] && rm -f "${YTFZF_SUBT_NAME#*=}"
++ [ $auto_caption -eq 1 ] && @rm@ -f "${YTFZF_SUBT_NAME#*=}"
+
+ unset player_urls
+ }
+@@ -1102,7 +1102,7 @@ session_is_running () {
+ #> removes tmp files and clutter
+ clean_up () {
+ if ! session_is_running ; then
+- [ -d "$thumb_dir" ] && rm -r "$thumb_dir"
++ [ -d "$thumb_dir" ] && @rm@ -r "$thumb_dir"
+ : > "$pid_file"
+ function_exists "on_exit" && on_exit
+ fi
+@@ -1124,9 +1124,9 @@ save_before_exit () {
+ check_if_url () {
+ # to check if given input is a url
+ url_regex='^https\?://.*'
+- if printf "%s" "$1" | grep -q "$url_regex"; then
++ if printf "%s" "$1" | @grep@ -q "$url_regex"; then
+ is_url=1
+- selected_urls=$(printf "%s" "$1" | tr ' ' '\n')
++ selected_urls=$(printf "%s" "$1" | @tr@ ' ' '\n')
+ scrape="url"
+ else
+ is_url=0
+@@ -1139,10 +1139,10 @@ get_history () {
+ if [ "$enable_hist" -eq 1 ]; then
+ [ -e "$history_file" ] || : > "$history_file"
+ #gets history data in reverse order (makes it most recent to least recent)
+- hist_data=$( sed '1!G; h; $!d' "$history_file" )
++ hist_data=$( @sed@ '1!G; h; $!d' "$history_file" )
+ [ -z "$hist_data" ] && printf "History is empty!\n" >&2 && return 1;
+ #removes duplicate values from $history_data
+- videos_data=$(printf "%s" "$hist_data" | uniq )
++ videos_data=$(printf "%s" "$hist_data" | @uniq@ )
+ [ "$sort_videos_data" -eq 1 ] && videos_data="$(printf "%s" "$videos_data" | sort_video_data_fn)"
+ else
+ printf "History is not enabled. Please enable it to use this option (-H).\n" >&2;
+@@ -1177,10 +1177,10 @@ get_search_history () {
+ if [ "$enable_search_hist" -eq 1 ]; then
+ [ -e "$search_history_file" ] || : > "$search_history_file"
+ #gets history data in reverse order (makes it most recent to least recent)
+- hist_data=$( sed '1!G; h; $!d' "$search_history_file" )
++ hist_data=$( @sed@ '1!G; h; $!d' "$search_history_file" )
+ [ -z "$hist_data" ] && printf "Search history is empty!\n" >&2 && return 1;
+ #removes duplicate values from $history_data
+- search_history=$(printf "%s" "$hist_data" | uniq )
++ search_history=$(printf "%s" "$hist_data" | @uniq@ )
+ else
+ printf "Search history is not enabled. Please enable it to use this option (-q).\n" >&2;
+ exit 1;
+@@ -1190,7 +1190,7 @@ get_search_history () {
+
+ set_search_history () {
+ [ -z "$search_query" ] && return
+- [ $enable_search_hist -eq 1 ] && printf "%s\t%s\n" "$(date '+%Y-%m-%d %H:%M:%S')" "$search_query" >> "$search_history_file" ;
++ [ $enable_search_hist -eq 1 ] && printf "%s\t%s\n" "$(@date@ '+%Y-%m-%d %H:%M:%S')" "$search_query" >> "$search_history_file" ;
+ }
+
+ search_history_menu () {
+@@ -1200,15 +1200,15 @@ search_history_menu () {
+ #when using an external menu, the search history will be done there
+ choice=$( printf "%s\n" "$search_history" | eval "$external_menu" )
+ else
+- choice="$( printf "%s\n" "$search_history" | fzf --prompt="$search_history_prompt" --print-query --no-multi -d '\t' --with-nth=2.. --expect='alt-enter' --bind='tab:replace-query' )"
++ choice="$( printf "%s\n" "$search_history" | @fzf@ --prompt="$search_history_prompt" --print-query --no-multi -d '\t' --with-nth=2.. --expect='alt-enter' --bind='tab:replace-query' )"
+ fi
+
+ # first line is the fzf query (what the user types in fzf)
+ # second line is the fzf --expect key pressed
+ # third line is the search_history selection made
+- query="$( printf "%s" "$choice" | sed -n '1p' )"
+- key="$( printf "%s" "$choice" | sed -n '2p' )"
+- selection="$( printf "%s" "$choice" | sed -n '3p' )"
++ query="$( printf "%s" "$choice" | @sed@ -n '1p' )"
++ key="$( printf "%s" "$choice" | @sed@ -n '2p' )"
++ selection="$( printf "%s" "$choice" | @sed@ -n '3p' )"
+
+ # if no search history selection has been made
+ # and the user typed a query, use that instead
+@@ -1225,7 +1225,7 @@ search_history_menu () {
+ search_query="$query"
+ return;;
+ esac
+- search_query="$( printf "%s" "$selection" | awk -F'\t' '{printf "%s", $NF}' )"
++ search_query="$( printf "%s" "$selection" | @awk@ -F'\t' '{printf "%s", $NF}' )"
+ }
+
+ ! function_exists "send_select_video_notif" && send_select_video_notif () {
+@@ -1244,13 +1244,13 @@ search_history_menu () {
+
+ #if downloading, say Downloading not currently playing
+ [ $is_download -eq 1 ] && title="Downloading" || title="Currently playing"
+- notify-send "$title" "$message" -i "$video_thumb"
++ @notify-send@ "$title" "$message" -i "$video_thumb"
+
+ unset message video_thumb title
+ }
+
+ send_notify () {
+- videos_selected_count=$(printf "%s\n" "$*" | wc -l)
++ videos_selected_count=$(printf "%s\n" "$*" | @wc@ -l)
+ while IFS=$tab_space read -r video_title video_channel video_views video_duration video_date video_shorturl; do
+ send_select_video_notif
+ done << EOF
+@@ -1284,14 +1284,14 @@ if ! function_exists "data_sort_key"; then
+ sort_by="${5#|}"
+ sort_by="${sort_by#Streamed}"
+ #print the data that should be sorted by
+- printf "%d" "$(date -d "${sort_by}" '+%s')"
++ printf "%d" "$(@date@ -d "${sort_by}" '+%s')"
+ unset sort_by
+ }
+ fi
+ #the function to use for sorting
+ if ! function_exists "data_sort_fn"; then
+ data_sort_fn () {
+- sort -nr
++ @sort@ -nr
+ }
+ fi
+ sort_video_data_fn () {
+@@ -1300,7 +1300,7 @@ sort_video_data_fn () {
+ IFS="$tab_space"
+ #run the key function to get the value to sort by
+ printf "%s\t%s\n" "$(data_sort_key $line)" "$line"
+- done | data_sort_fn | cut -f2-
++ done | data_sort_fn | @cut@ -f2-
+ unset IFS line
+ }
+
+@@ -1314,19 +1314,19 @@ scrape_subscriptions () {
+ while IFS= read -r url; do
+ scrape_channel "$url" &
+ done <<-EOF
+- $( sed \
++ $( @sed@ \
+ -e "s/#.*//" \
+ -e "/^[[:space:]]*$/d" \
+ -e "s/[[:space:]]*//g" \
+ "$subscriptions_file")
+ EOF
+ wait
+- videos_json="$(cat "$tmp_video_json_file")"
++ videos_json="$(@cat@ "$tmp_video_json_file")"
+ export videos_json
+ if [ $sort_videos_data -eq 1 ]; then
+ videos_data=$(sort_video_data_fn < "$tmp_video_data_file")
+ else
+- videos_data=$(cat "$tmp_video_data_file")
++ videos_data=$(@cat@ "$tmp_video_data_file")
+ fi
+ }
+
+@@ -1346,11 +1346,11 @@ create_subs () {
+ : > "$config_dir/subscriptions"
+
+ # check how many subscriptions there are in the file
+- sublength=$( jq '. | length' < "$yt_sub_import_file" )
++ sublength=$( @jq@ '. | length' < "$yt_sub_import_file" )
+
+- for i in $(seq $((sublength - 1))); do
+- channelInfo=$(jq --argjson index ${i} '[ "https://www.youtube.com/channel/" + .[$index].snippet.resourceId.channelId + "/videos", "#" + .[$index].snippet.title ]' < "$yt_sub_import_file")
+- printf "%s\n" "$(printf "%s" "$channelInfo" | tr -d '[]"\n,')" >> "$subscriptions_file"
++ for i in $(@seq@ $((sublength - 1))); do
++ channelInfo=$(@jq@ --argjson index ${i} '[ "https://www.youtube.com/channel/" + .[$index].snippet.resourceId.channelId + "/videos", "#" + .[$index].snippet.title ]' < "$yt_sub_import_file")
++ printf "%s\n" "$(printf "%s" "$channelInfo" | @tr@ -d '[]"\n,')" >> "$subscriptions_file"
+ done
+ exit
+ }
+@@ -1367,10 +1367,10 @@ verify_thumb_disp_method () {
+
+ #sort -R is not posix
+ posix_shuf () {
+- awk -F '\n' '
++ @awk@ -F '\n' '
+ BEGIN {srand()} #set the random seed at the start
+ {print rand() " " $0} #prepend a random number for each line' |\
+- sort | sed -E 's/[^ ]* //'
++ @sort@ | @sed@ -E 's/[^ ]* //'
+ #sort by the random numbers, remove the random number
+ }
+
+@@ -1486,8 +1486,8 @@ parse_opt () {
+ exit ;;
+ version)
+ printf "\033[1mytfzf:\033[0m %s\n" "$YTFZF_VERSION"
+- printf "\033[1myoutube-dl:\033[0m %s\n" "$(youtube-dl --version)"
+- command -v "fzf" 1>/dev/null && printf "\033[1mfzf:\033[0m %s\n" "$(fzf --version)"
++ printf "\033[1myoutube-dl:\033[0m %s\n" "$(@youtube-dl@ --version)"
++ command -v "@fzf@" 1>/dev/null && printf "\033[1mfzf:\033[0m %s\n" "$(@fzf@ --version)"
+ exit ;;
+
+ subt)
+@@ -1559,19 +1559,19 @@ done
+ shift $((OPTIND-1))
+
+ #only apply to ext_menu since they dont have a terminal to print to
+-[ $is_ext_menu -eq 1 ] && command -v notify-send 1>/dev/null 2>&1 && ext_menu_notifs=1 || ext_menu_notifs=0
++[ $is_ext_menu -eq 1 ] && command -v @notify-send@ 1>/dev/null 2>&1 && ext_menu_notifs=1 || ext_menu_notifs=0
+
+ #used for thumbnail previews in ueberzug
+ if [ $is_ext_menu -eq 0 ]; then
+- export TTY_LINES=$(tput lines)
+- export TTY_COLS=$(tput cols)
++ export TTY_LINES=$(@tput@ lines)
++ export TTY_COLS=$(@tput@ cols)
+ fi
+
+ #if both are true, it defaults to using fzf, and if fzf isnt installed it will throw an error
+ #so print this error instead and set $show_thumbnails to 0
+ if [ $is_ext_menu -eq 1 ] && [ $show_thumbnails -eq 1 ]; then
+ [ $ext_menu_notifs -eq 1 ] &&\
+- notify-send "warning" "Currently thumbnails do not work in external menus" ||\
++ @notify-send@ "warning" "Currently thumbnails do not work in external menus" ||\
+ printf "\033[33mWARNING: Currently thumbnails do not work in external menus\033[0m\n" >&2
+ show_thumbnails=0
+ fi
+--
+2.32.0
+
diff --git a/gnu/packages/patches/ytfzf-updates.patch b/gnu/packages/patches/ytfzf-updates.patch
new file mode 100644
index 0000000000..40e7c138b0
--- /dev/null
+++ b/gnu/packages/patches/ytfzf-updates.patch
@@ -0,0 +1,44 @@
+From ceb6836cd31653267506957cd0ccf78046404d3b Mon Sep 17 00:00:00 2001
+From: Raghav Gururajan <rg@raghavgururajan.name>
+Date: Mon, 5 Jul 2021 06:47:38 -0400
+Subject: [PATCH 2/2] Disable updates within the application.
+
+Patch the code responsible for self-updating the application.
+
+Co-authored-by: jgart <jgart@dismail.de>
+---
+ ytfzf | 18 ++----------------
+ 1 file changed, 2 insertions(+), 16 deletions(-)
+
+diff --git a/ytfzf b/ytfzf
+index f0f2e16..2d1bb2e 100755
+--- a/ytfzf
++++ b/ytfzf
+@@ -1260,22 +1260,8 @@ EOF
+ }
+
+ update_ytfzf () {
+- branch="$1"
+- updatefile="/tmp/ytfzf-update"
+- curl -L "https://raw.githubusercontent.com/pystardust/ytfzf/$branch/ytfzf" -o "$updatefile"
+-
+- if sed -n '1p' < "$updatefile" | grep -q '#!/bin/sh'; then
+- chmod 755 "$updatefile"
+- [ "$(uname)" = "Darwin" ] && prefix="/usr/local/bin" || prefix="/usr/bin"
+- function_exists "sudo" && doasroot="sudo" || doasroot="doas"
+- $doasroot cp "$updatefile" "$prefix/ytfzf"
+- unset prefix doasroot
+- else
+- printf "%bFailed to update ytfzf. Try again later.%b" "$c_red" "$c_reset"
+- fi
+-
+- rm "$updatefile"
+- exit 0
++ printf "%bUpdates have to be installed with Guix.%b\n" "$c_red" "$c_reset"
++ exit 1
+ }
+
+ #gives a value to sort by (this will give the unix time the video was uploaded)
+--
+2.32.0
+