       ---
       commit bb7f7f2aa382153f36bca70164565cdb26bb3d0c
       parent 113f54e895b422fe88828357df395b6fe2c7a4b7
       Author: Coral Pink <coral.pink@disr.it>
       Date:   Fri, 26 Dec 2025 00:28:28 +0100
       
       [st][patches][copyurl] add a highlight+forward loop patch
       
       Patch fixing two bugs in a previous highlight patch
       (st-copyurl-20230406-211964d.diff), rebased to version 0.9.3.
       
       Diffstat:
         M st.suckless.org/patches/copyurl/in… |       1 +
         A st.suckless.org/patches/copyurl/st… |     189 +++++++++++++++++++++++++++++++
       
       2 files changed, 190 insertions(+), 0 deletions(-)
       ---
       diff --git a/st.suckless.org/patches/copyurl/index.md b/st.suckless.org/patches/copyurl/index.md
       @@ -24,6 +24,7 @@ Following patches also highlight the selected urls:
        * [st-copyurl-0.8.4.diff](st-copyurl-0.8.4.diff)
        * [st-copyurl-20220221-0.8.5.diff](st-copyurl-20220221-0.8.5.diff)
        * [st-copyurl-20230406-211964d.diff](st-copyurl-20230406-211964d.diff)
       +* [st-copyurl-0.9.3.diff](st-copyurl-0.9.3.diff)
        
        Following patches also allow urls that span multiple lines:
        
       diff --git a/st.suckless.org/patches/copyurl/st-copyurl-0.9.3.diff b/st.suckless.org/patches/copyurl/st-copyurl-0.9.3.diff
       @@ -0,0 +1,189 @@
       +From 35ae18e5c351739b2d80acd0d77db7856caf2596 Mon Sep 17 00:00:00 2001
       +From: Coral Pink <coral.pink@disr.it>
       +Date: Fri, 26 Dec 2025 00:11:31 +0100
       +Subject: [PATCH] highlight & loop through urls in both directions
       +
       +Based on a previous patch (st-copyurl-20230406-211964d.diff).
       +This patch fixes two bugs when looping forward:
       +1. URLs on the same line were iterated in the same order as when
       +   looping backward (right-to-left); they are now visited left-to-right.
       +2. When a URL in the first row was selected, looping forward got
       +   stuck there instead of advancing.
       +
       +Co-authored-by: Gildasio Junior <gildasiojunior@riseup.net>
       +---
       + config.def.h |   2 +
       + st.c         | 127 +++++++++++++++++++++++++++++++++++++++++++++++++++
       + st.h         |   1 +
       + 3 files changed, 130 insertions(+)
       +
       +diff --git a/config.def.h b/config.def.h
       +index 2cd740a..f2d8d77 100644
       +--- a/config.def.h
       ++++ b/config.def.h
       +@@ -201,6 +201,8 @@ static Shortcut shortcuts[] = {
       +         { TERMMOD,              XK_Y,           selpaste,       {.i =  0} },
       +         { ShiftMask,            XK_Insert,      selpaste,       {.i =  0} },
       +         { TERMMOD,              XK_Num_Lock,    numlock,        {.i =  0} },
       ++        { MODKEY,               XK_l,           copyurl,        {.i =  0} },
       ++        { MODKEY|ShiftMask,     XK_L,           copyurl,        {.i =  1} }
       + };
       + 
       + /*
       +diff --git a/st.c b/st.c
       +index 8e57991..10dd9d5 100644
       +--- a/st.c
       ++++ b/st.c
       +@@ -201,6 +201,8 @@ static void tdefutf8(char);
       + static int32_t tdefcolor(const int *, int *, int);
       + static void tdeftran(char);
       + static void tstrsequence(uchar);
       ++static const char *findlastany(const char *, const char**, size_t);
       ++static const char *findfirstany(const char *, const char**, size_t);
       + 
       + static void drawregion(int, int, int, int);
       + 
       +@@ -2699,3 +2701,128 @@ redraw(void)
       +         tfulldirt();
       +         draw();
       + }
       ++
       ++const char *
       ++findlastany(const char *str, const char**find, size_t len)
       ++{
       ++        const char *found = NULL;
       ++        int i = 0;
       ++
       ++        for (found = str + strlen(str) - 1; found >= str; --found) {
       ++                for(i = 0; i < len; i++) {
       ++                        if (strncmp(found, find[i], strlen(find[i])) == 0) {
       ++                                return found;
       ++                        }
       ++                }
       ++        }
       ++
       ++        return NULL;
       ++}
       ++
       ++const char *
       ++findfirstany(const char *str, const char**find, size_t len)
       ++{
       ++        const char *found = NULL;
       ++        int i = 0;
       ++
       ++        for (found = str; found < str + strlen(str); ++found) {
       ++                for(i = 0; i < len; i++) {
       ++                        if (strncmp(found, find[i], strlen(find[i])) == 0) {
       ++                                return found;
       ++                        }
       ++                }
       ++        }
       ++
       ++        return NULL;
       ++}
       ++
       ++/*
       ++ * Select and copy the previous or next url on screen (do nothing if there's no url).
       ++ *
       ++ * FIXME: doesn't handle urls that span multiple lines; will need to add support
       ++ *        for multiline "getsel()" first
       ++ */
       ++void
       ++copyurl(const Arg *arg) {
       ++        /*
       ++         * () and [] can appear in urls, but excluding them here will reduce false
       ++         * positives when figuring out where a given url ends.
       ++         */
       ++        static const char URLCHARS[] = "ABCDEFGHIJKLMNOPQRSTUVWXYZ"
       ++                "abcdefghijklmnopqrstuvwxyz"
       ++                "0123456789-._~:/?#@!$&'*+,;=%";
       ++
       ++        static const char* URLSTRINGS[] = {"http://", "https://"};
       ++
       ++        int row = 0, /* row of current URL */
       ++                col = 0, /* index of current URL start */
       ++                colend = 0, /* column of last occurrence */
       ++                passes = 0; /* how many rows have been scanned */
       ++
       ++        char linestr[term.col + 1];
       ++        const char *c = NULL,
       ++                 *match = NULL;
       ++
       ++        /*
       ++         * arg->i = 0 --> bottom-up
       ++         * arg->i = 1 --> top-down
       ++         */
       ++        row = sel.ob.x == -1 ? term.bot : arg->i ? sel.ne.y : sel.nb.y;
       ++        LIMIT(row, term.top, term.bot);
       ++
       ++        colend = sel.ob.x == -1 ? term.col : arg->i ? sel.ne.x : sel.nb.x;
       ++        LIMIT(colend, 0, term.col);
       ++
       ++        /*
       ++         * Scan from (term.row - 1, term.col - 1) to (0,0) (or in reverse,
       ++         * when arg->i = 1) and find the previous/next occurrence of a URL.
       ++         */
       ++        for (passes = 0; passes < term.row; passes++) {
       ++                if (!arg->i) {
       ++                        /*
       ++                         * Read in each column of every row until
       ++                         * we hit the previous occurrence of a URL.
       ++                         */
       ++                        for (col = 0; col < colend; ++col)
       ++                                linestr[col] = term.line[row][col].u < 128
       ++                                        ? term.line[row][col].u : ' ';
       ++                        linestr[col] = '\0';
       ++
       ++                        if ((match = findlastany(linestr, URLSTRINGS,
       ++                                                sizeof(URLSTRINGS)/sizeof(URLSTRINGS[0]))))
       ++                                break;
       ++
       ++                        if (--row < 0)
       ++                                row = term.row - 1;
       ++
       ++                        colend = term.col;
       ++                } else {
       ++                        /*
       ++                         * Read in each column of every row until
       ++                         * we hit the next occurrence of a URL.
       ++                         */
       ++                        for (col = colend + 1; col < term.col; ++col)
       ++                                linestr[col] = term.line[row][col].u < 128
       ++                                        ? term.line[row][col].u : ' ';
       ++                        linestr[col] = '\0';
       ++
       ++                        if ((match = findfirstany(linestr + colend + 1, URLSTRINGS,
       ++                                                sizeof(URLSTRINGS)/sizeof(URLSTRINGS[0]))))
       ++                                break;
       ++
       ++                        if (++row >= term.row)
       ++                                row = 0;
       ++
       ++                        colend = -1;
       ++                }
       ++        }
       ++
       ++        if (match) {
       ++                size_t l = strspn(match, URLCHARS);
       ++                selstart(match - linestr, row, 0);
       ++                selextend(match - linestr + l - 1, row, SEL_REGULAR, 0);
       ++                selextend(match - linestr + l - 1, row, SEL_REGULAR, 1);
       ++                xsetsel(getsel());
       ++                xclipcopy();
       ++        }
       ++}
       +diff --git a/st.h b/st.h
       +index fd3b0d8..baa8f29 100644
       +--- a/st.h
       ++++ b/st.h
       +@@ -85,6 +85,7 @@ void printscreen(const Arg *);
       + void printsel(const Arg *);
       + void sendbreak(const Arg *);
       + void toggleprinter(const Arg *);
       ++void copyurl(const Arg *);
       + 
       + int tattrset(int);
       + void tnew(int, int);
       +-- 
       +2.51.2
       +
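---
The following is not part of the commit: a minimal standalone sketch of the direction change described in the patch message. findfirstany() returns the leftmost match on a line and is what the new forward (top-down) loop uses, while findlastany() returns the rightmost match for the backward (bottom-up) loop. The two helpers mirror the ones added to st.c above; the sample line, URL table, and main() are illustrative only and not taken from st.

#include <stdio.h>
#include <string.h>

/* Rightmost match on a line: used by the backward (bottom-up) loop. */
static const char *
findlastany(const char *str, const char **find, size_t len)
{
        const char *found;
        size_t i;

        for (found = str + strlen(str) - 1; found >= str; --found)
                for (i = 0; i < len; i++)
                        if (strncmp(found, find[i], strlen(find[i])) == 0)
                                return found;
        return NULL;
}

/* Leftmost match on a line: used by the new forward (top-down) loop. */
static const char *
findfirstany(const char *str, const char **find, size_t len)
{
        const char *found;
        size_t i;

        for (found = str; found < str + strlen(str); ++found)
                for (i = 0; i < len; i++)
                        if (strncmp(found, find[i], strlen(find[i])) == 0)
                                return found;
        return NULL;
}

int
main(void)
{
        static const char URLCHARS[] = "ABCDEFGHIJKLMNOPQRSTUVWXYZ"
                "abcdefghijklmnopqrstuvwxyz"
                "0123456789-._~:/?#@!$&'*+,;=%";
        static const char *URLSTRINGS[] = { "http://", "https://" };
        const char line[] = "see https://first.example and http://second.example";
        size_t n = sizeof(URLSTRINGS) / sizeof(URLSTRINGS[0]);
        const char *fwd = findfirstany(line, URLSTRINGS, n);
        const char *bwd = findlastany(line, URLSTRINGS, n);

        /* strspn() bounds the URL the same way copyurl() does */
        if (fwd && bwd) {
                printf("forward:  %.*s\n", (int)strspn(fwd, URLCHARS), fwd);
                printf("backward: %.*s\n", (int)strspn(bwd, URLCHARS), bwd);
        }
        return 0;
}

With the keybindings added to config.def.h, MODKEY+l starts the bottom-up walk (arg.i = 0, findlastany()) and MODKEY+Shift+L the top-down walk (arg.i = 1, findfirstany()).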