author     zachir <zachir@librem.one>  2023-02-20 01:55:04 -0600
committer  zachir <zachir@librem.one>  2023-02-20 01:55:04 -0600
commit     3f50f3d208c6534011a319b0cecc129718d89cad (patch)
tree       b716eba5b24c2f279ce169c7778af232de0fe088
parent     e37e594a2a75e965c353dd541385208d03f2b4bc (diff)

st-copyurl patches

-rw-r--r--  patches/st-copyurl-20190202-0.8.1.diff             109
-rw-r--r--  patches/st-copyurl-20220221-0.8.5.diff             137
-rw-r--r--  patches/st-copyurl-multiline-20220221-0.8.5.diff   156
3 files changed, 402 insertions, 0 deletions
diff --git a/patches/st-copyurl-20190202-0.8.1.diff b/patches/st-copyurl-20190202-0.8.1.diff
new file mode 100644
index 0000000..8d6782b
--- /dev/null
+++ b/patches/st-copyurl-20190202-0.8.1.diff
@@ -0,0 +1,109 @@
+From be408247f1c1ff8ccf7ab128b126f54d19bd4392 Mon Sep 17 00:00:00 2001
+From: Michael Buch <michaelbuch12@gmail.com>
+Date: Sat, 2 Feb 2019 14:20:52 +0000
+Subject: [PATCH] Port the copyurl patch to the 0.8.1 st release. Mainly fix
+ usage of deprecated selcopy
+
+---
+ config.def.h | 1 +
+ st.c | 62 ++++++++++++++++++++++++++++++++++++++++++++++++++++
+ st.h | 1 +
+ 3 files changed, 64 insertions(+)
+
+diff --git a/config.def.h b/config.def.h
+index 82b1b09..cbe923e 100644
+--- a/config.def.h
++++ b/config.def.h
+@@ -178,6 +178,7 @@ static Shortcut shortcuts[] = {
+ { TERMMOD, XK_Y, selpaste, {.i = 0} },
+ { TERMMOD, XK_Num_Lock, numlock, {.i = 0} },
+ { TERMMOD, XK_I, iso14755, {.i = 0} },
++ { MODKEY, XK_l, copyurl, {.i = 0} },
+ };
+
+ /*
+diff --git a/st.c b/st.c
+index 46c954b..476eb31 100644
+--- a/st.c
++++ b/st.c
+@@ -2616,3 +2616,65 @@ redraw(void)
+ tfulldirt();
+ draw();
+ }
++
++/* select and copy the previous url on screen (do nothing if there's no url).
++ * known bug: doesn't handle urls that span multiple lines (wontfix)
++ * known bug: only finds first url on line (mightfix)
++ */
++void
++copyurl(const Arg *arg) {
++ /* () and [] can appear in urls, but excluding them here will reduce false
++ * positives when figuring out where a given url ends.
++ */
++ static char URLCHARS[] = "ABCDEFGHIJKLMNOPQRSTUVWXYZ"
++ "abcdefghijklmnopqrstuvwxyz"
++ "0123456789-._~:/?#@!$&'*+,;=%";
++
++ int i, row, startrow;
++ char *linestr = calloc(sizeof(char), term.col+1); /* assume ascii */
++ char *c, *match = NULL;
++
++ row = (sel.ob.x >= 0 && sel.nb.y > 0) ? sel.nb.y-1 : term.bot;
++ LIMIT(row, term.top, term.bot);
++ startrow = row;
++
++ /* find the start of the last url before selection */
++ do {
++ for (i = 0; i < term.col; ++i) {
++ if (term.line[row][i].u > 127) /* assume ascii */
++ continue;
++ linestr[i] = term.line[row][i].u;
++ }
++ linestr[term.col] = '\0';
++ if ((match = strstr(linestr, "http://"))
++ || (match = strstr(linestr, "https://")))
++ break;
++ if (--row < term.top)
++ row = term.bot;
++ } while (row != startrow);
++
++ if (match) {
++ /* must happen before trim */
++ selclear();
++ sel.ob.x = strlen(linestr) - strlen(match);
++
++ /* trim the rest of the line from the url match */
++ for (c = match; *c != '\0'; ++c)
++ if (!strchr(URLCHARS, *c)) {
++ *c = '\0';
++ break;
++ }
++
++ /* select and copy */
++ sel.mode = 1;
++ sel.type = SEL_REGULAR;
++ sel.oe.x = sel.ob.x + strlen(match)-1;
++ sel.ob.y = sel.oe.y = row;
++ selnormalize();
++ tsetdirt(sel.nb.y, sel.ne.y);
++ xsetsel(getsel());
++ xclipcopy();
++ }
++
++ free(linestr);
++}
+diff --git a/st.h b/st.h
+index dac64d8..5a58f8f 100644
+--- a/st.h
++++ b/st.h
+@@ -85,6 +85,7 @@ void printscreen(const Arg *);
+ void printsel(const Arg *);
+ void sendbreak(const Arg *);
+ void toggleprinter(const Arg *);
++void copyurl(const Arg *);
+
+ int tattrset(int);
+ void tnew(int, int);
+--
+2.20.1
+
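The first patch works a line at a time: it copies the printable ASCII cells of a row into a buffer, looks for an "http://" or "https://" prefix with strstr(), and then trims the match at the first character that cannot appear in a URL. Below is a minimal standalone sketch of that matching step, separate from the patch itself; findurl(), the demo line in main(), and the use of strspn() in place of the patch's strchr() trimming loop are illustrative choices, not part of the patch.

/* Standalone sketch (not part of the patch): find the first http(s) URL in
 * a line and trim it at the first character that cannot appear in a URL. */
#include <stdio.h>
#include <string.h>

static const char URLCHARS[] = "ABCDEFGHIJKLMNOPQRSTUVWXYZ"
                               "abcdefghijklmnopqrstuvwxyz"
                               "0123456789-._~:/?#@!$&'*+,;=%";

/* Return the length of the URL starting at *start, or 0 if none is found. */
static size_t
findurl(const char *line, const char **start)
{
	const char *match;

	if (!(match = strstr(line, "http://")) &&
	    !(match = strstr(line, "https://")))
		return 0;

	*start = match;
	/* strspn() stops at the first character outside URLCHARS, which is
	 * what the patch's trimming loop does with strchr(). */
	return strspn(match, URLCHARS);
}

int
main(void)
{
	const char *line = "see https://st.suckless.org/patches/copyurl/ (docs)";
	const char *url;
	size_t len = findurl(line, &url);

	if (len)
		printf("%.*s\n", (int)len, url);
	return 0;
}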
diff --git a/patches/st-copyurl-20220221-0.8.5.diff b/patches/st-copyurl-20220221-0.8.5.diff
new file mode 100644
index 0000000..5d914c4
--- /dev/null
+++ b/patches/st-copyurl-20220221-0.8.5.diff
@@ -0,0 +1,137 @@
+From 897c3958d01d8df80ebf1666b972b8a658b419ba Mon Sep 17 00:00:00 2001
+From: Santtu Lakkala <inz@inz.fi>
+Date: Wed, 16 Feb 2022 20:34:20 +0200
+Subject: [PATCH] Loop through urls on screen and copy to clipboard
+
+Based on the previous highlighting patches, slightly simplified and
+fixes graphical issues with mixed copyurl and selection.
+---
+ config.def.h | 1 +
+ st.c | 82 ++++++++++++++++++++++++++++++++++++++++++++++++++++
+ st.h | 1 +
+ 3 files changed, 84 insertions(+)
+
+diff --git a/config.def.h b/config.def.h
+index 91ab8ca..3f365c7 100644
+--- a/config.def.h
++++ b/config.def.h
+@@ -201,6 +201,7 @@ static Shortcut shortcuts[] = {
+ { TERMMOD, XK_Y, selpaste, {.i = 0} },
+ { ShiftMask, XK_Insert, selpaste, {.i = 0} },
+ { TERMMOD, XK_Num_Lock, numlock, {.i = 0} },
++ { MODKEY, XK_l, copyurl, {.i = 0} },
+ };
+
+ /*
+diff --git a/st.c b/st.c
+index 51049ba..bf3d81a 100644
+--- a/st.c
++++ b/st.c
+@@ -200,6 +200,7 @@ static void tdefutf8(char);
+ static int32_t tdefcolor(const int *, int *, int);
+ static void tdeftran(char);
+ static void tstrsequence(uchar);
++static const char *findlastany(const char *, const char**, size_t);
+
+ static void drawregion(int, int, int, int);
+
+@@ -2688,3 +2689,84 @@ redraw(void)
+ tfulldirt();
+ draw();
+ }
++
++const char *
++findlastany(const char *str, const char**find, size_t len)
++{
++ const char *found = NULL;
++ int i = 0;
++
++ for (found = str + strlen(str) - 1; found >= str; --found) {
++ for(i = 0; i < len; i++) {
++ if (strncmp(found, find[i], strlen(find[i])) == 0) {
++ return found;
++ }
++ }
++ }
++
++ return NULL;
++}
++
++/*
++** Select and copy the previous url on screen (do nothing if there's no url).
++**
++** FIXME: doesn't handle urls that span multiple lines; will need to add support
++** for multiline "getsel()" first
++*/
++void
++copyurl(const Arg *arg) {
++ /* () and [] can appear in urls, but excluding them here will reduce false
++ * positives when figuring out where a given url ends.
++ */
++ static const char URLCHARS[] = "ABCDEFGHIJKLMNOPQRSTUVWXYZ"
++ "abcdefghijklmnopqrstuvwxyz"
++ "0123456789-._~:/?#@!$&'*+,;=%";
++
++ static const char* URLSTRINGS[] = {"http://", "https://"};
++
++ int row = 0, /* row of current URL */
++ col = 0, /* column of current URL start */
++ colend = 0, /* column of last occurrence */
++ passes = 0; /* how many rows have been scanned */
++
++ char linestr[term.col + 1];
++ const char *c = NULL,
++ *match = NULL;
++
++ row = (sel.ob.x >= 0 && sel.nb.y > 0) ? sel.nb.y : term.bot;
++ LIMIT(row, term.top, term.bot);
++
++ colend = (sel.ob.x >= 0 && sel.nb.y > 0) ? sel.nb.x : term.col;
++ LIMIT(colend, 0, term.col);
++
++ /*
++ ** Scan from (term.row - 1,term.col - 1) to (0,0) and find
++ ** next occurrence of a URL
++ */
++ for (passes = 0; passes < term.row; passes++) {
++ /* Read in each column of every row until
++ ** we hit previous occurrence of URL
++ */
++ for (col = 0; col < colend; ++col)
++ linestr[col] = term.line[row][col].u < 128 ? term.line[row][col].u : ' ';
++ linestr[col] = '\0';
++
++ if ((match = findlastany(linestr, URLSTRINGS,
++ sizeof(URLSTRINGS)/sizeof(URLSTRINGS[0]))))
++ break;
++
++ if (--row < 0)
++ row = term.row - 1;
++
++ colend = term.col;
++ };
++
++ if (match) {
++ size_t l = strspn(match, URLCHARS);
++ selstart(match - linestr, row, 0);
++ selextend(match - linestr + l - 1, row, SEL_REGULAR, 0);
++ selextend(match - linestr + l - 1, row, SEL_REGULAR, 1);
++ xsetsel(getsel());
++ xclipcopy();
++ }
++}
+diff --git a/st.h b/st.h
+index 519b9bd..0458005 100644
+--- a/st.h
++++ b/st.h
+@@ -85,6 +85,7 @@ void printscreen(const Arg *);
+ void printsel(const Arg *);
+ void sendbreak(const Arg *);
+ void toggleprinter(const Arg *);
++void copyurl(const Arg *);
+
+ int tattrset(int);
+ void tnew(int, int);
+--
+2.32.0
+
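The 0.8.5 version replaces the strstr() search with findlastany(), which walks backwards from the end of a line and returns the last position at which any of the given scheme prefixes matches, so repeated presses of the shortcut loop through the URLs on screen from the most recent one upwards. The standalone sketch below reproduces findlastany() from the patch with a small driver; main() and the sample line are assumptions for demonstration only.

/* Standalone sketch (not part of the patch): findlastany() scans backwards
 * and returns the last position where any of the given prefixes matches. */
#include <stdio.h>
#include <string.h>

static const char *
findlastany(const char *str, const char **find, size_t len)
{
	const char *found;
	size_t i;

	for (found = str + strlen(str) - 1; found >= str; --found)
		for (i = 0; i < len; i++)
			if (strncmp(found, find[i], strlen(find[i])) == 0)
				return found;

	return NULL;
}

int
main(void)
{
	static const char *URLSTRINGS[] = { "http://", "https://" };
	const char *line = "a http://first.example and https://second.example";
	const char *m = findlastany(line, URLSTRINGS,
	                            sizeof(URLSTRINGS) / sizeof(URLSTRINGS[0]));

	if (m)
		printf("%s\n", m); /* prints the later match: https://second.example */
	return 0;
}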
diff --git a/patches/st-copyurl-multiline-20220221-0.8.5.diff b/patches/st-copyurl-multiline-20220221-0.8.5.diff
new file mode 100644
index 0000000..7cd26e2
--- /dev/null
+++ b/patches/st-copyurl-multiline-20220221-0.8.5.diff
@@ -0,0 +1,156 @@
+From 30a04d9ecb3998953bdbe42e5617d00d6002869b Mon Sep 17 00:00:00 2001
+From: Santtu Lakkala <inz@inz.fi>
+Date: Wed, 16 Feb 2022 20:34:20 +0200
+Subject: [PATCH] Loop through urls on screen and copy to clipboard
+
+Replace url detection heuristics with a DFA, enabling urls that span
+multiple lines. Also fix the selection not to use snapping so that urls
+are selected exactly.
+---
+ config.def.h | 1 +
+ st.c | 93 ++++++++++++++++++++++++++++++++++++++++++++++++++++
+ st.h | 1 +
+ 3 files changed, 95 insertions(+)
+
+diff --git a/config.def.h b/config.def.h
+index 91ab8ca..3f365c7 100644
+--- a/config.def.h
++++ b/config.def.h
+@@ -201,6 +201,7 @@ static Shortcut shortcuts[] = {
+ { TERMMOD, XK_Y, selpaste, {.i = 0} },
+ { ShiftMask, XK_Insert, selpaste, {.i = 0} },
+ { TERMMOD, XK_Num_Lock, numlock, {.i = 0} },
++ { MODKEY, XK_l, copyurl, {.i = 0} },
+ };
+
+ /*
+diff --git a/st.c b/st.c
+index 51049ba..5b6d919 100644
+--- a/st.c
++++ b/st.c
+@@ -152,6 +152,11 @@ typedef struct {
+ int narg; /* nb of args */
+ } STREscape;
+
++typedef struct {
++ int state;
++ size_t length;
++} URLdfa;
++
+ static void execsh(char *, char **);
+ static void stty(char **);
+ static void sigchld(int);
+@@ -200,6 +205,7 @@ static void tdefutf8(char);
+ static int32_t tdefcolor(const int *, int *, int);
+ static void tdeftran(char);
+ static void tstrsequence(uchar);
++static int daddch(URLdfa *, char);
+
+ static void drawregion(int, int, int, int);
+
+@@ -2688,3 +2694,90 @@ redraw(void)
+ tfulldirt();
+ draw();
+ }
++
++int
++daddch(URLdfa *dfa, char c)
++{
++ /* () and [] can appear in urls, but excluding them here will reduce false
++ * positives when figuring out where a given url ends.
++ */
++ static const char URLCHARS[] = "ABCDEFGHIJKLMNOPQRSTUVWXYZ"
++ "abcdefghijklmnopqrstuvwxyz"
++ "0123456789-._~:/?#@!$&'*+,;=%";
++ static const char RPFX[] = "//:sptth";
++
++ if (!strchr(URLCHARS, c)) {
++ dfa->length = 0;
++ dfa->state = 0;
++
++ return 0;
++ }
++
++ dfa->length++;
++
++ if (dfa->state == 2 && c == '/') {
++ dfa->state = 0;
++ } else if (dfa->state == 3 && c == 'p') {
++ dfa->state++;
++ } else if (c != RPFX[dfa->state]) {
++ dfa->state = 0;
++ return 0;
++ }
++
++ if (dfa->state++ == 7) {
++ dfa->state = 0;
++ return 1;
++ }
++
++ return 0;
++}
++
++/*
++** Select and copy the previous url on screen (do nothing if there's no url).
++*/
++void
++copyurl(const Arg *arg) {
++ int row = 0, /* row of current URL */
++ col = 0, /* column of current URL start */
++ colend = 0, /* column of last occurrence */
++ passes = 0; /* how many rows have been scanned */
++
++ const char *c = NULL,
++ *match = NULL;
++ URLdfa dfa = { 0 };
++
++ row = (sel.ob.x >= 0 && sel.nb.y > 0) ? sel.nb.y : term.bot;
++ LIMIT(row, term.top, term.bot);
++
++ colend = (sel.ob.x >= 0 && sel.nb.y > 0) ? sel.nb.x : term.col;
++ LIMIT(colend, 0, term.col);
++
++ /*
++ ** Scan from (term.row - 1,term.col - 1) to (0,0) and find
++ ** next occurrence of a URL
++ */
++ for (passes = 0; passes < term.row; passes++) {
++ /* Read in each column of every row until
++ ** we hit previous occurrence of URL
++ */
++ for (col = colend; col--;)
++ if (daddch(&dfa, term.line[row][col].u < 128 ? term.line[row][col].u : ' '))
++ break;
++
++ if (col >= 0)
++ break;
++
++ if (--row < 0)
++ row = term.row - 1;
++
++ colend = term.col;
++ }
++
++ if (passes < term.row) {
++ selstart(col, row, 0);
++ selextend((col + dfa.length - 1) % term.col, row + (col + dfa.length - 1) / term.col, SEL_REGULAR, 0);
++ selextend((col + dfa.length - 1) % term.col, row + (col + dfa.length - 1) / term.col, SEL_REGULAR, 1);
++ xsetsel(getsel());
++ xclipcopy();
++ }
++}
+diff --git a/st.h b/st.h
+index 519b9bd..0458005 100644
+--- a/st.h
++++ b/st.h
+@@ -85,6 +85,7 @@ void printscreen(const Arg *);
+ void printsel(const Arg *);
+ void sendbreak(const Arg *);
+ void toggleprinter(const Arg *);
++void copyurl(const Arg *);
+
+ int tattrset(int);
+ void tnew(int, int);
+--
+2.32.0
+
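The multiline variant drops per-line string matching entirely: copyurl() feeds screen characters to daddch() right to left, and the URLdfa recognizes a scheme once it has walked backwards over "//:sptth" ("https://" reversed; the state == 3 branch skips the optional 's' for plain "http://"), while dfa.length keeps counting URL characters since the last non-URL character, which is what lets a match span wrapped rows. The standalone sketch below reuses URLdfa and daddch() verbatim from the patch; the single-string driver in main() is an illustrative stand-in for the terminal grid scan.

/* Standalone sketch (not part of the patch): feed a line right-to-left
 * through the patch's reversed-prefix DFA and report where a URL starts. */
#include <stdio.h>
#include <string.h>

typedef struct {
	int state;
	size_t length;
} URLdfa;

static int
daddch(URLdfa *dfa, char c)
{
	static const char URLCHARS[] = "ABCDEFGHIJKLMNOPQRSTUVWXYZ"
	                               "abcdefghijklmnopqrstuvwxyz"
	                               "0123456789-._~:/?#@!$&'*+,;=%";
	static const char RPFX[] = "//:sptth";

	if (!strchr(URLCHARS, c)) {
		/* a non-URL character ends any candidate URL */
		dfa->length = 0;
		dfa->state = 0;
		return 0;
	}

	dfa->length++;

	if (dfa->state == 2 && c == '/') {
		dfa->state = 0;
	} else if (dfa->state == 3 && c == 'p') {
		dfa->state++; /* skip the 's' of "https" for plain "http" */
	} else if (c != RPFX[dfa->state]) {
		dfa->state = 0;
		return 0;
	}

	if (dfa->state++ == 7) {
		dfa->state = 0;
		return 1; /* walked back over a full "http://" or "https://" */
	}

	return 0;
}

int
main(void)
{
	const char *line = "docs at https://st.suckless.org/patches/copyurl/ here";
	URLdfa dfa = { 0 };
	int col;

	/* Scan right-to-left, as copyurl() scans the terminal grid. */
	for (col = (int)strlen(line); col--;)
		if (daddch(&dfa, line[col]))
			break;

	if (col >= 0)
		printf("url starts at column %d, length %zu\n", col, dfa.length);
	return 0;
}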