author     Luke Smith <luke@lukesmith.xyz>           2020-11-30 15:23:39 -0500
committer  Luke Smith <luke@lukesmith.xyz>           2020-11-30 15:23:39 -0500
commit     1faf5cbc0b8d5deef74e4bc82b769194b738aa11  (patch)
tree       65e70b1f0c8c0717f1c74858cf8d97410f537052
parent     8ab3d03681479263a11b05f7f1b53157f61e8c3b  (diff)
parent     dcaad2ceba5b8fe5d65875da29a2eb08f5ea5b42  (diff)
Merge branch 'narukeh-master'
-rw-r--r--   config.h        |  8
-rwxr-xr-x   st-urlhandler   | 19
2 files changed, 13 insertions, 14 deletions
diff --git a/config.h b/config.h
--- a/config.h
+++ b/config.h
@@ -234,12 +234,8 @@ MouseKey mkeys[] = {
 	{ Button5, TERMMOD, zoom, {.f = -1} },
 };
 
-static char *openurlcmd[] = { "/bin/sh", "-c", "st-urlhandler", "externalpipe", NULL };
-
-static char *copyurlcmd[] = { "/bin/sh", "-c",
-	"tmp=$(sed 's/.*│//g' | tr -d '\n' | grep -aEo '(((http|https|gopher|gemini|ftp|ftps|git)://|www\\.)[a-zA-Z0-9.]*[:]?[a-zA-Z0-9./@$&%?$#=_-~]*)|((magnet:\\?xt=urn:btih:)[a-zA-Z0-9]*)' | uniq | sed 's/^www./http:\\/\\/www\\./g' ); IFS=; [ ! -z $tmp ] && echo $tmp | dmenu -i -p 'Copy which url?' -l 10 | tr -d '\n' | xclip -selection clipboard",
-	"externalpipe", NULL };
-
+static char *openurlcmd[] = { "/bin/sh", "-c", "st-urlhandler -o", "externalpipe", NULL };
+static char *copyurlcmd[] = { "/bin/sh", "-c", "st-urlhandler -c", "externalpipe", NULL };
 static char *copyoutput[] = { "/bin/sh", "-c", "st-copyout", "externalpipe", NULL };
 
 static Shortcut shortcuts[] = {
diff --git a/st-urlhandler b/st-urlhandler
index 0d39dd5..b8f9787 100755
--- a/st-urlhandler
+++ b/st-urlhandler
@@ -1,15 +1,18 @@
 #!/bin/sh
 
-urlregex="(((http|https)://|www\\.)[a-zA-Z0-9.]*[:]?[a-zA-Z0-9./@$&%?$\#=_~-]*)|((magnet:\\?xt=urn:btih:)[a-zA-Z0-9]*)"
+urlregex="(((http|https|gopher|gemini|ftp|ftps|git)://|www\\.)[a-zA-Z0-9.]*[:]?[a-zA-Z0-9./@$&%?$\#=_~-]*)|((magnet:\\?xt=urn:btih:)[a-zA-Z0-9]*)"
 
-# First remove linebreaks and mutt sidebars:
-urls="$(sed 's/.*│//g' | tr -d '\n' |
+urls="$(sed 's/.*│//g' | tr -d '\n' | # First remove linebreaks and mutt sidebars:
 	grep -aEo "$urlregex" | # grep only urls as defined above.
 	uniq | # Ignore neighboring duplicates.
-	sed 's/^www./http:\/\/www\./g')"
+	sed 's/^www./http:\/\/www\./g')" # xdg-open will not detect url without http://
 
-[ -z "$urls" ] && exit
+[ -z "$urls" ] && exit 1
 
-chosen="$(echo "$urls" | dmenu -i -p 'Follow which url?' -l 10)"
-
-setsid xdg-open "$chosen" >/dev/null 2>&1 &
+while getopts "hoc" o; do case "${o}" in
+	h) printf "Optional arguments for custom use:\\n -c: copy\\n -o: xdg-open\\n -h: Show this message\\n" && exit 1 ;;
+	o) chosen="$(echo "$urls" | dmenu -i -p 'Follow which url?' -l 10)"
+		setsid xdg-open "$chosen" >/dev/null 2>&1 & ;;
+	c) echo "$urls" | dmenu -i -p 'Copy which url?' -l 10 | tr -d '\n' | xclip -selection clipboard ;;
+	*) printf "Invalid option: -%s\\n" "$OPTARG" && exit 1 ;;
+esac done
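The net effect of the merge is that config.h no longer carries the long copy-URL pipeline as an inline string: both externalpipe commands now call st-urlhandler with a flag, and the script dispatches on that flag with getopts. Below is a minimal sketch of how the refactored script can be exercised outside of st; the piped sample text is only an illustration, and dmenu, xclip, and xdg-open are assumed to be installed.

```sh
# Open a URL found in the piped text (dmenu prompts "Follow which url?",
# then the choice is handed to xdg-open):
printf 'see https://lukesmith.xyz and www.suckless.org\n' | st-urlhandler -o

# Copy a URL to the clipboard instead (dmenu prompts "Copy which url?"):
printf 'mirror: gopher://example.org/\n' | st-urlhandler -c

# Show the flag summary; stdin must still contain at least one URL,
# because the script greps stdin and exits before option parsing otherwise:
echo 'www.example.com' | st-urlhandler -h
```

Inside st the same commands are reached through the externalpipe patch, which pipes the terminal's screen contents into openurlcmd or copyurlcmd. Keeping the URL regex only in st-urlhandler also means new schemes such as gopher and gemini are maintained in one place instead of being duplicated in config.h.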