Permalink
Browse files

watch mode

  • Loading branch information...
neeasade committed Feb 1, 2019
1 parent ea37143 commit e8badcab30c8c4d2bd700443645280dce4d14059
Showing with 29 additions and 17 deletions.
  1. +29 −17 bin/bin/reese
@@ -1,41 +1,53 @@
#!/bin/sh
# clone/inspo: https://github.com/enjuus/chandler
# I have a stepbrother named chandler, who goes by reese.
# depends on: curl, sed, awk, https://github.com/ericchiang/pup, xclip (optional)

# Thread URL; filled by -s or, failing that, the clipboard.
url=""
# Download destination. Valid templates: {BOARD}, {THREADID},
# {THREAD} (thread subject, which falls back to {THREADID}).
target="$HOME/4ch/{BOARD}/{THREADID}"
# -w: keep polling the thread for new images until it dies.
watch_mode=false

# Parse flags: -s URL, -d target dir, -w watch mode, -h help.
# (The diff artifact left two stacked `while getopts` lines with one
# `done`, which is a syntax error; only the new option spec is kept.)
while getopts "s:d:wh" opt; do
  case ${opt} in
    d) target="$OPTARG" ;;
    s) url="$OPTARG" ;;
    w) watch_mode=true ;;
    h) echo "read it"; exit ;;
  esac
done

# fallback to clipboard if -s not specified
[ -z "$url" ] && url="$(xclip -o -selection clipboard)"
echo "url: $url"

# Fetch the thread once up front; bail on a transport-level failure.
# (Deduplicated: the diff showed two curl/exit pairs; keep one, exit 1.)
# NOTE(review): curl -s returns 0 for HTTP 404s, so "invalid url" only
# catches malformed URLs / network errors — confirm whether that is enough.
page="$(curl -s "$url")"
[ $? -ne 0 ] && echo "invalid url" && exit 1

# eg http://boards.4chan.org/wg/thread/7354127
#    field 4 is the board, field 6 the thread id (strip any #fragment).
board=$(echo "$url" | awk '{split($0,a,"/"); print a[4]}')
thread_id=$(echo "$url" | awk '{split($0,a,"/"); print a[6]}' | sed 's/#.*//')

# Thread subject, used for the {THREAD} template; fall back to the id.
thread="$(echo "$page" | pup '.board .subject:first-of-type text{}')"
[ -z "$thread" ] && thread="$thread_id"

# Expand the templates into the destination path.
# NOTE(review): a '/' or sed metacharacter in the subject breaks this
# substitution — consider sanitizing $thread.
target="$(echo "$target" | sed -e "s/{THREAD}/${thread}/" -e "s/{THREADID}/${thread_id}/" -e "s/{BOARD}/${board}/")"
mkdir -p "$target"

# Download every full-size image linked from the thread into $target.
# Each missing file is fetched by a backgrounded curl; wait acts as a
# barrier so the function returns only after all downloads finish.
# (Deduplicated: the diff left the old top-level copy of this loop in
# place above the function, which would download everything twice.)
get_images() {
  echo "$page" | pup '.board a.fileThumb attr{href}' | \
  while read -r file; do
    filename="$(basename "$file")"
    # don't duplicate downloads, but fork off every call.
    [ -f "$target/$filename" ] || echo getting "https:$file to $target/$filename"
    [ -f "$target/$filename" ] || curl -s "https:$file" > "$target/$filename" &
  done

  wait
}
get_images

# Watch mode: poll the thread every minute for new images until it dies.
# The original `while $watch_mode` executed the variable's value as a
# command; an unset/empty value expands to a null command (status 0) and
# loops forever. An explicit string test keeps true/false behavior
# identical and makes anything else terminate.
while [ "$watch_mode" = true ]; do
  sleep 60
  page="$(curl -s "$url")"
  page_title="$(echo "$page" | pup 'head title text{}')"
  [ "$page_title" = "302 Found" ] && echo "Thread died." && exit 1
  get_images
done

0 comments on commit e8badca

Please sign in to comment.