#!/bin/bash
#
# usage: onemanga [-dlos] [-c <first chapter>[+|-<last chapter>]] <manga name> [<manga name> ...]
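#
# options:
#   -c <range>  download chapter N, N up to the latest (N+), or N through M (N-M)
#   -d          save each series into its own subdirectory
#   -l          download only the latest chapter
#   -o          append each finished archive name to onemanga.log
#   -s <word>   search the site directory for series matching <word>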
#
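# remove leftover images and temp files on exit (trap 0 = EXIT); cleanup is
# defined below, which is fine since traps are resolved only when they fire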
trap cleanup 0
MAIN_SITE="http://www.onemanga.com"
ALT_SITE="http://www.1000manga.com"
search_url="$MAIN_SITE/directory"
base_dir=`pwd`
log="$base_dir/onemanga.log"
cookies="$base_dir/tmp_cookies"
CLEAREOL="`tput el`"
R='\e[0;31m'
G='\e[0;32m'
Y='\e[0;33m'
B='\e[0;34m'
M='\e[0;35m'
C='\e[0;36m'
N='\e[0m'
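# calc: evaluate an arithmetic expression with bc and print only the integer part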
function calc() {
echo "scale=2; $*" | bc -q 2>/dev/null | cut -f1 -d.
}
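# echoes <char> <stop> [<start>]: print <char> once per value in the brace
# range {<start>..<stop>} (start defaults to 1); bash expands reversed ranges
# downwards, which the zero-padding code further below relies on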
function echoes() {
if [[ $2 -gt 0 ]]; then
eval "for i in {${3:-1}..$2}; do echo -n '$1'; done"
fi
}
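# progressbar <current> <total>: render a right-aligned percentage and a
# 20-column bar, e.g. "  50% [##########          ]"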
function progressbar() {
width=20
current=$1
total=$2
percent=`calc "$current / $total * 100"`
stack=`calc "$current / $total * $width"`
stack=${stack:=0}
space=`calc "$width - $stack"`
echoes " " ${#percent} 3
echo -n "$percent% ["
echoes "#" $stack
echoes " " $space
echo "]"
}
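# cleanup: drop downloaded pages and temporary work files from the current directory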
function cleanup() {
rm -f *.jpg tmp_* $cookies
}
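# parse command-line flags; the leading ':' in the optstring lets us tell
# unknown options (\?) apart from options missing their argument (:)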
while getopts ":c:dlos:" op; do
case $op in
c)
CHAPTER=1
CHAPTER_ARG="$OPTARG"
;;
d)
USE_DIR=1
;;
l)
LATEST=1
;;
o)
LOG_FILE=1
;;
s)
SEARCH=1
KEYWORD="$OPTARG"
;;
\?)
echo "Unknown option: -$OPTARG" >&2
exit 1
;;
:)
case $OPTARG in
c)
echo "Please specify a chapter number"
;;
s)
echo "Please specify a search keyword"
;;
esac
exit 1
;;
esac
done
shift $(($OPTIND - 1))
if [[ $CHAPTER -eq 1 && $LATEST -eq 1 ]]; then
echo "Invalid option: -c and -l cannot be used at the same time" >&2
exit 1
fi
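# -s: scrape the site's directory page for series links and filter them by the
# keyword; the sed/awk chain below assumes entries are tagged with the
# "ch-subject" class, as the site served them at the time of writing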
if [[ $SEARCH ]]; then
echo -e "\nSearching ${search_url}...\n"
wget -qO - "${search_url}/" | grep "ch-subject" | sed -e 's@^.*href\=\"/@\(@g' -e 's@</a>.*@@g' -e 's@/.*>@\):@g' | awk -F: '{print $2 "\t" $1}' | sed "s/\&\#39\;/'/g" | grep -i "$KEYWORD" > tmp_search
[[ -s tmp_search ]] && cat tmp_search && echo ""
result=`wc -l < tmp_search`
echo -n "${result}" && [[ $result -eq 1 ]] && echo " result." || echo " results."
exit
fi
if [[ $CHAPTER = "" && $LATEST = "" ]]; then
UPDATE=1
fi
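# -c accepts "N" (a single chapter), "N+" (N up to the latest), or "N-M"
# (N through M); a "+" placeholder for last_chapter is resolved once the
# latest available chapter is known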
if [[ $CHAPTER -eq 1 ]]; then
if [[ "${CHAPTER_ARG%+*}" != "$CHAPTER_ARG" ]]; then
first_chapter=${CHAPTER_ARG%+*}
last_chapter="+"
elif [[ "${CHAPTER_ARG%-*}" != "$CHAPTER_ARG" ]]; then
first_chapter=${CHAPTER_ARG%-*}
last_chapter=${CHAPTER_ARG#*-}
else
first_chapter=$CHAPTER_ARG
last_chapter=$CHAPTER_ARG
fi
fi
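# main loop: each remaining argument is a series name as it appears in the
# URL, e.g. with underscores instead of spaces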
for manga_name in "$@"; do
base_url="$MAIN_SITE"
manga_name=${manga_name%/}
# field index for onemanga.com HTML page
page_index=2
image_index=4
if [[ $USE_DIR -eq 1 ]]; then
if [[ -f "$base_dir/$manga_name" ]]; then
#echo -e "\n$manga_name ${Y}[SKIPPED]${N}"
#echo "Not a directory."
continue
fi
mkdir -p "$base_dir/$manga_name"
cd "$base_dir/$manga_name"
fi
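# derive a human-readable title, then find the highest-numbered local
# ${manga_name}_<chapter>.cbz/.cbr archive so updates can resume from it;
# sorting keys on the field after the name's last underscore, and "none"
# marks a series with no local chapters yet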
display_name=`echo $manga_name | sed -e "s/_/ /g" -e "s/%27/'/g"`
word_count=`echo $display_name | wc -w`
sort_key=$((word_count + 1))
local_chapter=`ls ${manga_name}_*.cb? 2> /dev/null | sort -r -n -k $sort_key -t_ | head -n1`
local_chapter=${local_chapter##*_}
local_chapter=${local_chapter%.*}
if [[ -n $local_chapter ]]; then
if [[ `echo "$local_chapter == 0" | bc` -eq 0 ]]; then
local_chapter=`echo $local_chapter | sed "s/^0*//g"`
fi
else
local_chapter="none"
fi
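# fetch the series index; wget's stderr is kept in tmp_status so the kind of
# failure (404 vs. DNS) can be told apart below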
echo -ne "\nOpening $base_url/$manga_name..."
wget -O tmp_index --no-cache $base_url/$manga_name/ 2>tmp_status
if [[ ! -s "tmp_index" ]]; then
if [[ `grep "ERROR 404: Not Found" tmp_status | wc -l` -gt 0 ]]; then
echo -e "\b\b\b ${R}[ERROR]${N}\nPlease make sure the manga name is correct."
elif [[ `grep "unable to resolve host address" tmp_status | wc -l` -gt 0 ]]; then
echo -e "\b\b\b ${R}[ERROR]${N}\nPlease check your internet connection."
fi
exit
fi
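# pull the chapter list out of the index page; chapter links appear to sit on
# lines carrying the "ch-subject" class, newest chapter first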
grep "ch-subject" tmp_index 2> /dev/null | grep "a href" | cut -f3 -d\/ > tmp_chapters
if [[ ! -s tmp_chapters ]]; then
echo -e "\b\b\b ${R}[ERROR]${N}\nCannot extract chapter list from the specified URL."
continue
fi
echo -e "\b\b\b ${G}[OK]${N}"
latest_chapter=`head -1 tmp_chapters`
if [[ $local_chapter != "none" && $CHAPTER -ne 1 ]]; then
echo "Local chapter: ${local_chapter} / Latest chapter: $latest_chapter"
if [[ $local_chapter == $latest_chapter ]]; then
echo "No update."
cleanup
continue
fi
fi
if [[ $LATEST -eq 1 ]]; then
first_chapter=$latest_chapter
last_chapter=$latest_chapter
fi
if [[ "$last_chapter" == "+" ]]; then
last_chapter=$latest_chapter
fi
if [[ $UPDATE -eq 1 ]]; then
if [[ $local_chapter == "" ]]; then
first_chapter=`sort -n tmp_chapters | head -n1`
else
index=`sort -n tmp_chapters | grep -nxm1 "$local_chapter" | cut -f1 -d:`
first_chapter=`sort -n tmp_chapters | tail -n+$((index + 1)) | head -n1`
fi
last_chapter=$latest_chapter
fi
if [[ $CHAPTER -eq 1 || $local_chapter == "none" ]]; then
echo "From chapter: $first_chapter / To chapter: $last_chapter"
fi
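# map the first/last chapter numbers to their line positions in the
# numerically sorted list, then slice out everything in between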
index=`sort -n tmp_chapters | grep -nxm1 "$first_chapter" | cut -f1 -d:`
last_index=`sort -n tmp_chapters | grep -nxm1 "$last_chapter" | cut -f1 -d:`
CHAPTERS=`sort -n tmp_chapters | awk -v first=$index -v last=$last_index 'FNR >= first && FNR <= last'`
for chapter in $CHAPTERS; do
COOKIE=""
echo "Downloading $display_name chapter $chapter"
echo -ne "\r$CLEAREOL`progressbar 0 1` (Initializing...)"
##
# BEGIN support for series that are hosted on 1000manga.com
##
if wget -qO - $base_url/$manga_name/$chapter/ | grep -qi "read this series at 1000manga.com"; then
echo -ne "\r$CLEAREOL`progressbar 0 1` (Redirecting to 1000manga.com...)"
base_url="$ALT_SITE"
echo -e "www.1000manga.com\tFALSE\t/\tFALSE\t0\tage_verified\t30" > $cookies
# field index for 1000manga.com HTML page
page_index=4
image_index=10
COOKIE="--load-cookies=$cookies"
fi
##
# END 1000manga.com
##
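# the chapter page links to its first reading page; follow that link and break
# the page <option> tags onto separate lines so they can be grepped one by one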
page_location=`wget $COOKIE -qO - $base_url/$manga_name/$chapter/ | grep -i "begin reading" | cut -f${page_index} -d\"`
wget $COOKIE -qO - $base_url$page_location | sed '/id="id_page_select"/,/\/select/ s/<option value=/\n<option value=/g' > tmp_page
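# collect the page numbers from the id_page_select dropdown and the image host
# path from the "manga-page" element, minus the trailing file name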
PAGES=`sed -n '/id="id_page_select"/,/\/select/ p' < tmp_page | grep -i "option value" | cut -f2 -d\"`
image_location=`grep -i "manga-page" tmp_page | cut -f${image_index} -d\"`
image_location=${image_location%/*.jpg}
total=`echo $PAGES | wc -w`
i=0
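# download every page image, updating the progress bar as we go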
for page in $PAGES; do
let i++
echo -ne "\r$CLEAREOL`progressbar $i $total` ($i/$total)"
wget $COOKIE -q "$image_location/$page.jpg"
done
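# zero-pad the chapter's integer part to three digits so the archives sort in
# reading order (chapter 0 is left untouched)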
int_part=${chapter%\.*}
if [[ `echo "$int_part == 0" | bc` -eq 0 && ${#int_part} -lt 3 ]]; then
chapter=`echoes "0" ${#int_part} 2`$chapter
fi
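# bundle the pages into a .cbz (a plain zip of images), clean up, and
# optionally log the finished archive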
archive_name="${manga_name}_${chapter}.cbz"
echo -ne "\r$CLEAREOL`progressbar $i $total` (Creating archive...)"
if zip -q $archive_name *.jpg; then
cleanup
echo -e "\r$CLEAREOL`progressbar $i $total` ($archive_name)"
[[ $LOG_FILE -eq 1 ]] && echo "[`date +%c`] $archive_name" >> "$log"
fi
done
done