#!/bin/bash
# Copyright 2014 overflowerror (https://github.com/overflowerror/)
#
# This file is part of Anineko.
#
# Anineko is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Anineko is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Anineko. If not, see <http://www.gnu.org/licenses/>.
export useragent="Mozilla/5.0 AppleWebKit/537.36 (KHTML, like Gecko) Chrome/37... nope, actually wget"
mode=$1
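# help SCRIPT_NAME [SUBCOMMAND]: print usage information; pass "file" or "search"
# as the second argument for subcommand-specific help.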
help() {
    if test "$2" = "file"; then
        echo -e \
            "Usage: $1 file URL FILE"\
            "\nDownloads one video from URL (a GoGoAnime page) to FILE."
    elif test "$2" = "search"; then
        echo -e \
            "Usage: $1 search KEYWORDS [DIRECTORY]"\
            "\nLists all links on the result pages of a search for KEYWORDS on GoGoAnime."\
            "\nIf DIRECTORY is given, it will be created if it doesn't exist yet and all result videos will be downloaded to that directory."
    else
        echo -e \
            "Usage: $1 file|search|update"\
            "\nDownloads videos from GoGoAnime."\
            "\n"\
            "\nFor more help try: $1 file"\
            "\n or: $1 search"
    fi
}
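# update mode: compare the md5sum of this script with the published checksum and,
# if they differ, download and run the installer from the repository.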
if test "$mode" = "update"; then
echo "Checking md5sum of current version..."
if test "$(md5sum $0 | awk '{print $1}')" = $(wget -O - "https://raw.githubusercontent.com/overflowerror/Anineko/master/anineko.md5" -q); then
echo "Local version is up to date."
exit 0
fi
echo "Downloading update script..."
wget -O /tmp/$$.anineko-install "https://raw.githubusercontent.com/overflowerror/Anineko/master/install" --progress=bar:force 2>&1 | tail -f -n +12
echo "Download finished..."
echo "Executing..."
echo
bash /tmp/$$.anineko-install
exit $?
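# search mode: crawl the GoGoAnime search result pages for the given keywords
# and optionally download every hit into DIRECTORY.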
elif test "$mode" = "search"; then
text=$(echo $2 | tr " " "+")
dir=$3
tmp="/tmp/anineko.$$.log"
echo "" > $tmp
if test -z "$text"; then
help $0 search
exit 2
fi
echo "Searching for $text ... "
defpage="page/"
pagest=""
pagenr=1
links=""
    while true; do
        #echo "http://www.gogoanime.com/${pagest}?s=${text}"
        spage=$(wget --user-agent="$useragent" -O - "http://www.gogoanime.com/${pagest}?s=${text}" 2> /dev/null)
        found=$(echo "$spage" | grep "<html")
        if test -n "$found"; then
            echo " page $pagenr ..."
            linkp=$(echo $spage | sed -e "s/postlist/\n/g" | grep "Permanent Link to" | awk -F"<a " '{ print $2 }' | awk -F'\"' '{ print $2}')
            for link in $linkp; do
                if test -n "$(echo $link | grep "www.gogoanime.com/category")"; then
                    echo " skipping category link"
                else
                    links="$links $link"
                fi
            done
            pagenr=$(echo $pagenr + 1 | bc)
            pagest=${defpage}${pagenr}
        else
            echo " no more pages..."
            break
        fi
    done
echo "done"
    links=$(for link in $links; do
        echo $link
    done | sort)
    i=0
    for link in $links; do
        i=$(echo $i + 1 | bc)
    done
    total=$i
    echo "Found $total episodes:" | tee -a $tmp
    for link in $links; do
        echo " - " $link | tee -a $tmp
    done
if test -z "$dir"; then
exit 0
fi
echo -ne "\nPress [Enter] to continue, [Ctrl] + [C] to cancel. "
read
echo | tee -a $tmp
echo -n "Creating directory ... "
if test ! -d $dir; then
mkdir $dir
fi
echo -e "[ \033[32mdone\033[0m ]"
fails=""
i=0
for link in $links; do
i=$(echo $i + 1 | bc)
file=$(echo $link | awk -F'/' '{ print $4}')
echo "Fetching file $i of $total: $file ..." | tee -a $tmp
$0 file $link ${dir}/${file}.mp4 | tee -a $tmp
if test ${PIPESTATUS[0]} = 0; then
echo "done." | tee -a $tmp
else
echo -e "\033[31msomething went wrong.\033[0m" | tee -a $tmp
fails="$fails $file"
fi
done
if test -z "$fails"; then
echo "All done."
else
echo
echo "Some episodes couldn't be downloaded:"
for name in $fails; do
echo " - " $name
done
echo
echo "For more information check $tmp ..."
fi
elif test "$mode" = "file"; then
url=$2
file=$3
if test -z "$file"; then
help $0 file
exit 2
fi
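    # extract PROVIDER_PAGE_URL: scrape the player page for the direct .flv/.mp4 link,
    # URL-decode it and print it on stdout (progress messages go to stderr).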
    extract() {
        # first param: provider page url
        echo -n " Extracting video-url... " 1>&2
        vurl=$(wget --user-agent="$useragent" -q -O - "$1" 2> /dev/null | grep "url: " | grep '.flv\|.mp4' | tr '\"' "'" | awk -F"'" '{ print $2 }' 2> /dev/null)
        vurl=$(python -c 'import sys, urllib; print urllib.unquote(sys.argv[1])' "$vurl")
        echo -e "[ \033[32mdone\033[0m ]" 1>&2
        echo "$vurl"
    }
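    # download VIDEO_URL: probe the reported Content-Length first and give up (return 1)
    # if it is below 20 MB, otherwise download the video to $file.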
    download() {
        # first param: video url
        echo -n " Probing size... "
        size=$(wget --user-agent="$useragent" "$1" --spider --server-response -O - 2>&1 | grep "Content-Length: " | awk '{ print $2 }' | egrep ".{5}" | tail -n 1)
        echo -n "$size"
        if test -n "$size" && test "$size" -lt 20000000; then
            echo -e " \033[31m< 20 MB\033[0m"
            return 1
        fi
        echo -e " > 20 MB"
        echo " Starting Download... "
        echo -ne "\033[?25l"
        wget --user-agent="$useragent" -O "${file}" -c "$1" --progress=bar:force 2>&1 | tail -f -n +12
        echo -ne "\033[?25h"
        # echo -ne "\033[2A\033[0K"
        echo " Download completed."
        return 0
    }
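    # provider NAME PATTERN: look for an embedded player frame matching PATTERN on the
    # episode page; if one is found, extract and download it and exit 0 on success.
    # Otherwise print "fail" so the next provider can be tried.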
    provider() {
        # params:
        # - provider name
        # - provider grep-thing
        text=" Try to extract ${1}-frame..."
        echo -n "$text"
        for i in $(seq ${#text} 39); do
            echo -n " "
        done
        purl=$(echo -e "${page}" | grep "${2}" | sed -e "s/#038;//g" | awk -F'\"' '{ print $2; }' 2> /dev/null)
        if test -n "${purl}"; then
            echo -e "[ \033[32mdone\033[0m ]"
            download "$(extract "$purl")"
            ret=$?
            echo -n " overall download... "
            if test "$ret" = 0; then
                echo -e "[ \033[32mdone\033[0m ]"
                exit 0
            fi
        fi
        echo -e "[ \033[31mfail\033[0m ]"
    }
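    # Fetch the episode page once, then try each known provider in order.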
    export page=$(wget --user-agent="$useragent" -q -O - "${url}")
    provider "playpanda" "playpanda"
    provider "videowing" "videowing"
    provider "playbb" "playbb.me"
    provider "easyvideo" "easyvideo.me"
    # Add additional providers here
    echo -e " \033[31mPermanent fail!\033[0m"
    exit 1
else
    help "$0"
    exit 2
fi