Anineko/anineko

245 lines
6.2 KiB
Text
Raw Normal View History

2014-11-20 19:05:30 +00:00
#!/bin/bash
2015-01-03 18:23:29 +00:00
# Copyright 2014 overflowerror (https://github.com/overflowerror/)
#
# This file is part of Anineko.
#
# Anineko is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Anineko is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Anineko. If not, see <http://www.gnu.org/licenses/>.
2014-12-14 22:52:53 +00:00
2014-12-14 21:50:27 +00:00
# Identify ourselves with a joke UA string; wget's default user agent is
# blocked by some mirrors.  Exported so recursive invocations ("$0 file ...")
# and all wget calls below reuse it.
export useragent="Mozilla/5.0 AppleWebKit/537.36 (KHTML, like Gecko) Chrome/37... nope, actually wget"

# First CLI argument selects the subcommand: file | search | update
mode=$1
2014-11-22 23:08:12 +00:00
# Print usage information.
# $1 - the name this script was invoked as
# $2 - optional subcommand ("file" or "search") for detailed help;
#      anything else prints the general usage text
help() {
case "$2" in
file)
echo -e \
"Usage: $1 file URL FILE"\
"\nDownloads one video from URL (GoGoAnime-Page) to FILE."
;;
search)
echo -e \
"Usage: $1 search KEYWORDS [DIRECTORY]"\
"\nLists all links on the result pages on a search for KEYWORDS on GoGoAnime."\
"\nIf DIRECTORY is given, it will be created if it doesn't exist yet and all result videos will be downloaded to that directory."
;;
*)
echo -e \
"Usage: $1 file|search|update"\
"\nDownloads videos from GoGoAnime."\
"\n"\
"\nFor more help try: $1 file"\
"\n or: $1 search"
;;
esac
}
2014-12-14 22:37:14 +00:00
if test "$mode" = "update"; then
# Self-update: compare the installed script's md5 against the published
# checksum and only download the installer when they differ.
echo "Checking md5sum of current version..."
# 'command -v' is the portable replacement for 'which'; quoted so a script
# path containing spaces still works.
if test "$(md5sum "$(command -v "$0")" | awk '{print $1}')" = "$(wget -O - "https://raw.githubusercontent.com/overflowerror/Anineko/master/anineko.md5" -q | awk '{print $1}')"; then
echo "Local version is up to date."
exit 0
fi
echo "Downloading update script..."
# tail -n +12 strips wget's header chatter, leaving only the progress bar.
wget -O /tmp/$$.anineko-install "https://raw.githubusercontent.com/overflowerror/Anineko/master/install" --progress=bar:force 2>&1 | tail -f -n +12
echo "Download finished..."
echo "Executing..."
echo
bash /tmp/$$.anineko-install
exit $?
elif test "$mode" = "search"; then

# GoGoAnime joins search terms with '+' in the query string.
text=$(echo "$2" | tr " " "+")
dir=$3

# Per-run log file; also referenced by the failure summary at the end.
tmp="/tmp/anineko.$$.log"
echo "" > $tmp

if test -z "$text"; then
help "$0" search
exit 2
fi

echo "Searching for $text ... "
defpage="page/"
pagest=""
pagenr=1
links=""
# Walk the paginated result list until a page no longer returns HTML.
while true; do
#echo "http://www.gogoanime.com/${pagest}?s=${text}"
# NOTE: this used to read 'spage==$(...)', which prefixed a stray '=' onto
# every fetched page.
spage=$(wget --user-agent="$useragent" -O - "http://www.gogoanime.com/${pagest}?s=${text}" 2> /dev/null )
found=$(echo $spage | grep "<html")
if test -n "$found"; then
echo " page $pagenr ..."
# Each result anchor carries a 'Permanent Link to' title; pull its href.
linkp=$(echo $spage | sed -e "s/postlist/\n/g" | grep "Permanent Link to" | awk -F"<a " '{ print $2 }' | awk -F'\"' '{ print $2}')
for link in $linkp; do
if test -n "$(echo $link | grep "www.gogoanime.com/category")"; then
echo " skipping category link"
else
links="$links $link"
fi
done
# Shell arithmetic instead of forking bc for a simple increment.
pagenr=$((pagenr + 1))
pagest=${defpage}${pagenr}
else
echo " no more pages..."
break
fi
done
echo "done"
2014-11-20 19:05:30 +00:00
links=$(for link in $links; do
echo $link
done | sort)
# The link list is whitespace-separated, so its word count is the episode
# count (replaces a per-item fork of bc).
total=$(echo $links | wc -w)

echo "Found $total episodes:" | tee -a $tmp
for link in $links; do
echo " - " $link | tee -a $tmp
done

# Without a target directory we only list results; no downloads.
if test -z "$dir"; then
exit 0
fi

echo -ne "\nPress [Enter] to continue, [Ctrl] + [C] to cancel. "
read
echo | tee -a $tmp
echo -n "Creating directory ... "
if test ! -d "$dir"; then
mkdir "$dir"
fi
echo -e "[ \033[32mdone\033[0m ]"

fails=""
i=0
# Re-invoke ourselves in 'file' mode for every episode; collect failures.
for link in $links; do
i=$((i + 1))
# The 4th '/'-separated component of the URL is the episode slug.
file=$(echo $link | awk -F'/' '{ print $4}')
echo "Fetching file $i of $total: $file ..." | tee -a $tmp
"$0" file "$link" "${dir}/${file}.mp4" | tee -a $tmp
# PIPESTATUS[0] is the exit status of the download, not of tee.
if test ${PIPESTATUS[0]} = 0; then
echo "done." | tee -a $tmp
else
echo -e "\033[31msomething went wrong.\033[0m" | tee -a $tmp
fails="$fails $file"
fi
done

if test -z "$fails"; then
echo "All done."
else
echo
echo "Some episodes couldn't be downloaded:"
for name in $fails; do
echo " - " $name
done
echo
echo "For more information check $tmp ..."
fi
2014-11-20 19:05:30 +00:00
2014-11-22 23:08:12 +00:00
elif test "$mode" = "file"; then

url=$2
file=$3

# FILE (the output path) is mandatory; URL alone is not enough.
if test -z "$file"; then
help "$0" file
exit 2
fi
2014-11-22 22:40:15 +00:00
# Extract the direct video URL from a provider page.
# $1 - provider page url
# stdout: percent-decoded video url (empty on failure)
# Progress/status messages go to stderr so stdout stays machine-readable.
extract() {
echo -n " Extracting video-url... " 1>&2
vurl=$(wget --user-agent="$useragent" -q -O - "$1" 2> /dev/null| grep "url: " | grep '.flv\|.mp4' | tr '\"' "'" | awk -F"'" '{ print $2 }' 2> /dev/null)
if test -z "$vurl"; then
echo -e "[ \033[31mfail\033[0m ]" 1>&2
return
fi
# Percent-decode the URL.  "$vurl" is quoted so URLs containing '&' or
# spaces survive; fall back to python3 where python2 is no longer installed.
vurl=$(python2 -c 'import sys, urllib; print urllib.unquote(sys.argv[1])' "$vurl" 2> /dev/null || python3 -c 'import sys, urllib.parse; print(urllib.parse.unquote(sys.argv[1]))' "$vurl")
echo -e "[ \033[32mdone\033[0m ]" 1>&2
echo $vurl
}
2014-11-22 22:40:15 +00:00
# Download a video to ${file} (set by the caller).
# $1 - direct video url
# returns 0 on success; 1 when the url is empty, the size probe fails, or
# the file is smaller than 20 MB (likely an error page, not a video).
download() {
if test -z "$1"; then
return 1
fi
echo -n " Probing size... "
# --spider only probes.  Redirect chains can report several Content-Length
# headers, so keep only the last; egrep ".{5}" drops obviously-tiny values.
size=$(wget --user-agent="$useragent" "$1" --spider --server-response -O - 2>&1 | grep "Content-Length: " | awk '{ print $2 }' | egrep ".{5}" | tail -n 1)
echo -n $size
# An empty or non-numeric size means the probe failed.  Previously
# 'test $size -lt ...' errored out here and fell through to the download,
# silently bypassing the 20 MB sanity check.
case "$size" in
''|*[!0-9]*)
echo -e " \033[31munknown size\033[0m"
return 1
;;
esac
if test "$size" -lt 20000000 ; then
echo -e " \033[31m< 20 MB\033[0m"
return 1
fi
echo -e " > 20 MB"
echo " Starting Download... "
# Hide the cursor while the progress bar redraws, restore it afterwards.
echo -ne "\033[?25l"
wget --user-agent="$useragent" -O "${file}" -c "$1" --progress=bar:force 2>&1 | tail -f -n +12
echo -ne "\033[?25h"
# echo -ne "\033[2A\033[0K"
echo " Download completed."
return 0
}
2014-12-14 21:50:27 +00:00
# Try one hosting provider embedded in the episode page.
# $1 - provider display name (for the status line)
# $2 - grep pattern identifying the provider's iframe in $page
# Exits the whole script with status 0 on a successful download; otherwise
# prints a fail tag and returns so the next provider can be tried.
provider() {
text=" Try to extract "${1}"-frame..."
echo -n "$text"
# Pad the status line to column 40 so the [done]/[fail] tags line up.
for i in $(seq ${#text} 39); do
echo -n " "
done
purl=$(echo -e "${page}" | grep "${2}" | sed -e "s/#038;//g" | awk -F'\"' '{ print $2; }' 2> /dev/null)
if test -n "${purl}"; then
echo -e "[ \033[32mdone\033[0m ]"
# $(...) instead of backticks; quoting keeps URLs with '&' or spaces intact.
download "$(extract "$purl")"
ret=$?
echo -n " overall download... "
if test "$ret" == 0; then
echo -e "[ \033[32mdone\033[0m ]"
exit 0
fi
fi
echo -e "[ \033[31mfail\033[0m ]"
}
2014-11-22 22:40:15 +00:00
2014-12-14 21:50:27 +00:00
# Fetch the episode page once; each provider() call greps it for its player.
export page=$(wget --user-agent="$useragent" -q -O - "${url}")

# Try the known providers in order of preference; the first one that yields
# a working download exits the script with status 0 from inside provider().
provider "playpanda" "playpanda"
provider "videowing" "videowing"
provider "playbb" "playbb.me"
provider "easyvideo" "easyvideo.me"

# Add additional providers here

# Every provider failed.
echo -e " \033[31mPermanent fail!\033[0m"
exit 1

else
help "$0"
exit 2
fi