>>4095573
because fuck logins, and fuck applications, I wrote a download script
no deps beyond standard tools: bash (readarray is a builtin), curl, wget, grep, cut, sort
assumes the url is the first gallery page, names each file with just its page number, and tosses everything into whatever directory you're running it from
also "parallelizes" the download by spawning a wget per image, ~40 fucking subprocesses at a time, because why the fuck not
#!/bin/bash
if [[ $# -ne 1 ]]; then echo "Need g.e-hentai gallery url" >&2; exit 1; fi
declare -a gallery
declare -a imageURLs
# < <(stuff) is because if I had just piped readarray, the pipe would run it in a subshell. This is a workaround
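# quick sanity check of the difference (the "demo" array name is just for illustration):
#   printf 'a\nb\n' | readarray -t demo; echo "${#demo[@]}"     -> 0, the array never makes it back to the parent shell
#   readarray -t demo < <(printf 'a\nb\n'); echo "${#demo[@]}"  -> 2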
# gets all the gallery page URLs
readarray -t gallery < <(curl -s "$1" | grep -o -E 'href="([^"#]+)"' | cut -d'"' -f2 | grep /g/ | grep -v '\.xml' | sort -u)
echo "fetched pages:"
for page in "${gallery[@]"; do echo "$page"; done
for page in "${gallery[@]}"; do
echo "reading url: $page"
# gets all the pages from each gallery
readarray imageURLs < <(curl $page 2>&1 | grep -o -E 'href="([^"#]+)"' | cut -d'"' -f2 | grep /s/)
for URL in "${imageURLs[@]}"; do
# gets the page number from the URL, to use as the name of image
num=$(echo "$URL" | grep -o -E "[0-9]{0,}$")
# gets the image URL from the page, and downloads it
wget $(curl $URL 2>&1 | grep -o -E 'src="([^"#]+)"' | cut -d '"' -f2 | grep -o -E "http://[0-9]{2}.*") -O $num -q &
done
wait
done
echo "done"