add gaytorss script for downloading and uploading torrent files via RSS feed
This commit is contained in:
parent
64a81ca78d
commit
38959dc37b
302
src/scripts/gaytorss.sh
Executable file
302
src/scripts/gaytorss.sh
Executable file
@@ -0,0 +1,302 @@
|
||||
#! /usr/bin/env nix-shell
#!nix-shell -i bash -p curl wget lftp xmlstarlet coreutils gnugrep gnused openssh sshpass

# Download new .torrent files from a private tracker's RSS feed and push them
# to a seedbox directory, trying SFTP first, then FTPS, then plain FTP.

set -euo pipefail
IFS=$'\n\t'

## CONFIG (edit if you must)
# RSS feed URL. NOTE(review): the passkey embedded here is a credential —
# anyone who can read this script can use it; consider loading it from an
# environment variable or a mode-600 file instead.
RSS_URL='https://www.gaytor.rent/rss.php?num=10&feed=dl&cat=69,62,29,46,30,43,19,17,59,44,50,9,7,48,5,67,66,34,68,27,32,63,12,33,53,57,35,36,58,37,54,38,39,64,56,40,61,60,45,47,70,1,41,42,51,65,28&passkey=c8babd3026b740b6bb80c1811cc3fdbb'

# Destination host and directory on the seedbox.
REMOTE_HOST='67.lw.itsby.design'
REMOTE_PATH='/home/crouton6368z/watch'

# Per-run scratch directory; removed by the EXIT trap unless KEEP_LOCAL=true.
TMPDIR="$(mktemp -d /tmp/rss-torrents.XXXXXX)"
LOGFILE="${TMPDIR}/rss-to-ftps.log"
KEEP_LOCAL=false # set to true if you want to keep local .torrent files

# URLs fetched on previous runs, one per line; consulted later to avoid
# re-downloading the same torrent.
HISTORY_FILE="${HOME}/.rss-to-ftps.history"
touch "$HISTORY_FILE"
|
||||
|
||||
#######################################
# EXIT-trap handler: report a failing exit status, then dispose of (or keep)
# the scratch directory.
# Globals: TMPDIR, LOGFILE, KEEP_LOCAL (read); rc (written).
#######################################
cleanup() {
  rc=$?
  [[ $rc -eq 0 ]] || echo "ERROR: script failed (exit $rc). See $LOGFILE" >&2
  if [[ "${KEEP_LOCAL}" != false ]]; then
    echo "Local files kept in $TMPDIR"
  else
    rm -rf "$TMPDIR"
  fi
}
trap cleanup EXIT
|
||||
|
||||
echo "Working in $TMPDIR" | tee "$LOGFILE"

# Session cookies exported from LibreWolf, used so curl/wget can authenticate
# against the tracker with the browser's login session.
COOKIE_FILE="${HOME}/.librewolf/cookies.txt"

# convert_cookie_jar SRC DST
# Rewrite a cookie jar into the tab-separated (Netscape) layout that curl and
# wget understand. Comment and blank lines are dropped; only entries carrying
# a domain, a name and a value are kept.
# BUG FIX: the original wrote separators with `echo "$domain\t..."`, which
# emits literal backslash-t sequences (bash echo does not expand \t without
# -e), producing an unusable cookie file; printf writes real tabs.
convert_cookie_jar() {
  # Create DST up front so the caller always points at an existing file,
  # even when no cookie line survives the filter.
  : > "$2"
  # `|| true` keeps `set -e -o pipefail` from aborting when grep -v passes
  # nothing through (it exits 1 in that case).
  { grep -v "^#" "$1" || true; } | { grep -v "^$" || true; } \
    | while IFS=$'\t' read -r domain flag path secure expiration name value; do
        if [ -n "$domain" ] && [ -n "$name" ] && [ -n "$value" ]; then
          printf '%s\t%s\t%s\t%s\t%s\t%s\t%s\n' \
            "$domain" "$flag" "$path" "$secure" "$expiration" "$name" "$value" >> "$2"
        fi
      done
}

if [ ! -f "$COOKIE_FILE" ]; then
  echo "WARNING: Cookie file not found at $COOKIE_FILE" | tee -a "$LOGFILE"
  echo "This may cause authentication issues. Make sure you are logged into gaytor.rent in LibreWolf." | tee -a "$LOGFILE"
else
  echo "Using cookies from: $COOKIE_FILE" | tee -a "$LOGFILE"

  # Netscape-format jars are usable by curl as-is; anything else is converted.
  # (The original re-tested -f here; that inner check was always true and has
  # been dropped.)
  if head -1 "$COOKIE_FILE" | grep -q "# Netscape HTTP Cookie File"; then
    echo "Using Netscape format cookies directly" | tee -a "$LOGFILE"
  else
    echo "Converting cookies to curl format..." | tee -a "$LOGFILE"
    CURL_COOKIES="${TMPDIR}/curl_cookies.txt"
    convert_cookie_jar "$COOKIE_FILE" "$CURL_COOKIES"
    COOKIE_FILE="$CURL_COOKIES"
  fi
fi

# Seedbox login. NOTE(review): plaintext credentials in the script are
# readable by anyone with access to this file and visible in `ps` when passed
# on command lines below; prefer an environment variable or ~/.netrc.
FTP_USER='crouton6368z'
FTP_PASS='PGR7HoxZK8F4Npuh'

cd "$TMPDIR"
|
||||
|
||||
# ---------------------------------------------------------------------------
# Fetch the RSS feed into $RSS_XML, then validate the payload.
# BUG FIX: the original ran both validation greps on $RSS_XML *before* the
# curl command that downloads it, so they always inspected a nonexistent
# file and could never catch an authentication failure. Download first,
# validate second.
# ---------------------------------------------------------------------------
RSS_XML="${TMPDIR}/feed.xml"
echo "Downloading RSS feed..." | tee -a "$LOGFILE"

curl -fsS --max-time 60 -H "User-Agent: Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36" -b "$COOKIE_FILE" -o "$RSS_XML" "$RSS_URL" || {
  echo "Failed to download RSS feed" | tee -a "$LOGFILE"
  exit 1
}

# Reject responses that are tracker error pages rather than a feed.
if grep -q "You don't have access\|Access denied\|Authentication failed\|Invalid passkey" "$RSS_XML" 2>/dev/null; then
  echo "ERROR: RSS feed indicates authentication failure. Your passkey may be invalid or expired." | tee -a "$LOGFILE"
  echo "Please check your passkey and try again." | tee -a "$LOGFILE"
  exit 1
fi

# A feed without <item> elements is suspicious but not fatal.
if ! grep -q "<item>" "$RSS_XML" 2>/dev/null; then
  echo "WARNING: RSS feed appears to be empty or invalid. This might indicate an authentication issue." | tee -a "$LOGFILE"
fi
|
||||
|
||||
# Extract candidate URLs from the feed.
echo "Extracting .torrent URLs from feed..." | tee -a "$LOGFILE"

# Primary source: <enclosure url="..."> attributes. Extraction failures are
# swallowed (|| true) so an unexpected feed layout yields an empty array
# instead of aborting under `set -e`.
mapfile -t URLs < <(
  xmlstarlet sel -N x="http://purl.org/rss/1.0/" -t \
    -m "//enclosure" -v "@url" -n "$RSS_XML" 2>/dev/null || true
)

# Secondary source: text content of <link>, <guid> and (media:)content
# elements, matched by local-name so namespaces don't matter; filtered for
# ".torrent" by the aggregation step below.
mapfile -t EXTRA < <(
  xmlstarlet sel -t -m "//*[(local-name()='link' or local-name()='guid' or local-name()='content' )]" \
    -v "." -n "$RSS_XML" 2>/dev/null || true
)
|
||||
|
||||
# ---------------------------------------------------------------------------
# Aggregate candidate URLs and filter for .torrent links.
# ---------------------------------------------------------------------------
allurls=()
for u in "${URLs[@]}"; do
  [[ -z "$u" ]] && continue
  allurls+=("$u")
done
for u in "${EXTRA[@]}"; do
  # Only keep secondary-source values that mention ".torrent".
  [[ "$u" =~ \.torrent ]] || continue
  allurls+=("$u")
done

# extract_torrent_urls_by_regex FILE
# Conservative fallback: grep http(s) links containing ".torrent" out of FILE,
# decode "&amp;" separators, and deduplicate.
# BUG FIX: the original pipeline used `sed 's/&/\&/g'`, which replaces "&"
# with "&" — a no-op (the HTML-entity pattern was lost); it now decodes
# "&amp;" so query strings are usable as URLs.
extract_torrent_urls_by_regex() {
  grep -oE 'https?://[^"'\''<> ]+\.torrent[^"'\''<> ]*' "$1" | sed 's/&amp;/\&/g' | sort -u
}

# Fallback when XML extraction produced nothing.
if [ "${#allurls[@]}" -eq 0 ]; then
  echo "XML extraction returned nothing; falling back to regex grep." | tee -a "$LOGFILE"
  mapfile -t GREPURLS < <(extract_torrent_urls_by_regex "$RSS_XML")
  for u in "${GREPURLS[@]}"; do
    allurls+=("$u")
  done
fi
|
||||
|
||||
# decode_entities STRING
# Trim surrounding whitespace and decode the HTML entities that feeds embed
# in URLs (&lt; &gt; &quot; &#39;/&apos;, and &amp; last so "&amp;lt;" is not
# double-decoded).
# BUG FIX: the original sed expressions had already-decoded characters on
# the left-hand side (e.g. 's/</</g', 's/&/\&/g'), making every
# substitution a no-op; the entity patterns are restored here.
decode_entities() {
  printf '%s\n' "$1" | sed \
    -e 's/^[[:space:]]*//' -e 's/[[:space:]]*$//' \
    -e 's/&lt;/</g' \
    -e 's/&gt;/>/g' \
    -e 's/&quot;/"/g' \
    -e "s/&#0*39;/'/g" \
    -e "s/&apos;/'/g" \
    -e 's/&amp;/\&/g'
}

# Deduplicate while preserving first-seen order.
declare -A seen
final_urls=()
for u in "${allurls[@]}"; do
  u="$(decode_entities "$u")"
  [[ -z "$u" ]] && continue
  if [[ -z "${seen[$u]:-}" ]]; then
    seen[$u]=1
    final_urls+=("$u")
  fi
done
|
||||
|
||||
# Drop every URL already recorded in HISTORY_FILE by a previous run.
urls_to_fetch=()
for url in "${final_urls[@]}"; do
  if grep -Fxq "$url" "$HISTORY_FILE"; then
    echo "Skipping already-seen URL: $url" | tee -a "$LOGFILE"
  else
    urls_to_fetch+=("$url")
  fi
done

# Feed contained no torrent links at all.
if [[ ${#final_urls[@]} -eq 0 ]]; then
  echo "No .torrent URLs found in feed. Exiting." | tee -a "$LOGFILE"
  exit 0
fi

# Every link was seen before: nothing new this run.
if [[ ${#urls_to_fetch[@]} -eq 0 ]]; then
  echo "All ${#final_urls[@]} URLs already downloaded. Nothing to do." | tee -a "$LOGFILE"
  exit 0
fi

echo "Found ${#final_urls[@]} candidate URLs (${#urls_to_fetch[@]} new). Downloading..." | tee -a "$LOGFILE"
|
||||
|
||||
# ---------------------------------------------------------------------------
# Download each new .torrent and validate the payload.
# ---------------------------------------------------------------------------

# is_html_error_page FILE
# True when FILE starts with an HTML document marker (i.e. the tracker served
# an error page instead of a torrent).
# BUG FIX: the original used grep's basic regex "<html|<HTML|<!DOCTYPE",
# where "|" is a literal character, so the check could never match; -E
# enables alternation and -i covers the case variants.
is_html_error_page() {
  head -c 100 "$1" | grep -qiE '<html|<!DOCTYPE'
}

for url in "${urls_to_fetch[@]}"; do
  # Derive a filename from the URL path (query string stripped); fall back
  # to an md5-based name when the basename does not end in .torrent.
  fname="$(basename "${url%%\?*}")"
  if [[ ! "$fname" =~ \.torrent$ ]]; then
    fname="$(echo "$url" | md5sum | awk '{print $1}').torrent"
  fi

  if [[ -f "$fname" ]]; then
    echo "Skipping existing $fname" | tee -a "$LOGFILE"
    continue
  fi

  echo "DEBUG: Attempting to download from URL: $url" | tee -a "$LOGFILE"
  echo "DEBUG: URL length: ${#url}" | tee -a "$LOGFILE"
  # BUG FIX: the regex dot was unescaped ("=~ .torrent"), so any character
  # followed by "torrent" matched; "\.torrent" tests the literal extension.
  echo "DEBUG: URL contains .torrent: $([[ "$url" =~ \.torrent ]] && echo "YES" || echo "NO")" | tee -a "$LOGFILE"

  # Probe with a HEAD request first — diagnostics only; a failure is logged
  # but does not prevent the download attempt below.
  echo "DEBUG: Testing URL with HEAD request..." | tee -a "$LOGFILE"
  if curl -I --max-time 30 -H "User-Agent: Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36" -b "$COOKIE_FILE" "$url" 2>&1 | tee -a "$LOGFILE"; then
    echo "DEBUG: HEAD request successful" | tee -a "$LOGFILE"
  else
    echo "DEBUG: HEAD request failed" | tee -a "$LOGFILE"
  fi

  echo "Downloading: $url -> $fname" | tee -a "$LOGFILE"
  # curl first, wget as a fallback.
  if ! curl -fsSL --max-time 120 -H "User-Agent: Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36" -b "$COOKIE_FILE" -o "$fname" "$url"; then
    echo "curl failed for $url, trying wget..." | tee -a "$LOGFILE"
    if ! wget -q --user-agent="Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36" --load-cookies="$COOKIE_FILE" -O "$fname" "$url"; then
      echo "Failed to download $url, skipping." | tee -a "$LOGFILE"
      rm -f "$fname" || true
      continue
    fi
  fi

  # Reject empty downloads.
  if [ ! -s "$fname" ]; then
    echo "Warning: $fname is empty, removing..." | tee -a "$LOGFILE"
    rm -f "$fname" || true
    continue
  fi

  # Reject tracker error bodies masquerading as the torrent.
  if grep -q "You don't have access to this torrent\|Download failed\|Access denied\|Authentication failed" "$fname" 2>/dev/null; then
    echo "ERROR: Authentication failed for $url - you don't have access to this torrent" | tee -a "$LOGFILE"
    echo "This suggests your passkey may be invalid or expired, or you don't have the required permissions." | tee -a "$LOGFILE"
    rm -f "$fname" || true
    continue
  fi

  # Reject HTML error pages.
  if is_html_error_page "$fname"; then
    echo "Warning: $fname appears to be an HTML error page, removing..." | tee -a "$LOGFILE"
    rm -f "$fname" || true
    continue
  fi

  # Sanity check: bencoded torrents start with 'd' and contain an "info" key.
  # Suspicious files are kept (with a warning) for manual inspection.
  if ! (head -c 20 "$fname" | grep -q "^d" && grep -a -q "info" "$fname") 2>/dev/null; then
    echo "Warning: $fname doesn't look like a valid torrent file; keeping it but check manually." | tee -a "$LOGFILE"
    if grep -q "<html\|<HTML" "$fname" 2>/dev/null; then
      echo "File appears to be HTML (possibly an error page). URL may be invalid or require authentication." | tee -a "$LOGFILE"
    fi
  fi

  # Record the URL so later runs skip it.
  echo "$url" >> "$HISTORY_FILE"
done
|
||||
|
||||
# Collect the .torrent files produced above. nullglob makes an unmatched
# glob expand to nothing, so an empty directory yields an empty array.
# BUG FIX: the old sentinel test (count==1 && [0]=='*.torrent') also fired
# when a file literally named "*.torrent" existed, wrongly exiting with
# torrents on disk; counting under nullglob cannot misfire that way.
shopt -s nullglob
torrents=(*.torrent)
shopt -u nullglob
if [[ ${#torrents[@]} -eq 0 ]]; then
  echo "No .torrent files present after download. Exiting." | tee -a "$LOGFILE"
  exit 0
fi

echo "Uploading ${#torrents[@]} .torrent file(s) to ${REMOTE_HOST}:${REMOTE_PATH}" | tee -a "$LOGFILE"
|
||||
|
||||
# ---------------------------------------------------------------------------
# Upload the .torrent files: SFTP first, then FTPS, then plain FTP.
# BUG FIX: under `set -e -o pipefail`, the original's bare
#   `timeout ... lftp ... | tee -a "$LOGFILE"`
# pipelines aborted the whole script the moment FTPS failed, so the
# FTPS->FTP fallback (and its PIPESTATUS checks) was unreachable dead code.
# Each pipeline's status is now captured with `|| rc=$?` so the fallback
# chain actually runs.
# NOTE(review): passwords on the command line (sshpass -p, lftp -u) are
# visible in `ps` output; consider the SSHPASS env var or ~/.netrc instead.
# ---------------------------------------------------------------------------
echo "Attempting SFTP upload..." | tee -a "$LOGFILE"
sftp_rc=0
timeout 300 sshpass -p "${FTP_PASS}" sftp -o StrictHostKeyChecking=no -o ConnectTimeout=60 -P 22 "${FTP_USER}@${REMOTE_HOST}" <<EOF 2>&1 | tee -a "$LOGFILE" || sftp_rc=$?
mkdir ${REMOTE_PATH}
cd ${REMOTE_PATH}
mput *.torrent
bye
EOF

if [[ $sftp_rc -eq 0 ]]; then
  echo "SFTP upload successful." | tee -a "$LOGFILE"
else
  echo "SFTP upload failed (exit $sftp_rc). Trying FTPS..." | tee -a "$LOGFILE"

  # Fallback 1: FTPS via lftp.
  ftps_rc=0
  timeout 300 lftp -u "${FTP_USER},${FTP_PASS}" "ftps://${REMOTE_HOST}" <<EOF 2>&1 | tee -a "$LOGFILE" || ftps_rc=$?
set ftp:ssl-force true
set ftp:ssl-protect-data true
set ftp:use-feat true
set ssl:verify-certificate false
set ftp:passive-mode true
set net:timeout 60
set net:max-retries 3
set net:reconnect-interval-base 5
set ftp:prefer-epsv false
debug 3
# Make remote dir if needed, then upload
mkdir -p "${REMOTE_PATH}" || true
cd "${REMOTE_PATH}" || exit 1
mput -- *.torrent
bye
EOF

  if [[ $ftps_rc -ne 0 ]]; then
    echo "FTPS also failed (exit $ftps_rc). Trying regular FTP..." | tee -a "$LOGFILE"

    # Fallback 2: plain FTP via lftp.
    ftp_rc=0
    timeout 300 lftp -u "${FTP_USER},${FTP_PASS}" "ftp://${REMOTE_HOST}" <<EOF 2>&1 | tee -a "$LOGFILE" || ftp_rc=$?
set ftp:passive-mode true
set net:timeout 60
set net:max-retries 3
set net:reconnect-interval-base 5
set ftp:prefer-epsv false
debug 3
# Make remote dir if needed, then upload
mkdir -p "${REMOTE_PATH}" || true
cd "${REMOTE_PATH}" || exit 1
mput -- *.torrent
bye
EOF

    if [[ $ftp_rc -ne 0 ]]; then
      echo "All upload methods failed (SFTP, FTPS, FTP). See $LOGFILE" | tee -a "$LOGFILE"
      exit "$ftp_rc"
    fi
    echo "FTP fallback upload successful." | tee -a "$LOGFILE"
  else
    echo "FTPS upload successful." | tee -a "$LOGFILE"
  fi
fi
|
||||
|
||||
# Optional local cleanup: remove the downloaded .torrent files unless the
# user asked to keep them. The EXIT trap removes $TMPDIR itself.
if [[ "${KEEP_LOCAL}" = false ]]; then
  echo "Removing local torrent files..." | tee -a "$LOGFILE"
  rm -f -- *.torrent || true
fi

echo "Done. Log: $LOGFILE"
exit 0
|
||||
Loading…
x
Reference in New Issue
Block a user