#!/bin/bash
# =================================================
# Fetch Content Script
# Downloads videos using yt-dlp with special handling
# for Redgifs user profiles (auto-organizes by orientation)
# =================================================
# Enable strict error handling: abort on any command failure,
# including failures inside pipelines.
set -e
set -o pipefail

# Environment variables (set by job system); fall back to sane
# defaults when the job system does not provide them.
: "${FETCH_URLS:=}"
: "${FETCH_MODE:=single}"
: "${INBOX_PATH:=/media/inbox}"
: "${GIFS_PATH:=/media/gifs}"
# Check dependencies: both yt-dlp (downloader) and ffprobe
# (orientation detection) must be on PATH before we do any work.
for required_tool in yt-dlp ffprobe; do
    if ! command -v "$required_tool" &> /dev/null; then
        echo "ERROR: $required_tool is not installed"
        exit 1
    fi
done
unset required_tool
# Function to detect orientation using ffprobe
#
# Arguments:
#   $1 - path to a video file
# Outputs (stdout):
#   "horizontal" when width >= height, "vertical" when taller than
#   wide, "unknown" when ffprobe cannot read the stream dimensions.
detect_orientation() {
    local file="$1"
    local width height

    # Query both dimensions with a single ffprobe invocation (one
    # process spawn instead of two). The default writer emits the
    # entries on separate lines, width first, then height.
    {
        read -r width
        read -r height
    } < <(ffprobe -v error -select_streams v:0 \
            -show_entries stream=width,height \
            -of default=noprint_wrappers=1:nokey=1 "$file" 2>/dev/null)

    # ffprobe failed or the file has no readable video stream
    if [[ -z "$width" || -z "$height" ]]; then
        echo "unknown"
        return
    fi

    # Square frames count as horizontal (>=), matching the sorter's
    # default bucket for ambiguous files.
    if [[ $width -ge $height ]]; then
        echo "horizontal"
    else
        echo "vertical"
    fi
}
# Function to extract creator name from Redgifs profile URL
#
# Arguments:
#   $1 - URL of the form https://www.redgifs.com/users/<name>[/...]
# Outputs (stdout):
#   the captured username, or an empty string when the URL does not
#   contain a /users/ path segment.
extract_redgifs_creator() {
    local profile_url="$1"

    # Capture everything after /users/ up to (not including) the
    # next '/' or '?'.
    if [[ ! "$profile_url" =~ redgifs\.com/users/([^/\?]+) ]]; then
        echo ""
        return
    fi

    echo "${BASH_REMATCH[1]}"
}
# Function to check if URL is a Redgifs user profile
#
# Arguments:
#   $1 - URL to classify
# Returns 0 (true) when the URL contains a redgifs.com/users/ path
# segment, non-zero otherwise.
is_redgifs_profile() {
    case "$1" in
        *redgifs.com/users/*) return 0 ;;
        *)                    return 1 ;;
    esac
}
# Function to download and process a single URL
#
# Arguments:
#   $1 - URL to download
#   $2 - ordinal of this URL (progress display only)
#   $3 - total number of URLs (progress display only)
# Globals:
#   INBOX_PATH (read) - destination root for downloads
# Returns:
#   0 on success, 1 on any download/processing failure.
process_url() {
    local url="$1"
    local url_num="$2"
    local total_urls="$3"

    echo ""
    echo "========================================"
    echo "Processing URL [$url_num/$total_urls]"
    echo "========================================"
    echo "URL: $url"
    echo ""

    # Check if Redgifs user profile
    if is_redgifs_profile "$url"; then
        local creator
        creator=$(extract_redgifs_creator "$url")

        if [[ -z "$creator" ]]; then
            echo "ERROR: Could not extract creator name from Redgifs URL"
            return 1
        fi

        echo ">>> REDGIFS USER PROFILE DETECTED <<<"
        echo " Creator: $creator"
        echo " Destination: $INBOX_PATH/$creator/"
        echo " (Files will be sorted by orientation)"
        echo ""

        # Create creator directory structure in inbox
        local creator_dir="$INBOX_PATH/$creator"
        mkdir -p "$creator_dir/horizontal"
        mkdir -p "$creator_dir/vertical"

        # Create temp directory for download
        local temp_dir
        temp_dir=$(mktemp -d)

        echo "-> Downloading to temp directory..."

        # Download all videos from user profile.
        # Using best format, mp4 container preferred; note the final
        # "/best" fallback may yield non-mp4 containers.
        if ! yt-dlp \
            -f "bestvideo[ext=mp4]+bestaudio[ext=m4a]/best[ext=mp4]/best" \
            --merge-output-format mp4 \
            -o "$temp_dir/%(title)s.%(ext)s" \
            --restrict-filenames \
            --no-overwrites \
            "$url" 2>&1; then
            echo "ERROR: yt-dlp download failed"
            rm -rf -- "$temp_dir"
            return 1
        fi

        # Process downloaded files
        echo ""
        echo "-> Processing downloaded files..."

        local processed=0
        local h_count=0
        local v_count=0

        # BUGFIX: iterate over every downloaded file, not only *.mp4.
        # The "/best" format fallback can produce other containers
        # (e.g. .webm); those were previously deleted unprocessed when
        # the temp directory was removed.
        for file in "$temp_dir"/*; do
            [[ -f "$file" ]] || continue

            local filename
            filename=$(basename "$file")
            local orientation
            orientation=$(detect_orientation "$file")

            if [[ "$orientation" == "horizontal" ]]; then
                mv "$file" "$creator_dir/horizontal/$filename"
                echo " [H] $filename -> horizontal/"
                h_count=$((h_count + 1))
            elif [[ "$orientation" == "vertical" ]]; then
                mv "$file" "$creator_dir/vertical/$filename"
                echo " [V] $filename -> vertical/"
                v_count=$((v_count + 1))
            else
                # Unknown orientation - put in horizontal as default
                mv "$file" "$creator_dir/horizontal/$filename"
                echo " [?] $filename -> horizontal/ (orientation unknown)"
                h_count=$((h_count + 1))
            fi
            processed=$((processed + 1))
        done

        # Cleanup temp directory (":?" guards against an empty path)
        rm -rf -- "${temp_dir:?}"

        echo ""
        echo "Completed: $processed files ($h_count horizontal, $v_count vertical)"

    else
        # Standard download to inbox
        echo "-> Standard download to inbox"
        echo " Destination: $INBOX_PATH/"
        echo ""

        # Download with best quality
        if ! yt-dlp \
            -f "bestvideo[ext=mp4]+bestaudio[ext=m4a]/best[ext=mp4]/best" \
            --merge-output-format mp4 \
            -o "$INBOX_PATH/%(title)s.%(ext)s" \
            --restrict-filenames \
            --no-overwrites \
            "$url" 2>&1; then
            echo "ERROR: yt-dlp download failed"
            return 1
        fi

        echo ""
        echo "Download complete"
    fi
}
# Main execution
echo "========================================="
echo " Content Fetch Script"
echo " Mode: $FETCH_MODE"
echo "========================================="

if [[ -z "$FETCH_URLS" ]]; then
    echo "ERROR: No URLs provided"
    exit 1
fi

# Split the newline-separated URL list into an array
mapfile -t urls <<< "$FETCH_URLS"

# Keep only non-empty entries, stripping every whitespace character.
# Parameter expansion does the trimming without spawning subshells.
valid_urls=()
for raw in "${urls[@]}"; do
    raw="${raw//[[:space:]]/}"
    [[ -n "$raw" ]] && valid_urls+=("$raw")
done

total_urls=${#valid_urls[@]}

if [[ $total_urls -eq 0 ]]; then
    echo "ERROR: No valid URLs provided"
    exit 1
fi

echo "URLs to process: $total_urls"
echo ""

# Track results
success_count=0
fail_count=0

# Process each URL in order, counting outcomes
url_num=0
for url in "${valid_urls[@]}"; do
    url_num=$((url_num + 1))

    if process_url "$url" "$url_num" "$total_urls"; then
        success_count=$((success_count + 1))
    else
        fail_count=$((fail_count + 1))
        echo ">>> Failed to process: $url"
    fi
done

# Summary
echo ""
echo ""
echo "========================================="
echo " Fetch Complete!"
echo "========================================="
echo " Successful: $success_count"
echo " Failed: $fail_count"
echo " Total: $total_urls"
echo "========================================="

# Signal partial failure to the job system
if [[ $fail_count -gt 0 ]]; then
    exit 1
fi