
# part 2
# ========================================================================
# Step 4 (Pass 1): Download at best quality, with a size cap
# ========================================================================
# Tries: best AVC1 video + best M4A audio → merged into .mp4
# If a video exceeds MAX_FILESIZE, its ID is saved for the fallback pass.
# Members-only and premiere errors cause the video to be permanently skipped.
# Note: the while loop runs in a pipeline subshell, so all state is persisted
# via files ($SCRIPT_DIR/.size_failed_$Name and $skip_file), never variables.
echo "[$(date '+%Y-%m-%d %H:%M:%S')] [$Name] Pass 1: best quality under $MAX_FILESIZE"
yt-dlp \
  "${common_opts[@]}" \
  --match-filter "!is_live & !was_live & original_url!*=/shorts/" \
  --max-filesize "$MAX_FILESIZE" \
  --format "bestvideo[vcodec^=avc1]+bestaudio[ext=m4a]/best[ext=mp4]/best" \
  "$URL" 2>&1 | while IFS= read -r line; do
  echo "$line"
  if echo "$line" | grep -q "^ERROR:"; then
    # Extract the 11-char video ID once from "ERROR: [youtube] <id>: ..."
    # (both branches below need it).
    vid_id=$(echo "$line" | grep -oP '(?<=\[youtube\] )[a-zA-Z0-9_-]{11}')
    # Too large → save ID for pass 2 (dedup, so repeated ERROR lines for the
    # same video don't make pass 2 retry it more than once)
    if echo "$line" | grep -qi "larger than max-filesize"; then
      if [[ -n "$vid_id" ]] && ! grep -qx "$vid_id" "$SCRIPT_DIR/.size_failed_$Name" 2>/dev/null; then
        echo "$vid_id" >> "$SCRIPT_DIR/.size_failed_$Name"
      fi
    # Permanently unavailable → skip forever
    elif echo "$line" | grep -qE "members only|Join this channel|This live event|premiere"; then
      if [[ -n "$vid_id" ]]; then
        if ! grep -q "youtube $vid_id" "$skip_file" 2>/dev/null; then
          echo "youtube $vid_id" >> "$skip_file"
          echo "[$(date '+%Y-%m-%d %H:%M:%S')] [$Name] Added $vid_id to skip file (permanent failure)"
        fi
      fi
    fi
    log_error "[$(date '+%Y-%m-%d %H:%M:%S')] ${Name} - ${URL}: $line"
  fi
done
# ========================================================================
# Step 5 (Pass 2): Retry oversized videos at lower quality
# ========================================================================
# For any video that exceeded MAX_FILESIZE in pass 1, retry at 720p max.
# If it's STILL too large, log the actual size and skip permanently.
if [[ -f "$SCRIPT_DIR/.size_failed_$Name" ]]; then
  echo "[$(date '+%Y-%m-%d %H:%M:%S')] [$Name] Pass 2: lower quality fallback for oversized videos"
  while IFS= read -r vid_id; do
    [[ -z "$vid_id" ]] && continue
    echo "[$(date '+%Y-%m-%d %H:%M:%S')] [$Name] Retrying $vid_id at 720p max"
    # BUGFIX: the watch URL previously ended in "%24vid_id" — %24 is the
    # URL-encoding of '$', so the literal string was passed to yt-dlp and
    # the real video ID was never substituted. Use ${vid_id} expansion.
    # NOTE(review): yt-dlp documents the extractor-arg key as player_client
    # (underscore), not player-client — verify this key is being honored.
    yt-dlp \
      --proxy "$PROXY" \
      --download-archive "$archive_file" \
      --extractor-args "youtube:player-client=default,-tv_simply" \
      --write-thumbnail \
      --convert-thumbnails jpg \
      --add-metadata \
      --embed-thumbnail \
      --merge-output-format mp4 \
      --max-filesize "$MAX_FILESIZE" \
      --format "bestvideo[vcodec^=avc1][height<=720]+bestaudio[ext=m4a]/bestvideo[height<=720]+bestaudio[ext=m4a]/best[height<=720]/worst" \
      --output "$DOWNLOAD_DIR/${Name} - %(title)s.%(ext)s" \
      "https://www.youtube.com/watch?v=${vid_id}" 2>&1 | while IFS= read -r line; do
      echo "$line"
      if echo "$line" | grep -q "^ERROR:"; then
        # Still too large even at 720p — give up and log the size
        if echo "$line" | grep -qi "larger than max-filesize"; then
          # Query the (approximate) size without downloading, for the log line.
          filesize_info=$(yt-dlp \
            --proxy "$PROXY" \
            --extractor-args "youtube:player-client=default,-tv_simply" \
            --simulate \
            --print "%(filesize,filesize_approx)s" \
            "https://www.youtube.com/watch?v=${vid_id}" 2>/dev/null)
          if [[ "$filesize_info" =~ ^[0-9]+$ ]]; then
            # bytes → GiB, one decimal place
            filesize_gb=$(echo "scale=1; $filesize_info / 1073741824" | bc)
            size_str="${filesize_gb}GB"
          else
            size_str="unknown size"
          fi
          if ! grep -q "youtube $vid_id" "$skip_file" 2>/dev/null; then
            echo "youtube $vid_id" >> "$skip_file"
            log_error "[$(date '+%Y-%m-%d %H:%M:%S')] [$Name] Skipped $vid_id - still over $MAX_FILESIZE at 720p ($size_str)"
          fi
        fi
        log_error "[$(date '+%Y-%m-%d %H:%M:%S')] ${Name} - ${URL}: $line"
      fi
    done
  done < "$SCRIPT_DIR/.size_failed_$Name"
  rm -f "$SCRIPT_DIR/.size_failed_$Name"
else
  echo "[$(date '+%Y-%m-%d %H:%M:%S')] [$Name] Pass 2: no oversized videos to retry"
fi
# Remove leftover .description sidecar files for this channel; yt-dlp can
# leave them behind even when description output was not requested.
find "$DOWNLOAD_DIR" -type f -name "${Name} - *.description" -delete
done









Scam site?
The URL tunnels you through several different sites before landing on a full-screen, forced, YouTube-looking video that you cannot click away from.