Update .forgejo/workflows/generate-m3u.yml

stoney420 2025-06-28 02:15:08 +02:00
parent 3451f46539
commit c582c80cc1


@@ -1,4 +1,4 @@
-name: Generate M3U Playlist with Auto-Setup & Cleanup
+name: Generate M3U Playlist with Auto-Cleanup
 on:
   push:
@@ -23,95 +23,9 @@ jobs:
           git config --local user.email "actions@forgejo.plainrock127.xyz"
           git config --local user.name "IPTV Playlist Bot"
-      - name: Auto-Setup Repository (First Run)
+      - name: Pre-Cleanup Repository
         run: |
-          echo "=== Auto-Setup Check ==="
+          echo "=== Pre-Cleanup Phase ==="
-          # Check if enhanced .gitignore exists
-          if [ ! -f .gitignore ] || ! grep -q "IPTV Playlist Generator" .gitignore; then
-            echo "🔧 Setting up enhanced .gitignore..."
-            cat > .gitignore << 'EOF'
-          # IPTV Playlist Generator - Enhanced .gitignore
-          # ===== PYTHON =====
-          __pycache__/
-          *.py[cod]
-          *$py.class
-          *.so
-          # ===== LOGS & TEMPORARY FILES =====
-          *.log
-          *.tmp
-          *_temp*
-          *.backup.*
-          temp_*
-          # ===== IDE & EDITOR FILES =====
-          .vscode/
-          .idea/
-          *.swp
-          *.swo
-          *~
-          .DS_Store
-          Thumbs.db
-          # ===== IPTV SPECIFIC =====
-          bulk_import_temp.m3u
-          import_temp_*.m3u
-          *_processing.m3u
-          playlist_temp.m3u
-          temp_playlist_*.m3u
-          EOF
-            echo "✅ Enhanced .gitignore created"
-          fi
-          # Create quick cleanup script if missing
-          if [ ! -f scripts/quick_cleanup.py ]; then
-            echo "🔧 Creating cleanup script..."
-            mkdir -p scripts
-            cat > scripts/quick_cleanup.py << 'EOF'
-          #!/usr/bin/env python3
-          """Quick cleanup for IPTV repository"""
-          import os
-          import shutil
-          from pathlib import Path
-          def cleanup():
-              root = Path.cwd()
-              cleaned = 0
-              # Remove Python cache
-              for cache in root.rglob('__pycache__'):
-                  if cache.is_dir():
-                      shutil.rmtree(cache, ignore_errors=True)
-                      cleaned += 1
-              # Remove temp files
-              for pattern in ['*.pyc', '*.pyo', '*_temp*', '*.tmp', '*~']:
-                  for file in root.rglob(pattern):
-                      if file.is_file() and '.git' not in str(file):
-                          file.unlink(missing_ok=True)
-                          cleaned += 1
-              # Organize logs
-              logs_dir = root / 'reports' / 'logs'
-              logs_dir.mkdir(parents=True, exist_ok=True)
-              for log in root.glob('*.log'):
-                  shutil.move(str(log), str(logs_dir / log.name))
-                  cleaned += 1
-              print(f"🧹 Cleaned {cleaned} items")
-              return cleaned
-          if __name__ == "__main__":
-              cleanup()
-          EOF
-            echo "✅ Cleanup script created"
-          fi
-      - name: Pre-Generation Cleanup
-        run: |
-          echo "=== Pre-Generation Cleanup ==="
           # Remove Python cache thoroughly
           find . -type d -name "__pycache__" -exec rm -rf {} + 2>/dev/null || true
@@ -122,11 +36,15 @@ jobs:
           find . -name "*_temp*" -type f -delete 2>/dev/null || true
           find . -name "*.tmp" -delete 2>/dev/null || true
           find . -name "*~" -delete 2>/dev/null || true
+          find . -name "*.swp" -delete 2>/dev/null || true
           # Organize log files
           mkdir -p reports/logs reports/archive
           find . -maxdepth 1 -name "*.log" -exec mv {} reports/logs/ \; 2>/dev/null || true
+          # Clean backup files older than 30 days
+          find backups -name "*.txt" -type f -mtime +30 -delete 2>/dev/null || true
           # Compress old backups (older than 7 days)
           find backups -name "*.txt" -type f -mtime +7 -exec gzip {} \; 2>/dev/null || true
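Taken together, the two backup commands in the hunk above define a simple retention policy: a plain-text backup stays as .txt for about a week, is gzipped once it is older than 7 days, and any .txt still present after 30 days is deleted; compressed .gz files match neither pattern, so they remain until something else removes them. A minimal sketch of that behaviour, run outside the workflow with hypothetical file names and assuming GNU touch/find as on a typical CI runner:

    # Demonstrate the retention policy with backdated dummy backups.
    mkdir -p /tmp/backup-demo/backups && cd /tmp/backup-demo
    touch -d '3 days ago'  backups/channels_recent.txt    # young: stays as .txt
    touch -d '10 days ago' backups/channels_older.txt     # older than 7 days: gets gzipped
    touch -d '40 days ago' backups/channels_ancient.txt   # older than 30 days: deleted
    # Same commands, same order as the workflow step above:
    find backups -name "*.txt" -type f -mtime +30 -delete 2>/dev/null || true
    find backups -name "*.txt" -type f -mtime +7 -exec gzip {} \; 2>/dev/null || true
    ls backups   # -> channels_older.txt.gz  channels_recent.txt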
@@ -141,17 +59,30 @@ jobs:
           if [ ! -f scripts/__init__.py ]; then
             echo '# Scripts package' > scripts/__init__.py
           fi
+          # Ensure proper directory structure
+          echo "Directory structure verified"
       - name: Debug File Structure
         run: |
-          echo "=== File Structure ==="
+          echo "=== File Structure Debug ==="
+          echo "Root files:"
+          ls -la | head -15
+          echo ""
+          echo "Scripts directory:"
+          ls -la scripts/ 2>/dev/null || echo "Scripts directory not found"
+          echo ""
           echo "Import file status:"
           if [ -f bulk_import.m3u ]; then
             LINES=$(wc -l < bulk_import.m3u)
             SIZE=$(du -h bulk_import.m3u | cut -f1)
-            echo "✅ bulk_import.m3u: $LINES lines, $SIZE"
+            echo "✅ bulk_import.m3u found: $LINES lines ($SIZE)"
             if [ "$LINES" -gt 2 ]; then
               echo "📥 Contains channels to process"
+              echo "First few lines:"
+              head -3 bulk_import.m3u
+            else
+              echo "📭 Empty (ready for import)"
             fi
           else
             echo "❌ bulk_import.m3u not found"
@@ -160,39 +91,72 @@ jobs:
       - name: Run Playlist Generation
         run: |
           echo "=== Playlist Generation ==="
+          # Check if generate_playlist.py exists
+          if [ ! -f scripts/generate_playlist.py ]; then
+            echo "❌ Error: scripts/generate_playlist.py not found"
+            exit 1
+          fi
+          # Run the playlist generation
+          echo "🚀 Starting playlist generation..."
           python scripts/generate_playlist.py
+          echo "✅ Playlist generation completed"
       - name: Post-Generation Analysis
         run: |
-          echo "=== Results Analysis ==="
+          echo "=== Post-Generation Analysis ==="
           if [ -f playlist.m3u ]; then
             CHANNEL_COUNT=$(grep -c "^#EXTINF" playlist.m3u 2>/dev/null || echo "0")
             FILE_SIZE=$(du -h playlist.m3u | cut -f1)
             echo "✅ Generated playlist.m3u:"
-            echo "   - Channels: $CHANNEL_COUNT"
-            echo "   - Size: $FILE_SIZE"
-            # Show top 3 countries
-            echo "   - Top countries:"
-            grep 'group-title=' playlist.m3u | sed 's/.*group-title="//; s/".*//' | sort | uniq -c | sort -nr | head -3 || true
+            echo "   📺 Channels: $CHANNEL_COUNT"
+            echo "   📁 Size: $FILE_SIZE"
+            # Show top countries if available
+            echo "   🌍 Top countries:"
+            if grep -q 'group-title=' playlist.m3u; then
+              grep 'group-title=' playlist.m3u | \
+                sed 's/.*group-title="//; s/".*//' | \
+                sort | uniq -c | sort -nr | head -5 | \
+                while read count country; do
+                  echo "     $country: $count channels"
+                done
+            else
+              echo "     No country grouping found"
+            fi
           else
             echo "❌ playlist.m3u not generated"
+            echo "Checking for errors..."
+            if [ -f reports/logs/playlist_update.log ]; then
+              echo "Last few log entries:"
+              tail -10 reports/logs/playlist_update.log
+            fi
           fi
           if [ -f channels.txt ]; then
             CHANNELS_SIZE=$(du -h channels.txt | cut -f1)
-            echo "📁 channels.txt: $CHANNELS_SIZE"
+            CHANNELS_LINES=$(wc -l < channels.txt)
+            echo "📋 channels.txt: $CHANNELS_SIZE ($CHANNELS_LINES lines)"
+          else
+            echo "📋 channels.txt: Not found"
           fi
       - name: Final Cleanup & Organization
         run: |
           echo "=== Final Organization ==="
-          # Run Python cleanup if script exists
-          if [ -f scripts/quick_cleanup.py ]; then
-            python scripts/quick_cleanup.py
-          fi
+          # Remove any remaining cache/temp files
+          find . -type d -name "__pycache__" -exec rm -rf {} + 2>/dev/null || true
+          find . -name "*.pyc" -delete 2>/dev/null || true
+          find . -name "*.swp" -delete 2>/dev/null || true
+          find . -name ".DS_Store" -delete 2>/dev/null || true
+          find . -name "*~" -delete 2>/dev/null || true
+          # Move any stray log files to reports/logs
+          find . -maxdepth 1 -name "*.log" -exec mv {} reports/logs/ \; 2>/dev/null || true
           # Ensure bulk_import.m3u is clean for next use
           if [ -f bulk_import.m3u ]; then
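The country summary in the Post-Generation Analysis step above assumes the generated playlist carries group-title="..." attributes on its #EXTINF lines. A small standalone sketch of that pipeline against a hypothetical three-channel sample (file name, channel names, and URLs are illustrative):

    # Build a tiny sample playlist with two groups.
    {
      printf '#EXTM3U\n'
      printf '#EXTINF:-1 group-title="United Kingdom",Example UK One\nhttp://example.com/uk1.m3u8\n'
      printf '#EXTINF:-1 group-title="United Kingdom",Example UK Two\nhttp://example.com/uk2.m3u8\n'
      printf '#EXTINF:-1 group-title="Germany",Example DE One\nhttp://example.com/de1.m3u8\n'
    } > sample.m3u
    # Same pipeline as the workflow: extract group names, count them, report the top groups.
    grep 'group-title=' sample.m3u | \
      sed 's/.*group-title="//; s/".*//' | \
      sort | uniq -c | sort -nr | head -5 | \
      while read count country; do
        echo "   $country: $count channels"
      done
    # Prints:
    #   United Kingdom: 2 channels
    #   Germany: 1 channels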
@@ -201,86 +165,115 @@ jobs:
               echo "🧹 Cleaning bulk_import.m3u for next import..."
               echo '#EXTM3U' > bulk_import.m3u
               echo '' >> bulk_import.m3u
+              echo "✅ bulk_import.m3u cleared and ready"
+            else
+              echo "✅ bulk_import.m3u already clean"
             fi
           fi
-          # Remove any remaining clutter
-          find . -name "*.swp" -delete 2>/dev/null || true
-          find . -name ".DS_Store" -delete 2>/dev/null || true
           echo "✅ Final cleanup completed"
+      - name: Repository Health Check
+        run: |
+          echo "=== Repository Health Check ==="
+          # Calculate repository stats
+          TOTAL_FILES=$(find . -type f | grep -v '.git' | wc -l)
+          REPO_SIZE=$(du -sh . 2>/dev/null | cut -f1 || echo "unknown")
+          echo "📊 Repository Statistics:"
+          echo "   📁 Total files: $TOTAL_FILES"
+          echo "   💾 Repository size: $REPO_SIZE"
+          # Check for common issues
+          CACHE_DIRS=$(find . -type d -name "__pycache__" | wc -l)
+          TEMP_FILES=$(find . -name "*.tmp" -o -name "*_temp*" | wc -l)
+          LOG_FILES_ROOT=$(find . -maxdepth 1 -name "*.log" | wc -l)
+          echo "🔍 Cleanliness Check:"
+          echo "   🐍 Python cache dirs: $CACHE_DIRS"
+          echo "   🗑️ Temporary files: $TEMP_FILES"
+          echo "   📋 Root log files: $LOG_FILES_ROOT"
+          if [ "$CACHE_DIRS" -eq 0 ] && [ "$TEMP_FILES" -eq 0 ] && [ "$LOG_FILES_ROOT" -eq 0 ]; then
+            echo "✅ Repository is clean!"
+          else
+            echo "⚠️ Some cleanup items remain"
+          fi
       - name: Commit Changes
         run: |
           echo "=== Committing Changes ==="
-          # Add specific files/directories (clean approach)
+          # Add specific files/directories only (clean approach)
-          git add .gitignore || true
           git add bulk_import.m3u || true
           git add channels.txt || true
           git add playlist.m3u || true
           git add scripts/ || true
           git add config/ || true
           git add reports/ || true
-          git add backups/*.gz || true  # Only compressed backups
+          git add backups/*.gz || true
           git add templates/ || true
           git add .forgejo/ || true
           git add README.md || true
+          git add .gitignore || true
           # Remove files that shouldn't be tracked
           git rm --cached *.log 2>/dev/null || true
           git rm --cached **/__pycache__/** 2>/dev/null || true
+          git rm --cached **/*.pyc 2>/dev/null || true
-          # Check what we're committing
-          echo "Files staged for commit:"
-          git diff --staged --name-only | head -10 || echo "No changes"
+          # Check what we're about to commit
+          echo "📝 Files staged for commit:"
+          git diff --staged --name-only | head -10 || echo "No staged changes"
           if ! git diff --staged --quiet; then
             # Calculate stats for commit message
             CHANNEL_COUNT="0"
+            REPO_SIZE="unknown"
             if [ -f playlist.m3u ]; then
               CHANNEL_COUNT=$(grep -c "^#EXTINF" playlist.m3u 2>/dev/null || echo "0")
             fi
             REPO_SIZE=$(du -sh . 2>/dev/null | cut -f1 || echo "unknown")
-            # Create clean commit message
+            # Create informative commit message
             COMMIT_MSG="📺 Updated playlist: $CHANNEL_COUNT channels ($(date '+%Y-%m-%d %H:%M'))
-          🧹 Auto-cleaned repository:
-          - Size: $REPO_SIZE
-          - Channels: $CHANNEL_COUNT
-          - Organized structure
-          - Ready for next import"
+          🧹 Repository Status:
+          - Channels: $CHANNEL_COUNT
+          - Size: $REPO_SIZE
+          - Auto-cleaned and organized
+          - $(date '+%Y-%m-%d %H:%M:%S UTC')"
             git commit -m "$COMMIT_MSG"
             git push
-            echo "✅ Repository updated and cleaned"
+            echo "✅ Repository updated successfully"
             echo "📺 Channels: $CHANNEL_COUNT"
             echo "📁 Size: $REPO_SIZE"
           else
             echo " No changes to commit"
           fi
-      - name: Success Summary
+      - name: Workflow Summary
         run: |
-          echo "=== Workflow Complete ==="
+          echo "=== Workflow Summary ==="
-          echo "🎉 IPTV Playlist Generator workflow finished successfully!"
+          echo "🎉 IPTV Playlist workflow completed!"
           echo ""
           if [ -f playlist.m3u ]; then
             CHANNEL_COUNT=$(grep -c "^#EXTINF" playlist.m3u 2>/dev/null || echo "0")
-            echo "📊 Results:"
-            echo "   - Channels processed: $CHANNEL_COUNT"
-            echo "   - Repository: Clean and organized"
-            echo "   - Ready for: Next bulk import"
+            echo "✅ Success Summary:"
+            echo "   📺 Playlist generated with $CHANNEL_COUNT channels"
+            echo "   🧹 Repository cleaned and organized"
+            echo "   📁 Files properly structured"
+            echo "   🚀 Ready for next import"
+          else
+            echo "⚠️ Playlist not generated - check logs"
           fi
           echo ""
-          echo "🚀 Next steps:"
+          echo "📋 Next Steps:"
           echo "   1. Add channels to bulk_import.m3u"
-          echo "   2. Push to trigger this workflow"
+          echo "   2. Push changes to trigger workflow"
-          echo "   3. Your playlist will be automatically updated!"
+          echo "   3. Playlist will be automatically updated"