Update .forgejo/workflows/generate-m3u.yml
parent 3451f46539
commit c582c80cc1
1 changed file with 124 additions and 131 deletions
@@ -1,4 +1,4 @@
name: Generate M3U Playlist with Auto-Setup & Cleanup
name: Generate M3U Playlist with Auto-Cleanup

on:
push:
@@ -23,95 +23,9 @@ jobs:
git config --local user.email "actions@forgejo.plainrock127.xyz"
git config --local user.name "IPTV Playlist Bot"

- name: Auto-Setup Repository (First Run)
- name: Pre-Cleanup Repository
run: |
echo "=== Auto-Setup Check ==="

# Check if enhanced .gitignore exists
if [ ! -f .gitignore ] || ! grep -q "IPTV Playlist Generator" .gitignore; then
echo "🔧 Setting up enhanced .gitignore..."
cat > .gitignore << 'EOF'
# IPTV Playlist Generator - Enhanced .gitignore

# ===== PYTHON =====
__pycache__/
*.py[cod]
*$py.class
*.so

# ===== LOGS & TEMPORARY FILES =====
*.log
*.tmp
*_temp*
*.backup.*
temp_*

# ===== IDE & EDITOR FILES =====
.vscode/
.idea/
*.swp
*.swo
*~
.DS_Store
Thumbs.db

# ===== IPTV SPECIFIC =====
bulk_import_temp.m3u
import_temp_*.m3u
*_processing.m3u
playlist_temp.m3u
temp_playlist_*.m3u
EOF
echo "✅ Enhanced .gitignore created"
fi

# Create quick cleanup script if missing
if [ ! -f scripts/quick_cleanup.py ]; then
echo "🔧 Creating cleanup script..."
mkdir -p scripts
cat > scripts/quick_cleanup.py << 'EOF'
#!/usr/bin/env python3
"""Quick cleanup for IPTV repository"""
import os
import shutil
from pathlib import Path

def cleanup():
root = Path.cwd()
cleaned = 0

# Remove Python cache
for cache in root.rglob('__pycache__'):
if cache.is_dir():
shutil.rmtree(cache, ignore_errors=True)
cleaned += 1

# Remove temp files
for pattern in ['*.pyc', '*.pyo', '*_temp*', '*.tmp', '*~']:
for file in root.rglob(pattern):
if file.is_file() and '.git' not in str(file):
file.unlink(missing_ok=True)
cleaned += 1

# Organize logs
logs_dir = root / 'reports' / 'logs'
logs_dir.mkdir(parents=True, exist_ok=True)
for log in root.glob('*.log'):
shutil.move(str(log), str(logs_dir / log.name))
cleaned += 1

print(f"🧹 Cleaned {cleaned} items")
return cleaned

if __name__ == "__main__":
cleanup()
EOF
echo "✅ Cleanup script created"
fi

- name: Pre-Generation Cleanup
run: |
echo "=== Pre-Generation Cleanup ==="
echo "=== Pre-Cleanup Phase ==="

# Remove Python cache thoroughly
find . -type d -name "__pycache__" -exec rm -rf {} + 2>/dev/null || true
@@ -122,11 +36,15 @@ jobs:
find . -name "*_temp*" -type f -delete 2>/dev/null || true
find . -name "*.tmp" -delete 2>/dev/null || true
find . -name "*~" -delete 2>/dev/null || true
find . -name "*.swp" -delete 2>/dev/null || true

# Organize log files
mkdir -p reports/logs reports/archive
find . -maxdepth 1 -name "*.log" -exec mv {} reports/logs/ \; 2>/dev/null || true

# Clean backup files older than 30 days
find backups -name "*.txt" -type f -mtime +30 -delete 2>/dev/null || true

# Compress old backups (older than 7 days)
find backups -name "*.txt" -type f -mtime +7 -exec gzip {} \; 2>/dev/null || true
@@ -142,16 +60,29 @@ jobs:
echo '# Scripts package' > scripts/__init__.py
fi

# Ensure proper directory structure
echo "Directory structure verified"

- name: Debug File Structure
run: |
echo "=== File Structure ==="
echo "=== File Structure Debug ==="
echo "Root files:"
ls -la | head -15
echo ""
echo "Scripts directory:"
ls -la scripts/ 2>/dev/null || echo "Scripts directory not found"
echo ""
echo "Import file status:"
if [ -f bulk_import.m3u ]; then
LINES=$(wc -l < bulk_import.m3u)
SIZE=$(du -h bulk_import.m3u | cut -f1)
echo "✅ bulk_import.m3u: $LINES lines, $SIZE"
echo "✅ bulk_import.m3u found: $LINES lines ($SIZE)"
if [ "$LINES" -gt 2 ]; then
echo "📥 Contains channels to process"
echo "First few lines:"
head -3 bulk_import.m3u
else
echo "📭 Empty (ready for import)"
fi
else
echo "❌ bulk_import.m3u not found"
@@ -160,39 +91,72 @@ jobs:
- name: Run Playlist Generation
run: |
echo "=== Playlist Generation ==="

# Check if generate_playlist.py exists
if [ ! -f scripts/generate_playlist.py ]; then
echo "❌ Error: scripts/generate_playlist.py not found"
exit 1
fi

# Run the playlist generation
echo "🚀 Starting playlist generation..."
python scripts/generate_playlist.py

echo "✅ Playlist generation completed"

- name: Post-Generation Analysis
run: |
echo "=== Results Analysis ==="
echo "=== Post-Generation Analysis ==="

if [ -f playlist.m3u ]; then
CHANNEL_COUNT=$(grep -c "^#EXTINF" playlist.m3u 2>/dev/null || echo "0")
FILE_SIZE=$(du -h playlist.m3u | cut -f1)
echo "✅ Generated playlist.m3u:"
echo " - Channels: $CHANNEL_COUNT"
echo " - Size: $FILE_SIZE"
echo " 📺 Channels: $CHANNEL_COUNT"
echo " 📁 Size: $FILE_SIZE"

# Show top 3 countries
echo " - Top countries:"
grep 'group-title=' playlist.m3u | sed 's/.*group-title="//; s/".*//' | sort | uniq -c | sort -nr | head -3 || true
# Show top countries if available
echo " 🌍 Top countries:"
if grep -q 'group-title=' playlist.m3u; then
grep 'group-title=' playlist.m3u | \
sed 's/.*group-title="//; s/".*//' | \
sort | uniq -c | sort -nr | head -5 | \
while read count country; do
echo " $country: $count channels"
done
else
echo " No country grouping found"
fi
else
echo "❌ playlist.m3u not generated"
echo "Checking for errors..."
if [ -f reports/logs/playlist_update.log ]; then
echo "Last few log entries:"
tail -10 reports/logs/playlist_update.log
fi
fi

if [ -f channels.txt ]; then
CHANNELS_SIZE=$(du -h channels.txt | cut -f1)
echo "📁 channels.txt: $CHANNELS_SIZE"
CHANNELS_LINES=$(wc -l < channels.txt)
echo "📋 channels.txt: $CHANNELS_SIZE ($CHANNELS_LINES lines)"
else
echo "📋 channels.txt: Not found"
fi

- name: Final Cleanup & Organization
run: |
echo "=== Final Organization ==="

# Run Python cleanup if script exists
if [ -f scripts/quick_cleanup.py ]; then
python scripts/quick_cleanup.py
fi
# Remove any remaining cache/temp files
find . -type d -name "__pycache__" -exec rm -rf {} + 2>/dev/null || true
find . -name "*.pyc" -delete 2>/dev/null || true
find . -name "*.swp" -delete 2>/dev/null || true
find . -name ".DS_Store" -delete 2>/dev/null || true
find . -name "*~" -delete 2>/dev/null || true

# Move any stray log files to reports/logs
find . -maxdepth 1 -name "*.log" -exec mv {} reports/logs/ \; 2>/dev/null || true

# Ensure bulk_import.m3u is clean for next use
if [ -f bulk_import.m3u ]; then
@@ -201,86 +165,115 @@ jobs:
echo "🧹 Cleaning bulk_import.m3u for next import..."
echo '#EXTM3U' > bulk_import.m3u
echo '' >> bulk_import.m3u
echo "✅ bulk_import.m3u cleared and ready"
else
echo "✅ bulk_import.m3u already clean"
fi
fi

# Remove any remaining clutter
find . -name "*.swp" -delete 2>/dev/null || true
find . -name ".DS_Store" -delete 2>/dev/null || true

echo "✅ Final cleanup completed"

- name: Repository Health Check
run: |
echo "=== Repository Health Check ==="

# Calculate repository stats
TOTAL_FILES=$(find . -type f | grep -v '.git' | wc -l)
REPO_SIZE=$(du -sh . 2>/dev/null | cut -f1 || echo "unknown")

echo "📊 Repository Statistics:"
echo " 📁 Total files: $TOTAL_FILES"
echo " 💾 Repository size: $REPO_SIZE"

# Check for common issues
CACHE_DIRS=$(find . -type d -name "__pycache__" | wc -l)
TEMP_FILES=$(find . -name "*.tmp" -o -name "*_temp*" | wc -l)
LOG_FILES_ROOT=$(find . -maxdepth 1 -name "*.log" | wc -l)

echo "🔍 Cleanliness Check:"
echo " 🐍 Python cache dirs: $CACHE_DIRS"
echo " 🗑️ Temporary files: $TEMP_FILES"
echo " 📋 Root log files: $LOG_FILES_ROOT"

if [ "$CACHE_DIRS" -eq 0 ] && [ "$TEMP_FILES" -eq 0 ] && [ "$LOG_FILES_ROOT" -eq 0 ]; then
echo "✅ Repository is clean!"
else
echo "⚠️ Some cleanup items remain"
fi

- name: Commit Changes
run: |
echo "=== Committing Changes ==="

# Add specific files/directories (clean approach)
git add .gitignore || true
# Add specific files/directories only (clean approach)
git add bulk_import.m3u || true
git add channels.txt || true
git add playlist.m3u || true
git add scripts/ || true
git add config/ || true
git add reports/ || true
git add backups/*.gz || true # Only compressed backups
git add backups/*.gz || true
git add templates/ || true
git add .forgejo/ || true
git add README.md || true
git add .gitignore || true

# Remove files that shouldn't be tracked
git rm --cached *.log 2>/dev/null || true
git rm --cached **/__pycache__/** 2>/dev/null || true
git rm --cached **/*.pyc 2>/dev/null || true

# Check what we're committing
echo "Files staged for commit:"
git diff --staged --name-only | head -10 || echo "No changes"
# Check what we're about to commit
echo "📝 Files staged for commit:"
git diff --staged --name-only | head -10 || echo "No staged changes"

if ! git diff --staged --quiet; then
# Calculate stats for commit message
CHANNEL_COUNT="0"
REPO_SIZE="unknown"

if [ -f playlist.m3u ]; then
CHANNEL_COUNT=$(grep -c "^#EXTINF" playlist.m3u 2>/dev/null || echo "0")
fi

REPO_SIZE=$(du -sh . 2>/dev/null | cut -f1 || echo "unknown")

# Create clean commit message
# Create informative commit message
COMMIT_MSG="📺 Updated playlist: $CHANNEL_COUNT channels ($(date '+%Y-%m-%d %H:%M'))

🧹 Auto-cleaned repository:
- Size: $REPO_SIZE
- Channels: $CHANNEL_COUNT
- Organized structure
- Ready for next import"
🧹 Repository Status:
- Channels: $CHANNEL_COUNT
- Size: $REPO_SIZE
- Auto-cleaned and organized
- $(date '+%Y-%m-%d %H:%M:%S UTC')"

git commit -m "$COMMIT_MSG"
git push

echo "✅ Repository updated and cleaned"
echo "✅ Repository updated successfully"
echo "📺 Channels: $CHANNEL_COUNT"
echo "📁 Size: $REPO_SIZE"
else
echo "ℹ️ No changes to commit"
fi

- name: Success Summary
- name: Workflow Summary
run: |
echo "=== Workflow Complete ==="
echo "🎉 IPTV Playlist Generator workflow finished successfully!"
echo "=== Workflow Summary ==="
echo "🎉 IPTV Playlist workflow completed!"
echo ""

if [ -f playlist.m3u ]; then
CHANNEL_COUNT=$(grep -c "^#EXTINF" playlist.m3u 2>/dev/null || echo "0")
echo "📊 Results:"
echo " - Channels processed: $CHANNEL_COUNT"
echo " - Repository: Clean and organized"
echo " - Ready for: Next bulk import"
echo "✅ Success Summary:"
echo " 📺 Playlist generated with $CHANNEL_COUNT channels"
echo " 🧹 Repository cleaned and organized"
echo " 📁 Files properly structured"
echo " 🚀 Ready for next import"
else
echo "⚠️ Playlist not generated - check logs"
fi

echo ""
echo "🚀 Next steps:"
echo "📋 Next Steps:"
echo " 1. Add channels to bulk_import.m3u"
echo " 2. Push to trigger this workflow"
echo " 3. Your playlist will be automatically updated!"
echo " 2. Push changes to trigger workflow"
echo " 3. Playlist will be automatically updated"