Update scripts/playlist_builder.py
All checks were successful
Generate M3U Playlist with Auto-Organization / build-and-organize (push) Successful in 1m22s

This commit is contained in:
stoney420 2025-06-28 07:19:42 +02:00
parent 41ae5b6884
commit 71e2eb1d39

View file

@@ -1,83 +1,88 @@
#!/usr/bin/env python3
"""
IPTV Repository Playlist Builder & Monthly Maintenance.

NOTE(review): this file was recovered from a corrupted two-column diff paste
that interleaved two versions line-by-line. Both recoverable units are kept
below as valid Python: the PlaylistBuilder class (new version) and the
monthly_maintenance script (old version). Run automatically by workflow.
"""
import gzip
import logging
import os
import shutil
from datetime import datetime, timedelta
from pathlib import Path
from typing import Dict, List, Tuple


class PlaylistBuilder:
    """Generate M3U playlist files."""

    def __init__(self, config):
        # config must expose ``playlist_file`` — the output path of the
        # generated .m3u playlist (TODO confirm against caller).
        self.config = config
        self.logger = logging.getLogger(__name__)

    def generate_m3u(self, channels: List[Dict]) -> Tuple[int, Dict]:
        """Generate the M3U playlist file and return stats.

        Args:
            channels: list of channel dicts with keys 'Stream name',
                'Group', 'Logo', 'EPG id' and 'Stream URL'.

        Returns:
            (valid_channels, country_stats) — number of channels written
            and a per-group count; (0, {}) if the file could not be written.
        """
        m3u_lines = ["#EXTM3U"]
        valid_channels = 0
        country_stats = {}

        for channel in channels:
            stream_name = channel.get('Stream name', '')
            group_name = channel.get('Group', 'Uncategorized')
            logo_url = channel.get('Logo', '')
            epg_id = channel.get('EPG id', '')
            stream_url = channel.get('Stream URL', '')

            # A channel is unusable without both a name and a URL.
            if not stream_name or not stream_url:
                continue

            # Build EXTINF line with all attributes
            extinf_attrs = [
                f'tvg-id="{epg_id}"',
                f'tvg-logo="{logo_url}"',
                f'group-title="{group_name}"',
                f'tvg-name="{stream_name}"'
            ]

            extinf_line = f"#EXTINF:-1 {' '.join(extinf_attrs)},{stream_name}"
            m3u_lines.append(extinf_line)
            m3u_lines.append(stream_url)

            valid_channels += 1
            # Update country statistics (group title doubles as country key)
            country_stats[group_name] = country_stats.get(group_name, 0) + 1

        try:
            with open(self.config.playlist_file, 'w', encoding='utf-8') as f:
                for line in m3u_lines:
                    f.write(line + '\n')

            self.logger.info(f"Generated {self.config.playlist_file} with {valid_channels} channels")

            # Log top countries (five largest groups by channel count)
            sorted_stats = dict(sorted(country_stats.items(), key=lambda x: x[1], reverse=True))
            top_countries = dict(list(sorted_stats.items())[:5])
            self.logger.info(f"Top countries: {top_countries}")

            return valid_channels, country_stats

        except Exception as e:
            self.logger.error(f"Error writing playlist: {e}")
            return 0, {}

    def validate_m3u_structure(self) -> bool:
        """Validate the generated M3U file structure.

        Returns True when the file exists and starts with the #EXTM3U
        header; an EXTINF/URL count mismatch is only logged as a warning.
        """
        try:
            with open(self.config.playlist_file, 'r', encoding='utf-8') as f:
                content = f.read()

            lines = content.strip().split('\n')

            if not lines or lines[0] != '#EXTM3U':
                self.logger.error("M3U file missing #EXTM3U header")
                return False

            extinf_count = sum(1 for line in lines if line.startswith('#EXTINF:'))
            url_count = sum(1 for line in lines if line.startswith(('http://', 'https://', 'rtmp://')))

            if extinf_count != url_count:
                self.logger.warning(f"M3U structure mismatch: {extinf_count} EXTINF lines vs {url_count} URLs")

            self.logger.info(f"M3U validation complete: {extinf_count} channels validated")
            return True

        except Exception as e:
            self.logger.error(f"Error validating M3U: {e}")
            return False


def monthly_maintenance():
    """Run monthly maintenance tasks."""
    print("🧹 IPTV Repository Monthly Maintenance")
    print("=" * 40)
    root_path = Path.cwd()
    actions = []

    # 1. Compress old backups (older than 7 days) to .txt.gz, then delete
    # the uncompressed original.
    print("1. Compressing old backups...")
    backups_dir = root_path / 'backups'
    cutoff_date = datetime.now() - timedelta(days=7)

    if backups_dir.exists():
        for backup_file in backups_dir.glob('*.txt'):
            try:
                file_date = datetime.fromtimestamp(backup_file.stat().st_mtime)
                if file_date < cutoff_date:
                    compressed_path = backup_file.with_suffix('.txt.gz')
                    if not compressed_path.exists():
                        with open(backup_file, 'rb') as f_in:
                            with gzip.open(compressed_path, 'wb') as f_out:
                                shutil.copyfileobj(f_in, f_out)
                        backup_file.unlink()
                        actions.append(f"Compressed: {backup_file.name}")
            except Exception as e:
                print(f" Warning: {e}")

    # 2. Archive old reports (older than 30 days) into reports/archive/YYYY-MM/
    print("2. Archiving old reports...")
    reports_dir = root_path / 'reports' / 'daily'
    archive_dir = root_path / 'reports' / 'archive'

    cutoff_date = datetime.now() - timedelta(days=30)

    if reports_dir.exists():
        for report_file in reports_dir.glob('*.md'):
            try:
                file_date = datetime.fromtimestamp(report_file.stat().st_mtime)
                if file_date < cutoff_date:
                    month_folder = archive_dir / file_date.strftime('%Y-%m')
                    month_folder.mkdir(parents=True, exist_ok=True)
                    new_path = month_folder / report_file.name
                    shutil.move(str(report_file), str(new_path))
                    actions.append(f"Archived: {report_file.name}")
            except Exception as e:
                print(f" Warning: {e}")

    # 3. Clean temporary files anywhere in the tree, skipping .git
    print("3. Cleaning temporary files...")
    patterns = ['*_temp*', '*.tmp', '*~', '*.swp']
    for pattern in patterns:
        for temp_file in root_path.rglob(pattern):
            if temp_file.is_file() and '.git' not in str(temp_file):
                try:
                    temp_file.unlink()
                    actions.append(f"Removed: {temp_file.relative_to(root_path)}")
                except Exception as e:
                    print(f" Warning: {e}")

    print(f"\n✅ Monthly maintenance complete! {len(actions)} actions taken")
    if actions:
        # Show at most the first five actions to keep the log short.
        for action in actions[:5]:
            print(f"{action}")
        if len(actions) > 5:
            print(f" ... and {len(actions) - 5} more")


if __name__ == "__main__":
    monthly_maintenance()