Update scripts/playlist_builder.py
All checks were successful
Generate M3U Playlist with Auto-Organization / build-and-organize (push) Successful in 1m22s
All checks were successful
Generate M3U Playlist with Auto-Organization / build-and-organize (push) Successful in 1m22s
This commit is contained in:
parent
41ae5b6884
commit
71e2eb1d39
1 changed files with 81 additions and 76 deletions
|
@ -1,83 +1,88 @@
|
||||||
#!/usr/bin/env python3
|
|
||||||
"""
|
"""
|
||||||
IPTV Repository Monthly Maintenance
|
Playlist Builder - Generates the final M3U playlist
|
||||||
Run automatically by workflow
|
|
||||||
"""
|
"""
|
||||||
|
|
||||||
import os
|
import logging
|
||||||
import shutil
|
from typing import Dict, List, Tuple
|
||||||
import gzip
|
|
||||||
from pathlib import Path
|
|
||||||
from datetime import datetime, timedelta
|
|
||||||
|
|
||||||
def monthly_maintenance():
|
class PlaylistBuilder:
    """Generate and validate M3U playlist files.

    The ``config`` object must expose a ``playlist_file`` attribute
    (path of the playlist file to write and later re-read).
    """

    def __init__(self, config):
        """Store the configuration and set up a module-scoped logger."""
        self.config = config
        self.logger = logging.getLogger(__name__)

    @staticmethod
    def _extinf_line(stream_name: str, group_name: str, logo_url: str, epg_id: str) -> str:
        """Build one ``#EXTINF`` header line for a channel.

        NOTE(review): attribute values are interpolated verbatim — a value
        containing a double quote would break the attribute quoting.
        Confirm upstream data is sanitized before trusting this output.
        """
        attrs = ' '.join([
            f'tvg-id="{epg_id}"',
            f'tvg-logo="{logo_url}"',
            f'group-title="{group_name}"',
            f'tvg-name="{stream_name}"',
        ])
        return f"#EXTINF:-1 {attrs},{stream_name}"

    def generate_m3u(self, channels: List[Dict]) -> Tuple[int, Dict]:
        """Generate the M3U playlist file and return stats.

        Args:
            channels: one dict per channel with the keys 'Stream name',
                'Group', 'Logo', 'EPG id' and 'Stream URL'. Channels
                missing a name or URL are skipped.

        Returns:
            ``(valid_channels, country_stats)`` where ``country_stats``
            maps group/country name to channel count. If writing the
            playlist fails, the error is logged and ``(0, {})`` is
            returned.
        """
        m3u_lines = ["#EXTM3U"]
        valid_channels = 0
        country_stats: Dict[str, int] = {}

        for channel in channels:
            stream_name = channel.get('Stream name', '')
            group_name = channel.get('Group', 'Uncategorized')
            logo_url = channel.get('Logo', '')
            epg_id = channel.get('EPG id', '')
            stream_url = channel.get('Stream URL', '')

            # A channel is only usable with both a display name and a URL.
            if not stream_name or not stream_url:
                continue

            m3u_lines.append(self._extinf_line(stream_name, group_name, logo_url, epg_id))
            m3u_lines.append(stream_url)
            valid_channels += 1
            country_stats[group_name] = country_stats.get(group_name, 0) + 1

        try:
            # One buffered write instead of a write() call per line.
            with open(self.config.playlist_file, 'w', encoding='utf-8') as f:
                f.write('\n'.join(m3u_lines) + '\n')
        except Exception as e:
            self.logger.error("Error writing playlist: %s", e)
            return 0, {}

        # Logging happens outside the try so that a post-write failure
        # cannot discard stats for a playlist that was written successfully.
        self.logger.info(
            "Generated %s with %d channels", self.config.playlist_file, valid_channels
        )

        # Log the five largest groups: sort once and slice — no need to
        # materialize a fully sorted dict first.
        top_countries = dict(
            sorted(country_stats.items(), key=lambda kv: kv[1], reverse=True)[:5]
        )
        self.logger.info("Top countries: %s", top_countries)

        return valid_channels, country_stats

    def validate_m3u_structure(self) -> bool:
        """Validate the structure of the generated M3U file.

        Returns:
            True when the file is readable and starts with the
            ``#EXTM3U`` header. A mismatch between EXTINF and URL line
            counts is logged as a warning but does not fail validation
            (preserves the original contract). Read errors return False.
        """
        try:
            with open(self.config.playlist_file, 'r', encoding='utf-8') as f:
                lines = f.read().strip().split('\n')

            if not lines or lines[0] != '#EXTM3U':
                self.logger.error("M3U file missing #EXTM3U header")
                return False

            extinf_count = sum(1 for line in lines if line.startswith('#EXTINF:'))
            # Only schemes the playlist is expected to carry are counted.
            url_count = sum(
                1 for line in lines if line.startswith(('http://', 'https://', 'rtmp://'))
            )

            if extinf_count != url_count:
                self.logger.warning(
                    "M3U structure mismatch: %d EXTINF lines vs %d URLs",
                    extinf_count, url_count,
                )

            self.logger.info("M3U validation complete: %d channels validated", extinf_count)
            return True
        except Exception as e:
            self.logger.error("Error validating M3U: %s", e)
            return False
Loading…
Add table
Add a link
Reference in a new issue