Updated playlist: 1 channels - 2025-06-28 01:10
This commit is contained in:
parent
4cc17938a8
commit
d3c84ae556
29 changed files with 23 additions and 1470 deletions
BIN
backups/archive/channels_20250627_222923.gz
Normal file
BIN
backups/archive/channels_20250627_222923.gz
Normal file
Binary file not shown.
BIN
backups/archive/channels_20250627_225834.gz
Normal file
BIN
backups/archive/channels_20250627_225834.gz
Normal file
Binary file not shown.
|
@ -1,5 +0,0 @@
|
|||
Group = Sports
|
||||
Stream name = Sky Sports Mix FHD
|
||||
Logo = https://i.ibb.co/7kYj5gc/sky-mix.png
|
||||
EPG id = SkySp.Mix.HD.uk
|
||||
Stream URL = https://a1xs.vip/2000008
|
BIN
backups/channels_20250627_224445.txt.gz
Normal file
BIN
backups/channels_20250627_224445.txt.gz
Normal file
Binary file not shown.
|
@ -1,11 +0,0 @@
|
|||
Group = 🇬🇧 United Kingdom
|
||||
Stream name = Sky Sports Mix FHD
|
||||
Logo = https://i.ibb.co/7kYj5gc/sky-mix.png
|
||||
EPG id = SkySp.Mix.HD.uk
|
||||
Stream URL = https://a1xs.vip/2000008
|
||||
|
||||
Group = Sports
|
||||
Stream name = Sky Sports Mix FHD
|
||||
Logo = https://i.ibb.co/7kYj5gc/sky-mix.png
|
||||
EPG id = SkySp.Mix.HD.uk
|
||||
Stream URL = https://a1xs.vip/2000008
|
|
@ -1,11 +0,0 @@
|
|||
Group = 🇬🇧 United Kingdom
|
||||
Stream name = Sky Sports Mix FHD
|
||||
Logo = https://i.ibb.co/7kYj5gc/sky-mix.png
|
||||
EPG id = SkySp.Mix.HD.uk
|
||||
Stream URL = https://a1xs.vip/2000008
|
||||
|
||||
Group = 🇬🇧 United Kingdom
|
||||
Stream name = Sky Sports Mix FHD
|
||||
Logo = https://i.ibb.co/7kYj5gc/sky-mix.png
|
||||
EPG id = SkySp.Mix.HD.uk
|
||||
Stream URL = https://a1xs.vip/2000008
|
BIN
backups/channels_20250627_230205.txt.gz
Normal file
BIN
backups/channels_20250627_230205.txt.gz
Normal file
Binary file not shown.
|
@ -1,11 +0,0 @@
|
|||
Group = 🇬🇧 United Kingdom
|
||||
Stream name = Sky Sports Mix FHD
|
||||
Logo = https://i.ibb.co/7kYj5gc/sky-mix.png
|
||||
EPG id = SkySp.Mix.HD.uk
|
||||
Stream URL = https://a1xs.vip/2000008
|
||||
|
||||
Group = 🇬🇧 United Kingdom
|
||||
Stream name = Sky Sports Mix FHD
|
||||
Logo = https://i.ibb.co/7kYj5gc/sky-mix.png
|
||||
EPG id = SkySp.Mix.HD.uk
|
||||
Stream URL = https://a1xs.vip/2000008
|
BIN
backups/channels_20250628_002450.txt.gz
Normal file
BIN
backups/channels_20250628_002450.txt.gz
Normal file
Binary file not shown.
|
@ -1,260 +0,0 @@
|
|||
#!/usr/bin/env python3
|
||||
"""
|
||||
Web Interface: Comprehensive IPTV Repository Cleanup
|
||||
This script will be run automatically by the workflow
|
||||
"""
|
||||
|
||||
import os
|
||||
import shutil
|
||||
import gzip
|
||||
from pathlib import Path
|
||||
from datetime import datetime, timedelta
|
||||
|
||||
def cleanup_repository():
|
||||
"""Main cleanup function for web interface."""
|
||||
print("🎯 IPTV Repository Comprehensive Cleanup")
|
||||
print("=" * 50)
|
||||
|
||||
root_path = Path.cwd()
|
||||
cleaned = []
|
||||
|
||||
print(f"📁 Working in: {root_path}")
|
||||
print("🚀 Starting cleanup...")
|
||||
|
||||
try:
|
||||
# 1. Create proper directory structure
|
||||
print("📁 Creating directory structure...")
|
||||
directories = [
|
||||
'data/archive',
|
||||
'reports/logs', 'reports/daily', 'reports/archive',
|
||||
'backups/archive',
|
||||
'templates'
|
||||
]
|
||||
|
||||
for directory in directories:
|
||||
(root_path / directory).mkdir(parents=True, exist_ok=True)
|
||||
cleaned.append(f"Created: {directory}/")
|
||||
|
||||
# 2. Clean up backups folder
|
||||
print("💾 Cleaning backups folder...")
|
||||
backups_dir = root_path / 'backups'
|
||||
|
||||
if backups_dir.exists():
|
||||
backup_files = sorted(backups_dir.glob('channels_*.txt'),
|
||||
key=lambda x: x.stat().st_mtime, reverse=True)
|
||||
|
||||
if len(backup_files) > 3:
|
||||
# Keep 3 most recent, compress the rest
|
||||
for old_backup in backup_files[3:]:
|
||||
try:
|
||||
archive_dir = backups_dir / 'archive'
|
||||
archive_dir.mkdir(exist_ok=True)
|
||||
compressed_path = archive_dir / f"{old_backup.stem}.gz"
|
||||
with open(old_backup, 'rb') as f_in:
|
||||
with gzip.open(compressed_path, 'wb') as f_out:
|
||||
shutil.copyfileobj(f_in, f_out)
|
||||
old_backup.unlink()
|
||||
cleaned.append(f"Compressed & archived: {old_backup.name}")
|
||||
except Exception as e:
|
||||
print(f" Warning: {e}")
|
||||
|
||||
# Compress remaining backups
|
||||
for backup_file in backups_dir.glob('channels_*.txt'):
|
||||
try:
|
||||
compressed_path = backup_file.with_suffix('.txt.gz')
|
||||
if not compressed_path.exists():
|
||||
with open(backup_file, 'rb') as f_in:
|
||||
with gzip.open(compressed_path, 'wb') as f_out:
|
||||
shutil.copyfileobj(f_in, f_out)
|
||||
backup_file.unlink()
|
||||
cleaned.append(f"Compressed: {backup_file.name}")
|
||||
except Exception as e:
|
||||
print(f" Warning: {e}")
|
||||
|
||||
# 3. Organize reports
|
||||
print("📋 Organizing reports...")
|
||||
reports_dir = root_path / 'reports'
|
||||
|
||||
# Move scattered report files to daily/
|
||||
for report_file in reports_dir.glob('playlist_report_*.md'):
|
||||
try:
|
||||
daily_dir = reports_dir / 'daily'
|
||||
daily_dir.mkdir(exist_ok=True)
|
||||
new_path = daily_dir / report_file.name
|
||||
shutil.move(str(report_file), str(new_path))
|
||||
cleaned.append(f"Moved: {report_file.name} → reports/daily/")
|
||||
except Exception as e:
|
||||
print(f" Warning: {e}")
|
||||
|
||||
# Archive old reports (older than 7 days)
|
||||
cutoff_date = datetime.now() - timedelta(days=7)
|
||||
daily_dir = reports_dir / 'daily'
|
||||
|
||||
if daily_dir.exists():
|
||||
for report_file in daily_dir.glob('*.md'):
|
||||
try:
|
||||
file_date = datetime.fromtimestamp(report_file.stat().st_mtime)
|
||||
if file_date < cutoff_date:
|
||||
month_folder = reports_dir / 'archive' / file_date.strftime('%Y-%m')
|
||||
month_folder.mkdir(parents=True, exist_ok=True)
|
||||
new_path = month_folder / report_file.name
|
||||
shutil.move(str(report_file), str(new_path))
|
||||
cleaned.append(f"Archived: {report_file.name}")
|
||||
except Exception as e:
|
||||
print(f" Warning: {e}")
|
||||
|
||||
# 4. Remove Python cache completely
|
||||
print("🐍 Removing Python cache...")
|
||||
for cache_dir in root_path.rglob('__pycache__'):
|
||||
if cache_dir.is_dir():
|
||||
try:
|
||||
shutil.rmtree(cache_dir)
|
||||
cleaned.append(f"Removed: {cache_dir.relative_to(root_path)}")
|
||||
except Exception as e:
|
||||
print(f" Warning: {e}")
|
||||
|
||||
for pyc_file in list(root_path.rglob('*.pyc')) + list(root_path.rglob('*.pyo')):
|
||||
try:
|
||||
pyc_file.unlink()
|
||||
cleaned.append(f"Removed: {pyc_file.relative_to(root_path)}")
|
||||
except Exception as e:
|
||||
print(f" Warning: {e}")
|
||||
|
||||
# 5. Clean scripts folder
|
||||
print("🔧 Cleaning scripts folder...")
|
||||
scripts_dir = root_path / 'scripts'
|
||||
|
||||
# Remove scripts/config if it exists and move files to main config
|
||||
scripts_config = scripts_dir / 'config'
|
||||
if scripts_config.exists():
|
||||
try:
|
||||
main_config = root_path / 'config'
|
||||
main_config.mkdir(exist_ok=True)
|
||||
for config_file in scripts_config.rglob('*'):
|
||||
if config_file.is_file():
|
||||
new_path = main_config / config_file.name
|
||||
if not new_path.exists():
|
||||
shutil.move(str(config_file), str(new_path))
|
||||
cleaned.append(f"Moved: {config_file.name} from scripts/config/")
|
||||
shutil.rmtree(scripts_config)
|
||||
cleaned.append("Removed: scripts/config/ directory")
|
||||
except Exception as e:
|
||||
print(f" Warning: {e}")
|
||||
|
||||
# Ensure __init__.py exists
|
||||
init_file = scripts_dir / '__init__.py'
|
||||
if not init_file.exists():
|
||||
with open(init_file, 'w') as f:
|
||||
f.write('# IPTV Scripts Package\n')
|
||||
cleaned.append("Created: scripts/__init__.py")
|
||||
|
||||
# 6. Clean root directory
|
||||
print("🧹 Cleaning root directory...")
|
||||
|
||||
# Remove setup scripts from root
|
||||
for setup_file in root_path.glob('setup_*.py'):
|
||||
try:
|
||||
setup_file.unlink()
|
||||
cleaned.append(f"Removed: {setup_file.name}")
|
||||
except Exception as e:
|
||||
print(f" Warning: {e}")
|
||||
|
||||
# Move log files to proper location
|
||||
logs_dir = reports_dir / 'logs'
|
||||
logs_dir.mkdir(exist_ok=True)
|
||||
|
||||
for log_file in root_path.glob('*.log'):
|
||||
try:
|
||||
new_path = logs_dir / log_file.name
|
||||
shutil.move(str(log_file), str(new_path))
|
||||
cleaned.append(f"Moved: {log_file.name} → reports/logs/")
|
||||
except Exception as e:
|
||||
print(f" Warning: {e}")
|
||||
|
||||
# Remove temporary files
|
||||
patterns = ['*_temp*', '*.tmp', '*~', '*.swp', '*.swo']
|
||||
for pattern in patterns:
|
||||
for temp_file in root_path.glob(pattern):
|
||||
if temp_file.is_file():
|
||||
try:
|
||||
temp_file.unlink()
|
||||
cleaned.append(f"Removed: {temp_file.name}")
|
||||
except Exception as e:
|
||||
print(f" Warning: {e}")
|
||||
|
||||
# 7. Create data snapshot
|
||||
print("📊 Creating data snapshot...")
|
||||
channels_file = root_path / 'channels.txt'
|
||||
if channels_file.exists():
|
||||
try:
|
||||
today = datetime.now()
|
||||
data_dir = root_path / 'data' / today.strftime('%Y-%m')
|
||||
data_dir.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
snapshot_name = f"channels_{today.strftime('%Y%m%d')}.txt"
|
||||
snapshot_path = data_dir / snapshot_name
|
||||
|
||||
if not snapshot_path.exists():
|
||||
shutil.copy2(channels_file, snapshot_path)
|
||||
cleaned.append(f"Created: data snapshot {snapshot_name}")
|
||||
except Exception as e:
|
||||
print(f" Warning: {e}")
|
||||
|
||||
# 8. Remove this cleanup script after running
|
||||
cleanup_script = root_path / 'comprehensive_cleanup.py'
|
||||
if cleanup_script.exists():
|
||||
try:
|
||||
cleanup_script.unlink()
|
||||
cleaned.append("Removed: comprehensive_cleanup.py (cleanup complete)")
|
||||
except Exception as e:
|
||||
print(f" Warning: Could not remove cleanup script: {e}")
|
||||
|
||||
print(f"\n✅ Cleanup complete! Processed {len(cleaned)} items")
|
||||
|
||||
if cleaned:
|
||||
print("\n🔧 Actions taken:")
|
||||
for item in cleaned[:10]: # Show first 10
|
||||
print(f" ✅ {item}")
|
||||
if len(cleaned) > 10:
|
||||
print(f" ... and {len(cleaned) - 10} more items")
|
||||
|
||||
# Repository status
|
||||
print(f"\n📊 Repository status:")
|
||||
try:
|
||||
total_files = len(list(root_path.rglob('*')))
|
||||
repo_size = sum(f.stat().st_size for f in root_path.rglob('*')
|
||||
if f.is_file() and '.git' not in str(f))
|
||||
repo_size_mb = repo_size / (1024 * 1024)
|
||||
|
||||
print(f" 📁 Total files: {total_files}")
|
||||
print(f" 💾 Repository size: {repo_size_mb:.1f} MB")
|
||||
|
||||
# Check cleanliness
|
||||
cache_dirs = len(list(root_path.rglob('__pycache__')))
|
||||
temp_files = len(list(root_path.rglob('*.tmp')))
|
||||
log_files_root = len(list(root_path.glob('*.log')))
|
||||
|
||||
print(f" 🧹 Cache directories: {cache_dirs}")
|
||||
print(f" 🗑️ Temp files: {temp_files}")
|
||||
print(f" 📋 Root log files: {log_files_root}")
|
||||
|
||||
if cache_dirs == 0 and temp_files == 0 and log_files_root == 0:
|
||||
print(" ✅ Repository is now clean!")
|
||||
else:
|
||||
print(" 🟡 Some cleanup items remain")
|
||||
|
||||
except Exception as e:
|
||||
print(f" Could not calculate stats: {e}")
|
||||
|
||||
return True
|
||||
|
||||
except Exception as e:
|
||||
print(f"❌ Error during cleanup: {e}")
|
||||
return False
|
||||
|
||||
if __name__ == "__main__":
|
||||
success = cleanup_repository()
|
||||
if success:
|
||||
print("\n🎉 Repository cleanup successful!")
|
||||
else:
|
||||
print("\n⚠️ Repository cleanup completed with warnings")
|
4
reports/daily/report_20250628_011019.md
Normal file
4
reports/daily/report_20250628_011019.md
Normal file
|
@ -0,0 +1,4 @@
|
|||
# Playlist Report - Sat Jun 28 01:10:19 UTC 2025
|
||||
- Channels: 1
|
||||
- Size: 4.0K
|
||||
- Generated: Sat Jun 28 01:10:19 UTC 2025
|
19
reports/playlist_report_20250628_011018.md
Normal file
19
reports/playlist_report_20250628_011018.md
Normal file
|
@ -0,0 +1,19 @@
|
|||
# IPTV Playlist Generation Report
|
||||
**Generated:** 2025-06-28 01:10:18
|
||||
|
||||
## Summary Statistics
|
||||
- **Total channels processed:** 2
|
||||
- **Valid channels:** 1
|
||||
- **Duplicates removed:** 1
|
||||
- **New channels imported:** 0
|
||||
- **Countries detected:** 1
|
||||
|
||||
## Channel Distribution by Country
|
||||
- **🇬🇧 United Kingdom:** 1 channels
|
||||
|
||||
## Configuration
|
||||
- **Remove duplicates:** True
|
||||
- **Auto country detection:** True
|
||||
- **Quality detection:** True
|
||||
- **Adult content filtering:** True
|
||||
- **Health check enabled:** False
|
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
|
@ -1,4 +0,0 @@
|
|||
{
|
||||
"example_keyword": "🇺🇸 United States",
|
||||
"another_keyword": "🇬🇧 United Kingdom"
|
||||
}
|
|
@ -1,600 +0,0 @@
|
|||
{
|
||||
"country_patterns": {
|
||||
"🇺🇸 United States": [
|
||||
"cbs",
|
||||
"nbc",
|
||||
"abc",
|
||||
"fox",
|
||||
"espn",
|
||||
"cnn",
|
||||
"hbo",
|
||||
" usa",
|
||||
" us ",
|
||||
".us",
|
||||
"america",
|
||||
"nfl"
|
||||
],
|
||||
"🇬🇧 United Kingdom": [
|
||||
"bbc",
|
||||
"itv",
|
||||
"sky",
|
||||
"channel 4",
|
||||
"e4",
|
||||
" uk",
|
||||
".uk",
|
||||
"british",
|
||||
"premier league"
|
||||
],
|
||||
"🇨🇦 Canada": [
|
||||
"cbc",
|
||||
"ctv",
|
||||
"global",
|
||||
"canada",
|
||||
"canadian",
|
||||
" ca ",
|
||||
".ca"
|
||||
],
|
||||
"🇩🇪 Germany": [
|
||||
"ard",
|
||||
"zdf",
|
||||
"rtl",
|
||||
"sat.1",
|
||||
"pro7",
|
||||
"germany",
|
||||
"german",
|
||||
" de ",
|
||||
".de"
|
||||
],
|
||||
"🇫🇷 France": [
|
||||
"tf1",
|
||||
"france 2",
|
||||
"m6",
|
||||
"canal+",
|
||||
"france",
|
||||
"french",
|
||||
" fr ",
|
||||
".fr"
|
||||
],
|
||||
"🇪🇸 Spain": [
|
||||
"tve",
|
||||
"antena 3",
|
||||
"telecinco",
|
||||
"spain",
|
||||
"spanish",
|
||||
" es ",
|
||||
".es"
|
||||
],
|
||||
"🇮🇹 Italy": [
|
||||
"rai",
|
||||
"mediaset",
|
||||
"canale 5",
|
||||
"italy",
|
||||
"italian",
|
||||
" it ",
|
||||
".it"
|
||||
],
|
||||
"🇳🇱 Netherlands": [
|
||||
"npo",
|
||||
"rtl nl",
|
||||
"netherlands",
|
||||
"dutch",
|
||||
"holland",
|
||||
" nl ",
|
||||
".nl"
|
||||
],
|
||||
"🇧🇪 Belgium": [
|
||||
"vtm",
|
||||
"één",
|
||||
"canvas",
|
||||
"belgium",
|
||||
"belgian",
|
||||
" be ",
|
||||
".be"
|
||||
],
|
||||
"🇨🇭 Switzerland": [
|
||||
"srf",
|
||||
"rts",
|
||||
"switzerland",
|
||||
"swiss",
|
||||
" ch ",
|
||||
".ch"
|
||||
],
|
||||
"🇦🇹 Austria": [
|
||||
"orf",
|
||||
"austria",
|
||||
"austrian",
|
||||
" at ",
|
||||
".at"
|
||||
],
|
||||
"🇵🇹 Portugal": [
|
||||
"rtp",
|
||||
"sic",
|
||||
"tvi",
|
||||
"portugal",
|
||||
"portuguese",
|
||||
" pt ",
|
||||
".pt"
|
||||
],
|
||||
"🇮🇪 Ireland": [
|
||||
"rte",
|
||||
"tg4",
|
||||
"ireland",
|
||||
"irish",
|
||||
" ie ",
|
||||
".ie"
|
||||
],
|
||||
"🇸🇪 Sweden": [
|
||||
"svt",
|
||||
"tv4",
|
||||
"sweden",
|
||||
"swedish",
|
||||
" se ",
|
||||
".se"
|
||||
],
|
||||
"🇳🇴 Norway": [
|
||||
"nrk",
|
||||
"tv 2 no",
|
||||
"norway",
|
||||
"norwegian",
|
||||
" no ",
|
||||
".no"
|
||||
],
|
||||
"🇩🇰 Denmark": [
|
||||
"dr",
|
||||
"tv2 dk",
|
||||
"denmark",
|
||||
"danish",
|
||||
" dk ",
|
||||
".dk"
|
||||
],
|
||||
"🇫🇮 Finland": [
|
||||
"yle",
|
||||
"mtv3",
|
||||
"finland",
|
||||
"finnish",
|
||||
" fi ",
|
||||
".fi"
|
||||
],
|
||||
"🇮🇸 Iceland": [
|
||||
"ruv",
|
||||
"iceland",
|
||||
"icelandic",
|
||||
" is ",
|
||||
".is"
|
||||
],
|
||||
"🇷🇺 Russia": [
|
||||
"channel one",
|
||||
"rossiya",
|
||||
"ntv",
|
||||
"russia",
|
||||
"russian",
|
||||
" ru ",
|
||||
".ru"
|
||||
],
|
||||
"🇵🇱 Poland": [
|
||||
"tvp",
|
||||
"polsat",
|
||||
"tvn",
|
||||
"poland",
|
||||
"polish",
|
||||
" pl ",
|
||||
".pl"
|
||||
],
|
||||
"🇨🇿 Czech Republic": [
|
||||
"ct",
|
||||
"nova",
|
||||
"prima",
|
||||
"czech",
|
||||
" cz ",
|
||||
".cz"
|
||||
],
|
||||
"🇸🇰 Slovakia": [
|
||||
"rtvs",
|
||||
"markiza",
|
||||
"slovakia",
|
||||
"slovak",
|
||||
" sk ",
|
||||
".sk"
|
||||
],
|
||||
"🇭🇺 Hungary": [
|
||||
"mtv hu",
|
||||
"rtl klub",
|
||||
"hungary",
|
||||
"hungarian",
|
||||
" hu ",
|
||||
".hu"
|
||||
],
|
||||
"🇺🇦 Ukraine": [
|
||||
"1+1",
|
||||
"inter",
|
||||
"ictv",
|
||||
"ukraine",
|
||||
"ukrainian",
|
||||
" ua ",
|
||||
".ua"
|
||||
],
|
||||
"🇷🇴 Romania": [
|
||||
"tvr",
|
||||
"pro tv",
|
||||
"romania",
|
||||
"romanian",
|
||||
" ro ",
|
||||
".ro"
|
||||
],
|
||||
"🇧🇬 Bulgaria": [
|
||||
"btv",
|
||||
"nova bg",
|
||||
"bulgaria",
|
||||
"bulgarian",
|
||||
" bg ",
|
||||
".bg"
|
||||
],
|
||||
"🇭🇷 Croatia": [
|
||||
"hrt",
|
||||
"nova tv hr",
|
||||
"croatia",
|
||||
"croatian",
|
||||
" hr ",
|
||||
".hr"
|
||||
],
|
||||
"🇷🇸 Serbia": [
|
||||
"rts",
|
||||
"pink",
|
||||
"serbia",
|
||||
"serbian",
|
||||
" rs ",
|
||||
".rs"
|
||||
],
|
||||
"🇬🇷 Greece": [
|
||||
"ert",
|
||||
"mega gr",
|
||||
"greece",
|
||||
"greek",
|
||||
" gr ",
|
||||
".gr"
|
||||
],
|
||||
"🇧🇷 Brazil": [
|
||||
"globo",
|
||||
"band",
|
||||
"sbt",
|
||||
"brazil",
|
||||
"brasil",
|
||||
" br ",
|
||||
".br"
|
||||
],
|
||||
"🇦🇷 Argentina": [
|
||||
"telefe",
|
||||
"canal 13",
|
||||
"argentina",
|
||||
" ar ",
|
||||
".ar"
|
||||
],
|
||||
"🇲🇽 Mexico": [
|
||||
"televisa",
|
||||
"tv azteca",
|
||||
"mexico",
|
||||
"méxico",
|
||||
" mx ",
|
||||
".mx"
|
||||
],
|
||||
"🇨🇱 Chile": [
|
||||
"tvn",
|
||||
"mega",
|
||||
"chile",
|
||||
"chilean",
|
||||
" cl ",
|
||||
".cl"
|
||||
],
|
||||
"🇨🇴 Colombia": [
|
||||
"caracol",
|
||||
"rcn",
|
||||
"colombia",
|
||||
"colombian",
|
||||
" co ",
|
||||
".co"
|
||||
],
|
||||
"🇵🇪 Peru": [
|
||||
"america tv pe",
|
||||
"peru",
|
||||
"peruvian",
|
||||
" pe ",
|
||||
".pe"
|
||||
],
|
||||
"🇻🇪 Venezuela": [
|
||||
"venevision",
|
||||
"venezuela",
|
||||
"venezuelan",
|
||||
" ve ",
|
||||
".ve"
|
||||
],
|
||||
"🇨🇳 China": [
|
||||
"cctv",
|
||||
"phoenix",
|
||||
"china",
|
||||
"chinese",
|
||||
" cn ",
|
||||
".cn"
|
||||
],
|
||||
"🇯🇵 Japan": [
|
||||
"nhk",
|
||||
"fuji",
|
||||
"tv asahi",
|
||||
"japan",
|
||||
"japanese",
|
||||
" jp ",
|
||||
".jp"
|
||||
],
|
||||
"🇰🇷 South Korea": [
|
||||
"kbs",
|
||||
"sbs kr",
|
||||
"mbc kr",
|
||||
"korea",
|
||||
"korean",
|
||||
" kr ",
|
||||
".kr"
|
||||
],
|
||||
"🇰🇵 North Korea": [
|
||||
"kctv",
|
||||
"north korea",
|
||||
"dprk"
|
||||
],
|
||||
"🇹🇼 Taiwan": [
|
||||
"cts",
|
||||
"ctv",
|
||||
"tvbs",
|
||||
"taiwan",
|
||||
"taiwanese",
|
||||
" tw ",
|
||||
".tw"
|
||||
],
|
||||
"🇭🇰 Hong Kong": [
|
||||
"tvb",
|
||||
"atv",
|
||||
"hong kong",
|
||||
"hongkong",
|
||||
" hk ",
|
||||
".hk"
|
||||
],
|
||||
"🇹🇭 Thailand": [
|
||||
"ch3",
|
||||
"ch7",
|
||||
"thai pbs",
|
||||
"thailand",
|
||||
"thai",
|
||||
" th ",
|
||||
".th"
|
||||
],
|
||||
"🇻🇳 Vietnam": [
|
||||
"vtv",
|
||||
"htv",
|
||||
"vietnam",
|
||||
"vietnamese",
|
||||
" vn ",
|
||||
".vn"
|
||||
],
|
||||
"🇮🇩 Indonesia": [
|
||||
"tvri",
|
||||
"sctv",
|
||||
"rcti",
|
||||
"indonesia",
|
||||
"indonesian",
|
||||
" id ",
|
||||
".id"
|
||||
],
|
||||
"🇲🇾 Malaysia": [
|
||||
"tv1",
|
||||
"tv3",
|
||||
"astro",
|
||||
"malaysia",
|
||||
"malaysian",
|
||||
" my ",
|
||||
".my",
|
||||
"my:"
|
||||
],
|
||||
"🇸🇬 Singapore": [
|
||||
"channel 5",
|
||||
"channel 8",
|
||||
"singapore",
|
||||
" sg ",
|
||||
".sg"
|
||||
],
|
||||
"🇵🇭 Philippines": [
|
||||
"abs-cbn",
|
||||
"gma",
|
||||
"philippines",
|
||||
"filipino",
|
||||
" ph ",
|
||||
".ph"
|
||||
],
|
||||
"🇮🇳 India": [
|
||||
"star plus",
|
||||
"zee tv",
|
||||
"colors",
|
||||
"sony tv",
|
||||
"india",
|
||||
"indian",
|
||||
"hindi",
|
||||
" in ",
|
||||
".in"
|
||||
],
|
||||
"🇵🇰 Pakistan": [
|
||||
"ptv",
|
||||
"geo tv",
|
||||
"ary",
|
||||
"pakistan",
|
||||
"pakistani",
|
||||
" pk ",
|
||||
".pk"
|
||||
],
|
||||
"🇧🇩 Bangladesh": [
|
||||
"btv",
|
||||
"channel i",
|
||||
"bangladesh",
|
||||
"bangladeshi",
|
||||
" bd ",
|
||||
".bd"
|
||||
],
|
||||
"🇱🇰 Sri Lanka": [
|
||||
"rupavahini",
|
||||
"sirasa",
|
||||
"sri lanka",
|
||||
" lk ",
|
||||
".lk"
|
||||
],
|
||||
"🇳🇵 Nepal": [
|
||||
"nepal tv",
|
||||
"kantipur",
|
||||
"nepal",
|
||||
"nepali",
|
||||
" np ",
|
||||
".np"
|
||||
],
|
||||
"🇦🇫 Afghanistan": [
|
||||
"rta",
|
||||
"tolo tv",
|
||||
"afghanistan",
|
||||
"afghan",
|
||||
" af ",
|
||||
".af"
|
||||
],
|
||||
"🇦🇺 Australia": [
|
||||
"abc au",
|
||||
"seven",
|
||||
"nine",
|
||||
"ten",
|
||||
"australia",
|
||||
"australian",
|
||||
"aussie",
|
||||
" au ",
|
||||
".au"
|
||||
],
|
||||
"🇳🇿 New Zealand": [
|
||||
"tvnz",
|
||||
"tvnz 1",
|
||||
"tvnz 2",
|
||||
"three nz",
|
||||
"tvnz duke",
|
||||
"new zealand",
|
||||
"kiwi",
|
||||
" nz ",
|
||||
".nz"
|
||||
],
|
||||
"🇸🇦 Arabic": [
|
||||
"al jazeera",
|
||||
"mbc",
|
||||
"lbc",
|
||||
"dubai tv",
|
||||
"arabic",
|
||||
"arab",
|
||||
"qatar",
|
||||
"dubai",
|
||||
"saudi"
|
||||
],
|
||||
"🇮🇱 Israel": [
|
||||
"kan",
|
||||
"keshet 12",
|
||||
"israel",
|
||||
"israeli",
|
||||
"hebrew",
|
||||
" il ",
|
||||
".il"
|
||||
],
|
||||
"🇹🇷 Turkey": [
|
||||
"trt",
|
||||
"atv",
|
||||
"kanal d",
|
||||
"turkey",
|
||||
"turkish",
|
||||
" tr ",
|
||||
".tr",
|
||||
"tr |"
|
||||
],
|
||||
"🇮🇷 Iran": [
|
||||
"irib",
|
||||
"press tv",
|
||||
"iran",
|
||||
"iranian",
|
||||
"persian",
|
||||
" ir ",
|
||||
".ir"
|
||||
],
|
||||
"🇪🇬 Egypt": [
|
||||
"nile tv",
|
||||
"cbc egypt",
|
||||
"egypt",
|
||||
"egyptian",
|
||||
" eg ",
|
||||
".eg"
|
||||
],
|
||||
"🇿🇦 South Africa": [
|
||||
"sabc",
|
||||
"etv",
|
||||
"mnet",
|
||||
"south africa",
|
||||
" za ",
|
||||
".za"
|
||||
],
|
||||
"🇳🇬 Nigeria": [
|
||||
"nta",
|
||||
"channels tv",
|
||||
"nigeria",
|
||||
"nigerian",
|
||||
" ng ",
|
||||
".ng"
|
||||
]
|
||||
},
|
||||
"country_prefixes": {
|
||||
"🇺🇦 Ukraine": [
|
||||
"ua |"
|
||||
],
|
||||
"🇵🇱 Poland": [
|
||||
"pl |"
|
||||
],
|
||||
"🇹🇷 Turkey": [
|
||||
"tr |"
|
||||
],
|
||||
"🇲🇾 Malaysia": [
|
||||
"my:",
|
||||
"my |"
|
||||
],
|
||||
"🇬🇧 United Kingdom": [
|
||||
"uk:",
|
||||
"uk |"
|
||||
],
|
||||
"🇺🇸 United States": [
|
||||
"us:",
|
||||
"us |"
|
||||
]
|
||||
},
|
||||
"quality_patterns": {
|
||||
"4K": [
|
||||
"4k",
|
||||
"uhd",
|
||||
"2160p"
|
||||
],
|
||||
"FHD": [
|
||||
"fhd",
|
||||
"1080p",
|
||||
"1080"
|
||||
],
|
||||
"HD": [
|
||||
"hd",
|
||||
"720p",
|
||||
"720"
|
||||
],
|
||||
"SD": [
|
||||
"sd",
|
||||
"480p",
|
||||
"360p"
|
||||
]
|
||||
},
|
||||
"adult_keywords": [
|
||||
"xxx",
|
||||
"adult",
|
||||
"porn",
|
||||
"sex",
|
||||
"erotic",
|
||||
"playboy",
|
||||
"18+"
|
||||
]
|
||||
}
|
|
@ -1,16 +0,0 @@
|
|||
{
|
||||
"remove_duplicates": true,
|
||||
"sort_channels": true,
|
||||
"backup_before_import": true,
|
||||
"auto_cleanup_import": true,
|
||||
"auto_detect_country": true,
|
||||
"detect_quality": true,
|
||||
"skip_adult_content": true,
|
||||
"min_channel_name_length": 2,
|
||||
"max_workers": 4,
|
||||
"enable_health_check": false,
|
||||
"health_check_timeout": 5,
|
||||
"create_backup": true,
|
||||
"max_backups": 5,
|
||||
"log_level": "INFO"
|
||||
}
|
|
@ -1,552 +0,0 @@
|
|||
#!/usr/bin/env python3
|
||||
"""
|
||||
Complete IPTV Repository Setup & Cleanup
|
||||
This script will set up all the cleanup automation and organize your repository
|
||||
"""
|
||||
|
||||
import os
|
||||
import shutil
|
||||
import subprocess
|
||||
from pathlib import Path
|
||||
from datetime import datetime
|
||||
|
||||
def run_command(cmd, description):
|
||||
"""Run a command and show the result."""
|
||||
print(f"🔧 {description}...")
|
||||
try:
|
||||
result = subprocess.run(cmd, shell=True, capture_output=True, text=True)
|
||||
if result.returncode == 0:
|
||||
print(f" ✅ Success")
|
||||
return True
|
||||
else:
|
||||
print(f" ⚠️ Warning: {result.stderr.strip()}")
|
||||
return False
|
||||
except Exception as e:
|
||||
print(f" ❌ Error: {e}")
|
||||
return False
|
||||
|
||||
def create_file(file_path, content, description):
|
||||
"""Create a file with given content."""
|
||||
print(f"📝 Creating {description}...")
|
||||
try:
|
||||
file_path.parent.mkdir(parents=True, exist_ok=True)
|
||||
with open(file_path, 'w', encoding='utf-8') as f:
|
||||
f.write(content)
|
||||
print(f" ✅ Created: {file_path}")
|
||||
return True
|
||||
except Exception as e:
|
||||
print(f" ❌ Error creating {file_path}: {e}")
|
||||
return False
|
||||
|
||||
def main():
|
||||
"""Set up complete repository cleanup and automation."""
|
||||
print("🎯 IPTV Repository Complete Setup")
|
||||
print("=" * 50)
|
||||
print("This will set up automated cleanup and organization for your repository")
|
||||
print()
|
||||
|
||||
root_path = Path.cwd()
|
||||
|
||||
# Check if we're in the right directory
|
||||
if not (root_path / 'scripts').exists():
|
||||
print("❌ Error: Please run this script from your repository root directory")
|
||||
print(" (The directory should contain a 'scripts' folder)")
|
||||
return
|
||||
|
||||
print(f"📁 Working in: {root_path}")
|
||||
print()
|
||||
|
||||
# Step 1: Create enhanced .gitignore
|
||||
gitignore_content = """# IPTV Playlist Generator - Enhanced .gitignore
|
||||
|
||||
# ===== PYTHON =====
|
||||
__pycache__/
|
||||
*.py[cod]
|
||||
*$py.class
|
||||
*.so
|
||||
.Python
|
||||
build/
|
||||
develop-eggs/
|
||||
dist/
|
||||
downloads/
|
||||
eggs/
|
||||
.eggs/
|
||||
lib/
|
||||
lib64/
|
||||
parts/
|
||||
sdist/
|
||||
var/
|
||||
wheels/
|
||||
*.egg-info/
|
||||
.installed.cfg
|
||||
*.egg
|
||||
|
||||
# ===== LOGS & TEMPORARY FILES =====
|
||||
*.log
|
||||
*.tmp
|
||||
*_temp*
|
||||
*.backup.*
|
||||
temp_*
|
||||
.temp/
|
||||
|
||||
# Specific log patterns
|
||||
playlist_update.log
|
||||
import_*.log
|
||||
health_check_*.log
|
||||
|
||||
# ===== BACKUP FILES =====
|
||||
# Keep backups directory but ignore temporary backups
|
||||
backups/temp_*
|
||||
backups/*.tmp
|
||||
*.backup
|
||||
*~
|
||||
|
||||
# ===== DEVELOPMENT & TESTING =====
|
||||
.pytest_cache/
|
||||
.coverage
|
||||
htmlcov/
|
||||
.tox/
|
||||
.nox/
|
||||
.cache
|
||||
nosetests.xml
|
||||
coverage.xml
|
||||
*.cover
|
||||
.hypothesis/
|
||||
|
||||
# ===== IDE & EDITOR FILES =====
|
||||
.vscode/
|
||||
.idea/
|
||||
*.swp
|
||||
*.swo
|
||||
*~
|
||||
.DS_Store
|
||||
Thumbs.db
|
||||
|
||||
# ===== ENVIRONMENT & CONFIG =====
|
||||
.env
|
||||
.env.local
|
||||
.env.*.local
|
||||
venv/
|
||||
env/
|
||||
ENV/
|
||||
|
||||
# ===== IPTV SPECIFIC =====
|
||||
# Temporary import files (keep the main one)
|
||||
bulk_import_temp.m3u
|
||||
import_temp_*.m3u
|
||||
*_processing.m3u
|
||||
|
||||
# Large test files
|
||||
test_large_*.m3u
|
||||
sample_*.m3u
|
||||
|
||||
# Generated temporary playlists
|
||||
playlist_temp.m3u
|
||||
temp_playlist_*.m3u
|
||||
|
||||
# ===== DOCUMENTATION BUILDS =====
|
||||
docs/_build/
|
||||
site/
|
||||
|
||||
# ===== ARCHIVE & COMPRESSED =====
|
||||
*.tar.gz
|
||||
*.zip
|
||||
*.rar
|
||||
logs_archive_*.tar.gz
|
||||
|
||||
# ===== SYSTEM FILES =====
|
||||
.DS_Store
|
||||
.DS_Store?
|
||||
._*
|
||||
.Spotlight-V100
|
||||
.Trashes
|
||||
ehthumbs.db
|
||||
Thumbs.db"""
|
||||
|
||||
create_file(root_path / '.gitignore', gitignore_content, "Enhanced .gitignore")
|
||||
|
||||
# Step 2: Create repository health monitor
|
||||
repo_health_content = '''#!/usr/bin/env python3
|
||||
"""
|
||||
Repository Health Monitor - Simplified for immediate use
|
||||
"""
|
||||
|
||||
import os
|
||||
import shutil
|
||||
import logging
|
||||
from datetime import datetime, timedelta
|
||||
from pathlib import Path
|
||||
import gzip
|
||||
|
||||
class SimpleRepoCleanup:
|
||||
"""Simplified repository cleanup for immediate use."""
|
||||
|
||||
def __init__(self):
|
||||
self.root_path = Path.cwd()
|
||||
self.cleaned_items = []
|
||||
|
||||
def run_cleanup(self):
|
||||
"""Run complete cleanup."""
|
||||
print("🧹 Starting repository cleanup...")
|
||||
|
||||
# 1. Remove Python cache
|
||||
self._clean_python_cache()
|
||||
|
||||
# 2. Remove temporary files
|
||||
self._clean_temp_files()
|
||||
|
||||
# 3. Organize log files
|
||||
self._organize_logs()
|
||||
|
||||
# 4. Compress old backups
|
||||
self._compress_backups()
|
||||
|
||||
# 5. Ensure directory structure
|
||||
self._ensure_directories()
|
||||
|
||||
# 6. Clean import file
|
||||
self._clean_import_file()
|
||||
|
||||
print(f"\\n✅ Cleanup complete! {len(self.cleaned_items)} items processed")
|
||||
|
||||
return self.cleaned_items
|
||||
|
||||
def _clean_python_cache(self):
|
||||
"""Remove Python cache files."""
|
||||
print(" 🐍 Cleaning Python cache...")
|
||||
|
||||
# Remove __pycache__ directories
|
||||
for cache_dir in self.root_path.rglob('__pycache__'):
|
||||
if cache_dir.is_dir():
|
||||
try:
|
||||
shutil.rmtree(cache_dir)
|
||||
self.cleaned_items.append(f"Removed cache: {cache_dir.relative_to(self.root_path)}")
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
# Remove .pyc and .pyo files
|
||||
for pyc_file in list(self.root_path.rglob('*.pyc')) + list(self.root_path.rglob('*.pyo')):
|
||||
try:
|
||||
pyc_file.unlink()
|
||||
self.cleaned_items.append(f"Removed: {pyc_file.relative_to(self.root_path)}")
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
def _clean_temp_files(self):
|
||||
"""Remove temporary files."""
|
||||
print(" 🗑️ Cleaning temporary files...")
|
||||
|
||||
patterns = ['*_temp*', '*.tmp', '*~', '*.swp', '*.swo']
|
||||
|
||||
for pattern in patterns:
|
||||
for temp_file in self.root_path.rglob(pattern):
|
||||
if temp_file.is_file() and '.git' not in str(temp_file):
|
||||
try:
|
||||
temp_file.unlink()
|
||||
self.cleaned_items.append(f"Removed temp: {temp_file.relative_to(self.root_path)}")
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
def _organize_logs(self):
|
||||
"""Organize log files."""
|
||||
print(" 📋 Organizing log files...")
|
||||
|
||||
logs_dir = self.root_path / 'reports' / 'logs'
|
||||
logs_dir.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
for log_file in self.root_path.glob('*.log'):
|
||||
try:
|
||||
new_location = logs_dir / log_file.name
|
||||
shutil.move(str(log_file), str(new_location))
|
||||
self.cleaned_items.append(f"Moved log: {log_file.name} → reports/logs/")
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
def _compress_backups(self):
|
||||
"""Compress old backup files."""
|
||||
print(" 🗜️ Compressing old backups...")
|
||||
|
||||
backup_dir = self.root_path / 'backups'
|
||||
if backup_dir.exists():
|
||||
cutoff_date = datetime.now() - timedelta(days=7)
|
||||
|
||||
for backup_file in backup_dir.glob('*.txt'):
|
||||
try:
|
||||
file_date = datetime.fromtimestamp(backup_file.stat().st_mtime)
|
||||
if file_date < cutoff_date:
|
||||
# Compress with gzip
|
||||
with open(backup_file, 'rb') as f_in:
|
||||
with gzip.open(f"{backup_file}.gz", 'wb') as f_out:
|
||||
shutil.copyfileobj(f_in, f_out)
|
||||
backup_file.unlink()
|
||||
self.cleaned_items.append(f"Compressed: {backup_file.name}")
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
def _ensure_directories(self):
|
||||
"""Ensure proper directory structure."""
|
||||
print(" 📁 Ensuring directory structure...")
|
||||
|
||||
directories = [
|
||||
'config',
|
||||
'backups',
|
||||
'reports/logs',
|
||||
'reports/archive',
|
||||
'templates'
|
||||
]
|
||||
|
||||
for directory in directories:
|
||||
(self.root_path / directory).mkdir(parents=True, exist_ok=True)
|
||||
|
||||
def _clean_import_file(self):
|
||||
"""Clean the import file if needed."""
|
||||
print(" 📥 Checking import file...")
|
||||
|
||||
import_file = self.root_path / 'bulk_import.m3u'
|
||||
if import_file.exists():
|
||||
try:
|
||||
with open(import_file, 'r', encoding='utf-8') as f:
|
||||
content = f.read().strip()
|
||||
|
||||
lines = content.split('\\n')
|
||||
if len(lines) > 2 or any('#EXTINF' in line for line in lines):
|
||||
with open(import_file, 'w', encoding='utf-8') as f:
|
||||
f.write('#EXTM3U\\n')
|
||||
self.cleaned_items.append("Cleared bulk_import.m3u (ready for next import)")
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
def main():
    """Entry point: build the cleaner and execute one full pass."""
    SimpleRepoCleanup().run_cleanup()
|
||||
|
||||
# Allow the generated cleanup script to be executed directly.
if __name__ == "__main__":
    main()
|
||||
'''
|
||||
|
||||
# Materialize the SimpleRepoCleanup helper script from the template string above.
create_file(root_path / 'scripts' / 'quick_cleanup.py', repo_health_content, "Quick cleanup script")
|
||||
|
||||
# Step 3: Create the enhanced workflow
|
||||
workflow_content = """name: Generate M3U Playlist with Auto-Cleanup
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
build-and-cleanup:
|
||||
runs-on: ubuntu-22.04
|
||||
steps:
|
||||
- name: Checkout Repository
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: '3.11'
|
||||
|
||||
- name: Configure Git
|
||||
run: |
|
||||
git config --local user.email "actions@forgejo.plainrock127.xyz"
|
||||
git config --local user.name "IPTV Playlist Bot"
|
||||
|
||||
- name: Auto-Cleanup Repository
|
||||
run: |
|
||||
echo "=== Auto-Cleanup Phase ==="
|
||||
|
||||
# Remove Python cache thoroughly
|
||||
find . -type d -name "__pycache__" -exec rm -rf {} + 2>/dev/null || true
|
||||
find . -name "*.pyc" -delete 2>/dev/null || true
|
||||
find . -name "*.pyo" -delete 2>/dev/null || true
|
||||
|
||||
# Remove temporary files
|
||||
find . -name "*_temp*" -type f -delete 2>/dev/null || true
|
||||
find . -name "*.tmp" -delete 2>/dev/null || true
|
||||
find . -name "*~" -delete 2>/dev/null || true
|
||||
|
||||
# Clean backup files older than 30 days
|
||||
find backups -name "*.txt" -type f -mtime +30 -delete 2>/dev/null || true
|
||||
|
||||
# Organize log files
|
||||
mkdir -p reports/logs reports/archive
|
||||
find . -maxdepth 1 -name "*.log" -exec mv {} reports/logs/ \\; 2>/dev/null || true
|
||||
|
||||
# Compress old backups (older than 7 days)
|
||||
find backups -name "*.txt" -type f -mtime +7 -exec gzip {} \\; 2>/dev/null || true
|
||||
|
||||
echo "✅ Cleanup completed"
|
||||
|
||||
- name: Setup Directories
|
||||
run: |
|
||||
echo "=== Directory Setup ==="
|
||||
mkdir -p config backups reports/logs reports/archive templates
|
||||
|
||||
# Create scripts/__init__.py if missing
|
||||
if [ ! -f scripts/__init__.py ]; then
|
||||
echo '# Scripts package' > scripts/__init__.py
|
||||
fi
|
||||
|
||||
- name: Run Playlist Generation
|
||||
run: |
|
||||
echo "=== Playlist Generation ==="
|
||||
python scripts/generate_playlist.py
|
||||
|
||||
- name: Post-Generation Analysis
|
||||
run: |
|
||||
echo "=== Results Analysis ==="
|
||||
|
||||
if [ -f playlist.m3u ]; then
|
||||
CHANNEL_COUNT=$(grep -c "^#EXTINF" playlist.m3u 2>/dev/null || echo "0")
|
||||
FILE_SIZE=$(du -h playlist.m3u | cut -f1)
|
||||
echo "✅ playlist.m3u: $CHANNEL_COUNT channels ($FILE_SIZE)"
|
||||
else
|
||||
echo "❌ playlist.m3u not generated"
|
||||
fi
|
||||
|
||||
- name: Final Cleanup & Organization
|
||||
run: |
|
||||
echo "=== Final Organization ==="
|
||||
|
||||
# Ensure bulk_import.m3u is clean
|
||||
if [ -f bulk_import.m3u ]; then
|
||||
LINE_COUNT=$(wc -l < bulk_import.m3u)
|
||||
if [ "$LINE_COUNT" -gt 2 ]; then
|
||||
echo '#EXTM3U' > bulk_import.m3u
|
||||
echo '' >> bulk_import.m3u
|
||||
echo "🧹 Cleaned bulk_import.m3u"
|
||||
fi
|
||||
fi
|
||||
|
||||
- name: Commit Changes
|
||||
run: |
|
||||
echo "=== Committing Changes ==="
|
||||
|
||||
# Add specific files only
|
||||
git add bulk_import.m3u || true
|
||||
git add channels.txt || true
|
||||
git add playlist.m3u || true
|
||||
git add scripts/ || true
|
||||
git add config/ || true
|
||||
git add reports/ || true
|
||||
git add backups/*.gz || true
|
||||
git add templates/ || true
|
||||
git add .forgejo/ || true
|
||||
git add README.md || true
|
||||
git add .gitignore || true
|
||||
|
||||
if ! git diff --staged --quiet; then
|
||||
CHANNEL_COUNT="0"
|
||||
if [ -f playlist.m3u ]; then
|
||||
CHANNEL_COUNT=$(grep -c "^#EXTINF" playlist.m3u 2>/dev/null || echo "0")
|
||||
fi
|
||||
|
||||
git commit -m "📺 Updated playlist: $CHANNEL_COUNT channels ($(date '+%Y-%m-%d %H:%M')) - Auto-cleaned"
|
||||
git push
|
||||
echo "✅ Repository updated and cleaned"
|
||||
else
|
||||
echo "ℹ️ No changes to commit"
|
||||
fi"""
|
||||
|
||||
# Write the CI workflow template to the Forgejo workflows directory.
# NOTE(review): inside the workflow, `CHANNEL_COUNT=$(grep -c ... || echo "0")`
# can produce "0\n0" when grep finds nothing — `grep -c` prints 0 AND exits 1,
# so the echo fallback also fires. Consider `|| true` instead; confirm intent.
workflow_dir = root_path / '.forgejo' / 'workflows'
create_file(workflow_dir / 'generate-m3u.yml', workflow_content, "Enhanced Forgejo workflow")
|
||||
|
||||
# Step 4: Run immediate cleanup
|
||||
print("\n" + "=" * 50)
print("🧹 IMMEDIATE CLEANUP")
print("=" * 50)

# Import and run the cleanup
import sys
sys.path.insert(0, str(root_path / 'scripts'))

try:
    # NOTE(review): exec() inside a function does not reliably bind new names
    # into the function's local scope in CPython, so the SimpleRepoCleanup
    # lookup below is likely to raise NameError and fall through to the
    # manual-cleanup branch — presumably the intended safety net; confirm.
    # Also note: the template's own `if __name__ == "__main__"` guard fires
    # under exec() when this setup script runs as __main__, so the cleanup
    # may already have executed inside exec() before the fallback runs.
    exec(repo_health_content)
    cleanup = SimpleRepoCleanup()
    cleaned_items = cleanup.run_cleanup()
except Exception as e:
    print(f"Running manual cleanup instead: {e}")

    # Manual cleanup
    print("🧹 Running manual cleanup...")
    cleaned_items = []

    # Remove Python cache
    for cache_dir in root_path.rglob('__pycache__'):
        if cache_dir.is_dir():
            try:
                shutil.rmtree(cache_dir)
                cleaned_items.append(f"Removed: {cache_dir.relative_to(root_path)}")
            except:  # NOTE(review): bare except also swallows KeyboardInterrupt; prefer `except Exception`
                pass

    # Remove temp files
    patterns = ['*.pyc', '*.pyo', '*_temp*', '*.tmp', '*~']
    for pattern in patterns:
        for file_path in root_path.rglob(pattern):
            if file_path.is_file() and '.git' not in str(file_path):
                try:
                    file_path.unlink()
                    cleaned_items.append(f"Removed: {file_path.relative_to(root_path)}")
                except:  # NOTE(review): prefer `except Exception` here as well
                    pass

    # Organize logs
    logs_dir = root_path / 'reports' / 'logs'
    logs_dir.mkdir(parents=True, exist_ok=True)

    for log_file in root_path.glob('*.log'):
        try:
            shutil.move(str(log_file), str(logs_dir / log_file.name))
            cleaned_items.append(f"Moved: {log_file.name} → reports/logs/")
        except:  # NOTE(review): prefer `except Exception`
            pass
|
||||
|
||||
# Step 5: Setup git hooks (optional)
|
||||
# Stage the files this setup script created/changed, when inside a git repo.
print("\n" + "=" * 50)
print("🔧 OPTIONAL GIT SETUP")
print("=" * 50)

# Check if this is a git repository
if (root_path / '.git').exists():
    print("📝 Setting up git configuration...")

    # Add files to git (run_command is presumably a shell helper defined
    # earlier in this file — not visible in this chunk)
    run_command("git add .gitignore", "Adding .gitignore")
    run_command("git add scripts/quick_cleanup.py", "Adding cleanup script")
    run_command("git add .forgejo/workflows/generate-m3u.yml", "Adding enhanced workflow")

    print(" ✅ Files staged for commit")
else:
    print(" ℹ️ Not a git repository, skipping git setup")
|
||||
|
||||
# Step 6: Final summary
|
||||
# Final user-facing summary.
# NOTE(review): most f-strings below have no placeholders — plain string
# literals would do; harmless as written.
print("\n" + "=" * 50)
print("✅ SETUP COMPLETE!")
print("=" * 50)

print(f"📊 Summary:")
print(f" - Cleaned {len(cleaned_items)} items")
print(f" - Created enhanced .gitignore")
print(f" - Added quick cleanup script")
print(f" - Updated Forgejo workflow")
print(f" - Organized directory structure")

print(f"\n🎯 What's New:")
print(f" 📁 reports/logs/ - All logs organized here")
print(f" 🧹 scripts/quick_cleanup.py - Manual cleanup tool")
print(f" 🔄 Enhanced workflow - Automatic cleanup on push")
print(f" 🚫 .gitignore - Prevents future clutter")

print(f"\n🚀 Next Steps:")
print(f" 1. Commit these changes: git commit -m 'Setup automated cleanup'")
print(f" 2. Push to trigger workflow: git push")
print(f" 3. For manual cleanup: python scripts/quick_cleanup.py")
print(f" 4. Your repository will now stay clean automatically!")

print(f"\n🎉 Your IPTV repository is now clean and organized!")
|
||||
|
||||
# Script entry point.
if __name__ == "__main__":
    main()
|
Loading…
Add table
Add a link
Reference in a new issue