Update scripts/generate_playlist.py
Some checks failed
📺 Generate M3U Playlist / build (push) Failing after 1m20s
Some checks failed
📺 Generate M3U Playlist / build (push) Failing after 1m20s
This commit is contained in:
parent
2f64ae81b8
commit
e2d5b4ab53
1 changed files with 76 additions and 4 deletions
|
@ -312,7 +312,69 @@ def remove_duplicates(channels, settings):
|
||||||
|
|
||||||
return unique_channels
|
return unique_channels
|
||||||
|
|
||||||
def update_existing_channels_with_country_detection():
|
def _strip_corruption_markers(value):
    """Return *value* truncated before any M3U metadata that leaked into it.

    Corrupted entries in channels.txt sometimes contain a following
    '#EXTINF' header or a 'group-title=' attribute glued onto a URL.
    Everything from the first such marker onward is dropped and the
    remainder is stripped of surrounding whitespace.
    """
    for marker in ('#EXTINF', 'group-title='):
        if marker in value:
            value = value.split(marker)[0].strip()
    return value


def clean_corrupted_channels():
    """Clean up any corrupted entries in existing channels.txt.

    Reads CHANNELS_FILE, splits it into blank-line-separated channel
    blocks, repairs 'Stream URL' and 'Logo' fields that contain leaked
    M3U metadata, and rewrites the file (after taking a timestamped
    backup) only when at least one entry was actually fixed.
    Returns None; all progress is reported via log_message().
    """
    if not os.path.exists(CHANNELS_FILE):
        return

    log_message("Cleaning up any corrupted entries in channels.txt...", "INFO")

    with open(CHANNELS_FILE, 'r', encoding='utf-8') as f:
        content = f.read()

    # Channel entries are separated by one or more blank lines.
    channel_blocks = re.split(r'\n\s*\n+', content.strip())
    cleaned_channels = []
    fixed_count = 0

    for block in channel_blocks:
        if not block.strip():
            continue
        channel = parse_channel_block(block)
        if not channel:
            continue

        # Clean corrupted Stream URL.
        stream_url = channel.get('Stream URL', '')
        if '#EXTINF' in stream_url or 'group-title=' in stream_url:
            channel['Stream URL'] = _strip_corruption_markers(stream_url)
            fixed_count += 1
            log_message(f"Fixed corrupted URL for: {channel.get('Stream name')}", "INFO")

        # Clean corrupted Logo URL.
        logo_url = channel.get('Logo', '')
        if logo_url and ('group-title=' in logo_url or '#EXTINF' in logo_url):
            channel['Logo'] = _strip_corruption_markers(logo_url)
            fixed_count += 1
            log_message(f"Fixed corrupted logo for: {channel.get('Stream name')}", "INFO")

        cleaned_channels.append(channel)

    if fixed_count > 0:
        log_message(f"Fixed {fixed_count} corrupted entries, rewriting file...", "INFO")

        # Best-effort backup before rewriting; a failed backup should not
        # block the repair, but it must not be silently ignored either.
        backup_name = f"{CHANNELS_FILE}.backup.{datetime.now().strftime('%Y%m%d_%H%M%S')}"
        try:
            import shutil
            shutil.copy2(CHANNELS_FILE, backup_name)
        except OSError as e:
            log_message(f"Could not create backup {backup_name}: {e}", "WARNING")

        with open(CHANNELS_FILE, 'w', encoding='utf-8') as f:
            for i, channel in enumerate(cleaned_channels):
                if i > 0:
                    f.write("\n\n")
                f.write(convert_to_channels_txt_block(channel))

        log_message("Successfully cleaned and rewrote channels.txt", "INFO")
    else:
        log_message("No corrupted entries found to fix", "INFO")
|
||||||
"""Re-detect countries for existing channels - FORCE UPDATE ALL."""
|
"""Re-detect countries for existing channels - FORCE UPDATE ALL."""
|
||||||
if not os.path.exists(CHANNELS_FILE):
|
if not os.path.exists(CHANNELS_FILE):
|
||||||
return
|
return
|
||||||
|
@ -390,12 +452,19 @@ def process_import():
|
||||||
content = f.read()
|
content = f.read()
|
||||||
|
|
||||||
# Pre-process the content to fix common issues
|
# Pre-process the content to fix common issues
|
||||||
log_message("Pre-processing M3U content to fix common issues...", "INFO")
|
log_message("Pre-processing M3U content with AGGRESSIVE fixing...", "INFO")
|
||||||
|
|
||||||
# Fix missing newlines between entries
|
# Fix the most common issue: missing newlines between URL and next EXTINF
|
||||||
content = re.sub(r'(https?://[^\s]+)(#EXTINF)', r'\1\n\2', content)
|
content = re.sub(r'(https?://[^\s#]+)(#EXTINF)', r'\1\n\2', content)
|
||||||
content = re.sub(r'(\.m3u8?)(#EXTINF)', r'\1\n\2', content)
|
content = re.sub(r'(\.m3u8?)(#EXTINF)', r'\1\n\2', content)
|
||||||
content = re.sub(r'(\.ts)(#EXTINF)', r'\1\n\2', content)
|
content = re.sub(r'(\.ts)(#EXTINF)', r'\1\n\2', content)
|
||||||
|
content = re.sub(r'(\d+)(#EXTINF)', r'\1\n\2', content)
|
||||||
|
|
||||||
|
# Fix missing newlines between different sections
|
||||||
|
content = re.sub(r'(group-title="[^"]*")([A-Z][a-z]+:)', r'\1\n#EXTINF:-1 \2', content)
|
||||||
|
|
||||||
|
# Ensure EXTINF always starts on new line
|
||||||
|
content = re.sub(r'([^#\n])#EXTINF', r'\1\n#EXTINF', content)
|
||||||
|
|
||||||
# Split into lines after fixing
|
# Split into lines after fixing
|
||||||
lines = content.split('\n')
|
lines = content.split('\n')
|
||||||
|
@ -563,6 +632,9 @@ def generate_playlist():
|
||||||
settings = load_settings()
|
settings = load_settings()
|
||||||
group_overrides = load_group_overrides()
|
group_overrides = load_group_overrides()
|
||||||
|
|
||||||
|
# First clean any existing corrupted entries
|
||||||
|
clean_corrupted_channels()
|
||||||
|
|
||||||
update_existing_channels_with_country_detection()
|
update_existing_channels_with_country_detection()
|
||||||
|
|
||||||
imported_channels = process_import()
|
imported_channels = process_import()
|
||||||
|
|
Loading…
Add table
Add a link
Reference in a new issue