Update scripts/generate_playlist.py
All checks were successful
📺 Generate M3U Playlist / build (push) Successful in 1m35s
This commit is contained in:
parent 6f525d5451
commit c09b534a7a

1 changed file with 23 additions and 19 deletions

scripts/generate_playlist.py

@@ -137,11 +137,12 @@ def detect_country_from_channel(channel_name, epg_id="", logo_url=""):
     # Check patterns - order matters, more specific first
     # First check for country prefixes (more specific)
     country_prefixes = {
-        "🇺🇦 Ukraine": ["ua |", "ukraine"],
-        "🇵🇱 Poland": ["pl |", "poland"],
-        "🇹🇷 Turkey": ["tr |", "turkey"],
-        "🇲🇾 Malaysia": ["my:", "malaysia"],
-        "🇬🇧 United Kingdom": ["uk:", "united kingdom"]
+        "🇺🇦 Ukraine": ["ua |"],
+        "🇵🇱 Poland": ["pl |"],
+        "🇹🇷 Turkey": ["tr |"],
+        "🇲🇾 Malaysia": ["my:", "my |"],
+        "🇬🇧 United Kingdom": ["uk:", "uk |"],
+        "🇺🇸 United States": ["us:", "us |"]
     }
 
     for country, prefixes in country_prefixes.items():
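The hunk ends at the loop header, so the matching step itself is not shown in this commit. A minimal sketch of how such a prefix table could be consumed, assuming the channel name is lowercased and the first matching prefix wins; the helper name match_country_prefix is illustrative and not part of generate_playlist.py:

def match_country_prefix(channel_name, country_prefixes):
    # Hypothetical helper, shown only to illustrate one way the prefix
    # table above might be applied; not taken from the actual script.
    name = channel_name.lower().strip()
    for country, prefixes in country_prefixes.items():
        # Prefixes such as "uk:" or "us |" are expected at the start of the name.
        if any(name.startswith(prefix) for prefix in prefixes):
            return country
    return "Uncategorized"  # assumed fallback, matching the group name used elsewhere
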
@@ -312,13 +313,13 @@ def remove_duplicates(channels, settings):
     return unique_channels
 
 def update_existing_channels_with_country_detection():
-    """Re-detect countries for existing channels."""
+    """Re-detect countries for existing channels - FORCE UPDATE ALL."""
     if not os.path.exists(CHANNELS_FILE):
         return
 
     settings = load_settings()
 
-    log_message("Re-detecting countries for existing channels...", "INFO")
+    log_message("FORCE re-detecting countries for ALL existing channels...", "INFO")
 
     with open(CHANNELS_FILE, 'r', encoding='utf-8') as f:
         content = f.read()
@@ -332,25 +333,28 @@ def update_existing_channels_with_country_detection():
         channel = parse_channel_block(block)
         if channel:
             old_group = channel.get('Group', 'Uncategorized')
-            detected = detect_country_from_channel(
-                channel.get('Stream name', ''),
-                channel.get('EPG id', ''),
-                channel.get('Logo', '')
-            )
+            stream_name = channel.get('Stream name', '')
+            epg_id = channel.get('EPG id', '')
+            logo_url = channel.get('Logo', '')
 
-            if detected != "Uncategorized":
-                channel['Group'] = detected
-                if old_group != detected:
-                    changes += 1
-                    log_message(f"Updated: '{channel.get('Stream name')}' → {detected}", "INFO")
+            # FORCE detection for ALL channels, regardless of current group
+            detected = detect_country_from_channel(stream_name, epg_id, logo_url)
+
+            # Always update the group
+            channel['Group'] = detected
+            if old_group != detected:
+                changes += 1
+                log_message(f"FORCED UPDATE: '{stream_name}' from '{old_group}' to '{detected}'", "INFO")
 
             updated_channels.append(channel)
 
-    if changes > 0:
+    if updated_channels:
+        # Always rewrite the file
         backup_name = f"{CHANNELS_FILE}.backup.{datetime.now().strftime('%Y%m%d_%H%M%S')}"
         try:
             import shutil
             shutil.copy2(CHANNELS_FILE, backup_name)
             log_message(f"Created backup: {backup_name}", "INFO")
         except:
             pass
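Neither parse_channel_block nor the channels.txt layout appears in this diff; the keys used above ('Stream name', 'EPG id', 'Logo', 'Group') suggest one key = value pair per line within each block. A sketch under that assumption, purely to show the shape of data the force-update loop iterates over:

def parse_channel_block(block):
    # Sketch only: the real parse_channel_block is not shown in this commit.
    # Assumes a block of "Key = value" lines, for example:
    #   Stream name = UK | Example Channel
    #   Group = Uncategorized
    #   Logo = https://example.com/logo.png
    #   EPG id = example.uk
    channel = {}
    for line in block.strip().splitlines():
        if '=' in line:
            key, value = line.split('=', 1)
            channel[key.strip()] = value.strip()
    return channel or None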
|
@@ -360,7 +364,7 @@ def update_existing_channels_with_country_detection():
                 f.write("\n\n")
             f.write(convert_to_channels_txt_block(channel))
 
-    log_message(f"Updated {changes} channels with country detection", "INFO")
+    log_message(f"FORCE updated ALL {len(updated_channels)} channels ({changes} changes made)", "INFO")
 
 def process_import():
     """Process bulk M3U import with comprehensive filtering."""
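The write-back loop that the f.write context lines above belong to is only partially visible. A plausible shape, assuming CHANNELS_FILE is reopened for writing and channel blocks are separated by a blank line; the function wrapper and enumerate-based loop are inferred, not shown in the diff:

def rewrite_channels_file(updated_channels):
    # Inferred sketch of the rewrite step; CHANNELS_FILE and
    # convert_to_channels_txt_block come from the visible diff,
    # the wrapper function and loop structure are assumed.
    with open(CHANNELS_FILE, 'w', encoding='utf-8') as f:
        for i, channel in enumerate(updated_channels):
            if i > 0:
                f.write("\n\n")  # blank-line separator between channel blocks
            f.write(convert_to_channels_txt_block(channel))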
|