import re
import os
import json
import shutil
from datetime import datetime

# --- Simple Configuration ---
CHANNELS_FILE = 'channels.txt'
PLAYLIST_FILE = 'playlist.m3u'
IMPORT_FILE = 'bulk_import.m3u'
LOG_FILE = 'playlist_update.log'

# Config files (optional)
SETTINGS_FILE = 'config/settings.json'
GROUP_OVERRIDES_FILE = 'config/group_overrides.json'


def log_message(message, level="INFO"):
    """Logs messages to file and prints them."""
    timestamp = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
    formatted_message = f"[{timestamp}] {level}: {message}"
    try:
        with open(LOG_FILE, 'a', encoding='utf-8') as f:
            f.write(formatted_message + "\n")
    except Exception as e:
        print(f"ERROR: Could not write to log: {e}")
    print(formatted_message)


def load_settings():
    """Load settings with defaults."""
    default_settings = {
        "remove_duplicates": True,
        "sort_channels": True,
        "backup_before_import": True,
        "auto_cleanup_import": True,
        "auto_detect_country": True
    }
    if os.path.exists(SETTINGS_FILE):
        try:
            with open(SETTINGS_FILE, 'r', encoding='utf-8') as f:
                settings = json.load(f)
                return {**default_settings, **settings}
        except Exception as e:
            log_message(f"Could not load settings, using defaults: {e}", "WARNING")
    return default_settings


def load_group_overrides():
    """Load group overrides."""
    if os.path.exists(GROUP_OVERRIDES_FILE):
        try:
            with open(GROUP_OVERRIDES_FILE, 'r', encoding='utf-8') as f:
                return json.load(f)
        except Exception as e:
            log_message(f"Could not load group overrides: {e}", "WARNING")
    return {}


def detect_country_from_channel(channel_name, epg_id="", logo_url=""):
    """Simple keyword-based country detection from the channel name and EPG id."""
    # Convert to lowercase for easier matching
    name_lower = channel_name.lower()
    epg_lower = epg_id.lower()

    log_message(f"Detecting country for: '{channel_name}' (EPG: '{epg_id}')", "DEBUG")

    # UK Detection
    if ("sky" in name_lower or ".uk" in epg_lower or "british" in name_lower
            or "bbc" in name_lower or "itv" in name_lower):
        log_message(f"Detected UK for: {channel_name}", "INFO")
        return "🇬🇧 United Kingdom"

    # US Detection
    if ("usa" in name_lower or "us " in name_lower or ".us" in epg_lower or "america" in name_lower
            or "cnn" in name_lower or "espn" in name_lower or "fox" in name_lower):
        log_message(f"Detected US for: {channel_name}", "INFO")
        return "🇺🇸 United States"

    # Canada Detection
    if "canada" in name_lower or "cbc" in name_lower or ".ca" in epg_lower or "ctv" in name_lower:
        log_message(f"Detected Canada for: {channel_name}", "INFO")
        return "🇨🇦 Canada"

    # Germany Detection
    if "german" in name_lower or ".de" in epg_lower or "ard" in name_lower or "zdf" in name_lower:
        log_message(f"Detected Germany for: {channel_name}", "INFO")
        return "🇩🇪 Germany"

    # France Detection
    if "france" in name_lower or ".fr" in epg_lower or "tf1" in name_lower:
        log_message(f"Detected France for: {channel_name}", "INFO")
        return "🇫🇷 France"

    # No match found
    log_message(f"No country detected for: {channel_name}", "DEBUG")
    return "Uncategorized"


def apply_auto_country_detection(channel, group_overrides, settings):
    """
    Enhanced version of apply_group_overrides that includes auto-detection.
    """
    stream_name = channel.get('Stream name', '')
    epg_id = channel.get('EPG id', '')
    logo_url = channel.get('Logo', '')
    current_group = channel.get('Group', 'Uncategorized')

    # First try manual overrides (highest priority)
    stream_name_lower = stream_name.lower()
    for key, new_group in group_overrides.items():
        if key.lower() in stream_name_lower:
            channel['Group'] = new_group
            log_message(f"Manual override: '{stream_name}' → {new_group}", "DEBUG")
            return channel

    # If auto-detection is enabled, try it
    if settings.get('auto_detect_country', True):
        detected_country = detect_country_from_channel(stream_name, epg_id, logo_url)

        # Only override if we detected something specific (not "Uncategorized")
        if detected_country != "Uncategorized":
            channel['Group'] = detected_country
            log_message(f"Auto-detected: '{stream_name}' → {detected_country}", "INFO")
        else:
            # Keep existing group or set to Uncategorized
            if current_group in ['', 'Unknown', 'Other']:
                channel['Group'] = "Uncategorized"
    else:
        # Auto-detection disabled, use manual overrides only
        if current_group in ['', 'Unknown', 'Other']:
            channel['Group'] = "Uncategorized"

    return channel


def parse_channel_block(block):
    """Parse a channel block from channels.txt."""
    channel_data = {}
    lines = block.strip().split('\n')
    for line in lines:
        if '=' in line:
            key, value = line.split('=', 1)
            key = key.strip()
            value = value.strip()
            channel_data[key] = value
    return channel_data


def parse_m3u_entry(extinf_line, url_line):
    """Parse M3U entry."""
    channel = {}

    # Extract attributes
    tvg_id_match = re.search(r'tvg-id="([^"]*)"', extinf_line)
    tvg_logo_match = re.search(r'tvg-logo="([^"]*)"', extinf_line)
    group_title_match = re.search(r'group-title="([^"]*)"', extinf_line)
    tvg_name_match = re.search(r'tvg-name="([^"]*)"', extinf_line)

    channel['EPG id'] = tvg_id_match.group(1) if tvg_id_match else ''
    channel['Logo'] = tvg_logo_match.group(1) if tvg_logo_match else ''
    channel['Group'] = group_title_match.group(1) if group_title_match else 'Uncategorized'
    channel['TVG Name'] = tvg_name_match.group(1) if tvg_name_match else ''

    # Stream name: everything after the first comma
    stream_name_match = re.search(r',(.+)$', extinf_line)
    channel['Stream name'] = stream_name_match.group(1).strip() if stream_name_match else 'Unknown Channel'
    channel['Stream URL'] = url_line.strip()

    return channel


def convert_to_channels_txt_block(channel_data):
    """Convert to channels.txt format."""
    block = []
    block.append(f"Group = {channel_data.get('Group', 'Uncategorized')}")
    block.append(f"Stream name = {channel_data.get('Stream name', 'Unknown Channel')}")
    block.append(f"Logo = {channel_data.get('Logo', '')}")
    block.append(f"EPG id = {channel_data.get('EPG id', '')}")
    block.append(f"Stream URL = {channel_data.get('Stream URL', '')}")
    return "\n".join(block)


def get_channel_signature(channel):
    """Create unique signature for duplicate detection."""
    stream_name = channel.get('Stream name', '').strip().lower()
    stream_url = channel.get('Stream URL', '').strip().lower()

    # Clean name
    stream_name_clean = re.sub(r'\s+', ' ', stream_name)
    stream_name_clean = re.sub(r'[^\w\s]', '', stream_name_clean)

    return f"{stream_name_clean}|{stream_url}"


def remove_duplicates(channels, settings):
    """Remove duplicate channels."""
    if not settings.get('remove_duplicates', True):
        log_message("Duplicate removal disabled", "INFO")
        return channels

    seen_signatures = set()
    unique_channels = []
    duplicate_count = 0

    for channel in channels:
        signature = get_channel_signature(channel)
        if signature not in seen_signatures:
            seen_signatures.add(signature)
            unique_channels.append(channel)
        else:
            duplicate_count += 1

    if duplicate_count > 0:
        log_message(f"Removed {duplicate_count} duplicate channels", "INFO")
    else:
        log_message("No duplicates found", "INFO")

    return unique_channels


def update_existing_channels_with_country_detection():
    """Re-process existing channels.txt to apply country detection to old channels."""
    if not os.path.exists(CHANNELS_FILE):
        log_message("No channels.txt file found", "WARNING")
        return

    settings = load_settings()
    group_overrides = load_group_overrides()

    log_message("Starting to re-detect countries for ALL existing channels...", "INFO")

    # Read existing channels
    with open(CHANNELS_FILE, 'r', encoding='utf-8') as f:
        content = f.read()

    log_message(f"Read {len(content)} characters from channels.txt", "DEBUG")

    channel_blocks = re.split(r'\n\s*\n+', content.strip())
    log_message(f"Found {len(channel_blocks)} channel blocks", "INFO")

    updated_channels = []
    changes_made = 0

    for i, block in enumerate(channel_blocks):
        if block.strip():
            channel = parse_channel_block(block)
            if channel:
                old_group = channel.get('Group', 'Uncategorized')
                stream_name = channel.get('Stream name', 'Unknown')
                epg_id = channel.get('EPG id', '')

                log_message(f"Processing channel {i+1}: '{stream_name}' (currently in '{old_group}')", "DEBUG")

                # Force apply auto-detection regardless of current group
                detected_country = detect_country_from_channel(stream_name, epg_id, "")

                # Always update if we detected something specific
                if detected_country != "Uncategorized":
                    channel['Group'] = detected_country
                    changes_made += 1
                    log_message(f"CHANGED: '{stream_name}' from '{old_group}' to '{detected_country}'", "INFO")
                else:
                    log_message(f"NO CHANGE: '{stream_name}' stays as '{old_group}'", "DEBUG")

                updated_channels.append(channel)

    # Always rewrite the file if we have channels
    if updated_channels:
        log_message(f"Rewriting channels.txt with {len(updated_channels)} channels ({changes_made} changes made)", "INFO")

        # Create backup
        backup_name = f"{CHANNELS_FILE}.backup.{datetime.now().strftime('%Y%m%d_%H%M%S')}"
        try:
            shutil.copy2(CHANNELS_FILE, backup_name)
            log_message(f"Created backup: {backup_name}", "INFO")
        except Exception as e:
            log_message(f"Could not create backup: {e}", "WARNING")

        # Write updated channels
        try:
            with open(CHANNELS_FILE, 'w', encoding='utf-8') as f:
                for i, channel in enumerate(updated_channels):
                    if i > 0:
                        f.write("\n\n")
                    block_content = convert_to_channels_txt_block(channel)
                    f.write(block_content)

            log_message("Successfully rewrote channels.txt with country detection", "INFO")
        except Exception as e:
            log_message(f"ERROR writing channels.txt: {e}", "ERROR")
    else:
        log_message("No channels found to update", "WARNING")


def process_import():
    """Process bulk import file."""
    settings = load_settings()
    group_overrides = load_group_overrides()

    if not os.path.exists(IMPORT_FILE):
        log_message(f"No {IMPORT_FILE} found, skipping import", "INFO")
        return []

    log_message(f"Processing {IMPORT_FILE}...", "INFO")

    imported_channels = []

    try:
        with open(IMPORT_FILE, 'r', encoding='utf-8') as f:
            lines = f.readlines()

        log_message(f"Found {len(lines)} lines in import file", "INFO")

        i = 0
        while i < len(lines):
            line = lines[i].strip()

            if line.startswith('#EXTINF:'):
                if i + 1 < len(lines):
                    extinf_line = line
                    url_line = lines[i+1].strip()

                    if not url_line or url_line.startswith('#'):
                        i += 1
                        continue

                    channel_data = parse_m3u_entry(extinf_line, url_line)
                    channel_data = apply_auto_country_detection(channel_data, group_overrides, settings)
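                    # Keep only entries that have both a display name and a stream URL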
                    if channel_data.get('Stream name') and channel_data.get('Stream URL'):
                        imported_channels.append(channel_data)

                    i += 2
                else:
                    i += 1
            else:
                i += 1

        log_message(f"Parsed {len(imported_channels)} channels from import", "INFO")

        # Remove duplicates from import
        if imported_channels:
            imported_channels = remove_duplicates(imported_channels, settings)

            # Check existing channels
            existing_channels = []
            if os.path.exists(CHANNELS_FILE):
                with open(CHANNELS_FILE, 'r', encoding='utf-8') as f:
                    content = f.read()
                channel_blocks = re.split(r'\n\s*\n+', content.strip())
                for block in channel_blocks:
                    if block.strip():
                        existing_channels.append(parse_channel_block(block))

            existing_signatures = {get_channel_signature(ch) for ch in existing_channels}
            new_channels = []
            for channel in imported_channels:
                if get_channel_signature(channel) not in existing_signatures:
                    new_channels.append(channel)

            imported_channels = new_channels
            log_message(f"Final import: {len(imported_channels)} new channels", "INFO")

        # Write to channels.txt
        if imported_channels:
            lines_before = 0
            if os.path.exists(CHANNELS_FILE):
                with open(CHANNELS_FILE, 'r', encoding='utf-8') as f:
                    lines_before = len(f.readlines())

            with open(CHANNELS_FILE, 'a', encoding='utf-8') as f:
                for i, channel in enumerate(imported_channels):
                    if i > 0 or lines_before > 0:
                        f.write("\n\n")
                    block_content = convert_to_channels_txt_block(channel)
                    f.write(block_content)

            log_message(f"Successfully imported {len(imported_channels)} channels", "INFO")
        else:
            log_message("No new channels to import", "INFO")

    except Exception as e:
        log_message(f"Error processing import: {e}", "ERROR")
        return imported_channels

    # Clean up import file
    if settings.get('auto_cleanup_import', True):
        try:
            os.remove(IMPORT_FILE)
            log_message(f"Cleaned up {IMPORT_FILE}", "INFO")
        except Exception as e:
            log_message(f"Could not remove {IMPORT_FILE}: {e}", "WARNING")

    return imported_channels


def generate_playlist():
    """Main function."""
    # Clear log
    if os.path.exists(LOG_FILE):
        open(LOG_FILE, 'w').close()

    log_message("Starting playlist generation...", "INFO")

    settings = load_settings()
    group_overrides = load_group_overrides()

    log_message(f"Settings loaded: {settings}", "INFO")
    log_message(f"Group overrides loaded: {group_overrides}", "INFO")

    # FIRST: Update existing channels with country detection
    update_existing_channels_with_country_detection()

    # Process import
    imported_channels = process_import()
    log_message(f"Import returned {len(imported_channels)} channels", "INFO")

    # Read channels.txt (now with updated countries)
    if not os.path.exists(CHANNELS_FILE):
        log_message(f"Error: {CHANNELS_FILE} not found", "ERROR")
        return

    with open(CHANNELS_FILE, 'r', encoding='utf-8') as f:
        content = f.read()

    # Parse channels
    channel_blocks = re.split(r'\n\s*\n+', content.strip())
    parsed_channels = []

    for block in channel_blocks:
        if block.strip():
            channel = parse_channel_block(block)
            if channel:
                # Country detection already applied above, just load the channels
                parsed_channels.append(channel)

    log_message(f"Parsed {len(parsed_channels)} channels", "INFO")

    # Remove duplicates
    parsed_channels = remove_duplicates(parsed_channels, settings)

    # Sort channels
    if settings.get('sort_channels', True):
        parsed_channels.sort(key=lambda x: (x.get('Group', '').lower(), x.get('Stream name', '').lower()))
        log_message("Channels sorted by country and name", "INFO")

    # Build M3U
    m3u_lines = ["#EXTM3U"]
    valid_channels = 0

    # Count channels by country for stats
    country_stats = {}

    for channel in parsed_channels:
        stream_name = channel.get('Stream name', '')
        group_name = channel.get('Group', 'Uncategorized')
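        # Remaining per-channel fields; entries missing a name or URL are skipped below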
        logo_url = channel.get('Logo', '')
        epg_id = channel.get('EPG id', '')
        stream_url = channel.get('Stream URL', '')

        if not stream_name or not stream_url:
            continue

        extinf_attrs = [
            f'tvg-id="{epg_id}"',
            f'tvg-logo="{logo_url}"',
            f'group-title="{group_name}"',
            f'tvg-name="{stream_name}"'
        ]

        extinf_line = f"#EXTINF:-1 {' '.join(extinf_attrs)},{stream_name}"
        m3u_lines.append(extinf_line)
        m3u_lines.append(stream_url)
        valid_channels += 1

        # Count by country
        country_stats[group_name] = country_stats.get(group_name, 0) + 1

    # Write M3U
    try:
        with open(PLAYLIST_FILE, 'w', encoding='utf-8') as f:
            for line in m3u_lines:
                f.write(line + '\n')

        log_message(f"Generated {PLAYLIST_FILE} with {valid_channels} channels", "INFO")

        # Log country statistics
        log_message(f"Channels by country: {dict(sorted(country_stats.items(), key=lambda x: x[1], reverse=True))}", "INFO")

    except Exception as e:
        log_message(f"Error writing playlist: {e}", "ERROR")

    log_message("Playlist generation complete", "INFO")


if __name__ == "__main__":
    generate_playlist()