#!/usr/bin/env python3
"""
Multi-Bridge Network Monitor for Amateur Radio C-Bridge Networks.
Queries multiple C-Bridges and generates reports on repeater associations.
Integrates with RadioID.net for additional repeater information.
Enhanced with PostgreSQL database integration to infer talkgroups from call history.

2020-02-21 KB1B nedecn@kb1b.org
"""

import requests
import json
import sys
import re
from datetime import datetime, timedelta
from typing import Dict, List, Tuple, Optional
import argparse
import time

debug = False

# PostgreSQL support (optional)
try:
    import psycopg2
    from psycopg2.extras import RealDictCursor
    PSYCOPG2_AVAILABLE = True
except ImportError:
    PSYCOPG2_AVAILABLE = False
    print("Warning: psycopg2 not available. Install with: pip install psycopg2-binary")


class DatabaseTalkgroupInference:
    """Infer talkgroups and timeslots from PostgreSQL call history."""
    
    def __init__(self, db_config: Dict):
        """
        Initialize database connection.
        
        Args:
            db_config: Dict with 'host', 'database', 'user', 'password', 'port', 'table'
        """
        if not PSYCOPG2_AVAILABLE:
            raise ImportError("psycopg2 is required for database features")
        
        self.db_config = db_config
        self.table_name = db_config.get('table', 'nedecn')  # Default to 'nedecn' if not specified
        self.conn = None
        self.connect()
    
    def connect(self):
        """Establish database connection."""
        try:
            self.conn = psycopg2.connect(
                host=self.db_config.get('host', 'localhost'),
                database=self.db_config.get('database', 'radio'),
                user=self.db_config.get('user', 'radio'),
                password=self.db_config.get('password', ''),
                port=self.db_config.get('port', 5432)
            )
            print(f"✓ Connected to PostgreSQL database: {self.db_config.get('database')}")
            print(f"  Using table: {self.table_name}")
        except Exception as e:
            print(f"✗ Database connection failed: {e}")
            raise
    
    def close(self):
        """Close database connection."""
        if self.conn:
            self.conn.close()
    
    def infer_talkgroups_for_repeater(self, repeater_id: str, days: int = 90, min_calls: int = 5) -> Dict:
        """
        Infer talkgroups used by a repeater based on call history.
        
        Args:
            repeater_id: Repeater DMR ID
            days: Number of days of history to analyze
            min_calls: Minimum number of calls to include talkgroup
            
        Returns:
            Dict with 'talkgroups' list (no timeslot inference)
        """
        if not self.conn:
            return {'talkgroups': []}
        
        try:
            cursor = self.conn.cursor(cursor_factory=RealDictCursor)
            
            # Calculate date range
            end_date = datetime.now().date()
            start_date = end_date - timedelta(days=days)
            
            # Query for talkgroup usage on this repeater
            query = f"""
                SELECT 
                    tgid,
                    talk as talkgroup_name,
                    COUNT(*) as call_count,
                    COUNT(DISTINCT dmrid) as unique_users,
                    MIN(ymd) as first_seen,
                    MAX(ymd) as last_seen
                FROM {self.table_name}
                WHERE rptr = %s
                    AND ymd >= %s
                    AND ymd <= %s
                    AND tgid IS NOT NULL
                    AND tgid != ''
                GROUP BY tgid, talk
                HAVING COUNT(*) >= %s
                ORDER BY call_count DESC
            """
            
            cursor.execute(query, (repeater_id, start_date, end_date, min_calls))
            results = cursor.fetchall()
            
            # Return all talkgroups without timeslot organization
            talkgroups = []
            
            for row in results:
                tg_info = {
                    'tgid': row['tgid'],
                    'name': row['talkgroup_name'] or f"TG{row['tgid']}",
                    'call_count': row['call_count'],
                    'unique_users': row['unique_users'],
                    'first_seen': row['first_seen'].isoformat() if row['first_seen'] else None,
                    'last_seen': row['last_seen'].isoformat() if row['last_seen'] else None
                }
                talkgroups.append(tg_info)
            
            cursor.close()
            
            if debug:
                print(f"  Found {len(talkgroups)} talkgroups for repeater {repeater_id}")
            
            return {'talkgroups': talkgroups}
            
        except Exception as e:
            print(f"  Warning: Database query failed for repeater {repeater_id}: {e}")
            return {'talkgroups': []}
    
    def get_repeater_activity_summary(self, repeater_id: str, days: int = 7) -> Dict:
        """
        Get activity summary for a repeater.
        
        Returns:
            Dict with total_calls, unique_users, active_days, last_activity
        """
        if not self.conn:
            return {}
        
        try:
            cursor = self.conn.cursor(cursor_factory=RealDictCursor)
            
            end_date = datetime.now().date()
            start_date = end_date - timedelta(days=days)
            
            query = f"""
                SELECT 
                    COUNT(*) as total_calls,
                    COUNT(DISTINCT dmrid) as unique_users,
                    COUNT(DISTINCT ymd) as active_days,
                    MAX(ymd) as last_activity
                FROM {self.table_name}
                WHERE rptr = %s
                    AND ymd >= %s
                    AND ymd <= %s
            """
            
            cursor.execute(query, (repeater_id, start_date, end_date))
            result = cursor.fetchone()
            cursor.close()
            
            return {
                'total_calls': result['total_calls'] or 0,
                'unique_users': result['unique_users'] or 0,
                'active_days': result['active_days'] or 0,
                'last_activity': result['last_activity'].isoformat() if result['last_activity'] else None
            }
            
        except Exception as e:
            if debug:
                print(f"  Warning: Activity query failed for {repeater_id}: {e}")
            return {}

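# Minimal usage sketch for DatabaseTalkgroupInference (placeholder values;
# assumes a reachable PostgreSQL instance with the table layout noted above):
#   db = DatabaseTalkgroupInference({'host': 'localhost', 'database': 'radio',
#                                    'user': 'radio', 'password': 'secret',
#                                    'port': 5432, 'table': 'nedecn'})
#   tgs = db.infer_talkgroups_for_repeater('310910', days=30, min_calls=3)
#   stats = db.get_repeater_activity_summary('310910', days=7)
#   db.close()
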

class RadioIDLookup:
    """Handle lookups to RadioID.net API."""
    
    def __init__(self, cache_file: str = "radioid_cache.json"):
        self.cache_file = cache_file
        self.cache = {}
        self.network_cache = {}
        self.talkgroup_cache = {}
        self.load_cache()
        self.base_url = "https://radioid.net/api/dmr"
    
    def parse_details_field(self, details: str) -> Dict[str, str]:
        """
        Parse the details field to extract timeslot and talkgroup information.
        
        Args:
            details: Details string from RadioID API containing talkgroup info
            
        Returns:
            Dict with ts1 and ts2 talkgroup information
        """
        if not details:
            return {}
        
        result = {'ts1': [], 'ts2': []}
        
        # Split by <br> tags
        lines = details.replace('<br>', '\n').split('\n')
        
        for line in lines:
            line = line.strip()
            
            # Look for Time Slot #1 patterns
            ts1_match = re.search(r'Time Slot #1.*?Group Call (\d+)\s*=\s*([^*\n]+)(\*)?', line)
            if ts1_match:
                tg_id = ts1_match.group(1)
                tg_name = ts1_match.group(2).strip()
                ptt_activated = '*' if ts1_match.group(3) else ''
                result['ts1'].append(f"TG{tg_id} {tg_name}{ptt_activated}")
            
            # Look for Time Slot #2 patterns
            ts2_match = re.search(r'Time Slot #2.*?Group Call (\d+)\s*=\s*([^*\n]+)(\*)?', line)
            if ts2_match:
                tg_id = ts2_match.group(1)
                tg_name = ts2_match.group(2).strip()
                ptt_activated = '*' if ts2_match.group(3) else ''
                result['ts2'].append(f"TG{tg_id} {tg_name}{ptt_activated}")
        
        # Join the talkgroups into readable strings
        return {
            'ts1': ', '.join(result['ts1']) if result['ts1'] else '',
            'ts2': ', '.join(result['ts2']) if result['ts2'] else ''
        }
    
    def load_cache(self):
        """Load cached RadioID lookups."""
        try:
            with open(self.cache_file, 'r') as f:
                cache_data = json.load(f)
                # Handle new format with separate caches
                if isinstance(cache_data, dict) and 'repeaters' in cache_data:
                    self.cache = cache_data.get('repeaters', {})
                    self.network_cache = cache_data.get('networks', {})
                    self.talkgroup_cache = cache_data.get('talkgroups', {})
                else:
                    # Old format - just repeater data
                    self.cache = cache_data
        except FileNotFoundError:
            pass
        except Exception as e:
            print(f"Warning: Could not load cache: {e}")
    
    def save_cache(self):
        """Save RadioID cache to disk."""
        try:
            cache_data = {
                'repeaters': self.cache,
                'networks': self.network_cache,
                'talkgroups': self.talkgroup_cache,
                'timestamp': datetime.now().isoformat()
            }
            with open(self.cache_file, 'w') as f:
                json.dump(cache_data, f, indent=2)
        except Exception as e:
            print(f"Warning: Could not save cache: {e}")
    
    def lookup_repeater(self, dmr_id: str) -> Optional[Dict]:
        """
        Look up repeater information by DMR ID.
        
        Args:
            dmr_id: DMR ID as string (e.g., "310904")
            
        Returns:
            Dict with repeater info or None
        """
        # Check cache first
        if dmr_id in self.cache:
            cached = self.cache[dmr_id]
            if debug and cached:
                ts1_len = len(cached.get('ts1', '')) if cached.get('ts1') else 0
                ts2_len = len(cached.get('ts2', '')) if cached.get('ts2') else 0
                print(f"    (cached - TS1: {ts1_len} chars, TS2: {ts2_len} chars)")
            return cached
        
        try:
            # Query RadioID.net API
            url = f"{self.base_url}/repeater/?id={dmr_id}"
            response = requests.get(url, timeout=10)
            
            if response.status_code == 200:
                data = response.json()
                if data and 'results' in data and len(data['results']) > 0:
                    result = data['results'][0]
                    
                    # Parse details field for timeslot data
                    details = result.get('details', '')
                    
                    if debug and details:
                        print(f"    Details field length: {len(details)} chars")
                    
                    ts_data = self.parse_details_field(details)
                    
                    if debug:
                        print(f"    Parsed TS1: {len(ts_data.get('ts1', ''))} chars")
                        print(f"    Parsed TS2: {len(ts_data.get('ts2', ''))} chars")
                    
                    info = {
                        'callsign': result.get('callsign', ''),
                        'city': result.get('city', ''),
                        'state': result.get('state', ''),
                        'country': result.get('country', ''),
                        'frequency': result.get('frequency', ''),
                        'color_code': result.get('color_code', ''),
                        'lat': result.get('lat', ''),
                        'lng': result.get('lng', ''),
                        'trustee': result.get('trustee', ''),
                        'details': details,
                        'ipsc_network': result.get('ipsc_network', ''),
                        'ts1': ts_data.get('ts1', ''),
                        'ts2': ts_data.get('ts2', ''),
                    }
                    
                    # Look up IPSC network details if available
                    if info['ipsc_network']:
                        network_info = self.lookup_network(info['ipsc_network'])
                        if network_info:
                            info['network_details'] = network_info
                    
                    self.cache[dmr_id] = info
                    return info
            
            # Not found or error
            self.cache[dmr_id] = None
            return None
            
        except Exception as e:
            print(f"  Warning: RadioID lookup failed for {dmr_id}: {e}")
            return None
        finally:
            # Be nice to the API - small delay
            time.sleep(0.1)
    
    def lookup_network(self, network_id: str) -> Optional[Dict]:
        """
        Look up IPSC network information.
        
        Args:
            network_id: Network ID as string
            
        Returns:
            Dict with network info or None
        """
        if not network_id:
            return None
        
        # Check cache
        if network_id in self.network_cache:
            return self.network_cache[network_id]
        
        try:
            url = f"{self.base_url}/network/?id={network_id}"
            response = requests.get(url, timeout=10)
            
            if response.status_code == 200:
                data = response.json()
                if data and 'results' in data and len(data['results']) > 0:
                    result = data['results'][0]
                    info = {
                        'name': result.get('name', ''),
                        'country': result.get('country', ''),
                        'region': result.get('region', ''),
                    }
                    self.network_cache[network_id] = info
                    return info
            
            return None
            
        except Exception:
            return None
        finally:
            time.sleep(0.1)
    
    def extract_dmr_id(self, text: str) -> Optional[str]:
        """
        Extract DMR ID from text like "W1NLK - Norwalk CT USA -- 310910"
        
        Returns:
            DMR ID as string or None
        """
        if not text:
            return None
        
        # Look for pattern: -- followed by digits
        match = re.search(r'--\s*(\d+)', text)
        if match:
            return match.group(1)
        
        # Also try peer_id format
        match = re.search(r'\b(\d{6,7})\b', text)
        if match:
            return match.group(1)
        
        return None


class RepeaterData:
    """Merged data from C-Bridge and RadioID.net"""
    
    def __init__(self):
        self.callsign = None
        self.location = None
        self.peer_id = None
        self.connected_to = None
        self.frequency = None
        self.color_code = None
        self.city = None
        self.state = None
        self.country = None
        self.ipsc_network = None
        self.network_name = None
        self.network_country = None
        self.ts1 = None
        self.ts2 = None
        self.talkgroups_inferred = None  # Inferred from database (no timeslot)
        self.db_talkgroups = None  # Full database talkgroup data
        self.activity_stats = None  # Activity statistics from database
        self.bridges = []  # List of bridges this repeater is on
        self.conflicts = {}  # Track conflicting data
    
    def merge_cbridge_data(self, bridge_name: str, peer_info: Dict):
        """Merge data from C-Bridge"""
        self.bridges.append(bridge_name)
        
        if not self.location:
            self.location = peer_info.get('location', '')
        
        if not self.peer_id:
            self.peer_id = peer_info.get('peer_id', '')
        
        if not self.connected_to:
            self.connected_to = peer_info.get('connected_to', '')
    
    def merge_radioid_data(self, radioid_info: Dict):
        """Merge data from RadioID.net"""
        if not radioid_info:
            return
        
        # Extract callsign from location if not set
        if not self.callsign and self.location:
            match = re.match(r'^([A-Z0-9]+)', self.location)
            if match:
                self.callsign = match.group(1)
        
        # Merge RadioID data, checking for conflicts
        for field in ['frequency', 'color_code', 'city', 'state', 'country', 'ts1', 'ts2']:
            radioid_value = radioid_info.get(field, '')
            if radioid_value:
                current_value = getattr(self, field)
                if current_value and current_value != radioid_value:
                    self.conflicts[field] = {'cbridge': current_value, 'radioid': radioid_value}
                setattr(self, field, radioid_value)
        
        # Network information
        if radioid_info.get('ipsc_network'):
            self.ipsc_network = radioid_info['ipsc_network']
            network_details = radioid_info.get('network_details', {})
            if network_details:
                self.network_name = network_details.get('name', '')
                self.network_country = network_details.get('country', '')
    
    def merge_database_talkgroups(self, db_talkgroups: Dict, activity_stats: Dict):
        """
        Merge inferred talkgroup data from database.
        
        Args:
            db_talkgroups: Dict with 'talkgroups' list (no timeslot designation)
            activity_stats: Activity statistics dict
        """
        self.db_talkgroups = db_talkgroups
        self.activity_stats = activity_stats
        
        # Create formatted string for talkgroups, excluding those already in RadioID data
        if db_talkgroups.get('talkgroups'):
            # Extract talkgroup IDs from RadioID.net data (ts1 and ts2)
            radioid_tg_ids = set()
            
            # Parse ts1 RadioID data
            if self.ts1:
                # Extract TG numbers from strings like "TG3131 Connecticut, TG31 North America"
                for match in re.finditer(r'TG(\d+)', self.ts1):
                    radioid_tg_ids.add(match.group(1))
            
            # Parse ts2 RadioID data
            if self.ts2:
                for match in re.finditer(r'TG(\d+)', self.ts2):
                    radioid_tg_ids.add(match.group(1))
            
            # Build list of talkgroups not already in RadioID data
            tg_items = []
            for tg in db_talkgroups['talkgroups'][:20]:  # Check more, display up to 10 unique
                # Skip if this talkgroup is already in RadioID data
                if tg['tgid'] not in radioid_tg_ids:
                    tg_items.append(f"TG{tg['tgid']} {tg['name']} ({tg['call_count']} calls)")
                    if len(tg_items) >= 10:  # Limit to top 10 unique
                        break
            
            self.talkgroups_inferred = ', '.join(tg_items) if tg_items else None
    
    def get_formatted_location(self) -> str:
        """Get formatted location string."""
        # Try to parse from connected_to field first
        if self.connected_to:
            match = re.search(r'-\s+([^-]+?)\s+--', self.connected_to)
            if match:
                return match.group(1).strip()
        
        # Build from city/state/country
        if self.city:
            location_parts = [self.city]
            
            if self.state:
                # Common state abbreviations
                state_abbrev = {
                    'Connecticut': 'CT', 'Massachusetts': 'MA', 'Maine': 'ME', 'New Hampshire': 'NH',
                    'Vermont': 'VT', 'Rhode Island': 'RI', 'New York': 'NY', 'New Jersey': 'NJ',
                    'Pennsylvania': 'PA', 'California': 'CA', 'Texas': 'TX', 'Florida': 'FL',
                    'Ohio': 'OH', 'Michigan': 'MI', 'Illinois': 'IL', 'Indiana': 'IN',
                    'Wisconsin': 'WI', 'Minnesota': 'MN', 'Iowa': 'IA', 'Missouri': 'MO'
                }
                location_parts.append(state_abbrev.get(self.state, self.state))
            
            if self.country:
                if 'united' in self.country.lower():
                    location_parts.append('USA')
                elif 'canada' in self.country.lower():
                    location_parts.append('CAN')
                else:
                    location_parts.append(self.country[:3].upper())
            
            return ' '.join(location_parts)
        
        # Fallback to location field
        return self.location or '—'
    
    def to_dict(self) -> Dict:
        """Convert to dictionary for JSON export"""
        base_dict = {
            'callsign': self.callsign,
            'location': self.location,
            'formatted_location': self.get_formatted_location(),
            'peer_id': self.peer_id,
            'connected_to': self.connected_to,
            'frequency': self.frequency,
            'color_code': self.color_code,
            'city': self.city,
            'state': self.state,
            'country': self.country,
            'ipsc_network': self.ipsc_network,
            'network_name': self.network_name,
            'network_country': self.network_country,
            'ts1': self.ts1,
            'ts2': self.ts2,
            'talkgroups_inferred': self.talkgroups_inferred,
            'bridges': self.bridges,
            'conflicts': self.conflicts
        }
        
        # Add database talkgroups and activity if available
        if self.db_talkgroups:
            base_dict['db_talkgroups'] = self.db_talkgroups
        
        if self.activity_stats:
            base_dict['activity_stats'] = self.activity_stats
        
        return base_dict


class CBridgeMonitor:
    """Monitor multiple C-Bridges and track repeater associations."""
    
    def __init__(self, timeout: int = 10, use_radioid: bool = True, db_config: Optional[Dict] = None,
                 db_days: int = 90, db_min_calls: int = 5):
        self.timeout = timeout
        self.bridges = {}
        self.repeaters = {}  # Unified repeater data by peer_id
        self.use_radioid = use_radioid
        self.radioid = RadioIDLookup() if use_radioid else None
        self.db = None
        self.db_days = db_days  # Days of call history to analyze for talkgroup inference
        self.db_min_calls = db_min_calls  # Minimum calls required to report a talkgroup
        
        # Initialize database connection if config provided
        if db_config and PSYCOPG2_AVAILABLE:
            try:
                self.db = DatabaseTalkgroupInference(db_config)
            except Exception as e:
                print(f"Warning: Could not initialize database: {e}")
        
    def parse_cbridge_data(self, content: str) -> Dict:
        """Parse C-Bridge peer watch data into structured dictionary."""
        data_dict = {}
        
        for line in content.strip().split('\n'):
            line = line.strip()
            if not line or line.startswith('#') or '=' not in line:
                continue
            
            parts = line.split('=', 1)
            if len(parts) != 2:
                continue
            
            key_part = parts[0]
            value_part = parts[1]
            
            # Parse key: Callsign-Location\tID\f/R?m
            key_components = key_part.split('\t')
            
            callsign_location = key_components[0]
            id_and_rest = key_components[1] if len(key_components) > 1 else ""
            peer_id = id_and_rest.split('\f')[0] if '\f' in id_and_rest else id_and_rest
            
            # Handle case where the peer_id is not in tab-separated format,
            # e.g., "KB1B - Cambridge MA -- 313500" instead of "KB1B - Cambridge MA\t313500"
            if not peer_id:
                # Try to extract ID from callsign_location using the "-- ID" pattern
                match = re.search(r'--\s*(\d{6,7})\s*$', callsign_location)
                if match:
                    peer_id = match.group(1)
                    if debug:
                        print(f"  Recovered peer_id {peer_id} from location string")
            
            # Skip if we still don't have a peer_id
            if not peer_id:
                if debug:
                    print(f"  Warning: Could not extract peer_id from: {callsign_location}")
                continue
            
            # Parse value to extract connected peer info
            value_parts = value_part.split('\t')
            connected_peer = None
            
            for part in value_parts:
                if re.search(r'[A-Z0-9]{2,6}\s*-\s*\w+', part):
                    connected_peer = part.split('\f')[0].strip()
                    break
            
            entry = {
                'peer_id': peer_id,
                'location': callsign_location,
                'connected_to': connected_peer if connected_peer else None,
                'raw_data': value_part
            }
            
            # Use peer_id as the dictionary key for proper deduplication
            data_dict[peer_id] = entry
        
        return data_dict
    
    def fetch_bridge_data(self, bridge_name: str, url: str) -> Tuple[bool, Dict]:
        """
        Fetch data from a single C-Bridge.
        
        Returns:
            Tuple of (success, data_dict)
        """
        try:
            response = requests.get(url, timeout=self.timeout)
            response.raise_for_status()
            
            content = response.text
            
            # Try JSON first, then custom format
            try:
                data = json.loads(content)
            except json.JSONDecodeError:
                data = self.parse_cbridge_data(content)
            
            return True, data
            
        except requests.exceptions.Timeout:
            print(f"  ✗ Timeout fetching {bridge_name}")
            return False, {}
        except requests.exceptions.RequestException as e:
            print(f"  ✗ Error fetching {bridge_name}: {e}")
            return False, {}
        except Exception as e:
            print(f"  ✗ Unexpected error with {bridge_name}: {e}")
            return False, {}
    
    def merge_repeater_data(self, bridge_name: str, peer_info: Dict):
        """Merge repeater data from a bridge into unified repeater database"""
        peer_id = peer_info.get('peer_id', '')
        
        if not peer_id:
            return
        
        # Create or get repeater entry
        if peer_id not in self.repeaters:
            self.repeaters[peer_id] = RepeaterData()
        
        repeater = self.repeaters[peer_id]
        repeater.merge_cbridge_data(bridge_name, peer_info)
        
        # Enrich with RadioID data if enabled
        if self.use_radioid and self.radioid and not repeater.ts1:
            # Extract DMR ID from connected_to field
            dmr_id = None
            connected_to = peer_info.get('connected_to', '')
            
            if connected_to:
                match = re.search(r'--\s*(\d+)', connected_to)
                if match:
                    dmr_id = match.group(1)
            
            if dmr_id:
                if debug:
                    print(f"  Looking up RadioID for DMR ID: {dmr_id} ({peer_info.get('location')})")
                radioid_info = self.radioid.lookup_repeater(dmr_id)
                if radioid_info:
                    repeater.merge_radioid_data(radioid_info)
        
        # Enrich with database talkgroup inference if available
        if self.db:
            # Extract repeater ID for database lookup
            dmr_id = None
            connected_to = peer_info.get('connected_to', '')
            
            if connected_to:
                match = re.search(r'--\s*(\d+)', connected_to)
                if match:
                    dmr_id = match.group(1)
            
            if dmr_id:
                if debug:
                    print(f"  Querying database for talkgroups on repeater {dmr_id}")
                
                db_talkgroups = self.db.infer_talkgroups_for_repeater(
                    dmr_id, days=self.db_days, min_calls=self.db_min_calls)
                activity_stats = self.db.get_repeater_activity_summary(dmr_id, days=7)
                
                if db_talkgroups:
                    repeater.merge_database_talkgroups(db_talkgroups, activity_stats)
    
    def query_bridges(self, bridge_configs: List[Dict]):
        """
        Query multiple C-Bridges and aggregate data.
        
        Args:
            bridge_configs: List of dicts with 'name', 'url', and optional 'description'
        """
        print(f"\n{'='*80}")
        print(f"Querying {len(bridge_configs)} C-Bridges...")
        if self.use_radioid:
            print("RadioID.net enrichment enabled")
        if self.db:
            print("PostgreSQL database talkgroup inference enabled")
        print(f"{'='*80}\n")
        
        for config in bridge_configs:
            bridge_name = config['name']
            url = config['url']
            
            print(f"Fetching {bridge_name}...", end=' ')
            success, data = self.fetch_bridge_data(bridge_name, url)
            
            if success:
                print(f"✓ ({len(data)} peers)")
                self.bridges[bridge_name] = {
                    'url': url,
                    'description': config.get('description', ''),
                    'peers': data,
                    'timestamp': datetime.now().isoformat()
                }
                
                # Merge into unified repeater database
                for peer_name, peer_info in data.items():
                    self.merge_repeater_data(bridge_name, peer_info)
        
        # Save RadioID cache if used
        if self.use_radioid and self.radioid:
            self.radioid.save_cache()
            print(f"\nRadioID cache saved ({len(self.radioid.cache)} entries)")
    
    def generate_summary_report(self) -> str:
        """Generate a text summary report."""
        report = []
        report.append("="*80)
        report.append("C-BRIDGE NETWORK SUMMARY REPORT")
        report.append(f"Generated: {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}")
        report.append("="*80)
        report.append("")
        
        # Bridge summary
        report.append(f"Total Bridges Monitored: {len(self.bridges)}")
        report.append(f"Total Unique Repeaters: {len(self.repeaters)}")
        if self.db:
            report.append("Database Talkgroup Inference: ENABLED")
        report.append("")
        
        # Unified repeater table
        report.append("-"*80)
        report.append("UNIFIED REPEATER DATABASE:")
        report.append("-"*80)
        
        for peer_id in sorted(self.repeaters.keys()):
            repeater = self.repeaters[peer_id]
            status = "✓" if repeater.connected_to else "✗"
            
            report.append(f"\n{status} {repeater.location} (ID: {peer_id})")
            report.append(f"   Bridges: {', '.join(repeater.bridges)}")
            
            if repeater.connected_to:
                report.append(f"   Connected: {repeater.connected_to}")
            
            info_parts = []
            if repeater.frequency:
                info_parts.append(f"Freq: {repeater.frequency} MHz")
            if repeater.color_code:
                info_parts.append(f"CC{repeater.color_code}")
            if repeater.city and repeater.state:
                info_parts.append(f"{repeater.city}, {repeater.state}")
            if repeater.network_name:
                info_parts.append(f"Network: {repeater.network_name}")
            
            if info_parts:
                report.append(f"   {' | '.join(info_parts)}")
            
            # RadioID talkgroups
            if repeater.ts1:
                report.append(f"   RadioID TS1: {repeater.ts1}")
            if repeater.ts2:
                report.append(f"   RadioID TS2: {repeater.ts2}")
            
            # Database-inferred talkgroups (no timeslot designation)
            if repeater.talkgroups_inferred:
                report.append(f"   DB Inferred TGs: {repeater.talkgroups_inferred}")
            
            # Activity stats
            if repeater.activity_stats:
                stats = repeater.activity_stats
                report.append(f"   Activity (7d): {stats.get('total_calls', 0)} calls, "
                            f"{stats.get('unique_users', 0)} users, "
                            f"{stats.get('active_days', 0)} days active")
            
            if repeater.conflicts:
                report.append(f"   ⚠️  Conflicts detected: {repeater.conflicts}")
        
        report.append("\n" + "="*80)
        
        return "\n".join(report)
    
    def generate_html_report(self, filename: str = "cbridge_report.html"):
        """Generate an enhanced HTML report with database-inferred talkgroups."""
        html = []
        html.append("<!DOCTYPE html>")
        html.append("<html>")
        html.append("<head>")
        html.append("  <meta charset='UTF-8'>")
        html.append("  <title>C-Bridge Network Report with Database Talkgroups</title>")
        html.append("  <style>")
        html.append("    body { font-family: Arial, sans-serif; margin: 20px; background: #f5f5f5; }")
        html.append("    h1 { color: #333; border-bottom: 3px solid #0066cc; padding-bottom: 10px; }")
        html.append("    h2 { color: #0066cc; margin-top: 30px; }")
        html.append("    .timestamp { color: #666; font-size: 14px; }")
        html.append("    table { border-collapse: collapse; width: 100%; margin: 20px 0; background: white; font-size: 12px; }")
        html.append("    th { background: #0066cc; color: white; padding: 10px; text-align: left; font-size: 12px; white-space: nowrap; position: sticky; top: 0; }")
        html.append("    td { padding: 8px; border-bottom: 1px solid #ddd; vertical-align: top; }")
        html.append("    td:first-child { white-space: nowrap; }")
        html.append("    tr:hover { background: #f0f0f0; }")
        html.append("    .connected { color: green; }")
        html.append("    .disconnected { color: #999; }")
        html.append("    .stats { background: #e6f2ff; padding: 15px; border-radius: 5px; margin: 10px 0; }")
        html.append("    .ts-info { font-size: 11px; line-height: 1.6; }")
        html.append("    .ts-label { font-weight: bold; color: #FF9800; display: block; margin-top: 5px; }")
        html.append("    .ts-label:first-child { margin-top: 0; }")
        html.append("    .ts-source { font-size: 10px; color: #666; font-style: italic; }")
        html.append("    .db-inferred { background: #fff3cd; padding: 2px 4px; border-radius: 3px; }")
        html.append("    .radioid { background: #d1ecf1; padding: 2px 4px; border-radius: 3px; }")
        html.append("    .activity { font-size: 10px; color: #28a745; font-weight: bold; }")
        html.append("    .tg-item { display: inline-block; margin: 2px 0; }")
        html.append("  </style>")
        html.append("</head>")
        html.append("<body>")
        
        html.append("  <h1>C-Bridge Network Report with Database Talkgroup Inference</h1>")
        html.append(f"  <p class='timestamp'>Generated: {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}</p>")
        
        # Summary stats
        html.append("  <div class='stats'>")
        html.append(f"    <strong>Bridges Monitored:</strong> {len(self.bridges)} | ")
        html.append(f"    <strong>Total Repeaters:</strong> {len(self.repeaters)}")
        if self.use_radioid:
            html.append(" | <strong>RadioID.net data:</strong> Enabled")
        if self.db:
            html.append(" | <strong>Database Talkgroup Inference:</strong> <span class='db-inferred'>ENABLED</span>")
        html.append("  </div>")
        
        # Main table
        html.append("  <h2>Repeater Details with Inferred Talkgroups from Call History</h2>")
        html.append("  <table>")
        html.append("    <tr>")
        html.append("      <th>Bridge</th>")
        html.append("      <th>Peer ID</th>")
        html.append("      <th>Repeater ID</th>")
        html.append("      <th>Call Sign</th>")
        html.append("      <th>Location</th>")
        html.append("      <th>Frequency</th>")
        html.append("      <th>Color Code</th>")
        html.append("      <th>Time Slot & Talkgroup Info</th>")
        html.append("    </tr>")
        
        # Build and sort table rows
        table_rows = []
        
        for peer_id in self.repeaters.keys():
            repeater = self.repeaters[peer_id]
            
            # Extract repeater ID from connected_to field
            repeater_id = ""
            if repeater.connected_to:
                match = re.search(r'--\s*(\d+)', repeater.connected_to)
                if match:
                    repeater_id = match.group(1)
            if not repeater_id:
                repeater_id = peer_id
            
            status_class = "connected" if repeater.connected_to else "disconnected"
            
            for bridge in repeater.bridges:
                table_rows.append({
                    'bridge': bridge,
                    'repeater_id': repeater_id,
                    'peer_id': peer_id,
                    'repeater': repeater,
                    'status_class': status_class
                })
        
        # Sort by bridge, then repeater ID
        table_rows.sort(key=lambda x: (x['bridge'], x['repeater_id']))
        
        # Generate HTML for sorted rows
        for row in table_rows:
            bridge = row['bridge']
            repeater_id = row['repeater_id']
            peer_id = row['peer_id']
            repeater = row['repeater']
            status_class = row['status_class']
            
            html.append("    <tr>")
            
            # Bridge
            html.append(f"      <td>{bridge}</td>")
            
            # Peer ID
            html.append(f"      <td>{peer_id}</td>")
            
            # Repeater ID
            html.append(f"      <td class='{status_class}'>{repeater_id}</td>")
            
            # Call Sign
            callsign = repeater.callsign or ""
            if not callsign and repeater.location:
                match = re.match(r'^([A-Z0-9]+)', repeater.location)
                if match:
                    callsign = match.group(1)
            html.append(f"      <td>{callsign or '—'}</td>")
            
            # Location
            location_text = repeater.get_formatted_location()
            html.append(f"      <td>{location_text}</td>")
            
            # Frequency
            freq_text = f"{repeater.frequency} MHz" if repeater.frequency else "—"
            html.append(f"      <td>{freq_text}</td>")
            
            # Color Code
            cc_text = repeater.color_code if repeater.color_code else "—"
            html.append(f"      <td>{cc_text}</td>")
            
            # Time Slot & Talkgroup Info (combined)
            html.append("      <td class='ts-info'>")
            
            # Activity stats if available
            if repeater.activity_stats:
                stats = repeater.activity_stats
                html.append(f"        <div class='activity'>")
                html.append(f"📊 Last 7 days: {stats.get('total_calls', 0)} calls, "
                          f"{stats.get('unique_users', 0)} users")
                html.append(f"        </div>")
            
            # TS1 - RadioID data
            if repeater.ts1:
                html.append(f"        <span class='ts-label'>TS1 <span class='ts-source radioid'>(Obtained from RadioID.net)</span>:</span>")
                html.append(f"        {repeater.ts1}<br>")
            
            # TS2 - RadioID data
            if repeater.ts2:
                html.append(f"        <span class='ts-label'>TS2 <span class='ts-source radioid'>(Obtained from RadioID.net)</span>:</span>")
                html.append(f"        {repeater.ts2}<br>")
            
            # Database-inferred talkgroups (no timeslot designation)
            if repeater.talkgroups_inferred:
                html.append(f"        <span class='ts-label'>Talkgroups <span class='ts-source db-inferred'>(Inferred From Call History)</span>:</span>")
                html.append(f"        {repeater.talkgroups_inferred}<br>")
            
            # If no timeslot data at all
            if not any([repeater.ts1, repeater.ts2, repeater.talkgroups_inferred]):
                html.append("        —")
            
            html.append("      </td>")
            html.append("    </tr>")
        
        html.append("  </table>")
        
        html.append("</body>")
        html.append("</html>")
        
        with open(filename, 'w') as f:
            f.write("\n".join(html))
        
        print(f"\nHTML report saved to: {filename}")
    
    def export_json(self, filename: str = "cbridge_data.json"):
        """Export all collected data as JSON."""
        export_data = {
            'timestamp': datetime.now().isoformat(),
            'bridges': self.bridges,
            'repeaters': {peer_id: rep.to_dict() for peer_id, rep in self.repeaters.items()},
            'database_enabled': self.db is not None,
            'radioid_enabled': self.use_radioid
        }
        
        with open(filename, 'w') as f:
            json.dump(export_data, f, indent=2)
        
        print(f"JSON data exported to: {filename}")
    
    def __del__(self):
        """Cleanup database connection."""
        # Guard with getattr in case __init__ failed before self.db was set
        if getattr(self, 'db', None):
            self.db.close()


def main():
    """Main entry point."""
    global debug
    
    parser = argparse.ArgumentParser(
        description='Monitor multiple C-Bridges with PostgreSQL talkgroup inference'
    )
    parser.add_argument('config', 
                       help='JSON config file with bridge list and database config',
                       nargs='?',
                       default='bridges.json')
    parser.add_argument('-o', '--output',
                       help='Output HTML filename',
                       default='cbridge_report.html')
    parser.add_argument('--json',
                       help='Export JSON data file',
                       default='cbridge_data.json')
    parser.add_argument('--timeout',
                       help='Request timeout in seconds',
                       type=int,
                       default=10)
    parser.add_argument('--no-radioid',
                       help='Disable RadioID.net lookups',
                       action='store_true')
    parser.add_argument('--no-db',
                       help='Disable database talkgroup inference',
                       action='store_true')
    parser.add_argument('--db-days',
                       help='Days of history for talkgroup inference',
                       type=int,
                       default=90)
    parser.add_argument('--db-min-calls',
                       help='Minimum calls to include talkgroup',
                       type=int,
                       default=5)
    parser.add_argument('--debug',
                       help='Enable debug output',
                       action='store_true')
    
    args = parser.parse_args()
    debug = args.debug
    
    # Default configuration
    bridge_configs = [
        {
            'name': 'KB1B-Bridge',
            'url': 'http://10.10.10.10:42420/data.txt?param=ajaxpeerwatchpage',
            'description': 'Primary Amateur Bridge'
        },
    ]
    
    db_config = None
    
    # Try to load from config file
    try:
        with open(args.config, 'r') as f:
            config_data = json.load(f)
            if 'bridges' in config_data:
                bridge_configs = config_data['bridges']
                print(f"Loaded {len(bridge_configs)} bridges from {args.config}")
            
            # Load database config if present
            if 'database' in config_data and not args.no_db:
                db_config = config_data['database']
                print(f"Database config loaded: {db_config.get('database', 'N/A')}")
            
    except FileNotFoundError:
        print(f"Config file {args.config} not found, using default configuration")
        print(f"\nCreate a JSON file with this format:")
        example_config = {
            'bridges': bridge_configs,
            'database': {
                'host': 'localhost',
                'database': 'radio',
                'user': 'radio',
                'password': 'your_password',
                'port': 5432,
                'table': 'nedecn'
            }
        }
        print(json.dumps(example_config, indent=2))
        print()
    except Exception as e:
        print(f"Error loading config: {e}")
    
    # Create monitor and query bridges
    monitor = CBridgeMonitor(
        timeout=args.timeout,
        use_radioid=not args.no_radioid,
        db_config=db_config if not args.no_db else None,
        db_days=args.db_days,
        db_min_calls=args.db_min_calls
    )
    monitor.query_bridges(bridge_configs)
    
    # Generate reports
    print("\n" + monitor.generate_summary_report())
    monitor.generate_html_report(args.output)
    monitor.export_json(args.json)
    
    print(f"\n{'='*80}")
    print("Report generation complete!")
    print(f"{'='*80}\n")


if __name__ == "__main__":
    main()

