"""
Routes for admin site settings management.
Only accessible to authenticated users (can be extended to admin-only later).
"""
import logging
from fastapi import APIRouter, Depends, HTTPException, status
from sqlalchemy.ext.asyncio import AsyncSession

from app.api.deps import get_db, get_current_user
from app.schemas.site_settings import SiteSettingsUpdate, SiteSettingsResponse
from app.services.site_settings_service import SiteSettingsService

# All routes below are mounted under /admin/settings and tagged for the OpenAPI docs.
router = APIRouter(prefix="/admin/settings", tags=["admin-settings"])
# Module-level logger, named after this module per logging convention.
logger = logging.getLogger(__name__)


@router.get("/", response_model=SiteSettingsResponse)
async def get_site_settings(
    db: AsyncSession = Depends(get_db),
    current_user=Depends(get_current_user)
):
    """
    Get current site settings including scraping configuration.

    Accessible to authenticated users.
    Can be restricted to admin-only by adding role checks.

    Raises:
        HTTPException: propagated unchanged if raised by the service layer;
            otherwise 500 when retrieval fails unexpectedly.
    """
    try:
        # Lazy %-args so formatting only happens if the level is enabled.
        logger.info("[API] Admin %s requesting site settings", current_user.email)
        return await SiteSettingsService.get_settings(db)
    except HTTPException:
        # Consistent with update_site_settings: don't mask deliberate
        # service-level HTTP errors as a generic 500.
        raise
    except Exception as e:
        # logger.exception records the traceback automatically.
        logger.exception("[API] Error retrieving settings: %s", e)
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Failed to retrieve settings"
        ) from e


@router.patch("/", response_model=SiteSettingsResponse)
async def update_site_settings(
    settings_update: SiteSettingsUpdate,
    db: AsyncSession = Depends(get_db),
    current_user=Depends(get_current_user)
):
    """
    Update site settings including scraping configuration.

    **Note:** This endpoint will automatically update the scheduler.
    - If `scraping_enabled` is changed to True: job will be scheduled
    - If `scraping_enabled` is changed to False: job will be cancelled
    - If schedule parameters change: existing job will be rescheduled

    Parameters:
    - scraping_enabled: Enable/disable automated scraping
    - scraping_frequency: 'daily', 'weekly', or 'custom'
    - scraping_time: Time to run in client's timezone. Format: HH:MM
    - client_timezone: Client timezone (e.g., 'Europe/Madrid', 'America/New_York'). Scheduler converts to UTC automatically.
    - scraping_day_of_week: For weekly scraping (monday-sunday)
    - scraping_custom_cron: Custom cron expression (for frequency='custom')
    - enable_discovery: Enable SERP API discovery during scraping
    - headless_mode: Run browser in headless mode
    - timeout_seconds: Page load timeout (5-60 seconds)

    Example:
    ```json
    {
        "scraping_enabled": true,
        "scraping_frequency": "daily",
        "scraping_time": "09:00",
        "client_timezone": "Europe/Madrid",
        "enable_discovery": true,
        "headless_mode": true,
        "timeout_seconds": 15
    }
    ```

    Raises:
        HTTPException: propagated unchanged if raised by the service layer;
            otherwise 500 when the update fails unexpectedly.
    """
    try:
        # Lazy %-args so formatting only happens if the level is enabled;
        # exclude_unset logs only the fields the client actually sent.
        logger.info("[API] Admin %s updating site settings", current_user.email)
        logger.info("[API] Update data: %s", settings_update.model_dump(exclude_unset=True))

        # Service layer persists the changes and (re)configures the scheduler.
        updated_settings = await SiteSettingsService.update_settings(db, settings_update)

        logger.info("[API] ✅ Settings updated successfully")
        logger.info(
            "[API] Scraping Status: %s",
            "ENABLED" if updated_settings.scraping_enabled else "DISABLED",
        )

        return updated_settings
    except HTTPException:
        # Don't mask deliberate service-level HTTP errors as a generic 500.
        raise
    except Exception as e:
        # logger.exception records the traceback automatically.
        logger.exception("[API] Error updating settings: %s", e)
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Failed to update settings"
        ) from e


@router.post("/scraping/enable", response_model=SiteSettingsResponse)
async def enable_scraping(
    db: AsyncSession = Depends(get_db),
    current_user=Depends(get_current_user)
):
    """
    Enable automated scraping.
    Uses current frequency and timing settings.

    Raises:
        HTTPException: propagated unchanged if raised by the service layer;
            otherwise 500 when enabling fails unexpectedly.
    """
    try:
        # Lazy %-args so formatting only happens if the level is enabled.
        logger.info("[API] Admin %s enabling scheduled scraping", current_user.email)
        settings = await SiteSettingsService.enable_scraping(db)
        logger.info("[API] ✅ Scraping enabled with frequency: %s", settings.scraping_frequency)
        return settings
    except HTTPException:
        # Consistent with update_site_settings: don't mask deliberate
        # service-level HTTP errors as a generic 500.
        raise
    except Exception as e:
        # logger.exception records the traceback automatically.
        logger.exception("[API] Error enabling scraping: %s", e)
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Failed to enable scraping"
        ) from e


@router.post("/scraping/disable", response_model=SiteSettingsResponse)
async def disable_scraping(
    db: AsyncSession = Depends(get_db),
    current_user=Depends(get_current_user)
):
    """
    Disable automated scraping.
    Scheduled jobs will be cancelled immediately.

    Raises:
        HTTPException: propagated unchanged if raised by the service layer;
            otherwise 500 when disabling fails unexpectedly.
    """
    try:
        # Lazy %-args so formatting only happens if the level is enabled.
        logger.info("[API] Admin %s disabling scheduled scraping", current_user.email)
        settings = await SiteSettingsService.disable_scraping(db)
        logger.info("[API] ✅ Scraping disabled")
        return settings
    except HTTPException:
        # Consistent with update_site_settings: don't mask deliberate
        # service-level HTTP errors as a generic 500.
        raise
    except Exception as e:
        # logger.exception records the traceback automatically.
        logger.exception("[API] Error disabling scraping: %s", e)
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Failed to disable scraping"
        ) from e
