import asyncio
import json
import logging
from typing import Any, AsyncGenerator, Dict

import aiohttp
import requests

from .config import GROQ_API_KEY
from .search_tool import (
    get_search_summary,
    get_structured_search_results,
    perform_web_search,
    search_engine,
)

# Set up logging
logger = logging.getLogger(__name__)

def query_groq(query: str, search_context: str = ""):
    """
    Query Groq API with optional search context
    """
    try:
        # Groq's OpenAI-compatible chat completions endpoint
        url = "https://api.groq.com/openai/v1/chat/completions"
        
        headers = {
            "Authorization": f"Bearer {GROQ_API_KEY}",
            "Content-Type": "application/json"
        }
        
        # Enhanced prompt with search context
        enhanced_query = query
        if search_context:
            enhanced_query = f"""
Based on the following search information and user query, provide a comprehensive response:

Search Context: {search_context}

User Query: {query}

Please provide a detailed, helpful response that incorporates relevant information from the search context while directly answering the user's question.
"""
        
        data = {
            "messages": [
                {
                    "role": "system",
                    "content": "You are a helpful AI assistant. When provided with search context, use it to enhance your responses while ensuring accuracy and relevance."
                },
                {
                    "role": "user", 
                    "content": enhanced_query
                }
            ],
            "model": "gemma2-9b-it",
            "temperature": 0.7,
            "max_tokens": 1024
        }

        logger.info(f"API URL: {url}")
        logger.info(f"Enhanced query length: {len(enhanced_query)}")
        
        response = requests.post(url, headers=headers, json=data, timeout=30)
        
        logger.info(f"Response status: {response.status_code}")
        
        if response.status_code == 200:
            response_json = response.json()
            if 'choices' in response_json and len(response_json['choices']) > 0:
                message = response_json['choices'][0]['message']['content']
                return message.strip()
            else:
                logger.error("No choices found in response")
                return "No response generated"
        else:
            # Return the actual error message from Groq
            try:
                error_data = response.json()
                error_msg = error_data.get('error', {}).get('message', 'Unknown error')
                logger.error(f"Groq API error details: {error_data}")
                return f"Groq API Error: {error_msg}"
            except Exception:  # error body was not JSON or had an unexpected shape
                return f"API Error {response.status_code}: {response.text}"
            
    except Exception as e:
        logger.error(f"Exception in query_groq: {str(e)}")
        return f"Exception: {str(e)}"

def should_search(query: str) -> bool:
    """
    Determine if a query would benefit from web search
    """
    search_indicators = [
        "current", "latest", "recent", "news", "today", "now",
        "weather", "stock", "price", "update", "trending",
        "what is", "who is", "when did", "where is", "how to",
        "best", "top", "compare", "vs", "versus", "difference",
        "tutorial", "guide", "learn", "explain"
    ]
    
    query_lower = query.lower()
    return any(indicator in query_lower for indicator in search_indicators)
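
# Illustrative behaviour of the keyword heuristic above (examples only, not tests):
#   should_search("What is the latest aiohttp release?")  -> True  ("what is", "latest")
#   should_search("Thanks, that was useful")              -> False (no indicator matched)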

def generate_response(query: str, enable_search: bool = True):
    """
    Generate response with optional web search integration
    """
    logger.info(f"Generate response called with query: '{query}'")
    
    if not query or query.strip() == "":
        return "Please provide a valid question."
    
    if not GROQ_API_KEY:
        return "GROQ_API_KEY not found in config"
    
    if len(GROQ_API_KEY) < 10:
        return f"GROQ_API_KEY seems invalid (length: {len(GROQ_API_KEY)})"
    
    logger.info(f"Using API key: {GROQ_API_KEY[:10]}...")
    
    search_context = ""
    if enable_search and should_search(query):
        logger.info("Query identified as benefiting from search")
        search_results = perform_web_search(query, num_results=3)
        if search_results and len(search_results) > 0:
            search_context = "\n".join(search_results)
            logger.info(f"Search context gathered: {len(search_context)} characters")
    
    return query_groq(query, search_context)
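
# Example usage (illustrative; requires a valid GROQ_API_KEY and network access):
#   generate_response("current weather in Berlin")          -> searches, then answers
#   generate_response("Write a haiku", enable_search=False) -> answers without searching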


def generate_response_with_search_results(query: str):
    """
    Generate response and return both the response and search results separately
    """
    logger.info(f"Generate response with search results for query: '{query}'")
    
    if not query or query.strip() == "":
        return "Please provide a valid question.", []
    
    # Perform search
    search_results = []
    if should_search(query):
        logger.info("Performing web search...")
        search_results = perform_web_search(query, num_results=5)
        logger.info(f"Search completed. Found {len(search_results)} results")
    
    # Generate AI response with search context
    search_context = ""
    if search_results and len(search_results) > 0:
        search_context = "\n".join(search_results[:3])  # Use top 3 for context
    
    ai_response = query_groq(query, search_context)
    
    return ai_response, search_results
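
# Example call (illustrative; the query string is just a sample):
#   answer, results = generate_response_with_search_results("latest Python release")
# "answer" is the model's text (or an error string); "results" is a list of
# formatted search result strings (possibly empty).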


async def query_groq_stream(query: str, search_context: str = ""):
    """
    Query Groq API with streaming response
    """
    try:
        url = "https://api.groq.com/openai/v1/chat/completions"
        
        headers = {
            "Authorization": f"Bearer {GROQ_API_KEY}",
            "Content-Type": "application/json"
        }
        
        # Enhanced prompt with search context
        enhanced_query = query
        if search_context:
            enhanced_query = f"""
Based on the following search information and user query, provide a comprehensive response:

Search Context: {search_context}

User Query: {query}

Please provide a detailed, helpful response that incorporates relevant information from the search context while directly answering the user's question.
"""
        
        data = {
            "messages": [
                {
                    "role": "system",
                    "content": "You are a helpful AI assistant. When provided with search context, use it to enhance your responses while ensuring accuracy and relevance."
                },
                {
                    "role": "user", 
                    "content": enhanced_query
                }
            ],
            "model": "llama3-8b-8192",
            "temperature": 0.7,
            "max_tokens": 1024,
            "stream": True  # Enable streaming
        }

        logger.info(f"Starting streaming API call to: {url}")
        
        async with aiohttp.ClientSession() as session:
            async with session.post(url, headers=headers, json=data, timeout=aiohttp.ClientTimeout(total=30)) as response:
                if response.status != 200:
                    error_text = await response.text()
                    logger.error(f"API Error {response.status}: {error_text}")
                    yield f"API Error {response.status}: {error_text}"
                    return
                
                buffer = ""
                async for chunk in response.content.iter_chunked(1024):
                    buffer += chunk.decode('utf-8')
                    
                    # Process complete lines
                    while '\n' in buffer:
                        line, buffer = buffer.split('\n', 1)
                        line = line.strip()
                        
                        if line.startswith('data: '):
                            data_str = line[6:]  # Remove 'data: ' prefix
                            
                            if data_str == '[DONE]':
                                return
                            
                            if data_str:
                                try:
                                    data_obj = json.loads(data_str)
                                    if 'choices' in data_obj and len(data_obj['choices']) > 0:
                                        delta = data_obj['choices'][0].get('delta', {})
                                        if 'content' in delta:
                                            yield delta['content']
                                except json.JSONDecodeError:
                                    continue
                                        
    except Exception as e:
        logger.error(f"Exception in query_groq_stream: {str(e)}")
        yield f"Exception: {str(e)}"

async def generate_response_with_search_results_stream(query: str) -> AsyncGenerator[Dict[str, Any], None]:
    """
    Generate streaming response and return both the response chunks and structured search results
    Updated to match the non-streaming method's search result structure
    """
    logger.info(f"Generate streaming response with search results for query: '{query}'")
    
    if not query or query.strip() == "":
        yield {"type": "response_chunk", "data": "Please provide a valid question."}
        return
    
    # Perform search first (non-streaming)
    search_results = []
    structured_search_results = []
    
    if should_search(query):
        logger.info("Performing web search...")
        # Run search in background thread to avoid blocking
        loop = asyncio.get_event_loop()
        
        # Get structured search results (matching non-streaming method)
        structured_search_results = await loop.run_in_executor(
            None, get_structured_search_results, query, 5
        )
        
        # Create formatted search results for backward compatibility
        search_results = []
        for i, result in enumerate(structured_search_results, 1):
            formatted_result = f"{i}. {result['content']}\n🔗 URL: {result['url']}"
            search_results.append(formatted_result)
        
        logger.info(f"Search completed. Found {len(search_results)} formatted results and {len(structured_search_results)} structured results")
        
        # Yield both formatted and structured search results
        yield {
            "type": "search_results", 
            "data": search_results,
            "structured_data": structured_search_results
        }
    
    # Generate AI response with search context (streaming)
    search_context = ""
    if structured_search_results:
        # Create context from structured search results
        context_items = []
        for i, result in enumerate(structured_search_results[:3]):  # Use top 3 for context
            context_items.append(
                f"Source {i+1}: {result.get('title', 'No title')}\n"
                f"Content: {result.get('snippet', 'No snippet')}\n"
                f"URL: {result['url']}"
            )
        search_context = "\n\n".join(context_items)
    
    # Stream the AI response
    async for chunk in query_groq_stream(query, search_context):
        yield {"type": "response_chunk", "data": chunk}


async def generate_response_stream(query: str, enable_search: bool = True):
    """
    Generate streaming response with optional web search integration
    """
    logger.info(f"Generate streaming response called with query: '{query}'")
    
    if not query or query.strip() == "":
        yield "Please provide a valid question."
        return
    
    if not GROQ_API_KEY:
        yield "GROQ_API_KEY not found in config"
        return
    
    if len(GROQ_API_KEY) < 10:
        yield f"GROQ_API_KEY seems invalid (length: {len(GROQ_API_KEY)})"
        return
    
    logger.info(f"Using API key: {GROQ_API_KEY[:10]}...")
    
    search_context = ""
    if enable_search and should_search(query):
        logger.info("Query identified as benefiting from search")
        # Run search in background thread
        loop = asyncio.get_event_loop()
        search_results = await loop.run_in_executor(
            None, perform_web_search, query, 3
        )
        if search_results and len(search_results) > 0:
            search_context = "\n".join(search_results)
            logger.info(f"Search context gathered: {len(search_context)} characters")
    
    async for chunk in query_groq_stream(query, search_context):
        yield chunk

def generate_response_with_llm_search(query: str):
    """
    Generate response with LLM and return structured search results
    """
    logger.info(f"Generate response with LLM search for query: '{query}'")
    
    if not query or query.strip() == "":
        return "Please provide a valid question.", []
    
    # Perform search and get structured results
    search_results = []
    if should_search(query):
        logger.info("Performing web search...")
        raw_results = search_engine.search_multiple_engines(query, num_results=5)
        
        if raw_results:
            # Convert SearchResult objects to formatted strings
            for i, result in enumerate(raw_results, 1):
                formatted_result = (
                    f"[{result.source.upper()}] 🔍 {result.title}\n"
                    f"{result.snippet}\n"
                    f"🔗 URL: {result.url}\n"
                    f"🌐 Domain: {result.domain}\n"
                    f"📊 Relevance: {'⭐' * min(5, max(1, int(result.relevance_score * 5)))} ({result.relevance_score:.2f})"
                )
                search_results.append(formatted_result)
        
        logger.info(f"Search completed. Found {len(search_results)} results")
    
    # Generate AI response with search context
    search_context = ""
    if search_results:
        # Create context from search results
        context_items = []
        for i, result_str in enumerate(search_results[:3]):  # Use top 3 for context
            lines = result_str.split('\n')
            title = lines[0].split('🔍')[1].strip() if '🔍' in lines[0] else ""
            snippet = lines[1] if len(lines) > 1 else ""
            url = lines[2].replace('🔗 URL:', '').strip() if len(lines) > 2 else ""
            
            context_items.append(f"Source {i+1}: {title}\nContent: {snippet}\nURL: {url}")
        
        search_context = "\n\n".join(context_items)
    
    ai_response = query_groq(query, search_context)
    
    return ai_response, search_results
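

# Minimal manual smoke test (illustrative only): run the module inside its package,
# e.g. `python -m <package>.<module>`, with a valid GROQ_API_KEY configured and
# network access available. Nothing below runs on import; the query is just a sample.
if __name__ == "__main__":
    demo_query = "What is the latest stable Python release?"

    # Non-streaming path: full answer plus formatted search results
    answer, results = generate_response_with_search_results(demo_query)
    print(f"Answer:\n{answer}\n")
    print(f"Formatted search results returned: {len(results)}")

    # Streaming path: print chunks as they arrive
    async def _demo_stream() -> None:
        async for event in generate_response_with_search_results_stream(demo_query):
            if event["type"] == "response_chunk":
                print(event["data"], end="", flush=True)
        print()

    asyncio.run(_demo_stream())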