from marker.llms.llm_service import LLMService
from langchain_community.chat_models import ChatOllama  # you're already using this

class OllamaLLMService(LLMService):
    def __init__(self, model: str = "llama3:8b", base_url: str = "http://127.0.0.1:11434"):
        # Defaults target Ollama's standard local endpoint.
        self.chat = ChatOllama(model=model, base_url=base_url)

    def call(self, prompt: str, max_tokens: int = 512, temperature: float = 0.7) -> str:
        # Apply the per-call overrides before invoking; ChatOllama reads
        # generation settings from instance attributes, and max_tokens maps
        # to Ollama's num_predict option.
        self.chat.num_predict = max_tokens
        self.chat.temperature = temperature
        response = self.chat.invoke(prompt)
        return response.content.strip()
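
# A minimal usage sketch, assuming an Ollama server is running locally at the
# default port and llama3:8b has already been pulled (`ollama pull llama3:8b`);
# the prompt string here is just an illustrative placeholder:
if __name__ == "__main__":
    service = OllamaLLMService()
    answer = service.call(
        "Summarize this page in one sentence.",
        max_tokens=128,
        temperature=0.2,
    )
    print(answer)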