# This file was auto-generated by Fern from our API Definition.

import typing

import httpx

from .core.client_wrapper import AsyncClientWrapper, SyncClientWrapper
from .environment import LlamaCloudEnvironment
from .resources.chat_apps.client import AsyncChatAppsClient, ChatAppsClient
from .resources.component_definitions.client import AsyncComponentDefinitionsClient, ComponentDefinitionsClient
from .resources.data_sinks.client import AsyncDataSinksClient, DataSinksClient
from .resources.data_sources.client import AsyncDataSourcesClient, DataSourcesClient
from .resources.embedding_model_configs.client import AsyncEmbeddingModelConfigsClient, EmbeddingModelConfigsClient
from .resources.evals.client import AsyncEvalsClient, EvalsClient
from .resources.files.client import AsyncFilesClient, FilesClient
from .resources.jobs.client import AsyncJobsClient, JobsClient
from .resources.llama_extract.client import AsyncLlamaExtractClient, LlamaExtractClient
from .resources.organizations.client import AsyncOrganizationsClient, OrganizationsClient
from .resources.parsing.client import AsyncParsingClient, ParsingClient
from .resources.pipelines.client import AsyncPipelinesClient, PipelinesClient
from .resources.projects.client import AsyncProjectsClient, ProjectsClient
from .resources.reports.client import AsyncReportsClient, ReportsClient
from .resources.retrievers.client import AsyncRetrieversClient, RetrieversClient


class LlamaCloud:
    """Synchronous entry point to the LlamaCloud API.

    Builds one ``SyncClientWrapper`` (base URL + auth token + ``httpx.Client``)
    and hands it to every resource client, so all sub-clients share the same
    connection pool and credentials.

    Parameters (keyword-only):
        base_url: Explicit API root; overrides ``environment`` when given.
        environment: Predefined environment whose ``.value`` is used as the
            base URL when ``base_url`` is not supplied.
        token: Bearer token, either a string or a zero-arg callable producing one.
        timeout: Request timeout (seconds) for the default ``httpx.Client``;
            ignored when ``httpx_client`` is provided.
        httpx_client: Optional pre-configured ``httpx.Client`` to reuse.
    """

    def __init__(
        self,
        *,
        base_url: typing.Optional[str] = None,
        environment: LlamaCloudEnvironment = LlamaCloudEnvironment.DEFAULT,
        token: typing.Optional[typing.Union[str, typing.Callable[[], str]]] = None,
        timeout: typing.Optional[float] = 60,
        httpx_client: typing.Optional[httpx.Client] = None
    ):
        # Create a default HTTP client only when the caller did not supply one.
        if httpx_client is None:
            httpx_client = httpx.Client(timeout=timeout)
        wrapper = SyncClientWrapper(
            base_url=_get_base_url(base_url=base_url, environment=environment),
            token=token,
            httpx_client=httpx_client,
        )
        self._client_wrapper = wrapper
        # One client per API resource, all sharing the wrapper above.
        self.data_sinks = DataSinksClient(client_wrapper=wrapper)
        self.data_sources = DataSourcesClient(client_wrapper=wrapper)
        self.embedding_model_configs = EmbeddingModelConfigsClient(client_wrapper=wrapper)
        self.organizations = OrganizationsClient(client_wrapper=wrapper)
        self.projects = ProjectsClient(client_wrapper=wrapper)
        self.files = FilesClient(client_wrapper=wrapper)
        self.pipelines = PipelinesClient(client_wrapper=wrapper)
        self.retrievers = RetrieversClient(client_wrapper=wrapper)
        self.jobs = JobsClient(client_wrapper=wrapper)
        self.evals = EvalsClient(client_wrapper=wrapper)
        self.parsing = ParsingClient(client_wrapper=wrapper)
        self.component_definitions = ComponentDefinitionsClient(client_wrapper=wrapper)
        self.chat_apps = ChatAppsClient(client_wrapper=wrapper)
        self.llama_extract = LlamaExtractClient(client_wrapper=wrapper)
        self.reports = ReportsClient(client_wrapper=wrapper)


class AsyncLlamaCloud:
    """Asynchronous entry point to the LlamaCloud API.

    Builds one ``AsyncClientWrapper`` (base URL + auth token +
    ``httpx.AsyncClient``) and hands it to every async resource client, so all
    sub-clients share the same connection pool and credentials.

    Parameters (keyword-only):
        base_url: Explicit API root; overrides ``environment`` when given.
        environment: Predefined environment whose ``.value`` is used as the
            base URL when ``base_url`` is not supplied.
        token: Bearer token, either a string or a zero-arg callable producing one.
        timeout: Request timeout (seconds) for the default ``httpx.AsyncClient``;
            ignored when ``httpx_client`` is provided.
        httpx_client: Optional pre-configured ``httpx.AsyncClient`` to reuse.
    """

    def __init__(
        self,
        *,
        base_url: typing.Optional[str] = None,
        environment: LlamaCloudEnvironment = LlamaCloudEnvironment.DEFAULT,
        token: typing.Optional[typing.Union[str, typing.Callable[[], str]]] = None,
        timeout: typing.Optional[float] = 60,
        httpx_client: typing.Optional[httpx.AsyncClient] = None
    ):
        # Create a default async HTTP client only when the caller did not supply one.
        if httpx_client is None:
            httpx_client = httpx.AsyncClient(timeout=timeout)
        wrapper = AsyncClientWrapper(
            base_url=_get_base_url(base_url=base_url, environment=environment),
            token=token,
            httpx_client=httpx_client,
        )
        self._client_wrapper = wrapper
        # One async client per API resource, all sharing the wrapper above.
        self.data_sinks = AsyncDataSinksClient(client_wrapper=wrapper)
        self.data_sources = AsyncDataSourcesClient(client_wrapper=wrapper)
        self.embedding_model_configs = AsyncEmbeddingModelConfigsClient(client_wrapper=wrapper)
        self.organizations = AsyncOrganizationsClient(client_wrapper=wrapper)
        self.projects = AsyncProjectsClient(client_wrapper=wrapper)
        self.files = AsyncFilesClient(client_wrapper=wrapper)
        self.pipelines = AsyncPipelinesClient(client_wrapper=wrapper)
        self.retrievers = AsyncRetrieversClient(client_wrapper=wrapper)
        self.jobs = AsyncJobsClient(client_wrapper=wrapper)
        self.evals = AsyncEvalsClient(client_wrapper=wrapper)
        self.parsing = AsyncParsingClient(client_wrapper=wrapper)
        self.component_definitions = AsyncComponentDefinitionsClient(client_wrapper=wrapper)
        self.chat_apps = AsyncChatAppsClient(client_wrapper=wrapper)
        self.llama_extract = AsyncLlamaExtractClient(client_wrapper=wrapper)
        self.reports = AsyncReportsClient(client_wrapper=wrapper)


def _get_base_url(*, base_url: typing.Optional[str] = None, environment: LlamaCloudEnvironment) -> str:
    """Resolve the API root URL for a client.

    An explicit ``base_url`` always wins; otherwise the ``environment``'s
    ``.value`` is used. Raises ``Exception`` if neither is provided.
    """
    # Guard clauses: first non-None source wins.
    if base_url is not None:
        return base_url
    if environment is not None:
        return environment.value
    raise Exception("Please pass in either base_url or environment to construct the client")
