# This file was auto-generated by Fern from our API Definition.

import typing
import urllib.parse
from json.decoder import JSONDecodeError

from ...core.api_error import ApiError
from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper
from ...core.remove_none_from_dict import remove_none_from_dict
from ...errors.unprocessable_entity_error import UnprocessableEntityError
from ...types.http_validation_error import HttpValidationError
from ...types.paginated_jobs_history_with_metrics import PaginatedJobsHistoryWithMetrics

# Prefer the pydantic v1 compatibility API: pydantic>=2 ships it as `pydantic.v1`,
# while pydantic<2 exposes the same API at the top level.
try:
    import pydantic.v1 as pydantic  # type: ignore
except ImportError:
    import pydantic  # type: ignore


class JobsClient:
    def __init__(self, *, client_wrapper: SyncClientWrapper):
        self._client_wrapper = client_wrapper

    def get_jobs_api_v_1_jobs_get(
        self,
        *,
        job_name: typing.Optional[str] = None,
        limit: typing.Optional[int] = None,
        offset: typing.Optional[int] = None,
        include_usage_metrics: typing.Optional[bool] = None,
        project_id: typing.Optional[str] = None,
        organization_id: typing.Optional[str] = None,
    ) -> PaginatedJobsHistoryWithMetrics:
        """
        Get jobs for a project.

        Parameters:
            - job_name: typing.Optional[str]. Filter results by job name.

            - limit: typing.Optional[int]. Maximum number of jobs to return.

            - offset: typing.Optional[int]. Number of jobs to skip before the first result.

            - include_usage_metrics: typing.Optional[bool]. Whether to include usage metrics for each job.

            - project_id: typing.Optional[str]. ID of the project to list jobs for.

            - organization_id: typing.Optional[str]. ID of the organization to scope the query to.
        ---
        from llama_cloud.client import LlamaCloud

        client = LlamaCloud(
            token="YOUR_TOKEN",
        )
        client.jobs.get_jobs_api_v_1_jobs_get()
        """
        _response = self._client_wrapper.httpx_client.request(
            "GET",
            urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/jobs"),
            params=remove_none_from_dict(
                {
                    "job_name": job_name,
                    "limit": limit,
                    "offset": offset,
                    "include_usage_metrics": include_usage_metrics,
                    "project_id": project_id,
                    "organization_id": organization_id,
                }
            ),
            headers=self._client_wrapper.get_headers(),
            timeout=60,
        )
        if 200 <= _response.status_code < 300:
            return pydantic.parse_obj_as(PaginatedJobsHistoryWithMetrics, _response.json())  # type: ignore
        if _response.status_code == 422:
            raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json()))  # type: ignore
        try:
            _response_json = _response.json()
        except JSONDecodeError:
            raise ApiError(status_code=_response.status_code, body=_response.text)
        raise ApiError(status_code=_response.status_code, body=_response_json)
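
# Illustrative usage (not part of the generated code): fetch the first page of
# jobs for a project, including usage metrics. YOUR_TOKEN and YOUR_PROJECT_ID
# are placeholders, and the filter semantics of `project_id` are assumed from
# the parameter name and the docstring above.
#
#     from llama_cloud.client import LlamaCloud
#
#     client = LlamaCloud(token="YOUR_TOKEN")
#     jobs_page = client.jobs.get_jobs_api_v_1_jobs_get(
#         project_id="YOUR_PROJECT_ID",
#         include_usage_metrics=True,
#         limit=25,
#         offset=0,
#     )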


class AsyncJobsClient:
    def __init__(self, *, client_wrapper: AsyncClientWrapper):
        self._client_wrapper = client_wrapper

    async def get_jobs_api_v_1_jobs_get(
        self,
        *,
        job_name: typing.Optional[str] = None,
        limit: typing.Optional[int] = None,
        offset: typing.Optional[int] = None,
        include_usage_metrics: typing.Optional[bool] = None,
        project_id: typing.Optional[str] = None,
        organization_id: typing.Optional[str] = None,
    ) -> PaginatedJobsHistoryWithMetrics:
        """
        Get jobs for a project.

        Parameters:
            - job_name: typing.Optional[str]. Filter results by job name.

            - limit: typing.Optional[int]. Maximum number of jobs to return.

            - offset: typing.Optional[int]. Number of jobs to skip before the first result.

            - include_usage_metrics: typing.Optional[bool]. Whether to include usage metrics for each job.

            - project_id: typing.Optional[str]. ID of the project to list jobs for.

            - organization_id: typing.Optional[str]. ID of the organization to scope the query to.
        ---
        from llama_cloud.client import AsyncLlamaCloud

        client = AsyncLlamaCloud(
            token="YOUR_TOKEN",
        )
        await client.jobs.get_jobs_api_v_1_jobs_get()
        """
        _response = await self._client_wrapper.httpx_client.request(
            "GET",
            urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/jobs"),
            params=remove_none_from_dict(
                {
                    "job_name": job_name,
                    "limit": limit,
                    "offset": offset,
                    "include_usage_metrics": include_usage_metrics,
                    "project_id": project_id,
                    "organization_id": organization_id,
                }
            ),
            headers=self._client_wrapper.get_headers(),
            timeout=60,
        )
        if 200 <= _response.status_code < 300:
            return pydantic.parse_obj_as(PaginatedJobsHistoryWithMetrics, _response.json())  # type: ignore
        if _response.status_code == 422:
            raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json()))  # type: ignore
        try:
            _response_json = _response.json()
        except JSONDecodeError:
            raise ApiError(status_code=_response.status_code, body=_response.text)
        raise ApiError(status_code=_response.status_code, body=_response_json)
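
# Illustrative usage (not part of the generated code): the async client must be
# awaited inside a running event loop, e.g. via asyncio.run(). YOUR_TOKEN is a
# placeholder, as in the docstring example above.
#
#     import asyncio
#
#     from llama_cloud.client import AsyncLlamaCloud
#
#     async def main() -> None:
#         client = AsyncLlamaCloud(token="YOUR_TOKEN")
#         jobs_page = await client.jobs.get_jobs_api_v_1_jobs_get(limit=25)
#         print(jobs_page)
#
#     asyncio.run(main())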
