"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
# @generated-id: a244d5f2afc5

from __future__ import annotations
from .classifiertrainingparameters import (
    ClassifierTrainingParameters,
    ClassifierTrainingParametersTypedDict,
)
from .jobmetadata import JobMetadata, JobMetadataTypedDict
from .wandbintegrationresult import (
    WandbIntegrationResult,
    WandbIntegrationResultTypedDict,
)
from mistralai.client.types import (
    BaseModel,
    Nullable,
    OptionalNullable,
    UNSET,
    UNSET_SENTINEL,
    UnrecognizedStr,
)
from mistralai.client.utils import validate_const
import pydantic
from pydantic import ConfigDict, model_serializer
from pydantic.functional_validators import AfterValidator
from typing import Any, List, Literal, Optional, Union
from typing_extensions import Annotated, NotRequired, TypedDict


# Closed set of known server-reported job states. The UnrecognizedStr arm
# lets the SDK accept status values added by the API after this code was
# generated, instead of failing validation.
ClassifierFineTuningJobStatus = Union[
    Literal[
        "QUEUED",
        "STARTED",
        "VALIDATING",
        "VALIDATED",
        "RUNNING",
        "FAILED_VALIDATION",
        "FAILED",
        "SUCCESS",
        "CANCELLED",
        "CANCELLATION_REQUESTED",
    ],
    UnrecognizedStr,
]
r"""The current status of the fine-tuning job."""


# TypedDict counterpart of the integration union; wandb is currently the
# only known variant, so the alias points straight at it.
ClassifierFineTuningJobIntegrationTypedDict = WandbIntegrationResultTypedDict


class UnknownClassifierFineTuningJobIntegration(BaseModel):
    r"""A ClassifierFineTuningJobIntegration variant the SDK doesn't recognize. Preserves the raw payload."""

    # Discriminator constant marking this as the fallback variant.
    type: Literal["UNKNOWN"] = "UNKNOWN"
    # The untouched payload as received from the server.
    raw: Any
    # Always True for this fallback type, so callers can detect it cheaply.
    is_unknown: Literal[True] = True

    # Frozen: instances are immutable snapshots of an unrecognized payload.
    model_config = ConfigDict(frozen=True)


# Maps the wire discriminator value to the concrete variant model class.
_CLASSIFIER_FINE_TUNING_JOB_INTEGRATION_VARIANTS: dict[str, Any] = {
    "wandb": WandbIntegrationResult,
}


# With a single known variant, the integration "union" collapses to that
# variant's model class.
ClassifierFineTuningJobIntegration = WandbIntegrationResult


class ClassifierFineTuningJobTypedDict(TypedDict):
    # Plain-dict mirror of the ClassifierFineTuningJob pydantic model; used
    # where callers pass/receive untyped dicts instead of model instances.
    id: str
    r"""The ID of the job."""
    # Whether the job starts automatically — presumably after validation;
    # TODO confirm against API docs.
    auto_start: bool
    # Name of the model being fine-tuned (base model identifier).
    model: str
    status: ClassifierFineTuningJobStatus
    r"""The current status of the fine-tuning job."""
    created_at: int
    r"""The UNIX timestamp (in seconds) for when the fine-tuning job was created."""
    modified_at: int
    r"""The UNIX timestamp (in seconds) for when the fine-tuning job was last modified."""
    training_files: List[str]
    r"""A list containing the IDs of uploaded files that contain training data."""
    # Training hyperparameters for the classifier job.
    hyperparameters: ClassifierTrainingParametersTypedDict
    validation_files: NotRequired[Nullable[List[str]]]
    r"""A list containing the IDs of uploaded files that contain validation data."""
    # Constant discriminator; always "job" (see the model's validate_const).
    object: Literal["job"]
    r"""The object type of the fine-tuning job."""
    fine_tuned_model: NotRequired[Nullable[str]]
    r"""The name of the fine-tuned model that is being created. The value will be `null` if the fine-tuning job is still running."""
    suffix: NotRequired[Nullable[str]]
    r"""Optional text/code that adds more context for the model. When given a `prompt` and a `suffix` the model will fill what is between them. When `suffix` is not provided, the model will simply execute completion starting with `prompt`."""
    integrations: NotRequired[
        Nullable[List[ClassifierFineTuningJobIntegrationTypedDict]]
    ]
    r"""A list of integrations enabled for your fine-tuning job."""
    trained_tokens: NotRequired[Nullable[int]]
    r"""Total number of tokens trained."""
    # Optional user-supplied metadata attached to the job.
    metadata: NotRequired[Nullable[JobMetadataTypedDict]]
    # Constant discriminator; always "classifier" for this job type.
    job_type: Literal["classifier"]
    r"""The type of job (`FT` for fine-tuning)."""


class ClassifierFineTuningJob(BaseModel):
    r"""A classifier fine-tuning job as returned by the API.

    Optional/nullable fields default to UNSET so the serializer below can
    distinguish \"never set\" (omitted from output) from \"explicitly set to
    None\" (serialized as JSON null).
    """

    id: str
    r"""The ID of the job."""

    auto_start: bool

    model: str

    status: ClassifierFineTuningJobStatus
    r"""The current status of the fine-tuning job."""

    created_at: int
    r"""The UNIX timestamp (in seconds) for when the fine-tuning job was created."""

    modified_at: int
    r"""The UNIX timestamp (in seconds) for when the fine-tuning job was last modified."""

    training_files: List[str]
    r"""A list containing the IDs of uploaded files that contain training data."""

    hyperparameters: ClassifierTrainingParameters

    validation_files: OptionalNullable[List[str]] = UNSET
    r"""A list containing the IDs of uploaded files that contain validation data."""

    object: Annotated[
        Annotated[Optional[Literal["job"]], AfterValidator(validate_const("job"))],
        pydantic.Field(alias="object"),
    ] = "job"
    r"""The object type of the fine-tuning job."""

    fine_tuned_model: OptionalNullable[str] = UNSET
    r"""The name of the fine-tuned model that is being created. The value will be `null` if the fine-tuning job is still running."""

    suffix: OptionalNullable[str] = UNSET
    r"""Optional text/code that adds more context for the model. When given a `prompt` and a `suffix` the model will fill what is between them. When `suffix` is not provided, the model will simply execute completion starting with `prompt`."""

    integrations: OptionalNullable[List[ClassifierFineTuningJobIntegration]] = UNSET
    r"""A list of integrations enabled for your fine-tuning job."""

    trained_tokens: OptionalNullable[int] = UNSET
    r"""Total number of tokens trained."""

    metadata: OptionalNullable[JobMetadata] = UNSET

    job_type: Annotated[
        Annotated[Literal["classifier"], AfterValidator(validate_const("classifier"))],
        pydantic.Field(alias="job_type"),
    ] = "classifier"
    r"""The type of job (`FT` for fine-tuning)."""

    @model_serializer(mode="wrap")
    def serialize_model(self, handler):
        """Serialize, honoring UNSET/null semantics per field.

        A field is emitted when it holds a real value, when it is required,
        or when it is nullable and the caller explicitly set it (even to
        None, which then serializes as JSON null). Fields left at the UNSET
        sentinel are dropped from the output entirely.
        """
        # Fields that may be omitted from the serialized payload.
        optional_fields = {
            "validation_files",
            "object",
            "fine_tuned_model",
            "suffix",
            "integrations",
            "trained_tokens",
            "metadata",
        }
        # Fields that serialize as an explicit JSON null when set to None.
        nullable_fields = {
            "validation_files",
            "fine_tuned_model",
            "suffix",
            "integrations",
            "trained_tokens",
            "metadata",
        }
        serialized = handler(self)
        m = {}

        for n, f in type(self).model_fields.items():
            k = f.alias or n
            # pydantic may key the serialized dict by alias or by name.
            val = serialized.get(k, serialized.get(n))
            # Membership test instead of building a one-element set and
            # intersecting on every iteration.
            is_nullable_and_explicitly_set = (
                k in nullable_fields
                and n in self.__pydantic_fields_set__  # pylint: disable=no-member
            )

            if val != UNSET_SENTINEL:
                if (
                    val is not None
                    or k not in optional_fields
                    or is_nullable_and_explicitly_set
                ):
                    m[k] = val

        return m


try:
    # Resolve forward references now that the dependent models are imported.
    ClassifierFineTuningJob.model_rebuild()
except NameError:
    # A referenced model isn't defined yet (e.g. partial/circular import);
    # presumably pydantic rebuilds lazily on first use — generated guard.
    pass
