"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
# @generated-id: 0330b8930f65

from __future__ import annotations
from mistralai.client.types import (
    BaseModel,
    Nullable,
    OptionalNullable,
    UNSET,
    UNSET_SENTINEL,
)
from mistralai.client.utils import validate_const
import pydantic
from pydantic import model_serializer
from pydantic.functional_validators import AfterValidator
from typing import Literal, Optional
from typing_extensions import Annotated, NotRequired, TypedDict


class LegacyJobMetadataTypedDict(TypedDict):
    r"""Dict-shaped mirror of ``LegacyJobMetadata`` for constructing the model from plain dicts."""

    # Required free-form details string; exact content is server-defined
    # (not documented in this file).
    details: str
    expected_duration_seconds: NotRequired[Nullable[int]]
    r"""The approximated time (in seconds) for the fine-tuning process to complete."""
    cost: NotRequired[Nullable[float]]
    r"""The cost of the fine-tuning job."""
    cost_currency: NotRequired[Nullable[str]]
    r"""The currency used for the fine-tuning job cost."""
    train_tokens_per_step: NotRequired[Nullable[int]]
    r"""The number of tokens consumed by one training step."""
    train_tokens: NotRequired[Nullable[int]]
    r"""The total number of tokens used during the fine-tuning process."""
    data_tokens: NotRequired[Nullable[int]]
    r"""The total number of tokens in the training dataset."""
    # NOTE(review): presumably a Unix timestamp in seconds — confirm against the API.
    estimated_start_time: NotRequired[Nullable[int]]
    # Flag marking this metadata shape as deprecated; the model class defaults it to True.
    deprecated: NotRequired[bool]
    epochs: NotRequired[Nullable[float]]
    r"""The number of complete passes through the entire training dataset."""
    training_steps: NotRequired[Nullable[int]]
    r"""The number of training steps to perform. A training step refers to a single update of the model weights during the fine-tuning process. This update is typically calculated using a batch of samples from the training dataset."""
    # Discriminator constant; always the literal "job.metadata".
    object: Literal["job.metadata"]

class LegacyJobMetadata(BaseModel):
    r"""Metadata for a legacy fine-tuning job (cost, token counts, timing).

    Serialization omits optional fields left unset, while preserving nullable
    fields that were explicitly assigned ``None`` (emitted as JSON null).
    """

    details: str

    expected_duration_seconds: OptionalNullable[int] = UNSET
    r"""The approximated time (in seconds) for the fine-tuning process to complete."""

    cost: OptionalNullable[float] = UNSET
    r"""The cost of the fine-tuning job."""

    cost_currency: OptionalNullable[str] = UNSET
    r"""The currency used for the fine-tuning job cost."""

    train_tokens_per_step: OptionalNullable[int] = UNSET
    r"""The number of tokens consumed by one training step."""

    train_tokens: OptionalNullable[int] = UNSET
    r"""The total number of tokens used during the fine-tuning process."""

    data_tokens: OptionalNullable[int] = UNSET
    r"""The total number of tokens in the training dataset."""

    estimated_start_time: OptionalNullable[int] = UNSET

    deprecated: Optional[bool] = True

    epochs: OptionalNullable[float] = UNSET
    r"""The number of complete passes through the entire training dataset."""

    training_steps: OptionalNullable[int] = UNSET
    r"""The number of training steps to perform. A training step refers to a single update of the model weights during the fine-tuning process. This update is typically calculated using a batch of samples from the training dataset."""

    object: Annotated[
        Annotated[
            Optional[Literal["job.metadata"]],
            AfterValidator(validate_const("job.metadata")),
        ],
        pydantic.Field(alias="object"),
    ] = "job.metadata"

    @model_serializer(mode="wrap")
    def serialize_model(self, handler):
        """Build the wire dict: skip UNSET values, drop unset optionals,
        but keep explicit ``None`` on nullable fields as JSON null."""
        # Fields that may be omitted from the payload entirely when unset.
        optional_fields = {
            "expected_duration_seconds",
            "cost",
            "cost_currency",
            "train_tokens_per_step",
            "train_tokens",
            "data_tokens",
            "estimated_start_time",
            "deprecated",
            "epochs",
            "training_steps",
            "object",
        }
        # Fields whose explicitly-assigned None must survive as null.
        nullable_fields = {
            "expected_duration_seconds",
            "cost",
            "cost_currency",
            "train_tokens_per_step",
            "train_tokens",
            "data_tokens",
            "estimated_start_time",
            "epochs",
            "training_steps",
        }
        serialized = handler(self)
        payload = {}

        for field_name, field_info in type(self).model_fields.items():
            key = field_info.alias or field_name
            # handler() may key the dump by alias or by field name; try both.
            val = serialized.get(key, serialized.get(field_name))
            # A nullable field assigned by the caller (even to None) is kept.
            explicitly_set = (
                key in nullable_fields
                and field_name in self.__pydantic_fields_set__  # pylint: disable=no-member
            )

            # UNSET means "never provided": always dropped. Otherwise keep the
            # value unless it is a None on an optional field that was not
            # explicitly set to null.
            if val != UNSET_SENTINEL and (
                val is not None or key not in optional_fields or explicitly_set
            ):
                payload[key] = val

        return payload


try:
    # Resolve any forward references now that all names in this module exist
    # (annotations are lazy strings due to `from __future__ import annotations`).
    LegacyJobMetadata.model_rebuild()
except NameError:
    # NOTE(review): a referenced type may be missing during partial imports;
    # skipping here appears intentional — pydantic can rebuild later on use.
    pass
