"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
# @generated-id: c60d2a45d66b

from __future__ import annotations
from .classifiertarget import ClassifierTarget, ClassifierTargetTypedDict
from .classifiertrainingparameters import (
    ClassifierTrainingParameters,
    ClassifierTrainingParametersTypedDict,
)
from .completiontrainingparameters import (
    CompletionTrainingParameters,
    CompletionTrainingParametersTypedDict,
)
from .creategithubrepositoryrequest import (
    CreateGithubRepositoryRequest,
    CreateGithubRepositoryRequestTypedDict,
)
from .finetuneablemodeltype import FineTuneableModelType
from .trainingfile import TrainingFile, TrainingFileTypedDict
from .wandbintegration import WandbIntegration, WandbIntegrationTypedDict
from mistralai.client.types import (
    BaseModel,
    Nullable,
    OptionalNullable,
    UNSET,
    UNSET_SENTINEL,
)
from pydantic import model_serializer
from typing import List, Optional, Union
from typing_extensions import NotRequired, TypeAliasType, TypedDict


# Integration aliases: Weights & Biases is currently the only supported
# integration type, so these resolve directly to the W&B models.
CreateFineTuningJobRequestIntegrationTypedDict = WandbIntegrationTypedDict


CreateFineTuningJobRequestIntegration = WandbIntegration


# Hyperparameters accept either classifier-style or completion-style
# training parameters, depending on the kind of fine-tuning job.
HyperparametersTypedDict = TypeAliasType(
    "HyperparametersTypedDict",
    Union[ClassifierTrainingParametersTypedDict, CompletionTrainingParametersTypedDict],
)


Hyperparameters = TypeAliasType(
    "Hyperparameters", Union[ClassifierTrainingParameters, CompletionTrainingParameters]
)


# Repository aliases: GitHub is currently the only supported repository
# source, so these resolve directly to the GitHub request models.
CreateFineTuningJobRequestRepositoryTypedDict = CreateGithubRepositoryRequestTypedDict


CreateFineTuningJobRequestRepository = CreateGithubRepositoryRequest


class CreateFineTuningJobRequestTypedDict(TypedDict):
    r"""TypedDict mirror of `CreateFineTuningJobRequest`.

    `model` and `hyperparameters` are required; all other keys are
    `NotRequired`. Keys typed `Nullable[...]` accept an explicit ``None``
    that is meaningful on the wire (distinct from omitting the key).
    """

    model: str
    hyperparameters: HyperparametersTypedDict
    training_files: NotRequired[List[TrainingFileTypedDict]]
    validation_files: NotRequired[Nullable[List[str]]]
    r"""A list containing the IDs of uploaded files that contain validation data. If you provide these files, the data is used to generate validation metrics periodically during fine-tuning. These metrics can be viewed in `checkpoints` when getting the status of a running fine-tuning job. The same data should not be present in both train and validation files."""
    suffix: NotRequired[Nullable[str]]
    r"""A string that will be added to your fine-tuning model name. For example, a suffix of \"my-great-model\" would produce a model name like `ft:open-mistral-7b:my-great-model:xxx...`"""
    integrations: NotRequired[
        Nullable[List[CreateFineTuningJobRequestIntegrationTypedDict]]
    ]
    r"""A list of integrations to enable for your fine-tuning job."""
    auto_start: NotRequired[bool]
    r"""This field will be required in a future release."""
    invalid_sample_skip_percentage: NotRequired[float]
    job_type: NotRequired[Nullable[FineTuneableModelType]]
    repositories: NotRequired[
        Nullable[List[CreateFineTuningJobRequestRepositoryTypedDict]]
    ]
    classifier_targets: NotRequired[Nullable[List[ClassifierTargetTypedDict]]]


class CreateFineTuningJobRequest(BaseModel):
    r"""Request payload for creating a fine-tuning job.

    ``model`` and ``hyperparameters`` are required. Fields typed
    ``OptionalNullable`` distinguish three states: not provided (``UNSET``),
    explicitly ``None`` (sent as ``null``), and a concrete value.
    """

    model: str

    hyperparameters: Hyperparameters

    training_files: Optional[List[TrainingFile]] = None

    validation_files: OptionalNullable[List[str]] = UNSET
    r"""A list containing the IDs of uploaded files that contain validation data. If you provide these files, the data is used to generate validation metrics periodically during fine-tuning. These metrics can be viewed in `checkpoints` when getting the status of a running fine-tuning job. The same data should not be present in both train and validation files."""

    suffix: OptionalNullable[str] = UNSET
    r"""A string that will be added to your fine-tuning model name. For example, a suffix of \"my-great-model\" would produce a model name like `ft:open-mistral-7b:my-great-model:xxx...`"""

    integrations: OptionalNullable[List[CreateFineTuningJobRequestIntegration]] = UNSET
    r"""A list of integrations to enable for your fine-tuning job."""

    auto_start: Optional[bool] = None
    r"""This field will be required in a future release."""

    invalid_sample_skip_percentage: Optional[float] = 0

    job_type: OptionalNullable[FineTuneableModelType] = UNSET

    repositories: OptionalNullable[List[CreateFineTuningJobRequestRepository]] = UNSET

    classifier_targets: OptionalNullable[List[ClassifierTarget]] = UNSET

    @model_serializer(mode="wrap")
    def serialize_model(self, handler):
        """Serialize the model, omitting unset optional fields.

        Optional fields whose value is ``None`` are dropped from the output
        unless they are nullable AND were explicitly assigned by the caller,
        in which case the explicit ``null`` is preserved on the wire.
        """
        # Fields that may be omitted entirely from the serialized payload.
        optional_fields = {
            "training_files",
            "validation_files",
            "suffix",
            "integrations",
            "auto_start",
            "invalid_sample_skip_percentage",
            "job_type",
            "repositories",
            "classifier_targets",
        }
        # Fields where an explicitly-set null must be kept in the output.
        nullable_fields = {
            "validation_files",
            "suffix",
            "integrations",
            "job_type",
            "repositories",
            "classifier_targets",
        }
        serialized = handler(self)
        m = {}
        fields_set = self.__pydantic_fields_set__  # pylint: disable=no-member

        for n, f in type(self).model_fields.items():
            # Prefer the wire alias when one is declared on the field.
            k = f.alias or n
            val = serialized.get(k, serialized.get(n))
            # A nullable field the caller assigned (possibly to None) must be
            # emitted even when its value serializes to None.
            is_nullable_and_explicitly_set = k in nullable_fields and n in fields_set

            # Skip UNSET sentinels outright; otherwise emit the value unless
            # it is a None in an optional field that was never explicitly set.
            if val != UNSET_SENTINEL and (
                val is not None
                or k not in optional_fields
                or is_nullable_and_explicitly_set
            ):
                m[k] = val

        return m
