Source code for haive.core.models.llm.providers.mistral
"""Mistral AI Provider Module.This module implements the Mistral AI language model provider for the Haive framework,supporting Mistral's family of high-performance open and commercial language models.The provider handles API key management, model configuration, and safe imports ofthe langchain-mistralai package dependencies.Examples: Basic usage: .. code-block:: python from haive.core.models.llm.providers.mistral import MistralProvider provider = MistralProvider( model="mistral-large-latest", temperature=0.7, max_tokens=1000 ) llm = provider.instantiate() With function calling:: provider = MistralProvider( model="mistral-large-latest", temperature=0.1, max_tokens=2000 ).. autosummary:: :toctree: generated/ MistralProvider"""fromtypingimportAnyfrompydanticimportFieldfromhaive.core.models.llm.provider_typesimportLLMProviderfromhaive.core.models.llm.providers.baseimportBaseLLMProvider,ProviderImportError
class MistralProvider(BaseLLMProvider):
    """Mistral AI language model provider configuration.

    This provider supports Mistral's family of models including Mistral Large,
    Mistral Medium, Mistral Small, and the open Mixtral models.

    Attributes:
        provider (LLMProvider): Always LLMProvider.MISTRALAI.
        model (str): The Mistral model to use.
        temperature (float): Sampling temperature (0.0-1.0).
        max_tokens (int): Maximum tokens in response.
        top_p (float): Nucleus sampling parameter.
        random_seed (int): Seed for reproducible generation.
        safe_mode (bool): Enable content filtering.

    Examples:
        Large model for complex tasks:

        .. code-block:: python

            provider = MistralProvider(
                model="mistral-large-latest",
                temperature=0.7,
                max_tokens=2000
            )

        Small model for fast inference::

            provider = MistralProvider(
                model="mistral-small-latest",
                temperature=0.1,
                max_tokens=500
            )
    """

    provider: LLMProvider = Field(
        default=LLMProvider.MISTRALAI, description="Provider identifier"
    )

    # Mistral model parameters
    temperature: float | None = Field(
        default=None,
        ge=0,
        le=1,
        description="Sampling temperature (0.0-1.0 for Mistral)",
    )
    max_tokens: int | None = Field(
        default=None, ge=1, description="Maximum tokens in response"
    )
    top_p: float | None = Field(
        default=None, ge=0, le=1, description="Nucleus sampling parameter"
    )
    random_seed: int | None = Field(
        default=None, description="Seed for reproducible generation"
    )
    safe_mode: bool = Field(default=False, description="Enable content filtering")

    def _get_chat_class(self) -> type[Any]:
        """Get the Mistral chat class."""
        try:
            from langchain_mistralai import ChatMistralAI

            return ChatMistralAI
        except ImportError as e:
            raise ProviderImportError(
                provider=self.provider.value,
                package=self._get_import_package(),
                message=(
                    "Mistral AI requires langchain-mistralai. "
                    "Install with: pip install langchain-mistralai"
                ),
            ) from e

    def _get_default_model(self) -> str:
        """Get the default Mistral model."""
        return "mistral-large-latest"

    def _get_import_package(self) -> str:
        """Get the required package name."""
        return "langchain-mistralai"

    def _get_initialization_params(self, **kwargs) -> dict[str, Any]:
        """Get Mistral-specific initialization parameters."""
        params = {
            "model": self.model,
            **kwargs,
        }

        # Add model parameters if specified
        if self.temperature is not None:
            params["temperature"] = self.temperature
        if self.max_tokens is not None:
            params["max_tokens"] = self.max_tokens
        if self.top_p is not None:
            params["top_p"] = self.top_p
        if self.random_seed is not None:
            params["random_seed"] = self.random_seed
        # safe_mode is a non-optional bool, so this check is always true and
        # the flag is always forwarded (defaulting to False)
        if self.safe_mode is not None:
            params["safe_mode"] = self.safe_mode

        # Add API key
        api_key = self.get_api_key()
        if api_key:
            params["mistral_api_key"] = api_key

        # Add extra params
        params.update(self.extra_params or {})

        return params

    def _get_env_key_name(self) -> str:
        """Get the environment variable name for API key."""
        return "MISTRAL_API_KEY"
    @classmethod
    def get_models(cls) -> list[str]:
        """Get available Mistral models."""
        return [
            "mistral-large-latest",
            "mistral-medium-latest",
            "mistral-small-latest",
            "mixtral-8x7b-instruct",
            "mistral-7b-instruct",
            "codestral-latest",
        ]
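
# A minimal usage sketch, assuming a valid MISTRAL_API_KEY is set in the
# environment and the optional langchain-mistralai package is installed.
# The __main__ guard keeps the demo out of normal imports; instantiate() is
# the entry point shown in the module docstring.
if __name__ == "__main__":
    import os

    print("Known models:", MistralProvider.get_models())

    if os.getenv("MISTRAL_API_KEY"):
        provider = MistralProvider(
            model="mistral-small-latest",
            temperature=0.2,
            max_tokens=256,
        )
        llm = provider.instantiate()  # builds a ChatMistralAI client
        print(type(llm).__name__)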