Ë
    ±q±iC  ã                   ó†   — d Z ddlmZ ddlmZ ddlmZ ddlmZ ddl	m
Z
 ddlmZ e G d„ d	e«      «       Z G d
„ de
«      Zy)z;OLLama LLM service implementation for Pipecat AI framework.é    )Ú	dataclass)ÚOptional)Úlogger)ÚOpenAILLMSettings)ÚOpenAILLMService)Ú_warn_deprecated_paramc                   ó   — e Zd ZdZy)ÚOllamaLLMSettingszSettings for OLLamaLLMService.N)Ú__name__Ú
__module__Ú__qualname__Ú__doc__© ó    úM/opt/pipecat/venv/lib/python3.12/site-packages/pipecat/services/ollama/llm.pyr
   r
      s   „ á(àr   r
   c                   ób   ‡ — e Zd ZU dZeZeed<   ddddœdee   dedee   fˆ fd	„Z	dˆ fd
„	Z
ˆ xZS )ÚOLLamaLLMServicezîOLLama LLM service that provides local language model capabilities.

    This service extends OpenAILLMService to work with locally hosted OLLama models,
    providing a compatible interface for running large language models locally.
    Ú	_settingsNzhttp://localhost:11434/v1)ÚmodelÚbase_urlÚsettingsr   r   r   c                óž   •— t        d¬«      }|t        dt         d«       ||_        ||j                  |«       t	        ‰|   d|d|dœ|¤Ž y)aO  Initialize OLLama LLM service.

        Args:
            model: The OLLama model to use. Defaults to "llama2".

                .. deprecated:: 0.0.105
                    Use ``settings=OpenAILLMSettings(model=...)`` instead.

            base_url: The base URL for the OLLama API endpoint.
                    Defaults to "http://localhost:11434/v1".
            settings: Runtime-updatable settings. When provided alongside deprecated
                parameters, ``settings`` values take precedence.
            **kwargs: Additional keyword arguments passed to OpenAILLMService.
        Úllama2)r   Nr   Úollama)r   Úapi_keyr   r   )r
   r   r   Úapply_updateÚsuperÚ__init__)Úselfr   r   r   ÚkwargsÚdefault_settingsÚ	__class__s         €r   r   zOLLamaLLMService.__init__$   s_   ø€ ô. -°8Ô<Ðð ÐÜ" 7Ô,=¸wÔGØ%*ÐÔ"ð
 ÐØ×)Ñ)¨(Ô3ä‰ÑÐb (°HÐGWÑbÐ[aÓbr   c                 óV   •— t        j                  d|› «       t        ‰|   dd|i|¤ŽS )aC  Create OpenAI-compatible client for Ollama.

        Args:
            base_url: The base URL for the API. If None, uses instance base_url.
            **kwargs: Additional keyword arguments passed to the parent create_client method.

        Returns:
            An OpenAI-compatible client configured for Ollama.
        z Creating Ollama client with api r   r   )r   Údebugr   Úcreate_client)r   r   r    r"   s      €r   r%   zOLLamaLLMService.create_clientJ   s1   ø€ ô 	‰Ð7¸°zÐBÔCÜ‰wÑ$ÑA¨hÐA¸&ÑAÐAr   )N)r   r   r   r   r
   ÚSettingsÚ__annotations__r   Ústrr   r%   Ú__classcell__)r"   s   @r   r   r      s^   ø… ñð !€HØ Ó ð
  $Ø3Ø04ò$cð ˜‰}ð$cð ð	$cð
 Ð,Ñ-õ$c÷LBñ Br   r   N)r   Údataclassesr   Útypingr   Úlogurur   Ú pipecat.services.openai.base_llmr   Úpipecat.services.openai.llmr   Úpipecat.services.settingsr   r
   r   r   r   r   ú<module>r0      sH   ðñ Bå !Ý å å >Ý 8Ý <ð ô	Ð)ó 	ó ð	ô;BÐ'õ ;Br   