
from collections.abc import Callable

import torch
import torch.nn as nn

from ...activations import ACT2FN
from ...cache_utils import Cache, DynamicCache
from ...configuration_utils import PreTrainedConfig, layer_type_validation
from ...masking_utils import create_causal_mask, create_sliding_window_causal_mask
from ...modeling_flash_attention_utils import FlashAttentionKwargs
from ...modeling_layers import GradientCheckpointingLayer
from ...modeling_outputs import BaseModelOutputWithPast, CausalLMOutputWithPast
from ...modeling_rope_utils import ROPE_INIT_FUNCTIONS, RopeParameters, dynamic_rope_update
from ...modeling_utils import ALL_ATTENTION_FUNCTIONS
from ...processing_utils import Unpack
from ...utils import TransformersKwargs, logging
from ...utils.generic import maybe_autocast
from ..gemma.modeling_gemma import (
    GemmaAttention,
    GemmaForCausalLM,
    GemmaForSequenceClassification,
    GemmaForTokenClassification,
    GemmaMLP,
    GemmaModel,
    GemmaPreTrainedModel,
    GemmaRMSNorm,
    GemmaRotaryEmbedding,
    apply_rotary_pos_emb,
    repeat_kv,
)


logger = logging.get_logger(__name__)


class Gemma2Config(PreTrainedConfig):
    r"""
    This is the configuration class to store the configuration of a [`Gemma2Model`]. It is used to instantiate a Gemma2
    model according to the specified arguments, defining the model architecture. Instantiating a configuration with the
    defaults will yield a similar configuration to that of the Gemma2-7B.
    e.g. [google/gemma2-7b](https://huggingface.co/google/gemma2-7b)

    Configuration objects inherit from [`PreTrainedConfig`] and can be used to control the model outputs. Read the
    documentation from [`PreTrainedConfig`] for more information.

    Args:
        vocab_size (`int`, *optional*, defaults to 256000):
            Vocabulary size of the Gemma2 model. Defines the number of different tokens that can be represented by the
            `input_ids` passed when calling [`Gemma2Model`].
        hidden_size (`int`, *optional*, defaults to 2304):
            Dimension of the hidden representations.
        intermediate_size (`int`, *optional*, defaults to 9216):
            Dimension of the MLP representations.
        num_hidden_layers (`int`, *optional*, defaults to 26):
            Number of hidden layers in the Transformer decoder.
        num_attention_heads (`int`, *optional*, defaults to 8):
            Number of attention heads for each attention layer in the Transformer decoder.
        num_key_value_heads (`int`, *optional*, defaults to 4):
            This is the number of key_value heads that should be used to implement Grouped Query Attention. If
            `num_key_value_heads=num_attention_heads`, the model will use Multi Head Attention (MHA), if
            `num_key_value_heads=1` the model will use Multi Query Attention (MQA) otherwise GQA is used. When
            converting a multi-head checkpoint to a GQA checkpoint, each group key and value head should be constructed
            by meanpooling all the original heads within that group. For more details, check out [this
            paper](https://huggingface.co/papers/2305.13245). If it is not specified, will default to
            `num_attention_heads`.
        head_dim (`int`, *optional*, defaults to 256):
            The attention head dimension.
        hidden_activation (`str` or `function`, *optional*, defaults to `"gelu_pytorch_tanh"`):
            The non-linear activation function (function or string) in the decoder. Will default to `"gelu_pytorch_tanh"`
            if not specified. `"gelu_pytorch_tanh"` uses an approximation of the `"gelu"` activation function.
        max_position_embeddings (`int`, *optional*, defaults to 8192):
            The maximum sequence length that this model might ever be used with.
        initializer_range (`float`, *optional*, defaults to 0.02):
            The standard deviation of the truncated_normal_initializer for initializing all weight matrices.
        rms_norm_eps (`float`, *optional*, defaults to 1e-06):
            The epsilon used by the rms normalization layers.
        use_cache (`bool`, *optional*, defaults to `True`):
            Whether or not the model should return the last key/values attentions (not used by all models). Only
            relevant if `config.is_decoder=True`.
        pad_token_id (`int`, *optional*, defaults to 0):
            Padding token id.
        eos_token_id (`int`, *optional*, defaults to 1):
            End of stream token id.
        bos_token_id (`int`, *optional*, defaults to 2):
            Beginning of stream token id.
        tie_word_embeddings (`bool`, *optional*, defaults to `True`):
            Whether to tie weight embeddings
        rope_parameters (`RopeParameters`, *optional*):
            Dictionary containing the configuration parameters for the RoPE embeddings. The dictionary should contain
            a value for `rope_theta` and optionally parameters used for scaling in case you want to use RoPE
            with longer `max_position_embeddings`.
        attention_bias (`bool`, *optional*, defaults to `False`):
            Whether to use a bias in the query, key, value and output projection layers during self-attention.
        attention_dropout (`float`, *optional*, defaults to 0.0):
            The dropout ratio for the attention probabilities.
        query_pre_attn_scalar (`float`, *optional*, defaults to 256):
            Scaling factor used on the attention scores: the query states are scaled by `query_pre_attn_scalar**-0.5`
            rather than the usual `head_dim**-0.5`.
        sliding_window (`int`, *optional*, defaults to 4096):
            In Gemma2, every other layer uses sliding window attention; this is the size of that sliding window.
        layer_types (`list`, *optional*):
            Attention pattern for each layer.
        final_logit_softcapping (`float`, *optional*, defaults to 30.0):
            Scaling factor used when applying tanh softcapping to the final logits.
        attn_logit_softcapping (`float`, *optional*, defaults to 50.0):
            Scaling factor used when applying tanh softcapping to the attention scores.
        use_bidirectional_attention (`bool`, *optional*):
            If True, the model will attend to all text tokens instead of using a causal mask.

    ```python
    >>> from transformers import Gemma2Model, Gemma2Config
    >>> # Initializing a Gemma2 gemma2-7b style configuration
    >>> configuration = Gemma2Config()
    >>> # Initializing a model from the gemma2-7b style configuration
    >>> model = Gemma2Model(configuration)
    >>> # Accessing the model configuration
    >>> configuration = model.config
    ```"""

    model_type = "gemma2"
    keys_to_ignore_at_inference = ["past_key_values"]
    base_model_tp_plan = {
        "layers.*.self_attn.q_proj": "colwise",
        "layers.*.self_attn.k_proj": "colwise",
        "layers.*.self_attn.v_proj": "colwise",
        "layers.*.self_attn.o_proj": "rowwise",
        "layers.*.mlp.gate_proj": "colwise",
        "layers.*.mlp.up_proj": "colwise",
        "layers.*.mlp.down_proj": "rowwise",
    }
    base_model_pp_plan = {
        "embed_tokens": (["input_ids"], ["inputs_embeds"]),
        "layers": (["hidden_states", "attention_mask"], ["hidden_states"]),
        "norm": (["hidden_states"], ["hidden_states"]),
    }
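    # The TP/PP plans above mirror the sharding layout used across the Gemma family:
    # attention q/k/v and MLP gate/up projections are split column-wise, while the
    # attention output and MLP down projections are split row-wise.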
    def __init__(
        self,
        vocab_size: int | None = 256000,
        hidden_size: int | None = 2304,
        intermediate_size: int | None = 9216,
        num_hidden_layers: int | None = 26,
        num_attention_heads: int | None = 8,
        num_key_value_heads: int | None = 4,
        head_dim: int | None = 256,
        hidden_activation: str | None = "gelu_pytorch_tanh",
        max_position_embeddings: int | None = 8192,
        initializer_range: float | None = 0.02,
        rms_norm_eps: float | None = 1e-6,
        use_cache: bool | None = True,
        pad_token_id: int | None = 0,
        eos_token_id: int | None = 1,
        bos_token_id: int | None = 2,
        tie_word_embeddings: bool | None = True,
        rope_parameters: RopeParameters | dict[str, RopeParameters] | None = None,
        attention_bias: bool | None = False,
        attention_dropout: float | None = 0.0,
        query_pre_attn_scalar: float | None = 256,
        sliding_window: int | None = 4096,
        layer_types: list[str] | None = None,
        final_logit_softcapping: float | None = 30.0,
        attn_logit_softcapping: float | None = 50.0,
        use_bidirectional_attention: bool | None = None,
        **kwargs,
    ):
        self.vocab_size = vocab_size
        self.max_position_embeddings = max_position_embeddings
        self.hidden_size = hidden_size
        self.intermediate_size = intermediate_size
        self.num_hidden_layers = num_hidden_layers
        self.num_attention_heads = num_attention_heads
        self.head_dim = head_dim
        self.num_key_value_heads = num_key_value_heads
        self.initializer_range = initializer_range
        self.rms_norm_eps = rms_norm_eps
        self.use_cache = use_cache
        self.rope_parameters = rope_parameters
        self.attention_bias = attention_bias
        self.attention_dropout = attention_dropout
        self.hidden_activation = hidden_activation
        self.query_pre_attn_scalar = query_pre_attn_scalar
        self.sliding_window = sliding_window
        self.final_logit_softcapping = final_logit_softcapping
        self.attn_logit_softcapping = attn_logit_softcapping
        self.layer_types = layer_types
        self.use_bidirectional_attention = use_bidirectional_attention

        if self.layer_types is None:
            # Odd layers (1-indexed) use sliding window attention, even layers use full attention.
            self.layer_types = [
                "sliding_attention" if bool((i + 1) % 2) else "full_attention" for i in range(self.num_hidden_layers)
            ]
        layer_type_validation(self.layer_types, self.num_hidden_layers)

        super().__init__(
            pad_token_id=pad_token_id,
            bos_token_id=bos_token_id,
            eos_token_id=eos_token_id,
            tie_word_embeddings=tie_word_embeddings,
            **kwargs,
        )


class Gemma2RMSNorm(GemmaRMSNorm):
    pass


class Gemma2MLP(GemmaMLP):
    def __init__(self, config):
        super().__init__(config)
        self.act_fn = ACT2FN[config.hidden_activation]


class Gemma2RotaryEmbedding(GemmaRotaryEmbedding):
    def __init__(self, config: Gemma2Config, device=None):
        # Skip GemmaRotaryEmbedding.__init__ and rebuild the buffers directly.
        nn.Module.__init__(self)
        self.max_seq_len_cached = config.max_position_embeddings
        self.original_max_seq_len = config.max_position_embeddings
        self.config = config
        self.rope_type = self.config.rope_parameters["rope_type"]

        rope_init_fn = self.compute_default_rope_parameters
        if self.rope_type != "default":
            rope_init_fn = ROPE_INIT_FUNCTIONS[self.rope_type]
        inv_freq, self.attention_scaling = rope_init_fn(self.config, device)

        self.register_buffer("inv_freq", inv_freq, persistent=False)
        self.register_buffer("original_inv_freq", inv_freq.clone(), persistent=False)

    @torch.no_grad()
    @dynamic_rope_update
    def forward(self, x, position_ids):
        inv_freq_expanded = self.inv_freq[None, :, None].float().expand(position_ids.shape[0], -1, 1).to(x.device)
        position_ids_expanded = position_ids[:, None, :].float()

        device_type = x.device.type if isinstance(x.device.type, str) and x.device.type != "mps" else "cpu"
        with maybe_autocast(device_type=device_type, enabled=False):
            freqs = (inv_freq_expanded.float() @ position_ids_expanded.float()).transpose(1, 2)
            emb = torch.cat((freqs, freqs), dim=-1)
            cos = emb.cos() * self.attention_scaling
            sin = emb.sin() * self.attention_scaling

        return cos.to(dtype=x.dtype), sin.to(dtype=x.dtype)


def eager_attention_forward(
    module: nn.Module,
    query: torch.Tensor,
    key: torch.Tensor,
    value: torch.Tensor,
    attention_mask: torch.Tensor | None,
    dropout: float = 0.0,
    scaling: float | None = None,
    softcap: float | None = None,
    **kwargs,
) -> tuple[torch.Tensor, torch.Tensor]:
    if scaling is None:
        scaling = module.head_dim**-0.5

    # Expand the key/value heads to match the number of query heads (GQA).
    key_states = repeat_kv(key, module.num_key_value_groups)
    value_states = repeat_kv(value, module.num_key_value_groups)

    attn_weights = torch.matmul(query, key_states.transpose(2, 3)) * scaling

    if softcap is not None:
        attn_weights = attn_weights / softcap
        attn_weights = torch.tanh(attn_weights)
        attn_weights = attn_weights * softcap
    if attention_mask is not None:
        causal_mask = attention_mask[:, :, :, : key_states.shape[-2]]
        attn_weights = attn_weights + causal_mask

    attn_weights = nn.functional.softmax(attn_weights, dim=-1, dtype=torch.float32).to(query.dtype)
    attn_weights = nn.functional.dropout(attn_weights, p=dropout, training=module.training)
    attn_output = torch.matmul(attn_weights, value_states)
    attn_output = attn_output.transpose(1, 2).contiguous()
    return attn_output, attn_weights

class Gemma2Attention(GemmaAttention):
    def __init__(self, config: Gemma2Config, layer_idx: int):
        self.layer_type = config.layer_types[layer_idx] if hasattr(config, "layer_types") else None
        super().__init__(config, layer_idx)
        self.attn_logit_softcapping = self.config.attn_logit_softcapping
        self.attention_dropout = self.config.attention_dropout
        self.is_causal = not getattr(config, "use_bidirectional_attention", False)
        self.scaling = config.query_pre_attn_scalar**-0.5
        self.sliding_window = config.sliding_window if self.layer_type == "sliding_attention" else None

    def forward(
        self,
        hidden_states: torch.Tensor,
        position_embeddings: tuple[torch.Tensor, torch.Tensor] | None = None,
        attention_mask: torch.Tensor | None = None,
        past_key_values: Cache | None = None,
        cache_position: torch.LongTensor | None = None,
        **kwargs: Unpack[FlashAttentionKwargs],
    ) -> tuple[torch.Tensor, torch.Tensor | None]:
        input_shape = hidden_states.shape[:-1]
        hidden_shape = (*input_shape, -1, self.head_dim)

        query_states = self.q_proj(hidden_states).view(hidden_shape).transpose(1, 2)
        key_states = self.k_proj(hidden_states).view(hidden_shape).transpose(1, 2)
        value_states = self.v_proj(hidden_states).view(hidden_shape).transpose(1, 2)

        cos, sin = position_embeddings
        query_states, key_states = apply_rotary_pos_emb(query_states, key_states, cos, sin)

        if past_key_values is not None:
            cache_kwargs = {"sin": sin, "cos": cos, "cache_position": cache_position}
            key_states, value_states = past_key_values.update(key_states, value_states, self.layer_idx, cache_kwargs)

        attention_interface: Callable = eager_attention_forward
        if self.config._attn_implementation != "eager":
            attention_interface = ALL_ATTENTION_FUNCTIONS[self.config._attn_implementation]

        attn_output, attn_weights = attention_interface(
            self,
            query_states,
            key_states,
            value_states,
            attention_mask,
            dropout=self.attention_dropout if self.training else 0.0,
            scaling=self.scaling,
            sliding_window=self.sliding_window,
            softcap=self.attn_logit_softcapping,
            **kwargs,
        )

        attn_output = attn_output.reshape(*input_shape, -1).contiguous()
        attn_output = self.o_proj(attn_output)
        return attn_output, attn_weights


class Gemma2DecoderLayer(GradientCheckpointingLayer):
    def __init__(self, config: Gemma2Config, layer_idx: int):
        super().__init__()
        self.hidden_size = config.hidden_size
        self.config = config
        self.attention_type = config.layer_types[layer_idx]
        self.self_attn = Gemma2Attention(config=config, layer_idx=layer_idx)
        self.mlp = Gemma2MLP(config)
        self.input_layernorm = Gemma2RMSNorm(config.hidden_size, eps=config.rms_norm_eps)
        self.post_attention_layernorm = Gemma2RMSNorm(config.hidden_size, eps=config.rms_norm_eps)
        self.pre_feedforward_layernorm = Gemma2RMSNorm(config.hidden_size, eps=config.rms_norm_eps)
        self.post_feedforward_layernorm = Gemma2RMSNorm(config.hidden_size, eps=config.rms_norm_eps)

    def forward(
        self,
        hidden_states: torch.Tensor,
        position_embeddings: tuple[torch.Tensor, torch.Tensor] | None = None,
        attention_mask: torch.Tensor | None = None,
        position_ids: torch.LongTensor | None = None,
        past_key_values: Cache | None = None,
        cache_position: torch.LongTensor | None = None,
        **kwargs,
    ) -> tuple[torch.FloatTensor, tuple[torch.FloatTensor, torch.FloatTensor] | None]:
        residual = hidden_states
        hidden_states = self.input_layernorm(hidden_states)
        hidden_states, _ = self.self_attn(
            hidden_states=hidden_states,
            position_embeddings=position_embeddings,
            attention_mask=attention_mask,
            position_ids=position_ids,
            past_key_values=past_key_values,
            cache_position=cache_position,
            **kwargs,
        )
        # Gemma 2 normalizes both before and after the attention and MLP blocks.
        hidden_states = self.post_attention_layernorm(hidden_states)
        hidden_states = residual + hidden_states

        residual = hidden_states
        hidden_states = self.pre_feedforward_layernorm(hidden_states)
        hidden_states = self.mlp(hidden_states)
        hidden_states = self.post_feedforward_layernorm(hidden_states)
        hidden_states = residual + hidden_states
        return hidden_states

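# Gemma 2 alternates "sliding_attention" and "full_attention" layers; the model
# below builds one mask per attention type and each decoder layer picks the one
# matching its `attention_type`.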
dz  d
ej                  dz  dee   defdZ xZS )Gemma2Modelrx   c           	          t         |   |       t        j                  t	        |j
                        D cg c]  }t        ||       c}      | _        t        |      | _	        y c c}w ru   )
rQ   rR   r   
ModuleListrO   r4   r   r/   rz   
rotary_embr   s      rW   rR   zGemma2Model.__init__  sU     mmDI&JbJbDcdy	2d
 07 es   A'Nr*   r-   r   r'   r+   r<   r   rT   r   c           
         |d u |d uz  rt        d      || j                  |      }|r|t        | j                        }|F||j	                         nd}	t        j                  |	|	|j                  d   z   |j                        }||j                  d      }t        |x}
t              s*| j                  |||||d}t        di |t        di |d}
|}| j                  ||      }t        j                  | j                  j                   dz  |j"                  	      }||z  }| j$                  d | j                  j&                   D ]  } ||f|
|j(                     ||||d
|}  | j+                  |      }t-        ||      S )Nz:You must specify exactly one of input_ids or inputs_embeds)rx   r   rK   )r   )rx   r+   r-   r   r'   r   )rM   rL   g      ?r   )r-   r   r   r'   r   )last_hidden_stater'   rN   )
ValueErrorr.   r   rx   get_seq_lengthr   aranger   r   	unsqueezer   ri   r
   r   r   tensorr2   r   r/   r4   r   r0   r   )rS   r*   r-   r   r'   r+   r<   r   rT   past_seen_tokenscausal_mask_mappingmask_kwargsr,   r   
normalizerdecoder_layers                   rW   r   zGemma2Model.forward  s    -t";<YZZ *.*;*;I*FM0*$++>O!CRC^==?de"\\ "2]5H5H5K"KTaThThN )33A6L ?-F ++!."0"0#2 ,K #5"C{"C%F%U%U# &"oom\J
 \\$++"9"93">mFYFYZ
%
2![[)H4;;+H+HI 		M)2=3O3OP$7) /- M		 		-0&++
 	
rm   )NNNNNNN)r^   r_   r`   r%   rR   r   r   r   r   r   rP   r   r   r   r   rk   rl   s   @rW   r   r     s    8| 8 .2.204(,26!%26H
##d*H
 t+H
 &&-	H

 H
 ((4/H
 $;H
 ((4/H
 +,H
 
!H
rm   r   c                   .    e Zd Z fdZ	 	 	 	 	 	 	 	 	 ddej
                  dz  dej                  dz  dej
                  dz  dedz  dej                  dz  dej
                  dz  d	e	dz  d
ej
                  dz  de
ej                  z  dee   defdZ xZS )Gemma2ForCausalLMc                 d    t         |   |       t        |      | _        | j	                          y ru   )rQ   rR   r   model	post_initrw   s     rW   rR   zGemma2ForCausalLM.__init__  s&      (
rm   Nr*   r-   r   r'   r+   labelsr<   r   logits_to_keeprT   r   c
                     | j                   d|||||||d|
}|j                  }t        |	t              rt	        |	 d      n|	}| j                  |dd|ddf         }| j                  j                  G|| j                  j                  z  }t        j                  |      }|| j                  j                  z  }d}| | j                  ||| j                  fi |
}t        |||j                  |j                  |j                        S )a  
        Example:

        ```python
        >>> from transformers import AutoTokenizer, Gemma2ForCausalLM

        >>> model = Gemma2ForCausalLM.from_pretrained("google/gemma-2-9b")
        >>> tokenizer = AutoTokenizer.from_pretrained("google/gemma-2-9b")

        >>> prompt = "What is your favorite condiment?"
        >>> inputs = tokenizer(prompt, return_tensors="pt")

        >>> # Generate
        >>> generate_ids = model.generate(inputs.input_ids, max_length=30)
        >>> tokenizer.batch_decode(generate_ids, skip_special_tokens=True, clean_up_tokenization_spaces=False)[0]
        "What is your favorite condiment?"
        ```)r*   r-   r   r'   r+   r<   r   N)losslogitsr'   r,   
attentionsrN   )r   r   r   rf   slicelm_headrx   rG   r   r   loss_functionr1   r   r'   r,   r  )rS   r*   r-   r   r'   r+   r  r<   r   r  rT   outputsr,   slice_indicesr  r  s                   rW   r   zGemma2ForCausalLM.forward  s   > ,64:: 	,
)%+')	,
 	,
  118B>SV8W~ot4]kmA}a,?@A;;..:dkkAAAFZZ'FdkkAAAF%4%%ffdooPPD%#33!//))
 	
rm   )	NNNNNNNNr   )r^   r_   r`   rR   r   r   r   r   r   rP   rf   r   r   r   r   rk   rl   s   @rW   r   r     s     .2.204(,26*.!%26-.=
##d*=
 t+=
 &&-	=

 =
 ((4/=
   4'=
 $;=
 ((4/=
 ell*=
 +,=
 
 =
rm   r   c                       e Zd Zy)Gemma2ForSequenceClassificationNrp   rN   rm   rW   r  r  /  rq   rm   r  c                       e Zd Zy)Gemma2ForTokenClassificationNrp   rN   rm   rW   r  r  3  rq   rm   r  )r%   r   r   r   r  r  )r]   NN)Dcollections.abcr   r   torch.nnr   activationsr   cache_utilsr   r   configuration_utilsr   r	   masking_utilsr
   r   modeling_flash_attention_utilsr   modeling_layersr   modeling_outputsr   r   modeling_rope_utilsr   r   r   modeling_utilsr   processing_utilsr   utilsr   r   utils.genericr   gemma.modeling_gemmar   r   r   r   r   r   r   r    r!   r"   r#   
get_loggerr^   loggerr%   ro   rs   rz   r   r   rh   r   r   r   r   r   r   r   r  r  __all__rN   rm   rW   <module>r#     s   %   ! . J R B 9 O 
 6 & 0 +    
		H	%a## a#H	L 	7 7<0 <N   %II%<<% 
% <<	%
 LL4'% % T\% T\% 5<<%&%D6)n 6)r/3 /d	0 	P
* P
fC
( C
L	&D 		#> 	rm   