
from collections.abc import Callable
from typing import Optional

import torch
from torch import nn

from ...activations import ACT2FN
from ...cache_utils import Cache, DynamicCache
from ...generation import GenerationMixin
from ...integrations import use_kernelized_func
from ...masking_utils import create_causal_mask
from ...modeling_flash_attention_utils import FlashAttentionKwargs
from ...modeling_layers import GradientCheckpointingLayer
from ...modeling_outputs import BaseModelOutputWithPast, CausalLMOutputWithPast
from ...modeling_rope_utils import ROPE_INIT_FUNCTIONS, dynamic_rope_update
from ...modeling_utils import ALL_ATTENTION_FUNCTIONS, PreTrainedModel
from ...processing_utils import Unpack
from ...utils import TransformersKwargs, auto_docstring, can_return_tuple
from ...utils.generic import maybe_autocast, merge_with_config_defaults
from ...utils.output_capturing import capture_outputs
from .configuration_cohere import CohereConfig


class CohereLayerNorm(nn.Module):
    def __init__(self, hidden_size=None, eps=1e-5, bias=False):
        """The hidden size can be a tuple or an int. The tuple is used for QKNorm to normalize across head_dim"""
        super().__init__()
        self.weight = nn.Parameter(torch.ones(hidden_size))
        self.variance_epsilon = eps

    def forward(self, hidden_states):
        input_dtype = hidden_states.dtype
        hidden_states = hidden_states.to(torch.float32)
        mean = hidden_states.mean(-1, keepdim=True)
        variance = (hidden_states - mean).pow(2).mean(-1, keepdim=True)
        hidden_states = (hidden_states - mean) * torch.rsqrt(variance + self.variance_epsilon)
        hidden_states = self.weight.to(torch.float32) * hidden_states
        return hidden_states.to(input_dtype)
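

# Illustrative sketch (not part of the original module): mean and variance are
# taken over the last dimension only, while a tuple-shaped `hidden_size` gives
# the learned scale a per-head shape for QK-norm. Sizes below are arbitrary
# assumptions, not values from the library.
#
#     norm = CohereLayerNorm(hidden_size=(8, 64), eps=1e-5)  # (num_heads, head_dim)
#     q = torch.randn(2, 16, 8, 64)                          # (batch, seq, heads, head_dim)
#     normed = norm(q)                                       # stats over head_dim; weight broadcast per head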


class CohereRotaryEmbedding(nn.Module):
    inv_freq: torch.Tensor

    def __init__(self, config: CohereConfig, device=None):
        super().__init__()
        self.max_seq_len_cached = config.max_position_embeddings
        self.original_max_seq_len = config.max_position_embeddings
        self.config = config
        self.rope_type = self.config.rope_parameters["rope_type"]

        rope_init_fn = self.compute_default_rope_parameters
        if self.rope_type != "default":
            rope_init_fn = ROPE_INIT_FUNCTIONS[self.rope_type]
        inv_freq, self.attention_scaling = rope_init_fn(self.config, device)

        self.register_buffer("inv_freq", inv_freq, persistent=False)
        self.register_buffer("original_inv_freq", inv_freq.clone(), persistent=False)

    @staticmethod
    def compute_default_rope_parameters(
        config: Optional[CohereConfig] = None,
        device: Optional["torch.device"] = None,
        seq_len: Optional[int] = None,
    ) -> tuple["torch.Tensor", float]:
        """
        Computes the inverse frequencies according to the original RoPE implementation
        Args:
            config ([`~transformers.PreTrainedConfig`]):
                The model configuration.
            device (`torch.device`):
                The device to use for initialization of the inverse frequencies.
            seq_len (`int`, *optional*):
                The current sequence length. Unused for this type of RoPE.
        Returns:
            Tuple of (`torch.Tensor`, `float`), containing the inverse frequencies for the RoPE embeddings and the
            post-processing scaling factor applied to the computed cos/sin (unused in this type of RoPE).
        """
        base = config.rope_parameters["rope_theta"]
        dim = getattr(config, "head_dim", None) or config.hidden_size // config.num_attention_heads

        attention_factor = 1.0  # Unused in this type of RoPE

        # Compute the inverse frequencies
        inv_freq = 1.0 / (
            base ** (torch.arange(0, dim, 2, dtype=torch.int64).to(device=device, dtype=torch.float) / dim)
        )
        return inv_freq, attention_factor

    @torch.no_grad()
    @dynamic_rope_update  # power user: used with advanced RoPE types (e.g. dynamic rope)
    def forward(self, x, position_ids):
        inv_freq_expanded = self.inv_freq[None, :, None].float().expand(position_ids.shape[0], -1, 1)
        position_ids_expanded = position_ids[:, None, :].float()

        device_type = x.device.type if isinstance(x.device.type, str) and x.device.type != "mps" else "cpu"
        with maybe_autocast(device_type=device_type, enabled=False):  # force float32
            freqs = (inv_freq_expanded.float() @ position_ids_expanded.float()).transpose(1, 2)
            emb = torch.repeat_interleave(freqs, 2, dim=-1)  # diff from Llama: we interleave() instead of cat()
            cos = emb.cos() * self.attention_scaling
            sin = emb.sin() * self.attention_scaling

        return cos.to(dtype=x.dtype), sin.to(dtype=x.dtype)
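

# Minimal usage sketch (assumed sizes, not from the original module): the rotary
# module only reads `x` for its dtype and device; the angles come from
# `position_ids`, and each output has one cos/sin value per head dimension.
#
#     rope = CohereRotaryEmbedding(config=config)    # any CohereConfig instance
#     x = torch.randn(1, 10, config.hidden_size)
#     position_ids = torch.arange(10).unsqueeze(0)   # (batch, seq)
#     cos, sin = rope(x, position_ids)               # each (batch, seq, head_dim)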


class CohereMLP(nn.Module):
    def __init__(self, config):
        super().__init__()
        self.config = config
        self.hidden_size = config.hidden_size
        self.intermediate_size = config.intermediate_size
        self.gate_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False)
        self.up_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False)
        self.down_proj = nn.Linear(self.intermediate_size, self.hidden_size, bias=False)
        self.act_fn = ACT2FN[config.hidden_act]

    def forward(self, x):
        down_proj = self.down_proj(self.act_fn(self.gate_proj(x)) * self.up_proj(x))
        return down_proj


def repeat_kv(hidden_states: torch.Tensor, n_rep: int) -> torch.Tensor:
    """
    This is the equivalent of torch.repeat_interleave(x, dim=1, repeats=n_rep). The hidden states go from (batch,
    num_key_value_heads, seqlen, head_dim) to (batch, num_attention_heads, seqlen, head_dim)
    """
    batch, num_key_value_heads, slen, head_dim = hidden_states.shape
    if n_rep == 1:
        return hidden_states
    hidden_states = hidden_states[:, :, None, :, :].expand(batch, num_key_value_heads, n_rep, slen, head_dim)
    return hidden_states.reshape(batch, num_key_value_heads * n_rep, slen, head_dim)
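

# Shape sanity check for repeat_kv (illustrative only; the sizes are made up):
#
#     kv = torch.randn(2, 4, 10, 64)       # (batch, num_kv_heads, seq, head_dim)
#     out = repeat_kv(kv, n_rep=3)
#     assert out.shape == (2, 12, 10, 64)  # each KV head repeated for 3 query heads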


def eager_attention_forward(
    module: nn.Module,
    query: torch.Tensor,
    key: torch.Tensor,
    value: torch.Tensor,
    attention_mask: Optional[torch.Tensor],
    scaling: float,
    dropout: float = 0.0,
    **kwargs: Unpack[TransformersKwargs],
):
    key_states = repeat_kv(key, module.num_key_value_groups)
    value_states = repeat_kv(value, module.num_key_value_groups)

    attn_weights = torch.matmul(query, key_states.transpose(2, 3)) * scaling
    if attention_mask is not None:
        attn_weights = attn_weights + attention_mask

    attn_weights = nn.functional.softmax(attn_weights, dim=-1, dtype=torch.float32).to(query.dtype)
    attn_weights = nn.functional.dropout(attn_weights, p=dropout, training=module.training)
    attn_output = torch.matmul(attn_weights, value_states)
    attn_output = attn_output.transpose(1, 2).contiguous()

    return attn_output, attn_weights
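

# Shape walk-through (descriptive note, not original source): query (B, H, S, D)
# against key (B, H, S, D) yields scores (B, H, S, S) scaled by head_dim**-0.5;
# the additive mask (0 = keep, large negative = drop) is summed in before the
# float32 softmax, and the weighted values come back transposed to (B, S, H, D)
# so the caller can flatten heads back into the hidden dimension.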


def rotate_half(x):
    # Split and rotate
    x1 = x[..., ::2]
    x2 = x[..., 1::2]
    rot_x = torch.stack([-x2, x1], dim=-1).flatten(-2)
    return rot_x
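

# Worked example (illustrative): unlike the Llama variant, which rotates two
# contiguous halves of the vector, this pairs adjacent elements, matching the
# interleaved cos/sin layout built with torch.repeat_interleave above:
#
#     rotate_half(torch.tensor([1.0, 2.0, 3.0, 4.0]))  # -> tensor([-2., 1., -4., 3.])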


def apply_rotary_pos_emb(q, k, cos, sin, unsqueeze_dim=1):
    """Applies Rotary Position Embedding to the query and key tensors.

    Args:
        q (`torch.Tensor`): The query tensor.
        k (`torch.Tensor`): The key tensor.
        cos (`torch.Tensor`): The cosine part of the rotary embedding.
        sin (`torch.Tensor`): The sine part of the rotary embedding.
        unsqueeze_dim (`int`, *optional*, defaults to 1):
            The 'unsqueeze_dim' argument specifies the dimension along which to unsqueeze cos[position_ids] and
            sin[position_ids] so that they can be properly broadcasted to the dimensions of q and k. For example, note
            that cos[position_ids] and sin[position_ids] have the shape [batch_size, seq_len, head_dim]. Then, if q and
            k have the shape [batch_size, heads, seq_len, head_dim], then setting unsqueeze_dim=1 makes
            cos[position_ids] and sin[position_ids] broadcastable to the shapes of q and k. Similarly, if q and k have
            the shape [batch_size, seq_len, heads, head_dim], then set unsqueeze_dim=2.
    Returns:
        `tuple(torch.Tensor)` comprising of the query and key tensors rotated using the Rotary Position Embedding.
    """
    dtype = q.dtype
    q = q.float()
    k = k.float()
    cos = cos.unsqueeze(unsqueeze_dim)
    sin = sin.unsqueeze(unsqueeze_dim)
    q_embed = (q * cos) + (rotate_half(q) * sin)
    k_embed = (k * cos) + (rotate_half(k) * sin)
    return q_embed.to(dtype=dtype), k_embed.to(dtype=dtype)
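

# Sanity sketch (assumed shapes; `rope` as sketched after CohereRotaryEmbedding):
# each adjacent pair (x0, x1) is rotated by its position angle theta, i.e. it
# becomes (x0*cos - x1*sin, x1*cos + x0*sin), so the per-head norm is preserved.
#
#     q = torch.randn(1, 8, 10, 64)               # (batch, heads, seq, head_dim)
#     cos, sin = rope(q, torch.arange(10)[None])
#     q_rot, k_rot = apply_rotary_pos_emb(q, q, cos, sin)
#     torch.testing.assert_close(q_rot.norm(dim=-1), q.norm(dim=-1))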


@use_kernelized_func(apply_rotary_pos_emb)
class CohereAttention(nn.Module):
    """Multi-headed attention from 'Attention Is All You Need' paper"""

    def __init__(self, config: CohereConfig, layer_idx: Optional[int] = None):
        super().__init__()
        self.config = config
        self.layer_idx = layer_idx
        self.head_dim = getattr(config, "head_dim", config.hidden_size // config.num_attention_heads)
        self.num_key_value_groups = config.num_attention_heads // config.num_key_value_heads
        self.scaling = self.head_dim**-0.5
        self.attention_dropout = config.attention_dropout
        self.is_causal = True

        self.q_proj = nn.Linear(
            config.hidden_size, config.num_attention_heads * self.head_dim, bias=config.attention_bias
        )
        self.k_proj = nn.Linear(
            config.hidden_size, config.num_key_value_heads * self.head_dim, bias=config.attention_bias
        )
        self.v_proj = nn.Linear(
            config.hidden_size, config.num_key_value_heads * self.head_dim, bias=config.attention_bias
        )
        self.o_proj = nn.Linear(
            config.num_attention_heads * self.head_dim, config.hidden_size, bias=config.attention_bias
        )
        self.use_qk_norm = config.use_qk_norm
        if self.use_qk_norm:
            # The norm weight is shaped (num_heads, head_dim) so each head is scaled separately
            self.q_norm = CohereLayerNorm(
                hidden_size=(config.num_attention_heads, self.head_dim), eps=config.layer_norm_eps
            )
            self.k_norm = CohereLayerNorm(
                hidden_size=(config.num_key_value_heads, self.head_dim), eps=config.layer_norm_eps
            )

    def forward(
        self,
        hidden_states: torch.Tensor,
        position_embeddings: tuple[torch.Tensor, torch.Tensor],
        attention_mask: Optional[torch.Tensor],
        past_key_values: Optional[Cache] = None,
        cache_position: Optional[torch.LongTensor] = None,
        **kwargs: Unpack[FlashAttentionKwargs],
    ) -> tuple[torch.Tensor, Optional[torch.Tensor]]:
        input_shape = hidden_states.shape[:-1]
        hidden_shape = (*input_shape, -1, self.head_dim)

        query_states = self.q_proj(hidden_states).view(hidden_shape)
        key_states = self.k_proj(hidden_states).view(hidden_shape)
        value_states = self.v_proj(hidden_states).view(hidden_shape)

        if self.use_qk_norm:  # main diff from Llama
            query_states = self.q_norm(query_states)
            key_states = self.k_norm(key_states)

        query_states = query_states.transpose(1, 2)
        key_states = key_states.transpose(1, 2)
        value_states = value_states.transpose(1, 2)

        cos, sin = position_embeddings
        query_states, key_states = apply_rotary_pos_emb(query_states, key_states, cos, sin)

        if past_key_values is not None:
            # sin and cos are specific to RoPE models; cache_position needed for the static cache
            cache_kwargs = {"sin": sin, "cos": cos, "cache_position": cache_position}
            key_states, value_states = past_key_values.update(key_states, value_states, self.layer_idx, cache_kwargs)

        attention_interface: Callable = eager_attention_forward
        if self.config._attn_implementation != "eager":
            attention_interface = ALL_ATTENTION_FUNCTIONS[self.config._attn_implementation]

        attn_output, attn_weights = attention_interface(
            self,
            query_states,
            key_states,
            value_states,
            attention_mask,
            dropout=0.0 if not self.training else self.attention_dropout,
            scaling=self.scaling,
            **kwargs,
        )

        attn_output = attn_output.reshape(*input_shape, -1).contiguous()
        attn_output = self.o_proj(attn_output)
        return attn_output, attn_weights


class CohereDecoderLayer(GradientCheckpointingLayer):
    def __init__(self, config: CohereConfig, layer_idx: int):
        super().__init__()
        self.hidden_size = config.hidden_size
        self.self_attn = CohereAttention(config=config, layer_idx=layer_idx)
        self.mlp = CohereMLP(config)
        self.input_layernorm = CohereLayerNorm(hidden_size=config.hidden_size, eps=config.layer_norm_eps)

    def forward(
        self,
        hidden_states: torch.Tensor,
        attention_mask: Optional[torch.Tensor] = None,
        position_ids: Optional[torch.LongTensor] = None,
        past_key_values: Optional[Cache] = None,
        use_cache: Optional[bool] = False,
        cache_position: Optional[torch.LongTensor] = None,
        position_embeddings: Optional[tuple[torch.Tensor, torch.Tensor]] = None,
        **kwargs: Unpack[TransformersKwargs],
    ) -> tuple[torch.FloatTensor, Optional[tuple[torch.FloatTensor, torch.FloatTensor]]]:
        """
        Args:
            hidden_states (`torch.FloatTensor`): input to the layer of shape `(batch, seq_len, embed_dim)`
            attention_mask (`torch.FloatTensor`, *optional*):
                attention mask of size `(batch_size, sequence_length)` if flash attention is used or `(batch_size, 1,
                query_sequence_length, key_sequence_length)` if default attention is used.
            past_key_values (`Cache`, *optional*): cached past key and value projection states
            output_attentions (`bool`, *optional*):
                Whether or not to return the attentions tensors of all attention layers. See `attentions` under
                returned tensors for more detail.
            use_cache (`bool`, *optional*):
                If set to `True`, `past_key_values` key value states are returned and can be used to speed up decoding
                (see `past_key_values`).
            cache_position (`torch.LongTensor` of shape `(sequence_length)`, *optional*):
                Indices depicting the position of the input sequence tokens in the sequence
            position_embeddings (`tuple[torch.FloatTensor, torch.FloatTensor]`, *optional*):
                Tuple containing the cosine and sine positional embeddings of shape `(batch_size, seq_len, head_dim)`,
                with `head_dim` being the embedding dimension of each attention head.
        """
        residual = hidden_states
        hidden_states = self.input_layernorm(hidden_states)

        # Self Attention
        hidden_states_attention, _ = self.self_attn(
            hidden_states=hidden_states,
            attention_mask=attention_mask,
            position_ids=position_ids,
            past_key_values=past_key_values,
            use_cache=use_cache,
            cache_position=cache_position,
            position_embeddings=position_embeddings,
            **kwargs,
        )

        # Fully Connected
        hidden_states_mlp = self.mlp(hidden_states)

        # Add everything together
        hidden_states = residual + hidden_states_attention + hidden_states_mlp

        return hidden_states
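

# Design note (added commentary, not original source): unlike the sequential
# pre-norm block in Llama (x = x + attn(norm1(x)); x = x + mlp(norm2(x))),
# Cohere runs attention and the MLP in parallel off one shared layernorm:
#
#     h_out = h + Attn(LN(h)) + MLP(LN(h))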


@auto_docstring
class CoherePreTrainedModel(PreTrainedModel):
    config: CohereConfig
    base_model_prefix = "model"
    supports_gradient_checkpointing = True
    _no_split_modules = ["CohereDecoderLayer"]
    _skip_keys_device_placement = ["past_key_values"]
    _supports_flash_attn = True
    _supports_sdpa = True
    _supports_flex_attn = True
    _can_compile_fullgraph = True
    _supports_attention_backend = True
    _can_record_outputs = {
        "hidden_states": CohereDecoderLayer,
        "attentions": CohereAttention,
    }


@auto_docstring
class CohereModel(CoherePreTrainedModel):
    def __init__(self, config: CohereConfig):
        super().__init__(config)
        self.padding_idx = config.pad_token_id
        self.vocab_size = config.vocab_size

        self.embed_tokens = nn.Embedding(config.vocab_size, config.hidden_size, self.padding_idx)
        self.layers = nn.ModuleList(
            [CohereDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)]
        )
        self.norm = CohereLayerNorm(hidden_size=config.hidden_size, eps=config.layer_norm_eps)
        self.rotary_emb = CohereRotaryEmbedding(config=config)
        self.gradient_checkpointing = False

        # Initialize weights and apply final processing
        self.post_init()

    @merge_with_config_defaults
    @capture_outputs
    @auto_docstring
    def forward(
        self,
        input_ids: Optional[torch.LongTensor] = None,
        attention_mask: Optional[torch.Tensor] = None,
        position_ids: Optional[torch.LongTensor] = None,
        past_key_values: Optional[Cache] = None,
        inputs_embeds: Optional[torch.FloatTensor] = None,
        use_cache: Optional[bool] = None,
        cache_position: Optional[torch.LongTensor] = None,
        **kwargs: Unpack[TransformersKwargs],
    ) -> BaseModelOutputWithPast:
        if (input_ids is None) ^ (inputs_embeds is not None):
            raise ValueError("You must specify exactly one of input_ids or inputs_embeds")

        if inputs_embeds is None:
            inputs_embeds = self.embed_tokens(input_ids)

        if use_cache and past_key_values is None:
            past_key_values = DynamicCache(config=self.config)

        if cache_position is None:
            past_seen_tokens = past_key_values.get_seq_length() if past_key_values is not None else 0
            cache_position = torch.arange(
                past_seen_tokens, past_seen_tokens + inputs_embeds.shape[1], device=inputs_embeds.device
            )

        if position_ids is None:
            position_ids = cache_position.unsqueeze(0)

        causal_mask = create_causal_mask(
            config=self.config,
            input_embeds=inputs_embeds,
            attention_mask=attention_mask,
            cache_position=cache_position,
            past_key_values=past_key_values,
            position_ids=position_ids,
        )

        hidden_states = inputs_embeds
        position_embeddings = self.rotary_emb(hidden_states, position_ids)

        for decoder_layer in self.layers[: self.config.num_hidden_layers]:
            hidden_states = decoder_layer(
                hidden_states,
                attention_mask=causal_mask,
                position_ids=position_ids,
                past_key_values=past_key_values,
                use_cache=use_cache,
                cache_position=cache_position,
                position_embeddings=position_embeddings,
                **kwargs,
            )

        hidden_states = self.norm(hidden_states)
        return BaseModelOutputWithPast(
            last_hidden_state=hidden_states,
            past_key_values=past_key_values,
        )
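

# Minimal forward-pass sketch (hypothetical config values; weights are random):
#
#     config = CohereConfig(num_hidden_layers=2, hidden_size=128, num_attention_heads=4, num_key_value_heads=4)
#     model = CohereModel(config)
#     ids = torch.randint(0, config.vocab_size, (1, 12))
#     out = model(input_ids=ids)
#     out.last_hidden_state.shape  # torch.Size([1, 12, 128])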


@auto_docstring
class CohereForCausalLM(CoherePreTrainedModel, GenerationMixin):
    _tied_weights_keys = {"lm_head.weight": "model.embed_tokens.weight"}
    _tp_plan = {"lm_head": "colwise_rep"}
    _pp_plan = {"lm_head": (["hidden_states"], ["logits"])}

    def __init__(self, config: CohereConfig):
        super().__init__(config)
        self.model = CohereModel(config)
        self.vocab_size = config.vocab_size
        self.lm_head = nn.Linear(config.hidden_size, config.vocab_size, bias=False)
        self.logit_scale = config.logit_scale
        self.tie_word_embeddings = config.tie_word_embeddings

        # Initialize weights and apply final processing
        self.post_init()

    @can_return_tuple
    @auto_docstring
    def forward(
        self,
        input_ids: Optional[torch.LongTensor] = None,
        attention_mask: Optional[torch.Tensor] = None,
        position_ids: Optional[torch.LongTensor] = None,
        past_key_values: Optional[Cache] = None,
        inputs_embeds: Optional[torch.FloatTensor] = None,
        labels: Optional[torch.LongTensor] = None,
        use_cache: Optional[bool] = None,
        output_attentions: Optional[bool] = None,
        output_hidden_states: Optional[bool] = None,
        cache_position: Optional[torch.LongTensor] = None,
        logits_to_keep: int | torch.Tensor = 0,
        **kwargs: Unpack[TransformersKwargs],
    ) -> CausalLMOutputWithPast:
        r"""
        labels (`torch.LongTensor` of shape `(batch_size, sequence_length)`, *optional*):
            Labels for computing the masked language modeling loss. Indices should either be in `[0, ...,
            config.vocab_size]` or -100 (see `input_ids` docstring). Tokens with indices set to `-100` are ignored
            (masked), the loss is only computed for the tokens with labels in `[0, ..., config.vocab_size]`.

        Example:

        ```python
        >>> from transformers import AutoTokenizer, CohereForCausalLM

        >>> model = CohereForCausalLM.from_pretrained("CohereForAI/c4ai-command-r-v01")
        >>> tokenizer = AutoTokenizer.from_pretrained("CohereForAI/c4ai-command-r-v01")

        >>> prompt = "Hey, are you conscious? Can you talk to me?"
        >>> inputs = tokenizer(prompt, return_tensors="pt")

        >>> # Generate
        >>> generate_ids = model.generate(inputs.input_ids, max_length=30)
        >>> tokenizer.batch_decode(generate_ids, skip_special_tokens=True, clean_up_tokenization_spaces=False)[0]
        "Hey, are you conscious? Can you talk to me?\nI'm not conscious, but I can talk to you."
        ```"""
        output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions
        output_hidden_states = (
            output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states
        )
        outputs: BaseModelOutputWithPast = self.model(
            input_ids=input_ids,
            attention_mask=attention_mask,
            position_ids=position_ids,
            past_key_values=past_key_values,
            inputs_embeds=inputs_embeds,
            use_cache=use_cache,
            output_attentions=output_attentions,
            output_hidden_states=output_hidden_states,
            cache_position=cache_position,
            **kwargs,
        )

        hidden_states = outputs.last_hidden_state
        # Only compute necessary logits, and do not upcast them to float if we are not computing the loss
        slice_indices = slice(-logits_to_keep, None) if isinstance(logits_to_keep, int) else logits_to_keep
        logits = self.lm_head(hidden_states[:, slice_indices, :])
        logits = logits * self.logit_scale  # main diff from Llama

        loss = None
        if labels is not None:
            loss = self.loss_function(logits=logits, labels=labels, vocab_size=self.config.vocab_size, **kwargs)

        return CausalLMOutputWithPast(
            loss=loss,
            logits=logits,
            past_key_values=outputs.past_key_values,
            hidden_states=outputs.hidden_states,
            attentions=outputs.attentions,
        )


__all__ = ["CohereForCausalLM", "CohereModel", "CoherePreTrainedModel"]