import math
from typing import Optional

import torch
import torch.nn as nn
import torch.utils.checkpoint

from ...utils import logging
from ..gemma.modeling_gemma import (
    GemmaForCausalLM,
    GemmaForSequenceClassification,
    GemmaForTokenClassification,
)
from ..granite.modeling_granite import (
    GraniteAttention,
    GraniteFlashAttention2,
    GraniteSdpaAttention,
)
from ..llama.modeling_llama import (
    LlamaDecoderLayer,
    LlamaModel,
    LlamaPreTrainedModel,
)
from ..phi3.modeling_phi3 import (
    Phi3MLP,
    Phi3RMSNorm,
    Phi3RotaryEmbedding,
)
from .configuration_glm import GlmConfig


logger = logging.get_logger(__name__)

_CHECKPOINT_FOR_DOC = "THUDM/glm-4-9b"


class GlmRMSNorm(Phi3RMSNorm):
    pass


class GlmRotaryEmbedding(Phi3RotaryEmbedding):
    pass


class GlmMLP(Phi3MLP):
    pass


def rotate_half(x):
    """Rotates half the hidden dims of the input."""
    # GLM pairs adjacent even/odd channels (interleaved layout) instead of
    # splitting the head dim into two contiguous halves as Llama does:
    # [a, b, c, d] -> [-b, a, -d, c].
    x1 = x[..., 0::2]
    x2 = x[..., 1::2]
    return torch.stack((-x2, x1), dim=-1).flatten(-2)


def apply_rotary_pos_emb(q, k, cos, sin, position_ids=None, unsqueeze_dim=1):
    """Applies Rotary Position Embedding to the query and key tensors.

    Args:
        q (`torch.Tensor`): The query tensor.
        k (`torch.Tensor`): The key tensor.
        cos (`torch.Tensor`): The cosine part of the rotary embedding.
        sin (`torch.Tensor`): The sine part of the rotary embedding.
        position_ids (`torch.Tensor`, *optional*):
            Deprecated and unused.
        unsqueeze_dim (`int`, *optional*, defaults to 1):
            The 'unsqueeze_dim' argument specifies the dimension along which to unsqueeze cos[position_ids] and
            sin[position_ids] so that they can be properly broadcasted to the dimensions of q and k. For example, note
            that cos[position_ids] and sin[position_ids] have the shape [batch_size, seq_len, head_dim]. Then, if q and
            k have the shape [batch_size, heads, seq_len, head_dim], then setting unsqueeze_dim=1 makes
            cos[position_ids] and sin[position_ids] broadcastable to the shapes of q and k. Similarly, if q and k have
            the shape [batch_size, seq_len, heads, head_dim], then set unsqueeze_dim=2.
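
    Example (illustrative sketch; the shapes assume `head_dim=64` with a rotary dim of 32,
    i.e. `partial_rotary_factor=0.5`):

        >>> import torch
        >>> q = torch.randn(1, 8, 16, 64)  # [batch_size, heads, seq_len, head_dim]
        >>> k = torch.randn(1, 8, 16, 64)
        >>> cos = torch.randn(1, 16, 32)   # [batch_size, seq_len, rotary_dim], as produced by the rotary embedding
        >>> sin = torch.randn(1, 16, 32)
        >>> q_embed, k_embed = apply_rotary_pos_emb(q, k, cos, sin)
        >>> tuple(q_embed.shape)           # first 32 channels rotated, remaining 32 passed through
        (1, 8, 16, 64)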
    Returns:
        `tuple(torch.Tensor)` comprising of the query and key tensors rotated using the Rotary Position Embedding.
    """
    cos = cos.unsqueeze(unsqueeze_dim)
    sin = sin.unsqueeze(unsqueeze_dim)

    # Interleave them instead of usual shape
    cos = cos[..., : cos.shape[-1] // 2].repeat_interleave(2, dim=-1)
    sin = sin[..., : sin.shape[-1] // 2].repeat_interleave(2, dim=-1)

    # Keep half or full tensor for later concatenation
    rotary_dim = cos.shape[-1]
    q_rot, q_pass = q[..., :rotary_dim], q[..., rotary_dim:]
    k_rot, k_pass = k[..., :rotary_dim], k[..., rotary_dim:]

    # Apply rotary embeddings on the first half or full tensor
    q_embed = (q_rot * cos) + (rotate_half(q_rot) * sin)
    k_embed = (k_rot * cos) + (rotate_half(k_rot) * sin)

    # Concatenate back to full shape
    q_embed = torch.cat([q_embed, q_pass], dim=-1)
    k_embed = torch.cat([k_embed, k_pass], dim=-1)
    return q_embed, k_embed


class GlmAttention(GraniteAttention):
    def __init__(self, config: GlmConfig, layer_idx: Optional[int] = None):
        super().__init__(config, layer_idx)
        # GLM uses no bias on the output projection and an explicit 1/sqrt(head_dim) scaling.
        self.o_proj = nn.Linear(self.hidden_size, self.hidden_size, bias=False)
        self.scaling = 1 / math.sqrt(self.head_dim)


class GlmFlashAttention2(GlmAttention, GraniteFlashAttention2):
    pass


class GlmSdpaAttention(GraniteSdpaAttention):
    pass


GLM_ATTENTION_CLASSES = {
    "eager": GlmAttention,
    "flash_attention_2": GlmFlashAttention2,
    "sdpa": GlmSdpaAttention,
}


class GlmDecoderLayer(LlamaDecoderLayer):
    def __init__(self, config: GlmConfig, layer_idx: Optional[int] = None):
        super().__init__()

        self.mlp = GlmMLP(config)
        self.input_layernorm = GlmRMSNorm(config.hidden_size, eps=config.rms_norm_eps)
        self.post_attention_layernorm = GlmRMSNorm(config.hidden_size, eps=config.rms_norm_eps)


class GlmPreTrainedModel(LlamaPreTrainedModel):
    pass


class GlmModel(GlmPreTrainedModel, LlamaModel):
    def __init__(self, config: GlmConfig):
        super().__init__(config)
        self.layers = nn.ModuleList(
            [GlmDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)]
        )
        self.norm = GlmRMSNorm(config.hidden_size, eps=config.rms_norm_eps)
        # Only a fraction of each head is rotated (partial rotary embedding).
        self.rotary_emb = GlmRotaryEmbedding(
            dim=int(config.head_dim * config.partial_rotary_factor),
            max_position_embeddings=config.max_position_embeddings,
            base=config.rope_theta,
        )
        self.gradient_checkpointing = False

        # Initialize weights and apply final processing
        self.post_init()


class GlmForCausalLM(GemmaForCausalLM):
    def __init__(self, config: GlmConfig):
        super().__init__(config)
        self.model = GlmModel(config)

        # Initialize weights and apply final processing
        self.post_init()


class GlmForSequenceClassification(GemmaForSequenceClassification):
    def __init__(self, config: GlmConfig):
        super().__init__(config)
        self.model = GlmModel(config)

        # Initialize weights and apply final processing
        self.post_init()


class GlmForTokenClassification(GemmaForTokenClassification):
    def __init__(self, config: GlmConfig):
        super().__init__(config)
        self.model = GlmModel(config)

        # Initialize weights and apply final processing
        self.post_init()


__all__ = [
    "GlmPreTrainedModel",
    "GlmModel",
    "GlmForCausalLM",
    "GlmForSequenceClassification",
    "GlmForTokenClassification",
]