
"""Flax T5 model."""

import copy
from typing import Callable, Optional, Tuple

import flax.linen as nn
import jax
import jax.numpy as jnp
import numpy as np
from flax.core.frozen_dict import FrozenDict, freeze, unfreeze
from flax.linen import combine_masks, make_causal_mask
from flax.linen import partitioning as nn_partitioning
from flax.linen.attention import dot_product_attention_weights
from flax.traverse_util import flatten_dict, unflatten_dict
from jax.random import PRNGKey

from ...modeling_flax_outputs import (
    FlaxBaseModelOutput,
    FlaxBaseModelOutputWithPastAndCrossAttentions,
    FlaxCausalLMOutputWithCrossAttentions,
    FlaxSeq2SeqLMOutput,
    FlaxSeq2SeqModelOutput,
)
from ...modeling_flax_utils import (
    ACT2FN,
    FlaxPreTrainedModel,
    append_call_sample_docstring,
    append_replace_return_docstrings,
    overwrite_call_docstring,
)
from ...utils import add_start_docstrings, add_start_docstrings_to_model_forward, logging, replace_return_docstrings
from .configuration_t5 import T5Config


logger = logging.get_logger(__name__)

_CHECKPOINT_FOR_DOC = "google-t5/t5-small"
_CONFIG_FOR_DOC = "T5Config"

remat = nn_partitioning.remat


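# Illustrative example for `shift_tokens_right` defined below (added comment, not
# part of the original file): with pad_token_id=0 and decoder_start_token_id=0,
#
#     shift_tokens_right(jnp.array([[5, 6, 7]]), 0, 0)  # -> [[0, 5, 6]]
#
# and any -100 loss-masking sentinels in the shifted ids are replaced by the pad id.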
def shift_tokens_right(input_ids: jnp.ndarray, pad_token_id: int, decoder_start_token_id: int) -> jnp.ndarray:
    """
    Shift input ids one token to the right.
    """
    shifted_input_ids = jnp.zeros_like(input_ids)
    shifted_input_ids = shifted_input_ids.at[:, 1:].set(input_ids[:, :-1])
    shifted_input_ids = shifted_input_ids.at[:, 0].set(decoder_start_token_id)

    shifted_input_ids = jnp.where(shifted_input_ids == -100, pad_token_id, shifted_input_ids)
    return shifted_input_ids


class FlaxT5LayerNorm(nn.Module):
    hidden_size: int
    dtype: jnp.dtype = jnp.float32
    eps: float = 1e-6
    weight_init: Callable[..., np.ndarray] = jax.nn.initializers.ones

    def setup(self):
        self.weight = self.param("weight", self.weight_init, (self.hidden_size,))

    def __call__(self, hidden_states):
        """
        Construct a layernorm module in the T5 style; no bias and no subtraction of mean.
        """
        # layer norm should always be calculated in float32
        variance = jnp.power(hidden_states.astype("f4"), 2).mean(axis=-1, keepdims=True)
        hidden_states = hidden_states / jnp.sqrt(variance + self.eps)

        return self.weight * hidden_states


class FlaxT5DenseActDense(nn.Module):
    config: T5Config
    dtype: jnp.dtype = jnp.float32

    def setup(self):
        wi_init_std = self.config.initializer_factor * (self.config.d_model**-0.5)
        wo_init_std = self.config.initializer_factor * (self.config.d_ff**-0.5)

        self.wi = nn.Dense(
            self.config.d_ff,
            use_bias=False,
            kernel_init=jax.nn.initializers.normal(wi_init_std),
            dtype=self.dtype,
        )
        self.wo = nn.Dense(
            self.config.d_model,
            use_bias=False,
            kernel_init=jax.nn.initializers.normal(wo_init_std),
            dtype=self.dtype,
        )
        self.dropout = nn.Dropout(self.config.dropout_rate)
        self.act = ACT2FN[self.config.dense_act_fn]

    def __call__(self, hidden_states, deterministic=True):
        hidden_states = self.wi(hidden_states)
        hidden_states = self.act(hidden_states)
        hidden_states = self.dropout(hidden_states, deterministic=deterministic)
        hidden_states = self.wo(hidden_states)
        return hidden_states


class FlaxT5DenseGatedActDense(nn.Module):
    config: T5Config
    dtype: jnp.dtype = jnp.float32

    def setup(self):
        wi_init_std = self.config.initializer_factor * (self.config.d_model**-0.5)
        wo_init_std = self.config.initializer_factor * (self.config.d_ff**-0.5)

        self.wi_0 = nn.Dense(
            self.config.d_ff,
            use_bias=False,
            kernel_init=jax.nn.initializers.normal(wi_init_std),
            dtype=self.dtype,
        )
        self.wi_1 = nn.Dense(
            self.config.d_ff,
            use_bias=False,
            kernel_init=jax.nn.initializers.normal(wi_init_std),
            dtype=self.dtype,
        )
        self.wo = nn.Dense(
            self.config.d_model,
            use_bias=False,
            kernel_init=jax.nn.initializers.normal(wo_init_std),
            dtype=self.dtype,
        )
        self.dropout = nn.Dropout(self.config.dropout_rate)
        self.act = ACT2FN[self.config.dense_act_fn]

    def __call__(self, hidden_states, deterministic):
        hidden_gelu = self.act(self.wi_0(hidden_states))
        hidden_linear = self.wi_1(hidden_states)
        hidden_states = hidden_gelu * hidden_linear
        hidden_states = self.dropout(hidden_states, deterministic=deterministic)
        hidden_states = self.wo(hidden_states)
        return hidden_states


class FlaxT5LayerFF(nn.Module):
    config: T5Config
    dtype: jnp.dtype = jnp.float32

    def setup(self):
        if self.config.is_gated_act:
            self.DenseReluDense = FlaxT5DenseGatedActDense(self.config, dtype=self.dtype)
        else:
            self.DenseReluDense = FlaxT5DenseActDense(self.config, dtype=self.dtype)

        self.layer_norm = FlaxT5LayerNorm(self.config.d_model, eps=self.config.layer_norm_epsilon, dtype=self.dtype)
        self.dropout = nn.Dropout(self.config.dropout_rate)

    def __call__(self, hidden_states, deterministic=True):
        forwarded_states = self.layer_norm(hidden_states)
        forwarded_states = self.DenseReluDense(forwarded_states, deterministic=deterministic)
        hidden_states = hidden_states + self.dropout(forwarded_states, deterministic=deterministic)
        return hidden_states


class FlaxT5Attention(nn.Module):
    config: T5Config
    has_relative_attention_bias: bool = False
    causal: bool = False
    dtype: jnp.dtype = jnp.float32  # the dtype of the computation

    def setup(self):
        self.relative_attention_num_buckets = self.config.relative_attention_num_buckets
        self.relative_attention_max_distance = self.config.relative_attention_max_distance
        self.d_model = self.config.d_model
        self.key_value_proj_dim = self.config.d_kv
        self.n_heads = self.config.num_heads
        self.dropout = self.config.dropout_rate
        self.inner_dim = self.n_heads * self.key_value_proj_dim

        q_init_std = self.config.initializer_factor * ((self.inner_dim * self.key_value_proj_dim) ** -0.5)
        kv_init_std = self.config.initializer_factor * (self.inner_dim**-0.5)
        o_init_std = self.config.initializer_factor * (self.inner_dim**-0.5)

        self.q = nn.Dense(
            self.inner_dim,
            use_bias=False,
            kernel_init=jax.nn.initializers.normal(q_init_std),
            dtype=self.dtype,
        )
        self.k = nn.Dense(
            self.inner_dim,
            use_bias=False,
            kernel_init=jax.nn.initializers.normal(kv_init_std),
            dtype=self.dtype,
        )
        self.v = nn.Dense(
            self.inner_dim,
            use_bias=False,
            kernel_init=jax.nn.initializers.normal(kv_init_std),
            dtype=self.dtype,
        )
        self.o = nn.Dense(
            self.d_model,
            use_bias=False,
            kernel_init=jax.nn.initializers.normal(o_init_std),
            dtype=self.dtype,
        )

        if self.has_relative_attention_bias:
            self.relative_attention_bias = nn.Embed(
                self.relative_attention_num_buckets,
                self.n_heads,
                embedding_init=jax.nn.initializers.normal(kv_init_std),
                dtype=self.dtype,
            )

    @staticmethod
    def _relative_position_bucket(relative_position, bidirectional=True, num_buckets=32, max_distance=128):
        """
        Adapted from Mesh Tensorflow:
        https://github.com/tensorflow/mesh/blob/0cb87fe07da627bf0b7e60475d59f95ed6b5be3d/mesh_tensorflow/transformer/transformer_layers.py#L593

        Translate relative position to a bucket number for relative attention. The relative position is defined as
        memory_position - query_position, i.e. the distance in tokens from the attending position to the attended-to
        position. If bidirectional=False, then positive relative positions are invalid. We use smaller buckets for
        small absolute relative_position and larger buckets for larger absolute relative_positions. All relative
        positions >=max_distance map to the same bucket. All relative positions <=-max_distance map to the same bucket.
        This should allow for more graceful generalization to longer sequences than the model has been trained on.
        """
        relative_buckets = 0
        if bidirectional:
            num_buckets //= 2
            relative_buckets += (relative_position > 0) * num_buckets
            relative_position = jnp.abs(relative_position)
        else:
            relative_position = -jnp.clip(relative_position, a_max=0)
        # now relative_position is in the range [0, inf)

        # half of the buckets are for exact increments in positions
        max_exact = num_buckets // 2
        is_small = relative_position < max_exact

        relative_position_if_large = max_exact + (
            jnp.log(relative_position / max_exact) / jnp.log(max_distance / max_exact) * (num_buckets - max_exact)
        )
        relative_position_if_large = jnp.clip(relative_position_if_large, a_max=num_buckets - 1)

        relative_buckets += jnp.where(is_small, relative_position, relative_position_if_large)

        return relative_buckets.astype("i4")

    def compute_bias(self, query_length, key_length):
        """Compute binned relative position bias"""
        context_position = jnp.arange(query_length, dtype="i4")[:, None]
        memory_position = jnp.arange(key_length, dtype="i4")[None, :]

        relative_position = memory_position - context_position
        relative_position_bucket = self._relative_position_bucket(
            relative_position,
            bidirectional=(not self.causal),
            num_buckets=self.relative_attention_num_buckets,
            max_distance=self.relative_attention_max_distance,
        )

        values = self.relative_attention_bias(relative_position_bucket)
        values = values.transpose((2, 0, 1))[None, :, :, :]
        return values

    def _split_heads(self, hidden_states):
        return hidden_states.reshape(hidden_states.shape[:2] + (self.n_heads, self.key_value_proj_dim))

    def _merge_heads(self, hidden_states):
        return hidden_states.reshape(hidden_states.shape[:2] + (self.inner_dim,))

    @nn.compact
    def _concatenate_to_cache(self, key, value, query, attention_mask):
        """
        This function takes projected key, value states from a single input token and concatenates the states to cached
        states from previous steps. This function is slightly adapted from the official Flax repository:
        https://github.com/google/flax/blob/491ce18759622506588784b4fca0e4bf05f8c8cd/flax/linen/attention.py#L252
        """
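        # Shape walk-through (added descriptive comment, not part of the original file):
        # the cache variables hold (batch, max_length, n_heads, head_dim) arrays. Each
        # decoding step writes the newly projected key/value slice at offset
        # `cache_index` via `dynamic_update_slice`, and `pad_mask` hides every key
        # position that has not been written yet.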
        # detect if we're initializing by absence of existing cache data.
        is_initialized = self.has_variable("cache", "cached_key")
        cached_key = self.variable("cache", "cached_key", jnp.zeros, key.shape, key.dtype)
        cached_value = self.variable("cache", "cached_value", jnp.zeros, value.shape, value.dtype)
        cache_index = self.variable("cache", "cache_index", lambda: jnp.array(0, dtype=jnp.int32))

        if is_initialized:
            *batch_dims, max_length, num_heads, depth_per_head = cached_key.value.shape
            # update key, value caches with our new 1d spatial slices
            cur_index = cache_index.value
            indices = (0,) * len(batch_dims) + (cur_index, 0, 0)
            key = jax.lax.dynamic_update_slice(cached_key.value, key, indices)
            value = jax.lax.dynamic_update_slice(cached_value.value, value, indices)
            cached_key.value = key
            cached_value.value = value
            num_updated_cache_vectors = query.shape[1]
            cache_index.value = cache_index.value + num_updated_cache_vectors
            # causal mask for cached decoder self-attention: our single query position should only attend to those
            # key positions that have already been generated and cached, not the remaining zero elements.
            pad_mask = jnp.broadcast_to(
                jnp.arange(max_length) < cur_index + num_updated_cache_vectors,
                tuple(batch_dims) + (1, num_updated_cache_vectors, max_length),
            )
            attention_mask = combine_masks(pad_mask, attention_mask)
        return key, value, attention_mask

    def _create_position_bias(
        self, key_states, query_states, attention_mask, init_cache, seq_length, causal_attention_mask_shift
    ):
        cache_is_filled = self.causal and self.has_variable("cache", "cached_key") and (not init_cache)
        key_length = key_states.shape[1]
        query_length = key_length if cache_is_filled else query_states.shape[1]

        if self.has_relative_attention_bias:
            position_bias = self.compute_bias(query_length, key_length)
        elif attention_mask is not None:
            position_bias = jnp.zeros_like(attention_mask)
        else:
            position_bias = jnp.zeros((1, self.n_heads, query_length, key_length), dtype=self.dtype)

        # if key and values are already calculated, only the last query position bias should be taken
        if cache_is_filled:
            max_decoder_length = self.variables["cache"]["cached_key"].shape[1]
            position_bias = jax.lax.dynamic_slice(
                position_bias,
                (0, 0, causal_attention_mask_shift, 0),
                (1, self.n_heads, seq_length, max_decoder_length),
            )
        return position_bias

    def __call__(
        self,
        hidden_states,
        attention_mask=None,
        key_value_states=None,
        position_bias=None,
        use_cache=False,
        output_attentions=False,
        deterministic=True,
        init_cache=False,
    ):
        """
        Self-attention (if key_value_states is None) or attention over source sentence (provided by key_value_states).
        """
        batch_size, seq_length = hidden_states.shape[:2]

        # q, k, v projections
        query_states = self.q(hidden_states)
        key_states = self.k(hidden_states) if key_value_states is None else self.k(key_value_states)
        value_states = self.v(hidden_states) if key_value_states is None else self.v(key_value_states)

        # reshape to (batch_size, seq_length, n_heads, head_dim)
        query_states = self._split_heads(query_states)
        key_states = self._split_heads(key_states)
        value_states = self._split_heads(value_states)

        # counter-act scaling in dot_product_attention_weights function
        query_states *= jnp.sqrt(query_states.shape[-1])

        # for fast decoding causal attention mask should be shifted
        causal_attention_mask_shift = (
            self.variables["cache"]["cache_index"] if self.has_variable("cache", "cached_key") else 0
        )
        # create causal attention_mask; attention_mask has to be defined when model is causal
        if self.causal:
            causal_attention_mask = make_causal_mask(attention_mask, dtype="bool")

            # fast decoding for generate requires special attention_mask
            if self.has_variable("cache", "cached_key"):
                max_decoder_length = self.variables["cache"]["cached_key"].shape[1]
                causal_attention_mask = jax.lax.dynamic_slice(
                    causal_attention_mask,
                    (0, 0, causal_attention_mask_shift, 0),
                    (1, 1, seq_length, max_decoder_length),
                )

            # broadcast causal attention mask & attention mask to fit for merge
            causal_attention_mask = jnp.broadcast_to(
                causal_attention_mask, (batch_size,) + causal_attention_mask.shape[1:]
            )
            attention_mask = jnp.broadcast_to(
                jnp.expand_dims(attention_mask, axis=(-3, -2)), causal_attention_mask.shape
            )
            attention_mask = combine_masks(attention_mask, causal_attention_mask)
        elif attention_mask is not None:
            attention_mask = jnp.expand_dims(attention_mask, axis=(-3, -2))

        # During fast autoregressive decoding, we feed one position at a time,
        # and cache the keys and values step by step.
        if self.causal and (self.has_variable("cache", "cached_key") or init_cache):
            key_states, value_states, attention_mask = self._concatenate_to_cache(
                key_states, value_states, query_states, attention_mask
            )

        # replace masked positions with the minimum value of the dtype as an additive bias
        if attention_mask is not None:
            mask_value = jnp.finfo(self.dtype).min
            attention_mask = jax.lax.select(
                attention_mask > 0,
                jnp.full(attention_mask.shape, 0.0).astype(self.dtype),
                jnp.full(attention_mask.shape, mask_value).astype(self.dtype),
            )

        if position_bias is None:
            # compute position bias (only for the first layer)
            position_bias = self._create_position_bias(
                key_states, query_states, attention_mask, init_cache, seq_length, causal_attention_mask_shift
            )

            if attention_mask is not None:
                position_bias = position_bias + attention_mask

        # create dropout rng
        dropout_rng = None
        if not deterministic and self.dropout > 0.0:
            dropout_rng = self.make_rng("dropout")

        # Softmax(QK^T)
        attn_weights = dot_product_attention_weights(
            query_states,
            key_states,
            bias=position_bias,
            dropout_rng=dropout_rng,
            dropout_rate=self.dropout,
            broadcast_dropout=True,
            deterministic=deterministic,
            dtype=self.dtype,
        )

        # multiply with value states
        attn_output = jnp.einsum("...hqk,...khd->...qhd", attn_weights, value_states)

        # bring back to (batch_size, seq_length, d_model)
        attn_output = self._merge_heads(attn_output)

        # apply output matrix
        attn_output = self.o(attn_output)

        outputs = (attn_output, position_bias)

        if output_attentions:
            outputs = outputs + (attn_weights,)

        return outputs


class FlaxT5LayerSelfAttention(nn.Module):
    config: T5Config
    has_relative_attention_bias: bool = False
    dtype: jnp.dtype = jnp.float32  # the dtype of the computation

    def setup(self):
        self.SelfAttention = FlaxT5Attention(
            self.config,
            has_relative_attention_bias=self.has_relative_attention_bias,
            causal=self.config.causal,
            dtype=self.dtype,
        )
        self.layer_norm = FlaxT5LayerNorm(self.config.d_model, eps=self.config.layer_norm_epsilon, dtype=self.dtype)
        self.dropout = nn.Dropout(self.config.dropout_rate)

    def __call__(
        self,
        hidden_states,
        attention_mask=None,
        position_bias=None,
        output_attentions=False,
        deterministic=True,
        init_cache=False,
    ):
        normed_hidden_states = self.layer_norm(hidden_states)
        attention_output = self.SelfAttention(
            normed_hidden_states,
            attention_mask=attention_mask,
            position_bias=position_bias,
            output_attentions=output_attentions,
            deterministic=deterministic,
            init_cache=init_cache,
        )
        hidden_states = hidden_states + self.dropout(attention_output[0], deterministic=deterministic)
        outputs = (hidden_states,) + attention_output[1:]  # add attentions if we output them
        return outputs


class FlaxT5LayerCrossAttention(nn.Module):
    config: T5Config
    dtype: jnp.dtype = jnp.float32  # the dtype of the computation

    def setup(self):
        self.EncDecAttention = FlaxT5Attention(
            self.config, has_relative_attention_bias=False, causal=False, dtype=self.dtype
        )
        self.layer_norm = FlaxT5LayerNorm(self.config.d_model, eps=self.config.layer_norm_epsilon, dtype=self.dtype)
        self.dropout = nn.Dropout(self.config.dropout_rate)

    def __call__(
        self,
        hidden_states,
        key_value_states,
        attention_mask=None,
        position_bias=None,
        output_attentions=False,
        deterministic=True,
    ):
        normed_hidden_states = self.layer_norm(hidden_states)
        attention_output = self.EncDecAttention(
            normed_hidden_states,
            attention_mask=attention_mask,
            key_value_states=key_value_states,
            position_bias=position_bias,
            output_attentions=output_attentions,
        )
        hidden_states = hidden_states + self.dropout(attention_output[0], deterministic=deterministic)
        outputs = (hidden_states,) + attention_output[1:]  # add attentions if we output them
        return outputs


class FlaxT5Block(nn.Module):
    config: T5Config
    has_relative_attention_bias: bool = False
    dtype: jnp.dtype = jnp.float32  # the dtype of the computation

    def setup(self):
        self.causal = self.config.causal
        self.layer = (
            FlaxT5LayerSelfAttention(
                self.config,
                has_relative_attention_bias=self.has_relative_attention_bias,
                name=str(0),
                dtype=self.dtype,
            ),
        )
        feed_forward_index = 1
        if self.causal:
            self.layer += (FlaxT5LayerCrossAttention(self.config, name=str(1), dtype=self.dtype),)
            feed_forward_index += 1

        self.layer += (FlaxT5LayerFF(self.config, name=str(feed_forward_index), dtype=self.dtype),)

    def __call__(
        self,
        hidden_states,
        attention_mask=None,
        position_bias=None,
        encoder_hidden_states=None,
        encoder_attention_mask=None,
        encoder_decoder_position_bias=None,
        output_attentions=False,
        return_dict=True,
        deterministic=True,
        init_cache=False,
    ):
        self_attention_outputs = self.layer[0](
            hidden_states,
            attention_mask=attention_mask,
            position_bias=position_bias,
            output_attentions=output_attentions,
            deterministic=deterministic,
            init_cache=init_cache,
        )
        hidden_states = self_attention_outputs[0]
        attention_outputs = self_attention_outputs[1:]  # Keep self-attention outputs and relative position weights

        do_cross_attention = self.causal and encoder_hidden_states is not None
        if do_cross_attention:
            cross_attention_outputs = self.layer[1](
                hidden_states,
                key_value_states=encoder_hidden_states,
                attention_mask=encoder_attention_mask,
                position_bias=encoder_decoder_position_bias,
                output_attentions=output_attentions,
                deterministic=deterministic,
            )
            hidden_states = cross_attention_outputs[0]

            # Keep cross-attention outputs and relative position weights
            attention_outputs = attention_outputs + cross_attention_outputs[1:]

        # Apply Feed Forward layer
        hidden_states = self.layer[-1](hidden_states, deterministic=deterministic)

        outputs = (hidden_states,)

        outputs = outputs + attention_outputs

        # returns hidden-states, (self-attention position bias), (self-attention weights),
        # (cross-attention position bias), (cross-attention weights)
        return outputs


class FlaxT5LayerCollection(nn.Module):
    config: T5Config
    has_relative_attention_bias: bool
    dtype: jnp.dtype = jnp.float32  # the dtype of the computation

    def setup(self):
        self.layer = FlaxT5Block(
            self.config, has_relative_attention_bias=self.has_relative_attention_bias, dtype=self.dtype
        )

    def __call__(
        self,
        hidden_states,
        attention_mask=None,
        position_bias=None,
        encoder_hidden_states=None,
        encoder_attention_mask=None,
        encoder_decoder_position_bias=None,
        output_attentions=False,
        deterministic=True,
        init_cache=False,
    ):
        return self.layer(
            hidden_states,
            attention_mask=attention_mask,
            position_bias=position_bias,
            encoder_hidden_states=encoder_hidden_states,
            encoder_attention_mask=encoder_attention_mask,
            encoder_decoder_position_bias=encoder_decoder_position_bias,
            output_attentions=output_attentions,
            deterministic=deterministic,
            init_cache=init_cache,
        )


class FlaxT5BlockCollection(nn.Module):
    config: T5Config
    dtype: jnp.dtype = jnp.float32  # the dtype of the computation
    gradient_checkpointing: bool = False

    def setup(self):
        self.causal = self.config.causal
        if self.gradient_checkpointing:
            FlaxT5CheckpointLayer = remat(FlaxT5LayerCollection, static_argnums=(6, 7, 8))
            self.blocks = [
                FlaxT5CheckpointLayer(
                    self.config,
                    has_relative_attention_bias=(i == 0),
                    dtype=self.dtype,
                    name=str(i),
                )
                for i in range(self.config.num_layers)
            ]
        else:
            self.blocks = [
                FlaxT5LayerCollection(
                    self.config,
                    has_relative_attention_bias=(i == 0),
                    dtype=self.dtype,
                    name=str(i),
                )
                for i in range(self.config.num_layers)
            ]

    def __call__(
        self,
        hidden_states=None,
        attention_mask=None,
        encoder_hidden_states=None,
        encoder_attention_mask=None,
        output_attentions: bool = False,
        output_hidden_states: bool = False,
        deterministic: bool = True,
        init_cache: bool = False,
    ):
        # Prepare head mask if needed
        all_hidden_states = () if output_hidden_states else None
        all_attentions = () if output_attentions else None
        all_cross_attentions = () if (output_attentions and self.causal) else None
        position_bias = None
        encoder_decoder_position_bias = None

        for i, layer_module in enumerate(self.blocks):
            if output_hidden_states:
                all_hidden_states = all_hidden_states + (hidden_states,)

            layer_outputs = layer_module(
                hidden_states,
                attention_mask,
                position_bias,
                encoder_hidden_states,
                encoder_attention_mask,
                encoder_decoder_position_bias,
                output_attentions,
                deterministic,
                init_cache,
            )

            hidden_states = layer_outputs[0]

            # We share the position biases between the layers - the first layer stores them
            position_bias = layer_outputs[1]

            if self.causal and encoder_hidden_states is not None:
                encoder_decoder_position_bias = layer_outputs[3 if output_attentions else 2]

            if output_attentions:
                all_attentions = all_attentions + (layer_outputs[2],)
                if self.causal:
                    all_cross_attentions = all_cross_attentions + (layer_outputs[4],)

        return FlaxBaseModelOutputWithPastAndCrossAttentions(
            last_hidden_state=hidden_states,
            hidden_states=all_hidden_states,
            attentions=all_attentions,
            cross_attentions=all_cross_attentions,
        )


class FlaxT5Stack(nn.Module):
    config: T5Config
    embed_tokens: nn.Embed
    dtype: jnp.dtype = jnp.float32  # the dtype of the computation
    gradient_checkpointing: bool = False

    def setup(self):
        self.causal = self.config.causal

        self.block = FlaxT5BlockCollection(
            self.config, dtype=self.dtype, gradient_checkpointing=self.gradient_checkpointing
        )
        self.final_layer_norm = FlaxT5LayerNorm(
            self.config.d_model, eps=self.config.layer_norm_epsilon, dtype=self.dtype
        )
        self.dropout = nn.Dropout(self.config.dropout_rate)

    def __call__(
        self,
        input_ids=None,
        attention_mask=None,
        encoder_hidden_states=None,
        encoder_attention_mask=None,
        output_attentions: bool = False,
        output_hidden_states: bool = False,
        return_dict: bool = True,
        deterministic: bool = True,
        init_cache: bool = False,
    ):
        hidden_states = self.embed_tokens(input_ids)
        hidden_states = self.dropout(hidden_states, deterministic=deterministic)

        outputs = self.block(
            hidden_states,
            attention_mask=attention_mask,
            encoder_hidden_states=encoder_hidden_states,
            encoder_attention_mask=encoder_attention_mask,
            output_attentions=output_attentions,
            output_hidden_states=output_hidden_states,
            deterministic=deterministic,
            init_cache=init_cache,
        )

        hidden_states = outputs[0]

        hidden_states = self.final_layer_norm(hidden_states)
        hidden_states = self.dropout(hidden_states, deterministic=deterministic)

        # Add last layer
        all_hidden_states = None

        if output_hidden_states:
            all_hidden_states = outputs.hidden_states
            all_hidden_states = all_hidden_states + (hidden_states,)

        if not return_dict:
            if output_hidden_states:
                return (
                    hidden_states,
                    all_hidden_states,
                ) + outputs[2:]
            return (hidden_states,) + outputs[1:]

        return FlaxBaseModelOutputWithPastAndCrossAttentions(
            last_hidden_state=hidden_states,
            hidden_states=all_hidden_states,
            attentions=outputs.attentions,
            cross_attentions=outputs.cross_attentions,
        )


T5_ENCODE_INPUTS_DOCSTRING = r"""
    Args:
        input_ids (`jnp.ndarray` of shape `(batch_size, sequence_length)`):
            Indices of input sequence tokens in the vocabulary. T5 is a model with relative position embeddings so you
            should be able to pad the inputs on both the right and the left.

            Indices can be obtained using [`AutoTokenizer`]. See [`PreTrainedTokenizer.encode`] and
            [`PreTrainedTokenizer.__call__`] for details.

            To know more on how to prepare `input_ids` for pretraining take a look at [T5 Training](./t5#training).
        attention_mask (`jnp.ndarray` of shape `(batch_size, sequence_length)`, *optional*):
            Mask to avoid performing attention on padding token indices. Mask values selected in `[0, 1]`:

            - 1 for tokens that are **not masked**,
            - 0 for tokens that are **masked**.

            [What are attention masks?](../glossary#attention-mask)
        output_attentions (`bool`, *optional*):
            Whether or not to return the attentions tensors of all attention layers. See `attentions` under returned
            tensors for more detail.
        output_hidden_states (`bool`, *optional*):
            Whether or not to return the hidden states of all layers. See `hidden_states` under returned tensors for
            more detail.
        return_dict (`bool`, *optional*):
            Whether or not to return a [`~utils.ModelOutput`] instead of a plain tuple.
"""

T5_DECODE_INPUTS_DOCSTRING = r"""
    Args:
        decoder_input_ids (`jnp.ndarray` of shape `(batch_size, target_sequence_length)`):
            Indices of decoder input sequence tokens in the vocabulary.

            Indices can be obtained using [`AutoTokenizer`]. See [`PreTrainedTokenizer.encode`] and
            [`PreTrainedTokenizer.__call__`] for details.

            [What are decoder input IDs?](../glossary#decoder-input-ids)

            For training, `decoder_input_ids` should be provided.
        encoder_outputs (`tuple(tuple(jnp.ndarray)`):
            Tuple consists of (`last_hidden_state`, *optional*: `hidden_states`, *optional*: `attentions`)
            `last_hidden_state` of shape `(batch_size, sequence_length, hidden_size)`, *optional*) is a sequence of
            hidden-states at the output of the last layer of the encoder. Used in the cross-attention of the decoder.
        encoder_attention_mask (`jnp.ndarray` of shape `(batch_size, sequence_length)`, *optional*):
            Mask to avoid performing attention on padding token indices. Mask values selected in `[0, 1]`:

            - 1 for tokens that are **not masked**,
            - 0 for tokens that are **masked**.

            [What are attention masks?](../glossary#attention-mask)
        decoder_attention_mask (`jnp.ndarray` of shape `(batch_size, target_sequence_length)`, *optional*):
            Default behavior: generate a tensor that ignores pad tokens in `decoder_input_ids`. Causal mask will also
            be used by default.

            If you want to change padding behavior, you should modify it to your needs. See diagram 1 in [the
            paper](https://arxiv.org/abs/1910.13461) for more information on the default strategy.
        past_key_values (`Dict[str, np.ndarray]`, *optional*, returned by `init_cache` or when passing previous `past_key_values`):
            Dictionary of pre-computed hidden-states (key and values in the attention blocks) that can be used for fast
            auto-regressive decoding. Pre-computed key and value hidden-states are of shape *[batch_size, max_length]*.
        output_attentions (`bool`, *optional*):
            Whether or not to return the attentions tensors of all attention layers. See `attentions` under returned
            tensors for more detail.
        output_hidden_states (`bool`, *optional*):
            Whether or not to return the hidden states of all layers. See `hidden_states` under returned tensors for
            more detail.
        return_dict (`bool`, *optional*):
            Whether or not to return a [`~utils.ModelOutput`] instead of a plain tuple.
"""

T5_INPUTS_DOCSTRING = r"""
    Args:
        input_ids (`jnp.ndarray` of shape `(batch_size, sequence_length)`):
            Indices of input sequence tokens in the vocabulary. T5 is a model with relative position embeddings so you
            should be able to pad the inputs on both the right and the left.

            Indices can be obtained using [`AutoTokenizer`]. See [`PreTrainedTokenizer.encode`] and
            [`PreTrainedTokenizer.__call__`] for detail.

            [What are input IDs?](../glossary#input-ids)

            To know more on how to prepare `input_ids` for pretraining take a look at [T5 Training](./t5#training).
        attention_mask (`jnp.ndarray` of shape `(batch_size, sequence_length)`, *optional*):
            Mask to avoid performing attention on padding token indices. Mask values selected in `[0, 1]`:

            - 1 for tokens that are **not masked**,
            - 0 for tokens that are **masked**.

            [What are attention masks?](../glossary#attention-mask)
        decoder_input_ids (`jnp.ndarray` of shape `(batch_size, target_sequence_length)`, *optional*):
            Indices of decoder input sequence tokens in the vocabulary.

            Indices can be obtained using [`AutoTokenizer`]. See [`PreTrainedTokenizer.encode`] and
            [`PreTrainedTokenizer.__call__`] for details.

            [What are decoder input IDs?](../glossary#decoder-input-ids)

            T5 uses the `pad_token_id` as the starting token for `decoder_input_ids` generation. If `past_key_values`
            is used, optionally only the last `decoder_input_ids` have to be input (see `past_key_values`).

            To know more on how to prepare `decoder_input_ids` for pretraining take a look at [T5
            Training](./t5#training).
        decoder_attention_mask (`jnp.ndarray` of shape `(batch_size, target_sequence_length)`, *optional*):
            Default behavior: generate a tensor that ignores pad tokens in `decoder_input_ids`. Causal mask will also
            be used by default.
        encoder_outputs (`tuple(tuple(jnp.ndarray))`, *optional*):
            Tuple consists of (`last_hidden_state`, `optional`: *hidden_states*, `optional`: *attentions*)
            `last_hidden_state` of shape `(batch_size, sequence_length, hidden_size)` is a sequence of hidden states at
            the output of the last layer of the encoder. Used in the cross-attention of the decoder.
        past_key_values (`tuple(tuple(jnp.ndarray))` of length `config.n_layers` with each tuple having 4 tensors of shape `(batch_size, num_heads, sequence_length - 1, embed_size_per_head)`):
            Contains precomputed key and value hidden states of the attention blocks. Can be used to speed up decoding.

            If `past_key_values` are used, the user can optionally input only the last `decoder_input_ids` (those that
            don't have their past key value states given to this model) of shape `(batch_size, 1)` instead of all
            `decoder_input_ids` of shape `(batch_size, sequence_length)`.


        output_attentions (`bool`, *optional*):
            Whether or not to return the attentions tensors of all attention layers. See `attentions` under returned
            tensors for more detail.
        output_hidden_states (`bool`, *optional*):
            Whether or not to return the hidden states of all layers. See `hidden_states` under returned tensors for
            more detail.
        return_dict (`bool`, *optional*):
            Whether or not to return a [`~utils.ModelOutput`] instead of a plain tuple.
"""


class FlaxT5PreTrainedModel(FlaxPreTrainedModel):
    """
    An abstract class to handle weights initialization and a simple interface for downloading and loading pretrained
    models.
    """

    config_class = T5Config
    base_model_prefix = "transformer"
    module_class: nn.Module = None

    def __init__(
        self,
        config: T5Config,
        input_shape: Tuple[int] = (1, 1),
        seed: int = 0,
        dtype: jnp.dtype = jnp.float32,
        _do_init: bool = True,
        gradient_checkpointing: bool = False,
        **kwargs,
    ):
        module = self.module_class(config=config, dtype=dtype, gradient_checkpointing=gradient_checkpointing, **kwargs)
        super().__init__(config, module, input_shape=input_shape, seed=seed, dtype=dtype, _do_init=_do_init)

    def enable_gradient_checkpointing(self):
        self._module = self.module_class(
            config=self.config,
            dtype=self.dtype,
            gradient_checkpointing=True,
        )

    def init_weights(self, rng: jax.random.PRNGKey, input_shape: Tuple, params: FrozenDict = None) -> FrozenDict:
        # init input tensors
        input_ids = jnp.zeros(input_shape, dtype="i4")

        attention_mask = jnp.ones_like(input_ids)
        args = [input_ids, attention_mask]
        if self.module_class not in [FlaxT5EncoderModule]:
            decoder_input_ids = jnp.ones_like(input_ids)
            decoder_attention_mask = jnp.ones_like(input_ids)
            args.extend([decoder_input_ids, decoder_attention_mask])

        params_rng, dropout_rng = jax.random.split(rng)
        rngs = {"params": params_rng, "dropout": dropout_rng}

        random_params = self.module.init(
            rngs,
            *args,
        )["params"]

        if params is not None:
            random_params = flatten_dict(unfreeze(random_params))
            params = flatten_dict(unfreeze(params))
            for missing_key in self._missing_keys:
                params[missing_key] = random_params[missing_key]
            self._missing_keys = set()
            return freeze(unflatten_dict(params))
        else:
            return random_params

    @add_start_docstrings_to_model_forward(T5_INPUTS_DOCSTRING)
    def __call__(
        self,
        input_ids: jnp.ndarray,
        attention_mask: Optional[jnp.ndarray] = None,
        decoder_input_ids: jnp.ndarray = None,
        decoder_attention_mask: Optional[jnp.ndarray] = None,
        output_attentions: Optional[bool] = None,
        output_hidden_states: Optional[bool] = None,
        return_dict: Optional[bool] = None,
        train: bool = False,
        params: dict = None,
        dropout_rng: PRNGKey = None,
    ):
        output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions
        output_hidden_states = (
            output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states
        )
        return_dict = return_dict if return_dict is not None else self.config.return_dict

        if decoder_input_ids is None:
            raise ValueError(
                "Make sure to provide both `input_ids` and `decoder_input_ids`. `decoder_input_ids` is not passed"
                " here."
            )

        # prepare encoder inputs
        if attention_mask is None:
            attention_mask = jnp.ones_like(input_ids)

        # prepare decoder inputs
        if decoder_attention_mask is None:
            decoder_attention_mask = jnp.ones_like(decoder_input_ids)

        # Handle any PRNG if needed
        rngs = {"dropout": dropout_rng} if dropout_rng is not None else {}

        return self.module.apply(
            {"params": params or self.params},
            input_ids=jnp.array(input_ids, dtype="i4"),
            attention_mask=jnp.array(attention_mask, dtype="i4"),
            decoder_input_ids=jnp.array(decoder_input_ids, dtype="i4"),
            decoder_attention_mask=jnp.array(decoder_attention_mask, dtype="i4"),
            output_attentions=output_attentions,
            output_hidden_states=output_hidden_states,
            return_dict=return_dict,
            deterministic=not train,
            rngs=rngs,
        )

    def init_cache(self, batch_size, max_length, encoder_outputs):
        r"""
        Args:
            batch_size (`int`):
                batch_size used for fast auto-regressive decoding. Defines the batch size of the initialized cache.
            max_length (`int`):
                maximum possible length for auto-regressive decoding. Defines the sequence length of the initialized
                cache.
            encoder_outputs (`Union[FlaxBaseModelOutput, tuple(tuple(jnp.ndarray))]`):
                `encoder_outputs` consists of (`last_hidden_state`, *optional*: `hidden_states`, *optional*:
                `attentions`). `last_hidden_state` of shape `(batch_size, sequence_length, hidden_size)`, *optional*)
                is a sequence of hidden-states at the output of the last layer of the encoder. Used in the
                cross-attention of the decoder.
        """
        # init input variables to retrieve cache
        decoder_input_ids = jnp.ones((batch_size, max_length), dtype="i4")
        decoder_attention_mask = jnp.ones_like(decoder_input_ids)

        def _decoder_forward(module, decoder_input_ids, decoder_attention_mask, **kwargs):
            decoder_module = module._get_decoder_module()
            return decoder_module(
                decoder_input_ids,
                decoder_attention_mask,
                **kwargs,
            )

        init_variables = self.module.init(
            jax.random.PRNGKey(0),
            decoder_input_ids=decoder_input_ids,
            decoder_attention_mask=decoder_attention_mask,
            encoder_hidden_states=encoder_outputs[0],
            init_cache=True,
            method=_decoder_forward,  # we only need to call the decoder to init the cache
        )
        return unfreeze(init_variables["cache"])

    @add_start_docstrings(T5_ENCODE_INPUTS_DOCSTRING)
    @replace_return_docstrings(output_type=FlaxBaseModelOutput, config_class=T5Config)
    def encode(
        self,
        input_ids: jnp.ndarray,
        attention_mask: Optional[jnp.ndarray] = None,
        output_attentions: Optional[bool] = None,
        output_hidden_states: Optional[bool] = None,
        return_dict: Optional[bool] = None,
        train: bool = False,
        params: dict = None,
        dropout_rng: PRNGKey = None,
    ):
        r"""
        Returns:

        Example:

        ```python
        >>> from transformers import AutoTokenizer, FlaxT5ForConditionalGeneration

        >>> tokenizer = AutoTokenizer.from_pretrained("google-t5/t5-small")
        >>> model = FlaxT5ForConditionalGeneration.from_pretrained("google-t5/t5-small")

        >>> text = "My friends are cool but they eat too many carbs."
        >>> inputs = tokenizer(text, return_tensors="np")
        >>> encoder_outputs = model.encode(**inputs)
        ```"""
        output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions
        output_hidden_states = (
            output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states
        )
        return_dict = return_dict if return_dict is not None else self.config.return_dict

        if attention_mask is None:
            attention_mask = jnp.ones_like(input_ids)

        # Handle any PRNG if needed
        rngs = {}
        if dropout_rng is not None:
            rngs["dropout"] = dropout_rng

        def _encoder_forward(module, input_ids, attention_mask, **kwargs):
            encode_module = module._get_encoder_module()
            return encode_module(input_ids, attention_mask, **kwargs)

        return self.module.apply(
            {"params": params or self.params},
            input_ids=jnp.array(input_ids, dtype="i4"),
            attention_mask=jnp.array(attention_mask, dtype="i4"),
            output_attentions=output_attentions,
            output_hidden_states=output_hidden_states,
            return_dict=return_dict,
            deterministic=not train,
            rngs=rngs,
            method=_encoder_forward,
        )

    @add_start_docstrings(T5_DECODE_INPUTS_DOCSTRING)
    @replace_return_docstrings(output_type=FlaxBaseModelOutputWithPastAndCrossAttentions, config_class=T5Config)
    def decode(
        self,
        decoder_input_ids,
        encoder_outputs,
        encoder_attention_mask: Optional[jnp.ndarray] = None,
        decoder_attention_mask: Optional[jnp.ndarray] = None,
        past_key_values: dict = None,
        output_attentions: Optional[bool] = None,
        output_hidden_states: Optional[bool] = None,
        return_dict: Optional[bool] = None,
        train: bool = False,
        params: dict = None,
        dropout_rng: PRNGKey = None,
    ):
        r"""
        Returns:

        Example:

        ```python
        >>> from transformers import AutoTokenizer, FlaxT5ForConditionalGeneration
        >>> import jax.numpy as jnp

        >>> tokenizer = AutoTokenizer.from_pretrained("google-t5/t5-small")
        >>> model = FlaxT5ForConditionalGeneration.from_pretrained("google-t5/t5-small")

        >>> text = "My friends are cool but they eat too many carbs."
        >>> inputs = tokenizer(text, return_tensors="np")
        >>> encoder_outputs = model.encode(**inputs)

        >>> decoder_start_token_id = model.config.decoder_start_token_id
        >>> decoder_input_ids = jnp.ones((inputs.input_ids.shape[0], 1), dtype="i4") * decoder_start_token_id

        >>> outputs = model.decode(decoder_input_ids, encoder_outputs)
        >>> logits = outputs.logits
        ```"""
        output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions
        output_hidden_states = (
            output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states
        )
        return_dict = return_dict if return_dict is not None else self.config.return_dict

        encoder_hidden_states = encoder_outputs[0]
        if encoder_attention_mask is None:
            batch_size, sequence_length = encoder_hidden_states.shape[:2]
            encoder_attention_mask = jnp.ones((batch_size, sequence_length))

        batch_size, sequence_length = decoder_input_ids.shape
        if decoder_attention_mask is None:
            decoder_attention_mask = jnp.ones((batch_size, sequence_length))

        # Handle any PRNG if needed
        rngs = {}
        if dropout_rng is not None:
            rngs["dropout"] = dropout_rng

        inputs = {"params": params or self.params}

        # if past_key_values are passed then the cache is already initialized, and a private flag `init_cache` has to
        # be passed down to make sure the cache is used. The cache must also be marked as mutable so that it can be
        # changed by the FlaxT5Attention module.
        if past_key_values:
            inputs["cache"] = past_key_values
            mutable = ["cache"]
        else:
            mutable = False

        def _decoder_forward(module, decoder_input_ids, decoder_attention_mask, **kwargs):
            decoder_module = module._get_decoder_module()
            return decoder_module(
                decoder_input_ids,
                decoder_attention_mask,
                **kwargs,
            )

        outputs = self.module.apply(
            inputs,
            decoder_input_ids=jnp.array(decoder_input_ids, dtype="i4"),
            decoder_attention_mask=jnp.array(decoder_attention_mask, dtype="i4"),
            encoder_hidden_states=encoder_hidden_states,
            encoder_attention_mask=jnp.array(encoder_attention_mask, dtype="i4"),
            output_attentions=output_attentions,
            output_hidden_states=output_hidden_states,
            return_dict=return_dict,
            deterministic=not train,
            rngs=rngs,
            mutable=mutable,
            method=_decoder_forward,
        )

        # add updated cache to model output
        if past_key_values is not None and return_dict:
            outputs, past = outputs
            outputs["past_key_values"] = unfreeze(past["cache"])
            return outputs
        elif past_key_values is not None and not return_dict:
            outputs, past = outputs
            outputs = outputs[:1] + (unfreeze(past["cache"]),) + outputs[1:]

        return outputs


T5_START_DOCSTRING = r"""
    The T5 model was proposed in [Exploring the Limits of Transfer Learning with a Unified Text-to-Text
    Transformer](https://arxiv.org/abs/1910.10683) by Colin Raffel, Noam Shazeer, Adam Roberts, Katherine Lee, Sharan
    Narang, Michael Matena, Yanqi Zhou, Wei Li, Peter J. Liu. It's an encoder-decoder transformer pre-trained in a
    text-to-text denoising generative setting.

    This model inherits from [`FlaxPreTrainedModel`]. Check the superclass documentation for the generic methods the
    library implements for all its models (such as downloading or saving, resizing the input embeddings, pruning heads
    etc.)

    This model is also a Flax Linen
    [flax.nn.Module](https://flax.readthedocs.io/en/latest/_autosummary/flax.nn.module.html) subclass. Use it as a
    regular Flax Module and refer to the Flax documentation for all matter related to general usage and behavior.

    Finally, this model supports inherent JAX features such as:

    - [Just-In-Time (JIT) compilation](https://jax.readthedocs.io/en/latest/jax.html#just-in-time-compilation-jit)
    - [Automatic Differentiation](https://jax.readthedocs.io/en/latest/jax.html#automatic-differentiation)
    - [Vectorization](https://jax.readthedocs.io/en/latest/jax.html#vectorization-vmap)
    - [Parallelization](https://jax.readthedocs.io/en/latest/jax.html#parallelization-pmap)

    Parameters:
        config ([`T5Config`]): Model configuration class with all the parameters of the model.
            Initializing with a config file does not load the weights associated with the model, only the
            configuration. Check out the [`~FlaxPreTrainedModel.from_pretrained`] method to load the model weights.
        dtype (`jax.numpy.dtype`, *optional*, defaults to `jax.numpy.float32`):
            The data type of the computation. Can be one of `jax.numpy.float32`, `jax.numpy.float16` (on GPUs) and
            `jax.numpy.bfloat16` (on TPUs).

            This can be used to enable mixed-precision training or half-precision inference on GPUs or TPUs. If
            specified all the computation will be performed with the given `dtype`.

            **Note that this only specifies the dtype of the computation and does not influence the dtype of model
            parameters.**

            If you wish to change the dtype of the model parameters, see [`~FlaxPreTrainedModel.to_fp16`] and
            [`~FlaxPreTrainedModel.to_bf16`].
"""


@add_start_docstrings(
    "The bare T5 Model transformer outputting raw hidden-states without any specific head on top.",
    T5_START_DOCSTRING,
)
class FlaxT5Module(nn.Module):
    config: T5Config
    dtype: jnp.dtype = jnp.float32  # the dtype of the computation
    gradient_checkpointing: bool = False

    def _get_encoder_module(self):
        return self.encoder

    def _get_decoder_module(self):
        return self.decoder

    def setup(self):
        self.shared = nn.Embed(
            self.config.vocab_size,
            self.config.d_model,
            embedding_init=jax.nn.initializers.normal(self.config.initializer_factor * 1.0),
            dtype=self.dtype,
        )

        encoder_config = copy.deepcopy(self.config)
        encoder_config.causal = False
        self.encoder = FlaxT5Stack(
            encoder_config,
            embed_tokens=self.shared,
            dtype=self.dtype,
            gradient_checkpointing=self.gradient_checkpointing,
        )

        decoder_config = copy.deepcopy(self.config)
        decoder_config.causal = True
        decoder_config.num_layers = self.config.num_decoder_layers
        self.decoder = FlaxT5Stack(
            decoder_config,
            embed_tokens=self.shared,
            dtype=self.dtype,
            gradient_checkpointing=self.gradient_checkpointing,
        )

    def __call__(
        self,
        input_ids=None,
        attention_mask=None,
        decoder_input_ids=None,
        decoder_attention_mask=None,
        encoder_outputs=None,
        output_attentions=None,
        output_hidden_states=None,
        return_dict=None,
        deterministic: bool = True,
    ):
        return_dict = return_dict if return_dict is not None else self.config.use_return_dict

        # Encode if needed (training, first prediction pass)
        encoder_outputs = self.encoder(
            input_ids=input_ids,
            attention_mask=attention_mask,
            output_attentions=output_attentions,
            output_hidden_states=output_hidden_states,
            return_dict=return_dict,
            deterministic=deterministic,
        )

        # Decode
        decoder_outputs = self.decoder(
            input_ids=decoder_input_ids,
            attention_mask=decoder_attention_mask,
            encoder_hidden_states=encoder_outputs[0],
            encoder_attention_mask=attention_mask,
            output_attentions=output_attentions,
            output_hidden_states=output_hidden_states,
            return_dict=return_dict,
            deterministic=deterministic,
        )

        if not return_dict:
            return decoder_outputs + encoder_outputs

        return FlaxSeq2SeqModelOutput(
            last_hidden_state=decoder_outputs.last_hidden_state,
            decoder_hidden_states=decoder_outputs.hidden_states,
            decoder_attentions=decoder_outputs.attentions,
            cross_attentions=decoder_outputs.cross_attentions,
            encoder_last_hidden_state=encoder_outputs.last_hidden_state,
            encoder_hidden_states=encoder_outputs.hidden_states,
            encoder_attentions=encoder_outputs.attentions,
        )


class FlaxT5Model(FlaxT5PreTrainedModel):
    module_class = FlaxT5Module


append_call_sample_docstring(FlaxT5Model, _CHECKPOINT_FOR_DOC, FlaxSeq2SeqModelOutput, _CONFIG_FOR_DOC)

FLAX_T5_MODEL_DOCSTRING = """
    Returns:

    Example:

    ```python
    >>> from transformers import AutoTokenizer, FlaxT5Model

    >>> tokenizer = AutoTokenizer.from_pretrained("google-t5/t5-small")
    >>> model = FlaxT5Model.from_pretrained("google-t5/t5-small")

    >>> input_ids = tokenizer(
    ...     "Studies have been shown that owning a dog is good for you", return_tensors="np"
    ... ).input_ids
    >>> decoder_input_ids = tokenizer("Studies show that", return_tensors="np").input_ids

    >>> # preprocess: Prepend decoder_input_ids with start token which is pad token for T5Model.
    >>> # This is not needed for torch's T5ForConditionalGeneration as it does this internally using labels arg.
    >>> decoder_input_ids = model._shift_right(decoder_input_ids)

    >>> # forward pass
    >>> outputs = model(input_ids=input_ids, decoder_input_ids=decoder_input_ids)
    >>> last_hidden_states = outputs.last_hidden_state
    ```
"""

overwrite_call_docstring(FlaxT5Model, T5_INPUTS_DOCSTRING + FLAX_T5_MODEL_DOCSTRING)
append_replace_return_docstrings(FlaxT5Model, output_type=FlaxSeq2SeqModelOutput, config_class=_CONFIG_FOR_DOC)


@add_start_docstrings(
    "The bare T5 Model transformer outputting encoder's raw hidden-states without any specific head on top.",
    T5_START_DOCSTRING,
)
class FlaxT5EncoderModule(nn.Module):
    config: T5Config
    dtype: jnp.dtype = jnp.float32  # the dtype of the computation
    gradient_checkpointing: bool = False

    def setup(self):
        self.shared = nn.Embed(
            self.config.vocab_size,
            self.config.d_model,
            embedding_init=jax.nn.initializers.normal(self.config.initializer_factor * 1.0),
            dtype=self.dtype,
        )

        encoder_config = copy.deepcopy(self.config)
        encoder_config.is_decoder = False
        encoder_config.is_encoder_decoder = False
        encoder_config.causal = False
        self.encoder = FlaxT5Stack(
            encoder_config,
            embed_tokens=self.shared,
            dtype=self.dtype,
            gradient_checkpointing=self.gradient_checkpointing,
        )

    def __call__(
        self,
        input_ids=None,
        attention_mask=None,
        output_attentions=False,
        output_hidden_states=False,
        return_dict: bool = True,
        deterministic: bool = True,
    ):
        # Encode
        encoder_outputs = self.encoder(
            input_ids=input_ids,
            attention_mask=attention_mask,
            output_attentions=output_attentions,
            output_hidden_states=output_hidden_states,
            return_dict=return_dict,
            deterministic=deterministic,
        )

        return encoder_outputs


class FlaxT5EncoderModel(FlaxT5PreTrainedModel):
    module_class = FlaxT5EncoderModule

    @add_start_docstrings_to_model_forward(T5_ENCODE_INPUTS_DOCSTRING)
    def __call__(
        self,
        input_ids: jnp.ndarray,
        attention_mask: Optional[jnp.ndarray] = None,
        output_attentions: Optional[bool] = None,
        output_hidden_states: Optional[bool] = None,
        return_dict: Optional[bool] = None,
        train: bool = False,
        params: dict = None,
        dropout_rng: PRNGKey = None,
    ):
        output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions
        output_hidden_states = (
            output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states
        )
        return_dict = return_dict if return_dict is not None else self.config.return_dict

        # prepare encoder inputs
        if attention_mask is None:
            attention_mask = jnp.ones_like(input_ids)

        # Handle any PRNG if needed
        rngs = {"dropout": dropout_rng} if dropout_rng is not None else {}

        return self.module.apply(
            {"params": params or self.params},
            input_ids=jnp.array(input_ids, dtype="i4"),
            attention_mask=jnp.array(attention_mask, dtype="i4"),
            output_attentions=output_attentions,
            output_hidden_states=output_hidden_states,
            return_dict=return_dict,
            deterministic=not train,
            rngs=rngs,
        )


@add_start_docstrings("""T5 Model with a `language modeling` head on top.""", T5_START_DOCSTRING)
class FlaxT5ForConditionalGenerationModule(nn.Module):
    config: T5Config
    dtype: jnp.dtype = jnp.float32  # the dtype of the computation
    gradient_checkpointing: bool = False

d Zd Z	 	 	 	 	 	 	 	 	 dd	e	fd
Zy)$FlaxT5ForConditionalGenerationModulerX   r3   Fr*  c                     | j                   S ru  r  r9   s    r-   r  z8FlaxT5ForConditionalGenerationModule._get_encoder_module  r  r/   c                     | j                   S ru  r  r9   s    r-   rw  z8FlaxT5ForConditionalGenerationModule._get_decoder_module  r  r/   c                    | j                   j                  | _        t        j                  | j                   j
                  | j                   j                  t        j                  j                  j                  | j                   j                        | j                        | _        t        j                  | j                         }d|_        d|_        d|_        t#        || j                  | j                  | j$                        | _        t        j                  | j                         }d|_        d|_        | j                   j(                  |_        t#        || j                  | j                  | j$                        | _        t        j.                  | j                   j
                  dt        j                  j                  j                  | j                   j                        | j                        | _        y )Nr   FrH  Tr\   )rX   r`   	model_dimrP   r   r  rO   rQ   rc   r_   r3   r  r  r  r   r   r  rE  r*  r  r  r2  r  rb   lm_headr  s      r-   r;   z*FlaxT5ForConditionalGenerationModule.setup  s[   ,,hhKK""KK66..55dkk6T6TU**	
 t{{3 %#( ,1)"DKKtzzRVRmRm
 t{{3 $,1)$(KK$B$B!"DKKtzzRVRmRm
 xxKK""++224;;3Q3QR**	
r/   Nrq   c
           
         ||n| j                   j                  }| j                  ||||||	      }|d   }
| j                  |||
|||||	      }|d   }| j                   j                  r|| j
                  dz  z  }| j                   j                  rG| j                  j                  d   d   }| j                  j                  dd|j                  ii|      }n| j                  |      }|s|f|dd  z   |z   S t        ||j                  |j                  |j                  |j                  |j                   |j                  |j                  	      S )
Nr  r   r  r[   r^  	embeddingkernelr   )logitsr  r  r  r<  r  r  r  )rX   r  r  r  tie_word_embeddingsr  r  r   r  rr  Tr   r  rE   r;  r<  r:  )r:   r!   r   rh  ri  r}  r   r6  r  rq   rE   r  sequence_outputshared_embedding	lm_logitss                  r-   rG   z-FlaxT5ForConditionalGenerationModule.__call__  sp    &1%<k$++B]B] ,,)/!5#' ' 
 (* ,,'1"/#1/!5#' ' 	
 *!,;;** .1EFO;;**#{{44X>{K**HxAQASAS6T+UWfgI_5I</!""55GG"+;;"1"?"?.99,==&5&G&G"1"?"?.99	
 		
r/   r  r  rU   r/   r-   r  r    sa    {{E399"#(D(
F #!"?
 ?
r/   r  c                      e Zd ZeZ ee       eee	      	 	 	 	 	 	 	 	 	 dde
ej                     de
ej                     dede
e   de
e   de
e   d	ed
edefd              Z	 	 	 dde
ej$                     de
ej$                     fdZd Zy)FlaxT5ForConditionalGenerationr  Nr  ri  r  r   r6  r  ro  r^  r   c                 z    ||n j                   j                  }||n j                   j                  }||n j                   j                  }|d   }|)|j                  dd \  }}t        j                  ||f      }|j                  \  }}|t        j                  ||f      }i }|||d<   d|
xs  j                  i}|r	||d<   dg}nd} fd} j                  j                  |t        j                  |d	
      t        j                  |d	
      |t        j                  |d	
      ||||	 |||      }||\  }}n|\  \  }}}|r.t        ||j                  |j                  |j                        }n	|f|dd z   }||rt        d         |d<   |S ||s|dd t        d         fz   |dd z   }|S )aD  
        Returns:

        Example:

        ```python
        >>> from transformers import AutoTokenizer, FlaxT5ForConditionalGeneration
        >>> import jax.numpy as jnp

        >>> tokenizer = AutoTokenizer.from_pretrained("google-t5/t5-small")
        >>> model = FlaxT5ForConditionalGeneration.from_pretrained("google-t5/t5-small")

        >>> text = "summarize: My friends are cool but they eat too many carbs."
        >>> inputs = tokenizer(text, return_tensors="np")
        >>> encoder_outputs = model.encode(**inputs)

        >>> decoder_start_token_id = model.config.decoder_start_token_id
        >>> decoder_input_ids = jnp.ones((inputs.input_ids.shape[0], 1), dtype="i4") * decoder_start_token_id
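        >>> # (for T5, `decoder_start_token_id` is the pad token id, so the decoder is seeded with a single pad token)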

        >>> outputs = model.decode(decoder_input_ids, encoder_outputs)
        >>> logits = outputs.logits
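        >>> # illustrative sketch, not in the upstream docstring: reuse past keys/values
        >>> # through the autoregressive cache initialized with `init_cache`
        >>> past_key_values = model.init_cache(decoder_input_ids.shape[0], max_length=10, encoder_outputs=encoder_outputs)
        >>> outputs = model.decode(decoder_input_ids, encoder_outputs, past_key_values=past_key_values)
        >>> next_token_logits = outputs.logits[:, -1]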
        ```"""
        output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions
        output_hidden_states = (
            output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states
        )
        return_dict = return_dict if return_dict is not None else self.config.return_dict

        encoder_hidden_states = encoder_outputs[0]
        if encoder_attention_mask is None:
            batch_size, sequence_length = encoder_hidden_states.shape[:2]
            encoder_attention_mask = jnp.ones((batch_size, sequence_length))

        batch_size, sequence_length = decoder_input_ids.shape
        if decoder_attention_mask is None:
            decoder_attention_mask = jnp.ones((batch_size, sequence_length))

        # Handle any PRNG if needed
        rngs = {}
        if dropout_rng is not None:
            rngs["dropout"] = dropout_rng

        inputs = {"params": params or self.params}

        # If past_key_values are passed then the cache is already initialized and the cache
        # entry has to be passed down so it is used. The cache must be marked as mutable so
        # that it can be changed by the attention module.
        if past_key_values:
            inputs["cache"] = past_key_values
            mutable = ["cache"]
        else:
            mutable = False

        def _decoder_forward(module, decoder_input_ids, decoder_attention_mask, **kwargs):
            decoder_module = module._get_decoder_module()
            decoder_outputs = decoder_module(
                decoder_input_ids,
                decoder_attention_mask,
                **kwargs,
            )

            sequence_output = decoder_outputs[0]

            if self.config.tie_word_embeddings:
                # Rescale output before projecting on vocab
                # See https://github.com/tensorflow/mesh/blob/fa19d69eafc9a482aff0b59ddd96b025c0cb207d/mesh_tensorflow/transformer/transformer.py#L586
                sequence_output = sequence_output * (self.config.d_model**-0.5)

            if self.config.tie_word_embeddings:
                shared_embedding = module.shared.variables["params"]["embedding"]
                lm_logits = module.lm_head.apply({"params": {"kernel": shared_embedding.T}}, sequence_output)
            else:
                lm_logits = module.lm_head(sequence_output)

            return lm_logits, decoder_outputs

        outputs = self.module.apply(
            inputs,
            decoder_input_ids=jnp.array(decoder_input_ids, dtype="i4"),
            decoder_attention_mask=jnp.array(decoder_attention_mask, dtype="i4"),
            encoder_hidden_states=encoder_hidden_states,
            encoder_attention_mask=jnp.array(encoder_attention_mask, dtype="i4"),
            output_attentions=output_attentions,
            output_hidden_states=output_hidden_states,
            return_dict=return_dict,
            deterministic=not train,
            rngs=rngs,
            mutable=mutable,
            method=_decoder_forward,
        )

        if past_key_values is None:
            lm_logits, decoder_outputs = outputs
        else:
            (lm_logits, decoder_outputs), past = outputs

        if return_dict:
            outputs = FlaxCausalLMOutputWithCrossAttentions(
                logits=lm_logits,
                hidden_states=decoder_outputs.hidden_states,
                attentions=decoder_outputs.attentions,
                cross_attentions=decoder_outputs.cross_attentions,
            )
        else:
            outputs = (lm_logits,) + decoder_outputs[1:]

        # add updated cache to model output
        if past_key_values is not None and return_dict:
            outputs["past_key_values"] = unfreeze(past["cache"])
            return outputs
        elif past_key_values is not None and not return_dict:
            outputs = outputs[:1] + (unfreeze(past["cache"]),) + outputs[1:]

        return outputs

    def prepare_inputs_for_generation(
        self,
        decoder_input_ids,
        max_length,
        attention_mask: Optional[jax.Array] = None,
        decoder_attention_mask: Optional[jax.Array] = None,
        encoder_outputs=None,
        **kwargs,
    ):
        # initializing the cache
        batch_size, seq_length = decoder_input_ids.shape

        past_key_values = self.init_cache(batch_size, max_length, encoder_outputs)
        # Note that usually one would have to put 0's in the attention_mask for
        # x > input_ids.shape[-1] and x < cache_length, but since the decoder uses a
        # causal mask, those positions are masked anyway. Thus a single static
        # attention_mask can be created here, which is more efficient for compilation.
        extended_attention_mask = jnp.ones((batch_size, max_length), dtype="i4")
        if decoder_attention_mask is not None:
            extended_attention_mask = jax.lax.dynamic_update_slice(
                extended_attention_mask, decoder_attention_mask, (0, 0)
            )

        return {
            "past_key_values": past_key_values,
            "encoder_outputs": encoder_outputs,
            "encoder_attention_mask": attention_mask,
            "decoder_attention_mask": extended_attention_mask,
        }

    def update_inputs_for_generation(self, model_outputs, model_kwargs):
        model_kwargs["past_key_values"] = model_outputs.past_key_values
        return model_kwargs


FLAX_T5_CONDITIONAL_GENERATION_DOCSTRING = """
    Returns:

    Example:

    ```python
    >>> from transformers import AutoTokenizer, FlaxT5ForConditionalGeneration

    >>> tokenizer = AutoTokenizer.from_pretrained("google-t5/t5-small")
    >>> model = FlaxT5ForConditionalGeneration.from_pretrained("google-t5/t5-small")

    >>> ARTICLE_TO_SUMMARIZE = "summarize: My friends are cool but they eat too many carbs."
    >>> inputs = tokenizer([ARTICLE_TO_SUMMARIZE], return_tensors="np")

    >>> # Generate Summary
    >>> summary_ids = model.generate(inputs["input_ids"]).sequences
    >>> print(tokenizer.decode(summary_ids[0], skip_special_tokens=True, clean_up_tokenization_spaces=False))
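    >>> # illustrative addition, not in the upstream docstring: decoding can be tuned
    >>> # through `generate`, e.g. beam search with a length budget
    >>> summary_ids = model.generate(inputs["input_ids"], max_length=20, num_beams=2).sequences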
    ```
"""


overwrite_call_docstring(
    FlaxT5ForConditionalGeneration, T5_INPUTS_DOCSTRING + FLAX_T5_CONDITIONAL_GENERATION_DOCSTRING
)
append_replace_return_docstrings(
    FlaxT5ForConditionalGeneration, output_type=FlaxSeq2SeqLMOutput, config_class=_CONFIG_FOR_DOC
)
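

# Illustrative training-time sketch (not part of the library code; `tokenizer`,
# `model`, and `inputs` as in the docstring examples above): labels become decoder
# inputs by shifting them one position to the right with `shift_tokens_right`,
# seeding the sequence with `decoder_start_token_id` (the pad token for T5).
#
#   labels = tokenizer("Das Haus ist wunderbar.", return_tensors="np").input_ids
#   decoder_input_ids = shift_tokens_right(
#       labels, model.config.pad_token_id, model.config.decoder_start_token_id
#   )
#   outputs = model(input_ids=inputs["input_ids"], decoder_input_ids=decoder_input_ids)
#   lm_logits = outputs.logits  # (batch, target_len, vocab_size)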