
    sg-                      d Z ddlmZ ddlZddlZddlZddlZddlmZm	Z	m
Z
 ddlZddlZddlmZ ddlmZ ddlmZmZmZmZ dd	lmZmZmZmZmZmZmZ dd
l m!Z!m"Z"m#Z# ddl$m%Z%m&Z&m'Z'm(Z( ddl)m*Z*  e'jV                  e,      Z-dZ. G d dej^                  j`                        Z1 G d dej^                  j`                        Z2 G d dej^                  j`                        Z3 G d dej^                  j`                        Z4 G d dej^                  j`                        Z5 G d dej^                  j`                        Z6 G d dej^                  j`                        Z7 G d dej^                  j`                        Z8e G d d ej^                  j`                               Z9 G d! d"e      Z:d#Z;d$Z<d%Z=d&Z> e%d'e;       G d( d)e:             Z? e%d*e;       G d+ d,e:e             Z@ e%d-e;       G d. d/e:             ZAy)0zTF 2.0 T5 model.    )annotationsN)OptionalTupleUnion)dynamic_slice   )get_tf_activation)TFBaseModelOutput+TFBaseModelOutputWithPastAndCrossAttentionsTFSeq2SeqLMOutputTFSeq2SeqModelOutput)TFCausalLanguageModelingLossTFModelInputTypeTFPreTrainedModelget_initializerkeraskeras_serializableunpack_inputs)check_embeddings_within_bounds
shape_liststable_softmax)add_start_docstrings%add_start_docstrings_to_model_forwardloggingreplace_return_docstrings   )T5Configr   c                  0     e Zd Zd fd	Z fdZd Z xZS )TFT5LayerNormc                @    t        |   di | || _        || _        y)zb
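# T5 uses an RMS-style layer norm throughout: roughly y = weight * x / sqrt(mean(x**2, axis=-1) + epsilon),
# with no mean subtraction and no bias term, as the class below describes.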
class TFT5LayerNorm(keras.layers.Layer):
    def __init__(self, hidden_size, epsilon=1e-6, **kwargs):
        """
        Construct a layernorm module in the T5 style. No bias and no subtraction of mean.
        """
        super().__init__(**kwargs)
        self.variance_epsilon = epsilon
        self.hidden_size = hidden_size

    def build(self, input_shape):
        """Build shared word embedding layer"""
        self.weight = self.add_weight("weight", shape=(self.hidden_size,), initializer="ones")
        super().build(input_shape)

    def call(self, hidden_states):
        variance = tf.math.reduce_mean(tf.math.square(hidden_states), axis=-1, keepdims=True)
        hidden_states = hidden_states * tf.math.rsqrt(variance + self.variance_epsilon)
        return self.weight * hidden_states


class TFT5DenseActDense(keras.layers.Layer):
    def __init__(self, config, **kwargs):
        super().__init__(**kwargs)
        wi_initializer = keras.initializers.RandomNormal(
            mean=0, stddev=config.initializer_factor * (config.d_model**-0.5)
        )
        wo_initializer = keras.initializers.RandomNormal(
            mean=0, stddev=config.initializer_factor * (config.d_ff**-0.5)
        )
        self.wi = keras.layers.Dense(config.d_ff, use_bias=False, name="wi", kernel_initializer=wi_initializer)
        self.wo = keras.layers.Dense(config.d_model, use_bias=False, name="wo", kernel_initializer=wo_initializer)
        self.dropout = keras.layers.Dropout(config.dropout_rate)
        self.act = get_tf_activation(config.dense_act_fn)
        self.config = config

    def call(self, hidden_states, training=False):
        hidden_states = self.wi(hidden_states)
        hidden_states = self.act(hidden_states)
        hidden_states = self.dropout(hidden_states, training=training)
        hidden_states = self.wo(hidden_states)
        return hidden_states

    def build(self, input_shape=None):
        if self.built:
            return
        self.built = True
        if getattr(self, "wi", None) is not None:
            with tf.name_scope(self.wi.name):
                self.wi.build([None, None, self.config.d_model])
        if getattr(self, "wo", None) is not None:
            with tf.name_scope(self.wo.name):
                self.wo.build([None, None, self.config.d_ff])


class TFT5DenseGatedActDense(keras.layers.Layer):
    def __init__(self, config, **kwargs):
        super().__init__(**kwargs)
        wi_initializer = keras.initializers.RandomNormal(
            mean=0, stddev=config.initializer_factor * (config.d_model**-0.5)
        )
        wo_initializer = keras.initializers.RandomNormal(
            mean=0, stddev=config.initializer_factor * (config.d_ff**-0.5)
        )
        self.wi_0 = keras.layers.Dense(config.d_ff, use_bias=False, name="wi_0", kernel_initializer=wi_initializer)
        self.wi_1 = keras.layers.Dense(config.d_ff, use_bias=False, name="wi_1", kernel_initializer=wi_initializer)
        self.wo = keras.layers.Dense(config.d_model, use_bias=False, name="wo", kernel_initializer=wo_initializer)
        self.dropout = keras.layers.Dropout(config.dropout_rate)
        self.act = get_tf_activation(config.dense_act_fn)
        self.config = config

    def call(self, hidden_states, training=False):
        hidden_gelu = self.act(self.wi_0(hidden_states))
        hidden_linear = self.wi_1(hidden_states)
        hidden_states = hidden_gelu * hidden_linear
        hidden_states = self.dropout(hidden_states, training=training)
        hidden_states = self.wo(hidden_states)
        return hidden_states

    def build(self, input_shape=None):
        if self.built:
            return
        self.built = True
        if getattr(self, "wi_0", None) is not None:
            with tf.name_scope(self.wi_0.name):
                self.wi_0.build([None, None, self.config.d_model])
        if getattr(self, "wi_1", None) is not None:
            with tf.name_scope(self.wi_1.name):
                self.wi_1.build([None, None, self.config.d_model])
        if getattr(self, "wo", None) is not None:
            with tf.name_scope(self.wo.name):
                self.wo.build([None, None, self.config.d_ff])


class TFT5LayerFF(keras.layers.Layer):
    def __init__(self, config, **kwargs):
        super().__init__(**kwargs)
        if config.is_gated_act:
            self.DenseReluDense = TFT5DenseGatedActDense(config, name="DenseReluDense")
        else:
            self.DenseReluDense = TFT5DenseActDense(config, name="DenseReluDense")
        self.layer_norm = TFT5LayerNorm(config.d_model, epsilon=config.layer_norm_epsilon, name="layer_norm")
        self.dropout = keras.layers.Dropout(config.dropout_rate)

    def call(self, hidden_states, training=False):
        normed_hidden_states = self.layer_norm(hidden_states)
        dense_output = self.DenseReluDense(normed_hidden_states, training=training)
        hidden_states = hidden_states + self.dropout(dense_output, training=training)
        return hidden_states

    def build(self, input_shape=None):
        if self.built:
            return
        self.built = True
        if getattr(self, "layer_norm", None) is not None:
            with tf.name_scope(self.layer_norm.name):
                self.layer_norm.build(None)
        if getattr(self, "DenseReluDense", None) is not None:
            with tf.name_scope(self.DenseReluDense.name):
                self.DenseReluDense.build(None)


class TFT5Attention(keras.layers.Layer):
    NEW_ID = itertools.count()

    def __init__(self, config, has_relative_attention_bias=False, **kwargs):
        super().__init__(**kwargs)
        self.layer_id = next(TFT5Attention.NEW_ID)
        self.is_decoder = config.is_decoder
        self.use_cache = config.use_cache
        self.has_relative_attention_bias = has_relative_attention_bias
        self.output_attentions = config.output_attentions

        self.relative_attention_num_buckets = config.relative_attention_num_buckets
        self.relative_attention_max_distance = config.relative_attention_max_distance
        self.d_model = config.d_model
        self.key_value_proj_dim = config.d_kv
        self.n_heads = config.num_heads
        self.inner_dim = self.n_heads * self.key_value_proj_dim

        # Mesh TensorFlow initialization to avoid scaling before softmax
        q_initializer = keras.initializers.RandomNormal(
            mean=0, stddev=config.initializer_factor * ((self.inner_dim * self.key_value_proj_dim) ** -0.5)
        )
        k_initializer = keras.initializers.RandomNormal(
            mean=0, stddev=config.initializer_factor * (self.inner_dim**-0.5)
        )
        v_initializer = keras.initializers.RandomNormal(
            mean=0, stddev=config.initializer_factor * (self.inner_dim**-0.5)
        )
        o_initializer = keras.initializers.RandomNormal(
            mean=0, stddev=config.initializer_factor * (self.inner_dim**-0.5)
        )
        self.relative_attention_bias_initializer = keras.initializers.RandomNormal(
            mean=0, stddev=config.initializer_factor * (self.inner_dim**-0.5)
        )

        self.q = keras.layers.Dense(self.inner_dim, use_bias=False, name="q", kernel_initializer=q_initializer)
        self.k = keras.layers.Dense(self.inner_dim, use_bias=False, name="k", kernel_initializer=k_initializer)
        self.v = keras.layers.Dense(self.inner_dim, use_bias=False, name="v", kernel_initializer=v_initializer)
        self.o = keras.layers.Dense(self.d_model, use_bias=False, name="o", kernel_initializer=o_initializer)
        self.dropout = keras.layers.Dropout(config.dropout_rate)

        self.pruned_heads = set()

    def build(self, input_shape=None):
        if self.built:
            return
        self.built = True
        if self.has_relative_attention_bias:
            with tf.name_scope("relative_attention_bias"):
                self.relative_attention_bias = self.add_weight(
                    name="embeddings",
                    shape=[self.relative_attention_num_buckets, self.n_heads],
                    initializer=self.relative_attention_bias_initializer,
                )
        if getattr(self, "q", None) is not None:
            with tf.name_scope(self.q.name):
                self.q.build([None, None, self.d_model])
        if getattr(self, "k", None) is not None:
            with tf.name_scope(self.k.name):
                self.k.build([None, None, self.d_model])
        if getattr(self, "v", None) is not None:
            with tf.name_scope(self.v.name):
                self.v.build([None, None, self.d_model])
        if getattr(self, "o", None) is not None:
            with tf.name_scope(self.o.name):
                self.o.build([None, None, self.inner_dim])

    def prune_heads(self, heads):
        raise NotImplementedError

    @staticmethod
    def _relative_position_bucket(relative_position, bidirectional=True, num_buckets=32, max_distance=128):
        """
        Adapted from Mesh Tensorflow:
        https://github.com/tensorflow/mesh/blob/0cb87fe07da627bf0b7e60475d59f95ed6b5be3d/mesh_tensorflow/transformer/transformer_layers.py#L593

        Translate relative position to a bucket number for relative attention. The relative position is defined as
        memory_position - query_position, i.e. the distance in tokens from the attending position to the attended-to
        position. If bidirectional=False, then positive relative positions are invalid. We use smaller buckets for
        small absolute relative_position and larger buckets for larger absolute relative_positions. All relative
        positions >=max_distance map to the same bucket. All relative positions <=-max_distance map to the same bucket.
        This should allow for more graceful generalization to longer sequences than the model has been trained on.

        Args:
            relative_position: an int32 Tensor
            bidirectional: a boolean - whether the attention is bidirectional
            num_buckets: an integer
            max_distance: an integer

        Returns:
            a Tensor with the same shape as relative_position, containing int32 values in the range [0, num_buckets)
        """
        relative_buckets = 0
        if bidirectional:
            num_buckets //= 2
            relative_buckets += (
                tf.cast(tf.math.greater(relative_position, 0), dtype=relative_position.dtype) * num_buckets
            )
            relative_position = tf.math.abs(relative_position)
        else:
            relative_position = -tf.math.minimum(relative_position, 0)
        # now relative_position is in the range [0, inf)
        max_exact = num_buckets // 2
        is_small = tf.math.less(relative_position, max_exact)
        relative_position_if_large = max_exact + tf.cast(
            tf.math.log(tf.cast(relative_position, tf.float32) / tf.cast(max_exact, tf.float32))
            / math.log(max_distance / max_exact)
            * (num_buckets - max_exact),
            dtype=relative_position.dtype,
        )
        relative_position_if_large = tf.math.minimum(relative_position_if_large, num_buckets - 1)
        relative_buckets += tf.where(is_small, relative_position, relative_position_if_large)
        return relative_buckets

    def compute_bias(self, query_length, key_length):
        """Compute binned relative position bias"""
        context_position = tf.range(query_length)[:, None]
        memory_position = tf.range(key_length)[None, :]
        relative_position = memory_position - context_position  # shape (query_length, key_length)
        relative_position_bucket = self._relative_position_bucket(
            relative_position,
            bidirectional=(not self.is_decoder),
            num_buckets=self.relative_attention_num_buckets,
            max_distance=self.relative_attention_max_distance,
        )
        values = tf.gather(
            self.relative_attention_bias, relative_position_bucket
        )  # shape (query_length, key_length, num_heads)
        values = tf.expand_dims(
            tf.transpose(values, [2, 0, 1]), axis=0
        )  # shape (1, num_heads, query_length, key_length)
        return values

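    # As a concrete illustration of the bucketing above (assuming the default num_buckets=32, max_distance=128 and
    # bidirectional attention, so 16 buckets per direction): a key 3 positions *after* the query lands in bucket
    # 16 + 3 = 19, a key 3 positions *before* it lands in bucket 3, and all keys max_distance or more positions away
    # share the last bucket of their direction.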
    def call(
        self,
        hidden_states,
        mask=None,
        key_value_states=None,
        position_bias=None,
        past_key_value=None,
        layer_head_mask=None,
        query_length=None,
        use_cache=False,
        output_attentions=False,
        training=False,
    ):
        """
        Self-attention (if key_value_states is None) or attention over source sentence (provided by key_value_states).
        """
        # Input is (batch_size, query_length, dim)
        # past_key_value[0] is (batch_size, n_heads, q_len - 1, dim_per_head)
        batch_size, seq_length = shape_list(hidden_states)[:2]

        real_seq_length = seq_length
        if past_key_value is not None:
            assert (
                len(past_key_value) == 2
            ), f"past_key_value should have 2 past states: keys and values. Got {len(past_key_value)} past states"
            real_seq_length += shape_list(past_key_value[0])[2] if query_length is None else query_length

        key_length = real_seq_length if key_value_states is None else shape_list(key_value_states)[1]

        def shape(hidden_states):
            """projection"""
            return tf.transpose(
                tf.reshape(hidden_states, (batch_size, -1, self.n_heads, self.key_value_proj_dim)), perm=(0, 2, 1, 3)
            )

        def unshape(hidden_states):
            """compute context"""
            return tf.reshape(tf.transpose(hidden_states, perm=(0, 2, 1, 3)), (batch_size, -1, self.inner_dim))

        def project(hidden_states, proj_layer, key_value_states, past_key_value):
            """projects hidden states correctly to key/query states"""
            if key_value_states is None:
                # self-attention
                hidden_states = shape(proj_layer(hidden_states))
            elif past_key_value is None:
                # cross-attention
                hidden_states = shape(proj_layer(key_value_states))

            if past_key_value is not None:
                if key_value_states is None:
                    # self-attention: concatenate cached keys/values along the time axis
                    hidden_states = tf.concat([past_key_value, hidden_states], axis=2)
                else:
                    # cross-attention: reuse the cached projection of the encoder states
                    hidden_states = past_key_value
            return hidden_states

        # get query states: (batch_size, n_heads, query_length, dim_per_head)
        query_states = shape(self.q(hidden_states))

        # get key/value states
        key_states = project(
            hidden_states, self.k, key_value_states, past_key_value[0] if past_key_value is not None else None
        )
        value_states = project(
            hidden_states, self.v, key_value_states, past_key_value[1] if past_key_value is not None else None
        )

        if self.is_decoder and use_cache:
            present_key_value_state = (key_states, value_states)
        else:
            present_key_value_state = None

        scores = tf.einsum(
            "bnqd,bnkd->bnqk", query_states, key_states
        )  # (batch_size, n_heads, query_length, key_length)

        if position_bias is None:
            if not self.has_relative_attention_bias:
                position_bias = tf.zeros((1, self.n_heads, real_seq_length, key_length))
            else:
                position_bias = self.compute_bias(real_seq_length, key_length)

            # if key and values are already calculated we want only the last query position bias
            if past_key_value is not None:
                if not self.has_relative_attention_bias:
                    position_bias = position_bias[:, :, -seq_length:, :]
                else:
                    # we might have a padded past structure, in which case we want to fetch the position bias slice
                    # right after the most recently filled past index
                    most_recently_filled_past_index = tf.reduce_max(tf.where(past_key_value[0][0, 0, :, 0] != 0.0))
                    position_bias = dynamic_slice(
                        position_bias,
                        (0, 0, most_recently_filled_past_index + 1, 0),
                        (1, self.n_heads, seq_length, real_seq_length),
                    )

            if mask is not None:
                position_bias = tf.cast(position_bias, dtype=mask.dtype)
                position_bias = position_bias + mask  # (batch_size, n_heads, query_length, key_length)

        scores += position_bias
        weights = stable_softmax(scores, axis=-1)  # (batch_size, n_heads, query_length, key_length)
        weights = self.dropout(weights, training=training)

        # Mask heads if we want to
        if layer_head_mask is not None:
            tf.debugging.assert_equal(
                shape_list(layer_head_mask),
                [self.n_heads],
                message=(
                    f"Head mask for a single layer should be of size {(self.n_heads)}, but is"
                    f" {shape_list(layer_head_mask)}"
                ),
            )
            weights = tf.reshape(layer_head_mask, (1, -1, 1, 1)) * weights

        attn_output = tf.matmul(weights, value_states)  # (batch_size, n_heads, query_length, dim_per_head)
        attn_output = self.o(unshape(attn_output))

        outputs = (attn_output,) + (present_key_value_state,) + (position_bias,)
        if output_attentions:
            outputs = outputs + (weights,)
        return outputs


class TFT5LayerSelfAttention(keras.layers.Layer):
    def __init__(self, config, has_relative_attention_bias=False, **kwargs):
        super().__init__(**kwargs)
        self.SelfAttention = TFT5Attention(
            config,
            has_relative_attention_bias=has_relative_attention_bias,
            name="SelfAttention",
        )
        self.layer_norm = TFT5LayerNorm(config.d_model, epsilon=config.layer_norm_epsilon, name="layer_norm")
        self.dropout = keras.layers.Dropout(config.dropout_rate)

    def call(
        self,
        hidden_states,
        attention_mask=None,
        position_bias=None,
        layer_head_mask=None,
        past_key_value=None,
        use_cache=False,
        output_attentions=False,
        training=False,
    ):
        normed_hidden_states = self.layer_norm(hidden_states)
        attention_output = self.SelfAttention(
            normed_hidden_states,
            mask=attention_mask,
            position_bias=position_bias,
            layer_head_mask=layer_head_mask,
            past_key_value=past_key_value,
            use_cache=use_cache,
            output_attentions=output_attentions,
            training=training,
        )
        hidden_states = hidden_states + self.dropout(attention_output[0], training=training)
        outputs = (hidden_states,) + attention_output[1:]  # add attentions if we output them
        return outputs

    def build(self, input_shape=None):
        if self.built:
            return
        self.built = True
        if getattr(self, "SelfAttention", None) is not None:
            with tf.name_scope(self.SelfAttention.name):
                self.SelfAttention.build(None)
        if getattr(self, "layer_norm", None) is not None:
            with tf.name_scope(self.layer_norm.name):
                self.layer_norm.build(None)


class TFT5LayerCrossAttention(keras.layers.Layer):
    def __init__(self, config, **kwargs):
        super().__init__(**kwargs)
        self.EncDecAttention = TFT5Attention(
            config,
            has_relative_attention_bias=False,
            name="EncDecAttention",
        )
        self.layer_norm = TFT5LayerNorm(config.d_model, epsilon=config.layer_norm_epsilon, name="layer_norm")
        self.dropout = keras.layers.Dropout(config.dropout_rate)

    def call(
        self,
        hidden_states,
        key_value_states,
        attention_mask=None,
        position_bias=None,
        layer_head_mask=None,
        past_key_value=None,
        query_length=None,
        use_cache=False,
        output_attentions=False,
        training=False,
    ):
        normed_hidden_states = self.layer_norm(hidden_states)
        attention_output = self.EncDecAttention(
            normed_hidden_states,
            mask=attention_mask,
            key_value_states=key_value_states,
            position_bias=position_bias,
            layer_head_mask=layer_head_mask,
            past_key_value=past_key_value,
            query_length=query_length,
            use_cache=use_cache,
            output_attentions=output_attentions,
            training=training,
        )
        hidden_states = hidden_states + self.dropout(attention_output[0], training=training)
        outputs = (hidden_states,) + attention_output[1:]  # add attentions if we output them
        return outputs

    def build(self, input_shape=None):
        if self.built:
            return
        self.built = True
        if getattr(self, "EncDecAttention", None) is not None:
            with tf.name_scope(self.EncDecAttention.name):
                self.EncDecAttention.build(None)
        if getattr(self, "layer_norm", None) is not None:
            with tf.name_scope(self.layer_norm.name):
                self.layer_norm.build(None)


class TFT5Block(keras.layers.Layer):
    def __init__(self, config, has_relative_attention_bias=False, **kwargs):
        super().__init__(**kwargs)
        self.is_decoder = config.is_decoder
        self.layer = []
        self.layer.append(
            TFT5LayerSelfAttention(
                config,
                has_relative_attention_bias=has_relative_attention_bias,
                name="layer_._0",
            )
        )
        if self.is_decoder:
            self.layer.append(TFT5LayerCrossAttention(config, name="layer_._1"))
        self.layer.append(TFT5LayerFF(config, name=f"layer_._{len(self.layer)}"))

    def call(
        self,
        hidden_states,
        attention_mask=None,
        position_bias=None,
        encoder_hidden_states=None,
        encoder_attention_mask=None,
        encoder_decoder_position_bias=None,
        layer_head_mask=None,
        encoder_layer_head_mask=None,
        past_key_value=None,
        use_cache=False,
        output_attentions=False,
        training=False,
    ):
        if past_key_value is not None:
            assert self.is_decoder, "Only decoder can use `past_key_values`"
            expected_num_past_key_values = 2 if encoder_hidden_states is None else 4

            if len(past_key_value) != expected_num_past_key_values:
                raise ValueError(
                    f"There should be {expected_num_past_key_values} past states. "
                    f"{'2 (key / value) for cross attention' if expected_num_past_key_values == 4 else ''}. "
                    f"Got {len(past_key_value)} past key / value states"
                )

            self_attn_past_key_value = past_key_value[:2]
            cross_attn_past_key_value = past_key_value[2:]
        else:
            self_attn_past_key_value, cross_attn_past_key_value = None, None

        self_attention_outputs = self.layer[0](
            hidden_states,
            attention_mask=attention_mask,
            position_bias=position_bias,
            layer_head_mask=layer_head_mask,
            past_key_value=self_attn_past_key_value,
            use_cache=use_cache,
            output_attentions=output_attentions,
            training=training,
        )
        hidden_states, present_key_value_state = self_attention_outputs[:2]
        attention_outputs = self_attention_outputs[2:]  # Keep self-attention outputs and relative position weights

        if self.is_decoder and encoder_hidden_states is not None:
            # the actual query length is unknown for cross attention if using past key value states; inject it here
            if present_key_value_state is not None:
                query_length = shape_list(present_key_value_state[0])[2]
            else:
                query_length = None

            cross_attention_outputs = self.layer[1](
                hidden_states,
                key_value_states=encoder_hidden_states,
                attention_mask=encoder_attention_mask,
                position_bias=encoder_decoder_position_bias,
                layer_head_mask=encoder_layer_head_mask,
                past_key_value=cross_attn_past_key_value,
                query_length=query_length,
                use_cache=use_cache,
                output_attentions=output_attentions,
                training=training,
            )
            hidden_states = cross_attention_outputs[0]

            # Combine self-attention and cross-attention key value states
            if present_key_value_state is not None:
                present_key_value_state = present_key_value_state + cross_attention_outputs[1]

            # Keep cross-attention outputs and relative position weights
            attention_outputs = attention_outputs + cross_attention_outputs[2:]

        # Apply Feed Forward layer
        hidden_states = self.layer[-1](hidden_states, training=training)
        outputs = (hidden_states,)

        # hidden-states, present key/value states, (self-attention weights), (self-attention position bias),
        # (cross-attention weights), (cross-attention position bias)
        outputs = outputs + (present_key_value_state,) + attention_outputs
        return outputs

    def build(self, input_shape=None):
        if self.built:
            return
        self.built = True
        for layer_module in self.layer:
            if hasattr(layer_module, "name"):
                with tf.name_scope(layer_module.name):
                    layer_module.build(None)


@keras_serializable
class TFT5MainLayer(keras.layers.Layer):
    config_class = T5Config

    def __init__(self, config, embed_tokens=None, **kwargs):
        super().__init__(**kwargs)

        self.config = config
        self.output_hidden_states = config.output_hidden_states
        self.output_attentions = config.output_attentions
        self.use_cache = config.use_cache

        self.embed_tokens = embed_tokens
        self.is_decoder = config.is_decoder
        self.num_hidden_layers = config.num_layers

        self.block = [
            TFT5Block(config, has_relative_attention_bias=bool(i == 0), name=f"block_._{i}")
            for i in range(config.num_layers)
        ]
        self.final_layer_norm = TFT5LayerNorm(
            config.d_model, epsilon=config.layer_norm_epsilon, name="final_layer_norm"
        )
        self.dropout = keras.layers.Dropout(config.dropout_rate)

    def _prune_heads(self, heads_to_prune):
        raise NotImplementedError

    @unpack_inputs
    def call(
        self,
        input_ids=None,
        attention_mask=None,
        encoder_hidden_states=None,
        encoder_attention_mask=None,
        inputs_embeds=None,
        head_mask=None,
        encoder_head_mask=None,
        past_key_values=None,
        use_cache=None,
        output_attentions=None,
        output_hidden_states=None,
        return_dict=None,
        training=False,
    ) -> Tuple:
        if input_ids is not None and inputs_embeds is not None:
            err_msg_prefix = "decoder_" if self.is_decoder else ""
            raise ValueError(
                f"You cannot specify both {err_msg_prefix}input_ids and {err_msg_prefix}inputs_embeds at the same time"
            )
        elif input_ids is not None:
            input_shape = shape_list(input_ids)
            input_ids = tf.reshape(input_ids, (-1, input_shape[-1]))
        elif inputs_embeds is not None:
            input_shape = shape_list(inputs_embeds)[:-1]
        else:
            err_msg_prefix = "decoder_" if self.is_decoder else ""
            raise ValueError(f"You have to specify either {err_msg_prefix}input_ids or {err_msg_prefix}inputs_embeds")

        if inputs_embeds is None:
            assert self.embed_tokens is not None, "You have to initialize the model with valid token embeddings"
            check_embeddings_within_bounds(input_ids, self.embed_tokens.input_dim)
            inputs_embeds = self.embed_tokens(input_ids)

        batch_size, seq_length = input_shape

        # required mask seq length can be calculated via length of past
        mask_seq_length = (
            shape_list(past_key_values[0][0])[2] + seq_length if past_key_values is not None else seq_length
        )

        if attention_mask is None:
            attention_mask = tf.fill((batch_size, mask_seq_length), 1)
        if self.is_decoder and encoder_attention_mask is None and encoder_hidden_states is not None:
            encoder_seq_length = shape_list(encoder_hidden_states)[1]
            encoder_attention_mask = tf.fill((batch_size, encoder_seq_length), 1)

        # initialize past_key_values with `None` if past does not exist
        if past_key_values is None:
            past_key_values = [None] * len(self.block)

        # Make the attention mask broadcastable to [batch_size, num_heads, seq_length, seq_length]
        attention_mask = tf.cast(attention_mask, dtype=inputs_embeds.dtype)
        num_dims_attention_mask = len(shape_list(attention_mask))
        if num_dims_attention_mask == 3:
            extended_attention_mask = attention_mask[:, None, :, :]
        elif num_dims_attention_mask == 2:
            # if the model is a decoder, apply a causal mask in addition to the padding mask
            if self.is_decoder:
                seq_ids = tf.range(mask_seq_length)
                causal_mask = tf.less_equal(
                    tf.tile(seq_ids[None, None, :], (batch_size, mask_seq_length, 1)),
                    seq_ids[None, :, None],
                )
                causal_mask = tf.cast(causal_mask, dtype=attention_mask.dtype)
                extended_attention_mask = causal_mask[:, None, :, :] * attention_mask[:, None, None, :]
                if past_key_values[0] is not None:
                    extended_attention_mask = extended_attention_mask[:, :, -seq_length:, :]
            else:
                extended_attention_mask = attention_mask[:, None, None, :]

        # attention_mask is 1.0 for positions to attend and 0.0 for masked positions; turn it into an additive mask
        # that is 0.0 for attended positions and a large negative value for masked positions.
        extended_attention_mask = (1.0 - extended_attention_mask) * -1e9

        if self.is_decoder and encoder_attention_mask is not None:
            # Make the cross-attention mask broadcastable to [batch_size, num_heads, seq_length, seq_length]
            encoder_attention_mask = tf.cast(encoder_attention_mask, dtype=extended_attention_mask.dtype)
            num_dims_encoder_attention_mask = len(shape_list(encoder_attention_mask))
            if num_dims_encoder_attention_mask == 3:
                encoder_extended_attention_mask = encoder_attention_mask[:, None, :, :]
            if num_dims_encoder_attention_mask == 2:
                encoder_extended_attention_mask = encoder_attention_mask[:, None, None, :]

            encoder_extended_attention_mask = (1.0 - encoder_extended_attention_mask) * -1e9
        else:
            encoder_extended_attention_mask = None

        present_key_value_states = () if use_cache and self.is_decoder else None
        all_hidden_states = () if output_hidden_states else None
        all_attentions = () if output_attentions else None
        all_cross_attentions = () if (output_attentions and self.is_decoder) else None
        position_bias = None
        encoder_decoder_position_bias = None

        hidden_states = self.dropout(inputs_embeds, training=training)

        for idx, (layer_module, past_key_value) in enumerate(zip(self.block, past_key_values)):
            if output_hidden_states:
                all_hidden_states = all_hidden_states + (hidden_states,)
            layer_outputs = layer_module(
                hidden_states,
                attention_mask=extended_attention_mask,
                position_bias=position_bias,
                encoder_hidden_states=encoder_hidden_states,
                encoder_attention_mask=encoder_extended_attention_mask,
                encoder_decoder_position_bias=encoder_decoder_position_bias,
                layer_head_mask=head_mask[idx] if head_mask is not None else None,
                encoder_layer_head_mask=encoder_head_mask[idx] if encoder_head_mask is not None else None,
                past_key_value=past_key_value,
                use_cache=use_cache,
                output_attentions=output_attentions,
                training=training,
            )

            # layer_outputs is a tuple with:
            # hidden-states, key-value-states, (self-attention weights), (self-attention position bias),
            # (cross-attention weights), (cross-attention position bias)
            hidden_states, present_key_value_state = layer_outputs[:2]

            # The position biases are shared between the layers - the first layer stores them
            position_bias = layer_outputs[2]
            if self.is_decoder and encoder_hidden_states is not None:
                encoder_decoder_position_bias = layer_outputs[4 if output_attentions else 3]

            # append next layer key value states
            if present_key_value_state is not None and use_cache and self.is_decoder:
                present_key_value_states = present_key_value_states + (present_key_value_state,)

            if output_attentions:
                all_attentions = all_attentions + (layer_outputs[3],)
                if self.is_decoder:
                    all_cross_attentions = all_cross_attentions + (layer_outputs[5],)

        hidden_states = self.final_layer_norm(hidden_states)
        hidden_states = self.dropout(hidden_states, training=training)

        # Add last layer
        if output_hidden_states:
            all_hidden_states = all_hidden_states + (hidden_states,)

        if not return_dict:
            outputs = (hidden_states,)
            if use_cache and self.is_decoder:
                outputs = outputs + (present_key_value_states,)
            if output_hidden_states:
                outputs = outputs + (all_hidden_states,)
            if output_attentions:
                outputs = outputs + (all_attentions,)
                if self.is_decoder:
                    outputs = outputs + (all_cross_attentions,)
            return outputs

        if self.is_decoder:
            return TFBaseModelOutputWithPastAndCrossAttentions(
                last_hidden_state=hidden_states,
                past_key_values=present_key_value_states,
                hidden_states=all_hidden_states,
                attentions=all_attentions,
                cross_attentions=all_cross_attentions,
            )
        else:
            return TFBaseModelOutput(
                last_hidden_state=hidden_states,
                hidden_states=all_hidden_states,
                attentions=all_attentions,
            )

    def build(self, input_shape=None):
        if self.built:
            return
        self.built = True
        if getattr(self, "final_layer_norm", None) is not None:
            with tf.name_scope(self.final_layer_norm.name):
                self.final_layer_norm.build(None)
        if getattr(self, "block", None) is not None:
            for layer in self.block:
                with tf.name_scope(layer.name):
                    layer.build(None)

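# Decoder inputs for the seq2seq classes below are built by shifting the labels one position to the right and
# prepending `decoder_start_token_id` (the pad token for T5); roughly:
#     labels:            [x1, x2, x3, </s>]
#     decoder_input_ids: [<pad>, x1, x2, x3]
# `_shift_right` in the base class below implements this (it also replaces -100 values in labels with the pad token).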
class TFT5PreTrainedModel(TFPreTrainedModel):
    """
    An abstract class to handle weights initialization and a simple interface for downloading and loading pretrained
    models.
    """

    config_class = T5Config
    base_model_prefix = "transformer"
    # names with a '.' represents the authorized unexpected/missing layers when a TF model is loaded from a PT model
    _keys_to_ignore_on_load_unexpected = [r"decoder\Wblock[\W_0]+layer[\W_1]+EncDecAttention\Wrelative_attention_bias"]

    def get_input_embeddings(self):
        return self.shared

    def set_input_embeddings(self, value):
        self.shared = value
        self.encoder.embed_tokens = self.shared
        if hasattr(self, "decoder"):
            self.decoder.embed_tokens = self.shared

    def _shift_right(self, input_ids):
        decoder_start_token_id = self.config.decoder_start_token_id
        pad_token_id = self.config.pad_token_id

        assert decoder_start_token_id is not None, (
            "self.model.config.decoder_start_token_id has to be defined. In TF T5 it is usually set to the"
            " pad_token_id. See T5 docs for more information"
        )

        start_tokens = tf.fill((shape_list(input_ids)[0], 1), decoder_start_token_id)
        start_tokens = tf.cast(start_tokens, input_ids.dtype)  # Ensure compatible dtypes for concatenation
        shifted_input_ids = tf.concat([start_tokens, input_ids[:, :-1]], -1)

        assert pad_token_id is not None, "self.model.config.pad_token_id has to be defined."
        # replace possible -100 values in labels by `pad_token_id`
        shifted_input_ids = tf.where(
            shifted_input_ids == -100,
            tf.cast(tf.fill(shape_list(shifted_input_ids), pad_token_id), shifted_input_ids.dtype),
            shifted_input_ids,
        )

        # "Verify that `labels` has only positive values and -100"
        assert_gte0 = tf.debugging.assert_greater_equal(
            shifted_input_ids, tf.constant(0, dtype=shifted_input_ids.dtype)
        )

        # Make sure the assertion op is called by wrapping the result in an identity no-op
        with tf.control_dependencies([assert_gte0]):
            shifted_input_ids = tf.identity(shifted_input_ids)

        return shifted_input_ids


T5_START_DOCSTRING = r"""

    The T5 model was proposed in [Exploring the Limits of Transfer Learning with a Unified Text-to-Text
    Transformer](https://arxiv.org/abs/1910.10683) by Colin Raffel, Noam Shazeer, Adam Roberts, Katherine Lee, Sharan
    Narang, Michael Matena, Yanqi Zhou, Wei Li, Peter J. Liu. It's an encoder decoder transformer pre-trained in a
    text-to-text denoising generative setting.

    This model inherits from [`TFPreTrainedModel`]. Check the superclass documentation for the generic methods the
    library implements for all its models (such as downloading or saving, resizing the input embeddings, pruning heads
    etc.)

    This model is also a [keras.Model](https://www.tensorflow.org/api_docs/python/tf/keras/Model) subclass. Use it
    as a regular TF 2.0 Keras Model and refer to the TF 2.0 documentation for all matter related to general usage and
    behavior.

    <Tip>

    TensorFlow models and layers in `transformers` accept two formats as input:

    - having all inputs as keyword arguments (like PyTorch models), or
    - having all inputs as a list, tuple or dict in the first positional argument.

    The reason the second format is supported is that Keras methods prefer this format when passing inputs to models
    and layers. Because of this support, when using methods like `model.fit()` things should "just work" for you - just
    pass your inputs and labels in any format that `model.fit()` supports! If, however, you want to use the second
    format outside of Keras methods like `fit()` and `predict()`, such as when creating your own layers or models with
    the Keras `Functional` API, there are three possibilities you can use to gather all the input Tensors in the first
    positional argument:

    - a single Tensor with `input_ids` only and nothing else: `model(input_ids)`
    - a list of varying length with one or several input Tensors IN THE ORDER given in the docstring:
    `model([input_ids, attention_mask])` or `model([input_ids, attention_mask, token_type_ids])`
    - a dictionary with one or several input Tensors associated to the input names given in the docstring:
    `model({"input_ids": input_ids, "token_type_ids": token_type_ids})`

    Note that when creating models and layers with
    [subclassing](https://keras.io/guides/making_new_layers_and_models_via_subclassing/) then you don't need to worry
    about any of this, as you can just pass inputs like you would to any other Python function!

    </Tip>

    Parameters:
        config ([`T5Config`]): Model configuration class with all the parameters of the model.
            Initializing with a config file does not load the weights associated with the model, only the
            configuration. Check out the [`~PreTrainedModel.from_pretrained`] method to load the model weights.
"""

T5_INPUTS_DOCSTRING = r"""
    Args:
        input_ids (`tf.Tensor` of shape `(batch_size, sequence_length)`):
            Indices of input sequence tokens in the vocabulary. T5 is a model with relative position embeddings so you
            should be able to pad the inputs on the right or the left.

            Indices can be obtained using [`AutoTokenizer`]. See [`PreTrainedTokenizer.__call__`] and
            [`PreTrainedTokenizer.encode`] for details.

            [What are input IDs?](../glossary#input-ids)

            To know more on how to prepare `inputs` for pretraining take a look at [T5 Training](./t5#training).
        decoder_input_ids (`tf.Tensor` of shape `(batch_size, target_sequence_length)`, *optional*):
            Provide for sequence to sequence training. T5 uses the `pad_token_id` as the starting token for
            `decoder_input_ids` generation. If `past_key_values` is used, optionally only the last `decoder_input_ids`
            have to be input (see `past_key_values`).

            To know more on how to prepare `decoder_input_ids` for pretraining take a look at [T5
            Training](./t5#training).
        attention_mask (`tf.Tensor` of shape `(batch_size, sequence_length)`, *optional*):
            Mask to avoid performing attention on padding token indices. Mask values selected in `[0, 1]`:

            - 1 for tokens that are **not masked**,
            - 0 for tokens that are **masked**.

            [What are attention masks?](../glossary#attention-mask)
        decoder_attention_mask (`tf.Tensor` of shape `(batch_size, target_sequence_length)`, *optional*):
            Default behavior: generate a tensor that ignores pad tokens in `decoder_input_ids`. Causal mask will also
            be used by default.
        head_mask (`tf.Tensor` of shape `(num_heads,)` or `(num_layers, num_heads)`, *optional*):
            Mask to nullify selected heads of the self-attention modules in the encoder. Mask values selected in `[0,
            1]`:

            - 1 indicates the head is **not masked**,
            - 0 indicates the head is **masked**.

        decoder_head_mask (`tf.Tensor` of shape `(num_heads,)` or `(num_layers, num_heads)`, *optional*):
            Mask to nullify selected heads of the self-attention modules in the decoder. Mask values selected in `[0,
            1]`:

            - 1 indicates the head is **not masked**,
            - 0 indicates the head is **masked**.

        encoder_outputs (`tuple(tuple(tf.FloatTensor)`, *optional*):
            Tuple consists of (`last_hidden_state`, `optional`: *hidden_states*, `optional`: *attentions*)
            `last_hidden_state` of shape `(batch_size, sequence_length, hidden_size)` is a sequence of hidden states at
            the output of the last layer of the encoder. Used in the cross-attention of the decoder.
        past_key_values (`tuple(tuple(tf.Tensor))` of length `config.n_layers` with each tuple having 4 tensors of shape `(batch_size, num_heads, sequence_length - 1, embed_size_per_head)`):
            contains precomputed key and value hidden states of the attention blocks. Can be used to speed up decoding.

            If `past_key_values` are used, the user can optionally input only the last `decoder_input_ids` (those that
            don't have their past key value states given to this model) of shape `(batch_size, 1)` instead of all
            `decoder_input_ids` of shape `(batch_size, sequence_length)`.
        inputs_embeds (`tf.Tensor` of shape `(batch_size, sequence_length, hidden_size)`, *optional*):
            Optionally, instead of passing `input_ids` you can choose to directly pass an embedded representation. This
            is useful if you want more control over how to convert `input_ids` indices into associated vectors than the
            model's internal embedding lookup matrix.
        decoder_inputs_embeds (`tf.Tensor` of shape `(batch_size, target_sequence_length, hidden_size)`, *optional*):
            Optionally, instead of passing `decoder_input_ids` you can choose to directly pass an embedded
            representation. If `past_key_values` is used, optionally only the last `decoder_inputs_embeds` have to be
            input (see `past_key_values`). This is useful if you want more control over how to convert
            `decoder_input_ids` indices into associated vectors than the model's internal embedding lookup matrix.

            If `decoder_input_ids` and `decoder_inputs_embeds` are both unset, `decoder_inputs_embeds` takes the value
            of `inputs_embeds`.
        use_cache (`bool`, *optional*, defaults to `True`):
            If set to `True`, `past_key_values` key value states are returned and can be used to speed up decoding (see
            `past_key_values`).
        output_attentions (`bool`, *optional*):
            Whether or not to return the attentions tensors of all attention layers. See `attentions` under returned
            tensors for more detail. This argument can be used only in eager mode, in graph mode the value in the
            config will be used instead.
        output_hidden_states (`bool`, *optional*):
            Whether or not to return the hidden states of all layers. See `hidden_states` under returned tensors for
            more detail. This argument can be used only in eager mode, in graph mode the value in the config will be
            used instead.
        return_dict (`bool`, *optional*):
            Whether or not to return a [`~utils.ModelOutput`] instead of a plain tuple. This argument can be used in
            eager mode, in graph mode the value will always be set to True.
        training (`bool`, *optional*, defaults to `False`):
            Whether or not to use the model in training mode (some modules like dropout modules have different
            behaviors between training and evaluation).
"""

T5_ENCODER_INPUTS_DOCSTRING = r"""
    Args:
        inputs (`tf.Tensor` of shape `(batch_size, sequence_length)`):
            Indices of input sequence tokens in the vocabulary. T5 is a model with relative position embeddings so you
            should be able to pad the inputs on the right or the left.

            Indices can be obtained using [`AutoTokenizer`]. See [`PreTrainedTokenizer.__call__`] and
            [`PreTrainedTokenizer.encode`] for details.

            To know more on how to prepare `inputs` for pre-training take a look at [T5 Training](./t5#training).
        attention_mask (`tf.Tensor` of shape `(batch_size, sequence_length)`, *optional*):
            Mask to avoid performing attention on padding token indices. Mask values selected in `[0, 1]`:

            - 1 for tokens that are **not masked**,
            - 0 for tokens that are **masked**.

            [What are attention masks?](../glossary#attention-mask)
        inputs_embeds (`tf.Tensor` of shape `(batch_size, sequence_length, hidden_size)`, *optional*):
            Optionally, instead of passing `input_ids` you can choose to directly pass an embedded representation. This
            is useful if you want more control over how to convert `input_ids` indices into associated vectors than the
            model's internal embedding lookup matrix.
        head_mask (`tf.Tensor` of shape `(num_heads,)` or `(num_layers, num_heads)`, *optional*):
            Mask to nullify selected heads of the self-attention modules. Mask values selected in `[0, 1]`:

            - 1 indicates the head is **not masked**,
            - 0 indicates the head is **masked**.

        output_attentions (`bool`, *optional*):
            Whether or not to return the attentions tensors of all attention layers. See `attentions` under returned
            tensors for more detail.
        output_hidden_states (`bool`, *optional*):
            Whether or not to return the hidden states of all layers. See `hidden_states` under returned tensors for
            more detail.
        return_dict (`bool`, *optional*):
            Whether or not to return a [`~utils.ModelOutput`] instead of a plain tuple.
        training (`bool`, *optional*, defaults to `False`):
            Whether or not to use the model in training mode (some modules like dropout modules have different
            behaviors between training and evaluation).
"""

_HEAD_MASK_WARNING_MSG = """
The input argument `head_mask` was split into two arguments `head_mask` and `decoder_head_mask`. Currently,
`decoder_head_mask` is set to copy `head_mask`, but this feature is deprecated and will be removed in future versions.
If you do not want to use any `decoder_head_mask` now, please set `decoder_head_mask = tf.ones((num_layers,
num_heads))`.
"""

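# The warning above is emitted by the seq2seq classes below when only `head_mask` is passed; roughly, a sketch of the
# (deprecated) fallback they apply is:
#
#     if head_mask is not None and decoder_head_mask is None:
#         warnings.warn(_HEAD_MASK_WARNING_MSG, FutureWarning)
#         decoder_head_mask = head_mask
#
# Passing an explicit `decoder_head_mask` (e.g. `tf.ones((num_layers, num_heads))`) avoids the warning.
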
e      	 	 	 	 	 	 	 	 	 	 	 	 	 	 	 d	 	 	 	 	 	 	 	 	 	 	 	 	 	 	 	 	 	 	 	 	 	 	 	 	 	 	 	 	 	 	 dd                     Zd	dZ xZS )
	TFT5Modelc                .   t        |   |g|i | t        j                  j	                  |j
                  |j                  t        j                  j                  | j                  j                        d      | _        d| j                  _        t        j                  |      }d|_        t!        || j                  d      | _        t        j                  |      }d|_        |j&                  |_        t!        || j                  d      | _        y )NrV  )r6  
output_dimembeddings_initializerrO   Fr[  r{   TrZ  )r"   r#   r   rW   	Embedding
vocab_sizerU   rR   TruncatedNormalr^   rT   rV  load_weight_prefixcopydeepcopyr   r   r[  r   num_decoder_layersr%  rZ  )r&   r^   inputsr(   encoder_configdecoder_configr)   s         r*   r#   zTFT5Model.__init__  s    3&3F3ll,,''~~#(#5#5#E#EdkkFdFd#e	 - 
 *2&v.#( $^T[[yQv.$(!$*$=$=!$^T[[yQr+   c                    | j                   S rl   r[  rW  s    r*   get_encoderzTFT5Model.get_encoder      ||r+   c                    | j                   S rl   rZ  rW  s    r*   get_decoderzTFT5Model.get_decoder  r~  r+   output_typerQ  c                   |"| t        j                  t        t               |}|| j	                  ||dd|	|dd||||      }|d   }| j                  |||||
||||||||      }|r|d   nd}|s||dd |fz   |dd z   }||z   S t        |j                  ||j                  |j                  |j                  |j                  |j                  |j                        S )	a  
        Returns:

        Examples:

        ```python
        >>> from transformers import AutoTokenizer, TFT5Model

        >>> tokenizer = AutoTokenizer.from_pretrained("google-t5/t5-small")
        >>> model = TFT5Model.from_pretrained("google-t5/t5-small")

        >>> input_ids = tokenizer(
        ...     "Studies have been shown that owning a dog is good for you", return_tensors="tf"
        ... ).input_ids  # Batch size 1
        >>> decoder_input_ids = tokenizer("Studies show that", return_tensors="tf").input_ids  # Batch size 1

        >>> # preprocess: Prepend decoder_input_ids with start token which is pad token for T5Model.
        >>> # This is not needed for torch's T5ForConditionalGeneration as it does this internally using labels arg.
        >>> decoder_input_ids = model._shift_right(decoder_input_ids)

        >>> # forward pass
        >>> outputs = model(input_ids, decoder_input_ids=decoder_input_ids)
        >>> last_hidden_states = outputs.last_hidden_state
        ```"""
        # FutureWarning: head_mask was separated into two input args - head_mask, decoder_head_mask
        if head_mask is not None and decoder_head_mask is None:
            warnings.warn(_HEAD_MASK_WARNING_MSG, FutureWarning)
            decoder_head_mask = head_mask

        # Encode if needed (training, first prediction pass)
        if encoder_outputs is None:
            encoder_outputs = self.encoder(
                input_ids,
                attention_mask=attention_mask,
                encoder_hidden_states=None,
                encoder_attention_mask=None,
                inputs_embeds=inputs_embeds,
                head_mask=head_mask,
                past_key_values=None,
                use_cache=False,
                output_attentions=output_attentions,
                output_hidden_states=output_hidden_states,
                return_dict=return_dict,
                training=training,
            )

        hidden_states = encoder_outputs[0]

        # Decode
        decoder_outputs = self.decoder(
            decoder_input_ids,
            attention_mask=decoder_attention_mask,
            encoder_hidden_states=hidden_states,
            encoder_attention_mask=attention_mask,
            inputs_embeds=decoder_inputs_embeds,
            head_mask=decoder_head_mask,
            encoder_head_mask=head_mask,
            past_key_values=past_key_values,
            use_cache=use_cache,
            output_attentions=output_attentions,
            output_hidden_states=output_hidden_states,
            return_dict=return_dict,
            training=training,
        )

        past = decoder_outputs[1] if use_cache else None

        if not return_dict:
            if past_key_values is not None:
                decoder_outputs = decoder_outputs[:1] + (past,) + decoder_outputs[2:]
            return decoder_outputs + encoder_outputs

        return TFSeq2SeqModelOutput(
            last_hidden_state=decoder_outputs.last_hidden_state,
            past_key_values=past,
            decoder_hidden_states=decoder_outputs.hidden_states,
            decoder_attentions=decoder_outputs.attentions,
            cross_attentions=decoder_outputs.cross_attentions,
            encoder_last_hidden_state=encoder_outputs.last_hidden_state,
            encoder_hidden_states=encoder_outputs.hidden_states,
            encoder_attentions=encoder_outputs.attentions,
        )

    def build(self, input_shape=None):
        if self.built:
            return
        self.built = True
        # The shared/tied weights expect to be in the model base namespace.  Adding "/" to the end (not the start!)
        # of a tf.name_scope puts it in the root namespace rather than the current one.
        with tf.name_scope(self.shared.load_weight_prefix + "/" + self.shared.name + "/"):
            self.shared.build(None)
        if getattr(self, "encoder", None) is not None:
            with tf.name_scope(self.encoder.name):
                self.encoder.build(None)
        if getattr(self, "decoder", None) is not None:
            with tf.name_scope(self.decoder.name):
                self.decoder.build(None)

       eee      	 	 	 	 	 	 	 	 	 	 	 	 	 	 	 	 d	 	 	 	 	 	 	 	 	 	 	 	 	 	 	 	 	 	 	 	 	 	 	 	 	 	 	 	 	 	 	 	 	 dd                     Zd Z	 	 	 	 	 	 	 dd	Zdd
ZddZ xZS )TFT5ForConditionalGenerationc                   t        |   |g|i | |j                  | _        t        j
                  j                  |j                  |j                  dt        | j                  j                              | _        d| j                  _        t        j                  |      }d|_        t!        || j                  d      | _        t        j                  |      }d|_        |j&                  |_        t!        || j                  d      | _        |j,                  s]t        j.                  j1                  d|j                  	      }t        j
                  j3                  |j                  dd
|      | _        || _	        y )NrV  rO   rp  Fr[  r{   TrZ  r   rI   lm_headrM   )r"   r#   rU   	model_dimr   rW   rq  rr  r   r^   rT   rV  rt  ru  rv  r   r   r[  r   rw  r%  rZ  tie_word_embeddingsrR   rS   rX   r  )r&   r^   rx  r(   ry  rz  lm_head_initializerr)   s          r*   r#   z%TFT5ForConditionalGeneration.__init__+  s5   3&3F3ll,,NN#24;;3Q3Q#R	 - 
 *2&v.#( $^T[[yQv.$(!$*$=$=!$^T[[yQ))"'"4"4"A"AqQWQjQj"A"k <<--!!E	Vi . DL r+   c                    | j                   j                  r| j                         S t        j                  | j
                  j                        S rl   )r^   r  rX  r8   r   r  kernelrW  s    r*   get_output_embeddingsz2TFT5ForConditionalGeneration.get_output_embeddingsG  s9    ;;**,,.. << 3 344r+   c                r   | j                   j                  r| j                  |       y t        j                  j                  d| j                   j                        }t        j                  j                  t        |      d   dd|      | _
        t        j                  |      }|| j                  _        y )Nr   rI   Fr  rM   )r^   r  r]  r   rR   rS   rT   rW   rX   r   r  r8   r   r  )r&   r\  r  transposed_values       r*   set_output_embeddingsz2TFT5ForConditionalGeneration.set_output_embeddingsO  s    ;;**%%e,"'"4"4"A"AqQUQ\Q\QoQo"A"p <<--5!!$u9Yl . DL
  "||E2"2DLLr+   c                    | j                   S rl   r|  rW  s    r*   r}  z(TFT5ForConditionalGeneration.get_encoder\  r~  r+   c                    | j                   S rl   r  rW  s    r*   r  z(TFT5ForConditionalGeneration.get_decoder_  r~  r+   r  c                   |"| t        j                  t        t               |}|| j	                  |||	|||||      }|d   }|||
| j                  |      }| j                  |||||
|||||||      }|d   }| j                  j                  r?|| j                  dz  z  }t        j                  || j                  j                  d      }n| j                  |      }t        j                  |t        j                         }|dn| j#                  ||      }|r|d   nd}|s'||dd |fz   |d	d z   }|f|dd z   |z   }||f|z   S |S t%        |t&              r1|d   }d}d}d}|r
|dz  }||   }|r
|dz  }||   }t)        |||
      }t+        ||||j,                  |j.                  |j0                  |j2                  |j,                  |j.                  	      S )a  
        labels (`tf.Tensor` of shape `(batch_size, sequence_length)`, *optional*):
            Labels for computing the cross entropy classification loss. Indices should be in `[0, ...,
            config.vocab_size - 1]`.

        Returns:

        Examples:

        ```python
        >>> from transformers import AutoTokenizer, TFT5ForConditionalGeneration

        >>> tokenizer = AutoTokenizer.from_pretrained("google-t5/t5-small")
        >>> model = TFT5ForConditionalGeneration.from_pretrained("google-t5/t5-small")

        >>> # training
        >>> inputs = tokenizer("The <extra_id_0> walks in <extra_id_1> park", return_tensors="tf").input_ids
        >>> labels = tokenizer("<extra_id_0> cute dog <extra_id_1> the <extra_id_2>", return_tensors="tf").input_ids
        >>> outputs = model(inputs, labels=labels)
        >>> loss = outputs.loss
        >>> logits = outputs.logits

        >>> # inference
        >>> inputs = tokenizer(
        ...     "summarize: studies have shown that owning a dog is good for you", return_tensors="tf"
        ... ).input_ids  # Batch size 1
        >>> outputs = model.generate(inputs)
        >>> print(tokenizer.decode(outputs[0], skip_special_tokens=True))
        >>> # studies have shown that owning a dog is good for you
        ```"""
        # FutureWarning: head_mask was separated into two input args - head_mask, decoder_head_mask
        if head_mask is not None and decoder_head_mask is None:
            warnings.warn(_HEAD_MASK_WARNING_MSG, FutureWarning)
            decoder_head_mask = head_mask

        # Encode if needed (training, first prediction pass)
        if encoder_outputs is None:
            encoder_outputs = self.encoder(
                input_ids,
                attention_mask=attention_mask,
                inputs_embeds=inputs_embeds,
                head_mask=head_mask,
                output_attentions=output_attentions,
                output_hidden_states=output_hidden_states,
                return_dict=return_dict,
                training=training,
            )

        hidden_states = encoder_outputs[0]

        if labels is not None and decoder_input_ids is None and decoder_inputs_embeds is None:
            # get decoder inputs from shifting lm labels to the right
            decoder_input_ids = self._shift_right(labels)

        # Decode
        decoder_outputs = self.decoder(
            decoder_input_ids,
            attention_mask=decoder_attention_mask,
            encoder_hidden_states=hidden_states,
            encoder_attention_mask=attention_mask,
            inputs_embeds=decoder_inputs_embeds,
            head_mask=decoder_head_mask,
            past_key_values=past_key_values,
            use_cache=use_cache,
            output_attentions=output_attentions,
            output_hidden_states=output_hidden_states,
            return_dict=return_dict,
            training=training,
        )

        sequence_output = decoder_outputs[0]

        # T5v1.1 does not tie output word embeddings and thus does not require downscaling
        if self.config.tie_word_embeddings:
            # Rescale output before projecting on vocab
            sequence_output = sequence_output * (self.model_dim**-0.5)
            logits = tf.matmul(sequence_output, self.shared.weights, transpose_b=True)
        else:
            logits = self.lm_head(sequence_output)

        logits = tf.cast(logits, tf.float32)

        loss = None if labels is None else self.hf_compute_loss(labels, logits)

        past = decoder_outputs[1] if use_cache else None
        if not return_dict:
            if past is not None:
                decoder_outputs = decoder_outputs[:1] + (past,) + decoder_outputs[2:]
            output = (logits,) + decoder_outputs[1:] + encoder_outputs
            return ((loss,) + output) if loss is not None else output

        # If the user passed a tuple for encoder_outputs, wrap it in a TFBaseModelOutput when return_dict=True
        elif isinstance(encoder_outputs, tuple):
            last_hidden_state = encoder_outputs[0]
            hidden_states = None
            attentions = None
            idx = 0
            if output_hidden_states:
                idx += 1
                hidden_states = encoder_outputs[idx]
            if output_attentions:
                idx += 1
                attentions = encoder_outputs[idx]

            encoder_outputs = TFBaseModelOutput(
                last_hidden_state=last_hidden_state,
                hidden_states=hidden_states,
                attentions=attentions,
            )

        return TFSeq2SeqLMOutput(
            loss=loss,
            logits=logits,
            past_key_values=past,
            decoder_hidden_states=decoder_outputs.hidden_states,
            decoder_attentions=decoder_outputs.attentions,
            cross_attentions=decoder_outputs.cross_attentions,
            encoder_last_hidden_state=encoder_outputs.last_hidden_state,
            encoder_hidden_states=encoder_outputs.hidden_states,
            encoder_attentions=encoder_outputs.attentions,
        )

    def serving_output(self, output):
        pkv = tf.convert_to_tensor(output.past_key_values[1:]) if self.config.use_cache else None
        dec_hs = tf.convert_to_tensor(output.decoder_hidden_states) if self.config.output_hidden_states else None
        dec_attns = tf.convert_to_tensor(output.decoder_attentions) if self.config.output_attentions else None
        cross_attns = tf.convert_to_tensor(output.cross_attentions) if self.config.output_attentions else None
        enc_hs = tf.convert_to_tensor(output.encoder_hidden_states) if self.config.output_hidden_states else None
        enc_attns = tf.convert_to_tensor(output.encoder_attentions) if self.config.output_attentions else None

        return TFSeq2SeqLMOutput(
            logits=output.logits,
            past_key_values=pkv,
            decoder_hidden_states=dec_hs,
            decoder_attentions=dec_attns,
            cross_attentions=cross_attns,
            encoder_last_hidden_state=output.encoder_last_hidden_state,
            encoder_hidden_states=enc_hs,
            encoder_attentions=enc_attns,
        )

    def prepare_inputs_for_generation(
        self,
        input_ids,
        past_key_values=None,
        attention_mask=None,
        decoder_attention_mask=None,
        head_mask=None,
        decoder_head_mask=None,
        use_cache=None,
        encoder_outputs=None,
        **kwargs,
    ):
        # cut decoder_input_ids if past is used
        if past_key_values is not None:
            input_ids = input_ids[:, -1:]

        return {
            "input_ids": None,  # needs to be passed to make Keras.layer.__call__ happy
            "decoder_input_ids": input_ids,
            "past_key_values": past_key_values,
            "encoder_outputs": encoder_outputs,
            "attention_mask": attention_mask,
            "decoder_attention_mask": decoder_attention_mask,
            "head_mask": head_mask,
            "decoder_head_mask": decoder_head_mask,
            "use_cache": use_cache,
        }

    def prepare_decoder_input_ids_from_labels(self, labels: tf.Tensor):
        return self._shift_right(labels)

    def build(self, input_shape=None):
        if self.built:
            return
        self.built = True
        # The shared/tied weights expect to be in the model base namespace.
        with tf.name_scope(self.shared.load_weight_prefix + "/" + self.shared.name + "/"):
            self.shared.build(None)
        if getattr(self, "encoder", None) is not None:
            with tf.name_scope(self.encoder.name):
                self.encoder.build(None)
        if getattr(self, "decoder", None) is not None:
            with tf.name_scope(self.decoder.name):
                self.decoder.build(None)
        if getattr(self, "lm_head", None) is not None:
            with tf.name_scope(self.lm_head.name):
                self.lm_head.build([None, None, self.config.d_model])


@add_start_docstrings(
    "The bare T5 Model transformer outputting encoder's raw hidden-states without any specific head on top.",
    T5_START_DOCSTRING,
)
class TFT5EncoderModel(TFT5PreTrainedModel):
    def __init__(self, config, *inputs, **kwargs):
        super().__init__(config, *inputs, **kwargs)
        self.shared = keras.layers.Embedding(
            config.vocab_size,
            config.d_model,
            name="shared",
            embeddings_initializer=get_initializer(self.config.initializer_factor),
        )
        # Additional attribute to specify the expected name scope of the layer (for loading/storing weights)
        self.shared.load_weight_prefix = "shared"

        encoder_config = copy.deepcopy(config)
        encoder_config.use_cache = False
        self.encoder = TFT5MainLayer(encoder_config, self.shared, name="encoder")

    def get_encoder(self):
        return self.encoder

    @unpack_inputs
    @add_start_docstrings_to_model_forward(T5_ENCODER_INPUTS_DOCSTRING)
    @replace_return_docstrings(output_type=TFBaseModelOutput, config_class=_CONFIG_FOR_DOC)
    def call(
        self,
        input_ids: TFModelInputType | None = None,
        attention_mask: np.ndarray | tf.Tensor | None = None,
        head_mask: np.ndarray | tf.Tensor | None = None,
        inputs_embeds: np.ndarray | tf.Tensor | None = None,
        output_attentions: Optional[bool] = None,
        output_hidden_states: Optional[bool] = None,
        return_dict: Optional[bool] = None,
        training: Optional[bool] = False,
    ) -> Union[Tuple, TFBaseModelOutput]:
        r"""
        Returns:

        Examples:

        ```python
        >>> from transformers import AutoTokenizer, TFT5EncoderModel

        >>> tokenizer = AutoTokenizer.from_pretrained("google-t5/t5-small")
        >>> model = TFT5EncoderModel.from_pretrained("google-t5/t5-small")

        >>> input_ids = tokenizer(
        ...     "Studies have been shown that owning a dog is good for you", return_tensors="tf"
        ... ).input_ids  # Batch size 1
        >>> outputs = model(input_ids)
        ```"""
        encoder_outputs = self.encoder(
            input_ids,
            attention_mask=attention_mask,
            encoder_hidden_states=None,
            encoder_attention_mask=None,
            inputs_embeds=inputs_embeds,
            head_mask=head_mask,
            past_key_values=None,
            use_cache=False,
            output_attentions=output_attentions,
            output_hidden_states=output_hidden_states,
            return_dict=return_dict,
            training=training,
        )

        if not return_dict:
            return encoder_outputs

        return TFBaseModelOutput(
            last_hidden_state=encoder_outputs.last_hidden_state,
            hidden_states=encoder_outputs.hidden_states,
            attentions=encoder_outputs.attentions,
        )

    def build(self, input_shape=None):
        if self.built:
            return
        self.built = True
        # The shared/tied weights expect to be in the model base namespace.
        with tf.name_scope(self.shared.load_weight_prefix + "/" + self.shared.name + "/"):
            self.shared.build(None)
        if getattr(self, "encoder", None) is not None:
            with tf.name_scope(self.encoder.name):
                self.encoder.build(None)
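# NOTE (illustrative sketch, not part of the original module): a minimal greedy decoding loop written against
# TFT5ForConditionalGeneration, showing how `use_cache`/`past_key_values` are threaded through successive calls
# and why `prepare_inputs_for_generation` above only feeds the last decoder token once a cache exists. This is
# hypothetical example code under the assumption of the "google-t5/t5-small" checkpoint used in the docstrings;
# the function name is made up and the function is never called at import time.
def _example_greedy_decoding_sketch(max_new_tokens=20):
    from transformers import AutoTokenizer, TFT5ForConditionalGeneration

    tokenizer = AutoTokenizer.from_pretrained("google-t5/t5-small")
    model = TFT5ForConditionalGeneration.from_pretrained("google-t5/t5-small")

    enc = tokenizer("summarize: studies have shown that owning a dog is good for you", return_tensors="tf")
    # Encode once; the encoder output is re-used for every decoding step.
    encoder_outputs = model.get_encoder()(enc.input_ids, attention_mask=enc.attention_mask)

    # T5 starts decoding from the pad token (decoder_start_token_id).
    decoder_input_ids = tf.constant([[model.config.decoder_start_token_id]], dtype=tf.int32)
    generated = decoder_input_ids
    past_key_values = None

    for _ in range(max_new_tokens):
        outputs = model(
            None,
            attention_mask=enc.attention_mask,
            decoder_input_ids=decoder_input_ids,
            encoder_outputs=encoder_outputs,
            past_key_values=past_key_values,
            use_cache=True,
        )
        next_token = tf.argmax(outputs.logits[:, -1, :], axis=-1, output_type=tf.int32)[:, tf.newaxis]
        generated = tf.concat([generated, next_token], axis=-1)
        if int(next_token[0, 0]) == model.config.eos_token_id:
            break
        # With a cache, only the newly generated token is fed back in on the next step.
        decoder_input_ids = next_token
        past_key_values = outputs.past_key_values

    print(tokenizer.decode(generated[0], skip_special_tokens=True))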