
"""Auto Tokenizer class."""

import importlib
import json
import os
import warnings
from collections import OrderedDict
from typing import TYPE_CHECKING, Dict, Optional, Tuple, Union

from ...configuration_utils import PretrainedConfig
from ...dynamic_module_utils import get_class_from_dynamic_module, resolve_trust_remote_code
from ...modeling_gguf_pytorch_utils import load_gguf_checkpoint
from ...tokenization_utils import PreTrainedTokenizer
from ...tokenization_utils_base import TOKENIZER_CONFIG_FILE
from ...utils import (
    cached_file,
    extract_commit_hash,
    is_g2p_en_available,
    is_sentencepiece_available,
    is_tokenizers_available,
    logging,
)
from ..encoder_decoder import EncoderDecoderConfig
from .auto_factory import _LazyAutoMapping
from .configuration_auto import (
    CONFIG_MAPPING_NAMES,
    AutoConfig,
    config_class_to_model_type,
    model_type_to_module_name,
    replace_list_option_in_docstrings,
)


if is_tokenizers_available():
    from ...tokenization_utils_fast import PreTrainedTokenizerFast
else:
    PreTrainedTokenizerFast = None


logger = logging.get_logger(__name__)

if TYPE_CHECKING:
    # This significantly improves completion suggestion performance when
    # the transformers package is used with Microsoft's Pylance language server.
    TOKENIZER_MAPPING_NAMES: OrderedDict[str, Tuple[Optional[str], Optional[str]]] = OrderedDict()
else:
    TOKENIZER_MAPPING_NAMES = OrderedDict(
        [
            ("albert", ("AlbertTokenizer" if is_sentencepiece_available() else None, "AlbertTokenizerFast" if is_tokenizers_available() else None)),
            ("align", ("BertTokenizer", "BertTokenizerFast" if is_tokenizers_available() else None)),
            ("bark", ("BertTokenizer", "BertTokenizerFast" if is_tokenizers_available() else None)),
            ("bart", ("BartTokenizer", "BartTokenizerFast")),
            ("barthez", ("BarthezTokenizer" if is_sentencepiece_available() else None, "BarthezTokenizerFast" if is_tokenizers_available() else None)),
            ("bartpho", ("BartphoTokenizer", None)),
            ("bert", ("BertTokenizer", "BertTokenizerFast" if is_tokenizers_available() else None)),
            ("bert-generation", ("BertGenerationTokenizer" if is_sentencepiece_available() else None, None)),
            ("bert-japanese", ("BertJapaneseTokenizer", None)),
            ("bertweet", ("BertweetTokenizer", None)),
            ("big_bird", ("BigBirdTokenizer" if is_sentencepiece_available() else None, "BigBirdTokenizerFast" if is_tokenizers_available() else None)),
            ("bigbird_pegasus", ("PegasusTokenizer" if is_sentencepiece_available() else None, "PegasusTokenizerFast" if is_tokenizers_available() else None)),
            ("biogpt", ("BioGptTokenizer", None)),
            ("blenderbot", ("BlenderbotTokenizer", "BlenderbotTokenizerFast")),
            ("blenderbot-small", ("BlenderbotSmallTokenizer", None)),
            ("blip", ("BertTokenizer", "BertTokenizerFast" if is_tokenizers_available() else None)),
            ("blip-2", ("GPT2Tokenizer", "GPT2TokenizerFast" if is_tokenizers_available() else None)),
            ("bloom", (None, "BloomTokenizerFast" if is_tokenizers_available() else None)),
            ("bridgetower", ("RobertaTokenizer", "RobertaTokenizerFast" if is_tokenizers_available() else None)),
            ("bros", ("BertTokenizer", "BertTokenizerFast" if is_tokenizers_available() else None)),
            ("byt5", ("ByT5Tokenizer", None)),
            ("camembert", ("CamembertTokenizer" if is_sentencepiece_available() else None, "CamembertTokenizerFast" if is_tokenizers_available() else None)),
            ("canine", ("CanineTokenizer", None)),
            ("chameleon", ("LlamaTokenizer" if is_sentencepiece_available() else None, "LlamaTokenizerFast" if is_tokenizers_available() else None)),
            ("chinese_clip", ("BertTokenizer", "BertTokenizerFast" if is_tokenizers_available() else None)),
            ("clap", ("RobertaTokenizer", "RobertaTokenizerFast" if is_tokenizers_available() else None)),
            ("clip", ("CLIPTokenizer", "CLIPTokenizerFast" if is_tokenizers_available() else None)),
            ("clipseg", ("CLIPTokenizer", "CLIPTokenizerFast" if is_tokenizers_available() else None)),
            ("clvp", ("ClvpTokenizer", None)),
            ("code_llama", ("CodeLlamaTokenizer" if is_sentencepiece_available() else None, "CodeLlamaTokenizerFast" if is_tokenizers_available() else None)),
            ("codegen", ("CodeGenTokenizer", "CodeGenTokenizerFast" if is_tokenizers_available() else None)),
            ("cohere", (None, "CohereTokenizerFast" if is_tokenizers_available() else None)),
            ("convbert", ("ConvBertTokenizer", "ConvBertTokenizerFast" if is_tokenizers_available() else None)),
            ("cpm", ("CpmTokenizer" if is_sentencepiece_available() else None, "CpmTokenizerFast" if is_tokenizers_available() else None)),
            ("cpmant", ("CpmAntTokenizer", None)),
            ("ctrl", ("CTRLTokenizer", None)),
            ("data2vec-audio", ("Wav2Vec2CTCTokenizer", None)),
            ("data2vec-text", ("RobertaTokenizer", "RobertaTokenizerFast" if is_tokenizers_available() else None)),
            ("dbrx", ("GPT2Tokenizer", "GPT2TokenizerFast" if is_tokenizers_available() else None)),
            ("deberta", ("DebertaTokenizer", "DebertaTokenizerFast" if is_tokenizers_available() else None)),
            ("deberta-v2", ("DebertaV2Tokenizer" if is_sentencepiece_available() else None, "DebertaV2TokenizerFast" if is_tokenizers_available() else None)),
            ("distilbert", ("DistilBertTokenizer", "DistilBertTokenizerFast" if is_tokenizers_available() else None)),
            ("dpr", ("DPRQuestionEncoderTokenizer", "DPRQuestionEncoderTokenizerFast" if is_tokenizers_available() else None)),
            ("electra", ("ElectraTokenizer", "ElectraTokenizerFast" if is_tokenizers_available() else None)),
            ("ernie", ("BertTokenizer", "BertTokenizerFast" if is_tokenizers_available() else None)),
            ("ernie_m", ("ErnieMTokenizer" if is_sentencepiece_available() else None, None)),
            ("esm", ("EsmTokenizer", None)),
            ("falcon", (None, "PreTrainedTokenizerFast" if is_tokenizers_available() else None)),
            ("falcon_mamba", (None, "GPTNeoXTokenizerFast" if is_tokenizers_available() else None)),
            ("fastspeech2_conformer", ("FastSpeech2ConformerTokenizer" if is_g2p_en_available() else None, None)),
            ("flaubert", ("FlaubertTokenizer", None)),
            ("fnet", ("FNetTokenizer" if is_sentencepiece_available() else None, "FNetTokenizerFast" if is_tokenizers_available() else None)),
            ("fsmt", ("FSMTTokenizer", None)),
            ("funnel", ("FunnelTokenizer", "FunnelTokenizerFast" if is_tokenizers_available() else None)),
            ("gemma", ("GemmaTokenizer" if is_sentencepiece_available() else None, "GemmaTokenizerFast" if is_tokenizers_available() else None)),
            ("gemma2", ("GemmaTokenizer" if is_sentencepiece_available() else None, "GemmaTokenizerFast" if is_tokenizers_available() else None)),
            ("git", ("BertTokenizer", "BertTokenizerFast" if is_tokenizers_available() else None)),
            ("glm", (None, "PreTrainedTokenizerFast" if is_tokenizers_available() else None)),
            ("gpt-sw3", ("GPTSw3Tokenizer" if is_sentencepiece_available() else None, None)),
            ("gpt2", ("GPT2Tokenizer", "GPT2TokenizerFast" if is_tokenizers_available() else None)),
            ("gpt_bigcode", ("GPT2Tokenizer", "GPT2TokenizerFast" if is_tokenizers_available() else None)),
            ("gpt_neo", ("GPT2Tokenizer", "GPT2TokenizerFast" if is_tokenizers_available() else None)),
            ("gpt_neox", (None, "GPTNeoXTokenizerFast" if is_tokenizers_available() else None)),
            ("gpt_neox_japanese", ("GPTNeoXJapaneseTokenizer", None)),
            ("gptj", ("GPT2Tokenizer", "GPT2TokenizerFast" if is_tokenizers_available() else None)),
            ("gptsan-japanese", ("GPTSanJapaneseTokenizer", None)),
            ("grounding-dino", ("BertTokenizer", "BertTokenizerFast" if is_tokenizers_available() else None)),
            ("groupvit", ("CLIPTokenizer", "CLIPTokenizerFast" if is_tokenizers_available() else None)),
            ("herbert", ("HerbertTokenizer", "HerbertTokenizerFast" if is_tokenizers_available() else None)),
            ("hubert", ("Wav2Vec2CTCTokenizer", None)),
            ("ibert", ("RobertaTokenizer", "RobertaTokenizerFast" if is_tokenizers_available() else None)),
            ("idefics", (None, "LlamaTokenizerFast" if is_tokenizers_available() else None)),
            ("idefics2", ("LlamaTokenizer", "LlamaTokenizerFast" if is_tokenizers_available() else None)),
            ("idefics3", ("LlamaTokenizer", "LlamaTokenizerFast" if is_tokenizers_available() else None)),
            ("instructblip", ("GPT2Tokenizer", "GPT2TokenizerFast" if is_tokenizers_available() else None)),
            ("instructblipvideo", ("GPT2Tokenizer", "GPT2TokenizerFast" if is_tokenizers_available() else None)),
            ("jamba", ("LlamaTokenizer" if is_sentencepiece_available() else None, "LlamaTokenizerFast" if is_tokenizers_available() else None)),
            ("jetmoe", ("LlamaTokenizer" if is_sentencepiece_available() else None, "LlamaTokenizerFast" if is_tokenizers_available() else None)),
            ("jukebox", ("JukeboxTokenizer", None)),
            ("kosmos-2", ("XLMRobertaTokenizer" if is_sentencepiece_available() else None, "XLMRobertaTokenizerFast" if is_tokenizers_available() else None)),
            ("layoutlm", ("LayoutLMTokenizer", "LayoutLMTokenizerFast" if is_tokenizers_available() else None)),
            ("layoutlmv2", ("LayoutLMv2Tokenizer", "LayoutLMv2TokenizerFast" if is_tokenizers_available() else None)),
            ("layoutlmv3", ("LayoutLMv3Tokenizer", "LayoutLMv3TokenizerFast" if is_tokenizers_available() else None)),
            ("layoutxlm", ("LayoutXLMTokenizer", "LayoutXLMTokenizerFast" if is_tokenizers_available() else None)),
            ("led", ("LEDTokenizer", "LEDTokenizerFast" if is_tokenizers_available() else None)),
            ("lilt", ("LayoutLMv3Tokenizer", "LayoutLMv3TokenizerFast" if is_tokenizers_available() else None)),
            ("llama", ("LlamaTokenizer" if is_sentencepiece_available() else None, "LlamaTokenizerFast" if is_tokenizers_available() else None)),
            ("llava", ("LlamaTokenizer" if is_sentencepiece_available() else None, "LlamaTokenizerFast" if is_tokenizers_available() else None)),
            ("llava_next", ("LlamaTokenizer" if is_sentencepiece_available() else None, "LlamaTokenizerFast" if is_tokenizers_available() else None)),
            ("llava_next_video", ("LlamaTokenizer" if is_sentencepiece_available() else None, "LlamaTokenizerFast" if is_tokenizers_available() else None)),
            ("llava_onevision", ("LlamaTokenizer" if is_sentencepiece_available() else None, "LlamaTokenizerFast" if is_tokenizers_available() else None)),
            ("longformer", ("LongformerTokenizer", "LongformerTokenizerFast" if is_tokenizers_available() else None)),
            ("longt5", ("T5Tokenizer" if is_sentencepiece_available() else None, "T5TokenizerFast" if is_tokenizers_available() else None)),
            ("luke", ("LukeTokenizer", None)),
            ("lxmert", ("LxmertTokenizer", "LxmertTokenizerFast" if is_tokenizers_available() else None)),
            ("m2m_100", ("M2M100Tokenizer" if is_sentencepiece_available() else None, None)),
            ("mamba", (None, "GPTNeoXTokenizerFast" if is_tokenizers_available() else None)),
            ("mamba2", (None, "GPTNeoXTokenizerFast" if is_tokenizers_available() else None)),
            ("marian", ("MarianTokenizer" if is_sentencepiece_available() else None, None)),
            ("mbart", ("MBartTokenizer" if is_sentencepiece_available() else None, "MBartTokenizerFast" if is_tokenizers_available() else None)),
            ("mbart50", ("MBart50Tokenizer" if is_sentencepiece_available() else None, "MBart50TokenizerFast" if is_tokenizers_available() else None)),
            ("mega", ("RobertaTokenizer", "RobertaTokenizerFast" if is_tokenizers_available() else None)),
            ("megatron-bert", ("BertTokenizer", "BertTokenizerFast" if is_tokenizers_available() else None)),
            ("mgp-str", ("MgpstrTokenizer", None)),
            ("mistral", ("LlamaTokenizer" if is_sentencepiece_available() else None, "LlamaTokenizerFast" if is_tokenizers_available() else None)),
            ("mixtral", ("LlamaTokenizer" if is_sentencepiece_available() else None, "LlamaTokenizerFast" if is_tokenizers_available() else None)),
            ("mllama", ("LlamaTokenizer" if is_sentencepiece_available() else None, "LlamaTokenizerFast" if is_tokenizers_available() else None)),
            ("mluke", ("MLukeTokenizer" if is_sentencepiece_available() else None, None)),
            ("mobilebert", ("MobileBertTokenizer", "MobileBertTokenizerFast" if is_tokenizers_available() else None)),
            ("moshi", (None, "PreTrainedTokenizerFast" if is_tokenizers_available() else None)),
            ("mpnet", ("MPNetTokenizer", "MPNetTokenizerFast" if is_tokenizers_available() else None)),
            ("mpt", (None, "GPTNeoXTokenizerFast" if is_tokenizers_available() else None)),
            ("mra", ("RobertaTokenizer", "RobertaTokenizerFast" if is_tokenizers_available() else None)),
            ("mt5", ("MT5Tokenizer" if is_sentencepiece_available() else None, "MT5TokenizerFast" if is_tokenizers_available() else None)),
            ("musicgen", ("T5Tokenizer" if is_sentencepiece_available() else None, "T5TokenizerFast" if is_tokenizers_available() else None)),
            ("musicgen_melody", ("T5Tokenizer" if is_sentencepiece_available() else None, "T5TokenizerFast" if is_tokenizers_available() else None)),
            ("mvp", ("MvpTokenizer", "MvpTokenizerFast" if is_tokenizers_available() else None)),
            ("myt5", ("MyT5Tokenizer", None)),
            ("nezha", ("BertTokenizer", "BertTokenizerFast" if is_tokenizers_available() else None)),
            ("nllb", ("NllbTokenizer" if is_sentencepiece_available() else None, "NllbTokenizerFast" if is_tokenizers_available() else None)),
            ("nllb-moe", ("NllbTokenizer" if is_sentencepiece_available() else None, "NllbTokenizerFast" if is_tokenizers_available() else None)),
            ("nystromformer", ("AlbertTokenizer" if is_sentencepiece_available() else None, "AlbertTokenizerFast" if is_tokenizers_available() else None)),
            ("olmo", (None, "GPTNeoXTokenizerFast" if is_tokenizers_available() else None)),
            ("olmo2", (None, "GPTNeoXTokenizerFast" if is_tokenizers_available() else None)),
            ("olmoe", (None, "GPTNeoXTokenizerFast" if is_tokenizers_available() else None)),
            ("omdet-turbo", ("CLIPTokenizer", "CLIPTokenizerFast" if is_tokenizers_available() else None)),
            ("oneformer", ("CLIPTokenizer", "CLIPTokenizerFast" if is_tokenizers_available() else None)),
            ("openai-gpt", ("OpenAIGPTTokenizer", "OpenAIGPTTokenizerFast" if is_tokenizers_available() else None)),
            ("opt", ("GPT2Tokenizer", "GPT2TokenizerFast" if is_tokenizers_available() else None)),
            ("owlv2", ("CLIPTokenizer", "CLIPTokenizerFast" if is_tokenizers_available() else None)),
            ("owlvit", ("CLIPTokenizer", "CLIPTokenizerFast" if is_tokenizers_available() else None)),
            ("paligemma", ("LlamaTokenizer" if is_sentencepiece_available() else None, "LlamaTokenizerFast" if is_tokenizers_available() else None)),
            ("pegasus", ("PegasusTokenizer" if is_sentencepiece_available() else None, "PegasusTokenizerFast" if is_tokenizers_available() else None)),
            ("pegasus_x", ("PegasusTokenizer" if is_sentencepiece_available() else None, "PegasusTokenizerFast" if is_tokenizers_available() else None)),
            ("perceiver", ("PerceiverTokenizer", None)),
            ("persimmon", ("LlamaTokenizer" if is_sentencepiece_available() else None, "LlamaTokenizerFast" if is_tokenizers_available() else None)),
            ("phi", ("CodeGenTokenizer", "CodeGenTokenizerFast" if is_tokenizers_available() else None)),
            ("phi3", ("LlamaTokenizer" if is_sentencepiece_available() else None, "LlamaTokenizerFast" if is_tokenizers_available() else None)),
            ("phimoe", ("LlamaTokenizer" if is_sentencepiece_available() else None, "LlamaTokenizerFast" if is_tokenizers_available() else None)),
            ("phobert", ("PhobertTokenizer", None)),
            ("pix2struct", ("T5Tokenizer" if is_sentencepiece_available() else None, "T5TokenizerFast" if is_tokenizers_available() else None)),
            ("pixtral", (None, "PreTrainedTokenizerFast" if is_tokenizers_available() else None)),
            ("plbart", ("PLBartTokenizer" if is_sentencepiece_available() else None, None)),
            ("prophetnet", ("ProphetNetTokenizer", None)),
            ("qdqbert", ("BertTokenizer", "BertTokenizerFast" if is_tokenizers_available() else None)),
            ("qwen2", ("Qwen2Tokenizer", "Qwen2TokenizerFast" if is_tokenizers_available() else None)),
            ("qwen2_audio", ("Qwen2Tokenizer", "Qwen2TokenizerFast" if is_tokenizers_available() else None)),
            ("qwen2_moe", ("Qwen2Tokenizer", "Qwen2TokenizerFast" if is_tokenizers_available() else None)),
            ("qwen2_vl", ("Qwen2Tokenizer", "Qwen2TokenizerFast" if is_tokenizers_available() else None)),
            ("rag", ("RagTokenizer", None)),
            ("realm", ("RealmTokenizer", "RealmTokenizerFast" if is_tokenizers_available() else None)),
            ("recurrent_gemma", ("GemmaTokenizer" if is_sentencepiece_available() else None, "GemmaTokenizerFast" if is_tokenizers_available() else None)),
            ("reformer", ("ReformerTokenizer" if is_sentencepiece_available() else None, "ReformerTokenizerFast" if is_tokenizers_available() else None)),
            ("rembert", ("RemBertTokenizer" if is_sentencepiece_available() else None, "RemBertTokenizerFast" if is_tokenizers_available() else None)),
            ("retribert", ("RetriBertTokenizer", "RetriBertTokenizerFast" if is_tokenizers_available() else None)),
            ("roberta", ("RobertaTokenizer", "RobertaTokenizerFast" if is_tokenizers_available() else None)),
            ("roberta-prelayernorm", ("RobertaTokenizer", "RobertaTokenizerFast" if is_tokenizers_available() else None)),
            ("roc_bert", ("RoCBertTokenizer", None)),
            ("roformer", ("RoFormerTokenizer", "RoFormerTokenizerFast" if is_tokenizers_available() else None)),
            ("rwkv", (None, "GPTNeoXTokenizerFast" if is_tokenizers_available() else None)),
            ("seamless_m4t", ("SeamlessM4TTokenizer" if is_sentencepiece_available() else None, "SeamlessM4TTokenizerFast" if is_tokenizers_available() else None)),
            ("seamless_m4t_v2", ("SeamlessM4TTokenizer" if is_sentencepiece_available() else None, "SeamlessM4TTokenizerFast" if is_tokenizers_available() else None)),
            ("siglip", ("SiglipTokenizer" if is_sentencepiece_available() else None, None)),
            ("speech_to_text", ("Speech2TextTokenizer" if is_sentencepiece_available() else None, None)),
            ("speech_to_text_2", ("Speech2Text2Tokenizer", None)),
            ("speecht5", ("SpeechT5Tokenizer" if is_sentencepiece_available() else None, None)),
            ("splinter", ("SplinterTokenizer", "SplinterTokenizerFast")),
            ("squeezebert", ("SqueezeBertTokenizer", "SqueezeBertTokenizerFast" if is_tokenizers_available() else None)),
            ("stablelm", (None, "GPTNeoXTokenizerFast" if is_tokenizers_available() else None)),
            ("starcoder2", ("GPT2Tokenizer", "GPT2TokenizerFast" if is_tokenizers_available() else None)),
            ("switch_transformers", ("T5Tokenizer" if is_sentencepiece_available() else None, "T5TokenizerFast" if is_tokenizers_available() else None)),
            ("t5", ("T5Tokenizer" if is_sentencepiece_available() else None, "T5TokenizerFast" if is_tokenizers_available() else None)),
            ("tapas", ("TapasTokenizer", None)),
            ("tapex", ("TapexTokenizer", None)),
            ("transfo-xl", ("TransfoXLTokenizer", None)),
            ("tvp", ("BertTokenizer", "BertTokenizerFast" if is_tokenizers_available() else None)),
            ("udop", ("UdopTokenizer" if is_sentencepiece_available() else None, "UdopTokenizerFast" if is_tokenizers_available() else None)),
            ("umt5", ("T5Tokenizer" if is_sentencepiece_available() else None, "T5TokenizerFast" if is_tokenizers_available() else None)),
            ("video_llava", ("LlamaTokenizer" if is_sentencepiece_available() else None, "LlamaTokenizerFast" if is_tokenizers_available() else None)),
            ("vilt", ("BertTokenizer", "BertTokenizerFast" if is_tokenizers_available() else None)),
            ("vipllava", ("LlamaTokenizer" if is_sentencepiece_available() else None, "LlamaTokenizerFast" if is_tokenizers_available() else None)),
            ("visual_bert", ("BertTokenizer", "BertTokenizerFast" if is_tokenizers_available() else None)),
            ("vits", ("VitsTokenizer", None)),
            ("wav2vec2", ("Wav2Vec2CTCTokenizer", None)),
            ("wav2vec2-bert", ("Wav2Vec2CTCTokenizer", None)),
            ("wav2vec2-conformer", ("Wav2Vec2CTCTokenizer", None)),
            ("wav2vec2_phoneme", ("Wav2Vec2PhonemeCTCTokenizer", None)),
            ("whisper", ("WhisperTokenizer", "WhisperTokenizerFast" if is_tokenizers_available() else None)),
            ("xclip", ("CLIPTokenizer", "CLIPTokenizerFast" if is_tokenizers_available() else None)),
            ("xglm", ("XGLMTokenizer" if is_sentencepiece_available() else None, "XGLMTokenizerFast" if is_tokenizers_available() else None)),
            ("xlm", ("XLMTokenizer", None)),
            ("xlm-prophetnet", ("XLMProphetNetTokenizer" if is_sentencepiece_available() else None, None)),
            ("xlm-roberta", ("XLMRobertaTokenizer" if is_sentencepiece_available() else None, "XLMRobertaTokenizerFast" if is_tokenizers_available() else None)),
            ("xlm-roberta-xl", ("XLMRobertaTokenizer" if is_sentencepiece_available() else None, "XLMRobertaTokenizerFast" if is_tokenizers_available() else None)),
            ("xlnet", ("XLNetTokenizer" if is_sentencepiece_available() else None, "XLNetTokenizerFast" if is_tokenizers_available() else None)),
            ("xmod", ("XLMRobertaTokenizer" if is_sentencepiece_available() else None, "XLMRobertaTokenizerFast" if is_tokenizers_available() else None)),
            ("yoso", ("AlbertTokenizer" if is_sentencepiece_available() else None, "AlbertTokenizerFast" if is_tokenizers_available() else None)),
            ("zamba", ("LlamaTokenizer" if is_sentencepiece_available() else None, "LlamaTokenizerFast" if is_tokenizers_available() else None)),
        ]
    )

TOKENIZER_MAPPING = _LazyAutoMapping(CONFIG_MAPPING_NAMES, TOKENIZER_MAPPING_NAMES)

CONFIG_TO_TYPE = {v: k for k, v in CONFIG_MAPPING_NAMES.items()}


def tokenizer_class_from_name(class_name: str):
    if class_name == "PreTrainedTokenizerFast":
        return PreTrainedTokenizerFast

    for module_name, tokenizers in TOKENIZER_MAPPING_NAMES.items():
        if class_name in tokenizers:
            module_name = model_type_to_module_name(module_name)

            module = importlib.import_module(f".{module_name}", "transformers.models")
            try:
                return getattr(module, class_name)
            except AttributeError:
                continue

    for config, tokenizers in TOKENIZER_MAPPING._extra_content.items():
        for tokenizer in tokenizers:
            if getattr(tokenizer, "__name__", None) == class_name:
                return tokenizer

    # We did not find the class, but maybe it's because a dep is missing. In that case, the class will be in the
    # main init and we return the proper dummy to get an appropriate error message.
    main_module = importlib.import_module("transformers")
    if hasattr(main_module, class_name):
        return getattr(main_module, class_name)

    return None
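

# A usage sketch (kept as a comment so nothing runs at import time) of how the two lookup
# paths above fit together; `BertConfig` is just an illustrative config class:
#
#     from transformers import BertConfig
#     from transformers.models.auto.tokenization_auto import (
#         TOKENIZER_MAPPING,
#         tokenizer_class_from_name,
#     )
#
#     # Config-class lookup: the (slow, fast) tokenizer classes registered for BERT.
#     slow_cls, fast_cls = TOKENIZER_MAPPING[BertConfig]
#
#     # Name lookup: resolves a class-name string to the actual class object.
#     assert tokenizer_class_from_name("BertTokenizerFast") is fast_cls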


def get_tokenizer_config(
    pretrained_model_name_or_path: Union[str, os.PathLike],
    cache_dir: Optional[Union[str, os.PathLike]] = None,
    force_download: bool = False,
    resume_download: Optional[bool] = None,
    proxies: Optional[Dict[str, str]] = None,
    token: Optional[Union[bool, str]] = None,
    revision: Optional[str] = None,
    local_files_only: bool = False,
    subfolder: str = "",
    **kwargs,
):
    """
    Loads the tokenizer configuration from a pretrained model tokenizer configuration.

    Args:
        pretrained_model_name_or_path (`str` or `os.PathLike`):
            This can be either:

            - a string, the *model id* of a pretrained model configuration hosted inside a model repo on
              huggingface.co.
            - a path to a *directory* containing a configuration file saved using the
              [`~PreTrainedTokenizer.save_pretrained`] method, e.g., `./my_model_directory/`.

        cache_dir (`str` or `os.PathLike`, *optional*):
            Path to a directory in which a downloaded pretrained model configuration should be cached if the standard
            cache should not be used.
        force_download (`bool`, *optional*, defaults to `False`):
            Whether or not to force (re-)downloading the configuration files and override the cached versions if they
            exist.
        resume_download:
            Deprecated and ignored. All downloads are now resumed by default when possible.
            Will be removed in v5 of Transformers.
        proxies (`Dict[str, str]`, *optional*):
            A dictionary of proxy servers to use by protocol or endpoint, e.g., `{'http': 'foo.bar:3128',
            'http://hostname': 'foo.bar:4012'}.` The proxies are used on each request.
        token (`str` or *bool*, *optional*):
            The token to use as HTTP bearer authorization for remote files. If `True`, will use the token generated
            when running `huggingface-cli login` (stored in `~/.huggingface`).
        revision (`str`, *optional*, defaults to `"main"`):
            The specific model version to use. It can be a branch name, a tag name, or a commit id, since we use a
            git-based system for storing models and other artifacts on huggingface.co, so `revision` can be any
            identifier allowed by git.
        local_files_only (`bool`, *optional*, defaults to `False`):
            If `True`, will only try to load the tokenizer configuration from local files.
        subfolder (`str`, *optional*, defaults to `""`):
            In case the tokenizer config is located inside a subfolder of the model repo on huggingface.co, you can
            specify the folder name here.

    <Tip>

    Passing `token=True` is required when you want to use a private model.

    </Tip>

    Returns:
        `Dict`: The configuration of the tokenizer.

    Examples:

    ```python
    # Download configuration from huggingface.co and cache.
    tokenizer_config = get_tokenizer_config("google-bert/bert-base-uncased")
    # This model does not have a tokenizer config so the result will be an empty dict.
    tokenizer_config = get_tokenizer_config("FacebookAI/xlm-roberta-base")

    # Save a pretrained tokenizer locally and you can reload its config
    from transformers import AutoTokenizer

    tokenizer = AutoTokenizer.from_pretrained("google-bert/bert-base-cased")
    tokenizer.save_pretrained("tokenizer-test")
    tokenizer_config = get_tokenizer_config("tokenizer-test")
    ```"""
    use_auth_token = kwargs.pop("use_auth_token", None)
    if use_auth_token is not None:
        warnings.warn(
            "The `use_auth_token` argument is deprecated and will be removed in v5 of Transformers. "
            "Please use `token` instead.",
            FutureWarning,
        )
        if token is not None:
            raise ValueError("`token` and `use_auth_token` are both specified. Please set only the argument `token`.")
        token = use_auth_token

    commit_hash = kwargs.get("_commit_hash", None)
    resolved_config_file = cached_file(
        pretrained_model_name_or_path,
        TOKENIZER_CONFIG_FILE,
        cache_dir=cache_dir,
        force_download=force_download,
        resume_download=resume_download,
        proxies=proxies,
        token=token,
        revision=revision,
        local_files_only=local_files_only,
        subfolder=subfolder,
        _raise_exceptions_for_gated_repo=False,
        _raise_exceptions_for_missing_entries=False,
        _raise_exceptions_for_connection_errors=False,
        _commit_hash=commit_hash,
    )
    if resolved_config_file is None:
        logger.info("Could not locate the tokenizer configuration file, will try to use the model config instead.")
        return {}
    commit_hash = extract_commit_hash(resolved_config_file, commit_hash)

    with open(resolved_config_file, encoding="utf-8") as reader:
        result = json.load(reader)
    result["_commit_hash"] = commit_hash
    return result


class AutoTokenizer:
    r"""
    This is a generic tokenizer class that will be instantiated as one of the tokenizer classes of the library when
    created with the [`AutoTokenizer.from_pretrained`] class method.

    This class cannot be instantiated directly using `__init__()` (throws an error).
    """

    def __init__(self):
        raise EnvironmentError(
            "AutoTokenizer is designed to be instantiated "
            "using the `AutoTokenizer.from_pretrained(pretrained_model_name_or_path)` method."
        )

    @classmethod
    @replace_list_option_in_docstrings(TOKENIZER_MAPPING_NAMES)
    def from_pretrained(cls, pretrained_model_name_or_path, *inputs, **kwargs):
        r"""
        Instantiate one of the tokenizer classes of the library from a pretrained model vocabulary.

        The tokenizer class to instantiate is selected based on the `model_type` property of the config object (either
        passed as an argument or loaded from `pretrained_model_name_or_path` if possible), or when it's missing, by
        falling back to using pattern matching on `pretrained_model_name_or_path`:

        List options

        Params:
            pretrained_model_name_or_path (`str` or `os.PathLike`):
                Can be either:

                    - A string, the *model id* of a predefined tokenizer hosted inside a model repo on huggingface.co.
                    - A path to a *directory* containing vocabulary files required by the tokenizer, for instance saved
                      using the [`~PreTrainedTokenizer.save_pretrained`] method, e.g., `./my_model_directory/`.
                    - A path or url to a single saved vocabulary file if and only if the tokenizer only requires a
                      single vocabulary file (like Bert or XLNet), e.g.: `./my_model_directory/vocab.txt`. (Not
                      applicable to all derived classes)
            inputs (additional positional arguments, *optional*):
                Will be passed along to the Tokenizer `__init__()` method.
            config ([`PretrainedConfig`], *optional*):
                The configuration object used to determine the tokenizer class to instantiate.
            cache_dir (`str` or `os.PathLike`, *optional*):
                Path to a directory in which a downloaded pretrained model configuration should be cached if the
                standard cache should not be used.
            force_download (`bool`, *optional*, defaults to `False`):
                Whether or not to force the (re-)download the model weights and configuration files and override the
                cached versions if they exist.
            resume_download:
                Deprecated and ignored. All downloads are now resumed by default when possible.
                Will be removed in v5 of Transformers.
            proxies (`Dict[str, str]`, *optional*):
                A dictionary of proxy servers to use by protocol or endpoint, e.g., `{'http': 'foo.bar:3128',
                'http://hostname': 'foo.bar:4012'}`. The proxies are used on each request.
            revision (`str`, *optional*, defaults to `"main"`):
                The specific model version to use. It can be a branch name, a tag name, or a commit id, since we use a
                git-based system for storing models and other artifacts on huggingface.co, so `revision` can be any
                identifier allowed by git.
            subfolder (`str`, *optional*):
                In case the relevant files are located inside a subfolder of the model repo on huggingface.co (e.g. for
                facebook/rag-token-base), specify it here.
            use_fast (`bool`, *optional*, defaults to `True`):
                Use a [fast Rust-based tokenizer](https://huggingface.co/docs/tokenizers/index) if it is supported for
                a given model. If a fast tokenizer is not available for a given model, a normal Python-based tokenizer
                is returned instead.
            tokenizer_type (`str`, *optional*):
                Tokenizer type to be loaded.
            trust_remote_code (`bool`, *optional*, defaults to `False`):
                Whether or not to allow for custom models defined on the Hub in their own modeling files. This option
                should only be set to `True` for repositories you trust and in which you have read the code, as it will
                execute code present on the Hub on your local machine.
            kwargs (additional keyword arguments, *optional*):
                Will be passed to the Tokenizer `__init__()` method. Can be used to set special tokens like
                `bos_token`, `eos_token`, `unk_token`, `sep_token`, `pad_token`, `cls_token`, `mask_token`,
                `additional_special_tokens`. See parameters in the `__init__()` for more details.

        Examples:

        ```python
        >>> from transformers import AutoTokenizer

        >>> # Download vocabulary from huggingface.co and cache.
        >>> tokenizer = AutoTokenizer.from_pretrained("google-bert/bert-base-uncased")

        >>> # Download vocabulary from huggingface.co (user-uploaded) and cache.
        >>> tokenizer = AutoTokenizer.from_pretrained("dbmdz/bert-base-german-cased")

        >>> # If vocabulary files are in a directory (e.g. tokenizer was saved using *save_pretrained('./test/saved_model/')*)
        >>> # tokenizer = AutoTokenizer.from_pretrained("./test/bert_saved_model/")

        >>> # Download vocabulary from huggingface.co and define model-specific arguments
        >>> tokenizer = AutoTokenizer.from_pretrained("FacebookAI/roberta-base", add_prefix_space=True)
        ```"""
        use_auth_token = kwargs.pop("use_auth_token", None)
        if use_auth_token is not None:
            warnings.warn(
                "The `use_auth_token` argument is deprecated and will be removed in v5 of Transformers. "
                "Please use `token` instead.",
                FutureWarning,
            )
            if kwargs.get("token", None) is not None:
                raise ValueError(
                    "`token` and `use_auth_token` are both specified. Please set only the argument `token`."
                )
            kwargs["token"] = use_auth_token

        config = kwargs.pop("config", None)
        kwargs["_from_auto"] = True

        use_fast = kwargs.pop("use_fast", True)
        tokenizer_type = kwargs.pop("tokenizer_type", None)
        trust_remote_code = kwargs.pop("trust_remote_code", None)
        gguf_file = kwargs.get("gguf_file", None)

        # First, let's see whether the tokenizer_type is passed so that we can leverage it
        if tokenizer_type is not None:
            tokenizer_class = None
            tokenizer_class_tuple = TOKENIZER_MAPPING_NAMES.get(tokenizer_type, None)

            if tokenizer_class_tuple is None:
                raise ValueError(
                    f"Passed `tokenizer_type` {tokenizer_type} does not exist. `tokenizer_type` should be one of "
                    f"{', '.join(c for c in TOKENIZER_MAPPING_NAMES.keys())}."
                )

            tokenizer_class_name, tokenizer_fast_class_name = tokenizer_class_tuple

            if use_fast:
                if tokenizer_fast_class_name is not None:
                    tokenizer_class = tokenizer_class_from_name(tokenizer_fast_class_name)
                else:
                    logger.warning(
                        "`use_fast` is set to `True` but the tokenizer class does not have a fast version. "
                        " Falling back to the slow version."
                    )
            if tokenizer_class is None:
                tokenizer_class = tokenizer_class_from_name(tokenizer_class_name)

            if tokenizer_class is None:
                raise ValueError(f"Tokenizer class {tokenizer_class_name} is not currently imported.")

            return tokenizer_class.from_pretrained(pretrained_model_name_or_path, *inputs, **kwargs)

        # Next, let's try to use the tokenizer_config file to get the tokenizer class.
        tokenizer_config = get_tokenizer_config(pretrained_model_name_or_path, **kwargs)
        if "_commit_hash" in tokenizer_config:
            kwargs["_commit_hash"] = tokenizer_config["_commit_hash"]
        config_tokenizer_class = tokenizer_config.get("tokenizer_class")
        tokenizer_auto_map = None
        if "auto_map" in tokenizer_config:
            if isinstance(tokenizer_config["auto_map"], (tuple, list)):
                # Legacy format for dynamic tokenizers
                tokenizer_auto_map = tokenizer_config["auto_map"]
            else:
                tokenizer_auto_map = tokenizer_config["auto_map"].get("AutoTokenizer", None)

        # If that did not work, let's try to use the config.
        if config_tokenizer_class is None:
            if not isinstance(config, PretrainedConfig):
                if gguf_file:
                    gguf_path = cached_file(pretrained_model_name_or_path, gguf_file, **kwargs)
                    config_dict = load_gguf_checkpoint(gguf_path, return_tensors=False)["config"]
                    config = AutoConfig.for_model(**config_dict)
                else:
                    config = AutoConfig.from_pretrained(
                        pretrained_model_name_or_path, trust_remote_code=trust_remote_code, **kwargs
                    )
            config_tokenizer_class = config.tokenizer_class
            if hasattr(config, "auto_map") and "AutoTokenizer" in config.auto_map:
                tokenizer_auto_map = config.auto_map["AutoTokenizer"]

        has_remote_code = tokenizer_auto_map is not None
        has_local_code = type(config) in TOKENIZER_MAPPING or (
            config_tokenizer_class is not None
            and (
                tokenizer_class_from_name(config_tokenizer_class) is not None
                or tokenizer_class_from_name(config_tokenizer_class + "Fast") is not None
            )
        )
        trust_remote_code = resolve_trust_remote_code(
            trust_remote_code, pretrained_model_name_or_path, has_local_code, has_remote_code
        )

        if has_remote_code and trust_remote_code:
            if use_fast and tokenizer_auto_map[1] is not None:
                class_ref = tokenizer_auto_map[1]
            else:
                class_ref = tokenizer_auto_map[0]
            tokenizer_class = get_class_from_dynamic_module(class_ref, pretrained_model_name_or_path, **kwargs)
            _ = kwargs.pop("code_revision", None)
            if os.path.isdir(pretrained_model_name_or_path):
                tokenizer_class.register_for_auto_class()
            return tokenizer_class.from_pretrained(
                pretrained_model_name_or_path, *inputs, trust_remote_code=trust_remote_code, **kwargs
            )
        elif config_tokenizer_class is not None:
            tokenizer_class = None
            if use_fast and not config_tokenizer_class.endswith("Fast"):
                tokenizer_class_candidate = f"{config_tokenizer_class}Fast"
                tokenizer_class = tokenizer_class_from_name(tokenizer_class_candidate)
            if tokenizer_class is None:
                tokenizer_class_candidate = config_tokenizer_class
                tokenizer_class = tokenizer_class_from_name(tokenizer_class_candidate)
            if tokenizer_class is None:
                raise ValueError(
                    f"Tokenizer class {tokenizer_class_candidate} does not exist or is not currently imported."
                )
            return tokenizer_class.from_pretrained(pretrained_model_name_or_path, *inputs, **kwargs)

        # Otherwise we have to be creative.
        # if model is an encoder decoder, get decoder tokenizer class
        if isinstance(config, EncoderDecoderConfig):
            if type(config.decoder) is not type(config.encoder):  # noqa: E721
                logger.warning(
                    f"The encoder model config class: {config.encoder.__class__} is different from the decoder model "
                    f"config class: {config.decoder.__class__}. It is not recommended to use the "
                    "`AutoTokenizer.from_pretrained()` method in this case. Please use the encoder and decoder "
                    "specific tokenizer classes."
                )
            config = config.encoder

        model_type = config_class_to_model_type(type(config).__name__)
        if model_type is not None:
            tokenizer_class_py, tokenizer_class_fast = TOKENIZER_MAPPING[type(config)]

            if tokenizer_class_fast and (use_fast or tokenizer_class_py is None):
                return tokenizer_class_fast.from_pretrained(pretrained_model_name_or_path, *inputs, **kwargs)
            else:
                if tokenizer_class_py is not None:
                    return tokenizer_class_py.from_pretrained(pretrained_model_name_or_path, *inputs, **kwargs)
                else:
                    raise ValueError(
                        "This tokenizer cannot be instantiated. Please make sure you have `sentencepiece` installed "
                        "in order to use this tokenizer."
                    )

        raise ValueError(
            f"Unrecognized configuration class {config.__class__} to build an AutoTokenizer.\n"
            f"Model type should be one of {', '.join(c.__name__ for c in TOKENIZER_MAPPING.keys())}."
        )

    @staticmethod
    def register(config_class, slow_tokenizer_class=None, fast_tokenizer_class=None, exist_ok=False):
        """
        Register a new tokenizer in this mapping.


        Args:
            config_class ([`PretrainedConfig`]):
                The configuration corresponding to the model to register.
            slow_tokenizer_class ([`PretrainedTokenizer`], *optional*):
                The slow tokenizer to register.
            fast_tokenizer_class ([`PretrainedTokenizerFast`], *optional*):
                The fast tokenizer to register.
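
        Example:

        A sketch with hypothetical user-defined classes: `CustomConfig` would subclass
        [`PretrainedConfig`] and `CustomTokenizerFast` would subclass [`PreTrainedTokenizerFast`].

        ```python
        >>> from transformers import AutoTokenizer

        >>> AutoTokenizer.register(CustomConfig, fast_tokenizer_class=CustomTokenizerFast)  # doctest: +SKIP
        >>> tokenizer = AutoTokenizer.from_pretrained("path/to/custom-checkpoint")  # doctest: +SKIP
        ```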
        """
        if slow_tokenizer_class is None and fast_tokenizer_class is None:
            raise ValueError("You need to pass either a `slow_tokenizer_class` or a `fast_tokenizer_class`")
        if slow_tokenizer_class is not None and issubclass(slow_tokenizer_class, PreTrainedTokenizerFast):
            raise ValueError("You passed a fast tokenizer in the `slow_tokenizer_class`.")
        if fast_tokenizer_class is not None and issubclass(fast_tokenizer_class, PreTrainedTokenizer):
            raise ValueError("You passed a slow tokenizer in the `fast_tokenizer_class`.")

        if (
            slow_tokenizer_class is not None
            and fast_tokenizer_class is not None
            and issubclass(fast_tokenizer_class, PreTrainedTokenizerFast)
            and fast_tokenizer_class.slow_tokenizer_class != slow_tokenizer_class
        ):
            raise ValueError(
                "The fast tokenizer class you are passing has a `slow_tokenizer_class` attribute that is not "
                "consistent with the slow tokenizer class you passed (fast tokenizer has "
                f"{fast_tokenizer_class.slow_tokenizer_class} and you passed {slow_tokenizer_class}. Fix one of those "
                "so they match!"
            )

        # Avoid resetting a set slow/fast tokenizer if we are passing just the other ones.
        if config_class in TOKENIZER_MAPPING._extra_content:
            existing_slow, existing_fast = TOKENIZER_MAPPING[config_class]
            if slow_tokenizer_class is None:
                slow_tokenizer_class = existing_slow
            if fast_tokenizer_class is None:
                fast_tokenizer_class = existing_fast

        TOKENIZER_MAPPING.register(config_class, (slow_tokenizer_class, fast_tokenizer_class), exist_ok=exist_ok)
 d38n%l E$)$%l sml l l^Wr Wr] As   s