
from concurrent.futures import Future
from typing import Any, Dict, List, Optional

import torch.distributed as dist
import torch.distributed.checkpoint.state_dict_loader as loader
import torch.distributed.checkpoint.state_dict_saver as saver
from torch.distributed.checkpoint.metadata import Metadata, STATE_DICT_TYPE
from torch.distributed.checkpoint.storage import (
    LoadPlanner,
    SavePlanner,
    StorageReader,
    StorageWriter,
)

__all__: List[str] = []


class _Checkpointer:
    """This base class specifies a high-level API for saving and loading
    distributed ``state_dict`` objects. It provides an abstraction over the
    low-level APIs provided by :py:mod:`torch.distributed.checkpoint.storage`,
    essentially calling :py:meth:`torch.distributed.state_dict_saver.save` and
    :py:meth:`torch.distributed.state_dict_loader.load` with the provided
    storage readers and writers.

    .. warning::
        This feature is experimental and subject to removal/change.
    """

    def __init__(
        self,
        storage_writer: StorageWriter,
        storage_reader: StorageReader,
        *,
        process_group: Optional[dist.ProcessGroup] = None,
        coordinator_rank: int = 0,
        no_dist: bool = False,
        load_planner: Optional[LoadPlanner] = None,
        save_planner: Optional[SavePlanner] = None,
    ):
        """Initializes the Checkpointer instance.

        Args:
            storage_writer: Instance of StorageWriter used to perform writes.
            storage_reader: StorageReader used to load data from.
            process_group: ProcessGroup to be used for cross-rank synchronization.
            coordinator_rank: Rank used to coordinate the checkpoint. rank0 is used by default.
            no_dist: If ``True``, distributed checkpoint will not load in SPMD style. (Default: ``False``)
            load_planner: Instance of LoadPlanner to use when loading.
            save_planner: Instance of SavePlanner to use when saving.
        """
        self.storage_writer = storage_writer
        self.storage_reader = storage_reader
        self.process_group = process_group
        self.coordinator_rank = coordinator_rank
        self.no_dist = no_dist
        self.load_planner = load_planner
        self.save_planner = save_planner

    def save(
        self,
        state_dict: STATE_DICT_TYPE,
    ) -> Metadata:
        """Calls :py:meth:`torch.distributed.state_dict_saver.save`, utilizing values passed during initialization."""
        return saver.save(
            state_dict,
            self.storage_writer,
            process_group=self.process_group,
            coordinator_rank=self.coordinator_rank,
            no_dist=self.no_dist,
            planner=self.save_planner,
        )

    def async_save(
        self,
        state_dict: STATE_DICT_TYPE,
    ) -> Future:
        """
        Calls :py:meth:`torch.distributed.state_dict_saver.async_save`, utilizing values passed during initialization.

        Returns:
            Future: A future holding the resultant Metadata object from `save`.
        """
        return saver.async_save(
            state_dict,
            storage_writer=self.storage_writer,
            process_group=self.process_group,
            planner=self.save_planner,
        )

    def load(self, state_dict: Dict[str, Any]) -> None:
        """Calls :py:meth:`torch.distributed.state_dict_loader.load`, utilizing values passed during initialization."""
        loader.load(
            state_dict,
            storage_reader=self.storage_reader,
            process_group=self.process_group,
            planner=self.load_planner,
        )
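

# ---------------------------------------------------------------------------
# Usage sketch (not part of the original module): one way the class above
# might be driven with the filesystem-backed storage classes from
# torch.distributed.checkpoint. The checkpoint directory and the ``model``
# object are illustrative placeholders, and a real program would initialize
# the process group before saving or loading.
#
#   import torch.distributed.checkpoint as dcp
#
#   checkpointer = _Checkpointer(
#       storage_writer=dcp.FileSystemWriter("/tmp/ckpt"),  # hypothetical path
#       storage_reader=dcp.FileSystemReader("/tmp/ckpt"),
#   )
#   metadata = checkpointer.save(model.state_dict())       # blocking save
#   future = checkpointer.async_save(model.state_dict())   # returns a Future
#   state_dict = model.state_dict()
#   checkpointer.load(state_dict)                          # restores in place
# ---------------------------------------------------------------------------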