
from contextlib import contextmanager, nullcontext
from typing import Any, ContextManager, Dict, Optional, Tuple

import torch
import torch.nn as nn
from torch.utils.checkpoint import (
    _checkpoint_without_reentrant_generator,
    _DEFAULT_DETERMINISM_MODE,
)

from .contract import contract


@contextmanager
def _no_hook(module: nn.Module, user_ctx: Optional[ContextManager] = None):
    r"""
    Disable hooks installed by checkpoint to avoid unintentional recursion
    during backward recomputation.
    """
    with user_ctx if user_ctx else nullcontext():
        orig_enable_hook = checkpoint.state(module).enable_hook
        checkpoint.state(module).enable_hook = False
        try:
            yield
        finally:
            checkpoint.state(module).enable_hook = orig_enable_hook


@contract()
def checkpoint(module: nn.Module, **kwargs) -> nn.Module:
    r"""
    This is a composable activation checkpointing API. Unlike functional
    activation checkpointing APIs, this one does not require changing model
    source code. Unlike ``nn.Module`` wrapper activation checkpointing APIs,
    this one does not modify model structure or fully-qualified names either.
    Under the hood, it registers activation checkpointing logic as pre- and
    post-forward hooks. Hence, this API can be easily applied to any model or
    sub-modules in the model.

    Args:
        module (nn.Module): the target model or sub-module to apply
            activation checkpointing to.

    Example::
        >>> # xdoctest: +SKIP
        >>> import torch.nn as nn
        >>>
        >>> class MyModel(nn.Module):
        >>>     def __init__(self) -> None:
        >>>         super().__init__()
        >>>         self.l1 = nn.Linear(10, 10)
        >>>         self.l2 = nn.Linear(10, 10)
        >>>
        >>>     def forward(self, x):
        >>>         return self.l2(self.l1(x))
        >>>
        >>> model = MyModel()
        >>> checkpoint(model.l1)  # apply activation checkpointing only to l1
        >>> model(torch.zeros(2, 10)).sum().backward()

    ztorch.distributed.checkpointuse_reentrantFzsuse_reentrant=True is not supported in composable checkpoint. Please use torch.utils.checkpoint.checkpoint instead.preserve_rng_stateT
context_fnNdeterminism_checkdebugzUnexpected keyword arguments: ,c              3       K   | ]  }|  y wN ).0args     r   	<genexpr>zcheckpoint.<locals>.<genexpr>P   s     7N7Ns   r   args.kwargsr   c                      t         j                         j                  rZ fd}t         |g|i |t         j                         _        t        t         j                         j                         y y )Nc                  b            \  } }| t        |      fS t               t              fS r    )r   r   )ctx1ctx2r   user_context_fnss     r   context_fnsz9checkpoint.<locals>.forward_pre_hook.<locals>.context_fnsX   s9    #/!1!3JD$&$!777&=(6*:::    )r   r   r   r
   _ac_generatornext)r   r%   r&   r,   r   r   r   r+   s   `   r   forward_pre_hookz$checkpoint.<locals>.forward_pre_hookS   s     F#//; F"!    !!&)778) 0r-   inputsoutputc                     t         j                  |       j                  r4	 t        t         j                  |       j                         t        d      d t         j                  |       _        y # t        $ r Y &w xY w)NzWExpected non-reentrant activation checkpoint generator to be exhausted, but it was not!)r   r   r   r/   r.   RuntimeErrorStopIteration)r   r1   r2   s      r   forward_hookz checkpoint.<locals>.forward_hookl   so    F#//Z%%f-;;< #m  26
 . ! s   (A/ /	A;:A;)with_kwargs)prependalways_call)torch_C_log_api_usage_oncepopNotImplementedErrorr   
ValueErrorjoinnnModuler	   r   r   strr   r   r   register_forward_pre_hookregister_forward_hook)	r   r&   r   r0   r6   r   r   r   r+   s	        @@@@r   r   r       s]   B 
HH  !?@JJ6M!D
 	
  $8$?zz,5

#68QRJJw&E,sxx7Nv7N/NN
 	
9		9!&sCx9:>sCx.9	9 926RYY 6c3h 6 6QT 6 ,0JV(
$$%54$H
  t NMr-   r    )