
from __future__ import annotations

import copy
from typing import Optional, Tuple, TypeVar

import torch


__all__ = [
    "fuse_conv_bn_eval",
    "fuse_conv_bn_weights",
    "fuse_linear_bn_eval",
    "fuse_linear_bn_weights",
]

ConvT = TypeVar("ConvT", bound="torch.nn.modules.conv._ConvNd")
LinearT = TypeVar("LinearT", bound="torch.nn.Linear")


def fuse_conv_bn_eval(
    conv: ConvT,
    bn: torch.nn.modules.batchnorm._BatchNorm,
    transpose: bool = False,
) -> ConvT:
    """Fuse a convolutional module and a BatchNorm module into a single, new convolutional module.

    Args:
        conv (torch.nn.modules.conv._ConvNd): A convolutional module.
        bn (torch.nn.modules.batchnorm._BatchNorm): A BatchNorm module.
        transpose (bool, optional): If True, transpose the convolutional weight. Defaults to False.

    Returns:
        torch.nn.modules.conv._ConvNd: The fused convolutional module.

    .. note::
        Both ``conv`` and ``bn`` must be in eval mode, and ``bn`` must have its running buffers computed.
    """
    assert not (conv.training or bn.training), "Fusion only for eval!"
    fused_conv = copy.deepcopy(conv)

    assert bn.running_mean is not None and bn.running_var is not None
    fused_conv.weight, fused_conv.bias = fuse_conv_bn_weights(
        fused_conv.weight,
        fused_conv.bias,
        bn.running_mean,
        bn.running_var,
        bn.eps,
        bn.weight,
        bn.bias,
        transpose,
    )

    return fused_conv
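
# Illustrative usage sketch for ``fuse_conv_bn_eval`` (not part of the module API;
# the Conv2d/BatchNorm2d sizes below are arbitrary). Both modules must be switched
# to eval mode before fusing, and the BN running stats should already be populated,
# e.g. by a few forward passes in train mode:
#
#     conv = torch.nn.Conv2d(3, 16, kernel_size=3)
#     bn = torch.nn.BatchNorm2d(16)
#     for _ in range(3):  # populate bn.running_mean / bn.running_var
#         bn(conv(torch.randn(4, 3, 8, 8)))
#     conv, bn = conv.eval(), bn.eval()
#     fused = fuse_conv_bn_eval(conv, bn)
#     x = torch.randn(2, 3, 8, 8)
#     torch.testing.assert_close(fused(x), bn(conv(x)), rtol=1e-4, atol=1e-5)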



	*&Jz     c                   | j                   }||j                   n|}	|t        j                  |      }|t        j                  |      }|t        j                  |      }t        j                  ||z         }
|r"ddgdgt        | j                        dz
  z  z   }n!ddgdgt        | j                        dz
  z  z   }| ||
z  j                  |      z  j                  |      }||z
  |
z  |z  |z   j                  |	      }t        j                  j                  || j                        t        j                  j                  ||j                        fS )a  Fuse convolutional module parameters and BatchNorm module parameters into new convolutional module parameters.

    Args:
        conv_w (torch.Tensor): Convolutional weight.
        conv_b (Optional[torch.Tensor]): Convolutional bias.
        bn_rm (torch.Tensor): BatchNorm running mean.
        bn_rv (torch.Tensor): BatchNorm running variance.
        bn_eps (float): BatchNorm epsilon.
        bn_w (Optional[torch.Tensor]): BatchNorm weight.
        bn_b (Optional[torch.Tensor]): BatchNorm bias.
        transpose (bool, optional): If True, transpose the conv weight. Defaults to False.

    Returns:
        Tuple[torch.nn.Parameter, torch.nn.Parameter]: Fused convolutional weight and bias.
          dtype)r#   torch
zeros_like	ones_likersqrtlenshapereshapetonn	Parameterrequires_grad)conv_wconv_bbn_rmbn_rvbn_epsbn_wbn_br   conv_weight_dtypeconv_bias_dtypebn_var_rsqrtr)   fused_conv_wfused_conv_bs                 r   r   r   8   sS   2 &,&8fll>OO~!!%(|u%|&;;uv~.LB1#V\\!2Q!677Q1#V\\!2Q!677d\1::5AAEE F L e^|3d:TAEE F L
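
# Tensor-level sketch for ``fuse_conv_bn_weights`` (hypothetical tensors: 8 output
# channels, 3 input channels, 3x3 kernels; eps chosen arbitrarily). The fused
# weight is the conv weight scaled per output channel by bn_w / sqrt(bn_rv + eps),
# and the fused bias is (conv_b - bn_rm) * bn_w / sqrt(bn_rv + eps) + bn_b:
#
#     conv_w, conv_b = torch.randn(8, 3, 3, 3), torch.randn(8)
#     bn_rm, bn_rv = torch.randn(8), torch.rand(8) + 0.1
#     bn_w, bn_b = torch.randn(8), torch.randn(8)
#     w, b = fuse_conv_bn_weights(conv_w, conv_b, bn_rm, bn_rv, 1e-5, bn_w, bn_b)
#     scale = bn_w * torch.rsqrt(bn_rv + 1e-5)
#     assert torch.allclose(w, conv_w * scale.reshape(-1, 1, 1, 1))
#     assert torch.allclose(b, (conv_b - bn_rm) * scale + bn_b, atol=1e-6)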
 	<)=)=><)=)=> r   c           	        | j                   s|j                   rJ d       t        j                  |       }	 | j                  |j                  k(  s|j                  dk(  sJ d       |j
                  |j                  J t        |j                  |j                  |j
                  |j                  |j                  |j                  |j                        \  |_        |_	        |S )a  Fuse a linear module and a BatchNorm module into a single, new linear module.

    Args:
        linear (torch.nn.Linear): A Linear module.
        bn (torch.nn.modules.batchnorm._BatchNorm): A BatchNorm module.

    Returns:
        torch.nn.Linear: The fused linear module.

    .. note::
        Both ``linear`` and ``bn`` must be in eval mode, and ``bn`` must have its running buffers computed.
    r   r   zGTo fuse, linear.out_features == bn.num_features or bn.num_features == 1)r   r   r   out_featuresnum_featuresr   r   r
   r   r   r   )linearr   fused_linears      r   r	   r	   m   s      2;;H1HH/==(L	 	r."//Q2FQPQF ??&2>>+EEE-C



		
.*L* r   c                   | j                   }||j                   n|}|t        j                  |      }|t        j                  ||z         z  }	| |	j	                  d      j                  |      z  }


def fuse_linear_bn_weights(
    linear_w: torch.Tensor,
    linear_b: Optional[torch.Tensor],
    bn_rm: torch.Tensor,
    bn_rv: torch.Tensor,
    bn_eps: float,
    bn_w: torch.Tensor,
    bn_b: torch.Tensor,
) -> Tuple[torch.nn.Parameter, torch.nn.Parameter]:
    """Fuse linear module parameters and BatchNorm module parameters into new linear module parameters.

    Args:
        linear_w (torch.Tensor): Linear weight.
        linear_b (Optional[torch.Tensor]): Linear bias.
        bn_rm (torch.Tensor): BatchNorm running mean.
        bn_rv (torch.Tensor): BatchNorm running variance.
        bn_eps (float): BatchNorm epsilon.
        bn_w (torch.Tensor): BatchNorm weight.
        bn_b (torch.Tensor): BatchNorm bias.

    Returns:
        Tuple[torch.nn.Parameter, torch.nn.Parameter]: Fused linear weight and bias.
    """
    linear_weight_dtype = linear_w.dtype
    linear_bias_dtype = linear_b.dtype if linear_b is not None else linear_weight_dtype
    if linear_b is None:
        linear_b = torch.zeros_like(bn_rm)
    bn_scale = bn_w * torch.rsqrt(bn_rv + bn_eps)

    fused_w = (linear_w * bn_scale.unsqueeze(-1)).to(dtype=linear_weight_dtype)
    fused_b = ((linear_b - bn_rm) * bn_scale + bn_b).to(dtype=linear_bias_dtype)

    return (
        torch.nn.Parameter(fused_w, linear_w.requires_grad),
        torch.nn.Parameter(fused_b, linear_b.requires_grad),
    )
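
# Illustrative usage sketch for the two linear fusion helpers above (not part of
# the module API; the feature sizes are arbitrary). ``fuse_linear_bn_eval`` operates
# on eval-mode modules, while ``fuse_linear_bn_weights`` operates on raw tensors:
#
#     linear = torch.nn.Linear(32, 64)
#     bn = torch.nn.BatchNorm1d(64)
#     for _ in range(3):  # populate bn.running_mean / bn.running_var
#         bn(linear(torch.randn(16, 32)))
#     linear, bn = linear.eval(), bn.eval()
#     fused = fuse_linear_bn_eval(linear, bn)
#     x = torch.randn(4, 32)
#     torch.testing.assert_close(fused(x), bn(linear(x)), rtol=1e-4, atol=1e-5)
#
#     # Equivalent tensor-level call on the underlying parameters and buffers:
#     w, b = fuse_linear_bn_weights(
#         linear.weight, linear.bias, bn.running_mean, bn.running_var,
#         bn.eps, bn.weight, bn.bias,
#     )
#     torch.testing.assert_close(w, fused.weight)
#     torch.testing.assert_close(b, fused.bias)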