
from typing import Tuple, Union

from torch import Tensor
from torch.types import _size

from .module import Module

__all__ = ['Flatten', 'Unflatten']


class Flatten(Module):
    r"""
    Flattens a contiguous range of dims into a tensor.

    For use with :class:`~nn.Sequential`, see :meth:`torch.flatten` for details.

    Shape:
        - Input: :math:`(*, S_{\text{start}}, ..., S_{i}, ..., S_{\text{end}}, *)`,
          where :math:`S_{i}` is the size at dimension :math:`i` and :math:`*` means any
          number of dimensions including none.
        - Output: :math:`(*, \prod_{i=\text{start}}^{\text{end}} S_{i}, *)`.

    Args:
        start_dim: first dim to flatten (default = 1).
        end_dim: last dim to flatten (default = -1).

    Examples::
        >>> input = torch.randn(32, 1, 5, 5)
        >>> # With default parameters
        >>> m = nn.Flatten()
        >>> output = m(input)
        >>> output.size()
        torch.Size([32, 25])
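        >>> # Illustrative addition (not part of the original docstring): flatten
        >>> # only the trailing spatial dims, keeping the first two dims intact
        >>> m = nn.Flatten(start_dim=2)
        >>> output = m(input)
        >>> output.size()
        torch.Size([32, 1, 25])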
        >>> # With non-default parameters
        >>> m = nn.Flatten(0, 2)
        >>> output = m(input)
        >>> output.size()
        torch.Size([160, 5])
    """

    __constants__ = ['start_dim', 'end_dim']
    start_dim: int
    end_dim: int

    def __init__(self, start_dim: int = 1, end_dim: int = -1) -> None:
        super().__init__()
        self.start_dim = start_dim
        self.end_dim = end_dim

    def forward(self, input: Tensor) -> Tensor:
        return input.flatten(self.start_dim, self.end_dim)

    def extra_repr(self) -> str:
        return f'start_dim={self.start_dim}, end_dim={self.end_dim}'


class Unflatten(Module):
    r"""
    Unflattens a tensor dim, expanding it to a desired shape. For use with :class:`~nn.Sequential`.

    * :attr:`dim` specifies the dimension of the input tensor to be unflattened, and it can
      be either `int` or `str` when `Tensor` or `NamedTensor` is used, respectively.

    * :attr:`unflattened_size` is the new shape of the unflattened dimension. For `Tensor` input it can be
      a `tuple` of ints, a `list` of ints, or `torch.Size`; for `NamedTensor` input it must be a `NamedShape`
      (a tuple of `(name, size)` tuples).

    Shape:
        - Input: :math:`(*, S_{\text{dim}}, *)`, where :math:`S_{\text{dim}}` is the size at
          dimension :attr:`dim` and :math:`*` means any number of dimensions including none.
        - Output: :math:`(*, U_1, ..., U_n, *)`, where :math:`U` = :attr:`unflattened_size` and
          :math:`\prod_{i=1}^n U_i = S_{\text{dim}}`.

    Args:
        dim (Union[int, str]): Dimension to be unflattened
        unflattened_size (Union[torch.Size, Tuple, List, NamedShape]): New shape of the unflattened dimension

    Examples::
        >>> input = torch.randn(2, 50)
        >>> # With tuple of ints
        >>> m = nn.Sequential(
        >>>     nn.Linear(50, 50),
        >>>     nn.Unflatten(1, (2, 5, 5))
        >>> )
        >>> output = m(input)
        >>> output.size()
        torch.Size([2, 2, 5, 5])
        >>> # With torch.Size
        >>> m = nn.Sequential(
        >>>     nn.Linear(50, 50),
        >>>     nn.Unflatten(1, torch.Size([2, 5, 5]))
        >>> )
        >>> output = m(input)
        >>> output.size()
        torch.Size([2, 2, 5, 5])
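        >>> # Illustrative addition (not part of the original docstring): a list of
        >>> # ints also works, and a single -1 entry is inferred from the input size
        >>> unflatten = nn.Unflatten(1, [2, -1, 5])
        >>> output = unflatten(input)
        >>> output.size()
        torch.Size([2, 2, 5, 5])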
        >>> # With namedshape (tuple of tuples)
        >>> input = torch.randn(2, 50, names=('N', 'features'))
        >>> unflatten = nn.Unflatten('features', (('C', 2), ('H', 5), ('W', 5)))
        >>> output = unflatten(input)
        >>> output.size()
        torch.Size([2, 2, 5, 5])
    """

    NamedShape = Tuple[Tuple[str, int]]

    __constants__ = ['dim', 'unflattened_size']
    dim: Union[int, str]
    unflattened_size: Union[_size, NamedShape]

    def __init__(self, dim: Union[int, str], unflattened_size: Union[_size, NamedShape]) -> None:
        super().__init__()

        if isinstance(dim, int):
            self._require_tuple_int(unflattened_size)
        elif isinstance(dim, str):
            self._require_tuple_tuple(unflattened_size)
        else:
            raise TypeError("invalid argument type for dim parameter")

        self.dim = dim
        self.unflattened_size = unflattened_size

    def _require_tuple_tuple(self, input):
        if isinstance(input, tuple):
            for idx, elem in enumerate(input):
                if not isinstance(elem, tuple):
                    raise TypeError(
                        "unflattened_size must be tuple of tuples, "
                        f"but found element of type {type(elem).__name__} at pos {idx}")
            return
        raise TypeError(
            "unflattened_size must be a tuple of tuples, "
            f"but found type {type(input).__name__}")

    def _require_tuple_int(self, input):
        if isinstance(input, (tuple, list)):
            for idx, elem in enumerate(input):
                if not isinstance(elem, int):
                    raise TypeError(
                        "unflattened_size must be tuple of ints, "
                        f"but found element of type {type(elem).__name__} at pos {idx}")
            return
        raise TypeError(
            f"unflattened_size must be a tuple of ints, but found type {type(input).__name__}")

    def forward(self, input: Tensor) -> Tensor:
        return input.unflatten(self.dim, self.unflattened_size)

    def extra_repr(self) -> str:
        return f'dim={self.dim}, unflattened_size={self.unflattened_size}'