from typing import List, Optional

import torch
from torch import Tensor
from torch._C import _add_docstr, _nested
from torch.types import _device as Device
from torch.types import _dtype as DType

__all__ = ["to_padded_tensor", "as_nested_tensor", "nested_tensor"]


def nested_tensor(tensor_list: List[Tensor], *, dtype: Optional[DType] = None,
                  device: Optional[Device] = None, requires_grad: bool = False,
                  pin_memory: bool = False) -> Tensor:
    r"""
    Constructs a nested tensor with no autograd history (also known as a “leaf tensor”, see
    :ref:`Autograd mechanics <autograd-mechanics>`) from :attr:`tensor_list`, a list of tensors.

    Args:
        tensor_list (List[Tensor]): a list of tensors with the same ndim

    Keyword arguments:
        dtype (:class:`torch.dtype`, optional): the desired type of returned nested tensor.
            Default: if None, same :class:`torch.dtype` as leftmost tensor in the list.
        device (:class:`torch.device`, optional): the desired device of returned nested tensor.
            Default: if None, same :class:`torch.device` as leftmost tensor in the list.
        requires_grad (bool, optional): If autograd should record operations on the
            returned nested tensor. Default: ``False``.
        pin_memory (bool, optional): If set, the returned nested tensor is allocated in
            pinned memory. Works only for CPU tensors. Default: ``False``.

    Example::

        >>> a = torch.arange(3, dtype=torch.float, requires_grad=True)
        >>> b = torch.arange(5, dtype=torch.float, requires_grad=True)
        >>> nt = torch.nested.nested_tensor([a, b], requires_grad=True)
        >>> nt.is_leaf
        True
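
    An explicit ``dtype`` (or ``device``) can be requested as with other factory
    functions; an illustrative sketch, reusing ``a`` and ``b`` from above::

        >>> nt = torch.nested.nested_tensor([a, b], dtype=torch.float64)
        >>> nt.dtype
        torch.float64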
    """
    if not isinstance(tensor_list, list) or any([not torch.is_tensor(t) for t in tensor_list]):
        raise TypeError("nested_tensor(): Expected first argument to be a list of tensors ")
    # Detach the inputs so the returned nested tensor has no autograd history (leaf tensor).
    new_data = [t.detach() for t in tensor_list]
    nt = torch._nested_tensor_from_tensor_list(new_data, dtype, None, device, pin_memory)
    if requires_grad:
        nt.requires_grad_(requires_grad)
    return nt


def as_nested_tensor(tensor_list: List[Tensor], dtype: Optional[DType] = None,
                     device: Optional[Device] = None) -> Tensor:
    r"""
    Constructs a nested tensor preserving autograd history from :attr:`tensor_list`, a list of tensors.

    .. note::
        Tensors within the list are always copied by this function due to current nested tensor
        semantics; the last example below illustrates this.

    Args:
        tensor_list (List[Tensor]): a list of tensors with the same ndim

    Keyword arguments:
        dtype (:class:`torch.dtype`, optional): the desired type of returned nested tensor.
            Default: if None, same :class:`torch.dtype` as leftmost tensor in the list.
        device (:class:`torch.device`, optional): the desired device of returned nested tensor.
            Default: if None, same :class:`torch.device` as leftmost tensor in the list.

    Example::

        >>> a = torch.arange(3, dtype=torch.float, requires_grad=True)
        >>> b = torch.arange(5, dtype=torch.float, requires_grad=True)
        >>> nt = torch.nested.as_nested_tensor([a, b])
        >>> nt.is_leaf
        False
        >>> fake_grad = torch.nested.nested_tensor([torch.ones_like(a), torch.zeros_like(b)])
        >>> nt.backward(fake_grad)
        >>> a.grad
        tensor([1., 1., 1.])
        >>> b.grad
        tensor([0., 0., 0., 0., 0.])
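
    Because the input tensors are copied (see the note above), later in-place changes to
    an input do not affect the nested tensor; an illustrative sketch::

        >>> c = torch.zeros(3)
        >>> nt2 = torch.nested.as_nested_tensor([c])
        >>> c.fill_(1.)
        tensor([1., 1., 1.])
        >>> torch.nested.to_padded_tensor(nt2, 0.0)
        tensor([[0., 0., 0.]])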
    """
    if not isinstance(tensor_list, list) or any([not torch.is_tensor(t) for t in tensor_list]):
        raise TypeError("as_nested_tensor(): Expected first argument to be a list of tensors ")
    return torch._nested_tensor_from_tensor_list(tensor_list, dtype, None, device, None)


# Note: _add_docstr below not only documents the op, it also connects the
# torch.nested Python namespace to the torch._C._nested builtin.
to_padded_tensor = _add_docstr(
    _nested.nested_to_padded_tensor,
    r"""
to_padded_tensor(input, padding, output_size=None, out=None) -> Tensor

Returns a new (non-nested) Tensor by padding the :attr:`input` nested tensor.
The leading entries will be filled with the nested data,
while the trailing entries will be padded.

.. warning::

    :func:`to_padded_tensor` always copies the underlying data,
    since the nested and the non-nested tensors differ in memory layout.

Args:
    padding (float): The padding value for the trailing entries.

Keyword args:
    output_size (Tuple[int]): The size of the output tensor.
                              If given, it must be large enough to contain all nested data;
                              else, will infer by taking the max size of each nested sub-tensor along each dimension.
    out (Tensor, optional): the output tensor.

Example::

    >>> nt = torch.nested.nested_tensor([torch.randn((2, 5)), torch.randn((3, 4))])
    nested_tensor([
      tensor([[ 1.6862, -1.1282,  1.1031,  0.0464, -1.3276],
              [-1.9967, -1.0054,  1.8972,  0.9174, -1.4995]]),
      tensor([[-1.8546, -0.7194, -0.2918, -0.1846],
              [ 0.2773,  0.8793, -0.5183, -0.6447],
              [ 1.8009,  1.8468, -0.9832, -1.5272]])
    ])
    >>> pt_infer = torch.nested.to_padded_tensor(nt, 0.0)
    tensor([[[ 1.6862, -1.1282,  1.1031,  0.0464, -1.3276],
             [-1.9967, -1.0054,  1.8972,  0.9174, -1.4995],
             [ 0.0000,  0.0000,  0.0000,  0.0000,  0.0000]],
            [[-1.8546, -0.7194, -0.2918, -0.1846,  0.0000],
             [ 0.2773,  0.8793, -0.5183, -0.6447,  0.0000],
             [ 1.8009,  1.8468, -0.9832, -1.5272,  0.0000]]])
    >>> pt_large = torch.nested.to_padded_tensor(nt, 1.0, (2, 4, 6))
    tensor([[[ 1.6862, -1.1282,  1.1031,  0.0464, -1.3276,  1.0000],
             [-1.9967, -1.0054,  1.8972,  0.9174, -1.4995,  1.0000],
             [ 1.0000,  1.0000,  1.0000,  1.0000,  1.0000,  1.0000],
             [ 1.0000,  1.0000,  1.0000,  1.0000,  1.0000,  1.0000]],
            [[-1.8546, -0.7194, -0.2918, -0.1846,  1.0000,  1.0000],
             [ 0.2773,  0.8793, -0.5183, -0.6447,  1.0000,  1.0000],
             [ 1.8009,  1.8468, -0.9832, -1.5272,  1.0000,  1.0000],
             [ 1.0000,  1.0000,  1.0000,  1.0000,  1.0000,  1.0000]]])
    >>> pt_small = torch.nested.to_padded_tensor(nt, 2.0, (2, 2, 2))
    RuntimeError: Value in output_size is less than NestedTensor padded size. Truncation is not supported.
""")