from typing import List

from torch.distributed._shard.metadata import ShardMetadata


def _check_shard_metadata_pair_overlap(shard1: ShardMetadata, shard2: ShardMetadata):
    """
    Checks if two shards overlap.
    """
    # For each dim, the two shards are disjoint if one starts at or beyond the
    # end of the other; they overlap only if neither holds for every dim.
    ndims = len(shard1.shard_offsets)
    for i in range(ndims):
        if shard1.shard_offsets[i] >= shard2.shard_offsets[i] + shard2.shard_sizes[i]:
            return False
        if shard2.shard_offsets[i] >= shard1.shard_offsets[i] + shard1.shard_sizes[i]:
            return False

    return True
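
# Illustrative usage sketch (not part of the original module). The offsets,
# sizes, and placement strings below are made-up values, assuming the usual
# ShardMetadata constructor fields:
#
#     a = ShardMetadata(shard_offsets=[0, 0], shard_sizes=[2, 4], placement="rank:0/cpu")
#     b = ShardMetadata(shard_offsets=[2, 0], shard_sizes=[2, 4], placement="rank:1/cpu")
#     c = ShardMetadata(shard_offsets=[1, 0], shard_sizes=[2, 4], placement="rank:1/cpu")
#     _check_shard_metadata_pair_overlap(a, b)  # False: b starts on dim 0 where a ends
#     _check_shard_metadata_pair_overlap(a, c)  # True: both cover row 1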
    FT)lenshard_offsetsrangeshard_sizes)r   r   Zndimsi r   U/tmp/pip-unpacked-wheel-gikjz4vx/torch/distributed/_shard/sharding_spec/_internals.py"_check_shard_metadata_pair_overlap   s    	
r   )shardsc                 C   s^   t t| D ]L}t |d t| D ]4}t| | | | r"td| |  d| |  dq"qdS )a  
    Ensures none of the shards overlap with each other.

    Args:
        shards(List[ShardMetadata]): List of :class:`ShardMetadata` objects representing
            each shard.
    Raises:
        ``ValueError`` if there's overlap in any two shards.
       zShards z and z overlapN)r   r   r   
ValueError)r   r
   jr   r   r   (validate_non_overlapping_shards_metadata   s    r   N)returnc                 C   s   t |}t | d j}||kr2td| d| d}| D ]x}d}t|jD ]\\}}||9 }|j| |j|  || krLtd|j|  d|j|  d||  d| qL||7 }q:d}	|D ]}
|	|
9 }	q||	krtd	| d
|	 ddS )ab  
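
# Illustrative usage sketch (not part of the original module); the shard
# layouts are made up for demonstration:
#
#     shards = [
#         ShardMetadata(shard_offsets=[0, 0], shard_sizes=[2, 4], placement="rank:0/cpu"),
#         ShardMetadata(shard_offsets=[2, 0], shard_sizes=[2, 4], placement="rank:1/cpu"),
#     ]
#     validate_non_overlapping_shards_metadata(shards)  # ok, shards are disjoint
#
#     shards.append(ShardMetadata(shard_offsets=[1, 0], shard_sizes=[2, 4], placement="rank:1/cpu"))
#     validate_non_overlapping_shards_metadata(shards)  # raises ValueError: shards overlap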


def check_tensor(shards_metadata, tensor_dims) -> None:
    """
    Checks if the shards_metadata is compatible with the provided tensor dims.

    Args:
        shards_metadata(List[ShardMetadata]): List of :class:`ShardMetadata`
            objects representing each shard of the tensor.
        tensor_dims(Sequence of int): Dimensions of tensor to verify
    Raises:
        ``ValueError`` if not compatible.
    """
    tensor_rank = len(tensor_dims)
    shards_rank = len(shards_metadata[0].shard_offsets)
    if tensor_rank != shards_rank:
        raise ValueError(f'Rank of tensor is {tensor_rank}, but shards rank is {shards_rank}')

    # Every shard must stay within the tensor bounds, and the shard volumes must
    # add up to the full tensor volume (overlaps are validated separately).
    total_shard_volume = 0
    for shard in shards_metadata:
        shard_volume = 1
        for i, shard_length in enumerate(shard.shard_sizes):
            shard_volume *= shard_length
            if shard.shard_offsets[i] + shard.shard_sizes[i] > tensor_dims[i]:
                raise ValueError(
                    f'Shard offset {shard.shard_offsets[i]} and length '
                    f'{shard.shard_sizes[i]} exceeds tensor dim: {tensor_dims[i]} for shard {shard}')
        total_shard_volume += shard_volume

    tensor_volume = 1
    for size in tensor_dims:
        tensor_volume *= size

    if total_shard_volume != tensor_volume:
        raise ValueError(
            f'Total volume of shards: {total_shard_volume} '
            f'does not match tensor volume: {tensor_volume}, in other words '
            f'all the individual shards do not cover the entire tensor')
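
# Illustrative usage sketch (not part of the original module): two shards
# splitting a 4 x 4 tensor along dim 0 cover the full volume, so the first call
# passes; dropping one shard triggers the "total volume" ValueError:
#
#     shards = [
#         ShardMetadata(shard_offsets=[0, 0], shard_sizes=[2, 4], placement="rank:0/cpu"),
#         ShardMetadata(shard_offsets=[2, 0], shard_sizes=[2, 4], placement="rank:1/cpu"),
#     ]
#     check_tensor(shards, [4, 4])      # ok
#     check_tensor(shards[:1], [4, 4])  # raises ValueError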


def get_split_size(dim_size, chunks):
    """
    Computes the split size inline with ``torch.chunk``

    Args:
        dim_size(int): Size of the dimension being chunked.
        chunks(int): Number of chunks to create for ``dim_size``.

    Returns:
        An int indicating the split size to use.
    """
    # Ceiling division: every chunk except possibly the last gets this size.
    return (dim_size + chunks - 1) // chunks
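
# Illustrative arithmetic (not part of the original module): splitting a
# dimension of size 10 into 4 chunks gives a split size of ceil(10 / 4) = 3,
# so the chunks end up with sizes 3, 3, 3, 1:
#
#     get_split_size(10, 4)  # 3
#     get_split_size(12, 4)  # 3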


def get_chunked_dim_size(dim_size, split_size, idx):
    """
    Computes the dim size of the chunk for provided ``idx`` given ``dim_size``
    and ``split_size``.

    Args:
        dim_size(int): Size of the dimension being chunked.
        split_size(int): The chunk size for each chunk of ``dim_size``.
        idx(int): The index of chunk whose dim size is being requested.

    Returns:
        An int indicating the dim size of the chunk.
    """
    # The last chunk may be smaller than split_size; chunks past the end are 0.
    return max(min(dim_size, split_size * (idx + 1)) - split_size * idx, 0)
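
# Illustrative arithmetic (not part of the original module): with dim_size=10
# and split_size=3 (i.e. 4 chunks), the per-chunk sizes are:
#
#     get_chunked_dim_size(10, 3, 0)  # 3
#     get_chunked_dim_size(10, 3, 2)  # 3
#     get_chunked_dim_size(10, 3, 3)  # 1   (last, partial chunk)
#     get_chunked_dim_size(10, 3, 4)  # 0   (past the end of the dimension)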


def get_chunk_sharding_params(sharding_dim_size, world_size, spec, rank):
    """
    Generate the start pos and offset length for the current rank for
    chunk sharding.

    Args:
        sharding_dim_size(int): The dimension length which we shard on.
        world_size(int): number of ranks.
        spec (:class:`torch.distributed._shard.sharding_spec.ChunkShardingSpec`):
            sharding spec.
        rank(int): # of cuda process.

    Returns:
        start_pos(int): start position of sharded tensor on the given rank.
        chunk_size(int): chunk size of sharded tensor on the given rank.
    """
    split_size = get_split_size(sharding_dim_size, world_size)
    current_offsets = 0
    start_pos = current_offsets
    # Walk the placements in order, accumulating chunk sizes until we reach the
    # placement that belongs to this rank.
    for idx, placement in enumerate(spec.placements):
        chunk_size = get_chunked_dim_size(sharding_dim_size, split_size, idx)
        if rank == placement.rank():
            start_pos = current_offsets
            break
        current_offsets += chunk_size
    return start_pos, chunk_size
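
# Illustrative usage sketch (not part of the original module), assuming
# ChunkShardingSpec is importable from torch.distributed._shard.sharding_spec
# and accepts placement strings. Placing 4 chunks of a length-10 dim on
# ranks 0..3 in order:
#
#     spec = ChunkShardingSpec(dim=0, placements=[
#         "rank:0/cpu", "rank:1/cpu", "rank:2/cpu", "rank:3/cpu",
#     ])
#     get_chunk_sharding_params(10, 4, spec, rank=2)  # (6, 3): starts at offset 6, holds 3 rows
#     get_chunk_sharding_params(10, 4, spec, rank=3)  # (9, 1): last rank gets the remainder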