U
    Jc	G                     @   s  d dl Z d dlmZ d dlmZmZmZmZmZm	Z	m
Z
mZmZ d dlmZ d dlmZ ddddd	d
dgZedddZejeeeedddZejeee	eej  edddZejeedddZejeeedddZedddfejeeeee	eej   ee	eej   edddZejejhe_ejhe_e j eeed ddd	Z!ejeejddd
Z"eG d d dZ#ejeejd!d"d#Z$d*ejeee	ej e	ej% eee
ejef d%d&d'Z&G d(d) d)Z'dS )+    N)	dataclass)	AnyCallableDict	GeneratorOptionalSetTupleTypecast)
_BatchNormalways_wrap_policylambda_auto_wrap_policytransformer_auto_wrap_policysize_based_auto_wrap_policyenable_wrapwrapParamExecOrderWrapPolicyreturnc                  O   s   dS )z
    A simple wrapper policy that always returns ``True``,
    i.e. when passed as the `auto_wrap_policy` into FSDP,
    this will result in all submodules being wrapped as
    distinct FSDP instances.
    T )argskwargsr   r   ?/tmp/pip-unpacked-wheel-gikjz4vx/torch/distributed/fsdp/wrap.pyr   #   s    )modulerecurseunwrapped_params	lambda_fnr   c                 C   s   |rdS || S dS )a  
    A convenient auto wrap policy to wrap submodules based on an arbitrary user
    function. If `lambda_fn(submodule) == True``, the submodule will be wrapped as
    a `wrapper_cls` unit.

    Return if a module should be wrapped during auto wrapping.

    The first three parameters are required by :func:`_recursive_wrap`.

    Args:
       module (nn.Module):
           The module to be considered in this decision.
       recurse (bool):
           Indicate if this is called to make a decision on whether we
           should recurse down a subgraph of the module structure.
           If False, it means this function is called to make a decision
           on whether we should wrap the said module.
       unwrapped_params (int):
           The number of parameters yet to be wrapped in this module.

       lambda_fn (Callable[nn.Module] -> bool):
           If this returns ``True``, this module will be wrapped by
           wrapper_cls individually.
    TNr   )r   r   r   r   r   r   r   r   ,   s    )r   r   r   transformer_layer_clsr   c                 C   s   |rdS t | t|S dS )ae  
    A convenient auto wrap policy for transformer models. If the submodule
    is an instance of transformer_layer_cls, the submodule will be wrapped
    as a FSDP unit. Otherwise, all the other remainder submodules are wrapped
    by the outermost FSDP unit. Right now, FSDP requires submodules that share
    weights to be wrapped in the same FSDP unit, this auto wrap policy can
    conviniently wrap the shared embeddings into the same FSDP unit for transformer
    models. In the near future, FSDP will support submodules that share weights
    to be wrapped in the separated FSDP units.

    Return if a module should be wrapped during FSDP auto wrapping.

    The first three parameters are required by :func:`_recursive_wrap`.


    Args:
       module (nn.Module):
           The module to be considered in this decision.
       recurse (bool):
           Indicate if this is called to make a decision on whether we
           should recurse down a subgraph of the module structure.
           If False, it means this function is called to make a decision
           on whether we should wrap the said module.
       unwrapped_params (int):
           The number of parameters yet to be wrapped in this module.

       transformer_layer_cls (int):
           Submodules with one of the `transformer_layer_cls` names
           will be wrapped as separated FSDP units
    TN)
isinstancetuple)r   r   r   r   r   r   r   r   Q   s    $)r   r   r   c                 O   s   |rdS t | tS dS )zM
    A policy that wraps ``BatchNorm`` instances in their own FSDP unit.
    TN)r   r   )r   r   r   r   r   r   r   _wrap_batchnorm_individually|   s    	r!   )r   r   r   r   c                    s   t  fdd|D S )zv
    A policy that wraps ``module`` if any policy in the passed in iterable of
    ``policies`` returns ``True``.
    c                 3   s   | ]}| V  qd S Nr   ).0policyr   r   r   r   r   	<genexpr>   s    z_or_policy.<locals>.<genexpr>)any)r   r   r   Zpoliciesr   r%   r   
_or_policy   s    
r(   g    חA)r   r   r   min_num_paramsforce_leaf_modulesexclude_wrap_modulesr   c                 C   s\   |dkrt jn|}|dkr t jn|}||k}|rD|oBt| t| S |oVt| t| S dS )a  A size based auto_wrap_policy function for FSDP API.

       Return if a module should be wrapped during FSDP auto wrapping.

       The first three parameters are used by :func:`_recursive_wrap`. If
       you write a custom version of this policy function, your version
       needs to at least accept the first three parameters and free
       to do whatever you want in the function.

    Args:
       module (nn.Module):
           The module to be considered in this decision.
       recurse (bool):
           Indicate if this is called to make a decision on whether we
           should recurse down a subgraph of the module structure.
           If False, it means this function is called to make a decision
           on whether we should wrap the said module.
       unwrapped_params (int):
           The number of parameters yet to be wrapped in this module.

       min_num_params (int):
           Customizable policy input. It controls the size threshold
           on how big should a module be to be considered wrapped.
       force_leaf_modules (Set[Type[nn.Module]]): set of module types to
           keep as leaves, i.e., their children will never be wrapped.
       exclude_wrap_modules (Set[Type[nn.Module]]):
           Customizable set of module types to be excluded in wrapping.
    N)r   FORCE_LEAF_MODULESEXCLUDE_WRAP_MODULESr   r    )r   r   r   r)   r*   r+   Zis_larger   r   r   r      s    ')NNN)wrapper_clswrapper_kwargsr   c              	   k   s,   d| i|}t f | dV  W 5 Q R X dS )a  
    Context manager to wrap modules using a wrapper.

    Useful for when you'd like to apply the same configuration arguments to all
    child modules that you wrap. A particularly important use case is wrapping
    large layers so that they get sharded (in-place) during initialization, to
    avoid running out of system memory. Large layers can indicate that they
    should be sharded via the ``wrap`` annotation and this context manager can
    provide the exact configuration for these nested instances.

    Usage::

        with enable_wrap(wrapper_cls, **params):
            # Wraps layer in FSDP by default if within context
            self.l1 = wrap(torch.nn.Linear(5, 5))

    Args:
        wrapper_cls:
            Class that `wrap` annotation will `wrap` modules with, such as
            `FullyShardedDataParallel`.
        **wrapper_kwargs:
            Configuration settings that will be passed to all ``wrap``
            instances inside the context
    r.   N)_ConfigAutoWrap)r.   r/   r   r   r   r   r      s
    )r   wrap_overridesr   c                 K   s2   t jr.t jdk	stt j|}t| t jf|S | S )a  
    Annotate that a module should be wrapped. Annotated modules will only be
    wrapped if inside of an :func:`enable_wrap` context manager. This allows
    a module to be initialized both with and without a wrapper without code
    change.

    The class that this function wraps the passed in ``nn.Module`` with is the
    passed in ``wrapper_cls`` argument into ``enable_wrap``. Both
    ``enable_wrap`` and ``wrap`` can take in kwargs specifying how to construct
    the ``wrapper_cls`` instance. In the case of duplicate kwargs in
    ``enable_wrap`` and ``wrap``, the argument passed into ``wrap`` will be
    respected.

    Usage::

        with enable_wrap(wrapper_cls=FSDP, **fsdp_config):
            # Wraps layer in FSDP by default if within context
            self.l1 = wrap(torch.nn.Linear(5, 5))

    Args:
        module (nn.Module): module to wrap (if in :func:`enable_wrap` context)
        **wrap_overrides: configuration overrides that will take priority over
            the values provided by the :func:`enable_wrap` context
    N)r0   in_autowrap_contextr.   AssertionErrorr   _wrap)r   r1   r   r   r   r      s    
c                   @   s*   e Zd ZU dZeZeed< dZe	ed< dS )r   av
  
    This is the class used for the wrapping policy that wraps parameters and performs
    the communication scheduling based on the parameter execution order in the forward pass
    (also called non-recursive wrapping policy).

    The policy contains multiple wraps. Each wrap contains original parameters that will be executed together,
    and the wrap transfers these parameters into one ``FlattenParameter``. In both forward and the backward passes,
    the sharded parameters in each wrap will be gathered just before these parameters are used in the passes.
    These parameters will then be reshaded once they have been used.

    TODO (linjianma): For now, the parameters contained in each wrap of ``ParamExecOrderWrapPolicy``
    are the parameters in each wrap of the ``init_policy`` (a recursive wrapping policy).
    Later we will wrap parameters based on bucket size.

    Args:
        init_policy (Callable):
            The initial recursive wrapping policy used to guide the wrapping of
            this policy. If tracing_config is none, in the first forward and
            backward iteration, ``init_policy`` is used to record parameter
            execution order. Otherwise, init_policy is only used in FSDP
            constructor for module level wrapping.

            The default ``always_wrap_policy`` might not be the best choice for every model. For example, for
            transformer based models, setting ``transformer_auto_wrap_policy`` as the ``init_policy`` will guarantee
            wrapping each transformer layer into one FSDP unit, and can be easily combined with checkpointing
            within each transformer layer.

        tracing_config (Optional[TracingConfig]):
            The configuration used to perform symbolic tracing at FSDP
            constructor to get the module and parameter execution order. The
            type of ``tracing_config`` needs to be either ``None`` or
            ``TracingConfig``. If set as ``None``, then symbolic tracing is not
            enabled, and one forward as well as backward iteration are needed to
            get the parameter execution order.

    ..warning :: Note that not all modules can be successfully traced when
    ``tracing_config`` is not None and symbolic tracing is enabled. The two
    cases below may be unable to trace: 1. when there is a data-dependent
    branch, 2. when the forward pass contains operators that don't support
    ``torch.fx.Proxy`` as the input type (e.g. ``arange``, ``zeros``, ``ones``,
    ``full``, ``full_like``, ``eye``, ``empty``, ``tensor``). For those cases,
    users can set ``tracing_config = None`` to disable symbolic tracing.
    init_policyNtracing_config)
__name__
__module____qualname____doc__r   r5   r   __annotations__r6   r   r   r   r   r   r   #  s   
+)r   r.   r   c                 K   s8   |d k	st t| dr,|| j}|| f|S || f|S )N_wrap_overrides)r3   hasattrr<   )r   r.   r   Z	overridesr   r   r   r4   T  s
    

r4   F)r   auto_wrap_policyr.   ignored_modulesignored_paramsonly_wrap_childrenr   r   c              	      s2  |dk	st d|dk	s t d|  D ]@\}}||kr:q(zt|tt|rPt W q( tk
rf   Y q(X q(t fdd|  D }	|dk	st || d|	dr*d}
|  D ]D\}}||krqt	f |||| d	|\}}t
| || |
|7 }
q|	|
 }|s"|| d
|dr"t| |f||	fS | |
fS | dfS )a  
    Automatically wrap child modules of *module* that meet the given
    criteria with :func:`auto_wrap`. Does not rely on _ConfigAutoWrap.
    Args:
        module (nn.Module):
            module to recursively wrap
        auto_wrap_policy (Callable):
            A callable specifying a policy to recursively wrap layers with FSDP.
        ignored_modules (Set[torch.nn.Module]): Modules to ignore when
            wrapping.
        ignored_params (Set[torch.nn.Parameter]): Parameters to ignore when
            wrapping; these should be the parameters contained in the modules
            in ``ignored_modules``.
    Returns:
        (nn.Module, int):
            Wrapped module and the number parameters wrapped recursively.
    NzMust specify auto_wrap_policy.zMust specify wrapper_clsc                 3   s   | ]}| kr|  V  qd S r"   )Znumel)r#   pr@   r   r   r&     s     z"_recursive_wrap.<locals>.<genexpr>Tr%   r   )r   r>   r.   r?   r@   F)r3   Znamed_modulesr   r   type	TypeErrorsum
parametersZnamed_children_recursive_wrapsetattrr4   )r   r>   r.   r?   r@   rA   r   _childZ
num_paramsZtotal_wrapped_paramsnameZwrapped_childZnum_wrapped_params	remainderr   rC   r   rH   a  sL    

  rH   c                   @   s   e Zd ZU dZdZeed< dZee	 ed< i Z
eeef ed< eeef ddd	Zeedd
ddZeddddZddddZeeeddddZdS )r0   z
    Helper class to wrap modules based on default config args via a context manager.
    See :func:`enable_wrap` for more information.
    Fr2   Nr.   r   r   c                 K   s
   || _ d S r"   rN   )selfr   r   r   r   __init__  s    z_ConfigAutoWrap.__init__)r   r   c                 C   sH   t jrtddt _d|  ks(tdtt| d t _| d= | t _d S )Nz]You are already within an autowrap context and we currently do not supported nested autowrap.Tr.   z9Expected to pass in wrapper_cls arg into _ConfigAutoWrap.)	r0   r2   NotImplementedErrorkeysr3   r   r   r.   r   rN   r   r   r   enable_autowrap_context  s    
z'_ConfigAutoWrap.enable_autowrap_contextr   c                   C   s   dt _d t _i t _d S )NF)r0   r2   r.   r   r   r   r   r   disable_autowrap_context  s    z(_ConfigAutoWrap.disable_autowrap_contextc                 C   s   |  | j d S r"   )rS   r   )rO   r   r   r   	__enter__  s    z_ConfigAutoWrap.__enter__)exc_typeexc_valexc_tbr   c                 C   s   |    d S r"   )rT   )rO   rV   rW   rX   r   r   r   __exit__  s    z_ConfigAutoWrap.__exit__)r7   r8   r9   r:   r2   boolr;   r.   r   r   r   r   strr   rP   staticmethodrS   rT   rU   rY   r   r   r   r   r0     s   
r0   )F)(
contextlibZdataclassesr   typingr   r   r   r   r   r   r	   r
   r   Ztorch.nnnnZtorch.nn.modules.batchnormr   __all__rZ   r   Moduleintr   r   r!   r(   r   Z
ModuleListZ
ModuleDictr-   ZMultiheadAttentionr,   contextmanagerr   r   r   r4   	ParameterrH   r0   r   r   r   r   <module>   s   ,
&,:
 #%0 J