import sys
import functools
import inspect

import torch
from typing import Any, Callable, TypeVar, cast

__all__ = ['no_grad', 'enable_grad', 'set_grad_enabled', 'inference_mode']

# Type aliases used to annotate the decorator usage of the modes below.
FuncType = Callable[..., Any]
F = TypeVar('F', bound=FuncType)


class _DecoratorContextManager:
    """Allow a context manager to be used as a decorator"""

    def __call__(self, func: F) -> F:
        if inspect.isgeneratorfunction(func):
            return self._wrap_generator(func)

        @functools.wraps(func)
        def decorate_context(*args, **kwargs):
            with self.clone():
                return func(*args, **kwargs)
        return cast(F, decorate_context)

    def _wrap_generator(self, func):
        """Wrap each generator invocation with the context manager"""
        @functools.wraps(func)
        def generator_context(*args, **kwargs):
            gen = func(*args, **kwargs)

            # Generators are suspended and unsuspended at `yield`, so the mode
            # must be re-entered every time execution resumes inside the
            # wrapped generator and restored when it yields back to the caller.
            try:
                # Issuing `None` to a generator fires it up.
                with self.clone():
                    response = gen.send(None)

                while True:
                    try:
                        # Forward the response to our caller and get its next request.
                        request = yield response
                    except GeneratorExit:
                        # Inform the still active generator about its imminent closure.
                        with self.clone():
                            gen.close()
                        raise
                    except BaseException:
                        # Propagate the exception thrown at us by the caller.
                        with self.clone():
                            response = gen.throw(*sys.exc_info())
                    else:
                        # Pass the last request to the generator and get its response.
                        with self.clone():
                            response = gen.send(request)

            # Exceptions raised by the generator's `.throw` or `.send` methods
            # bubble up to our caller, except for StopIteration.
            except StopIteration as e:
                # The generator is done: hand its return value to our caller.
                return e.value

        return generator_context

    def __enter__(self) -> None:
        raise NotImplementedError

    def __exit__(self, exc_type: Any, exc_value: Any, traceback: Any) -> None:
        raise NotImplementedError

    def clone(self):
        return self.__class__()
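
# Editor's sketch (illustrative; `_loud_mode` is a hypothetical subclass, not
# part of this module): implementing __enter__/__exit__ is all a subclass needs
# in order to be usable both as a context manager and as a decorator:
#
#     class _loud_mode(_DecoratorContextManager):
#         def __enter__(self) -> None:
#             print("enter")
#
#         def __exit__(self, exc_type, exc_value, traceback) -> None:
#             print("exit")
#
#     @_loud_mode()
#     def f():
#         return 1
#
#     f()  # prints "enter" then "exit", and returns 1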


class no_grad(_DecoratorContextManager):
    r"""Context-manager that disables gradient calculation.

    Disabling gradient calculation is useful for inference, when you are sure
    that you will not call :meth:`Tensor.backward()`. It will reduce memory
    consumption for computations that would otherwise have ``requires_grad=True``.

    In this mode, the result of every computation will have
    ``requires_grad=False``, even when the inputs have ``requires_grad=True``.

    This context manager is thread local; it will not affect computation
    in other threads.

    Also functions as a decorator. (Make sure to instantiate with parentheses.)

    .. note::
        No-grad is one of several mechanisms that can enable or
        disable gradients locally; see :ref:`locally-disable-grad-doc` for
        more information on how they compare.

    .. note::
        This API does not apply to :ref:`forward-mode AD <forward-mode-ad>`.
        If you want to disable forward AD for a computation, you can unpack
        your dual tensors.

    Example::

        >>> x = torch.tensor([1.], requires_grad=True)
        >>> with torch.no_grad():
        ...   y = x * 2
        >>> y.requires_grad
        False
        >>> @torch.no_grad()
        ... def doubler(x):
        ...     return x * 2
        >>> z = doubler(x)
        >>> z.requires_grad
        False
    """

    def __init__(self) -> None:
        if not torch._jit_internal.is_scripting():
            super().__init__()
        self.prev = False

    def __enter__(self) -> None:
        self.prev = torch.is_grad_enabled()
        torch.set_grad_enabled(False)

    def __exit__(self, exc_type: Any, exc_value: Any, traceback: Any) -> None:
        torch.set_grad_enabled(self.prev)
zno_grad.__enter__r1   c                 C   s   t | j d S r   )r>   r   rC   r5   r   r   r   r6      s    zno_grad.__exit__)	r9   r:   r;   r<   rB   r0   r   r6   __classcell__r   r   r7   r   r   W   s   &r   c                   @   s2   e Zd ZdZddddZeeeddddZdS )	r   aH  Context-manager that enables gradient calculation.

    Enables gradient calculation, if it has been disabled via :class:`~no_grad`
    or :class:`~set_grad_enabled`.

    This context manager is thread local; it will not affect computation
    in other threads.

    Also functions as a decorator. (Make sure to instantiate with parentheses.)

    .. note::
        enable_grad is one of several mechanisms that can enable or
        disable gradients locally; see :ref:`locally-disable-grad-doc` for
        more information on how they compare.

    .. note::
        This API does not apply to :ref:`forward-mode AD <forward-mode-ad>`.

    Example::

        >>> x = torch.tensor([1.], requires_grad=True)
        >>> with torch.no_grad():
        ...   with torch.enable_grad():
        ...     y = x * 2
        >>> y.requires_grad
        True
        >>> y.backward()
        >>> x.grad
        tensor([2.])
        >>> @torch.enable_grad()
        ... def doubler(x):
        ...     return x * 2
        >>> with torch.no_grad():
        ...     z = doubler(x)
        >>> z.requires_grad
        True

    """

    def __enter__(self) -> None:
        self.prev = torch.is_grad_enabled()
        torch._C._set_grad_enabled(True)

    def __exit__(self, exc_type: Any, exc_value: Any, traceback: Any) -> None:
        torch._C._set_grad_enabled(self.prev)


class set_grad_enabled(_DecoratorContextManager):
    r"""Context-manager that sets gradient calculation on or off.

    ``set_grad_enabled`` will enable or disable grads based on its argument :attr:`mode`.
    It can be used as a context-manager or as a function.

    This context manager is thread local; it will not affect computation
    in other threads.

    Args:
        mode (bool): Flag whether to enable grad (``True``), or disable
                     (``False``). This can be used to conditionally enable
                     gradients.

    .. note::
        set_grad_enabled is one of several mechanisms that can enable or
        disable gradients locally; see :ref:`locally-disable-grad-doc` for
        more information on how they compare.

    .. note::
        This API does not apply to :ref:`forward-mode AD <forward-mode-ad>`.

    Example::

        >>> x = torch.tensor([1.], requires_grad=True)
        >>> is_train = False
        >>> with torch.set_grad_enabled(is_train):
        ...   y = x * 2
        >>> y.requires_grad
        False
        >>> _ = torch.set_grad_enabled(True)
        >>> y = x * 2
        >>> y.requires_grad
        True
        >>> _ = torch.set_grad_enabled(False)
        >>> y = x * 2
        >>> y.requires_grad
        False

    """

    def __init__(self, mode: bool) -> None:
        self.prev = torch.is_grad_enabled()
        # Grad mode is flipped eagerly here, so the class also works when
        # called as a plain function rather than entered as a context manager.
        torch._C._set_grad_enabled(mode)
        self.mode = mode

    def __enter__(self) -> None:
        pass

    def __exit__(self, exc_type: Any, exc_value: Any, traceback: Any) -> None:
        torch._C._set_grad_enabled(self.prev)

    def clone(self):
        return self.__class__(self.mode)
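
# Editor's sketch (illustrative; `run_model` is a hypothetical helper): since
# the mode is selected by a boolean, set_grad_enabled suits code paths shared
# between training and evaluation:
#
#     def run_model(model, batch, train: bool):
#         with torch.set_grad_enabled(train):
#             return model(batch)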
Zdd Z  Z	S )r	   a  Context-manager that enables or disables inference mode

    InferenceMode is a new context manager analogous to :class:`~no_grad`
    to be used when you are certain your operations will have no interactions
    with autograd (e.g., you are not training a model). Code run under this mode gets better
    performance by disabling view tracking and version counter bumps. Note that
    unlike some other mechanisms that locally enable or disable grad,
    entering inference_mode also disables :ref:`forward-mode AD <forward-mode-ad>`.

    This context manager is thread local; it will not affect computation
    in other threads.

    Also functions as a decorator. (Make sure to instantiate with parentheses.)

    .. note::
        Inference mode is one of several mechanisms that can enable or
        disable gradients locally; see :ref:`locally-disable-grad-doc` for
        more information on how they compare.

    Args:
        mode (bool): Flag whether to enable or disable inference mode.

    Example::
        >>> import torch
        >>> x = torch.ones(1, 2, 3, requires_grad=True)
        >>> with torch.inference_mode():
        ...   y = x * x
        >>> y.requires_grad
        False
        >>> y._version
        Traceback (most recent call last):
        File "<stdin>", line 1, in <module>
        RuntimeError: Inference tensors do not track version counter.
        >>> @torch.inference_mode()
        ... def func(x):
        ...   return x * x
        >>> out = func(x)
        >>> out.requires_grad
        False

    """

    def __init__(self, mode: bool = True) -> None:
        if not torch._jit_internal.is_scripting():
            super().__init__()
        # Holds a binding to the C++ RAII guard while the mode is entered.
        self._inference_mode_raii_guard = None
        self.mode = mode

    def __enter__(self) -> None:
        self._inference_mode_raii_guard = torch._C._InferenceMode(self.mode)

    def __exit__(self, exc_type: Any, exc_value: Any, traceback: Any) -> None:
        del self._inference_mode_raii_guard

    def clone(self):
        return self.__class__(self.mode)
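
# Editor's note (illustrative usage; `evaluate` and `model` are hypothetical):
# inference_mode is stricter than no_grad: tensors it creates are "inference
# tensors" that generally cannot participate in autograd-recorded computation
# later, which is what permits skipping view tracking and version counters.
#
#     model = torch.nn.Linear(3, 1)
#
#     @torch.inference_mode()
#     def evaluate(batch):
#         return model(batch)
#
#     preds = evaluate(torch.ones(2, 3))  # preds.requires_grad is False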