"""Contains the AlphaDropout layer."""

import tensorflow.compat.v2 as tf

from keras import backend
from keras.engine import base_layer
from keras.utils import tf_utils
from tensorflow.python.util.tf_export import keras_export


@keras_export("keras.layers.AlphaDropout")
class AlphaDropout(base_layer.BaseRandomLayer):
    """Applies Alpha Dropout to the input.

    Alpha Dropout is a `Dropout` that keeps mean and variance of inputs
    to their original values, in order to ensure the self-normalizing property
    even after this dropout.
    Alpha Dropout fits well to Scaled Exponential Linear Units
    by randomly setting activations to the negative saturation value.

    Args:
      rate: float, drop probability (as with `Dropout`).
        The multiplicative noise will have
        standard deviation `sqrt(rate / (1 - rate))`.
      seed: Integer, optional random seed to enable deterministic behavior.

    Call arguments:
      inputs: Input tensor (of any rank).
      training: Python boolean indicating whether the layer should behave in
        training mode (adding dropout) or in inference mode (doing nothing).

    Input shape:
      Arbitrary. Use the keyword argument `input_shape`
      (tuple of integers, does not include the samples axis)
      when using this layer as the first layer in a model.

    Output shape:
      Same shape as input.
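
    Example (a minimal usage sketch assuming the public `tf.keras` API;
    Alpha Dropout is typically paired with `selu` activations and
    `lecun_normal` initialization in self-normalizing networks, and the
    sizes shown are illustrative):

    >>> inputs = tf.keras.Input(shape=(16,))
    >>> x = tf.keras.layers.Dense(
    ...     8, activation="selu", kernel_initializer="lecun_normal"
    ... )(inputs)
    >>> outputs = tf.keras.layers.AlphaDropout(0.1)(x)
    >>> model = tf.keras.Model(inputs, outputs)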
    """

    def __init__(self, rate, noise_shape=None, seed=None, **kwargs):
        super().__init__(seed=seed, **kwargs)
        self.rate = rate
        self.noise_shape = noise_shape
        self.seed = seed
        self.supports_masking = True

    def _get_noise_shape(self, inputs):
        return self.noise_shape if self.noise_shape else tf.shape(inputs)

    def call(self, inputs, training=None):
        if 0.0 < self.rate < 1.0:
            noise_shape = self._get_noise_shape(inputs)

            def dropped_inputs(inputs=inputs, rate=self.rate):
                alpha = 1.6732632423543772848170429916717
                scale = 1.0507009873554804934193349852946
                alpha_p = -alpha * scale

                # Mask of units to keep, drawn from the layer's random
                # generator so that the optional seed is respected.
                kept_idx = tf.greater_equal(
                    self._random_generator.random_uniform(noise_shape), rate
                )
                kept_idx = tf.cast(kept_idx, inputs.dtype)

                # Affine transformation parameters that restore the
                # original mean and variance after dropping.
                a = ((1 - rate) * (1 + rate * alpha_p**2)) ** -0.5
                b = -a * alpha_p * rate

                # Set dropped activations to the negative saturation value.
                x = inputs * kept_idx + alpha_p * (1 - kept_idx)

                # Apply the affine transformation.
                return a * x + b

            return backend.in_train_phase(
                dropped_inputs, inputs, training=training
            )
        return inputs

    def get_config(self):
        config = {"rate": self.rate, "seed": self.seed}
        base_config = super().get_config()
        return dict(list(base_config.items()) + list(config.items()))

    @tf_utils.shape_type_conversion
    def compute_output_shape(self, input_shape):
        return input_shape