"""Leaky version of a Rectified Linear Unit activation layer."""

from keras import backend
from keras.engine.base_layer import Layer
from keras.utils import tf_utils

from tensorflow.python.util.tf_export import keras_export


@keras_export("keras.layers.LeakyReLU")
class LeakyReLU(Layer):
    """Leaky version of a Rectified Linear Unit.

    It allows a small gradient when the unit is not active:

    ```
      f(x) = alpha * x if x < 0
      f(x) = x if x >= 0
    ```

    Usage:

    >>> layer = tf.keras.layers.LeakyReLU()
    >>> output = layer([-3.0, -1.0, 0.0, 2.0])
    >>> list(output.numpy())
    [-0.9, -0.3, 0.0, 2.0]
    >>> layer = tf.keras.layers.LeakyReLU(alpha=0.1)
    >>> output = layer([-3.0, -1.0, 0.0, 2.0])
    >>> list(output.numpy())
    [-0.3, -0.1, 0.0, 2.0]

    Input shape:
      Arbitrary. Use the keyword argument `input_shape`
      (tuple of integers, does not include the batch axis)
      when using this layer as the first layer in a model.
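
      For example, as the first layer of a small model (an illustrative
      sketch; the unit count and input size are arbitrary choices):

      >>> model = tf.keras.Sequential([
      ...     tf.keras.layers.Dense(8, input_shape=(4,)),
      ...     tf.keras.layers.LeakyReLU(alpha=0.1),
      ... ])
      >>> model.output_shape
      (None, 8)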

    Output shape:
      Same shape as the input.

    Args:
      alpha: Float >= 0. Negative slope coefficient. Defaults to 0.3.

    """

    def __init__(self, alpha=0.3, **kwargs):
        super().__init__(**kwargs)
        if alpha is None:
            raise ValueError(
                "The alpha value of a Leaky ReLU layer cannot be None, "
                f"Expecting a float. Received: {alpha}"
            )

        self.supports_masking = True
        self.alpha = backend.cast_to_floatx(alpha)

    def call(self, inputs):
        return backend.relu(inputs, alpha=self.alpha)

    def get_config(self):
        config = {"alpha": float(self.alpha)}
        base_config = super().get_config()
        return dict(list(base_config.items()) + list(config.items()))

    @tf_utils.shape_type_conversion
    def compute_output_shape(self, input_shape):
        return input_shape
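

if __name__ == "__main__":
    # Illustrative smoke test, not part of the library module proper; the
    # alpha value and inputs below are arbitrary choices. It checks that
    # negative inputs are scaled by ``alpha`` in ``call`` and that
    # ``get_config`` round-trips ``alpha`` so the layer can be rebuilt.
    import tensorflow as tf

    layer = LeakyReLU(alpha=0.25)
    print(layer(tf.constant([-2.0, 0.0, 3.0])).numpy())  # approx. [-0.5, 0.0, 3.0]

    rebuilt = LeakyReLU.from_config(layer.get_config())
    print(rebuilt.get_config()["alpha"])  # 0.25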