"""Rectified Linear Unit activation layer."""

from keras import backend
from keras.engine.base_layer import Layer
from keras.utils import tf_utils
from tensorflow.python.util.tf_export import keras_export


@keras_export("keras.layers.ReLU")
class ReLU(Layer):
    """Rectified Linear Unit activation function.

    With default values, it returns element-wise `max(x, 0)`.

    Otherwise, it follows:

    ```
      f(x) = max_value if x >= max_value
      f(x) = x if threshold <= x < max_value
      f(x) = negative_slope * (x - threshold) otherwise
    ```

    Usage:

    >>> layer = tf.keras.layers.ReLU()
    >>> output = layer([-3.0, -1.0, 0.0, 2.0])
    >>> list(output.numpy())
    [0.0, 0.0, 0.0, 2.0]
    >>> layer = tf.keras.layers.ReLU(max_value=1.0)
    >>> output = layer([-3.0, -1.0, 0.0, 2.0])
    >>> list(output.numpy())
    [0.0, 0.0, 0.0, 1.0]
    >>> layer = tf.keras.layers.ReLU(negative_slope=1.0)
    >>> output = layer([-3.0, -1.0, 0.0, 2.0])
    >>> list(output.numpy())
    [-3.0, -1.0, 0.0, 2.0]
    >>> layer = tf.keras.layers.ReLU(threshold=1.5)
    >>> output = layer([-3.0, -1.0, 1.0, 2.0])
    >>> list(output.numpy())
    [0.0, 0.0, 0.0, 2.0]

    Input shape:
      Arbitrary. Use the keyword argument `input_shape`
      (tuple of integers, does not include the batch axis)
      when using this layer as the first layer in a model.

    Output shape:
      Same shape as the input.

    Args:
      max_value: Float >= 0. Maximum activation value. Defaults to None, which
        means unlimited.
      negative_slope: Float >= 0. Negative slope coefficient. Defaults to 0.
      threshold: Float >= 0. Threshold value for thresholded activation.
        Defaults to 0.
    """

    def __init__(self, max_value=None, negative_slope=0.0, threshold=0.0, **kwargs):
        super().__init__(**kwargs)
        if max_value is not None and max_value < 0.0:
            raise ValueError(
                "max_value of a ReLU layer cannot be a negative "
                f"value. Received: {max_value}"
            )
        if negative_slope is None or negative_slope < 0.0:
            raise ValueError(
                "negative_slope of a ReLU layer cannot be a negative "
                f"value. Received: {negative_slope}"
            )
        if threshold is None or threshold < 0.0:
            raise ValueError(
                "threshold of a ReLU layer cannot be a negative "
                f"value. Received: {threshold}"
            )

        self.supports_masking = True
        if max_value is not None:
            max_value = backend.cast_to_floatx(max_value)
        self.max_value = max_value
        self.negative_slope = backend.cast_to_floatx(negative_slope)
        self.threshold = backend.cast_to_floatx(threshold)

    def call(self, inputs):
        return backend.relu(
            inputs,
            alpha=self.negative_slope,
            max_value=self.max_value,
            threshold=self.threshold,
        )

    def get_config(self):
        config = {
            "max_value": self.max_value,
            "negative_slope": self.negative_slope,
            "threshold": self.threshold,
        }
        base_config = super().get_config()
        return dict(list(base_config.items()) + list(config.items()))

    @tf_utils.shape_type_conversion
    def compute_output_shape(self, input_shape):
        return input_shape
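

if __name__ == "__main__":
    # Hedged usage sketch, not part of the original module: it assumes
    # TensorFlow 2.x is installed and that this layer is reachable as
    # `tf.keras.layers.ReLU` (the public path named in the class docstring).
    # The expected values follow the piecewise formula documented above:
    #   f(x) = max_value                        if x >= max_value
    #   f(x) = x                                if threshold <= x < max_value
    #   f(x) = negative_slope * (x - threshold) otherwise
    import tensorflow as tf

    layer = tf.keras.layers.ReLU(max_value=6.0, negative_slope=0.1, threshold=1.0)
    x = tf.constant([-2.0, 0.5, 2.0, 10.0])
    print(layer(x).numpy())
    # Expected (up to float rounding): [-0.3, -0.05, 2.0, 6.0]
    #   -2.0 -> 0.1 * (-2.0 - 1.0) = -0.3   (below threshold)
    #    0.5 -> 0.1 * ( 0.5 - 1.0) = -0.05  (below threshold)
    #    2.0 -> 2.0                         (between threshold and max_value)
    #   10.0 -> 6.0                         (clipped at max_value)

    # Round-tripping through the layer config preserves the three
    # hyperparameters serialized by `get_config`.
    restored = tf.keras.layers.ReLU.from_config(layer.get_config())
    print(restored.get_config())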