"""Constraints: functions that impose constraints on weight values."""

from tensorflow.python.framework import tensor_shape
from tensorflow.python.keras import backend
from tensorflow.python.keras.utils.generic_utils import deserialize_keras_object
from tensorflow.python.keras.utils.generic_utils import serialize_keras_object
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.util.tf_export import keras_export
from tensorflow.tools.docs import doc_controls

@keras_export('keras.constraints.Constraint')
class Constraint:
  """Base class for weight constraints.

  A `Constraint` instance works like a stateless function.
  Users who subclass this class should override the `__call__` method, which
  takes a single weight parameter and returns a projected version of that
  parameter (e.g. normalized or clipped). Constraints can be used with various
  Keras layers via the `kernel_constraint` or `bias_constraint` arguments.

  Here's a simple example of a non-negative weight constraint:

  >>> class NonNegative(tf.keras.constraints.Constraint):
  ...
  ...  def __call__(self, w):
  ...    return w * tf.cast(tf.math.greater_equal(w, 0.), w.dtype)

  >>> weight = tf.constant((-1.0, 1.0))
  >>> NonNegative()(weight)
  <tf.Tensor: shape=(2,), dtype=float32, numpy=array([0.,  1.], dtype=float32)>

  >>> tf.keras.layers.Dense(4, kernel_constraint=NonNegative())
  """

  def __call__(self, w):
    """Applies the constraint to the input weight variable.

    By default, the input weight variable is not modified.
    Users should override this method to implement their own projection
    function.

    Args:
      w: Input weight variable.

    Returns:
      Projected variable (by default, returns unmodified inputs).
    """
    return w

  def get_config(self):
    """Returns a Python dict of the object config.

    A constraint config is a Python dictionary (JSON-serializable) that can
    be used to reinstantiate the same object.

    Returns:
      Python dict containing the configuration of the constraint object.
    """
    return {}

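
# Illustrative sketch (hypothetical, not part of the public API): a minimal
# configurable constraint following the contract documented above -- override
# `__call__` and mirror the constructor arguments in `get_config` so the
# object can be serialized and re-created. The class name and its `clip_value`
# argument are invented for this example.
class _ExampleClipByValue(Constraint):
  """Clips weights elementwise to the range `[-clip_value, clip_value]`."""

  def __init__(self, clip_value=1.0):
    self.clip_value = clip_value

  def __call__(self, w):
    return backend.clip(w, -self.clip_value, self.clip_value)

  def get_config(self):
    return {'clip_value': self.clip_value}
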
S )MaxNorma/  MaxNorm weight constraint.

  Constrains the weights incident to each hidden unit
  to have a norm less than or equal to a desired value.

  Also available via the shortcut function `tf.keras.constraints.max_norm`.

  Args:
    max_value: the maximum norm value for the incoming weights.
    axis: integer, axis along which to calculate weight norms.
      For instance, in a `Dense` layer the weight matrix
      has shape `(input_dim, output_dim)`,
      set `axis` to `0` to constrain each weight vector
      of length `(input_dim,)`.
      In a `Conv2D` layer with `data_format="channels_last"`,
      the weight tensor has shape
      `(rows, cols, input_depth, output_depth)`,
      set `axis` to `[0, 1, 2]`
      to constrain the weights of each filter tensor of size
      `(rows, cols, input_depth)`.

     r   c                 C   s   || _ || _d S N	max_valueaxis)r   r   r   r   r   r   __init__k   s    zMaxNorm.__init__c                 C   sB   t tjt|| jdd}t |d| j}||t  |   S )NTr   keepdimsr   )	r   sqrtr	   
reduce_sumsquarer   clipr   epsilonr   r   normsZdesiredr   r   r   r   o   s
    zMaxNorm.__call__c                 C   s   | j | jdS )Nr   r   r   r   r   r   r   v   s    zMaxNorm.get_configN)r   r   	r   r   r   r   r   r   do_not_generate_docsr   r   r   r   r   r   r   R   s   

r   zkeras.constraints.NonNegzkeras.constraints.non_negc                   @   s   e Zd ZdZdd ZdS )NonNegzyConstrains the weights to be non-negative.
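
# Illustrative usage sketch (never called at import time): applies `MaxNorm`
# directly to a small weight matrix so the `axis=0` semantics are concrete.
# The helper name and the tensor values are assumptions made for the example.
def _example_max_norm():
  w = backend.constant([[3.0, 0.0], [4.0, 10.0]])  # column L2 norms: 5.0, 10.0
  constrained = MaxNorm(max_value=2.0, axis=0)(w)
  # Each column of `constrained` now has an L2 norm of (at most) 2.0.
  return constrained
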

@keras_export('keras.constraints.NonNeg', 'keras.constraints.non_neg')
class NonNeg(Constraint):
  """Constrains the weights to be non-negative.

  Also available via the shortcut function `tf.keras.constraints.non_neg`.
  """

  def __call__(self, w):
    # Zero out negative entries; non-negative entries pass through unchanged.
    return w * math_ops.cast(math_ops.greater_equal(w, 0.), backend.floatx())

ddZejdd Zejdd Zd	S )UnitNorma  Constrains the weights incident to each hidden unit to have unit norm.

  Also available via the shortcut function `tf.keras.constraints.unit_norm`.

  Args:
    axis: integer, axis along which to calculate weight norms.
      For instance, in a `Dense` layer the weight matrix
      has shape `(input_dim, output_dim)`,
      set `axis` to `0` to constrain each weight vector
      of length `(input_dim,)`.
      In a `Conv2D` layer with `data_format="channels_last"`,
      the weight tensor has shape
      `(rows, cols, input_depth, output_depth)`,
      set `axis` to `[0, 1, 2]`
      to constrain the weights of each filter tensor of size
      `(rows, cols, input_depth)`.
  r   c                 C   s
   || _ d S r   r   )r   r   r   r   r   r      s    zUnitNorm.__init__c              	   C   s*   |t  t tjt|| jdd  S )NTr    )r   r&   r"   r	   r#   r$   r   r   r   r   r   r      s    
zUnitNorm.__call__c                 C   s
   d| j iS )Nr   r1   r   r   r   r   r      s    zUnitNorm.get_configN)r   r)   r   r   r   r   r0      s   

r0   zkeras.constraints.MinMaxNormzkeras.constraints.min_max_normc                   @   s6   e Zd ZdZdddZejdd Zejd	d
 ZdS )
@keras_export('keras.constraints.MinMaxNorm', 'keras.constraints.min_max_norm')
class MinMaxNorm(Constraint):
  """MinMaxNorm weight constraint.

  Constrains the weights incident to each hidden unit
  to have the norm between a lower bound and an upper bound.

  Also available via the shortcut function `tf.keras.constraints.min_max_norm`.

  Args:
    min_value: the minimum norm for the incoming weights.
    max_value: the maximum norm for the incoming weights.
    rate: rate for enforcing the constraint: weights will be
      rescaled to yield
      `(1 - rate) * norm + rate * norm.clip(min_value, max_value)`.
      Effectively, this means that rate=1.0 stands for strict
      enforcement of the constraint, while rate<1.0 means that
      weights will be rescaled at each step to slowly move
      towards a value inside the desired interval.
    axis: integer, axis along which to calculate weight norms.
      For instance, in a `Dense` layer the weight matrix
      has shape `(input_dim, output_dim)`,
      set `axis` to `0` to constrain each weight vector
      of length `(input_dim,)`.
      In a `Conv2D` layer with `data_format="channels_last"`,
      the weight tensor has shape
      `(rows, cols, input_depth, output_depth)`,
      set `axis` to `[0, 1, 2]`
      to constrain the weights of each filter tensor of size
      `(rows, cols, input_depth)`.
  """

  def __init__(self, min_value=0.0, max_value=1.0, rate=1.0, axis=0):
    self.min_value = min_value
    self.max_value = max_value
    self.rate = rate
    self.axis = axis

  @doc_controls.do_not_generate_docs
  def __call__(self, w):
    # Blend the current norm with its clipped value according to `self.rate`,
    # then rescale `w` towards that target norm.
    norms = backend.sqrt(
        math_ops.reduce_sum(math_ops.square(w), axis=self.axis, keepdims=True))
    desired = (
        self.rate * backend.clip(norms, self.min_value, self.max_value) +
        (1 - self.rate) * norms)
    return w * (desired / (backend.epsilon() + norms))

  @doc_controls.do_not_generate_docs
  def get_config(self):
    return {
        'min_value': self.min_value,
        'max_value': self.max_value,
        'rate': self.rate,
        'axis': self.axis
    }

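
# Illustrative sketch (never called at import time) of the `rate` blending
# described above: with `rate=0.5`, a column whose norm is 10.0 and an upper
# bound of 2.0 is rescaled towards norm 0.5 * 2.0 + 0.5 * 10.0 = 6.0 rather
# than being clipped all the way down to 2.0. The helper name and tensor
# values are assumptions made for the example.
def _example_min_max_norm_rate():
  w = backend.constant([[6.0], [8.0]])  # single column with L2 norm 10.0
  partially_constrained = MinMaxNorm(
      min_value=0.0, max_value=2.0, rate=0.5, axis=0)(w)
  # The column norm of `partially_constrained` is now (approximately) 6.0.
  return partially_constrained
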

@keras_export('keras.constraints.RadialConstraint',
              'keras.constraints.radial_constraint')
class RadialConstraint(Constraint):
  """Constrains `Conv2D` kernel weights to be the same for each radius.

  Also available via the shortcut function
  `tf.keras.constraints.radial_constraint`.

  For example, the desired output for the following 4-by-4 kernel:

  ```
      kernel = [[v_00, v_01, v_02, v_03],
                [v_10, v_11, v_12, v_13],
                [v_20, v_21, v_22, v_23],
                [v_30, v_31, v_32, v_33]]
  ```

  is this:

  ```
      kernel = [[v_11, v_11, v_11, v_11],
                [v_11, v_33, v_33, v_11],
                [v_11, v_33, v_33, v_11],
                [v_11, v_11, v_11, v_11]]
  ```

  This constraint can be applied to any `Conv2D` layer version, including
  `Conv2DTranspose` and `SeparableConv2D`, and with either `"channels_last"` or
  `"channels_first"` data format. The method assumes the weight tensor is of
  shape `(rows, cols, input_depth, output_depth)`.
  """

  @doc_controls.do_not_generate_docs
  def __call__(self, w):
    w_shape = w.shape
    if w_shape.rank is None or w_shape.rank != 4:
      raise ValueError(
          'The weight tensor must be of rank 4, but is of shape: %s' % w_shape)

    height, width, channels, kernels = w_shape
    # Collapse the channel and kernel dimensions so each 2-D slice can be
    # constrained independently, then restore the original shape.
    w = backend.reshape(w, (height, width, channels * kernels))
    w = backend.map_fn(
        self._kernel_constraint,
        backend.stack(array_ops.unstack(w, axis=-1), axis=0))
    return backend.reshape(
        backend.stack(array_ops.unstack(w, axis=0), axis=-1),
        (height, width, channels, kernels))

  def _kernel_constraint(self, kernel):
    """Radially constrains a kernel with shape (height, width, channels)."""
    padding = backend.constant([[1, 1], [1, 1]], dtype='int32')

    kernel_shape = backend.shape(kernel)[0]
    start = backend.cast(kernel_shape / 2, 'int32')

    # Seed the result with a single element taken near the centre of the
    # kernel; the index used for the while loop below differs for odd- and
    # even-sized kernels.
    kernel_new = backend.switch(
        backend.cast(math_ops.floormod(kernel_shape, 2), 'bool'),
        lambda: kernel[start - 1:start, start - 1:start],
        lambda: kernel[start - 1:start, start - 1:start] + backend.zeros(
            (1, 1), dtype=kernel.dtype))
    index = backend.switch(
        backend.cast(math_ops.floormod(kernel_shape, 2), 'bool'),
        lambda: backend.constant(0, dtype='int32'),
        lambda: backend.constant(1, dtype='int32'))
    while_condition = lambda index, *args: backend.less(index, start)

    def body_fn(i, array):
      # Grow the kernel outwards one ring at a time, padding with the diagonal
      # element `kernel[start + i, start + i]` so that every element at the
      # same radius shares a value.
      return i + 1, array_ops.pad(
          array,
          padding,
          constant_values=kernel[start + i, start + i])

    _, kernel_new = control_flow_ops.while_loop(
        while_condition,
        body_fn, [index, kernel_new],
        shape_invariants=[index.get_shape(),
                          tensor_shape.TensorShape([None, None])])
    return kernel_new

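
# Illustrative usage sketch (never called at import time): `RadialConstraint`
# expects a rank-4 `(rows, cols, input_depth, output_depth)` kernel, such as
# the kernel of a small `Conv2D` layer. The helper name and the random-normal
# kernel are assumptions made for the example.
def _example_radial_constraint():
  kernel = backend.random_normal((3, 3, 1, 1))  # rows, cols, in_depth, out_depth
  constrained = RadialConstraint()(kernel)
  # `constrained` keeps the (3, 3, 1, 1) shape; entries at the same radius from
  # the centre of each 3x3 slice share a single value, as documented above.
  return constrained
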
# Aliases.

max_norm = MaxNorm
non_neg = NonNeg
unit_norm = UnitNorm
min_max_norm = MinMaxNorm
radial_constraint = RadialConstraint

# Legacy aliases.
maxnorm = max_norm
nonneg = non_neg
unitnorm = unit_norm


@keras_export('keras.constraints.serialize')
def serialize(constraint):
  return serialize_keras_object(constraint)


@keras_export('keras.constraints.deserialize')
def deserialize(config, custom_objects=None):
  return deserialize_keras_object(
      config,
      module_objects=globals(),
      custom_objects=custom_objects,
      printable_module_name='constraint')


@keras_export('keras.constraints.get')
def get(identifier):
  if identifier is None:
    return None
  if isinstance(identifier, dict):
    return deserialize(identifier)
  elif isinstance(identifier, str):
    config = {'class_name': str(identifier), 'config': {}}
    return deserialize(config)
  elif callable(identifier):
    return identifier
  else:
    raise ValueError('Could not interpret constraint identifier: ' +
                     str(identifier))
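

# Illustrative sketch (never called at import time) of the identifier handling
# above: `get` accepts an existing `Constraint`, a registered string name such
# as 'unit_norm', or a config dict produced by `serialize`. The helper name is
# an assumption made for the example.
def _example_get_and_serialize():
  by_string = get('unit_norm')                 # -> a UnitNorm instance
  config = serialize(MaxNorm(max_value=3.0, axis=0))
  round_tripped = deserialize(config)          # -> an equivalent MaxNorm
  return by_string, round_tripped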