a
    Sic`A                     @   s   d Z ddlm  mZ ddlmZ ddlmZ ddl	m
Z
 ddlmZ ddlmZ ddlmZ dd	lmZ dd
lmZ edG dd deZdS )z8Wrapper layer to apply every temporal slice of an input.    N)backend)Layer)	InputSpec)Wrapper)generic_utils)layer_utils)tf_utils)keras_exportzkeras.layers.TimeDistributedc                       sZ   e Zd ZdZ fddZdddZdd Z fd	d
Zdd ZdddZ	dddZ
  ZS )TimeDistributeda  This wrapper allows to apply a layer to every temporal slice of an input.

    Every input should be at least 3D, and the dimension of index one of the
    first input will be considered to be the temporal dimension.

    Consider a batch of 32 video samples, where each sample is a 128x128 RGB
    image with `channels_last` data format, across 10 timesteps.
    The batch input shape is `(32, 10, 128, 128, 3)`.

    You can then use `TimeDistributed` to apply the same `Conv2D` layer to each
    of the 10 timesteps, independently:

    >>> inputs = tf.keras.Input(shape=(10, 128, 128, 3))
    >>> conv_2d_layer = tf.keras.layers.Conv2D(64, (3, 3))
    >>> outputs = tf.keras.layers.TimeDistributed(conv_2d_layer)(inputs)
    >>> outputs.shape
    TensorShape([None, 10, 126, 126, 64])

    Because `TimeDistributed` applies the same instance of `Conv2D` to each of
    the timestamps, the same set of weights are used at each timestamp.

    Args:
      layer: a `tf.keras.layers.Layer` instance.

    Call arguments:
      inputs: Input tensor of shape (batch, time, ...) or nested tensors,
        and each of which has shape (batch, time, ...).
      training: Python boolean indicating whether the layer should behave in
        training mode or in inference mode. This argument is passed to the
        wrapped layer (only if the layer supports this argument).
      mask: Binary tensor of shape `(samples, timesteps)` indicating whether
        a given timestep should be masked. This argument is passed to the
        wrapped layer (only if the layer supports this argument).

    Raises:
      ValueError: If not initialized with a `tf.keras.layers.Layer` instance.
    c                    sP   t |tstd| t j|fi | d| _t|oHt|dd | _	d S )Nz]Please initialize `TimeDistributed` layer with a `tf.keras.layers.Layer` instance. Received: TstatefulF)

isinstancer   
ValueErrorsuper__init__supports_maskingr   is_builtin_layergetattr_always_use_reshape)selflayerkwargs	__class__ ]/var/www/html/django/DPS/env/lib/python3.9/site-packages/keras/layers/rnn/time_distributed.pyr   H   s    
zTimeDistributed.__init__Nc                 C   s   |du rt ||d }t|tjr.| }tdd |D sL|t| S t |}t	|}t
|D ]\}}|sf|||  ||< qf|t| S )aa  Finds non-specific dimensions in the static shapes.

        The static shapes are replaced with the corresponding dynamic shapes of
        the tensor.
        Args:
          init_tuple: a tuple, the first part of the output shape
          tensor: the tensor from which to get the (static and dynamic) shapes
            as the last part of the output shape
          start_idx: int, which indicate the first dimension to take from
            the static shape of the tensor
          int_shape: an alternative static shape to take as the last part
            of the output shape
        Returns:
          The new int_shape with the first part from init_tuple
          and the last part from either `int_shape` (if provided)
          or `tensor.shape`, where every `None` is replaced by
          the corresponding dimension from `tf.shape(tensor)`.
        Nc                 s   s   | ]}| V  qd S Nr   ).0sr   r   r   	<genexpr>o       z3TimeDistributed._get_shape_tuple.<locals>.<genexpr>)r   	int_shaper   tfTensorShapeas_listanytupleshapelist	enumerate)r   Z
init_tupletensor	start_idxr    r&   ir   r   r   r   _get_shape_tupleW   s    
z TimeDistributed._get_shape_tuplec                 C   s$   |  }t|d g|dd   S )Nr      r#   r!   r"   )r   dimsr   r   r   _remove_timestepsx   s    z!TimeDistributed._remove_timestepsc                    s   t j|dd}tjtjdd |}tdd |D rHtd| tjdd || _tj| j	|}t |}t
 t| d	| _d S )
NF	to_tuplesc                 S   s   | j S r   )ndimsxr   r   r   <lambda>   r   z'TimeDistributed.build.<locals>.<lambda>c                 s   s   | ]}|d k V  qdS )   Nr   )r   dimr   r   r   r      r   z(TimeDistributed.build.<locals>.<genexpr>za`TimeDistributed` Layer should be passed an `input_shape ` with at least 3 dimensions, received: c                 S   s   t d d g|  dd   dS )Nr-   )r&   )r   r#   r4   r   r   r   r6      r   T)r   convert_shapesr!   nestflattenmap_structurer$   r   
input_specr0   r   buildr%   built)r   input_shape
input_dimschild_input_shaper   r   r   r>   |   s(    
zTimeDistributed.buildc                    sl   t j|dd}tj| j|}| j|}t j|dd}t | tj d   fdd}tj||S )NFr1      c                    s&   |   } t| d  g| dd   S )Nr   rC   r.   )r/   	timestepsr   r   insert_timesteps   s    z>TimeDistributed.compute_output_shape.<locals>.insert_timesteps)	r   r9   r!   r:   r<   r0   r   compute_output_shaper;   )r   r@   rB   Zchild_output_shaperF   r   rD   r   rG      s    
z$TimeDistributed.compute_output_shapec                    s  i t jjdr|d< tjdd |}t|}tj	|d }|rԈj
st|\}d ut| tj	 d  fdd}tj||g rd n |dd	\}}}tjfd
d|}	ntjdd |tj	trJtjdd |}
tjdd |}j|
fi }	tjtjj|	|}	n>trftd| n"t| tj	 d   stjdd | t tj	 d  tjfdd|}tjj|tj||}t jjdr|d urd|d}t||d< j|fi }	|}tj fdd|	|}tjj|	tj|	|}	t stjj|	dd |	| |	S )Ntrainingc                 S   s   t t| S r   r!   r"   r   r    r4   r   r   r   r6      r   z&TimeDistributed.call.<locals>.<lambda>r   rC   c                    s   j | fi  }|g fS r   )r   )r5   _output)r   r   r   r   step   s    z"TimeDistributed.call.<locals>.stepF)initial_statesinput_lengthmaskunrollc                    s   t  | S r   )r   maybe_convert_to_ragged)rK   )is_ragged_inputrow_lengthsr   r   r6      s   c                 S   s   t | tjS r   r   r!   RaggedTensorr4   r   r   r   r6      r   c                 S   s   | j S r   )valuesr4   r   r   r   r6      r   c                 S   s   |   d S )Nr   )nested_row_lengthsr4   r   r   r   r6      r   zDAll inputs has to be either ragged or not, but not mixed. Received: c                 S   s   t | d S NrC   )r!   r&   r4   r   r   r   r6      r   c                    s     d| dS Nr-   r,   r4   r   r   r   r6      r   rO   rZ   r-   c                    s    d f| d|dd  S )Nr[   rC   r-   r\   )r)   r    )rN   r   r   r   r6     s   c                 S   s
   |  |S r   )	set_shape)r)   r&   r   r   r   r6     r   )r   has_argr   callr!   r:   r<   r   r9   r;   r   r   convert_inputs_if_raggedrnnallrU   from_row_lengthsr$   r   to_list__internal__map_structure_up_toreshaper,   rG   executing_eagerly)r   inputsrH   rO   r@   
batch_sizerL   rJ   outputsyinput_valuesZinput_row_lenghtsinner_input_shapeinner_mask_shapeoutput_shaper   )rN   rR   r   rS   r   r   r`      s    













zTimeDistributed.callc                    s  t jdd |}tj|dd}t|}t j|d }t jdd |}tt j|}|rh jrpt	|rt|S |}|dur 
d|d	}t||}t j fd
d|}t jj|t j||}	 j|	|}
|
du r|du rdS |}
td	tt|D ]}tj	|
dd}
qnt|}t j|d }|s\t jdd |}t j|d }t|
}|du r|durt|}n(tt j|d }t|dd } 
d|f|
d|dd }t|
|}
|
S )a  Computes an output mask tensor for Embedding layer.

        This is based on the inputs, mask, and the inner layer.
        If batch size is specified:
        Simply return the input `mask`. (An rnn-based implementation with
        more than one rnn inputs is required but not supported in tf.keras yet.)
        Otherwise we call `compute_mask` of the inner layer at each time step.
        If the output mask at each time step is not `None`:
        (E.g., inner layer is Masking or RNN)
        Concatenate all of them and return the concatenation.
        If the output mask at each time step is `None` and the input mask is not
        `None`:(E.g., inner layer is Dense)
        Reduce the input_mask to 2 dimensions and return it.
        Otherwise (both the output mask and the input mask are `None`):
        (E.g., `mask` is not used at all)
        Return `None`.

        Args:
          inputs: Tensor with shape [batch size, timesteps, ...] indicating the
            input to TimeDistributed. If static shape information is available
            for "batch size", `mask` is returned unmodified.
          mask: Either None (indicating no masking) or a Tensor indicating the
            input mask for TimeDistributed. The shape can be static or dynamic.

        Returns:
          Either None (no masking), or a [batch size, timesteps, ...] Tensor
          with an output mask for the TimeDistributed layer with the shape
          beyond the second dimension being the value of the input mask shape(if
          the computed output mask is none), an output mask with the shape
          beyond the first dimension being the value of the mask shape(if mask
          is not None) or output mask with the shape beyond the first dimension
          being the value of the computed output shape.

        c                 S   s   t t| S r   rI   r4   r   r   r   r6   >  r   z.TimeDistributed.compute_mask.<locals>.<lambda>Fr1   r   c                 S   s   t | tjS r   rT   r4   r   r   r   r6   D  r   NrZ   r-   c                    s     d| dS rY   r\   )r)   r]   r   r   r6   S  r   r[   )axisrC   c                 S   s   t | d S rX   )r   r&   r4   r   r   r   r6   g  r   )r!   r:   r<   r   r9   r;   r   re   r   r$   r,   r   rh   rf   rg   r   compute_maskrangelenr    rG   )r   rj   rO   r@   rk   rR   Z
inner_maskrp   ro   Zinner_inputsoutput_maskrJ   rN   Zoutput_mask_int_shapeZoutput_mask_shaper   r]   r   rs     sr    %








zTimeDistributed.compute_mask)N)NN)N)__name__
__module____qualname____doc__r   r,   r0   r>   rG   r`   rs   __classcell__r   r   r   r   r
       s   &
!
sr
   )rz   tensorflow.compat.v2compatv2r!   kerasr   keras.engine.base_layerr   keras.engine.input_specr   keras.layers.rnn.base_wrapperr   keras.utilsr   r   r    tensorflow.python.util.tf_exportr	   r
   r   r   r   r   <module>   s   