"""Adapter module that converts different input data objects into `tf.data.Dataset`s."""

import abc
import contextlib
import functools
import itertools
import math
import random

import numpy as np
import tensorflow.compat.v2 as tf

from keras import backend
from keras.engine import training_utils
from keras.utils import data_utils
from keras.utils import dataset_creator
from keras.utils import tf_utils

from tensorflow.python.distribute.input_lib import DistributedDataset
from tensorflow.python.eager import context
from tensorflow.python.framework import type_spec
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.util.tf_export import keras_export

try:
    import pandas as pd
except ImportError:
    pd = None

keras_data_adapter_gauge = tf.__internal__.monitoring.BoolGauge(
    "/tensorflow/api/keras/data_adapters", "keras data adapter usage", "method"
)


class DataAdapter(object, metaclass=abc.ABCMeta):
    """Base class for input data adapter.

    In TF 2.0, tf.data is the preferred API for users to feed in data. In
    order to simplify the training code path, all input data objects will be
    converted to `tf.data.Dataset` if possible.

    Note that since this class is mainly targeted for TF 2.0, it might have a
    lot of assumptions under the hood, e.g. eager context by default,
    distribution strategy, etc. In the meantime, some legacy feature support
    might be dropped, e.g., `Iterator` from the dataset API in v1.

    The sample usage of this class is like:

    ```
    x = tf.data.Dataset.range(100)
    adapter_cls = [NumpyArrayDataAdapter, ..., DatasetAdapter]
    applicable_adapters = [cls for cls in adapter_cls if cls.can_handle(x)]
    if len(applicable_adapters) != 1:
      raise ValueError("Expect only one adapter class to handle the input")

    dataset = applicable_adapters[0](x).get_dataset()
    for data in dataset:
      # training
    ```
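
    For the `Dataset` input above, the selected adapter is `DatasetAdapter`,
    whose size and batch size are unknown up front (a sketch):

    ```
    adapter = applicable_adapters[0](x)
    print(adapter.get_size())    # None: inferred later by `DataHandler`
    print(adapter.batch_size())  # None: unknown without peeking at a batch
    ```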
    """

    @staticmethod
    def can_handle(x, y=None):
        """Whether the current DataAdapter could handle the input x and y.

        Structure-wise, x and y can be a single object, a list of objects if
        there are multiple inputs/outputs, or a dictionary of objects when the
        inputs/outputs are named.

        Args:
          x: input features.
          y: target labels. Note that y could be None in the case of prediction.

        Returns:
          boolean
        NNotImplementedErrorxy r   U/var/www/html/django/DPS/env/lib/python3.9/site-packages/keras/engine/data_adapter.py
can_handleO   s    zDataAdapter.can_handlec                 K   s$   |  ||s td| j||dS )a\  Create a DataAdapter based on data inputs.

        The caller must make sure to call `can_handle()` first before invoking
        this method. Providing an unsupported data type will result in
        unexpected behavior.

        Args:
          x: input features.
          y: target labels. Note that y could be None in the case of prediction.
          **kwargs: Other keyword arguments for DataAdapter during the
            construction of the tf.dataset.Dataset. For example:
            - Numpy data might have `sample_weights` which will be used for
              weighting the loss function during training.
            - Numpy data might need to have `batch_size` parameter when
              constructing the dataset and iterator.
            - Certain input might need to be distribution strategy aware. When
              `distribution_strategy` is passed, the created dataset needs to
              respect the strategy.
            DataAdapter might choose to ignore any keyword argument if it
            doesn't use it, or raise exception if any required argument is not
            provided.
        z{} Cannot handle input {}, {}N)r   
ValueErrorformat	__class__selfr   r   kwargsr   r   r   __init__`   s    zDataAdapter.__init__c                 C   s   t dS )a  Get a dataset instance for the current DataAdapter.

        Note that the dataset returned does not repeat for epoch, so caller
        might need to create new iterator for the same dataset at the beginning
        of the epoch. This behavior might change in the future.

        Returns:
          A `tf.data.Dataset`. Caller might use the dataset in different
          context, e.g. iter(dataset) in eager to get the value directly, or in
          graph mode, provide the iterator tensor to Keras model function.
        """
        raise NotImplementedError

    @abc.abstractmethod
    def get_size(self):
        """Return the size (number of batches) for the dataset created.

        For certain types of input data, the number of batches is known, e.g.
        for Numpy data it is `ceil(number_of_elements / batch_size)`. For a
        dataset or a python generator, the size is unknown, since it may or
        may not have an end state.

        Returns:
          int, the number of batches for the dataset, or None if it is unknown.
          The caller could use this to control the loop of training, show
          progress bar, or handle unexpected StopIteration error.
        """
        raise NotImplementedError

    @abc.abstractmethod
    def batch_size(self):
        """Return the batch size of the dataset created.

        For certain types of input data the batch size is known, and even
        required, as for a numpy array. For a dataset, the batch size is
        unknown unless we take a peek.

        Returns:
          int, the batch size of the dataset, or None if it is unknown.
        """
        raise NotImplementedError

    def representative_batch_size(self):
        """Return a representative size for batches in the dataset.


        This is not guaranteed to be the batch size for all batches in the
        dataset. It just needs to be a rough approximation for batch sizes in
        the dataset.

        Returns:
          int, a representative size for batches found in the dataset,
          or None if it is unknown.
        """
        return self.batch_size()

    @abc.abstractmethod
    def has_partial_batch(self):
        """Whether the dataset has partial batch at the end."""
        raise NotImplementedError

    @abc.abstractmethod
    def partial_batch_size(self):
        """The size of the final partial batch for dataset.

        Will return None if has_partial_batch is False or batch_size is None.
        """
        raise NotImplementedError

    @abc.abstractmethod
    def should_recreate_iterator(self):
        """Returns whether a new iterator should be created every epoch."""
        raise NotImplementedError

    def get_samples(self):
        """Returns number of samples in the data, or `None`."""
        if not self.get_size() or not self.batch_size():
            return None
        total_sample = self.get_size() * self.batch_size()
        if self.has_partial_batch():
            total_sample -= self.batch_size() - self.partial_batch_size()
        return total_sample

    def on_epoch_end(self):
        """A hook called after each epoch."""
        pass


class TensorLikeDataAdapter(DataAdapter):
    """Adapter that handles Tensor-like objects, e.g. EagerTensor and NumPy."""

    @staticmethod
    def can_handle(x, y=None):
        flat_inputs = tf.nest.flatten(x)
        if y is not None:
            flat_inputs += tf.nest.flatten(y)

        tensor_types = _get_tensor_types()

        def _is_tensor(v):
            if isinstance(v, tensor_types):
                return True
            return False

        return all(_is_tensor(v) for v in flat_inputs)
dd Zdd Zdd Z  ZS )TensorLikeDataAdapterzEAdapter that handles Tensor-like objects, e.g. EagerTensor and NumPy.Nc                    sL   t j| }|d ur$|t j|7 }t fdd t fdd|D S )Nc                    s   t |  rdS dS )NTF)
isinstancev)tensor_typesr   r   
_is_tensor   s    
z4TensorLikeDataAdapter.can_handle.<locals>._is_tensorc                 3   s   | ]} |V  qd S Nr   .0r3   )r5   r   r   	<genexpr>       z3TensorLikeDataAdapter.can_handle.<locals>.<genexpr>)tfnestflatten_get_tensor_typesallr   r   flat_inputsr   )r5   r4   r   r      s    z TensorLikeDataAdapter.can_handle   Fc	                    s  t  j||fi |	 t|||f\}}}t||}tj|||dd\}}
}
t|||}tdd tj	
|D  t|  s|rtt| nd tt  _ _t    _ttrވ _tjjd}dkr|}fdd	}||d} fd
d}||}||}dkrjdd }||}tj }tjj j!j"|j#_$|%|}|_&d S )NTcheck_all_flatc                 s   s   | ]}t |jd  V  qdS r   Nintshaper8   ir   r   r   r9     s   z1TensorLikeDataAdapter.__init__.<locals>.<genexpr>    rB   batchc                    s,   t j t jd}r(dkr(t j|}|S )NdtyperL   )r;   rangeint64randomshuffle)_indices)num_samplesrR   r   r   permutation&  s    z3TensorLikeDataAdapter.__init__.<locals>.permutationc                    s     }t | dg|g}t | g}t jj|}jrft jjt | |gjg}||}dkr~|	d
}|S )a  Convert a Tensor of indices into a dataset of batched indices.

            This step can be accomplished in several ways. The most natural is
            to slice the Tensor in a Dataset map. (With a condition on the upper
            index to handle the partial batch.) However it turns out that
            coercing the Tensor into a shape which is divisible by the batch
            size (and handling the last partial batch separately) allows for a
            much more favorable memory access pattern and improved performance.

            Args:
              indices: Tensor which determines the data order for an entire
                epoch.

            Returns:
              A Dataset of batched indices.
            """
            num_in_full_batch = num_full_batches * batch_size
            first_k_indices = tf.slice(indices, [0], [num_in_full_batch])
            first_k_indices = tf.reshape(
                first_k_indices, [num_full_batches, batch_size]
            )

            flat_dataset = tf.data.Dataset.from_tensor_slices(first_k_indices)
            if self._partial_batch_size:
                index_remainder = tf.data.Dataset.from_tensors(
                    tf.slice(
                        indices,
                        [num_in_full_batch],
                        [self._partial_batch_size],
                    )
                )
                flat_dataset = flat_dataset.concatenate(index_remainder)

            if shuffle == "batch":
                # 1024 is a magic constant that has not been properly
                # evaluated.
                flat_dataset = flat_dataset.shuffle(1024).repeat(epochs)
            return flat_dataset

        indices_dataset = indices_dataset.flat_map(slice_batch_indices)

        dataset = self.slice_inputs(indices_dataset, inputs)

        if shuffle == "batch":

            def shuffle_batch(*batch):
                return tf.nest.map_structure(tf.random.shuffle, batch)

            dataset = dataset.map(shuffle_batch)

        options = tf.data.Options()
        options.experimental_distribute.auto_shard_policy = (
            tf.data.experimental.AutoShardPolicy.DATA
        )
        dataset = dataset.with_options(options)
        self._dataset = dataset

    def slice_inputs(self, indices_dataset, inputs):
        """Slice inputs into a Dataset of batches.

        Given a Dataset of batch indices and the unsliced inputs,
        this step slices the inputs in a parallelized fashion
        and produces a dataset of input batches.

        Args:
          indices_dataset: A Dataset of batched indices
          inputs: A python data structure that contains the inputs, targets,
            and possibly sample weights.

        Returns:
          A Dataset of input batches matching the batch indices.
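
        For a single batch of `indices`, the slicing performed below is
        equivalent to (a sketch):

        ```
        batch = tf.nest.map_structure(
            lambda d: tf.gather(d, indices, axis=0), inputs)
        ```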
        """
        dataset = tf.data.Dataset.zip(
            (indices_dataset, tf.data.Dataset.from_tensors(inputs).repeat())
        )

        def grab_batch(i, data):
            return tf.nest.map_structure(
                lambda d: tf.gather(d, i, axis=0), data
            )

        dataset = dataset.map(grab_batch, num_parallel_calls=tf.data.AUTOTUNE)

        # Default optimizations are disabled to avoid the overhead of
        # (unnecessary) input pipeline graph serialization and
        # deserialization.
        options = tf.data.Options()
        options.experimental_optimization.apply_default_optimizations = False
        if self._shuffle:
            options.experimental_external_state_policy = (
                tf.data.experimental.ExternalStatePolicy.IGNORE
            )
        dataset = dataset.with_options(options)
        return dataset

    def get_dataset(self):
        return self._dataset

    def get_size(self):
        return self._size

    def batch_size(self):
        return self._batch_size

    def has_partial_batch(self):
        return self._partial_batch_size > 0

    def partial_batch_size(self):
        return self._partial_batch_size or None

    def should_recreate_iterator(self):
        # An infinite dataset is always created here.
        return False


class GenericArrayLikeDataAdapter(TensorLikeDataAdapter):
    """Adapter that handles array-like data without forcing it into memory.

    This adapter handles array-like datasets that may be too big to fully
    fit into memory.

    Specifically, this adapter handles any Python class which implements:
    `__get_item__`, `__len__`, `shape`, and `dtype` with the same meanings
    as Numpy, but it ignores any case where all the inputs are Tensors or Numpy
    arrays (because that case is handled by the base TensorLikeDataAdapter).

    It ignores scipy sparse matrices and Composite Tensors because those are
    handled by the CompositeTensorDataAdapter.

    It also does not handle lists/tuples of scalars, because those are handled
    by the ListsOfScalarsDataAdapter.
    Nc                    sb   t j| }|d ur$|t j|7 }dd  t| |sZt| |sZt fdd|D S dS d S )Nc                 S   s(   t | do&t | do&t | do&t | dS )z6Return True if v is a Tensor, array, or is array-like.__getitem__rH   rN   __len__)hasattrr2   r   r   r   _is_array_like  s    
z>GenericArrayLikeDataAdapter.can_handle.<locals>._is_array_likec                 3   s   | ]} |V  qd S r6   r   r7   r   r   r   r9     r:   z9GenericArrayLikeDataAdapter.can_handle.<locals>.<genexpr>F)r;   r<   r=   r0   r   CompositeTensorDataAdapterr?   r@   r   r   r   r     s    	
z&GenericArrayLikeDataAdapter.can_handlec                    s    t d t j|i | d S )NzKeras is training/fitting/evaluating on array-like data. Keras may not be optimized for this format, so if your input data format is supported by TensorFlow I/O (https://github.com/tensorflow/io) we recommend using that to load a Dataset instead.)loggingwarningri   r   )r   argsr   r   r   r   r     s    z$GenericArrayLikeDataAdapter.__init__c                    sd   t jdd dd D d | jr:| jdkr:d  fdd	}|j|t jjd
}|S )r   c                 S   s   t | j}d |d< t|S r   )listrH   tuple)trH   r   r   r   dynamic_shape_like  s    
zDGenericArrayLikeDataAdapter.slice_inputs.<locals>.dynamic_shape_likec                 S   s   g | ]
}|j qS r   rM   r8   inpr   r   r   
<listcomp>  r:   z<GenericArrayLikeDataAdapter.slice_inputs.<locals>.<listcomp>TrL   Fc                    sN    fdd}t || g}t|D ]\}}|| q(t j|S )z%Grab a batch of data from the inputs.c                    s     fddfddD S )Nc                    s   t j|   dS )N)
contiguous)r   slice_arraysnumpyrY   )r   indr   r   slice_array  s    
zdGenericArrayLikeDataAdapter.slice_inputs.<locals>.grab_batch.<locals>.py_method.<locals>.slice_arrayc                    s   g | ]} |qS r   r   r   )r   r   r   r     r:   zcGenericArrayLikeDataAdapter.slice_inputs.<locals>.grab_batch.<locals>.py_method.<locals>.<listcomp>r   )r   )r   rA   )r   r   r   	py_method  s    zOGenericArrayLikeDataAdapter.slice_inputs.<locals>.grab_batch.<locals>.py_method)r;   py_functionr   	set_shaper<   pack_sequence_as)rT   r   flat_outr3   original_inpr   r   flat_dtypesrA   r   r   r   r     s
    z<GenericArrayLikeDataAdapter.slice_inputs.<locals>.grab_batchr   )r;   r<   r=   rw   rx   rY   r   )r   r   r   r   r   r   r   r   r{     s    z(GenericArrayLikeDataAdapter.slice_inputs)N)	r(   r)   r*   r+   r,   r   r   r{   r   r   r   r   r   r     s


class DatasetCreatorAdapter(DataAdapter):
    """Adapter that handles dataset functions."""

    def __init__(
        self, x, y=None, steps=None, distribution_strategy=None, **kwargs
    ):
        super().__init__(x, **kwargs)

        if not isinstance(x, dataset_creator.DatasetCreator):
            raise TypeError(
                "The input of a `DatasetCreatorAdapter` should be a "
                "`DatasetCreator` but it received type {}.".format(type(x))
            )
        if steps is None:
            raise ValueError(
                "When using a `tf.keras.utils.experimental.DatasetCreator`, "
                "`steps_per_epoch`, `validation_steps` or `steps` argument "
                "must be provided in `Model.fit`, `Model.evaluate`, or "
                "`Model.predict`."
            )
        self.x = x
        self.steps = steps
        self.strategy = distribution_strategy

    @staticmethod
    def can_handle(x, y=None):
        if isinstance(x, dataset_creator.DatasetCreator):
            assert y is None
            return True

    def should_recreate_iterator(self):
        # The dataset is expected to be shuffled inside the user-supplied
        # `dataset_fn`, so the iterator is not reset between epochs.
        return False

    def get_size(self):
        return None  # To be inferred by `DataHandler`.

    def get_dataset(self):
        return self.strategy.distribute_datasets_from_function(
            self.x, options=self.x.input_options
        )

    def batch_size(self):
        raise NotImplementedError()

    def has_partial_batch(self):
        raise NotImplementedError()

    def partial_batch_size(self):
        raise NotImplementedError()


class CompositeTensorDataAdapter(DataAdapter):
    """Adapter that handles composite tensor."""

    @staticmethod
    def can_handle(x, y=None):
        flat_inputs = tf.nest.flatten(x)
        if y is not None:
            flat_inputs += tf.nest.flatten(y)

        def _is_composite(v):
            # Datasets and iterators are composite tensors but are handled by
            # `DatasetAdapter` and `GeneratorDataAdapter` instead.
            if (
                tf_utils.is_extension_type(v)
                and not isinstance(v, (tf.data.Dataset, tf.data.Iterator))
                and not _is_distributed_dataset(v)
            ):
                return True
            # Support Scipy sparse tensors if scipy is installed.
            return _is_scipy_sparse(v)

        def _is_tensor_or_composite(v):
            if isinstance(v, (tf.Tensor, np.ndarray)):
                return True
            return _is_composite(v)

        return any(_is_composite(v) for v in flat_inputs) and all(
            _is_tensor_or_composite(v) for v in flat_inputs
        )

    def __init__(
        self,
        x,
        y=None,
        sample_weights=None,
        sample_weight_modes=None,
        batch_size=None,
        steps=None,
        shuffle=False,
        **kwargs,
    ):
        super().__init__(x, y, **kwargs)
        x, y, sample_weights = _process_tensorlike((x, y, sample_weights))
        sample_weight_modes = broadcast_sample_weight_modes(
            sample_weights, sample_weight_modes
        )

        # If sample_weights are not specified for an output, use 1.0 as
        # weights.
        (sample_weights, _, _) = training_utils.handle_partial_sample_weights(
            y, sample_weights, sample_weight_modes, check_all_flat=True
        )

        inputs = pack_x_y_sample_weight(x, y, sample_weights)

        dataset = tf.data.Dataset.from_tensor_slices(inputs)
        num_samples = int(tf.nest.flatten(x)[0].shape[0])
        if shuffle:
            dataset = dataset.shuffle(num_samples)

        # If batch_size is not passed but steps is, calculate from the input
        # data. Defaults to `32` for backwards compatibility.
        if not batch_size:
            batch_size = int(math.ceil(num_samples / steps)) if steps else 32

        dataset = dataset.batch(batch_size)
        self._size = int(math.ceil(num_samples / batch_size))
        self._batch_size = batch_size
        self._has_partial_batch = self._size != (num_samples // batch_size)

        self._partial_batch_size = None
        if self._has_partial_batch:
            self._partial_batch_size = (
                num_samples - (self._size - 1) * self._batch_size
            )

        self._dataset = dataset

    def get_dataset(self):
        return self._dataset

    def get_size(self):
        return self._size

    def batch_size(self):
        return self._batch_size

    def has_partial_batch(self):
        return self._has_partial_batch

    def partial_batch_size(self):
        return self._partial_batch_size

    def should_recreate_iterator(self):
        return True


class ListsOfScalarsDataAdapter(DataAdapter):
    """Adapter that handles lists of scalars and lists of lists of scalars."""

    @staticmethod
    def can_handle(x, y=None):
        handles_x = ListsOfScalarsDataAdapter._is_list_of_scalars(x)
        handles_y = True
        if y is not None:
            handles_y = ListsOfScalarsDataAdapter._is_list_of_scalars(y)
        return handles_x and handles_y

    @staticmethod
    def _is_list_of_scalars(inp):
        if isinstance(inp, (float, int, str, bytes, bytearray)):
            return True
        if isinstance(inp, (list, tuple)) and inp:
            return ListsOfScalarsDataAdapter._is_list_of_scalars(inp[0])
        return False

    def __init__(
        self,
        x,
        y=None,
        sample_weights=None,
        sample_weight_modes=None,
        batch_size=None,
        shuffle=False,
        **kwargs,
    ):
        super().__init__(x, y, **kwargs)
        x = np.asarray(x)
        if y is not None:
            y = np.asarray(y)
        if sample_weights is not None:
            sample_weights = np.asarray(sample_weights)
        sample_weight_modes = broadcast_sample_weight_modes(
            sample_weights, sample_weight_modes
        )

        self._internal_adapter = TensorLikeDataAdapter(
            x,
            y=y,
            sample_weights=sample_weights,
            sample_weight_modes=sample_weight_modes,
            batch_size=batch_size,
            shuffle=shuffle,
            **kwargs,
        )

    def get_dataset(self):
        return self._internal_adapter.get_dataset()

    def get_size(self):
        return self._internal_adapter.get_size()

    def batch_size(self):
        return self._internal_adapter.batch_size()

    def has_partial_batch(self):
        return self._internal_adapter.has_partial_batch()

    def partial_batch_size(self):
        return self._internal_adapter.partial_batch_size()

    def should_recreate_iterator(self):
        return True


class DatasetAdapter(DataAdapter):
    """Adapter that handles `tf.data.Dataset`."""

    @staticmethod
    def can_handle(x, y=None):
        return isinstance(
            x, (tf.compat.v1.data.Dataset, tf.data.Dataset)
        ) or _is_distributed_dataset(x)

    def __init__(self, x, y=None, sample_weights=None, steps=None, **kwargs):
        super().__init__(x, y, **kwargs)
        # Note that the dataset instance is immutable, so it is fine to reuse
        # the user-provided dataset.
        self._dataset = x

        # The user-provided steps.
        self._user_steps = steps

        self._validate_args(y, sample_weights, steps)

    def get_dataset(self):
        return self._dataset

    def get_size(self):
        return None  # Inferred in `DataHandler`.

    def batch_size(self):
        return None

    def has_partial_batch(self):
        return False

    def partial_batch_size(self):
        return None

    def should_recreate_iterator(self):
        # Since DistributedDatasets have no cardinality, the user must provide
        # all steps that need to be run, calling `.repeat()` as needed.
        if _is_distributed_dataset(self._dataset):
            return False

        # If the user doesn't supply `steps`, or supplies `steps` that exactly
        # equals the size of the `Dataset`, create a new iterator each epoch.
        return (
            self._user_steps is None
            or tf.data.experimental.cardinality(self._dataset).numpy()
            == self._user_steps
        )

    def _validate_args(self, y, sample_weights, steps):
        """Validates `__init__` arguments."""
        # Arguments that shouldn't be passed.
        if not is_none_or_empty(y):
            raise ValueError(
                "`y` argument is not supported when using dataset as input."
            )
        if not is_none_or_empty(sample_weights):
            raise ValueError(
                "`sample_weight` argument is not supported when using "
                "dataset as input."
            )

        if steps is None:
            if _is_distributed_dataset(self._dataset):
                raise ValueError(
                    "When providing a distributed dataset, you must specify "
                    "the number of steps to run."
                )

            size = tf.data.experimental.cardinality(self._dataset).numpy()
            if size == tf.data.experimental.INFINITE_CARDINALITY:
                raise ValueError(
                    "When providing an infinite dataset, you must specify "
                    "the number of steps to run (if you did not intend to "
                    "create an infinite dataset, make sure to not call "
                    "`repeat()` on the dataset)."
                )


class GeneratorDataAdapter(DataAdapter):
    """Adapter that handles python generators and iterators."""

    @staticmethod
    def can_handle(x, y=None):
        return (
            (hasattr(x, "__next__") or hasattr(x, "next"))
            and hasattr(x, "__iter__")
            and not isinstance(x, data_utils.Sequence)
        )

    def __init__(
        self,
        x,
        y=None,
        sample_weights=None,
        workers=1,
        use_multiprocessing=False,
        max_queue_size=10,
        model=None,
        **kwargs,
    ):
        # Generators should never shuffle as exhausting the generator in
        # order is a common way to make sure all data is covered.
        kwargs.pop("shuffle", None)

        if not is_none_or_empty(y):
            raise ValueError(
                "`y` argument is not supported when using "
                "python generator as input."
            )
        if not is_none_or_empty(sample_weights):
            raise ValueError(
                "`sample_weight` argument is not supported when using "
                "python generator as input."
            )

        super().__init__(x, y, **kwargs)

        # Since the dtype of a python generator is only known once a batch is
        # seen, peek at a batch to infer the output structure.
        peek, x = self._peek_and_restore(x)
        peek = self._standardize_batch(peek)
        peek = _process_tensorlike(peek)

        # Need to build the Model on concrete input shapes.
        if model is not None and not model.built:
            concrete_x, _, _ = unpack_x_y_sample_weight(peek)
            try:
                model.distribute_strategy.run(
                    lambda x: model(x, training=False), args=(concrete_x,)
                )
            except NotImplementedError:
                # The above call may fail if the model is a container-like
                # class that does not implement its own forward pass.
                pass

        self._first_batch_size = int(tf.nest.flatten(peek)[0].shape[0])

        def _get_tensor_spec(t):
            return type_spec.type_spec_from_value(
                t
            )._with_tensor_ranks_only()

        output_signature = tf.nest.map_structure(_get_tensor_spec, peek)

        # The dataset API takes a callable that creates a generator object,
        # rather than a generator itself, hence the function wrapper below.
        generator_fn = self._handle_multiprocessing(
            x, workers, use_multiprocessing, max_queue_size
        )

        def wrapped_generator():
            for data in generator_fn():
                yield self._standardize_batch(data)

        dataset = tf.data.Dataset.from_generator(
            wrapped_generator, output_signature=output_signature
        )

        if workers == 1 and not use_multiprocessing:
            dataset = dataset.prefetch(1)

        self._dataset = dataset

    def _standardize_batch(self, data):
        """Standardizes a batch output by a generator."""
        # Removes `None`s.
        x, y, sample_weight = unpack_x_y_sample_weight(data)
        data = pack_x_y_sample_weight(x, y, sample_weight)

        data = tf.__internal__.nest.list_to_tuple(data)

        def _convert_dtype(t):
            if isinstance(t, np.ndarray) and issubclass(
                t.dtype.type, np.floating
            ):
                return np.array(t, dtype=backend.floatx())
            return t

        data = tf.nest.map_structure(_convert_dtype, data)
        return data

    @staticmethod
    def _peek_and_restore(x):
        peek = next(x)
        return peek, itertools.chain([peek], x)

    def _handle_multiprocessing(
        self, x, workers, use_multiprocessing, max_queue_size
    ):
        """Create a callable, possibly including an Enqueuer."""
        if workers > 1 or (workers > 0 and use_multiprocessing):

            def generator_fn():
                enqueuer = data_utils.GeneratorEnqueuer(
                    x, use_multiprocessing=use_multiprocessing
                )
                enqueuer.start(workers=workers, max_queue_size=max_queue_size)
                return enqueuer.get()

        else:
            generator_fn = lambda: x
        return generator_fn

    def get_dataset(self):
        return self._dataset

    def get_size(self):
        return None

    def batch_size(self):
        return None

    def representative_batch_size(self):
        return self._first_batch_size

    def has_partial_batch(self):
        return False

    def partial_batch_size(self):
        return None

    def should_recreate_iterator(self):
        return False


class KerasSequenceAdapter(GeneratorDataAdapter):
    """Adapter that handles `keras.utils.Sequence`."""

    @staticmethod
    def can_handle(x, y=None):
        return isinstance(x, data_utils.Sequence)

    def __init__(
        self,
        x,
        y=None,
        sample_weights=None,
        shuffle=False,
        workers=1,
        use_multiprocessing=False,
        max_queue_size=10,
        model=None,
        **kwargs,
    ):
        if not is_none_or_empty(y):
            raise ValueError(
                "`y` argument is not supported when using "
                "`keras.utils.Sequence` as input."
            )
        if not is_none_or_empty(sample_weights):
            raise ValueError(
                "`sample_weight` argument is not supported when using "
                "`keras.utils.Sequence` as input."
            )

        self._shuffle_sequence = shuffle
        self._keras_sequence = x
        self._enqueuer = None
        super().__init__(
            x,
            shuffle=False,  # Shuffle is handed off to the python generator.
            workers=workers,
            use_multiprocessing=use_multiprocessing,
            max_queue_size=max_queue_size,
            model=model,
            **kwargs,
        )

    @staticmethod
    def _peek_and_restore(x):
        return x[0], x

    def _handle_multiprocessing(
        self, x, workers, use_multiprocessing, max_queue_size
    ):
        if workers > 1 or (workers > 0 and use_multiprocessing):

            def generator_fn():
                self._enqueuer = data_utils.OrderedEnqueuer(
                    x,
                    use_multiprocessing=use_multiprocessing,
                    shuffle=self._shuffle_sequence,
                )
                self._enqueuer.start(
                    workers=workers, max_queue_size=max_queue_size
                )
                return self._enqueuer.get()

        else:

            def generator_fn():
                order = range(len(x))
                if self._shuffle_sequence:
                    # Match the shuffle convention in OrderedEnqueuer.
                    order = list(order)
                    random.shuffle(order)

                for i in order:
                    yield x[i]

        return generator_fn

    def get_size(self):
        return len(self._keras_sequence)

    def should_recreate_iterator(self):
        return True

    def on_epoch_end(self):
        if self._enqueuer:
            self._enqueuer.stop()
        self._keras_sequence.on_epoch_end()


ALL_ADAPTER_CLS = [
    ListsOfScalarsDataAdapter,
    TensorLikeDataAdapter,
    GenericArrayLikeDataAdapter,
    DatasetAdapter,
    GeneratorDataAdapter,
    KerasSequenceAdapter,
    CompositeTensorDataAdapter,
    DatasetCreatorAdapter,
]


def select_data_adapter(x, y):
    """Selects a data adapter that can handle a given x and y."""
    adapter_cls = [cls for cls in ALL_ADAPTER_CLS if cls.can_handle(x, y)]
    if not adapter_cls:
        raise ValueError(
            "Failed to find data adapter that can handle input: {}, {}".format(
                _type_name(x), _type_name(y)
            )
        )
    elif len(adapter_cls) > 1:
        raise RuntimeError(
            "Data adapters should be mutually exclusive for handling inputs. "
            "Found multiple adapters {} to handle input: {}, {}".format(
                adapter_cls, _type_name(x), _type_name(y)
            )
        )
    # Instrument the data adapter usage before returning it.
    keras_data_adapter_gauge.get_cell(adapter_cls[0].__name__).set(True)
    return adapter_cls[0]


def _type_name(x):
    """Generates a description of the type of an object."""
    if isinstance(x, dict):
        key_types = set(_type_name(key) for key in x.keys())
        val_types = set(_type_name(val) for val in x.values())
        return "({} containing {} keys and {} values)".format(
            type(x), key_types, val_types
        )
    if isinstance(x, (list, tuple)):
        types = set(_type_name(val) for val in x)
        return "({} containing values of types {})".format(type(x), types)
    return str(type(x))


def _process_tensorlike(inputs):
    """Process tensor-like inputs.

    This function:

    (1) Converts `Numpy` arrays to `Tensor`s.
    (2) Converts `Scipy` sparse matrices to `SparseTensor`s.
    (3) Converts `pandas.Series` to `Tensor`s
    (4) Converts `list`s to `tuple`s (for `tf.data` support).

    Args:
      inputs: Structure of `Tensor`s, `NumPy` arrays, or tensor-like.

    Returns:
      Structure of `Tensor`s or tensor-like.
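
    Example (a sketch, assuming pandas is available):

    ```
    series = pd.Series([1.0, 2.0, 3.0])
    tensor = _process_tensorlike(series)
    # -> tf.Tensor of shape (3, 1) and dtype `backend.floatx()`
    ```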
    """

    def _convert_single_tensor(x):
        if _is_pandas_series(x):
            x = np.expand_dims(x.to_numpy(), axis=-1)

        if isinstance(x, np.ndarray):
            dtype = None
            if issubclass(x.dtype.type, np.floating):
                dtype = backend.floatx()
            return tf.convert_to_tensor(x, dtype=dtype)
        elif _is_scipy_sparse(x):
            return _scipy_sparse_to_sparse_tensor(x)
        return x

    inputs = tf.nest.map_structure(_convert_single_tensor, inputs)
    return tf.__internal__.nest.list_to_tuple(inputs)


def is_none_or_empty(inputs):
    # A plain `not inputs` check would raise "The truth value of an array
    # with more than one element is ambiguous" for numpy arrays, so flatten
    # through tf.nest instead.
    return inputs is None or not tf.nest.flatten(inputs)


def broadcast_sample_weight_modes(target_structure, sample_weight_modes):
    """Match sample_weight_modes structure with output structure."""
    if target_structure is None or not tf.nest.flatten(target_structure):
        return sample_weight_modes

    if isinstance(sample_weight_modes, str):
        if isinstance(target_structure, dict):
            return {
                key: sample_weight_modes for key in target_structure.keys()
            }
        return [sample_weight_modes for _ in target_structure]

    if sample_weight_modes:
        try:
            tf.nest.assert_same_structure(
                training_utils.list_to_tuple(target_structure),
                training_utils.list_to_tuple(sample_weight_modes),
            )
        except (ValueError, TypeError):
            target_str = str(
                tf.nest.map_structure(lambda _: "...", target_structure)
            )
            mode_str = str(
                tf.nest.map_structure(lambda _: "...", sample_weight_modes)
            )

            # Attempt to coerce `sample_weight_modes` to the target structure.
            # This implicitly depends on the fact that Model flattens outputs
            # for its internal representation.
            try:
                sample_weight_modes = tf.nest.pack_sequence_as(
                    target_structure, tf.nest.flatten(sample_weight_modes)
                )
                logging.warning(
                    "sample_weight modes were coerced from\n  {}\n    to  "
                    "\n  {}".format(target_str, mode_str)
                )
            except (ValueError, TypeError):
                raise ValueError(
                    "Unable to match target structure and "
                    "sample_weight_modes structure:\n  {}\n    to  "
                    "\n  {}".format(target_str, mode_str)
                )

    return sample_weight_modes


class DataHandler:
    """Handles iterating over epoch-level `tf.data.Iterator` objects."""

    def __init__(
        self,
        x,
        y=None,
        sample_weight=None,
        batch_size=None,
        steps_per_epoch=None,
        initial_epoch=0,
        epochs=1,
        shuffle=False,
        class_weight=None,
        max_queue_size=10,
        workers=1,
        use_multiprocessing=False,
        model=None,
        steps_per_execution=None,
        distribute=True,
    ):
        """Initializes a `DataHandler`.

        Arguments:
          x: See `Model.fit`.
          y: See `Model.fit`.
          sample_weight: See `Model.fit`.
          batch_size: See `Model.fit`.
          steps_per_epoch: See `Model.fit`.
          initial_epoch: See `Model.fit`.
          epochs: See `Model.fit`.
          shuffle: See `Model.fit`.
          class_weight: See `Model.fit`.
          max_queue_size: See `Model.fit`.
          workers: See `Model.fit`.
          use_multiprocessing: See `Model.fit`.
          model: The `Model` instance. Needed in order to correctly `build` the
            `Model` using generator-like inputs (see `GeneratorDataAdapter`).
          steps_per_execution: See `Model.compile`.
          distribute: Whether to distribute the `tf.dataset`.
            `PreprocessingLayer.adapt` does not support distributed datasets,
            `Model` should always set this to `True`.
        """
        self._initial_epoch = initial_epoch
        self._initial_step = 0
        self._epochs = epochs
        self._insufficient_data = False
        self._model = model

        self._steps_per_epoch = steps_per_epoch

        # `steps_per_execution` is mutable; it may be reassigned below to
        # handle the last partial execution of an epoch.
        if steps_per_execution is None:
            self._steps_per_execution = tf.Variable(1)
        else:
            self._steps_per_execution = steps_per_execution

        adapter_cls = select_data_adapter(x, y)
        self._adapter = adapter_cls(
            x,
            y,
            batch_size=batch_size,
            steps=steps_per_epoch,
            epochs=epochs - initial_epoch,
            sample_weights=sample_weight,
            shuffle=shuffle,
            max_queue_size=max_queue_size,
            workers=workers,
            use_multiprocessing=use_multiprocessing,
            distribution_strategy=tf.distribute.get_strategy(),
            model=model,
        )

        strategy = tf.distribute.get_strategy()

        self._current_step = 0
        self._step_increment = self._steps_per_execution.numpy().item() - 1
        self._insufficient_data = False

        self._configure_dataset_and_inferred_steps(
            strategy, x, steps_per_epoch, class_weight, distribute
        )

    def _configure_dataset_and_inferred_steps(
        self, strategy, x, steps_per_epoch, class_weight, distribute
    ):
        """Configure the `_dataset` and `_inferred_steps` attributes."""
        del x
        dataset = self._adapter.get_dataset()
        if class_weight:
            dataset = dataset.map(_make_class_weight_map_fn(class_weight))
        self._inferred_steps = self._infer_steps(steps_per_epoch, dataset)

        # `PreprocessingLayer.adapt` does not support distributed datasets,
        # so it passes `distribute=False`.
        if distribute and not _is_distributed_dataset(dataset):
            dataset = strategy.experimental_distribute_dataset(dataset)
        self._dataset = dataset
        self._validate_data_handler()

    def enumerate_epochs(self):
        """Yields `(epoch, tf.data.Iterator)`."""
        with self._truncate_execution_to_epoch():
            data_iterator = iter(self._dataset)
            for epoch in range(self._initial_epoch, self._epochs):
                if self._insufficient_data:  # Set by `catch_stop_iteration`.
                    break
                if self._adapter.should_recreate_iterator():
                    data_iterator = iter(self._dataset)
                yield epoch, data_iterator
                self._adapter.on_epoch_end()

    @contextlib.contextmanager
    def _truncate_execution_to_epoch(self):
        """Truncates steps per execution to at most one epoch."""
        should_truncate = (
            self._inferred_steps is not None
            and self._steps_per_execution.numpy().item()
            > self._inferred_steps
        )
        original_value = self._steps_per_execution.numpy().item()
        try:
            if should_truncate:
                self._steps_per_execution.assign(self._inferred_steps)
            yield
        finally:
            if should_truncate:
                self._steps_per_execution.assign(original_value)

    def sync(self):
        context.async_wait()

    @contextlib.contextmanager
    def catch_stop_iteration(self):
        """Catches errors when an iterator runs out of data."""
        try:
            yield
            self.sync()
        except (StopIteration, tf.errors.OutOfRangeError):
            if self._inferred_steps is None:
                self._inferred_steps = self._current_step
            else:
                self._insufficient_data = True
                total_epochs = self._epochs - self._initial_epoch
                logging.warning(
                    "Your input ran out of data; interrupting training. "
                    "Make sure that your dataset or generator can generate "
                    "at least `steps_per_epoch * epochs` batches (in this "
                    "case, {} batches). You may need to use the repeat() "
                    "function when building your dataset.".format(
                        total_epochs * self._inferred_steps
                    )
                )

    def steps(self):
        """Yields steps for the current epoch."""
        self._current_step = self._initial_step
        self._initial_step = 0
        # `self._inferred_steps` can be changed by `catch_stop_iteration`.
        while (
            self._inferred_steps is None
            or self._current_step < self._inferred_steps
        ):
            if self._insufficient_data:  # Set by `catch_stop_iteration`.
                break

            original_spe = self._steps_per_execution.numpy().item()
            can_run_full_execution = (
                original_spe == 1
                or self._inferred_steps is None
                or self._inferred_steps - self._current_step >= original_spe
            )

            if can_run_full_execution:
                self._step_increment = original_spe - 1
                yield self._current_step
                self._current_step += original_spe
            else:
                # Last partial execution.
                steps_remaining = self._inferred_steps - self._current_step
                self._steps_per_execution.assign(steps_remaining)
                self._step_increment = steps_remaining - 1
                yield self._current_step
                self._current_step += steps_remaining
                self._steps_per_execution.assign(original_spe)

    @property
    def step_increment(self):
        """The number to increment the step for `on_batch_end` methods."""
        return self._step_increment

    @property
    def inferred_steps(self):
        """The inferred steps per epoch of the created `Dataset`.

        This will be `None` in the case where:

        (1) A `Dataset` of unknown cardinality was passed to the
            `DataHandler`,
        (2) `steps_per_epoch` was not provided, and
        (3) The first epoch of iteration has not yet completed.

        Returns:
          The inferred steps per epoch of the created `Dataset`.
        """
        return self._inferred_steps

    @property
    def should_sync(self):
        # Catch OutOfRangeError for Datasets of unknown size.
        # This blocks until the batch has finished executing.
        return self._inferred_steps is None

    def _log_indefinite_training_warning(self):
        logging.warning(
            "The training loop will run indefinitely since you have set "
            "`steps_per_epoch=-1`. Please use batch-level callbacks to save "
            "checkpoints or log training progress, etc"
        )

    def _infer_steps(self, steps, dataset):
        """Infers steps_per_epoch needed to loop through a dataset."""
        if steps == -1:
            self._log_indefinite_training_warning()
            return None

        if steps is not None:
            return steps

        adapter_steps = self._adapter.get_size()
        if adapter_steps is not None:
            return adapter_steps

        if not isinstance(dataset, tf.data.Dataset):
            return None

        size = tf.data.experimental.cardinality(dataset)
        if size == tf.data.experimental.INFINITE_CARDINALITY and steps is None:
            raise ValueError(
                "When passing an infinitely repeating dataset, please specify "
                "a `steps_per_epoch` value so that epoch level callbacks "
                "continue to work. The value can be arbitrary, or a number "
                "that you think correctly defines the size of an epoch. "
                "Epoch-level callbacks will then be called at this interval."
            )
        if size >= 0:
            return size.numpy().item()
        return None

    @property
    def _samples(self):
        return self._adapter.get_samples()

    def _validate_data_handler(self):
        if (
            self._steps_per_execution.numpy().item() > 1
            and self._inferred_steps is None
        ):
            raise ValueError(
                "Could not infer the size of the data. With "
                "`steps_per_execution > 1`, you must specify the number of "
                "steps to run."
            )


class _ClusterCoordinatorDataHandler(DataHandler):
    """A `DataHandler` that is compatible with `ClusterCoordinator`."""

    def __init__(self, x, y=None, **kwargs):
        if not _is_distributed_dataset(x) and not isinstance(
            x, (dataset_creator.DatasetCreator, tf.data.Dataset)
        ):
            x = self._convert_to_dataset_creator(x, y, **kwargs)

        super().__init__(x=x, **kwargs)

    def _convert_to_dataset_creator(self, x, y, **kwargs):
        """Converts non-tf.data.Dataset to `DatasetCreator` instances."""

        def _dataset_fn(input_context):
            del input_context
            data_adapter_cls = select_data_adapter(x, y)
            return data_adapter_cls(x=x, y=y, **kwargs).get_dataset()

        # Only apply this logic to input types that are known to work with
        # parameter-server strategy.
        if isinstance(x, _get_tensor_types()) and isinstance(
            y, _get_tensor_types()
        ):
            return dataset_creator.DatasetCreator(_dataset_fn)
        else:
            raise NotImplementedError(
                "Only `tf.keras.utils.experimental.DatasetCreator`, "
                "`tf.Tensor`, numpy arrays and pandas dataframes are "
                "supported types at this time."
            )

    def _configure_dataset_and_inferred_steps(
        self, strategy, x, steps_per_epoch, class_weight, distribute
    ):
        if isinstance(x, dataset_creator.DatasetCreator):

            def per_worker_dataset_fn():
                return strategy.distribute_datasets_from_function(
                    x, options=x.input_options
                )

            coordinator = self._model._cluster_coordinator
            self._dataset = coordinator.create_per_worker_dataset(
                per_worker_dataset_fn
            )
        else:
            assert distribute
            if not _is_distributed_dataset(x):
                x = strategy.experimental_distribute_dataset(x)

            coordinator = self._model._cluster_coordinator
            self._dataset = coordinator.create_per_worker_dataset(x)

        if steps_per_epoch == -1:
            self._inferred_steps = None
            self._log_indefinite_training_warning()
        else:
            self._inferred_steps = steps_per_epoch

    def sync(self):
        self._model._cluster_coordinator.join()


@keras_export("keras.__internal__.utils.get_data_handler", v1=[])
def get_data_handler(*args, **kwargs):
    """Creates a `DataHandler`, providing standardized access to a `Dataset`.

    implementation of `Model.fit()`, `evaluate()`, or `predict()` methods
    for complete usage examples. As a rule of tumb, `get_data_handler()` accepts
    the same inputs as the `x` argument of `Model.fit()`.

    Example:

    ```python
      def step(iterator):
        data = next(iterator)
        # result <= Do something with data
        return result
      tf_step = tf.function(step, reduce_retracing=True)

      # Assume x is a tf.data Dataset.
      data_handler = data_adapter.get_data_handler(x=x)
      # Epoch iteration
      for epo_idx, iterator in data_handler.enumerate_epochs():
          # Stop on dataset exhaustion.
          with data_handler.catch_stop_iteration():
            for step in data_handler.steps(): # Step iteration
                step_result = step(iterator)
    ```

    Args:
      *args: Arguments passed to the `DataHandler` constructor.
      **kwargs: Arguments passed to the `DataHandler` constructor.

    Returns:
      A `DataHandler` object. If the model's cluster coordinate is set (e.g. the
      model was defined under a parameter-server strategy), returns a
      `_ClusterCoordinatorDataHandler`.

    """
    if getattr(kwargs["model"], "_cluster_coordinator", None):
        return _ClusterCoordinatorDataHandler(*args, **kwargs)
    return DataHandler(*args, **kwargs)


def _make_class_weight_map_fn(class_weight):
    """Applies class weighting to a `Dataset`.

    The `Dataset` is assumed to be in format `(x, y)` or `(x, y, sw)`, where
    `y` must be a single `Tensor`.

    Args:
      class_weight: A map where the keys are integer class ids and values are
        the class weights, e.g. `{0: 0.2, 1: 0.6, 2: 0.3}`

    Returns:
      A function that can be used with `tf.data.Dataset.map` to apply class
      weighting.
    zfExpected `class_weight` to be a dict with keys from 0 to one less than the number of classes, found {}c                    s   g | ]} t | qS r   )rG   )r8   c)rV  r   r   r   I  r:   z-_make_class_weight_map_fn.<locals>.<listcomp>c                     s   t | \} }tj r"td jjdkr6tdtjj jjdkoZt	 d dk fdd fdd}t
|}|durt||j}|| }n|}| |fS )	z*Convert `class_weight` to `sample_weight`.zA`class_weight` is only supported for Models with a single output.   z8`class_weight` not supported for 3+ dimensional targets.rB   c                      s   t j ddS )NrB   r   )r   argmaxr   r   r   r   r   ]  r:   zJ_make_class_weight_map_fn.<locals>._class_weights_map_fn.<locals>.<lambda>c                      s   t t dt jS )N)r5  )r;   castr   rX   rP   r   r  r   r   r   ^  r:   N)r   r;   r<   	is_nestedr   rH   rankr  
smart_condr   r   r  rN   )rY   r   sw	y_classescw)class_weight_tensorr  r   _class_weights_map_fnL  s(    


z8_make_class_weight_map_fn.<locals>._class_weights_map_fn)	r   sortedr0  rO   r   r   r   r;   r9  )rV  	class_idsexpected_class_ids	error_msgr  r   )rV  r  r   rX  1  s    rX  c                    s   dd  t j| } fdd|D }|r8td|tdd |D rR| | fS d}|D ]}|durZ|} qpqZt|jd	 }tt	|d
|  }|d	ks||krtdj||ddd }t j
tj|d	|d| }	t j
tj|||d| }
|	|
fS )a  Split arrays into train and validation subsets in deterministic order.

    The last part of data will become validation data.

    Args:
      arrays: Tensors to split. Allowed inputs are arbitrarily nested structures
        of Tensors and NumPy arrays.
      validation_split: Float between 0 and 1. The proportion of the dataset to
        include in the validation split. The rest of the dataset will be
        included in the training split.
    Returns:
      `(train_arrays, validation_arrays)`
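
    Example (a sketch):

    ```
    x = np.arange(10)
    (x_train,), (x_val,) = train_validation_split((x,), validation_split=0.2)
    # len(x_train) == 8, len(x_val) == 2; the validation split is the tail.
    ```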
    """

    def _can_split(t):
        tensor_types = _get_tensor_types()
        return isinstance(t, tensor_types) or t is None

    flat_arrays = tf.nest.flatten(arrays)
    unsplitable = [type(t) for t in flat_arrays if not _can_split(t)]
    if unsplitable:
        raise ValueError(
            "`validation_split` is only supported for Tensors or NumPy "
            "arrays, found following types in the input: {}".format(
                unsplitable
            )
        )

    if all(t is None for t in flat_arrays):
        return arrays, arrays

    first_non_none = None
    for t in flat_arrays:
        if t is not None:
            first_non_none = t
            break

    # Assumes all arrays have the same batch shape or are `None`.
    batch_dim = int(first_non_none.shape[0])
    split_at = int(math.floor(batch_dim * (1.0 - validation_split)))

    if split_at == 0 or split_at == batch_dim:
        raise ValueError(
            "Training data contains {batch_dim} samples, which is not "
            "sufficient to split it into a validation and training set as "
            "specified by `validation_split={validation_split}`. Either "
            "provide more data, or a different value for the "
            "`validation_split` argument.".format(
                batch_dim=batch_dim, validation_split=validation_split
            )
        )

    def _split(t, start, end):
        if t is None:
            return t
        return t[start:end]

    train_arrays = tf.nest.map_structure(
        functools.partial(_split, start=0, end=split_at), arrays
    )
    val_arrays = tf.nest.map_structure(
        functools.partial(_split, start=split_at, end=batch_dim), arrays
    )

    return train_arrays, val_arrays


@keras_export("keras.utils.unpack_x_y_sample_weight", v1=[])
def unpack_x_y_sample_weight(data):
    """Unpacks user-provided data tuple.

    This is a convenience utility to be used when overriding
    `Model.train_step`, `Model.test_step`, or `Model.predict_step`.
    This utility makes it easy to support data of the form `(x,)`,
    `(x, y)`, or `(x, y, sample_weight)`.

    Standalone usage:

    >>> features_batch = tf.ones((10, 5))
    >>> labels_batch = tf.zeros((10, 5))
    >>> data = (features_batch, labels_batch)
    >>> # `y` and `sample_weight` will default to `None` if not provided.
    >>> x, y, sample_weight = tf.keras.utils.unpack_x_y_sample_weight(data)
    >>> sample_weight is None
    True

    Example in overridden `Model.train_step`:

    ```python
    class MyModel(tf.keras.Model):

      def train_step(self, data):
        # If `sample_weight` is not provided, all samples will be weighted
        # equally.
        x, y, sample_weight = tf.keras.utils.unpack_x_y_sample_weight(data)

        with tf.GradientTape() as tape:
          y_pred = self(x, training=True)
          loss = self.compiled_loss(
            y, y_pred, sample_weight, regularization_losses=self.losses)
          trainable_variables = self.trainable_variables
          gradients = tape.gradient(loss, trainable_variables)
          self.optimizer.apply_gradients(zip(gradients, trainable_variables))

        self.compiled_metrics.update_state(y, y_pred, sample_weight)
        return {m.name: m.result() for m in self.metrics}
    ```

    Args:
      data: A tuple of the form `(x,)`, `(x, y)`, or `(x, y, sample_weight)`.

    Returns:
      The unpacked tuple, with `None`s for `y` and `sample_weight` if they are
      not provided.
    NrB   r   r     z]Data is expected to be in format `x`, `(x,)`, `(x, y)`, or `(x, y, sample_weight)`, found: {})r1   r   r   r   r   r   )rY   r  r   r   r   r     s    0


r   z"keras.utils.pack_x_y_sample_weightc                 C   s>   |du r t j| s| S | fS n|du r0| |fS | ||fS dS )a  Packs user-provided data into a tuple.

    This is a convenience utility for packing data into the tuple formats
    that `Model.fit` uses.

    Standalone usage:

    >>> x = tf.ones((10, 1))
    >>> data = tf.keras.utils.pack_x_y_sample_weight(x)
    >>> isinstance(data, tf.Tensor)
    True
    >>> y = tf.ones((10, 1))
    >>> data = tf.keras.utils.pack_x_y_sample_weight(x, y)
    >>> isinstance(data, tuple)
    True
    >>> x, y = data

    Args:
      x: Features to pass to `Model`.
      y: Ground-truth targets to pass to `Model`.
      sample_weight: Sample weight for each element.

    Returns:
      Tuple in the format used in `Model.fit`.
    N)r;   r<   r  r   r   r  r   r   r   rm     s    rm   c                 C   sz   t |||f\}}}|du r$|f}n|du r6||f}n
|||f}t| tjj|}|rh|t|}| |}t	|S )zCreates a single-batch dataset.N)
rj   rp   r;   rY   rZ   r]   rx   rX  r[  r^  )r   r   r   r  rV  rY   r   r   r   r   single_batch_iterator  s    


r  c                 C   s~   t dd tj| D }t|dkrzd}tg d| D ]0\}}|d|ddd tj|D 7 }q8|d	7 }t|d S )
Nc                 s   s   | ]}t |jd  V  qdS rE   rF   rI   r   r   r   r9   0  r:   z*_check_data_cardinality.<locals>.<genexpr>rB   zData cardinality is ambiguous:
r  z  {} sizes: {}
z, c                 s   s   | ]}t |jd  V  qdS rE   )ru   rH   rI   r   r   r   r9   6  s   z8Make sure all arrays contain the same number of samples.)	rn   r;   r<   r=   r   r   r   r  r   )rY   rU   msglabelsingle_datar   r   r   rp   /  s    

rp   c                   C   s,   t d u rtjtjfS tjtjt jt jfS d S r6   )pdr;   r   r   r   Series	DataFramer   r   r   r   r>   >  s    r>   c                 C   s0   zddl m} || W S  ty*   Y dS 0 d S )Nr   )issparseF)scipy.sparser  ImportError)r   r  r   r   r   r   E  s
    
r   c                 C   s   t d u rdS t| t jS d S r   )r  r1   r  r   r   r   r   r6  N  s    r6  c                 C   sv   |   }|j|j }}|j|j }}t|jjtj	rB|
t }tjtj|ddtj|ddfdd}t|||S )z1Converts a SciPy sparse matrix to a SparseTensor.rB   r   )tocoorowcolrY   rH   r	  rN   r   r   r
  astyper   r  r^   r7  r;   SparseTensor)r   
sparse_coor  r  rY   rH   rT   r   r   r   r:  U  s    r:  c                 C   s   t | tjjS r6   )r1   r;   rM  r   )dsr   r   r   r   b  s    r   )NN)NNN)Fr+   r-   ru  r  r  rq   rQ   r   r   tensorflow.compat.v2r   v2r;   kerasr   keras.enginer   keras.utilsr   r   r   Z&tensorflow.python.distribute.input_libr   tensorflow.python.eagerr   tensorflow.python.frameworkr	   tensorflow.python.platformr
   r    tensorflow.python.util.tf_exportr   pandasr  r  r  
monitoring	BoolGauger'  objectABCMetar   r0   r   r   r   r   r   r   r  r$  r*  r%  rj   r   rk   rD  rx  r  rX  r  r   rm   r  rp   r>   r   r6  r:  r   r   r   r   r   <module>   s   
 # Sg6`DS Z"	.  A

*<B

A
* 
	
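

# Minimal smoke test of the public helpers above. This demo is an
# illustrative sketch, not part of the module's API, and only runs when the
# file is executed directly.
if __name__ == "__main__":
    features = np.random.random((10, 3)).astype("float32")
    labels = np.random.randint(0, 2, size=(10, 1))

    # `pack_x_y_sample_weight` / `unpack_x_y_sample_weight` round-trip.
    data = pack_x_y_sample_weight(features, labels)
    x, y, sample_weight = unpack_x_y_sample_weight(data)
    assert sample_weight is None

    # Adapter selection mirrors the sample usage in `DataAdapter`'s docstring.
    adapter_cls = select_data_adapter(x, y)
    adapter = adapter_cls(x, y, batch_size=4)
    print("selected adapter:", adapter_cls.__name__)
    print("batches per epoch:", adapter.get_size())  # ceil(10 / 4) == 3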