"""ResNet v2 models for Keras.

Reference:
  - [Identity Mappings in Deep Residual Networks]
    (https://arxiv.org/abs/1603.05027) (CVPR 2016)
"""

from keras.applications import imagenet_utils
from keras.applications import resnet
from tensorflow.python.util.tf_export import keras_export


@keras_export('keras.applications.resnet_v2.ResNet50V2',
              'keras.applications.ResNet50V2')
def ResNet50V2(include_top=True, weights='imagenet', input_tensor=None,
               input_shape=None, pooling=None, classes=1000,
               classifier_activation='softmax'):
  """Instantiates the ResNet50V2 architecture."""

  def stack_fn(x):
    # ResNet-50 v2: [3, 4, 6, 3] bottleneck blocks per stage.
    x = resnet.stack2(x, 64, 3, name='conv2')
    x = resnet.stack2(x, 128, 4, name='conv3')
    x = resnet.stack2(x, 256, 6, name='conv4')
    return resnet.stack2(x, 512, 3, stride1=1, name='conv5')

  return resnet.ResNet(stack_fn, True, True, 'resnet50v2', include_top,
                       weights, input_tensor, input_shape, pooling, classes,
                       classifier_activation=classifier_activation)


@keras_export('keras.applications.resnet_v2.ResNet101V2',
              'keras.applications.ResNet101V2')
def ResNet101V2(include_top=True, weights='imagenet', input_tensor=None,
                input_shape=None, pooling=None, classes=1000,
                classifier_activation='softmax'):
  """Instantiates the ResNet101V2 architecture."""

  def stack_fn(x):
    # ResNet-101 v2: [3, 4, 23, 3] bottleneck blocks per stage.
    x = resnet.stack2(x, 64, 3, name='conv2')
    x = resnet.stack2(x, 128, 4, name='conv3')
    x = resnet.stack2(x, 256, 23, name='conv4')
    return resnet.stack2(x, 512, 3, stride1=1, name='conv5')

  return resnet.ResNet(stack_fn, True, True, 'resnet101v2', include_top,
                       weights, input_tensor, input_shape, pooling, classes,
                       classifier_activation=classifier_activation)


@keras_export('keras.applications.resnet_v2.ResNet152V2',
              'keras.applications.ResNet152V2')
def ResNet152V2(include_top=True, weights='imagenet', input_tensor=None,
                input_shape=None, pooling=None, classes=1000,
                classifier_activation='softmax'):
  """Instantiates the ResNet152V2 architecture."""

  def stack_fn(x):
    # ResNet-152 v2: [3, 8, 36, 3] bottleneck blocks per stage.
    x = resnet.stack2(x, 64, 3, name='conv2')
    x = resnet.stack2(x, 128, 8, name='conv3')
    x = resnet.stack2(x, 256, 36, name='conv4')
    return resnet.stack2(x, 512, 3, stride1=1, name='conv5')

  return resnet.ResNet(stack_fn, True, True, 'resnet152v2', include_top,
                       weights, input_tensor, input_shape, pooling, classes,
                       classifier_activation=classifier_activation)


@keras_export('keras.applications.resnet_v2.preprocess_input')
def preprocess_input(x, data_format=None):
  return imagenet_utils.preprocess_input(
      x, data_format=data_format, mode='tf')


@keras_export('keras.applications.resnet_v2.decode_predictions')
def decode_predictions(preds, top=5):
  return imagenet_utils.decode_predictions(preds, top=top)


preprocess_input.__doc__ = imagenet_utils.PREPROCESS_INPUT_DOC.format(
    mode='',
    ret=imagenet_utils.PREPROCESS_INPUT_RET_DOC_TF,
    error=imagenet_utils.PREPROCESS_INPUT_ERROR_DOC)
decode_predictions.__doc__ = imagenet_utils.decode_predictions.__doc__

DOC = """

  Reference:
  - [Identity Mappings in Deep Residual Networks]
    (https://arxiv.org/abs/1603.05027) (CVPR 2016)

  For image classification use cases, see
  [this page for detailed examples](
    https://keras.io/api/applications/#usage-examples-for-image-classification-models).
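
  As a minimal classification sketch (illustrative only; the random array
  below stands in for a real batch of RGB images):

  ```python
  import numpy as np
  import tensorflow as tf

  model = tf.keras.applications.ResNet50V2(weights='imagenet')
  images = np.random.uniform(0, 255, size=(1, 224, 224, 3))  # stand-in batch
  x = tf.keras.applications.resnet_v2.preprocess_input(images)
  preds = model.predict(x)
  print(tf.keras.applications.resnet_v2.decode_predictions(preds, top=3))
  ```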

  For transfer learning use cases, make sure to read the
  [guide to transfer learning & fine-tuning](
    https://keras.io/guides/transfer_learning/).
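
  A minimal transfer-learning sketch (assuming a single-label binary task;
  the classification head below is an illustrative choice, not part of the
  guide):

  ```python
  import tensorflow as tf

  base = tf.keras.applications.ResNet50V2(
      include_top=False, weights='imagenet', input_shape=(224, 224, 3),
      pooling='avg')
  base.trainable = False  # freeze the pretrained backbone

  inputs = tf.keras.Input(shape=(224, 224, 3))
  x = tf.keras.applications.resnet_v2.preprocess_input(inputs)
  x = base(x, training=False)  # keep batch-norm layers in inference mode
  outputs = tf.keras.layers.Dense(1, activation='sigmoid')(x)
  model = tf.keras.Model(inputs, outputs)
  model.compile(optimizer='adam', loss='binary_crossentropy')
  ```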

  Note: each Keras Application expects a specific kind of input preprocessing.
  For ResNetV2, call `tf.keras.applications.resnet_v2.preprocess_input` on your
  inputs before passing them to the model.
  `resnet_v2.preprocess_input` will scale input pixels between -1 and 1.
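
  A quick illustrative check of that scaling (the three pixel values are
  made up):

  ```python
  import numpy as np
  from tensorflow.keras.applications import resnet_v2

  x = np.array([0.0, 127.5, 255.0])
  print(resnet_v2.preprocess_input(x))  # -> [-1.  0.  1.]
  ```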

  Args:
    include_top: whether to include the fully-connected
      layer at the top of the network.
    weights: one of `None` (random initialization),
      'imagenet' (pre-training on ImageNet),
      or the path to the weights file to be loaded.
    input_tensor: optional Keras tensor (i.e. output of `layers.Input()`)
      to use as image input for the model.
    input_shape: optional shape tuple, only to be specified
      if `include_top` is False (otherwise the input shape
      has to be `(224, 224, 3)` (with `'channels_last'` data format)
      or `(3, 224, 224)` (with `'channels_first'` data format)).
      It should have exactly 3 input channels,
      and width and height should be no smaller than 32.
      E.g. `(200, 200, 3)` would be one valid value.
    pooling: Optional pooling mode for feature extraction
      when `include_top` is `False`.
      - `None` means that the output of the model will be
          the 4D tensor output of the
          last convolutional block.
      - `avg` means that global average pooling
          will be applied to the output of the
          last convolutional block, and thus
          the output of the model will be a 2D tensor.
      - `max` means that global max pooling will
          be applied.
    classes: optional number of classes to classify images
      into, only to be specified if `include_top` is True, and
      if no `weights` argument is specified.
    classifier_activation: A `str` or callable. The activation function to use
      on the "top" layer. Ignored unless `include_top=True`. Set
      `classifier_activation=None` to return the logits of the "top" layer.
      When loading pretrained weights, `classifier_activation` can only
      be `None` or `"softmax"`.

  Returns:
    A `keras.Model` instance.
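
  For instance, a feature-extraction sketch (shapes are illustrative) in
  which `include_top=False` with `pooling='avg'` yields a 2D feature tensor:

  ```python
  import numpy as np
  import tensorflow as tf

  extractor = tf.keras.applications.ResNet101V2(
      include_top=False, weights='imagenet', pooling='avg')
  images = np.random.uniform(0, 255, size=(2, 224, 224, 3))  # stand-in batch
  features = extractor.predict(
      tf.keras.applications.resnet_v2.preprocess_input(images))
  print(features.shape)  # (2, 2048)
  ```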
"""

setattr(ResNet50V2, '__doc__', ResNet50V2.__doc__ + DOC)
setattr(ResNet101V2, '__doc__', ResNet101V2.__doc__ + DOC)
setattr(ResNet152V2, '__doc__', ResNet152V2.__doc__ + DOC)