"""VGG16 model for Keras.

Reference:
  - [Very Deep Convolutional Networks for Large-Scale Image Recognition]
    (https://arxiv.org/abs/1409.1556) (ICLR 2015)
"""

import tensorflow.compat.v2 as tf

from keras import backend
from keras.applications import imagenet_utils
from keras.engine import training
from keras.layers import VersionAwareLayers
from keras.utils import data_utils
from keras.utils import layer_utils
from tensorflow.python.util.tf_export import keras_export

WEIGHTS_PATH = "https://storage.googleapis.com/tensorflow/keras-applications/vgg16/vgg16_weights_tf_dim_ordering_tf_kernels.h5"
WEIGHTS_PATH_NO_TOP = "https://storage.googleapis.com/tensorflow/keras-applications/vgg16/vgg16_weights_tf_dim_ordering_tf_kernels_notop.h5"

layers = VersionAwareLayers()


@keras_export("keras.applications.vgg16.VGG16", "keras.applications.VGG16")
def VGG16(
    include_top=True,
    weights="imagenet",
    input_tensor=None,
    input_shape=None,
    pooling=None,
    classes=1000,
    classifier_activation="softmax",
):
    """Instantiates the VGG16 model.

    Reference:
    - [Very Deep Convolutional Networks for Large-Scale Image Recognition](
    https://arxiv.org/abs/1409.1556) (ICLR 2015)

    For image classification use cases, see
    [this page for detailed examples](
      https://keras.io/api/applications/#usage-examples-for-image-classification-models).

    For transfer learning use cases, make sure to read the
    [guide to transfer learning & fine-tuning](
      https://keras.io/guides/transfer_learning/).

    The default input size for this model is 224x224.

    Note: each Keras Application expects a specific kind of input preprocessing.
    For VGG16, call `tf.keras.applications.vgg16.preprocess_input` on your
    inputs before passing them to the model.
    `vgg16.preprocess_input` will convert the input images from RGB to BGR,
    then will zero-center each color channel with respect to the ImageNet
    dataset, without scaling.

    Args:
        include_top: whether to include the 3 fully-connected
            layers at the top of the network.
        weights: one of `None` (random initialization),
              'imagenet' (pre-training on ImageNet),
              or the path to the weights file to be loaded.
        input_tensor: optional Keras tensor
            (i.e. output of `layers.Input()`)
            to use as image input for the model.
        input_shape: optional shape tuple, only to be specified
            if `include_top` is False (otherwise the input shape
            has to be `(224, 224, 3)`
            (with `channels_last` data format)
            or `(3, 224, 224)` (with `channels_first` data format).
            It should have exactly 3 input channels,
            and width and height should be no smaller than 32.
            E.g. `(200, 200, 3)` would be one valid value.
        pooling: Optional pooling mode for feature extraction
            when `include_top` is `False`.
            - `None` means that the output of the model will be
                the 4D tensor output of the
                last convolutional block.
            - `avg` means that global average pooling
                will be applied to the output of the
                last convolutional block, and thus
                the output of the model will be a 2D tensor.
            - `max` means that global max pooling will
                be applied.
        classes: optional number of classes to classify images
            into, only to be specified if `include_top` is True, and
            if no `weights` argument is specified.
        classifier_activation: A `str` or callable. The activation function to
            use on the "top" layer. Ignored unless `include_top=True`. Set
            `classifier_activation=None` to return the logits of the "top"
            layer.  When loading pretrained weights, `classifier_activation` can
            only be `None` or `"softmax"`.

    Returns:
      A `keras.Model` instance.
    """
    if not (weights in {"imagenet", None} or tf.io.gfile.exists(weights)):
        raise ValueError(
            "The `weights` argument should be either "
            "`None` (random initialization), `imagenet` "
            "(pre-training on ImageNet), "
            "or the path to the weights file to be loaded.  "
            f"Received: weights={weights}"
        )

    if weights == "imagenet" and include_top and classes != 1000:
        raise ValueError(
            'If using `weights` as `"imagenet"` with `include_top` '
            "as true, `classes` should be 1000.  "
            f"Received `classes={classes}`"
        )

    # Determine proper input shape.
    input_shape = imagenet_utils.obtain_input_shape(
        input_shape,
        default_size=224,
        min_size=32,
        data_format=backend.image_data_format(),
        require_flatten=include_top,
        weights=weights,
    )

    if input_tensor is None:
        img_input = layers.Input(shape=input_shape)
    else:
        if not backend.is_keras_tensor(input_tensor):
            img_input = layers.Input(tensor=input_tensor, shape=input_shape)
        else:
            img_input = input_tensor

    # Block 1
    x = layers.Conv2D(64, (3, 3), activation="relu", padding="same", name="block1_conv1")(img_input)
    x = layers.Conv2D(64, (3, 3), activation="relu", padding="same", name="block1_conv2")(x)
    x = layers.MaxPooling2D((2, 2), strides=(2, 2), name="block1_pool")(x)

    # Block 2
    x = layers.Conv2D(128, (3, 3), activation="relu", padding="same", name="block2_conv1")(x)
    x = layers.Conv2D(128, (3, 3), activation="relu", padding="same", name="block2_conv2")(x)
    x = layers.MaxPooling2D((2, 2), strides=(2, 2), name="block2_pool")(x)

    # Block 3
    x = layers.Conv2D(256, (3, 3), activation="relu", padding="same", name="block3_conv1")(x)
    x = layers.Conv2D(256, (3, 3), activation="relu", padding="same", name="block3_conv2")(x)
    x = layers.Conv2D(256, (3, 3), activation="relu", padding="same", name="block3_conv3")(x)
    x = layers.MaxPooling2D((2, 2), strides=(2, 2), name="block3_pool")(x)

    # Block 4
    x = layers.Conv2D(512, (3, 3), activation="relu", padding="same", name="block4_conv1")(x)
    x = layers.Conv2D(512, (3, 3), activation="relu", padding="same", name="block4_conv2")(x)
    x = layers.Conv2D(512, (3, 3), activation="relu", padding="same", name="block4_conv3")(x)
    x = layers.MaxPooling2D((2, 2), strides=(2, 2), name="block4_pool")(x)

    # Block 5
    x = layers.Conv2D(512, (3, 3), activation="relu", padding="same", name="block5_conv1")(x)
    x = layers.Conv2D(512, (3, 3), activation="relu", padding="same", name="block5_conv2")(x)
    x = layers.Conv2D(512, (3, 3), activation="relu", padding="same", name="block5_conv3")(x)
    x = layers.MaxPooling2D((2, 2), strides=(2, 2), name="block5_pool")(x)

    if include_top:
        # Classification block.
        x = layers.Flatten(name="flatten")(x)
        x = layers.Dense(4096, activation="relu", name="fc1")(x)
        x = layers.Dense(4096, activation="relu", name="fc2")(x)
        imagenet_utils.validate_activation(classifier_activation, weights)
        x = layers.Dense(classes, activation=classifier_activation, name="predictions")(x)
    else:
        if pooling == "avg":
            x = layers.GlobalAveragePooling2D()(x)
        elif pooling == "max":
            x = layers.GlobalMaxPooling2D()(x)

    # Ensure that the model takes into account
    # any potential predecessors of `input_tensor`.
    if input_tensor is not None:
        inputs = layer_utils.get_source_inputs(input_tensor)
    else:
        inputs = img_input

    # Create model.
    model = training.Model(inputs, x, name="vgg16")

    # Load weights.
    if weights == "imagenet":
        if include_top:
            weights_path = data_utils.get_file(
                "vgg16_weights_tf_dim_ordering_tf_kernels.h5",
                WEIGHTS_PATH,
                cache_subdir="models",
                file_hash="64373286793e3c8b2b4e3219cbf3544b",
            )
        else:
            weights_path = data_utils.get_file(
                "vgg16_weights_tf_dim_ordering_tf_kernels_notop.h5",
                WEIGHTS_PATH_NO_TOP,
                cache_subdir="models",
                file_hash="6d6bbae143d832006294945121d1f1fc",
            )
        model.load_weights(weights_path)
    elif weights is not None:
        model.load_weights(weights)

    return model


@keras_export("keras.applications.vgg16.preprocess_input")
def preprocess_input(x, data_format=None):
    return imagenet_utils.preprocess_input(x, data_format=data_format, mode="caffe")


@keras_export("keras.applications.vgg16.decode_predictions")
def decode_predictions(preds, top=5):
    return imagenet_utils.decode_predictions(preds, top=top)


preprocess_input.__doc__ = imagenet_utils.PREPROCESS_INPUT_DOC.format(
    mode="",
    ret=imagenet_utils.PREPROCESS_INPUT_RET_DOC_CAFFE,
    error=imagenet_utils.PREPROCESS_INPUT_ERROR_DOC,
)
decode_predictions.__doc__ = imagenet_utils.decode_predictions.__doc__
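

# Illustrative classification sketch: a minimal walk-through of the call
# pattern described in the `VGG16` docstring -- `preprocess_input` before the
# model, `decode_predictions` after it. The helper name below is hypothetical
# (not part of the Keras API), a random batch stands in for real RGB images,
# and `weights=None` is chosen only to avoid downloading the pretrained
# weights; with the default `weights="imagenet"` the decoded labels would be
# meaningful.
def _example_classify_random_batch():
    import numpy as np

    # Random "images" in RGB [0, 255], shape (batch, 224, 224, 3).
    images = np.random.uniform(0, 255, size=(1, 224, 224, 3)).astype("float32")
    model = VGG16(weights=None)  # random initialization, no weight download
    preds = model.predict(preprocess_input(images))
    # List (per image) of top-5 (class_id, class_name, probability) tuples.
    return decode_predictions(preds, top=5)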
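

# Illustrative feature-extraction sketch: with `include_top=False` and
# `pooling="avg"`, the model returns a 2D `(batch, 512)` feature tensor, as
# described for the `pooling` argument in the `VGG16` docstring. The helper
# name and the 160x160 input size are illustrative choices, and `weights=None`
# again avoids fetching the no-top pretrained weights.
def _example_extract_features():
    import numpy as np

    images = np.random.uniform(0, 255, size=(2, 160, 160, 3)).astype("float32")
    base = VGG16(weights=None, include_top=False, pooling="avg", input_shape=(160, 160, 3))
    features = base.predict(preprocess_input(images))
    return features  # shape: (2, 512)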