"""Xception V1 model for Keras.

On ImageNet, this model gets to a top-1 validation accuracy of 0.790
and a top-5 validation accuracy of 0.945.

Note that the input image size for this model is different from that of
the VGG16 and ResNet models (299x299 instead of 224x224),
and that the input preprocessing function is also different
(the same as for Inception V3, scaling inputs to the [-1, 1] range).

Also note that this model is only available for the TensorFlow backend,
due to its reliance on `SeparableConvolution` layers.

# Reference

- [Xception: Deep Learning with Depthwise Separable Convolutions](https://arxiv.org/abs/1610.02357)

"""

from __future__ import print_function
from __future__ import absolute_import

import warnings

from ..models import Model
from .. import layers
from ..layers import Dense
from ..layers import Input
from ..layers import BatchNormalization
from ..layers import Activation
from ..layers import Conv2D
from ..layers import SeparableConv2D
from ..layers import MaxPooling2D
from ..layers import GlobalAveragePooling2D
from ..layers import GlobalMaxPooling2D
from ..engine.topology import get_source_inputs
from ..utils.data_utils import get_file
from .. import backend as K
from .imagenet_utils import decode_predictions
from .imagenet_utils import _obtain_input_shape


TF_WEIGHTS_PATH = 'https://github.com/fchollet/deep-learning-models/releases/download/v0.4/xception_weights_tf_dim_ordering_tf_kernels.h5'
TF_WEIGHTS_PATH_NO_TOP = 'https://github.com/fchollet/deep-learning-models/releases/download/v0.4/xception_weights_tf_dim_ordering_tf_kernels_notop.h5'


def Xception(include_top=True, weights='imagenet',
             input_tensor=None, input_shape=None,
             pooling=None,
             classes=1000):
    """Instantiates the Xception architecture.

    Optionally loads weights pre-trained
    on ImageNet. This model is available for TensorFlow only,
    and can only be used with inputs following the TensorFlow
    data format `(width, height, channels)`.
    You should set `image_data_format="channels_last"` in your Keras config
    located at ~/.keras/keras.json.

    Note that the default input image size for this model is 299x299.

    # Arguments
        include_top: whether to include the fully-connected
            layer at the top of the network.
        weights: one of `None` (random initialization)
            or "imagenet" (pre-training on ImageNet).
        input_tensor: optional Keras tensor (i.e. output of `layers.Input()`)
            to use as image input for the model.
        input_shape: optional shape tuple, only to be specified
            if `include_top` is False (otherwise the input shape
            has to be `(299, 299, 3)`).
            It should have exactly 3 input channels,
            and width and height should be no smaller than 71.
            E.g. `(150, 150, 3)` would be one valid value.
        pooling: Optional pooling mode for feature extraction
            when `include_top` is `False`.
            - `None` means that the output of the model will be
                the 4D tensor output of the
                last convolutional layer.
            - `avg` means that global average pooling
                will be applied to the output of the
                last convolutional layer, and thus
                the output of the model will be a 2D tensor.
            - `max` means that global max pooling will
                be applied.
        classes: optional number of classes to classify images
            into, only to be specified if `include_top` is True, and
            if no `weights` argument is specified.

    # Returns
        A Keras model instance.

    # Raises
        ValueError: in case of invalid argument for `weights`,
            or invalid input shape.
        RuntimeError: If attempting to run this model with a
            backend that does not support separable convolutions.
    """
    if weights not in {'imagenet', None}:
        raise ValueError('The `weights` argument should be either '
                         '`None` (random initialization) or `imagenet` '
                         '(pre-training on ImageNet).')

    if weights == 'imagenet' and include_top and classes != 1000:
        raise ValueError('If using `weights` as imagenet with `include_top`'
                         ' as true, `classes` should be 1000')

    if K.backend() != 'tensorflow':
        raise RuntimeError('The Xception model is only available with '
                           'the TensorFlow backend.')
    if K.image_data_format() != 'channels_last':
        warnings.warn('The Xception model is only available for the '
                      'input data format "channels_last" '
                      '(width, height, channels). '
                      'However your settings specify the default '
                      'data format "channels_first" (channels, width, height). '
                      'You should set `image_data_format="channels_last"` '
                      'in your Keras config located at ~/.keras/keras.json. '
                      'The model being returned right now will expect inputs '
                      'to follow the "channels_last" data format.')
        K.set_image_data_format('channels_last')
        old_data_format = 'channels_first'
    else:
        old_data_format = None

    # Determine proper input shape.
    input_shape = _obtain_input_shape(input_shape,
                                      default_size=299,
                                      min_size=71,
                                      data_format=K.image_data_format(),
                                      include_top=include_top)

    if input_tensor is None:
        img_input = Input(shape=input_shape)
    else:
        if not K.is_keras_tensor(input_tensor):
            img_input = Input(tensor=input_tensor, shape=input_shape)
        else:
            img_input = input_tensor

    # Entry flow.
    x = Conv2D(32, (3, 3), strides=(2, 2), use_bias=False, name='block1_conv1')(img_input)
    x = BatchNormalization(name='block1_conv1_bn')(x)
    x = Activation('relu', name='block1_conv1_act')(x)
    x = Conv2D(64, (3, 3), use_bias=False, name='block1_conv2')(x)
    x = BatchNormalization(name='block1_conv2_bn')(x)
    x = Activation('relu', name='block1_conv2_act')(x)

    residual = Conv2D(128, (1, 1), strides=(2, 2), padding='same', use_bias=False)(x)
    residual = BatchNormalization()(residual)

    x = SeparableConv2D(128, (3, 3), padding='same', use_bias=False, name='block2_sepconv1')(x)
    x = BatchNormalization(name='block2_sepconv1_bn')(x)
    x = Activation('relu', name='block2_sepconv2_act')(x)
    x = SeparableConv2D(128, (3, 3), padding='same', use_bias=False, name='block2_sepconv2')(x)
    x = BatchNormalization(name='block2_sepconv2_bn')(x)

    x = MaxPooling2D((3, 3), strides=(2, 2), padding='same', name='block2_pool')(x)
    x = layers.add([x, residual])

    residual = Conv2D(256, (1, 1), strides=(2, 2), padding='same', use_bias=False)(x)
    residual = BatchNormalization()(residual)

    x = Activation('relu', name='block3_sepconv1_act')(x)
    x = SeparableConv2D(256, (3, 3), padding='same', use_bias=False, name='block3_sepconv1')(x)
    x = BatchNormalization(name='block3_sepconv1_bn')(x)
    x = Activation('relu', name='block3_sepconv2_act')(x)
    x = SeparableConv2D(256, (3, 3), padding='same', use_bias=False, name='block3_sepconv2')(x)
    x = BatchNormalization(name='block3_sepconv2_bn')(x)

    x = MaxPooling2D((3, 3), strides=(2, 2), padding='same', name='block3_pool')(x)
    x = layers.add([x, residual])

    residual = Conv2D(728, (1, 1), strides=(2, 2), padding='same', use_bias=False)(x)
    residual = BatchNormalization()(residual)

    x = Activation('relu', name='block4_sepconv1_act')(x)
    x = SeparableConv2D(728, (3, 3), padding='same', use_bias=False, name='block4_sepconv1')(x)
    x = BatchNormalization(name='block4_sepconv1_bn')(x)
    x = Activation('relu', name='block4_sepconv2_act')(x)
    x = SeparableConv2D(728, (3, 3), padding='same', use_bias=False, name='block4_sepconv2')(x)
    x = BatchNormalization(name='block4_sepconv2_bn')(x)

    x = MaxPooling2D((3, 3), strides=(2, 2), padding='same', name='block4_pool')(x)
    x = layers.add([x, residual])

    # Middle flow: 8 identical residual blocks (block5 to block12).
    for i in range(8):
        residual = x
        prefix = 'block' + str(i + 5)

        x = Activation('relu', name=prefix + '_sepconv1_act')(x)
        x = SeparableConv2D(728, (3, 3), padding='same', use_bias=False, name=prefix + '_sepconv1')(x)
        x = BatchNormalization(name=prefix + '_sepconv1_bn')(x)
        x = Activation('relu', name=prefix + '_sepconv2_act')(x)
        x = SeparableConv2D(728, (3, 3), padding='same', use_bias=False, name=prefix + '_sepconv2')(x)
        x = BatchNormalization(name=prefix + '_sepconv2_bn')(x)
        x = Activation('relu', name=prefix + '_sepconv3_act')(x)
        x = SeparableConv2D(728, (3, 3), padding='same', use_bias=False, name=prefix + '_sepconv3')(x)
        x = BatchNormalization(name=prefix + '_sepconv3_bn')(x)

        x = layers.add([x, residual])

    # Exit flow.
    residual = Conv2D(1024, (1, 1), strides=(2, 2), padding='same', use_bias=False)(x)
    residual = BatchNormalization()(residual)

    x = Activation('relu', name='block13_sepconv1_act')(x)
    x = SeparableConv2D(728, (3, 3), padding='same', use_bias=False, name='block13_sepconv1')(x)
    x = BatchNormalization(name='block13_sepconv1_bn')(x)
    x = Activation('relu', name='block13_sepconv2_act')(x)
    x = SeparableConv2D(1024, (3, 3), padding='same', use_bias=False, name='block13_sepconv2')(x)
    x = BatchNormalization(name='block13_sepconv2_bn')(x)

    x = MaxPooling2D((3, 3), strides=(2, 2), padding='same', name='block13_pool')(x)
    x = layers.add([x, residual])

    x = SeparableConv2D(1536, (3, 3), padding='same', use_bias=False, name='block14_sepconv1')(x)
    x = BatchNormalization(name='block14_sepconv1_bn')(x)
    x = Activation('relu', name='block14_sepconv1_act')(x)

    x = SeparableConv2D(2048, (3, 3), padding='same', use_bias=False, name='block14_sepconv2')(x)
    x = BatchNormalization(name='block14_sepconv2_bn')(x)
    x = Activation('relu', name='block14_sepconv2_act')(x)

    if include_top:
        x = GlobalAveragePooling2D(name='avg_pool')(x)
        x = Dense(classes, activation='softmax', name='predictions')(x)
    else:
        if pooling == 'avg':
            x = GlobalAveragePooling2D()(x)
        elif pooling == 'max':
            x = GlobalMaxPooling2D()(x)

    # Ensure that the model takes into account
    # any potential predecessors of `input_tensor`.
    if input_tensor is not None:
        inputs = get_source_inputs(input_tensor)
    else:
        inputs = img_input
    # Create model.
    model = Model(inputs, x, name='xception')

    # Load weights.
    if weights == 'imagenet':
        if include_top:
            weights_path = get_file('xception_weights_tf_dim_ordering_tf_kernels.h5',
                                    TF_WEIGHTS_PATH,
                                    cache_subdir='models')
        else:
            weights_path = get_file('xception_weights_tf_dim_ordering_tf_kernels_notop.h5',
                                    TF_WEIGHTS_PATH_NO_TOP,
                                    cache_subdir='models')
        model.load_weights(weights_path)

    if old_data_format:
        K.set_image_data_format(old_data_format)
    return model


def preprocess_input(x):
    x /= 255.
    x -= 0.5
    x *= 2.
    return x


(!   t   __doc__t
   __future__R    R   Ra   R[   R   t    R   R   R   R   R   R   R	   R
   R   R   t   engine.topologyR   t   utils.data_utilsR   R   R^   t   imagenet_utilsR   R   Ri   Rj   t   TrueR\   Rz   R{   (    (    (    s:   /tmp/pip-build-isqEY4/keras/keras/applications/xception.pyt   <module>   s4   Τ
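# Usage sketch: illustrates the call pattern documented in the docstrings
# above -- build the model, preprocess a 299x299 RGB image to the [-1, 1]
# range Xception expects, and decode the ImageNet predictions. The file name
# 'elephant.jpg' is only a placeholder assumption; any local RGB image works.
if __name__ == '__main__':
    import numpy as np
    from keras.preprocessing import image

    model = Xception(include_top=True, weights='imagenet')

    img = image.load_img('elephant.jpg', target_size=(299, 299))
    x = image.img_to_array(img)
    x = np.expand_dims(x, axis=0)
    x = preprocess_input(x)

    preds = model.predict(x)
    print('Predicted:', decode_predictions(preds))

    # For feature extraction instead of classification, drop the top and pool:
    # base = Xception(include_top=False, weights='imagenet', pooling='avg')
    # features = base.predict(x)  # shape (1, 2048)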