from __future__ import absolute_import

from ..engine import Layer, InputSpec
from .. import initializers
from .. import regularizers
from .. import constraints
from .. import backend as K
from ..legacy import interfaces


class BatchNormalization(Layer):
    """Batch normalization layer (Ioffe and Szegedy, 2014).

    Normalize the activations of the previous layer at each batch,
    i.e. applies a transformation that maintains the mean activation
    close to 0 and the activation standard deviation close to 1.
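
    The transformation applied to each feature is, in effect,
    `output = gamma * (input - mean) / sqrt(var + epsilon) + beta`,
    using the batch statistics during training and the moving averages
    at inference time.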

    # Arguments
        axis: Integer, the axis that should be normalized
            (typically the features axis).
            For instance, after a `Conv2D` layer with
            `data_format="channels_first"`,
            set `axis=1` in `BatchNormalization` (see the example below).
        momentum: Momentum for the moving average.
        epsilon: Small float added to variance to avoid dividing by zero.
        center: If True, add offset of `beta` to normalized tensor.
            If False, `beta` is ignored.
        scale: If True, multiply by `gamma`.
            If False, `gamma` is not used.
            When the next layer is linear (this also applies to,
            e.g., `nn.relu`), this can be disabled, since the
            scaling will be done by the next layer.
        beta_initializer: Initializer for the beta weight.
        gamma_initializer: Initializer for the gamma weight.
        moving_mean_initializer: Initializer for the moving mean.
        moving_variance_initializer: Initializer for the moving variance.
        beta_regularizer: Optional regularizer for the beta weight.
        gamma_regularizer: Optional regularizer for the gamma weight.
        beta_constraint: Optional constraint for the beta weight.
        gamma_constraint: Optional constraint for the gamma weight.

    # Input shape
        Arbitrary. Use the keyword argument `input_shape`
        (tuple of integers, does not include the samples axis)
        when using this layer as the first layer in a model.

    # Output shape
        Same shape as input.
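
    # Example

    A minimal usage sketch (assuming the `Sequential` model and the
    `Conv2D` layer from Keras):

    ```python
    from keras.models import Sequential
    from keras.layers import Conv2D, BatchNormalization

    model = Sequential()
    model.add(Conv2D(32, (3, 3), input_shape=(3, 64, 64),
                     data_format='channels_first'))
    # The features sit on axis 1 with `channels_first`, so normalize there.
    model.add(BatchNormalization(axis=1))
    ```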

    # References
        - [Batch Normalization: Accelerating Deep Network Training by Reducing Internal Covariate Shift](https://arxiv.org/abs/1502.03167)
    """

    @interfaces.legacy_batchnorm_support
    def __init__(self,
                 axis=-1,
                 momentum=0.99,
                 epsilon=1e-3,
                 center=True,
                 scale=True,
                 beta_initializer='zeros',
                 gamma_initializer='ones',
                 moving_mean_initializer='zeros',
                 moving_variance_initializer='ones',
                 beta_regularizer=None,
                 gamma_regularizer=None,
                 beta_constraint=None,
                 gamma_constraint=None,
                 **kwargs):
        super(BatchNormalization, self).__init__(**kwargs)
        self.supports_masking = True
        self.axis = axis
        self.momentum = momentum
        self.epsilon = epsilon
        self.center = center
        self.scale = scale
        self.beta_initializer = initializers.get(beta_initializer)
        self.gamma_initializer = initializers.get(gamma_initializer)
        self.moving_mean_initializer = initializers.get(moving_mean_initializer)
        self.moving_variance_initializer = initializers.get(
            moving_variance_initializer)
        self.beta_regularizer = regularizers.get(beta_regularizer)
        self.gamma_regularizer = regularizers.get(gamma_regularizer)
        self.beta_constraint = constraints.get(beta_constraint)
        self.gamma_constraint = constraints.get(gamma_constraint)
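
    # The running statistics are exponential moving averages: during
    # training, `call` registers updates of the form
    #     moving_mean     = momentum * moving_mean + (1 - momentum) * batch_mean
    #     moving_variance = momentum * moving_variance + (1 - momentum) * batch_var
    # e.g. with momentum=0.99, a stored mean of 0.0 and a batch mean of 2.0,
    # the stored value becomes 0.99 * 0.0 + 0.01 * 2.0 = 0.02.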

    def build(self, input_shape):
        dim = input_shape[self.axis]
        if dim is None:
            raise ValueError('Axis ' + str(self.axis) + ' of input tensor '
                             'should have a defined dimension but the layer '
                             'received an input with shape ' +
                             str(input_shape) + '.')
        self.input_spec = InputSpec(ndim=len(input_shape),
                                    axes={self.axis: dim})
        shape = (dim,)

        if self.scale:
            self.gamma = self.add_weight(shape, name='gamma',
                                         initializer=self.gamma_initializer,
                                         regularizer=self.gamma_regularizer,
                                         constraint=self.gamma_constraint)
        else:
            self.gamma = None
        if self.center:
            self.beta = self.add_weight(shape, name='beta',
                                        initializer=self.beta_initializer,
                                        regularizer=self.beta_regularizer,
                                        constraint=self.beta_constraint)
        else:
            self.beta = None
        self.moving_mean = self.add_weight(
            shape, name='moving_mean',
            initializer=self.moving_mean_initializer, trainable=False)
        self.moving_variance = self.add_weight(
            shape, name='moving_variance',
            initializer=self.moving_variance_initializer, trainable=False)
        self.built = True

    def call(self, inputs, training=None):
        input_shape = K.int_shape(inputs)
        # Prepare broadcasting shape.
        ndim = len(input_shape)
        reduction_axes = list(range(len(input_shape)))
        del reduction_axes[self.axis]
        broadcast_shape = [1] * len(input_shape)
        broadcast_shape[self.axis] = input_shape[self.axis]

        # Determines whether broadcasting is needed.
        needs_broadcasting = (sorted(reduction_axes) != list(range(ndim))[:-1])

        normed, mean, variance = K.normalize_batch_in_training(
            inputs, self.gamma, self.beta, reduction_axes,
            epsilon=self.epsilon)

        if training in {0, False}:
            return normed

        self.add_update([K.moving_average_update(self.moving_mean, mean,
                                                 self.momentum),
                         K.moving_average_update(self.moving_variance, variance,
                                                 self.momentum)],
                        inputs)

        def normalize_inference():
            if needs_broadcasting:
                # In this case we must explicitly broadcast all parameters.
                broadcast_moving_mean = K.reshape(self.moving_mean,
                                                  broadcast_shape)
                broadcast_moving_variance = K.reshape(self.moving_variance,
                                                      broadcast_shape)
                if self.center:
                    broadcast_beta = K.reshape(self.beta, broadcast_shape)
                else:
                    broadcast_beta = None
                if self.scale:
                    broadcast_gamma = K.reshape(self.gamma, broadcast_shape)
                else:
                    broadcast_gamma = None
                return K.batch_normalization(inputs, broadcast_moving_mean,
                                             broadcast_moving_variance,
                                             broadcast_beta, broadcast_gamma,
                                             epsilon=self.epsilon)
            return K.batch_normalization(inputs, self.moving_mean,
                                         self.moving_variance, self.beta,
                                         self.gamma, epsilon=self.epsilon)

        # Pick the normalized form corresponding to the training phase.
        return K.in_train_phase(normed, normalize_inference,
                                training=training)

    def get_config(self):
        config = {
            'axis': self.axis,
            'momentum': self.momentum,
            'epsilon': self.epsilon,
            'center': self.center,
            'scale': self.scale,
            'beta_initializer': initializers.serialize(self.beta_initializer),
            'gamma_initializer': initializers.serialize(self.gamma_initializer),
            'moving_mean_initializer':
                initializers.serialize(self.moving_mean_initializer),
            'moving_variance_initializer':
                initializers.serialize(self.moving_variance_initializer),
            'beta_regularizer': regularizers.serialize(self.beta_regularizer),
            'gamma_regularizer': regularizers.serialize(self.gamma_regularizer),
            'beta_constraint': constraints.serialize(self.beta_constraint),
            'gamma_constraint': constraints.serialize(self.gamma_constraint)
        }
        base_config = super(BatchNormalization, self).get_config()
        return dict(list(base_config.items()) + list(config.items()))