from __future__ import absolute_import

from .. import backend as K
from .. import initializers
from .. import regularizers
from .. import constraints
from ..engine import Layer
from ..legacy import interfaces


class Embedding(Layer):
    """Turns positive integers (indexes) into dense vectors of fixed size.
    e.g. [[4], [20]] -> [[[0.25, 0.1]], [[0.6, -0.2]]]

    This layer can only be used as the first layer in a model.

    # Example

    ```python
      model = Sequential()
      model.add(Embedding(1000, 64, input_length=10))
      # the model will take as input an integer matrix of size (batch, input_length).
      # the largest integer (i.e. word index) in the input should be no larger than 999 (vocabulary size).
      # now model.output_shape == (None, 10, 64), where None is the batch dimension.

      input_array = np.random.randint(1000, size=(32, 10))

      model.compile('rmsprop', 'mse')
      output_array = model.predict(input_array)
      assert output_array.shape == (32, 10, 64)
    ```

    # Arguments
      input_dim: int > 0. Size of the vocabulary, i.e.
          1 + maximum integer index occurring in the input data.
      output_dim: int >= 0. Dimension of the dense embedding.
      embeddings_initializer: Initializer for the `embeddings` matrix
            (see [initializers](../initializers.md)).
      embeddings_regularizer: Regularizer function applied to
            the `embeddings` matrix
            (see [regularizer](../regularizers.md)).
      embeddings_constraint: Constraint function applied to
            the `embeddings` matrix
            (see [constraints](../constraints.md)).
      mask_zero: Whether or not the input value 0 is a special "padding"
          value that should be masked out.
          This is useful when using [recurrent layers](recurrent.md)
          which may take variable-length input.
          If this is `True` then all subsequent layers
          in the model need to support masking or an exception will be raised.
          If `mask_zero` is set to `True`, then as a consequence index 0
          cannot be used in the vocabulary (`input_dim` should equal
          `|vocabulary| + 1`); see the short sketch after this argument list.
      input_length: Length of input sequences, when it is constant.
          This argument is required if you are going to connect
          `Flatten` then `Dense` layers downstream
          (without it, the shape of the dense outputs cannot be computed).
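
      A minimal sketch of `mask_zero` in practice (illustrative, not part of
      the original docstring; assumes Keras 2.x and a masking-aware layer
      such as `LSTM`):

      ```python
        import numpy as np
        from keras.models import Sequential
        from keras.layers import Embedding, LSTM

        # Indices 1..4 are real tokens, 0 is padding -> input_dim = 4 + 1.
        model = Sequential()
        model.add(Embedding(5, 8, mask_zero=True, input_length=4))
        model.add(LSTM(16))  # supports masking, so padded steps are skipped
        model.compile('rmsprop', 'mse')

        data = np.array([[1, 2, 0, 0]])  # two real tokens, two padding zeros
        assert model.predict(data).shape == (1, 16)
      ```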

    # Input shape
        2D tensor with shape: `(batch_size, sequence_length)`.

    # Output shape
        3D tensor with shape: `(batch_size, sequence_length, output_dim)`.

    # References
        - [A Theoretically Grounded Application of Dropout in Recurrent Neural Networks](http://arxiv.org/abs/1512.05287)
    """

    @interfaces.legacy_embedding_support
    def __init__(self, input_dim, output_dim,
                 embeddings_initializer='uniform',
                 embeddings_regularizer=None,
                 activity_regularizer=None,
                 embeddings_constraint=None,
                 mask_zero=False,
                 input_length=None,
                 **kwargs):
        # Embedding layers consume integer indices, so force an int32 input
        # and derive the input shape from input_length when it is given.
        kwargs['dtype'] = 'int32'
        if 'input_shape' not in kwargs:
            if input_length:
                kwargs['input_shape'] = (input_length,)
            else:
                kwargs['input_shape'] = (None,)
        super(Embedding, self).__init__(**kwargs)

        self.input_dim = input_dim
        self.output_dim = output_dim
        self.embeddings_initializer = initializers.get(embeddings_initializer)
        self.embeddings_regularizer = regularizers.get(embeddings_regularizer)
        self.activity_regularizer = regularizers.get(activity_regularizer)
        self.embeddings_constraint = constraints.get(embeddings_constraint)
        self.mask_zero = mask_zero
        self.input_length = input_length
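    # Illustrative note (not in the original source): with the kwargs
    # handling above, for example,
    #   Embedding(1000, 64, input_length=10) -> layer input shape (None, 10)
    #   Embedding(1000, 64)                  -> layer input shape (None, None)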
    def build(self, input_shape):
        # The embedding matrix is the layer's only weight: one
        # output_dim-sized row per vocabulary index.
        self.embeddings = self.add_weight(
            shape=(self.input_dim, self.output_dim),
            initializer=self.embeddings_initializer,
            name='embeddings',
            regularizer=self.embeddings_regularizer,
            constraint=self.embeddings_constraint)
        self.built = True
    def compute_mask(self, inputs, mask=None):
        # Only positions holding the padding index 0 are masked out.
        if not self.mask_zero:
            return None
        else:
            return K.not_equal(inputs, 0)

    def compute_output_shape(self, input_shape):
        if not self.input_length:
            input_length = input_shape[1]
        else:
            input_length = self.input_length
        return (input_shape[0], input_length, self.output_dim)

    def call(self, inputs):
        if K.dtype(inputs) != 'int32':
            inputs = K.cast(inputs, 'int32')
        out = K.gather(self.embeddings, inputs)
        return out
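    # Illustrative note (not in the original source): `call` is a pure table
    # lookup. With output_dim=2, K.gather(self.embeddings, [[4, 20]]) yields
    # a float tensor of shape (1, 2, 2): one embedding row per input index.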
    def get_config(self):
        config = {'input_dim': self.input_dim,
                  'output_dim': self.output_dim,
                  'embeddings_initializer':
                      initializers.serialize(self.embeddings_initializer),
                  'embeddings_regularizer':
                      regularizers.serialize(self.embeddings_regularizer),
                  'activity_regularizer':
                      regularizers.serialize(self.activity_regularizer),
                  'embeddings_constraint':
                      constraints.serialize(self.embeddings_constraint),
                  'mask_zero': self.mask_zero,
                  'input_length': self.input_length}
        base_config = super(Embedding, self).get_config()
        return dict(list(config.items()) + list(base_config.items()))
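    # Illustrative note (not in the original source): the config round-trips,
    # e.g. Embedding.from_config(layer.get_config()) rebuilds an equivalent
    # layer (`from_config` is inherited from `Layer` in Keras 2.x).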

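# ---------------------------------------------------------------------------
# Usage sketch (illustrative, not part of the original module). Because this
# file uses relative imports, run it inside the package, e.g.:
#   python -m keras.layers.embeddings
# ---------------------------------------------------------------------------
if __name__ == '__main__':
    import numpy as np
    from keras.models import Sequential

    model = Sequential()
    model.add(Embedding(1000, 64, input_length=10))
    model.compile('rmsprop', 'mse')

    input_array = np.random.randint(1000, size=(32, 10))
    output_array = model.predict(input_array)
    assert output_array.shape == (32, 10, 64)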