
"""
Ops and optimizations: sigmoid, softplus.

These functions implement special cases of exp and log to improve numerical
stability.

"""
from __future__ import absolute_import, print_function, division

import warnings

import numpy

import theano
from theano import config, gof, printing, scalar
from theano.compat import imap
from theano.printing import pprint
from theano.tensor import basic as tensor
from theano.tensor import elemwise, opt, NotScalarConstantError
from theano.tensor.type import values_eq_approx_remove_inf
from theano.gof.opt import copy_stack_trace


class ScalarSigmoid(scalar.UnaryScalarOp):
    """
    This is just speed opt. Not for stability.

    """
    @staticmethod
    def st_impl(x):
        if x < -30.0:
            return 0.0
        if x > 30.0:
            return 1.0
        # If x is an int8 or uint8, numpy.exp will compute the result in
        # half-precision (float16), where we want float32.
        x_dtype = str(getattr(x, 'dtype', ''))
        if x_dtype in ('int8', 'uint8'):
            return 1.0 / (1.0 + numpy.exp(-x, sig='f'))
        return 1.0 / (1.0 + numpy.exp(-x))

    def impl(self, x):
        return ScalarSigmoid.st_impl(x)

    def grad(self, inp, grads):
        x, = inp
        gz, = grads
        y = scalar_sigmoid(x)
        rval = gz * y * (1.0 - y)

        assert rval.type.dtype.find('float') != -1

        return [rval]

    def c_code(self, node, name, inp, out, sub):
        x, = inp
        z, = out
        # The boundary checks keep exp() from overflowing; outside these
        # ranges the result saturates to 0 or 1 anyway.
        if (node.inputs[0].type == scalar.float32 or
                node.inputs[0].type == scalar.float16):
            return """%(z)s = %(x)s < -88.0f ? 0.0 : %(x)s > 15.0f ? 1.0f : 1.0f /(1.0f + exp(-%(x)s));""" % locals()
        elif node.inputs[0].type == scalar.float64:
            return """%(z)s = %(x)s < -709.0 ? 0.0 : %(x)s > 19.0 ? 1.0 : 1.0 /(1.0+exp(-%(x)s));""" % locals()
        else:
            raise NotImplementedError('only floatingpoint is implemented')

    def c_code_cache_version(self):
        v = super(ScalarSigmoid, self).c_code_cache_version()
        if v:
            return (2,) + v
        else:
            return v

    def c_code_contiguous_disabled(self, node, name, inp, out, sub):
        x, = inp
        z, = out
        if (not theano.config.lib.amdlibm or
                node.inputs[0].dtype != node.outputs[0].dtype):
            raise theano.gof.utils.MethodNotDefined()
        dtype = node.inputs[0].dtype
        if dtype == 'float32' and self.amd_float32 is not None:
            dtype = 'float'
            fct = "amd_vrsa_expf"
        elif dtype == 'float64' and self.amd_float64 is not None:
            dtype = 'double'
            fct = "amd_vrda_exp"
        else:
            raise theano.gof.utils.MethodNotDefined()
        return """
        npy_intp n = PyArray_SIZE(%(z)s);
        %(dtype)s * x = (%(dtype)s*) PyArray_DATA(%(x)s);
        %(dtype)s * z = (%(dtype)s*) PyArray_DATA(%(z)s);
        // We block to keep the data in l1
        // normal l1 size = 32k: 32k/2(input + output)/8(nb bytes of double)=2k
        // We stay below the 2k limit to leave space for
        // This is faster than the not blocking version
        for(int i=0;i<n;i+=2048){
            npy_intp nb = (n-i<2048)?n-i:2048;
            for(int j=0;j<nb;j++){
                z[i+j] = -x[i+j];
            }
            %(fct)s(nb, z+i, z+i);
            for(int j=0;j<nb;j++){
                z[i+j] = 1.0 /(1.0+z[i+j]);
            }
        }
        """ % locals()

    @staticmethod
    def gen_graph():
        """
        This method was used to generate the graph: sigmoid_prec.png in the doc.

        """
        data = numpy.arange(-15, 15, .1)
        val = 1 / (1 + numpy.exp(-data))

        def hard_sigm(x):
            return theano.tensor.nnet.hard_sigmoid(x)

        def ultra_fast_sigm(x):
            return theano.tensor.nnet.ultra_fast_sigmoid(x)
        val_hard = hard_sigm(data).eval()
        val_ultra = ultra_fast_sigm(data).eval()

        import matplotlib.pyplot as plt
        import os
        fig = plt.figure()
        ax = fig.add_subplot(111)
        ax.plot(data, val)
        ax.plot(data, val_ultra)
        ax.plot(data, val_hard)
        ax.grid(True)
        ax.legend(("sigmoid", "ultra_fast", "hard"), "upper left")
        fname = os.path.join(os.path.dirname(theano.__file__), '..',
                             'doc', 'library', 'tensor', 'nnet',
                             'sigmoid_prec.png')
        plt.savefig(fname)
        print("New picture saved at", fname)
        print(val_ultra.max())
        print(val_ultra.min())


scalar_sigmoid = ScalarSigmoid(scalar.upgrade_to_float, name='scalar_sigmoid')
sigmoid = elemwise.Elemwise(scalar_sigmoid, name='sigmoid')

sigmoid_inplace = elemwise.Elemwise(
    ScalarSigmoid(scalar.transfer_type(0)),
    inplace_pattern={0: 0},
    name='sigmoid_inplace',
)

pprint.assign(sigmoid, printing.FunctionPrinter('sigmoid'))
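

# Illustrative usage sketch (not part of the original module): the `sigmoid`
# Elemwise defined above is used like any other tensor Op.  The function and
# variable names below are arbitrary examples.
def _example_sigmoid_usage():
    x = tensor.dmatrix('x')
    f = theano.function([x], sigmoid(x))
    return f([[0.0, 1.0], [-1.0, 100.0]])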


class UltraFastScalarSigmoid(scalar.UnaryScalarOp):
    """
    This is just speed opt. Not for stability.

    """
    @staticmethod
    def st_impl(x):
        x = 0.5 * x
        # The if is a tanh approximation.
        if x >= 0:
            if x < 1.7:
                z = (1.5 * x / (1 + x))
            elif x < 3:
                z = (0.935409070603099 + 0.0458812946797165 * (x - 1.7))
            else:
                z = 0.99505475368673
        else:
            xx = -x
            if xx < 1.7:
                z = (1.5 * xx / (1 + xx))
            elif xx < 3:
                z = (0.935409070603099 + 0.0458812946797165 * (xx - 1.7))
            else:
                z = 0.99505475368673
            z = -z
        return 0.5 * (z + 1.)

    def impl(self, x):
        return UltraFastScalarSigmoid.st_impl(x)

    def c_code(self, node, name, inp, out, sub):
        x, = inp
        z, = out
        dtype = node.outputs[0].type.dtype_specs()[1]

        return """
        %(dtype)s x = 0.5 * %(x)s;
   // The if is a tanh approximate.
   if(x>=0) {
        %(z)s = (x<1.7 ? (1.5*x/(1+x)) :
                         (x<3 ? (0.935409070603099 + 0.0458812946797165*(x-1.7)):
                         0.99505475368673));
    } else {
        %(dtype)s xx = -x;
        %(z)s = -(xx<1.7 ? (1.5*xx/(1+xx)) :
                           (xx<3 ? (0.935409070603099 + 0.0458812946797165*(xx-1.7)):
                                   0.99505475368673));
    }

        //%(z)s = 0.5*(ultrafasttanh(0.5*x)+1.);
        %(z)s = 0.5*(%(z)s+1.);
        """ % locals()


ultra_fast_scalar_sigmoid = UltraFastScalarSigmoid(
    scalar.upgrade_to_float, name='ultra_fast_scalar_sigmoid')
ultra_fast_sigmoid = elemwise.Elemwise(ultra_fast_scalar_sigmoid,
                                       name='ultra_fast_sigmoid')

ultra_fast_sigmoid_inplace = elemwise.Elemwise(
    UltraFastScalarSigmoid(scalar.transfer_type(0)),
    inplace_pattern={0: 0},
    name='ultra_fast_sigmoid_inplace',
)

pprint.assign(ultra_fast_sigmoid,
              printing.FunctionPrinter('ultra_fast_sigmoid'))


@gof.local_optimizer([sigmoid])
def local_ultra_fast_sigmoid(node):
    """
    When enabled, change all sigmoid to ultra_fast_sigmoid.

    For example do mode.including('local_ultra_fast_sigmoid')
    or use the Theano flag optimizer_including=local_ultra_fast_sigmoid.

    This speeds up the sigmoid op by using an approximation.

    This is done after the stabilization and specialize phases
    to avoid interacting with them.

    """
    if (isinstance(node.op, elemwise.Elemwise) and
            node.op.scalar_op == scalar_sigmoid):
        out = ultra_fast_sigmoid(node.inputs[0])
        copy_stack_trace(node.outputs[0], out)

        def values_eq_approx_remove_low_prec(a, b):
            # atol is found by trial/error; other values could fail the tests.
            return theano.tensor.TensorType.values_eq_approx(a, b, atol=0.02)
        # Let DebugMode know that this optimization only approximates
        # the values.
        out.tag.values_eq_approx = values_eq_approx_remove_low_prec
        return [out]

theano.compile.optdb['uncanonicalize'].register('local_ultra_fast_sigmoid',
                                                local_ultra_fast_sigmoid)
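

# Illustrative sketch (not part of the original module): as the docstring of
# `local_ultra_fast_sigmoid` explains, the approximation is opt-in.  One way
# to enable it, assuming the default mode, is to include the optimization by
# name when compiling; the names below are arbitrary.
def _example_enable_ultra_fast_sigmoid():
    x = tensor.dvector('x')
    mode = theano.compile.get_default_mode().including(
        'local_ultra_fast_sigmoid')
    f = theano.function([x], sigmoid(x), mode=mode)
    return f([-3.0, 0.0, 3.0])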


def hard_sigmoid(x):
    """
    An approximation of sigmoid.

    More approximate and faster than ultra_fast_sigmoid.

    Approx in 3 parts: 0, scaled linear, 1.

    Removing the slope and shift does not make it faster.

    """
    # Use the same dtype as determined by "upgrade_to_float",
    # and perform the computation in that dtype.
    out_dtype = scalar.upgrade_to_float(scalar.Scalar(dtype=x.dtype))[0].dtype
    slope = tensor.constant(0.2, dtype=out_dtype)
    shift = tensor.constant(0.5, dtype=out_dtype)
    x = (x * slope) + shift
    x = tensor.clip(x, 0, 1)
    return x


@gof.local_optimizer([sigmoid])
def local_hard_sigmoid(node):
    if (isinstance(node.op, elemwise.Elemwise) and
            node.op.scalar_op == scalar_sigmoid):
        out = hard_sigmoid(node.inputs[0])
        copy_stack_trace(node.outputs[0], out)

        def values_eq_approx_remove_low_prec(a, b):
            # atol is found by trial/error; other values could fail the tests.
            return theano.tensor.TensorType.values_eq_approx(a, b, atol=0.1)
        # Let DebugMode know that this optimization only approximates
        # the values.
        out.tag.values_eq_approx = values_eq_approx_remove_low_prec
        return [out]

theano.compile.optdb['uncanonicalize'].register('local_hard_sigmoid',
                                                local_hard_sigmoid)


class ScalarSoftplus(scalar.UnaryScalarOp):
    """
    This helps numerical stability.
    """
    @staticmethod
    def static_impl(x):
        if x < -30.0:
            return 0.0
        if x > 30.0:
            return x
        # If x is an int8 or uint8, numpy.exp will compute the result in
        # half-precision (float16), where we want float32.
        x_dtype = str(getattr(x, 'dtype', ''))
        if x_dtype in ('int8', 'uint8'):
            return numpy.log1p(numpy.exp(x, sig='f'))
        return numpy.log1p(numpy.exp(x))

    def impl(self, x):
        return ScalarSoftplus.static_impl(x)

    def grad(self, inp, grads):
        x, = inp
        gz, = grads
        return [gz * scalar_sigmoid(x)]

    def c_code(self, node, name, inp, out, sub):
        x, = inp
        z, = out
        # The boundary checks keep exp() from overflowing; outside these
        # ranges softplus(x) is 0 or x up to float precision.
        if (node.inputs[0].type == scalar.float32 or
                node.inputs[0].type == scalar.float16):
            return """%(z)s = %(x)s < -103.0f ? 0.0 : %(x)s > 14.0f ? %(x)s : log1p(exp(%(x)s));""" % locals()
        elif node.inputs[0].type == scalar.float64:
            return """%(z)s = %(x)s < -745.0 ? 0.0 : %(x)s > 16.0 ? %(x)s : log1p(exp(%(x)s));""" % locals()
        else:
            raise NotImplementedError('only floatingpoint is implemented')

    def c_code_cache_version(self):
        v = super(ScalarSoftplus, self).c_code_cache_version()
        if v:
            return (2,) + v
        else:
            return v


scalar_softplus = ScalarSoftplus(scalar.upgrade_to_float,
                                 name='scalar_softplus')
softplus = elemwise.Elemwise(scalar_softplus, name='softplus')

pprint.assign(softplus, printing.FunctionPrinter('softplus'))


def _skip_mul_1(r):
    if r.owner and r.owner.op == tensor.mul:
        not_is_1 = [i for i in r.owner.inputs if not _is_1(i)]
        if len(not_is_1) == 1:
            return not_is_1[0]


logsigm_to_softplus = gof.PatternSub(
    (tensor.log, (sigmoid, 'x')),
    (tensor.neg, (softplus, (tensor.neg, 'x'))),
    allow_multiple_clients=True,
    values_eq_approx=values_eq_approx_remove_inf,
    skip_identities_fn=_skip_mul_1)


def _is_1(expr):
    """

    Returns
    -------
    bool
        True iff expr is a constant close to 1.

    """
    try:
        v = tensor.get_scalar_constant_value(expr)
        return numpy.allclose(v, 1)
    except NotScalarConstantError:
        return False


log1msigm_to_softplus = gof.PatternSub(
    (tensor.log,
     (tensor.sub, dict(pattern='y', constraint=_is_1), (sigmoid, 'x'))),
    (tensor.neg, (softplus, 'x')),
    allow_multiple_clients=True,
    values_eq_approx=values_eq_approx_remove_inf,
    skip_identities_fn=_skip_mul_1)

log1pexp_to_softplus = gof.PatternSub(
    (tensor.log1p, (tensor.exp, 'x')),
    (softplus, 'x'),
    values_eq_approx=values_eq_approx_remove_inf,
    allow_multiple_clients=True)

log1p_neg_sigmoid = gof.PatternSub(
    (tensor.log1p, (tensor.neg, (sigmoid, 'x'))),
    (tensor.neg, (softplus, 'x')),
    values_eq_approx=values_eq_approx_remove_inf,
    allow_multiple_clients=True)

opt.register_stabilize(logsigm_to_softplus, name='logsigm_to_softplus')
opt.register_stabilize(log1msigm_to_softplus, name='log1msigm_to_softplus')
opt.register_stabilize(log1pexp_to_softplus, name='log1pexp_to_softplus')
opt.register_stabilize(log1p_neg_sigmoid, name='log1p_neg_sigmoid')
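

# Illustrative sketch (not part of the original module): the PatternSub
# rewrites registered above are meant to turn expressions such as
# log(sigmoid(x)) into the numerically stable -softplus(-x).  Inspecting the
# compiled graph with debugprint is one way to check that they fired; the
# names below are arbitrary.
def _example_log_sigmoid_stabilization():
    x = tensor.dvector('x')
    f = theano.function([x], tensor.log(sigmoid(x)))
    # The optimized graph is expected to contain softplus instead of
    # log/sigmoid, so the result stays finite even for very negative inputs.
    theano.printing.debugprint(f)
    return f([-800.0, 0.0, 800.0])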


def is_1pexp(t, only_process_constants=True):
    """

    Returns
    -------
    object
        If 't' is of the form (1+exp(x)), return (False, x).
        Else return None.

    """
    if t.owner and t.owner.op == tensor.add:
        scalars, scalar_inputs, nonconsts = opt.scalarconsts_rest(
            t.owner.inputs, only_process_constants=only_process_constants)
        # scalar_inputs are potentially dimshuffled and filled with scalars
        if len(nonconsts) == 1:
            maybe_exp = nonconsts[0]
            if maybe_exp.owner and maybe_exp.owner.op == tensor.exp:
                # Verify that the constant terms add up to 1.
                if scalars:
                    scal_sum = scalars[0]
                    for s in scalars[1:]:
                        scal_sum = scal_sum + s
                    if numpy.allclose(scal_sum, 1):
                        return False, maybe_exp.owner.inputs[0]
                if config.warn.identify_1pexp_bug:
                    warnings.warn(
                        'Although your current code is fine, please note '
                        'that Theano versions prior to 0.5 (more '
                        'specifically, prior to commit 7987b51 on '
                        '2011-12-18) may have yielded an incorrect result. '
                        'To remove this warning, either set the '
                        '`warn.identify_1pexp_bug` config option to False, '
                        'or `warn.ignore_bug_before` to at least \'0.4.1\'.')
    return None


def is_exp(var):
    """
    Match a variable with either of the `exp(x)` or `-exp(x)` patterns.

    Parameters
    ----------
    var
        The Variable to analyze.

    Returns
    -------
    tuple
        A pair (b, x) with `b` a boolean set to True if `var` is of the
        form `-exp(x)` and False if `var` is of the form `exp(x)`. If `var`
        cannot be cast into either form, then return `None`.

    """
    neg = False
    neg_info = is_neg(var)
    if neg_info is not None:
        neg = True
        var = neg_info
    if var.owner and var.owner.op == tensor.exp:
        return neg, var.owner.inputs[0]


def is_mul(var):
    """
    Match a variable with `x * y * z * ...`.

    Parameters
    ----------
    var
        The Variable to analyze.

    Returns
    -------
    object
        A list [x, y, z, ...] if `var` is of the form `x * y * z * ...`,
        or None if `var` cannot be cast into this form.

    """
    if var.owner and var.owner.op == tensor.mul:
        return var.owner.inputs
    else:
        return None


def partition_num_or_denom(r, f):
    if r.owner and r.owner.op == tensor.mul:
        a = r.owner.inputs
    else:
        a = [r]

    # Gather the terms matched by `f` and keep the remaining ones apart.
    f_terms = []
    neg = False
    rest = []
    for t in a:
        f_t = f(t)
        if f_t is None:
            rest.append(t)
        else:
            neg_t, f_t = f_t
            f_terms.append(f_t)
            neg ^= neg_t  # bit flip if neg_t is true
    return f_terms, rest, neg


def is_neg(var):
    """
    Match a variable with the `-x` pattern.

    Parameters
    ----------
    var
        The Variable to analyze.

    Returns
    -------
    object
        `x` if `var` is of the form `-x`, or None otherwise.

    """
    apply = var.owner
    if not apply:
        return None
    # First match against `tensor.neg`.
    if apply.op == tensor.neg:
        return apply.inputs[0]
    # Then match against a multiplication by -1.
    if apply.op == tensor.mul and len(apply.inputs) >= 2:
        for idx, mul_input in enumerate(apply.inputs):
            try:
                constant = tensor.get_scalar_constant_value(mul_input)
                is_minus_1 = numpy.allclose(constant, -1)
            except NotScalarConstantError:
                is_minus_1 = False
            if is_minus_1:
                # Found a multiplication by -1.
                if len(apply.inputs) == 2:
                    # Only return the other input.
                    return apply.inputs[1 - idx]
                else:
                    # Return the multiplication of all other inputs.
                    return tensor.mul(*(apply.inputs[0:idx] +
                                        apply.inputs[idx + 1:]))
    # No match.
    return None
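

# Illustrative sketch (not part of the original module): small sanity checks
# of the pattern helpers defined above, on hand-built graphs.  The names
# below are arbitrary.
def _example_pattern_helpers():
    x = tensor.dscalar('x')
    assert is_neg(-x) is x
    assert is_exp(-tensor.exp(x)) == (True, x)
    assert is_1pexp(1 + tensor.exp(x)) == (False, x)
    assert is_mul(x * x) is not None and is_mul(x) is None
    return True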


@opt.register_stabilize
@gof.local_optimizer([tensor.true_div])
def local_exp_over_1_plus_exp(node):
    """
    exp(x)/(1+exp(x)) -> sigm(x)
    c/(1+exp(x)) -> c*sigm(-x)

    """
    # This optimization should be done for numerical stability,
    # so we don't care to check client counts.
    if node.op == tensor.true_div:

        # Find all the exp() terms in the numerator.
        num, denom = node.inputs
        num_exp_x, num_rest, num_neg = partition_num_or_denom(num, is_exp)
        denom_1pexp, denom_rest, \
            denom_neg = partition_num_or_denom(denom, is_1pexp)

        sigmoids = []
        for t in denom_1pexp:
            if t in num_exp_x:
                # case: exp(x) /(1+exp(x))
                sigmoids.append(sigmoid(t))
                del num_exp_x[num_exp_x.index(t)]
            else:
                # case: 1/(1+exp(x))
                sigmoids.append(sigmoid(-t))
            copy_stack_trace(node.outputs[0], sigmoids[-1])

        if not sigmoids:  # we didn't find any.  abort
            return
        # Put the new numerator together.
        new_num = sigmoids + [tensor.exp(t) for t in num_exp_x] + num_rest
        if len(new_num) == 1:
            new_num = new_num[0]
        else:
            new_num = tensor.mul(*new_num)

        if num_neg ^ denom_neg:
            new_num = -new_num

        copy_stack_trace(num, new_num)

        if len(denom_rest) == 0:
            return [new_num]
        elif len(denom_rest) == 1:
            out = new_num / denom_rest[0]
        else:
            out = new_num / tensor.mul(*denom_rest)

        copy_stack_trace(node.outputs[0], out)
        return [out]


def parse_mul_tree(root):
    """
    Parse a tree of multiplications starting at the given root.

    Parameters
    ----------
    root
        The variable at the root of the tree.

    Returns
    -------
    object
        A tree where each non-leaf node corresponds to a multiplication
        in the computation of `root`, represented by the list of its inputs.
        Each input is a pair [n, x] with `n` a boolean value indicating whether
        sub-tree `x` should be negated.

    Examples
    --------
        x * y               -> [False, [[False, x], [False, y]]]
        -(x * y)            -> [True, [[False, x], [False, y]]]
        -x * y              -> [False, [[True, x], [False, y]]]
        -x                  -> [True, x]
        (x * y) * -z        -> [False, [[False, [[False, x], [False, y]]],
                                        [True, z]]]

    """
    # Is `root` a multiplication?
    mul_info = is_mul(root)
    if mul_info is None:
        # Is `root` a negation?
        neg_info = is_neg(root)
        if neg_info is None:
            # `root` is a leaf.
            return [False, root]
        else:
            # `root` is a negation: parse the rest and flip the sign.
            neg, sub_tree = parse_mul_tree(neg_info)
            return [not neg, sub_tree]
    else:
        # Recurse into the inputs of the multiplication.
        return [False, list(map(parse_mul_tree, mul_info))]


def replace_leaf(arg, leaves, new_leaves, op, neg):
    """
    Attempt to replace a leaf of a multiplication tree.

    We search for a leaf in `leaves` whose argument is `arg`, and if we find
    one, we remove it from `leaves` and add to `new_leaves` a leaf with
    argument `arg` and variable `op(arg)`.

    Parameters
    ----------
    arg
        The argument of the leaf we are looking for.
    leaves
        List of leaves to look into. Each leaf should be a pair
        (x, l) with `x` the argument of the Op found in the leaf, and `l` the
        actual leaf as found in a multiplication tree output by `parse_mul_tree`
        (i.e. a pair [boolean, variable]).
    new_leaves
        If a replacement occurred, then the leaf is removed from `leaves`
        and added to the list `new_leaves` (after being modified by `op`).
    op
        A function that, when applied to `arg`, returns the Variable
        we want to replace the original leaf variable with.
    neg : bool
        If True, then the boolean value associated to the leaf should
        be swapped. If False, then this value should remain unchanged.

    Returns
    -------
    bool
        True if a replacement occurred, or False otherwise.

    """
    for idx, x in enumerate(leaves):
        if x[0] == arg:
            x[1][0] ^= neg
            x[1][1] = op(arg)
            leaves.pop(idx)
            new_leaves.append(x)
            return True
    return False


def simplify_mul(tree):
    """
    Simplify a multiplication tree.

    Parameters
    ----------
    tree
        A multiplication tree (as output by `parse_mul_tree`).

    Returns
    -------
    object
        A multiplication tree computing the same output as `tree` but without
        useless multiplications by 1 nor -1 (identified by leaves of the form
        [False, None] or [True, None] respectively). Useless multiplications
        (with less than two inputs) are also removed from the tree.

    """
    neg, inputs = tree
    if isinstance(inputs, list):
        # Recurse through inputs.
        s_inputs = []
        for s_i in imap(simplify_mul, inputs):
            if s_i[1] is None:
                # Multiplication by +/-1.
                neg ^= s_i[0]
            else:
                s_inputs.append(s_i)
        if not s_inputs:
            # The multiplication tree is empty.
            rval = [neg, None]
        elif len(s_inputs) == 1:
            # A multiplication with a single input.
            s_inputs[0][0] ^= neg
            rval = s_inputs[0]
        else:
            rval = [neg, s_inputs]
    else:
        rval = tree
    return rval


def compute_mul(tree):
    """
    Compute the Variable that is the output of a multiplication tree.

    This is the inverse of the operation performed by `parse_mul_tree`, i.e.
    compute_mul(parse_mul_tree(tree)) == tree.

    Parameters
    ----------
    tree
        A multiplication tree (as output by `parse_mul_tree`).

    Returns
    -------
    object
        A Variable that computes the multiplication represented by the tree.

    """
    neg, inputs = tree
    if inputs is None:
        raise AssertionError(
            'Function `compute_mul` found a missing leaf, did you forget to '
            'call `simplify_mul` on the tree first?')
    elif isinstance(inputs, list):
        rval = tensor.mul(*list(map(compute_mul, inputs)))
    else:
        rval = inputs
    if neg:
        rval = -rval
    return rval
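

# Illustrative sketch (not part of the original module): `compute_mul` is the
# inverse of `parse_mul_tree` (after `simplify_mul`), as described above.
# The variable names below are arbitrary.
def _example_mul_tree_round_trip():
    x = tensor.dscalar('x')
    y = tensor.dscalar('y')
    tree = parse_mul_tree(-x * y)  # [False, [[True, x], [False, y]]]
    rebuilt = compute_mul(simplify_mul(tree))
    return theano.function([x, y], rebuilt)(2.0, 3.0)  # == -6.0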


def perform_sigm_times_exp(tree, exp_x=None, exp_minus_x=None, sigm_x=None,
                           sigm_minus_x=None, parent=None, child_idx=None,
                           full_tree=None):
    """
    Core processing of the `local_sigm_times_exp` optimization.

    This recursive function operates on a multiplication tree as output by
    `parse_mul_tree`. It walks through the tree and modifies it in-place
    by replacing matching pairs (exp, sigmoid) with the desired optimized
    version.

    Parameters
    ----------
    tree
        The sub-tree to operate on.
    exp_x
        List of arguments x so that `exp(x)` exists somewhere in the whole
        multiplication tree. Each argument is a pair (x, leaf) with `x` the
        argument of the exponential, and `leaf` the corresponding leaf in the
        multiplication tree (of the form [n, exp(x)] -- see `parse_mul_tree`).
        If None, this argument is initialized to an empty list.
    exp_minus_x
        Similar to `exp_x`, but for `exp(-x)`.
    sigm_x
        Similar to `exp_x`, but for `sigmoid(x)`.
    sigm_minus_x
        Similar to `exp_x`, but for `sigmoid(-x)`.
    parent
        Parent of `tree` (None if `tree` is the global root).
    child_idx
        Index of `tree` in its parent's inputs (None if `tree` is the global
        root).
    full_tree
        The global multiplication tree (should not be set except by recursive
        calls to this function). Used for debugging only.

    Returns
    -------
    bool
        True if a modification was performed somewhere in the whole multiplication
        tree, or False otherwise.

    """
    if exp_x is None:
        exp_x = []
    if exp_minus_x is None:
        exp_minus_x = []
    if sigm_x is None:
        sigm_x = []
    if sigm_minus_x is None:
        sigm_minus_x = []
    if full_tree is None:
        full_tree = tree
    if False:  # Debug code.
        print('<perform_sigm_times_exp>')
        print('  full_tree   = %s' % full_tree)
        print('  tree        = %s' % tree)
        print('  exp_x       = %s' % exp_x)
        print('  exp_minus_x = %s' % exp_minus_x)
        print('  sigm_x      = %s' % sigm_x)
        print('  sigm_minus_x= %s' % sigm_minus_x)
    neg, inputs = tree
    if isinstance(inputs, list):
        # Recurse through the inputs of the multiplication.
        rval = False
        for sub_idx, sub_tree in enumerate(inputs):
            rval |= perform_sigm_times_exp(
                tree=sub_tree, parent=tree, child_idx=sub_idx,
                exp_x=exp_x, exp_minus_x=exp_minus_x, sigm_x=sigm_x,
                sigm_minus_x=sigm_minus_x, full_tree=full_tree)
        return rval
    else:
        # Reached a leaf: if it is an exponential or a sigmoid, first try to
        # match it against a leaf seen previously and, on success, fuse the
        # pair into a single sigmoid.  Otherwise register the leaf so it may
        # be matched later.
        var = inputs
        keep_it = False
        exp_info = is_exp(var)
        if exp_info is not None:
            exp_neg, exp_arg = exp_info
            neg ^= exp_neg
            neg_arg = is_neg(exp_arg)
            if neg_arg is None:
                # We have exp(x): look for a previously-seen sigmoid(-x).
                if not replace_leaf(exp_arg, sigm_minus_x, sigm_x,
                                    sigmoid, neg):
                    exp_x.append((exp_arg, tree))
                    keep_it = True
            else:
                # We have exp(-x): look for a previously-seen sigmoid(x).
                if not replace_leaf(neg_arg, sigm_x, sigm_minus_x,
                                    lambda x: sigmoid(-x), neg):
                    exp_minus_x.append((neg_arg, tree))
                    keep_it = True
        elif var.owner and var.owner.op == sigmoid:
            sigm_arg = var.owner.inputs[0]
            neg_arg = is_neg(sigm_arg)
            if neg_arg is None:
                # We have sigmoid(x): look for a previously-seen exp(-x).
                if not replace_leaf(sigm_arg, exp_minus_x, sigm_minus_x,
                                    lambda x: sigmoid(-x), neg):
                    sigm_x.append((sigm_arg, tree))
                    keep_it = True
            else:
                # We have sigmoid(-x): look for a previously-seen exp(x).
                if not replace_leaf(neg_arg, exp_x, sigm_x, sigmoid, neg):
                    sigm_minus_x.append((neg_arg, tree))
                    keep_it = True
        else:
            # It is not an exponential nor a sigmoid.
            keep_it = True
        if not keep_it:
            # This leaf was fused into another one: replace it by a
            # multiplication by 1, i.e. the leaf [False, None].
            assert parent is not None
            parent[1][child_idx] = [False, None]
        return not keep_it


@opt.register_stabilize
@gof.local_optimizer([tensor.mul])
def local_sigm_times_exp(node):
    """
    exp(x) * sigm(-x) -> sigm(x)
    exp(-x) * sigm(x) -> sigm(-x)

    todo: add stack traces to the intermediate variables
    """
    # Bail early if it is not a multiplication.
    if node.op != tensor.mul:
        return None
    # Obtain tree of multiplications starting at this node.
    mul_tree = parse_mul_tree(node.outputs[0])
    # Perform core optimization.
    did_something = perform_sigm_times_exp(mul_tree)
    if not did_something:
        # No change.
        return None
    # The optimization may have introduced multiplications by 1 in the tree:
    # get rid of them.
    mul_tree = simplify_mul(mul_tree)
    # Recompute final output based on the updated tree.
    out = compute_mul(mul_tree)
    # Keep the stack trace.
    copy_stack_trace(node.outputs[0], out)
    return [out]
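

# Illustrative sketch (not part of the original module): the stabilization
# registered above is intended to rewrite exp(x) * sigmoid(-x) into
# sigmoid(x), which avoids overflowing exp() for large x.  The names below
# are arbitrary.
def _example_sigm_times_exp():
    x = tensor.dvector('x')
    f = theano.function([x], tensor.exp(x) * sigmoid(-x))
    # Finite (close to 1.0) for the last entry if the rewrite fired;
    # a naive evaluation would give inf * 0 = nan.
    return f([0.0, 30.0, 800.0])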


@opt.register_stabilize
@gof.local_optimizer([tensor.inv])
def local_inv_1_plus_exp(node):
    """
    1/(1+exp(x)) -> sigm(-x)

    """
    # This optimization should be done for numerical stability,
    # so we don't care to check client counts.
    if node.op == tensor.inv:
        inv_arg = node.inputs[0]
        if inv_arg.owner and inv_arg.owner.op == tensor.add:
            scalars, scalar_inputs, nonconsts = opt.scalarconsts_rest(
                inv_arg.owner.inputs, only_process_constants=True)
            # scalar_inputs are potentially dimshuffled and filled with scalars
            if (len(nonconsts) == 1 and
                    nonconsts[0].owner and
                    nonconsts[0].owner.op == tensor.exp and
                    scalars and numpy.allclose(numpy.sum(scalars), 1)):
                out = opt._fill_chain(
                    sigmoid(tensor.neg(nonconsts[0].owner.inputs[0])),
                    scalar_inputs)
                # Keep combined stack traces of
                #     exp(x):           nonconsts[0],
                #     1 + exp(x):       inv_arg,
                #     1 / (1 + exp(x)): node.outputs[0]
                copy_stack_trace(
                    [nonconsts[0], inv_arg, node.outputs[0]], out)
                return out


@gof.local_optimizer([tensor.sub])
def local_1msigmoid(node):
    """
    1-sigm(x) -> sigm(-x)

    """
    if node.op == tensor.sub:
        sub_l, sub_r = node.inputs
        if len(sub_r.clients) > 1:
            return  # graph is using both sigm and 1-sigm
        if sub_r.owner and sub_r.owner.op == sigmoid:
            try:
                val_l = tensor.get_scalar_constant_value(sub_l)
            except Exception:
                return
            if numpy.allclose(numpy.sum(val_l), 1):
                out = sigmoid(-sub_r.owner.inputs[0])
                copy_stack_trace([sub_r, node.outputs[0]], out)
                return [out]


register_local_1msigmoid = False

if register_local_1msigmoid:
    opt.register_canonicalize(local_1msigmoid)