"""Define new Ops from existing Ops"""
from __future__ import absolute_import, division, print_function
from functools import reduce, partial
from collections import OrderedDict

import theano
from theano import gof
from theano.compat import izip
from theano.compile.function_module import orig_function
from theano.compile import SharedVariable, rebuild_collect_shared, optdb
from theano.gof import Variable, ops_with_inner_function
from theano.gof.graph import io_connection_pattern
from theano.gof.null_type import NullType
from theano.gradient import DisconnectedType


class OpFromGraph(gof.Op):
    """
    This creates an ``Op`` from lists of input and output variables.
    The signature is similar to :func:`theano.function <theano.function>`
    and the resulting ``Op``'s perform will do the same operation as::

        orig_function(inputs, outputs, **kwargs)

    Currently does not support ``updates`` or ``givens`` arguments.

    Parameters
    ----------

    inputs: list of :class:`Variable <theano.gof.Variable>`

    outputs: list of :class:`Variable <theano.gof.Variable>`

    inline: bool, optional
        Defaults to ``False``

        ``True`` : Cause the Op's original graph to be used during
        compilation; the Op will not be visible in the compiled graph,
        its internal graph is inlined instead.

        ``False`` : will use a pre-compiled function inside.

    grad_overrides : single or list of {'default', OpFromGraph, callable, Variable with special type}, optional
        Defaults to ``'default'``.

        ``'default'`` : Do not override, use default grad() result

        OpFromGraph instance : Override with another OpFromGraph; it should
        accept inputs in the same order and of the same types as the
        "inputs" and "output_grads" arguments one would get in the grad()
        method.

        callable : similar to OpFromGraph instance, must return list of
        :class:`Variable <theano.gof.Variable>`.

        Variable :
            ``NullType() instance`` : Treat as non-differentiable (see
            Example 4 below)
            ``DisconnectedType() instance`` : Treat as disconnected
            gradient, numerically gives zero

        list: Each OpFromGraph/callable must return a single
        :class:`Variable <theano.gof.Variable>`. Each list element
        corresponds to the gradient of a specific input, and the length
        of the list must equal the number of inputs.

    rop_overrides : single or list of {'default', OpFromGraph, callable, Variable with special type}, optional
        Defaults to ``'default'``.

        ``'default'`` : Do not override, use default R_op() result

        OpFromGraph instance : Override with another OpFromGraph; it should
        accept inputs in the same order and of the same types as the
        "inputs" and "eval_points" arguments one would get in the R_op()
        method.

        callable : similar to OpFromGraph instance, must return list of
        :class:`Variable <theano.gof.Variable>` (see Example 5 below).

        Variable :
            ``NullType() instance`` : Treat as non-differentiable
            ``DisconnectedType() instance`` : Treat as zero since DisconnectedType is not yet supported in R_op

        list: Each OpFromGraph/callable must return a single
        :class:`Variable <theano.gof.Variable>`. Each list element
        corresponds to a specific output of R_op, and the length of the
        list must equal the number of outputs.

    name : string, optional
        A name for debugging purposes

    \*\*kwargs : optional
        Check
        :func:`orig_function <theano.compile.function_module.orig_function>`
        for more arguments, only works when not inline.


    .. TODO:
        - examples for a multi-layer mlp. where?
        - __hash__, __eq__ otherwise won't merge, try
          gof.opt.is_same_graph_with_merge(op1.local_outputs,
          op2.local_outputs)
        - c_code() to remove the double overhead?
        - grad() make it support DisconnectedType and the new interface
        - extend grad() to L_op
        - add support for NullType and DisconnectedType when R_op supports them
        - check how it works with updates.
        - add test with constant as input or inside the inner graph.
        - Add support for the GPU? Probably just need an opt to remove transfer
        - Add support to pickle this Op.
        - Add support/test with random generator
        - Add optimization to remove unused inputs/outputs
        - Add optimization to work inplace on inputs when not inline

    Notes
    -----
    - We support shared variables in the inner graph. This is automatic
      and invisible to the user. They can appear as inputs of the node or
      inside the inner graph.
    - We support unused inputs. This is needed for the grad.
    - We support nested OpFromGraph.
    - ``inline=True`` will cause better runtime optimization at the cost
      of compilation time. Currently only works with ``fast_compile`` or
      ``fast_run`` mode.
    - It is recommended to provide pure functions (no side effects like
      setting a global variable) as the callable(s). The callable(s)
      supplied for overriding gradient/rop will be called only once at
      the first call to grad/R_op, and will be converted to OpFromGraph
      instances.

    Examples
    --------

    Example 1:

    .. code-block:: python

        from theano import function, OpFromGraph, tensor
        x, y, z = tensor.scalars('xyz')
        e = x + y * z
        op = OpFromGraph([x, y, z], [e])
        # op behaves like a normal theano op
        e2 = op(x, y, z) + op(z, y, x)
        fn = function([x, y, z], [e2])

    Example 2 with shared variable:

    .. code-block:: python

        import numpy as np
        import theano
        from theano import config, function, OpFromGraph, tensor
        x, y, z = tensor.scalars('xyz')
        s = theano.shared(np.random.rand(2, 2).astype(config.floatX))
        e = x + y * z + s
        op = OpFromGraph([x, y, z], [e])
        # op behaves like a normal theano op
        e2 = op(x, y, z) + op(z, y, x)
        fn = function([x, y, z], [e2])

    Example 3 with gradient override:

    .. code-block:: python

        from theano import function, OpFromGraph, tensor, grad
        x, y, z = tensor.scalars('xyz')
        e = x + y * z
        def rescale_dy(inps, grads):
            x, y, z = inps
            g, = grads
            return z*2
        op = OpFromGraph(
            [x, y, z], [e], grad_overrides=['default', rescale_dy, 'default'])
        e2 = op(x, y, z)
        dx, dy, dz = grad(e2, [x, y, z])
        fn = function([x, y, z], [dx, dy, dz])
        # the gradient wrt y is now doubled
        fn(2., 3., 4.) # [1., 8., 3.]
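
    Example 4 with gradient override by a special Variable, a minimal
    sketch of marking one input as non-differentiable with a ``NullType``
    instance (assumes the ``NullType()()`` idiom also used by
    ``theano.gradient.grad_undefined``):

    .. code-block:: python

        from theano import function, OpFromGraph, tensor, grad
        from theano.gof.null_type import NullType
        x, y, z = tensor.scalars('xyz')
        e = x + y * z
        # treat the gradient wrt y as mathematically undefined
        op = OpFromGraph(
            [x, y, z], [e],
            grad_overrides=['default', NullType()(), 'default'])
        e2 = op(x, y, z)
        dx, dz = grad(e2, [x, z])
        fn = function([x, y, z], [dx, dz])
        # asking for grad(e2, y) would raise a NullTypeGradError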

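    Example 5 with R_op override, a minimal sketch (``Rop`` is imported
    from ``theano.gradient`` here; the callable receives "inputs" and
    "eval_points" and must return one Variable per output):

    .. code-block:: python

        from theano import function, OpFromGraph, tensor
        from theano.gradient import Rop
        x, y = tensor.scalars('xy')
        e = x * y
        def custom_rop(inps, eval_points):
            x, y = inps
            ex, ey = eval_points
            # pretend the Jacobian wrt x is doubled
            return [2 * y * ex + x * ey]
        op = OpFromGraph([x, y], [e], rop_overrides=custom_rop)
        u, v = tensor.scalars('uv')
        jv = Rop(op(x, y), [x, y], [u, v])
        fn = function([x, y, u, v], jv)
        fn(2., 3., 1., 0.) # 6.0 instead of the default 3.0
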
    """

    @staticmethod
    def _filter_grad_var(grad, inp):
        # Returns (filtered_var, overrider_var).  A grad() call may return a
        # Variable of NullType or DisconnectedType, which cannot be an output
        # of the inner gradient OpFromGraph; such variables are replaced by
        # zeros and remembered so grad() can substitute them back afterwards.
        if isinstance(grad.type, (NullType, DisconnectedType)):
            if hasattr(inp, 'zeros_like'):
                return inp.zeros_like(), grad
            else:
                return theano.tensor.constant(0.), grad
        else:
            return grad, None

    @staticmethod
    def _filter_rop_var(inpJ, out):
        # Like _filter_grad_var, but for R_op: NullType variables are
        # remembered, while DisconnectedType simply becomes zeros since
        # R_op does not support disconnected outputs yet.
        if isinstance(inpJ.type, NullType):
            return out.zeros_like(), inpJ
        if isinstance(inpJ.type, DisconnectedType):
            return out.zeros_like(), None
        return inpJ, None

    def __init__(self, inputs, outputs, inline=False,
                 grad_overrides='default', rop_overrides='default',
                 name=None, **kwargs):
        if not isinstance(outputs, list):
            raise TypeError('outputs must be list, got %s' % type(outputs))
        for i in inputs + outputs:
            if not isinstance(i, gof.Variable):
                raise TypeError(
                    'inputs and outputs must be Variable instances', i)
        if 'updates' in kwargs or 'givens' in kwargs:
            raise TypeError('updates and givens are not allowed here')
        self.is_inline = inline
        # To correctly support shared variables the inner function must not
        # see them; they are passed to the node as extra inputs instead.
        self.shared_inputs = [var for var in gof.graph.inputs(outputs)
                              if isinstance(var, SharedVariable)]
        shared_vars = [var.type() for var in self.shared_inputs]

        new = rebuild_collect_shared(
            outputs,
            inputs=inputs + shared_vars,
            replace=dict(izip(self.shared_inputs, shared_vars)),
            copy_inputs_over=False)
        (local_inputs, local_outputs,
         [clone_d, update_d, update_expr, shared_inputs]) = new
        assert len(local_inputs) == len(inputs) + len(self.shared_inputs)
        assert len(local_outputs) == len(outputs)
        assert not update_d
        assert not update_expr
        assert not shared_inputs

        self.local_inputs = local_inputs
        self.local_outputs = local_outputs
        self.inputs = inputs
        self.outputs = outputs
        self.kwargs = kwargs
        self.input_types = [inp.type for inp in inputs]
        self.output_types = [out.type for out in outputs]
        self.set_grad_overrides(grad_overrides)
        self.set_rop_overrides(rop_overrides)
        if name is not None and not isinstance(name, str):
            raise TypeError('name must be None or string object')
        self.name = name

    def __eq__(self, other):
        # TODO: recognize a copy
        return self is other

    def __hash__(self):
        # TODO: use internal variables in hash
        return hash(type(self))

    def __str__(self):
        name = self.__class__.__name__ if self.name is None else self.name
        is_inline = self.is_inline
        return '%(name)s{inline=%(is_inline)s}' % locals()

    def _recompute_grad_op(self):
        """
        converts self._grad_op from user-supplied form to type(self) instance

        """
        inputs = self.local_inputs
        outputs = self.local_outputs
        inp_len = len(inputs)
        grad_op = self._grad_op
        if isinstance(grad_op, OpFromGraph):
            if not self._grad_op_is_cached:
                self._grad_op_is_cached = True
                self._grad_op_stypes_l = [None] * inp_len
            return

        output_grads = [out_t() for out_t in self.output_types]
        fn_grad = partial(
            theano.gradient.grad,
            cost=None,
            disconnected_inputs='ignore',
            return_disconnected='Disconnected',
            null_gradients='return',
            known_grads=OrderedDict(izip(outputs, output_grads)))
        TYPE_ERR_MSG = ("Gradient override should be (single or list of)"
                        "'default' | OpFromGraph | callable | Variable "
                        "with NullType or DisconnectedType, got %s")
        STYPE_ERR_MSG = ("Overriding Variable instance can only have type"
                         " of DisconnectedType or NullType, got %s")
        if grad_op == 'default':
            gdefaults_l = fn_grad(wrt=inputs)
            all_grads_l, all_grads_ov_l = izip(*[
                OpFromGraph._filter_grad_var(grad, inp)
                for grad, inp in izip(gdefaults_l, inputs)])
            all_grads_l = list(all_grads_l)
            all_grads_ov_l = list(all_grads_ov_l)
        elif isinstance(grad_op, Variable):
            if isinstance(grad_op.type, (NullType, DisconnectedType)):
                all_grads_l = [inp.zeros_like() for inp in inputs]
                all_grads_ov_l = [grad_op.type() for _ in range(inp_len)]
            else:
                raise ValueError(STYPE_ERR_MSG % grad_op.type)
        elif isinstance(grad_op, list):
            goverrides_l = grad_op
            if len(goverrides_l) != inp_len:
                raise ValueError(
                    'Need to override %d gradients, got %d' % (
                        inp_len, len(goverrides_l)), goverrides_l)
            # compute non-overriding downstream grads from upstream grads;
            # it is normal that some inputs are disconnected, thus 'ignore'
            wrt_l = [lin for lin, gov in izip(inputs, goverrides_l)
                     if gov == 'default']
            gdefaults = iter(fn_grad(wrt=wrt_l) if wrt_l else [])
            all_grads_l = []
            all_grads_ov_l = []
            for lin, fn_gov in izip(inputs, goverrides_l):
                if fn_gov == 'default':
                    gnext, gnext_ov = OpFromGraph._filter_grad_var(
                        next(gdefaults), lin)
                    all_grads_l.append(gnext)
                    all_grads_ov_l.append(gnext_ov)
                elif isinstance(fn_gov, Variable):
                    if isinstance(fn_gov.type, (NullType, DisconnectedType)):
                        all_grads_l.append(lin.zeros_like())
                        all_grads_ov_l.append(fn_gov.type())
                    else:
                        raise ValueError(STYPE_ERR_MSG % fn_gov.type)
                else:
                    if not hasattr(fn_gov, '__call__'):
                        raise TypeError(TYPE_ERR_MSG % fn_gov)
                    gov, gov_ov = OpFromGraph._filter_grad_var(
                        fn_gov(inputs, output_grads), lin)
                    all_grads_l.append(gov)
                    all_grads_ov_l.append(gov_ov)
        else:
            if not hasattr(grad_op, '__call__'):
                raise TypeError(TYPE_ERR_MSG % grad_op)
            goverrides_l = grad_op(inputs, output_grads)
            if not isinstance(goverrides_l, list):
                raise TypeError(
                    'Gradient overriding function should return a list, '
                    'got "%s"' % type(goverrides_l))
            all_grads_l, all_grads_ov_l = izip(*[
                OpFromGraph._filter_grad_var(grad, inp)
                for grad, inp in izip(goverrides_l, inputs)])
            if len(all_grads_l) != len(inputs):
                raise ValueError(
                    'Gradient overriding function should return list of '
                    '%d outputs, got %d' % (inp_len, len(all_grads_l)))
            all_grads_l = list(all_grads_l)
            all_grads_ov_l = list(all_grads_ov_l)
        self._grad_op = type(self)(
            inputs=inputs + output_grads,
            outputs=all_grads_l,
            inline=self.is_inline,
            name=(None if self.name is None else self.name + '_grad'),
            on_unused_input='ignore')
        self._grad_op_stypes_l = all_grads_ov_l
        self._grad_op_is_cached = True

    def _recompute_rop_op(self):
        """
        converts self._rop_op from user-supplied form to type(self) instance

        """
        inputs = self.local_inputs
        outputs = self.local_outputs
        out_len = len(outputs)
        rop_op = self._rop_op
        if isinstance(rop_op, OpFromGraph):
            if not self._rop_op_is_cached:
                self._rop_op_is_cached = True
                self._rop_op_stypes_l = [None] * out_len
            return

        eval_points = [inp_t() for inp_t in self.input_types]
        fn_rop = partial(theano.gradient.Rop,
                         wrt=inputs, eval_points=eval_points)
        TYPE_ERR_MSG = ("R_op overrides should be (single or list of)"
                        "OpFromGraph | 'default' | None | 0 | callable, "
                        "got %s")
        STYPE_ERR_MSG = ("Overriding Variable instance can only have type"
                         " of DisconnectedType or NullType, got %s")
        if rop_op == 'default':
            rdefaults_l = fn_rop(f=outputs)
            all_rops_l, all_rops_ov_l = izip(*[
                OpFromGraph._filter_rop_var(rop, out)
                for rop, out in izip(rdefaults_l, outputs)])
            all_rops_l = list(all_rops_l)
            all_rops_ov_l = list(all_rops_ov_l)
        elif isinstance(rop_op, Variable):
            if isinstance(rop_op.type, NullType):
                all_rops_l = [out.zeros_like() for out in outputs]
                all_rops_ov_l = [rop_op.type() for _ in range(out_len)]
            elif isinstance(rop_op.type, DisconnectedType):
                all_rops_l = [out.zeros_like() for out in outputs]
                all_rops_ov_l = [None] * out_len
            else:
                raise ValueError(STYPE_ERR_MSG % rop_op.type)
        elif isinstance(rop_op, list):
            roverrides_l = rop_op
            if len(roverrides_l) != out_len:
                raise ValueError(
                    'Need to override %d Rop, got %d' % (
                        out_len, len(roverrides_l)), roverrides_l)
            # compute R_op only for the outputs that keep the default
            odefaults_l = [lo for lo, rov in izip(outputs, roverrides_l)
                           if rov == 'default']
            rdefaults_l = fn_rop(f=odefaults_l)
            rdefaults = iter(rdefaults_l if odefaults_l else [])
            all_rops_l = []
            all_rops_ov_l = []
            for lo, fn_rov in izip(outputs, roverrides_l):
                if fn_rov == 'default':
                    rnext, rnext_ov = OpFromGraph._filter_rop_var(
                        next(rdefaults), lo)
                    all_rops_l.append(rnext)
                    all_rops_ov_l.append(rnext_ov)
                elif isinstance(fn_rov, Variable):
                    if isinstance(fn_rov.type, NullType):
                        all_rops_l.append(lo.zeros_like())
                        all_rops_ov_l.append(fn_rov.type())
                    elif isinstance(fn_rov.type, DisconnectedType):
                        all_rops_l.append(lo.zeros_like())
                        all_rops_ov_l.append(None)
                    else:
                        raise ValueError(STYPE_ERR_MSG % fn_rov.type)
                else:
                    if not hasattr(fn_rov, '__call__'):
                        raise TypeError(TYPE_ERR_MSG % fn_rov)
                    rov, rov_ov = OpFromGraph._filter_rop_var(
                        fn_rov(inputs, eval_points), lo)
                    all_rops_l.append(rov)
                    all_rops_ov_l.append(rov_ov)
        else:
            if not hasattr(rop_op, '__call__'):
                raise TypeError(TYPE_ERR_MSG % rop_op)
            roverrides_l = rop_op(inputs, eval_points)
            if not isinstance(roverrides_l, list):
                raise TypeError(
                    'Rop overriding function should return a list, '
                    'got "%s"' % type(roverrides_l))
            all_rops_l, all_rops_ov_l = izip(*[
                OpFromGraph._filter_rop_var(rop, out)
                for rop, out in izip(roverrides_l, outputs)])
            if len(all_rops_l) != out_len:
                raise ValueError(
                    'Rop overriding function %s should return list of '
                    '%d outputs, got %d' % (
                        self.name, out_len, len(all_rops_l)), rop_op)
            all_rops_l = list(all_rops_l)
            all_rops_ov_l = list(all_rops_ov_l)
        self._rop_op = type(self)(
            inputs=inputs + eval_points,
            outputs=all_rops_l,
            inline=self.is_inline,
            name=(None if self.name is None else self.name + '_rop'),
            on_unused_input='ignore')
        self._rop_op_stypes_l = all_rops_ov_l
        self._rop_op_is_cached = True

    def get_grad_op(self):
        """
        getter method for self._grad_op

        """
        if not self._grad_op_is_cached:
            self._recompute_grad_op()
        return self._grad_op

    def get_rop_op(self):
        """
        getter method for self._rop_op

        """
        if not self._rop_op_is_cached:
            self._recompute_rop_op()
        return self._rop_op

    def set_grad_overrides(self, grad_overrides):
        """
        Set gradient overrides, see help(theano.OpFromGraph) for syntax.
        This will completely remove any previously set gradient overrides.

        """
        self._grad_op = grad_overrides
        self._grad_op_is_cached = False

    def set_rop_overrides(self, rop_overrides):
        """
        Set R_op overrides, see help(theano.OpFromGraph) for syntax.
        This will completely remove any previously set R_op overrides.

        """
        self._rop_op = rop_overrides
        self._rop_op_is_cached = False

    def grad(self, inputs, output_grads):
        if not self._grad_op_is_cached:
            self._recompute_grad_op()
        ret_ofg_l = self._grad_op(
            *(list(inputs) + list(output_grads)), return_list=True)
        # substitute the remembered NullType/DisconnectedType variables back
        ret_l = [ret_ofg if ov is None else ov for ret_ofg, ov
                 in izip(ret_ofg_l, self._grad_op_stypes_l)]
        return ret_l

    def R_op(self, inputs, eval_points):
        if not self._rop_op_is_cached:
            self._recompute_rop_op()
        ret_ofg_l = self._rop_op(
            *(list(inputs) + list(eval_points)), return_list=True)
        ret_l = [ret_ofg if ov is None else ov for ret_ofg, ov
                 in izip(ret_ofg_l, self._rop_op_stypes_l)]
        return ret_l

    def make_node(self, *inputs):
        num_expected_inps = len(self.local_inputs) - len(self.shared_inputs)
        if len(inputs) != num_expected_inps:
            raise ValueError('Expected %d inputs, got %d' %
                             (num_expected_inps, len(inputs)))
        inputs = [inp_t.filter_variable(inp)
                  for inp, inp_t in izip(inputs, self.input_types)]
        apply_node = gof.Apply(
            self,
            list(inputs) + self.shared_inputs,
            [type() for type in self.output_types])
        apply_node.local_inputs = self.local_inputs
        apply_node.local_outputs = self.local_outputs
        return apply_node

    def connection_pattern(self, node):
        """
        Return connection pattern of subgraph defined by inputs and outputs.

        """
        inp_len = len(self.local_inputs)
        out_len = len(self.local_outputs)
        cpmat_self = io_connection_pattern(
            self.local_inputs, self.local_outputs)

        grad_op = self.get_grad_op()
        cpmat_grad = io_connection_pattern(
            grad_op.local_inputs[inp_len:], grad_op.local_outputs)

        # merge the transposed gradient pattern into the forward pattern;
        # an input overridden with a DisconnectedType gradient is treated
        # as disconnected from every output
        for i, t in enumerate(self._grad_op_stypes_l):
            if t is not None:
                if isinstance(t.type, DisconnectedType):
                    for o in range(out_len):
                        cpmat_self[i][o] = False
            for o in range(out_len):
                cpmat_self[i][o] |= cpmat_grad[o][i]

        return list(map(list, cpmat_self))

    def infer_shape(self, node, shapes):
        out_shp = theano.scan_module.scan_utils.infer_shape(
            self.local_outputs, self.local_inputs, shapes)

        # Clone the output shapes so that they are computed from the outer
        # inputs; cloning all shapes at once avoids duplicating common
        # subgraphs between the individual shape calls.
        repl = dict(zip(self.local_inputs, node.inputs))
        cloned = theano.clone(reduce(tuple.__add__, out_shp), replace=repl)
        ret = []
        used = 0
        for i in range(len(out_shp)):
            nb = len(out_shp[i])
            ret.append(cloned[used: used + nb])
            used += nb

        return ret

    def prepare_node(self, node, storage_map, compute_map, impl):
        if not hasattr(self, 'fn') and impl == 'py':
            self.fn = orig_function(self.local_inputs,
                                    self.local_outputs,
                                    **self.kwargs)
            self.fn.trust_input = True

    def perform(self, node, inputs, outputs):
        variables = self.fn(*inputs)
        assert len(variables) == len(outputs)
        for output, variable in izip(outputs, variables):
            output[0] = variable.copy()


@gof.local_optimizer([OpFromGraph])
def inline_ofg_expansion(node):
    """
    This optimization expands internal graph of OpFromGraph.
    Only performed if node.op.is_inline == True
    Doing so can improve optimization at the cost of compilation speed.
    """
    op = node.op
    if not isinstance(op, OpFromGraph):
        return False
    if not op.is_inline:
        return False
    return theano.clone(
        op.local_outputs,
        {u: v for u, v in izip(node.op.local_inputs, node.inputs)})

optdb.register(
    'inline_ofg_expansion',
    gof.opt.in2out(inline_ofg_expansion),
    -0.01, 'fast_compile', 'fast_run')

# Since OpFromGraph contains a Theano compiled function,
# let DebugMode know about it
ops_with_inner_function[OpFromGraph] = 'fn'
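
# A minimal usage sketch of the inline path (an illustration, assuming the
# default compilation modes): with ``inline=True`` the optimization above
# replaces the OpFromGraph node by its inner graph during compilation.
#
#     import theano
#     from theano import tensor
#     x, y = tensor.scalars('xy')
#     op = theano.OpFromGraph([x, y], [x + y], inline=True)
#     fn = theano.function([x, y], op(x, y))
#     theano.printing.debugprint(fn)  # no OpFromGraph node remains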