
"""
This file implements specialization optimizations that break the
canonical form of the graph.

Currently there is a problem with the order of the optimizations and
with the definition of the canonicalized graph.

Right now there is a canonicalization optimization phase that tries to
make all equivalent graphs identical. This is not always achieved, but
it does make many of the basic patterns canonical. We need to extend
the definition of canonicalization so that it holds more often.

The problem this file intends to fix in the future is that in the
"Equilibrium" specialization optimization phase, some optimizations
require that the graph be canonical, some require that it is not, and
some break the canonical form needed by others. As we can't control the
order of those optimizations, there are cases where an optimization
that requires a canonical graph won't be applied because an
optimization that breaks the canonical form of the graph was executed
before it.

To fix this, we need to split the specialization phase into one phase
where optimizations can't break the canonical form and one where this
is allowed. The same is needed for the stabilization optimization
phase, but as it happens before the specialization phase, it causes
fewer problems.

Also, we should make the fgraph refuse optimizations that break the
canonical form of the graph during the optimization phases where the
graph is supposed to be canonical.

"""

from __future__ import absolute_import, print_function, division
import logging

from theano import gof
from theano.tensor.elemwise import CAReduce
from theano.tensor import basic as T
from theano.tensor import DimShuffle, Subtensor
from theano.tensor.opt import register_uncanonicalize
from theano import scalar as scal

_logger = logging.getLogger('theano.tensor.opt')


@register_uncanonicalize
@gof.local_optimizer([T.MaxAndArgmax])
def local_max_and_argmax(node):
    """
    If we don't use the argmax, change it to a max only.
    """
    if isinstance(node.op, T.MaxAndArgmax):
        axis = node.op.get_params(node)
        # If the argmax output is unused, compute only the max with a
        # plain CAReduce.
        if len(node.outputs[1].clients) == 0:
            new = CAReduce(scal.maximum, axis)(node.inputs[0])
            return [new, None]
        # If the max output is unused, keep only the argmax.
        if len(node.outputs[0].clients) == 0:
            return [None, T._argmax(node.inputs[0], axis)]
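
# A usage sketch (illustrative, not part of the module): when only the max
# output of MaxAndArgmax is used, the compiled graph should keep a plain
# CAReduce{maximum} and drop the argmax computation:
#
#     import theano
#     import theano.tensor as tt
#     x = tt.matrix('x')
#     f = theano.function([x], tt.max(x, axis=0))
#     theano.printing.debugprint(f)  # no MaxAndArgmax node should remain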


@register_uncanonicalize
@gof.local_optimizer([T.neg])
def local_max_to_min(node):
    """
    Change -(max(-x)) to min.

    This is tested in tensor/tests/test_basic.py:test_min_max.

    Notes
    -----
    We don't need an opt that will do the reverse as, by default,
    the interface puts only MaxAndArgmax into the graph.

    """
    if node.op == T.neg and node.inputs[0].owner:
        max = node.inputs[0]
        if (max.owner and
                isinstance(max.owner.op, CAReduce) and
                max.owner.op.scalar_op == scal.maximum):
            neg = max.owner.inputs[0]
            if neg.owner and neg.owner.op == T.neg:
                # -(max(-x)) is min(x).
                return [CAReduce(scal.minimum,
                                 max.owner.op.axis)(neg.owner.inputs[0])]

    return False
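
# Illustrative sketch: the canonical graph expresses min(x) as -(max(-x));
# this rewrite collapses it back into a single reduction:
#
#     import theano
#     import theano.tensor as tt
#     x = tt.vector('x')
#     f = theano.function([x], tt.min(x))
#     theano.printing.debugprint(f)  # should show one CAReduce{minimum}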
 | j j d j   } | | k r t St j | j j d |  j d  g Sn  t S(   s   
    If a dimshuffle is inside an alloc and only adds dimension to the
    left, remove it.

    Alloc(DimShuffle(x), ...) - > Alloc(x, ...)
    i    t   xi   (   R"   (   R
   R   R   t   AllocR   R   R   t	   new_ordert   ndimt   tuplet   rangeR   t   alloc(   R   t   input_R$   t   expected_new_order(    (    s@   /tmp/pip-build-X4mzal/theano/theano/tensor/opt_uncanonicalize.pyt   local_alloc_dimshuffle]   s    	 'c         C` s   t  |  j t j  r |  j d } | j r t  | j j t  r | j j j } d } x: | D]2 } | d k rt q\ q\ | | k r t S| d 7} q\ Wt j	 | j j d |  j d d |  j
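
# Illustrative sketch: alloc broadcasts its value itself, so a dimshuffle
# that merely left-pads x with broadcastable dimensions is redundant:
#
#     import theano.tensor as tt
#     x = tt.vector('x')
#     y = tt.alloc(x.dimshuffle('x', 0), 5, 10)
#     # After this rewrite, y is computed as tt.alloc(x, 5, 10).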


@register_uncanonicalize
@gof.local_optimizer([T.Reshape])
def local_reshape_dimshuffle(node):
    """
    If a dimshuffle is inside a reshape and does not change the order
    of dimensions, remove it.

    Reshape(Dimshuffle(x), shp) -> Reshape(x, shp)
    """
    if isinstance(node.op, T.Reshape):
        input_ = node.inputs[0]
        if input_.owner and isinstance(input_.owner.op, DimShuffle):
            new_order = input_.owner.op.new_order
            offset = 0
            # The dimshuffle may insert broadcastable ('x') dimensions,
            # but the real dimensions must keep their relative order.
            for dim in new_order:
                if dim == 'x':
                    continue
                elif dim != offset:
                    return False
                else:
                    offset += 1
            return [T.reshape(input_.owner.inputs[0], node.inputs[1],
                              ndim=node.outputs[0].ndim)]
    return False
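
# Illustrative sketch: inserted broadcastable dimensions are irrelevant to
# a reshape, so the dimshuffle can be dropped:
#
#     import theano.tensor as tt
#     x = tt.matrix('x')
#     y = x.dimshuffle(0, 'x', 1).reshape((x.shape[0] * x.shape[1],))
#     # After this rewrite, the reshape is applied directly to x.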


@register_uncanonicalize
@gof.local_optimizer([DimShuffle])
def local_dimshuffle_alloc(node):
    """
    If an alloc is inside a dimshuffle which only adds dimensions to the
    left, scrap the dimshuffle and add 1s into the alloc.

    dimshuffle{x, 0, 1}(alloc([3 4], 3, 2)) => alloc([3 4], 1, 3, 2)
    """
    if isinstance(node.op, DimShuffle) and node.inputs[0].owner:
        input_ = node.inputs[0]
        if isinstance(input_.owner.op, T.Alloc):
            # Check that the dimshuffle only prepends broadcastable
            # ('x') dimensions.
            new_order = node.op.new_order
            expected_new_order = ('x',) * (len(new_order) - input_.ndim) + \
                tuple(range(input_.ndim))
            if new_order != expected_new_order:
                return False

            # Materialize the new dimensions as leading 1s in the
            # alloc's shape.
            nb_new_dims = len(new_order) - input_.ndim
            new_shape_input = (1,) * nb_new_dims + tuple(input_.owner.inputs[1:])

            return [T.alloc(input_.owner.inputs[0], *new_shape_input)]
    return False
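
# Illustrative sketch: rather than broadcasting the alloc's output, the new
# leading dimensions become explicit 1s in the alloc's shape:
#
#     import theano.tensor as tt
#     a = tt.alloc([[3, 4]], 3, 2)
#     b = a.dimshuffle('x', 0, 1)
#     # After this rewrite, b is computed as tt.alloc([[3, 4]], 1, 3, 2).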


@register_uncanonicalize
@gof.local_optimizer([DimShuffle])
def local_dimshuffle_subtensor(node):
    """If a subtensor is inside a dimshuffle which only drops
    broadcastable dimensions, scrap the dimshuffle and index the
    subtensor with 0.

    x[i:j, :, k:l].dimshuffle(0, 2) =>
        x[i:j, 0, k:l] if x.broadcastable == (False, True, False)

    """
    if isinstance(node.op, DimShuffle) and node.inputs[0].owner:
        # The dimshuffle can only drop dimensions; it cannot add 'x'
        # dimensions or reshuffle the remaining ones.
        if 'x' in node.op.new_order:
            return False
        new_order = node.op.new_order
        # new_order can be empty; otherwise check that the kept
        # dimensions stay in increasing order.
        if len(new_order) > 1:
            past_dim = new_order[0]
            for dim in new_order[1:]:
                if not dim > past_dim:
                    return False
                past_dim = dim

        input_ = node.inputs[0]
        if isinstance(input_.owner.op, Subtensor):
            # The dimensions dropped by the dimshuffle must all be
            # broadcastable.
            broadcastable = input_.broadcastable

            missing_dims = list(range(input_.ndim))
            for dim in new_order:
                missing_dims.remove(dim)

            if not all([broadcastable[i] for i in missing_dims]):
                return False

            # Build a new idx_list for a new Subtensor object.  We have
            # to walk idx_list and the node inputs together: the inputs
            # hold one entry for each non-None element of idx_list.
            idx_list = input_.owner.op.idx_list
            new_idx_list = list(idx_list)
            new_inputs = [input_.owner.inputs[0]]
            zero = T.constant(0)
            slice_attr_list = ['start', 'stop', 'step']
            j = 0
            slice_i = -1
            subtensor_removed_dims = 0
            for i, idx in enumerate(idx_list):
                if isinstance(idx, slice):
                    past_j = j
                    slice_i += 1
                    for slice_attr in slice_attr_list:
                        if getattr(idx, slice_attr) is not None:
                            new_inputs += [input_.owner.inputs[1 + j]]
                            j += 1
                    # past_j == j means idx is slice(None, None, None);
                    # if it is also a dropped dim, index it with 0.
                    if past_j == j and slice_i in missing_dims:
                        new_idx_list[i] = zero
                        new_inputs += [zero]
                else:
                    new_inputs += [input_.owner.inputs[1 + j]]
                    j += 1
                    subtensor_removed_dims += 1
            # Handle the trailing dimensions that the subtensor did not
            # index explicitly.
            for idx in range(len(idx_list), new_inputs[0].ndim):
                if (idx - subtensor_removed_dims) in missing_dims:
                    while len(new_idx_list) < idx:
                        new_idx_list.append(slice(None))

                    new_idx_list.append(zero)
                    new_inputs.append(zero)

            return [Subtensor(new_idx_list)(*new_inputs)]
    return False
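
# Illustrative sketch: dropping a broadcastable dimension after slicing is
# folded into the indexing itself (the broadcastable pattern below is an
# assumption chosen so the rewrite applies):
#
#     import theano.tensor as tt
#     x = tt.TensorType('float64', (False, True, False))('x')
#     y = x[1:3, :, 2:5].dimshuffle(0, 2)
#     # After this rewrite, y is computed as x[1:3, 0, 2:5].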