def leaky_relu(x, leakiness):
    """Leaky ReLU activation.

    Computes ``max(x, leakiness * x)`` element-wise, letting a small
    gradient through for negative inputs.

    Parameters
    ----------
    x : tf.Tensor
        input tensor
    leakiness : float
        leakiness of the ReLU (slope applied to negative values;
        presumably in (0, 1) — the code itself does not enforce this)

    Returns
    -------
    tf.Tensor
        Tensor with the leaky ReLU applied
    """
    scaled = leakiness * x
    return tf.maximum(x, scaled)
class PReLU(AbstractModule):
    """Parametric ReLU with a trainable leakiness variable.

    The negative-slope coefficient is a ``tf.Variable`` named
    ``'leakiness'`` (initialised to 0.01) created in
    ``self.TRAINABLE_COLLECTIONS``.  For inputs of rank > 2 one
    leakiness value is learned per channel (the last dimension);
    otherwise a single scalar is shared.

    The module remembers the rank (and channel count) of the first
    input it is built for and asserts that subsequent inputs match.
    """

    def __init__(self, name='prelu'):
        # Lazily fixed on the first call to _build.
        self._rank = None   # rank of the expected input tensor
        self._shape = None  # shape of the leakiness variable
        super(PReLU, self).__init__(name)

    def _build(self, inp):
        """Apply the parametric ReLU to ``inp`` and return the result.

        Parameters
        ----------
        inp : tf.Tensor
            input tensor; must match the rank (and, for rank > 2, the
            channel count) of the first input this module was built for

        Returns
        -------
        tf.Tensor
            ``max(inp, leakiness * inp)`` computed element-wise
        """
        # Hoist the static shape: the original queried it up to three times.
        input_shape = inp.get_shape().as_list()
        if self._rank is None:
            self._rank = len(input_shape)
        assert self._rank == len(input_shape), \
            'Module was initialised for a different input'
        if self._rank > 2:
            # One leakiness per channel; the channel count must not change
            # between calls.
            if self._shape is None:
                self._shape = [input_shape[-1]]
            assert self._shape[0] == input_shape[-1], \
                'Module was initialised for a different input'
        else:
            # Rank <= 2: a single scalar leakiness shared by all elements.
            self._shape = []
        leakiness = tf.get_variable(
            'leakiness',
            shape=self._shape,
            initializer=tf.constant_initializer(0.01),
            collections=self.TRAINABLE_COLLECTIONS)
        return tf.maximum(inp, leakiness * inp)