# HCAL_project/architectures/utils/arbitrary_act.py
# Arbitrary activation function for the HCAL model.
# Author: Davide Lancierini, 8 Feb 2019.
import tensorflow as tf
import numpy as np
import matplotlib.pyplot as plt
from tensorflow.python.framework import ops

# Saturation level of the activation (presumably tuned to the HCAL
# energy response — TODO confirm with the original author).
a = 6.12


def HCAL_act(x):
    """Shifted, scaled logistic: a / (1 + exp(a - x)).

    Approaches `a` for x >> a and 0 for x << a; equals a/2 at x == a.
    """
    return a / (1.0 + np.exp(a - x))


# Element-wise wrapper. HCAL_act is already built from numpy ufuncs, but
# np.vectorize preserves the original array-in/array-out contract.
np_HCAL_act = np.vectorize(HCAL_act)

def d_HCAL_act(x):
    """Analytic derivative of HCAL_act: a*e / (1 + e)**2 with e = exp(a - x)."""
    # Compute the shared exponential once instead of twice.
    e = np.exp(np.add(a, -x))
    return np.divide(np.multiply(a, e), np.square(np.add(1.0, e)))


# Element-wise wrapper, mirroring np_HCAL_act.
np_d_HCAL_act = np.vectorize(d_HCAL_act)


def np_d_HCAL_act_32(x):
    """Derivative cast to float32 — the dtype tf.py_func is declared to return."""
    return np_d_HCAL_act(x).astype(np.float32)

def tf_d_HCAL_act(x, name=None):
    """Wrap the numpy derivative as a TF1 op via tf.py_func.

    Note: this op itself carries no gradient (stateful=False py_func);
    it is only used inside HCAL_grad to evaluate f'(x).
    """
    with tf.name_scope(name, "d_HCAL_act", [x]) as scope:
        out = tf.py_func(np_d_HCAL_act_32, [x], [tf.float32],
                         name=scope, stateful=False)
        # py_func returns a list of tensors; unwrap the single output.
        return out[0]

def py_func(func, inp, Tout, stateful=True, name=None, grad=None):
    """tf.py_func with a custom Python gradient attached (TF1 idiom).

    Registers `grad` under a fresh gradient name, then creates the py_func
    op inside a gradient_override_map scope so TF looks up that name
    instead of the (nonexistent) default "PyFunc" gradient.

    Args:
        func: numpy-level forward function.
        inp: list of input tensors.
        Tout: list of output dtypes.
        stateful: passed through to tf.py_func.
        name: optional op name.
        grad: gradient function with signature (op, grad) -> tensor(s).
    """

    # Need to generate a unique name to avoid duplicates:
    # (registration is global per-process, so a fixed name would collide
    # on repeated calls)
    rnd_name = 'PyFuncGrad' + str(np.random.randint(0, 1E+8))

    # Registration must happen BEFORE the op is created below, so the
    # override map can resolve rnd_name at op-construction time.
    tf.RegisterGradient(rnd_name)(grad)  # see _MySquareGrad for grad example
    g = tf.get_default_graph()
    with g.gradient_override_map({"PyFunc": rnd_name}):
        return tf.py_func(func, inp, Tout, stateful=stateful, name=name)

def HCAL_grad(op, grad):
    """Gradient function for the HCAL_act py_func op.

    Chain rule: dL/dx = upstream grad * f'(x), with f' evaluated by the
    tf_d_HCAL_act op on the forward op's input.
    """
    return grad * tf_d_HCAL_act(op.inputs[0])


np_HCAL_act_32 = lambda x: np_HCAL_act(x).astype(np.float32)

def tf_HCAL_act(x, name=None):
    """TF1 op computing HCAL_act, with HCAL_grad wired in as its gradient."""
    with tf.name_scope(name, "HCAL_act", [x]) as scope:
        # py_func (the local wrapper, not tf.py_func) attaches HCAL_grad
        # through the gradient_override_map trick.
        out = py_func(np_HCAL_act_32, [x], [tf.float32],
                      name=scope, grad=HCAL_grad)
        # Unwrap the single tensor from the returned list.
        return out[0]