Functional operations: Activation functions


source

Relu

 Relu (a, name=None)

Take the relu of a tensor


source

Sigmoid

 Sigmoid (a, name=None)

Take the sigmoid of a tensor

## | export

class Tanh(UnaryElementwiseOp):
    """Take the tanh of a tensor.

    Elementwise unary op: out = tanh(a). The gradient uses the identity
    d/dx tanh(x) = 1 - tanh(x)^2, computed from the cached forward output.
    """

    name_template = "tanh({})"

    def __init__(self, a, name=None):
        """Compute tanh of `a` and store it as this op's output.

        np.tanh is numerically stable for large |x|; the naive
        (e^x - e^-x)/(e^x + e^-x) form overflows in exp() well before
        tanh itself saturates at +/-1.
        """
        super().__init__(a, name=name)
        self.set_out(np.tanh(self.args[0].data))

    def backward(self):
        """Accumulate d(loss)/d(a) = grad_out * (1 - tanh(a)^2) into the parent."""
        self.check_backward()
        with np.errstate(under="ignore"):  # Triggered by infinitesimally small 1-data
            self.parents[0].accum_grad(self.out.grad * (1 - self.out.data**2))