#| export
class Tanh(UnaryElementwiseOp):
    """Take the tanh of a tensor."""

    # Template used by the op-naming machinery ("{}" is the input's name).
    name_template = "tanh({})"

    def __init__(self, a, name=None):
        """Compute tanh(a) and register it as this op's output.

        Uses np.tanh rather than the naive (e^x - e^-x)/(e^x + e^-x):
        np.exp overflows to inf for large |x| (around 710 in float64),
        turning the ratio into inf/inf = nan, whereas np.tanh is stable
        for all finite inputs and saturates cleanly at +/-1.
        """
        super().__init__(a, name=name)
        self.set_out(np.tanh(self.args[0].data))

    def backward(self):
        """Accumulate the local gradient d(tanh x)/dx = 1 - tanh(x)^2."""
        self.check_backward()
        # 1 - data**2 underflows to a denormal/zero when |tanh(x)| is
        # extremely close to 1; that is the correct saturated gradient,
        # so silence the harmless underflow warning.
        with np.errstate(under="ignore"):
            self.parents[0].accum_grad(self.out.grad * (1 - self.out.data**2))
Functional operations: Activation functions
Relu
Relu (a, name=None)
Take the ReLU of a tensor
Sigmoid
Sigmoid (a, name=None)
Take the sigmoid of a tensor