4 """ Activation functions for neural network nodes
5
6 Activation functions should implement the following API:
7
8 - _Eval(input)_: returns the value of the function at a given point
9
10 - _Deriv(input)_: returns the derivative of the function at a given point
11
12 The current Backprop implementation also requires:
13
14 - _DerivFromVal(val)_: returns the derivative of the function when its
15 value is val
16
17 In all cases _input_ is a float as is the value returned.
18
19 """
import math


class ActFunc(object):
    """ "virtual base class" for activation functions

    """

    def __call__(self, input):
        # calling an activation function object evaluates it
        return self.Eval(input)


class Sigmoid(ActFunc):
    """ the standard sigmoidal function """

    def __init__(self, beta=1.):
        # beta sets the steepness of the sigmoid; Eval and the
        # derivative methods all rely on it
        self.beta = beta

    def Eval(self, input):
        return 1. / (1. + math.exp(-self.beta * input))

    def Deriv(self, input):
        val = self.Eval(input)
        return self.beta * val * (1. - val)

    def DerivFromVal(self, val):
        # the derivative expressed in terms of the function value itself
        return self.beta * val * (1. - val)
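

# Why DerivFromVal can work from the function value alone (explanatory
# note, not part of the original source): for s(x) = 1/(1 + exp(-beta*x)),
#     ds/dx = beta * exp(-beta*x) / (1 + exp(-beta*x))**2
#           = beta * s(x) * (1 - s(x)),
# so Backprop can reuse an already-computed output instead of evaluating
# the function again. The same holds for the hyperbolic tangent below:
#     d/dx tanh(beta*x) = beta * (1 - tanh(beta*x)**2).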
46 """ the standard hyperbolic tangent function """
47 - def Eval(self,input):
48 v1 = math.exp(self.beta*input)
49 v2 = math.exp(-self.beta*input)
50 return (v1 - v2)/(v1 + v2)
51
53 val = self.Eval(input)
54 return self.beta * (1 - val*val)
55
57 return self.beta * (1 - val*val)
58
61
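

# A minimal usage sketch (an addition, not part of the original module):
# it exercises the documented API and checks Deriv against both
# DerivFromVal and a central finite-difference approximation.
if __name__ == '__main__':
    for func in (Sigmoid(beta=2.), TanH(beta=0.5)):
        x = 0.3
        val = func(x)  # __call__ delegates to Eval
        # the two derivative entry points must agree
        assert abs(func.Deriv(x) - func.DerivFromVal(val)) < 1e-12
        # and both should match the numerical derivative
        h = 1e-6
        numeric = (func.Eval(x + h) - func.Eval(x - h)) / (2. * h)
        assert abs(func.Deriv(x) - numeric) < 1e-6
        print('%s: f(%.2f) = %.6f, deriv = %.6f' %
              (func.__class__.__name__, x, val, func.Deriv(x)))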