-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathMyOwnActivationFunction.lua
More file actions
87 lines (72 loc) · 2.41 KB
/
MyOwnActivationFunction.lua
File metadata and controls
87 lines (72 loc) · 2.41 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
-- This is a demonstration of my own complex activation function called "Piecewise Universal Unit".
-- (PwUU for short!)
-- The formula for PwUU is:
-- f(n, p, x) = min(((tanh(x)*2n)+(x*(2-2n)))/2, 0) + max(0, ((tanh(x)*2p)+(x*(2-2p)))/2)
-- Fun Fact!!!
-- If you set `n` to `1.5` and `p` to `-0.5`, PwUU behaves similarly to Swish! ( x*sigmoid(x) )
-- The neuron implementation below was originally AI-generated, since I had no
-- prior experience implementing a neuron; I then extended it a little myself.
-- Prototype table for a single trainable neuron (standard __index OOP pattern).
Neuron = {}
Neuron.__index = Neuron

--- Construct a new neuron.
-- @param activFunc activation function applied to the weighted sum
-- @param weights   array of input weights (one per expected input)
-- @param bias      scalar bias added to the weighted sum
-- @return a table with the Neuron metatable attached
function Neuron.new(activFunc, weights, bias)
  local neuron = {
    activFunc = activFunc,
    weights = weights,
    bias = bias,
  }
  return setmetatable(neuron, Neuron)
end
--- Compute the neuron's output for the given inputs.
-- Forms the weighted sum of the inputs plus the bias, then passes it
-- through the neuron's activation function.
-- @param inputs array of numbers; its length must equal the weight count
-- @return the activation function applied to the weighted sum
function Neuron:activate(inputs)
  assert(#inputs==#self.weights, "amount of inputs should be equal to amount of weights")
  -- Start the accumulator at the bias instead of adding it afterwards.
  local weighted = self.bias
  for i = 1, #self.weights do
    weighted = weighted + self.weights[i] * inputs[i]
  end
  return self.activFunc(weighted)
end
--- Perform one gradient-style update of the neuron's weights and bias.
-- The target is `reward` alone, or — when `nextInputs` is given — the
-- TD(0)-style bootstrapped value `reward + gamma * activate(nextInputs)`.
-- @param inputs       training input array (same length as the weights)
-- @param reward       scalar reward / target signal
-- @param learningRate step size for the weight and bias updates
-- @param gamma        discount factor; only read when nextInputs is given
-- @param nextInputs   optional next-state inputs for bootstrapping
-- @return the neuron's output for `inputs` BEFORE the update was applied
function Neuron:train(inputs, reward, learningRate, gamma, nextInputs)
  local output = self:activate(inputs)
  local target = reward
  if nextInputs then
    -- Bootstrap the target from the estimated value of the next state.
    target = reward + gamma * self:activate(nextInputs)
  end
  -- Named `err` rather than `error` so the Lua builtin error() is not shadowed.
  local err = target - output
  for i = 1, #self.weights do
    self.weights[i] = self.weights[i] + learningRate * err * inputs[i]
  end
  self.bias = self.bias + learningRate * err
  return output
end
-- THERE WE GO!!!
--- Build a "Piecewise Universal Unit" (PwUU) activation closure.
-- Implements the documented formula:
--   f(n, p, x) = min(((tanh(x)*2n)+(x*(2-2n)))/2, 0)
--              + max(0, ((tanh(x)*2p)+(x*(2-2p)))/2)
-- @param n blend factor for the negative (min) branch
-- @param p blend factor for the positive (max) branch
-- @return a function f(x) evaluating PwUU at x
local function CreateCAF(n,p)
  return function(x)
    local t = math.tanh(x)  -- hoisted: used by both branches
    -- Bug fix: the /2 must divide the WHOLE negative-branch sum, matching
    -- the formula above (previously only the second addend was halved).
    local neg = math.min(((t*2*n)+(x*(2-(2*n))))/2, 0)
    local pos = math.max(0, ((t*2*p)+(x*(2-(2*p))))/2)
    return neg + pos
  end
end
--- Rectified linear unit: returns x clamped to be non-negative.
local function relu(x)
  if x < 0 then
    return 0
  end
  return x
end
--- Swish activation: x * sigmoid(x).
local function swish(x)
  local sigmoid = 1 / (1 + math.exp(-x))
  return x * sigmoid
end
-- Demo: train three two-input neurons (ReLU, Swish, and PwUU configured to
-- mimic Swish) to approximate subtraction a - b, then compare their outputs.
local neuronR = Neuron.new(relu, {0.5, 0.5}, 0.5)
local neuronS = Neuron.new(swish, {0.5, 0.5}, 0.5)
local neuronP = Neuron.new(CreateCAF(1.5,-0.5), {0.5, 0.5}, 0.5)

-- Keep sampling random pairs until 1,000,000 usable examples have been
-- trained on; pairs where a - b is negative are discarded without counting.
local trained = 0
repeat
  local a = math.random(0,10)/10
  local b = math.random(0,10)/10
  local diff = a - b
  if diff >= 0 then
    neuronR:train({a,b}, diff, 0.1)
    neuronS:train({a,b}, diff, 0.1)
    neuronP:train({a,b}, diff, 0.1)
    trained = trained + 1
  end
until trained >= 1000000

-- Evaluate all three trained neurons on one held-out pair.
local a, b = 0.7, 0.2
print("ReLU Neuron:",a,"-",b,"=",neuronR:activate({a,b}))
print("Swish Neuron:",a,"-",b,"=",neuronS:activate({a,b}))
print("PwUU (mimicking Swish) Neuron:",a,"-",b,"=",neuronP:activate({a,b}))