Attribute VB_Name = "modGlobals"
'Backprop neural network by Richard Gardner
'rgardner@rgsoftware.com
'http://www.rgsoftware.com
Option Explicit

Private Const LEARNING_RATE As Double = 0.5
Private Weights(1 To 3, 1 To 3) As Double
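'Network layout: two inputs, two hidden sigmoid neurons (columns 1 and 2 of
'Weights) and one sigmoid output neuron (column 3). For each neuron i,
'Weights(1, i) is its bias; Weights(2, i) and Weights(3, i) weight its two
'incoming values.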
Public Sub Reset()
'Reset the weights with random values
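'Small random values in (-1, 1) break the symmetry between the two hidden
'neurons; if every weight started at the same value, both hidden neurons
'would always compute the same thing.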
Dim i As Integer
Dim j As Integer
Randomize
For i = 1 To 3
For j = 1 To 3
Weights(i, j) = (Rnd * 2) - 1
Next j
Next i
End Sub
Public Function Train(ByVal Input1 As Double, ByVal Input2 As Double, ByVal Target As Double) As Double
Dim Deltas(1 To 3) As Double
Dim net1 As Double
Dim net2 As Double
Dim net3 As Double
Dim v1 As Double
Dim v2 As Double
Dim i As Integer
'Compute the two hidden-layer outputs: each is the weighted sum of the inputs
'plus a bias, passed through the sigmoid activation.
net1 = Activation(Weights(1, 1) + Input1 * Weights(2, 1) + Input2 * Weights(3, 1))
net2 = Activation(Weights(1, 2) + Input1 * Weights(2, 2) + Input2 * Weights(3, 2))
'Now compute the output neuron's value from the two hidden outputs.
net3 = Activation(Weights(1, 3) + net1 * Weights(2, 3) + net2 * Weights(3, 3))
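'net3 is the network's final output; the sigmoid keeps it strictly between
'0 and 1, so targets of exactly 0 or 1 can only be approached, never reached.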
'We have to calculate the deltas for the two layers.
'Remember, we have to calculate the errors backwards
'from the output layer to the hidden layer (that's why
'it's called BACK-propagation).
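'Each delta multiplies the sigmoid derivative, out * (1 - out), by an error
'term: (Target - output) for the output neuron, and the output delta scaled
'by the hidden-to-output weight for each hidden neuron.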
Deltas(3) = net3 * (1 - net3) * (Target - net3)
Deltas(2) = net2 * (1 - net2) * (Weights(3, 3)) * (Deltas(3))
Deltas(1) = net1 * (1 - net1) * (Weights(2, 3)) * (Deltas(3))
'Now, alter the weights accordingly.
v1 = Input1
v2 = Input2
For i = 1 To 3
'For the output neuron (i = 3) the incoming values are the hidden outputs, not the raw inputs.
If i = 3 Then
v1 = net1
v2 = net2
End If
Weights(1, i) = Weights(1, i) + LEARNING_RATE * 1 * Deltas(i) 'Bias
Weights(2, i) = Weights(2, i) + LEARNING_RATE * v1 * Deltas(i)
Weights(3, i) = Weights(3, i) + LEARNING_RATE * v2 * Deltas(i)
Next i
DoEvents 'Yield so the UI stays responsive when Train is called in a tight loop
Train = net3 'Return the output computed before this weight update
End Function
'Forward pass only: compute the network's output for a pair of inputs.
Public Function Run(ByVal Input1 As Double, ByVal Input2 As Double) As Double
Dim net1 As Double
Dim net2 As Double
Dim net3 As Double
net1 = Activation(Weights(1, 1) + Input1 * Weights(2, 1) + Input2 * Weights(3, 1))
net2 = Activation(Weights(1, 2) + Input1 * Weights(2, 2) + Input2 * Weights(3, 2))
net3 = Activation(Weights(1, 3) + net1 * Weights(2, 3) + net2 * Weights(3, 3))
Run = net3
End Function
'Standard logistic sigmoid: 1 / (1 + e^-x), which squashes any input into (0, 1).
Private Function Activation(ByVal Value As Double) As Double
Activation = 1 / (1 + Exp(-Value))
End Function
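'A minimal usage sketch: train the 2-2-1 network on the XOR truth table and
'print the results. The sub name TestXor and the 20000-epoch count are
'illustrative choices, not something the module itself prescribes.
Public Sub TestXor()
Dim epoch As Long
Reset 'Randomise the weights before training
For epoch = 1 To 20000
Train 0, 0, 0 'XOR truth table: output is 1 only when exactly one input is 1
Train 0, 1, 1
Train 1, 0, 1
Train 1, 1, 0
Next epoch
'With a successful run the four outputs approach 0, 1, 1, 0; training can
'occasionally stall in a poor local minimum, in which case Reset and retrain.
Debug.Print Run(0, 0), Run(0, 1), Run(1, 0), Run(1, 1)
End Sub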