'' Author: John McCullock
'' Date: 12-11-2005
'' Description: Backpropagation XOR Example 2.
'' Sources: Dr Phil Brierley, www.philbrierley.com
'' translated from c++ to JB/LB by bluatigro
global numInputs : numInputs = 3 '' Input nodes, plus the bias input.
global numPatterns : numPatterns = 4 '' Input patterns for XOR experiment.
global numHidden : numHidden = 4
global numEpochs : numEpochs = 200
global LR.IH : LR.IH = 0.7 '' Learning rate, input to hidden weights.
global LR.HO : LR.HO = 0.07 '' Learning rate, hidden to output weights.
global patNum : patNum = 0
global errThisPat : errThisPat = 0.0
global outPred : outPred = 0.0 '' Network output ("predicted") value.
global RMSerror : RMSerror = 0.0 '' Root Mean Squared error.
dim hiddenVal( numHidden ) '' Hidden node outputs.
dim weightsIH( numInputs , numHidden ) '' Input to Hidden weights.
dim weightsHO( numHidden ) '' Hidden to Output weights.
dim trainInputs( numPatterns , numInputs )
dim trainOutput( numPatterns ) '' "Actual" (target) output values.
randomize timer '' Seed the generator with system time.
call initWeights
call initData
'' Train the network
for j = 0 to numEpochs
    '' Bug fix: valid pattern indices are 0 .. numPatterns - 1; the old
    '' bound ran one extra iteration per epoch.
    for i = 0 to numPatterns - 1
        '' Select a pattern at random. int() is required: rnd(0) * numPatterns
        '' is fractional, and patNum is used directly as an array index.
        patNum = int( rnd(0) * numPatterns )
        '' Calculate the output and error for this pattern.
        call calcNet
        '' Adjust network weights.
        call WeightChangesHO
        call WeightChangesIH
    next i
    call calcOverallError
    '' Display the overall network error after each epoch.
    '' Bug fix: was str(RMSerror); JB/LB's numeric-to-string function is str$.
    print "epoch = " + str$(j) + " RMS Error = " + str$(RMSerror)
next j
'' Training has finished.
call displayResults
input "[ pres return ]" ; in$
end
'' Hyperbolic tangent: squashes any real x into the range -1 to 1.
'' Uses the identity tanh(x) = (1 - e^-2x) / (1 + e^-2x).
'' Bug fix: "x as double ... as double" typing and "return" are FreeBASIC
'' syntax, not JB/LB; a JB/LB function returns by assigning to its own name
'' (this matches the translated version of this routine later in the file).
function tanh( x )
    tanh = ( 1 - exp( 0 - x * 2 ) ) / ( 1 + exp( 0 - x * 2 ) )
end function
sub initWeights
    '' Seed every weight with a small random starting value so training can
    '' break symmetry: hidden-to-output weights land in [-0.25, 0.25),
    '' input-to-hidden weights in [-0.1, 0.1). Each input-to-hidden weight
    '' is echoed to the screen as it is set.
    for hid = 0 to numHidden
        weightsHO(hid) = ( rnd(0) - 0.5 ) / 2
        for inp = 0 to numInputs
            weightsIH(inp,hid) = ( rnd(0) - 0.5 ) / 5
            print "Weight = " + str$( weightsIH(inp,hid) )
        next inp
    next hid
end sub
sub initData
    '' XOR truth table rescaled from {0,1} to the range -1 to 1, one row
    '' per pattern; targets also lie in -1 to 1. The third input column is
    '' a constant 1 acting as the bias.
    '' pattern 0: ( 1, -1) -> 1
    trainInputs(0,0) = 1 : trainInputs(0,1) = -1 : trainOutput(0) = 1
    '' pattern 1: (-1,  1) -> 1
    trainInputs(1,0) = -1 : trainInputs(1,1) = 1 : trainOutput(1) = 1
    '' pattern 2: ( 1,  1) -> -1
    trainInputs(2,0) = 1 : trainInputs(2,1) = 1 : trainOutput(2) = -1
    '' pattern 3: (-1, -1) -> -1
    trainInputs(3,0) = -1 : trainInputs(3,1) = -1 : trainOutput(3) = -1
    '' Bias input for every pattern.
    for p = 0 to numPatterns - 1
        trainInputs(p,2) = 1
    next p
end sub
sub calcNet
'' Forward pass: computes the hidden-node activations and the network
'' output (global outPred) for the pattern selected by global patNum,
'' then the signed error for that pattern (global errThisPat).
'' Note: the loops run 0..numHidden and 0..numInputs INCLUSIVE, so one
'' extra hidden node and one extra (always-zero) input participate; this
'' matches how the weight arrays are dimensioned and trained elsewhere.
'' Calculates values for Hidden and Output nodes.
for i = 0 to numHidden
hiddenVal(i) = 0.0
'' Weighted sum of this pattern's inputs, squashed by tanh into -1..1.
for j = 0 to numInputs
hiddenVal(i) = hiddenVal(i)+(trainInputs(patNum,j) * weightsIH(j,i) )
next j
hiddenVal(i) = tanh( hiddenVal( i ) )
next i
'' The output node is a plain linear combination (no squashing).
outPred = 0.0
for i = 0 to numHidden
outPred = outPred + hiddenVal(i) * weightsHO(i)
next i
'' Signed error: predicted - target. The weight-update subs SUBTRACT
'' LR * error, so this orientation gives descent, not ascent.
errThisPat = outPred - trainOutput( patNum )
end sub
sub WeightChangesHO
    '' Gradient-descent update of the Hidden to Output weights:
    '' delta = LR.HO * error * hidden activation, subtracted because
    '' errThisPat is (predicted - target).
    for k = 0 to numHidden
        '' Bug fix: "dim as double weightChange = ..." is FreeBASIC syntax,
        '' invalid in JB/LB; a plain assignment creates the local variable
        '' (this matches the translated version of this sub later in the file).
        weightChange = LR.HO * errThisPat * hiddenVal(k)
        weightsHO(k) = weightsHO(k) - weightChange
        '' Clamp ("regularize") the output weights to the range -5 .. 5.
        if (weightsHO(k) < -5) then
            weightsHO(k) = -5
        end if
        if (weightsHO(k) > 5) then
            weightsHO(k) = 5
        end if
    next k
end sub
sub WeightChangesIH
    '' Backpropagate the output error into the Input to Hidden weights.
    for hid = 0 to numHidden
        '' Error signal reaching this hidden node: the tanh derivative
        '' (1 - h^2) times the node's output weight, the pattern error,
        '' and the learning rate. Invariant over the inner loop, so it is
        '' computed once per hidden node.
        grad = ( 1 - ( hiddenVal(hid) * hiddenVal(hid) ) ) * weightsHO(hid) * errThisPat * LR.IH
        for inp = 0 to numInputs
            weightsIH(inp,hid) = weightsIH(inp,hid) - ( grad * trainInputs(patNum,inp) )
        next inp
    next hid
end sub
sub calcOverallError
    '' Compute the Root Mean Squared error over the whole training set.
    '' Bug fix: the loop formerly ran 0 to numPatterns INCLUSIVE, folding a
    '' phantom all-zero 5th pattern into the sum while still dividing by
    '' numPatterns. Valid pattern indices are 0 .. numPatterns - 1.
    RMSerror = 0.0
    for i = 0 to numPatterns - 1
        patNum = i
        call calcNet
        RMSerror = RMSerror + (errThisPat * errThisPat)
    next i
    RMSerror = RMSerror / numPatterns
    RMSerror = sqr(RMSerror)
end sub
sub displayResults
    '' Print target vs. network output for each training pattern.
    '' Bug fix: the loop formerly ran 0 to numPatterns INCLUSIVE and printed
    '' a phantom 5th pattern of all zeros; valid indices are
    '' 0 .. numPatterns - 1.
    for i = 0 to numPatterns - 1
        patNum = i
        call calcNet
        print "pat = " + str$( patNum + 1 ) _
            + " actual = " + str$( trainOutput(patNum) ) _
            + " neural model = " + str$( outPred )
    next i
end sub
'' Author: John McCullock
'' Date: 12-11-2005
'' Description: Backpropagation XOR Example 2.
'' Sources: Dr Phil Brierley, www.philbrierley.com
'' translated from c++ to JB/LB by bluatigro
global numInputs : numInputs = 3 '' Input nodes, plus the bias input.
global numPatterns : numPatterns = 4 '' Input patterns for XOR experiment.
global numHidden : numHidden = 4
global numEpochs : numEpochs = 200
global LR.IH : LR.IH = 0.7 '' Learning rate, input to hidden weights.
global LR.HO : LR.HO = 0.07 '' Learning rate, hidden to output weights.
global patNum : patNum = 0
global errThisPat : errThisPat = 0.0
global outPred : outPred = 0.0 '' Network output ("predicted") value.
global RMSerror : RMSerror = 0.0 '' Root Mean Squared error.
dim hiddenVal( numHidden ) '' Hidden node outputs.
dim weightsIH( numInputs , numHidden ) '' Input to Hidden weights.
'' Bug fix: this dim had been appended to the comment on the previous line,
'' so weightsHO was never dimensioned (it only ran at all because JB/LB
'' auto-sizes undimensioned arrays to 10 elements).
dim weightsHO( numHidden ) '' Hidden to Output weights.
dim trainInputs( numPatterns , numInputs )
dim trainOutput( numPatterns ) '' "Actual" (target) output values.
call initWeights
call initData
'' Train the network
for j = 0 to numEpochs
    '' Bug fix: valid pattern indices are 0 .. numPatterns - 1; the old
    '' bound also "trained" on a phantom all-zero 5th pattern.
    for i = 0 to numPatterns - 1
        '' Train the patterns in sequence (random selection disabled).
        patNum = i ''rnd(0) * numPatterns
        '' Calculate the output and error for this pattern.
        call calcNet
        '' Adjust network weights.
        call WeightChangesHO
        call WeightChangesIH
    next i
    call calcOverallError
    '' Display the overall network error after each epoch.
    print "epoch = " + str$(j) + " RMS Error = " + str$(RMSerror)
next j
'' Training has finished.
call displayResults
input "[ pres return ]" ; in$
end
function tanh( x )
    '' Hyperbolic tangent via the identity (1 - e^-2x) / (1 + e^-2x);
    '' the exponential is evaluated once and reused.
    expTerm = exp( 0 - x * 2 )
    tanh = ( 1 - expTerm ) / ( 1 + expTerm )
end function
sub initWeights
    '' Give every weight a small random starting value (breaks symmetry so
    '' the hidden nodes learn different features). Output-layer weights fall
    '' in [-0.25, 0.25), input-layer weights in [-0.1, 0.1); each input-layer
    '' weight is printed as it is assigned.
    for node = 0 to numHidden
        weightsHO(node) = ( rnd(0) - 0.5 ) / 2
        for inp = 0 to numInputs
            weightsIH(inp,node) = ( rnd(0) - 0.5 ) / 5
            print "Weight = " + str$( weightsIH(inp,node) )
        next inp
    next node
end sub
sub initData
    '' Load the XOR training set, rescaled from {0,1} to -1 .. 1.
    '' Column 2 of every pattern is a constant 1 used as the bias input;
    '' targets likewise lie in the range -1 to 1.
    '' XOR(1,-1) = 1
    trainInputs(0,0) = 1 : trainInputs(0,1) = -1 : trainInputs(0,2) = 1
    trainOutput(0) = 1
    '' XOR(-1,1) = 1
    trainInputs(1,0) = -1 : trainInputs(1,1) = 1 : trainInputs(1,2) = 1
    trainOutput(1) = 1
    '' XOR(1,1) = -1
    trainInputs(2,0) = 1 : trainInputs(2,1) = 1 : trainInputs(2,2) = 1
    trainOutput(2) = -1
    '' XOR(-1,-1) = -1
    trainInputs(3,0) = -1 : trainInputs(3,1) = -1 : trainInputs(3,2) = 1
    trainOutput(3) = -1
end sub
sub calcNet
'' Forward pass: computes the hidden-node activations and the network
'' output (global outPred) for the pattern selected by global patNum,
'' then the signed error for that pattern (global errThisPat).
'' Note: the loops run 0..numHidden and 0..numInputs INCLUSIVE, so one
'' extra hidden node and one extra (always-zero) input participate; this
'' matches how the weight arrays are dimensioned and trained elsewhere.
'' Calculates values for Hidden and Output nodes.
for i = 0 to numHidden
hiddenVal(i) = 0.0
'' Weighted sum of this pattern's inputs, squashed by tanh into -1..1.
for j = 0 to numInputs
hiddenVal(i) = hiddenVal(i)+(trainInputs(patNum,j) * weightsIH(j,i) )
next j
hiddenVal(i) = tanh( hiddenVal( i ) )
next i
'' The output node is a plain linear combination (no squashing).
outPred = 0.0
for i = 0 to numHidden
outPred = outPred + hiddenVal(i) * weightsHO(i)
next i
'' Signed error: predicted - target. The weight-update subs SUBTRACT
'' LR * error, so this orientation gives descent, not ascent.
errThisPat = outPred - trainOutput( patNum )
end sub
sub WeightChangesHO
    '' Gradient-descent update of the Hidden to Output weights:
    '' delta = LR.HO * error * hidden activation, subtracted because
    '' errThisPat is (predicted - target).
    for node = 0 to numHidden
        delta = LR.HO * errThisPat * hiddenVal(node)
        weightsHO(node) = weightsHO(node) - delta
        '' Clamp ("regularize") the updated weight to the range -5 .. 5.
        if weightsHO(node) < -5 then weightsHO(node) = -5
        if weightsHO(node) > 5 then weightsHO(node) = 5
    next node
end sub
sub WeightChangesIH
    '' Backpropagate the output error into the Input to Hidden weights.
    for node = 0 to numHidden
        '' Error signal at this hidden node: the tanh derivative (1 - h^2)
        '' times the node's output weight, the pattern error, and the
        '' learning rate. It does not depend on the input index, so it is
        '' computed once per hidden node.
        signal = ( 1 - ( hiddenVal(node) * hiddenVal(node) ) ) * weightsHO(node) * errThisPat * LR.IH
        for inp = 0 to numInputs
            weightsIH(inp,node) = weightsIH(inp,node) - ( signal * trainInputs(patNum,inp) )
        next inp
    next node
end sub
sub calcOverallError
    '' Compute the Root Mean Squared error over the whole training set.
    '' Bug fix: the loop formerly ran 0 to numPatterns INCLUSIVE, folding a
    '' phantom all-zero 5th pattern into the sum while still dividing by
    '' numPatterns. Valid pattern indices are 0 .. numPatterns - 1.
    RMSerror = 0.0
    for i = 0 to numPatterns - 1
        patNum = i
        call calcNet
        RMSerror = RMSerror + (errThisPat * errThisPat)
    next i
    RMSerror = RMSerror / numPatterns
    RMSerror = sqr(RMSerror)
end sub
sub displayResults
    '' Print target vs. network output for each training pattern.
    '' Bug fix: the loop formerly ran 0 to numPatterns INCLUSIVE and printed
    '' a phantom 5th pattern of all zeros; valid indices are
    '' 0 .. numPatterns - 1.
    for i = 0 to numPatterns - 1
        patNum = i
        call calcNet
        print "pat = " + str$( patNum + 1 ) _
            + " actual = " + str$( trainOutput(patNum) ) _
            + " neural model = " + str$( outPred )
    next i
end sub