LB Booster
« neural net : xor »

Welcome Guest. Please Login or Register.
Apr 1st, 2018, 04:03am



ATTENTION MEMBERS: Conforums will be closing its doors and discontinuing its service on April 15, 2018.
We apologize that Conforums does not have any export functions to migrate data.
Ad-Free has been deactivated. Outstanding Ad-Free credits will be reimbursed to respective payment methods.

Thank you Conforums members.
Speed up Liberty BASIC programs by up to ten times!
Compile Liberty BASIC programs to compact, standalone executables!
Overcome many of Liberty BASIC's bugs and limitations!
LB Booster Resources
LB Booster documentation
LB Booster Home Page
LB Booster technical Wiki
Just BASIC forum
BBC BASIC Home Page
Liberty BASIC forum (the original)

« Previous Topic | Next Topic »
Pages: 1  Notify Send Topic Print
 thread  Author  Topic: neural net : xor  (Read 420 times)
bluatigro
Full Member
ImageImageImage


member is offline

Avatar




PM

Gender: Male
Posts: 111
xx neural net : xor
« Thread started on: Jan 18th, 2017, 1:07pm »


error :
JB reports a syntax error
but not where

Code:
'' Author:        John McCullock
'' Date:        12-11-2005
'' Description:    Backpropagation XOR Example 2.
'' Sources: Dr Phil Brierley, www.philbrierley.com
'' translated from c++ to JB/LB by bluatigro

'' Network configuration.  Arrays dimmed with an upper bound of N have
'' indices 0..N in JB/LB, which is why the loops below run 0 TO N.
global numInputs : numInputs = 3       '' Input nodes, plus the bias input.
global numPatterns : numPatterns = 4     '' Input patterns for XOR experiment.

global numHidden : numHidden = 4
global numEpochs : numEpochs = 200
global LR.IH : LR.IH = 0.7       '' Learning rate, input to hidden weights.
global LR.HO : LR.HO = 0.07      '' Learning rate, hidden to output weights.

global patNum : patNum = 0       '' Index of the pattern currently shown.
global errThisPat : errThisPat = 0.0   '' Error of the current pattern.
global outPred : outPred = 0.0                  '' Network output value.
global RMSerror : RMSerror = 0.0                 '' Root Mean Squared error.

dim hiddenVal( numHidden )         '' Hidden node outputs.

dim weightsIH( numInputs , numHidden )  '' Input to Hidden weights.
dim weightsHO( numHidden )          '' Hidden to Output weights.

dim trainInputs( numPatterns , numInputs )
dim trainOutput( numPatterns )         '' "Actual" output values.



    '' BUG FIX: the original 'randomize timer' is FreeBASIC, not JB/LB
    '' ('timer' does not exist here); LB seeds rnd() on its own.

    call initWeights

    call initData

    '' Train the network
    for j = 0 to numEpochs

        for i = 0 to numPatterns

            ''Select a pattern at random.
            '' BUG FIX: int() keeps the array subscript integral.
            patNum = int( rnd(0) * numPatterns )

            ''Calculate the output and error for this pattern.
            call calcNet

            ''Adjust network weights.
            call WeightChangesHO
            call WeightChangesIH
        next i

        call calcOverallError

        ''Display the overall network error after each epoch
        '' BUG FIX: JB/LB's number-to-string function is str$(), not str().
        print "epoch = " + str$(j) + " RMS Error = " + str$(RMSerror)

    next j
    ''Training has finished.

    call displayResults

input "[ pres return ]" ; in$
end

'' Hyperbolic tangent: tanh(x) = (1 - e^-2x) / (1 + e^-2x).
'' BUG FIX: the 'x as double ... as double' declarations and
'' 'return <expr>' are FreeBASIC syntax — this was the syntax error
'' JB reported.  A JB/LB function returns its value by assigning to
'' the function name.  '0 - x * 2' parses as 0 - (x * 2), i.e. -2x.
function tanh( x )
  tanh = ( 1 - exp( 0 - x * 2 ) ) / ( 1 + exp( 0 - x * 2 ) )
end function

sub initWeights
'' Fill both weight arrays with small random values centred on zero,
'' echoing each input-to-hidden weight as it is generated.
    for h = 0 to numHidden
        '' hidden-to-output weight drawn from [-0.25 , 0.25)
        weightsHO( h ) = ( rnd(0) - 0.5 ) / 2
        for n = 0 to numInputs
            '' input-to-hidden weight drawn from [-0.1 , 0.1)
            weightsIH( n , h ) = ( rnd(0) - 0.5 ) / 5
            print "Weight = " + str$( weightsIH( n , h ) )
        next n
    next h
end sub

sub initData
'' Load the XOR truth table, rescaled from 0/1 into the range -1..1
'' (the output must also lie in -1..1 to suit the tanh hidden layer).
'' The third input column is a constant 1 acting as the bias input.
'' Layout per pattern: input0 , input1 , bias -> target.
    trainInputs(0,0) =  1 : trainInputs(0,1) = -1 : trainInputs(0,2) = 1
    trainOutput(0)   =  1

    trainInputs(1,0) = -1 : trainInputs(1,1) =  1 : trainInputs(1,2) = 1
    trainOutput(1)   =  1

    trainInputs(2,0) =  1 : trainInputs(2,1) =  1 : trainInputs(2,2) = 1
    trainOutput(2)   = -1

    trainInputs(3,0) = -1 : trainInputs(3,1) = -1 : trainInputs(3,2) = 1
    trainOutput(3)   = -1
end sub

sub calcNet
'' Forward pass for pattern patNum: compute each hidden node's tanh
'' activation, then the linear output node and the signed error.
    for h = 0 to numHidden
        acc = 0.0
        for n = 0 to numInputs
            acc = acc + trainInputs( patNum , n ) * weightsIH( n , h )
        next n
        hiddenVal( h ) = tanh( acc )
    next h

    '' The output node is a plain weighted sum (no squashing function).
    outPred = 0.0
    for h = 0 to numHidden
        outPred = outPred + hiddenVal( h ) * weightsHO( h )
    next h

    '' Signed error for this pattern: prediction minus target.
    errThisPat = outPred - trainOutput( patNum )
end sub

sub WeightChangesHO
'' Gradient-descent update of the Hidden to Output weights, then clamp
'' each weight to [-5 , 5] as a crude regularizer.
    for k = 0 to numHidden
        '' BUG FIX: 'dim as double weightChange = ...' is FreeBASIC
        '' declaration syntax, invalid in JB/LB; a plain assignment
        '' is all that is needed.
        weightChange = LR.HO * errThisPat * hiddenVal(k)
        weightsHO(k) = weightsHO(k) - weightChange

        '' Regularization of the output weights.
        if weightsHO(k) < -5 then weightsHO(k) = -5
        if weightsHO(k) >  5 then weightsHO(k) =  5
    next k
end sub

sub WeightChangesIH
'' Backpropagate the output error to the Input to Hidden weights.
'' (1 - h*h) is the derivative of the tanh hidden activation.
    for h = 0 to numHidden
        '' This factor does not depend on the input index, so it is
        '' computed once per hidden node.
        grad = ( 1 - hiddenVal( h ) * hiddenVal( h ) ) * weightsHO( h ) * errThisPat * LR.IH
        for n = 0 to numInputs
            weightsIH( n , h ) = weightsIH( n , h ) - grad * trainInputs( patNum , n )
        next n
    next h
end sub

sub calcOverallError
'' Compute the root-mean-square error over every training pattern.
'' Side effect: leaves patNum set to the last pattern index.
    total = 0.0
    for p = 0 to numPatterns
        patNum = p
        call calcNet
        total = total + errThisPat * errThisPat
    next p
    RMSerror = sqr( total / numPatterns )
end sub

sub displayResults
'' Print the target value next to the network's output for each
'' training pattern (labels are 1-based for display).
    for p = 0 to numPatterns
        patNum = p
        call calcNet
        msg$ = "pat = " + str$( patNum + 1 )
        msg$ = msg$ + " actual = " + str$( trainOutput(patNum) )
        msg$ = msg$ + " neural model = " + str$( outPred )
        print msg$
    next p
end sub

 
User IP Logged

Richard Russell
Administrator
ImageImageImageImageImage


member is offline

Avatar




Homepage PM


Posts: 1348
xx Re: neural net : xor
« Reply #1 on: Jan 18th, 2017, 5:44pm »

on Jan 18th, 2017, 1:07pm, bluatigro wrote:
JB reports a syntax error, but not where

LBB throws an error at this function definition, which looks like it hasn't been fully translated from the original C++:

Code:
function tanh( x as double ) as double 

If you delete the spurious 'as double' declarations it should help.

Richard.
User IP Logged

bluatigro
Full Member
ImageImageImage


member is offline

Avatar




PM

Gender: Male
Posts: 111
xx Re: neural net : xor
« Reply #2 on: Jan 19th, 2017, 08:48am »

@ richard :
thanks for that
on other forums they found the remaining errors

the errors came because I translated it in FreeBASIC first

update :
it works !!

Code:
'' Author:        John McCullock
'' Date:        12-11-2005
'' Description:    Backpropagation XOR Example 2.
'' Sources: Dr Phil Brierley, www.philbrierley.com
'' translated from c++ to JB/LB by bluatigro
global numInputs : numInputs = 3       '' Input nodes, plus the bias input.
global numPatterns : numPatterns = 4     '' Input patterns for XOR experiment.
global numHidden : numHidden = 4
global numEpochs : numEpochs = 200
global LR.IH : LR.IH = 0.7       '' Learning rate, input to hidden weights.
global LR.HO : LR.HO = 0.07      '' Learning rate, hidden to output weights.
global patNum : patNum = 0       '' Index of the pattern currently shown.
global errThisPat : errThisPat = 0.0   '' Error of the current pattern.
global outPred : outPred = 0.0                  '' Network output value.
global RMSerror : RMSerror = 0.0                 '' Root Mean Squared error.
dim hiddenVal( numHidden )         '' Hidden node outputs.
'' BUG FIX: these two dims were pasted onto one line, which buried
'' 'dim weightsHO(...)' inside the trailing comment so it never ran
'' (it only worked by accident because LB auto-dims small arrays).
dim weightsIH( numInputs , numHidden )  '' Input to Hidden weights.
dim weightsHO( numHidden )          '' Hidden to Output weights.
dim trainInputs( numPatterns , numInputs )
dim trainOutput( numPatterns )         '' "Actual" output values.

call initWeights
call initData
'' Train the network
for j = 0 to numEpochs
  for i = 0 to numPatterns
    ''Select a pattern at random.
    patNum = i ''rnd(0) * numPatterns
    ''Calculate the output and error for this pattern.
    call calcNet
    ''Adjust network weights.
    call WeightChangesHO
    call WeightChangesIH
  next i
  call calcOverallError
  ''Display the overall network error after each epoch
  print "epoch = " + str$(j) + " RMS Error = " + str$(RMSerror)
next j
''Training has finished.
call displayResults
input "[ pres return ]" ; in$
end

function tanh( x )
'' Hyperbolic tangent via exponentials: (1 - e^-2x) / (1 + e^-2x).
'' Note '0 - x * 2' parses as 0 - (x * 2), i.e. -2x, as intended.
    e2x = exp( 0 - x * 2 )
    tanh = ( 1 - e2x ) / ( 1 + e2x )
end function

sub initWeights
'' Seed both weight arrays with small zero-centred random values,
'' printing each input-to-hidden weight as it is created.
    for h = 0 to numHidden
        '' hidden-to-output weight in [-0.25 , 0.25)
        weightsHO( h ) = ( rnd(0) - 0.5 ) / 2
        for n = 0 to numInputs
            '' input-to-hidden weight in [-0.1 , 0.1)
            weightsIH( n , h ) = ( rnd(0) - 0.5 ) / 5
            print "Weight = " + str$( weightsIH( n , h ) )
        next n
    next h
end sub

sub initData
'' Load the XOR truth table rescaled from 0/1 into -1..1 (targets must
'' stay within -1..1 for the tanh network).  The third input column is
'' a constant 1 serving as the bias input.
'' Layout per pattern: input0 , input1 , bias -> target.
    trainInputs(0,0) =  1 : trainInputs(0,1) = -1 : trainInputs(0,2) = 1
    trainOutput(0)   =  1
    trainInputs(1,0) = -1 : trainInputs(1,1) =  1 : trainInputs(1,2) = 1
    trainOutput(1)   =  1
    trainInputs(2,0) =  1 : trainInputs(2,1) =  1 : trainInputs(2,2) = 1
    trainOutput(2)   = -1
    trainInputs(3,0) = -1 : trainInputs(3,1) = -1 : trainInputs(3,2) = 1
    trainOutput(3)   = -1
end sub

sub calcNet
'' Forward pass for pattern patNum: tanh activations for the hidden
'' layer, then the linear output node and the signed error.
    for h = 0 to numHidden
        acc = 0.0
        for n = 0 to numInputs
            acc = acc + trainInputs( patNum , n ) * weightsIH( n , h )
        next n
        hiddenVal( h ) = tanh( acc )
    next h
    '' Output node is a plain weighted sum (no squashing function).
    outPred = 0.0
    for h = 0 to numHidden
        outPred = outPred + hiddenVal( h ) * weightsHO( h )
    next h
    '' Signed error for this pattern: prediction minus target.
    errThisPat = outPred - trainOutput( patNum )
end sub

sub WeightChangesHO
'' Gradient-descent update of the Hidden to Output weights, clamped
'' into the band [-5 , 5] as a simple regularizer.
    for k = 0 to numHidden
        w = weightsHO( k ) - LR.HO * errThisPat * hiddenVal( k )
        '' min/max replace the original pair of if-blocks.
        weightsHO( k ) = max( -5 , min( 5 , w ) )
    next k
end sub

sub WeightChangesIH
'' Backpropagate the output error to the Input to Hidden weights;
'' (1 - h*h) is the derivative of the tanh hidden activation.
    for h = 0 to numHidden
        '' Invariant for the inner loop, so computed once per node.
        grad = ( 1 - hiddenVal( h ) * hiddenVal( h ) ) * weightsHO( h ) * errThisPat * LR.IH
        for n = 0 to numInputs
            weightsIH( n , h ) = weightsIH( n , h ) - grad * trainInputs( patNum , n )
        next n
    next h
end sub

sub calcOverallError
'' Root-mean-square error over all training patterns.
'' Side effect: leaves patNum at the last pattern index.
    total = 0.0
    for p = 0 to numPatterns
        patNum = p
        call calcNet
        total = total + errThisPat * errThisPat
    next p
    RMSerror = sqr( total / numPatterns )
end sub

sub displayResults
'' Show target vs. network output for every pattern (1-based labels).
    for p = 0 to numPatterns
        patNum = p
        call calcNet
        msg$ = "pat = " + str$( patNum + 1 )
        msg$ = msg$ + " actual = " + str$( trainOutput(patNum) )
        msg$ = msg$ + " neural model = " + str$( outPred )
        print msg$
    next p
end sub

 
User IP Logged

Pages: 1  Notify Send Topic Print
« Previous Topic | Next Topic »

| |

This forum powered for FREE by Conforums ©
Terms of Service | Privacy Policy | Conforums Support | Parental Controls