R/Examples/squareRoot.R

#Generate training data: y = sqrt(x)
trainingData <- data.frame(y = sqrt(seq(0.1, 1, 0.1)), x = seq(0.1, 1, 0.1))

#Train the neural network for 5 generations, and plot the fitness
rneatsim <- rneatneuralnet(y~x,trainingData,5)
plot(rneatsim)

#Continue training the network for another 5 generations
rneatsim <- rneatneuralnetcontinuetraining(rneatsim,5)
plot(rneatsim)

#Construct fresh input data to pass through the trained network; the output should approximate sqrt(x)
liveData <- data.frame(x = seq(0.1, 1, 0.01))

liveDataExpectedOutput <- sqrt(liveData)
colnames(liveDataExpectedOutput) <- "yExpected"

#Pass the data through the network
results <- compute(rneatsim,liveData)
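#Note (assumption, not stated in the original example): results is expected to be a data
#frame containing the input column x and the network prediction yPred, which the column
#names used below rely on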

#Calculate the error between yPred (the neural network output) and yExpected (the true square root of the input)
error <- liveDataExpectedOutput[,"yExpected"] - results[,"yPred"]
results <- cbind(results,liveDataExpectedOutput,error)
print(results)
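
#Optional check (an addition to the original example): summarise prediction accuracy with
#the root-mean-square error over the error column computed above, using base R only
rmse <- sqrt(mean(results[,"error"]^2))
print(paste("RMSE over the live data:", round(rmse, 4)))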

dev.new()
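#The layout matrix below defines three plotting regions: region 3 spans the full top row
#(the simulation plot), region 1 (bottom left) holds the expected vs predicted curves,
#region 2 (bottom right) holds the fitness plot, and region 4 is left empty as a spacer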
layout(matrix(c(3,3,3,1,4,2), 2, 3, byrow = TRUE),heights=c(1,2))
plot(x=results[,"x"],y=results[,"yExpected"],type="l", main="Neural Network y=sqrt(x) expected vs predicted",xlab="x",ylab="y")
lines(x=results[,"x"],y=results[,"yPred"],col="red",type="l")
legend(x='bottomright', legend=c('yExpected','yPredicted'), col=c("black","red"), lty=1, bty='n')
plot(rneatsim)
plot(rneatsim$simulation)