From 7c3c5dd0f8d7089bd50282e9dcd56e36798e18cf Mon Sep 17 00:00:00 2001
From: Georg Holzmann
Date: Tue, 12 Jul 2005 14:40:21 +0000
Subject: initial commit of pix_recNN

svn path=/trunk/externals/grh/; revision=3320
---
 pix_recNN/Makefile            |  46 +++
 pix_recNN/NNActivation.h      |  78 ++++++
 pix_recNN/NNException.h       |  49 ++++
 pix_recNN/NNet.h              | 636 ++++++++++++++++++++++++++++++++++++++++++
 pix_recNN/Neuron.cpp          | 169 +++++++++++
 pix_recNN/Neuron.h            | 191 +++++++++++++
 pix_recNN/RecurrentNeuron.cpp | 226 +++++++++++++++
 pix_recNN/RecurrentNeuron.h   | 149 ++++++++++
 pix_recNN/gpl.txt             | 346 +++++++++++++++++++++++
 pix_recNN/help-pix_recNN.pd   | 146 ++++++++++
 pix_recNN/pix_recNN.cpp       | 423 ++++++++++++++++++++++++++++
 pix_recNN/pix_recNN.h         | 204 ++++++++++++++
 pix_recNN/readme.txt          |  27 ++
 13 files changed, 2690 insertions(+)
 create mode 100755 pix_recNN/Makefile
 create mode 100755 pix_recNN/NNActivation.h
 create mode 100755 pix_recNN/NNException.h
 create mode 100755 pix_recNN/NNet.h
 create mode 100755 pix_recNN/Neuron.cpp
 create mode 100755 pix_recNN/Neuron.h
 create mode 100755 pix_recNN/RecurrentNeuron.cpp
 create mode 100755 pix_recNN/RecurrentNeuron.h
 create mode 100755 pix_recNN/gpl.txt
 create mode 100755 pix_recNN/help-pix_recNN.pd
 create mode 100755 pix_recNN/pix_recNN.cpp
 create mode 100755 pix_recNN/pix_recNN.h
 create mode 100755 pix_recNN/readme.txt

diff --git a/pix_recNN/Makefile b/pix_recNN/Makefile
new file mode 100755
index 0000000..ab880e8
--- /dev/null
+++ b/pix_recNN/Makefile
@@ -0,0 +1,46 @@
+PD-PATH=/usr/lib/pd
+PD-SCR=/usr/include
+
+# location of the GEM sources and Gem.pd_linux:
+GEM-SCR=/home/Georg/pd-cvs/gem/Gem/src
+GEM-LIB=$(PD-PATH)/extra/Gem.pd_linux
+
+
+CC = g++
+LD = g++
+INCLUDE=-I$(PD-SCR) -I$(GEM-SCR) -I./src
+LIB=-lc -lm -L$(GEM-LIB)
+CC_FLAGS = -c -Wall -g -g -O2 -mmmx -fno-builtin -O3 -funroll-loops -ffast-math
+LD_FLAGS = --export-dynamic -shared -o
+
+
+TARGET=pix_recNN.pd_linux
+OBJ=RecurrentNeuron.o Neuron.o pix_recNN.o
+#--------------------------------------------------------
+
+all: pd_linux
+
+pd_linux: $(TARGET)
+
+$(TARGET): $(OBJ)
+	$(LD) $(LD_FLAGS) $(TARGET) $(OBJ) $(LIB)
+	strip --strip-unneeded $(TARGET)
+	chmod 755 $(TARGET)
+
+pix_recNN.o: RecurrentNeuron.o pix_recNN.h pix_recNN.cpp NNet.h NNException.h
+	$(CC) $(CC_FLAGS) $(INCLUDE) pix_recNN.cpp
+
+
+RecurrentNeuron.o: RecurrentNeuron.cpp RecurrentNeuron.h Neuron.o NNActivation.h
+
+Neuron.o: Neuron.cpp Neuron.h NNActivation.h
+
+#--------------------------------------------------------
+
+clean:
+	rm -f $(OBJ) $(TARGET)
+
+
+install:
+	cp -f $(TARGET) $(PD-PATH)/externs
+	cp -f *.pd $(PD-PATH)/doc/5.reference
diff --git a/pix_recNN/NNActivation.h b/pix_recNN/NNActivation.h
new file mode 100755
index 0000000..e91c046
--- /dev/null
+++ b/pix_recNN/NNActivation.h
@@ -0,0 +1,78 @@
+/////////////////////////////////////////////////////////////////////////////
+//
+// NNActivation.h
+//
+// all the activation functions of the neurons
+//
+// header file
+//
+// Copyright (c) 2005 Georg Holzmann
+//
+// This program is free software; you can redistribute it and/or
+// modify it under the terms of the GNU General Public License
+// as published by the Free Software Foundation; either version 2
+// of the License, or (at your option) any later version.
+//
+/////////////////////////////////////////////////////////////////////////////
+
+
+#ifndef _INCLUDE_ACTIVATION_NET__
+#define _INCLUDE_ACTIVATION_NET__
+
+
+#include <math.h>
+
+namespace TheBrain
+{
+
+//------------------------------------------------------
+/* implementation of the different activation functions
+ * and their derivatives
+ */
+
+/* Linear activation function.
+ * span: -inf < y < inf
+ * y = x
+*/
+#define LINEAR 0
+
+/* Sigmoid activation function.
+ * span: 0 < y < 1
+ * y = 1/(1 + exp(-x)), y' = y*(1 - y)
+ */
+#define SIGMOID 1
+
+/* Symmetric sigmoid activation function, a.k.a. tanh.
+ * span: -1 < y < 1
+ * y = tanh(x) = 2/(1 + exp(-2*x)) - 1, y' = 1 - (y*y)
+*/
+#define TANH 2
+
+// linear function
+float act_linear(float value)
+{ return value; }
+
+// derivative of the linear function
+float act_linear_derive(float value)
+{ return 1; }
+
+// sigmoid function
+float act_sigmoid(float value)
+{ return (1.0f/(1.0f + exp(-value))); }
+
+// derivative of the sigmoid function (expects the sigmoid output y)
+float act_sigmoid_derive(float value)
+{ return (value * (1.0f - value)); }
+
+// tanh function
+float act_tanh(float value)
+{ return (2.0f/(1.0f + exp(-2.0f * value)) - 1.0f); }
+
+// derivative of the tanh function (expects the tanh output y)
+float act_tanh_derive(float value)
+{ return (1.0f - (value*value)); }
+
+
+} // end of namespace
+
+#endif // _INCLUDE_ACTIVATION_NET__
diff --git a/pix_recNN/NNException.h b/pix_recNN/NNException.h
new file mode 100755
index 0000000..bcb7be5
--- /dev/null
+++ b/pix_recNN/NNException.h
@@ -0,0 +1,49 @@
+/////////////////////////////////////////////////////////////////////////////
+//
+// NNException.h
+//
+// global stuff for all the nets
+//
+// header file
+//
+// Copyright (c) 2005 Georg Holzmann
+//
+// This program is free software; you can redistribute it and/or
+// modify it under the terms of the GNU General Public License
+// as published by the Free Software Foundation; either version 2
+// of the License, or (at your option) any later version.
+//
+/////////////////////////////////////////////////////////////////////////////
+
+
+#ifndef _INCLUDE_NNDEFINES_NET__
+#define _INCLUDE_NNDEFINES_NET__
+
+#include <string>
+
+using std::string;
+
+namespace TheBrain
+{
+
+//------------------------------------------------------
+/* the exception class for all the neural network stuff
+ */
+class NNExcept
+{
+ protected:
+  string message_;
+
+ public:
+  NNExcept(string message="")
+  { message_ = message; }
+  virtual ~NNExcept() { }
+
+  virtual string what()
+  { return message_; }
+};
+
+} // end of namespace TheBrain
+
+#endif //_INCLUDE_NNDEFINES_NET__
+
diff --git a/pix_recNN/NNet.h b/pix_recNN/NNet.h
new file mode 100755
index 0000000..349688f
--- /dev/null
+++ b/pix_recNN/NNet.h
@@ -0,0 +1,636 @@
+/////////////////////////////////////////////////////////////////////////////
+//
+// class NNet
+//
+// this is a template for all the nets
+// (see the NeuralNet documentation for more information)
+//
+// header file
+//
+// Copyright (c) 2005 Georg Holzmann
+//
+//
+// This program is free software; you can redistribute it and/or
+// modify it under the terms of the GNU General Public License
+// as published by the Free Software Foundation; either version 2
+// of the License, or (at your option) any later version.
+//
+/////////////////////////////////////////////////////////////////////////////
+
+
+#ifndef _INCLUDE_NEURAL_TEMPLATE_NET__
+#define _INCLUDE_NEURAL_TEMPLATE_NET__
+
+#include "NNActivation.h"
+#include "NNException.h"
+
+namespace TheBrain
+{
+
+template <class HiddNeuronType, class OutNeuronType>
+class NNet
+{
+ protected:
+
+  /* the number of output values
+   * (this is automatically also the
+   * number of output neurons !)
+   */
+  int output_val_;
+
+  /* the number of hidden neurons
+   * per output neuron
+   * (this net has one hidden layer,
+   * so the total number of hidden
+   * neurons is hidden_val_*output_val_)
+   */
+  int hidden_val_;
+
+  /* the number of input values per output neuron
+   * (so the total number of input values
+   * is input_val_*output_val_)
+   */
+  int input_val_;
+
+  /* the memory of the output layer
+   * if you use a recurrent neuron, this
+   * determines how many output values the
+   * recurrent neurons can remember
+   * these values are fed back as new input
+   */
+  int memory_out_;
+
+  /* the memory of the hidden layer
+   * if you use a recurrent neuron, this
+   * determines how many output values the
+   * recurrent neurons can remember
+   * these values are fed back as new input
+   */
+  int memory_hidden_;
+
+  /* these are the output neurons
+   */
+  OutNeuronType *out_neurons_;
+
+  /* these are the hidden neurons
+   */
+  HiddNeuronType *hidden_neurons_;
+
+  /* function pointer to the activation
+   * function of the output neurons
+   */
+  float (*output_act_f)(float value);
+
+  /* function pointer to the activation
+   * function of the hidden neurons
+   */
+  float (*hidden_act_f)(float value);
+
+  /* function pointer to the derivative of the
+   * activation function of the hidden neurons
+   */
+  float (*hidden_act_f_d)(float value);
+
+
+ public:
+
+  /* Constructor
+   */
+  NNet(int input_val=1, int hidden_val=1, int output_val=1, int memory_out=0,
+       int memory_hidden=1, int HIDDEN_ACT_FUNC=0, int OUT_ACT_FUNC=0);
+
+  /* Destructor
+   */
+  virtual ~NNet();
+
+
+  //-----------------------------------------------------
+
+  /* Set/Get learning rate
+   */
+  virtual void setLearningRate(float learn_rate);
+  virtual float getLearningRate() const;
+
+  /* Set/Get range
+   * (see Neuron.h)
+   */
+  virtual void setRange(float range);
+  virtual float getRange() const;
+
+  /* some more get/set methods
+   */
+  virtual void setOutputVal(int output_val)
+    throw();
+  virtual int getOutputVal() const;
+
+  virtual void setHiddenVal(int hidden_val)
+    throw();
+  virtual int getHiddenVal() const;
+
+  virtual void setInputVal(int input_val)
+    throw();
+  virtual int getInputVal() const;
+
+  virtual void setMemoryOut(int memory)
+    throw();
+  virtual int getMemoryOut() const;
+
+  virtual void setMemoryHidden(int memory)
+    throw();
+  virtual int getMemoryHidden() const;
+
+
+  //-----------------------------------------------------
+
+  /* creates the network
+   */
+  virtual void create()
+    throw(NNExcept);
+
+  /* inits the weight matrix and the bias vector of
+   * the network with random values between [min|max]
+   */
+  virtual void initRand(const int &min, const int &max)
+    throw(NNExcept);
+
+  /* calculates the output with the current net and writes
+   * it into the array output_data
+   * ATTENTION: array input_data must be a matrix of the form
+   * float[output_val_][input_val_],
+   * array output_data must be of size output_val_
+   * (there is no checking !!!)
+   */
+  virtual void calculate(float **input_data, float *output_data);
+
+  /* this method trains the network:
+   * input_data is, as above, the input data, output_data is the
+   * output of the current net for input_data, target_output is
+   * the desired output data
+   * (this is a truncated backpropagation through time
+   * algorithm to train the network)
+   * ATTENTION: array input_data must be a matrix of the form
+   * float[output_val_][input_val_],
+   * array output_data must be of size output_val_,
+   * array target_output must be of size output_val_
+   * (there is no checking !!!)
+   */
+  virtual void trainBTT(float **input_data, float *output_data,
+                        float *target_output);
+
+
+  //-----------------------------------------------------
+
+  /* saves the contents of the current net to file
+   */
+  virtual void save(string filename)
+    throw(NNExcept);
+
+  /* loads the parameters of the net from file
+   */
+  virtual void load(string filename)
+    throw(NNExcept);
+
+
+  //-----------------------------------------------------
+ private:
+
+  /* output of the hidden layer with activation function
+   */
+  float *hidden_a_;
+
+  /* output of the hidden layer without activation function
+   */
+  float *hidden_s_;
+
+  /* error signal of the neurons in the hidden layer
+   */
+  float *hidden_error_;
+
+  /* output signal without activation function
+   */
+  float out_s_;
+
+  /* error signal of the output layer
+   */
+  float out_error_;
+
+  /* copy construction is not allowed
+   */
+  NNet(const NNet &src)
+  { }
+
+  /* assignment operator is not allowed
+   */
+  const NNet& operator=
+  (const NNet& src)
+  { return *this; }
+};
+
+
+//--------------------------------------------------
+/* Constructor
+ */
+template <class HiddNeuronType, class OutNeuronType>
+NNet<HiddNeuronType, OutNeuronType>
+  ::NNet(int input_val, int hidden_val, int output_val, int memory_out,
+         int memory_hidden, int HIDDEN_ACT_FUNC, int OUT_ACT_FUNC)
+  : out_neurons_(NULL), hidden_neurons_(NULL), hidden_a_(NULL),
+    hidden_s_(NULL), hidden_error_(NULL)
+{
+  output_val_ = (output_val<1) ? 1 : output_val;
+  hidden_val_ = (hidden_val<0) ? 0 : hidden_val;
+  input_val_ = (input_val<1) ? 1 : input_val;
+  memory_out_ = (memory_out<0) ? 0 : memory_out;
+  memory_hidden_ = (memory_hidden<0) ? 0 : memory_hidden;
+
+  // choose hidden activation function:
+  switch(HIDDEN_ACT_FUNC)
+  {
+    case SIGMOID:
+      hidden_act_f = act_sigmoid;
+      hidden_act_f_d = act_sigmoid_derive;
+      break;
+    case TANH:
+      hidden_act_f = act_tanh;
+      hidden_act_f_d = act_tanh_derive;
+      break;
+    default:
+    case LINEAR:
+      hidden_act_f = act_linear;
+      hidden_act_f_d = act_linear_derive;
+      break;
+  }
+
+  // choose out function:
+  switch(OUT_ACT_FUNC)
+  {
+    case SIGMOID:
+      output_act_f = act_sigmoid;
+      break;
+    case TANH:
+      output_act_f = act_tanh;
+      break;
+    default:
+    case LINEAR:
+      output_act_f = act_linear;
+      break;
+  }
+}
+
+//--------------------------------------------------
+/* Destructor
+ */
+template <class HiddNeuronType, class OutNeuronType>
+NNet<HiddNeuronType, OutNeuronType>::~NNet()
+{
+  if(hidden_neurons_)
+    delete[] hidden_neurons_;
+
+  if(out_neurons_)
+    delete[] out_neurons_;
+
+  if(hidden_a_)
+    delete[] hidden_a_;
+
+  if(hidden_s_)
+    delete[] hidden_s_;
+
+  if(hidden_error_)
+    delete[] hidden_error_;
+}
+
+//--------------------------------------------------
+/* creates the network
+ */
+template <class HiddNeuronType, class OutNeuronType>
+void NNet<HiddNeuronType, OutNeuronType>::create()
+  throw(NNExcept)
+{
+  // delete if they exist
+  if(out_neurons_)
+    delete[] out_neurons_;
+  if(hidden_neurons_)
+    delete[] hidden_neurons_;
+  if(hidden_a_)
+    delete[] hidden_a_;
+  if(hidden_s_)
+    delete[] hidden_s_;
+  if(hidden_error_)
+    delete[] hidden_error_;
+
+
+  out_neurons_ = new OutNeuronType[output_val_](input_val_,memory_out_);
+  hidden_neurons_ = new HiddNeuronType[hidden_val_*output_val_](input_val_,memory_hidden_);
+
+  if(!out_neurons_ || !hidden_neurons_)
+    throw NNExcept("No memory for Neurons!");
+
+  // create the temporary storage
+  hidden_a_ = new float[hidden_val_];
+  hidden_s_ = new float[hidden_val_];
+  hidden_error_ = new float[hidden_val_];
+
+  if(!hidden_a_ || !hidden_s_ || !hidden_error_)
+    throw NNExcept("No memory for Neurons!");
+
+
+  // create all the neurons
+  for(int i=0; i<output_val_; i++)
+    out_neurons_[i].create();
+  for(int i=0; i<hidden_val_*output_val_; i++)
+    hidden_neurons_[i].create();
+}
+
+//--------------------------------------------------
+/* inits the weight matrix and the bias vector of
+ * the network with random values between [min|max]
+ */
+template <class HiddNeuronType, class OutNeuronType>
+void NNet<HiddNeuronType, OutNeuronType>::initRand(const int &min, const int &max)
+  throw(NNExcept)
+{
+  if(!out_neurons_)
+    throw NNExcept("You must first create the Net!");
+
+  // init all the neurons
+  for(int i=0; i<output_val_; i++)
+    out_neurons_[i].initRand(min,max);
+  for(int i=0; i<hidden_val_*output_val_; i++)
+    hidden_neurons_[i].initRand(min,max);
+}
+
+//--------------------------------------------------
+/* calculates the output with the current net and writes
+ * it into the array output_data
+ */
+template <class HiddNeuronType, class OutNeuronType>
+void NNet<HiddNeuronType, OutNeuronType>::calculate(float **input_data, float *output_data)
+{
+  for(int i=0; i