author     Davide Morelli <morellid@users.sourceforge.net>    2005-05-20 20:53:00 +0000
committer  Davide Morelli <morellid@users.sourceforge.net>    2005-05-20 20:53:00 +0000
commit     690e7788439c0c549f394783b924f07be12be87e (patch)
tree       5d53e269dbaf2e9e37e08f73ccf6c7a606be5606
parent     c89e29e887882518062265c5f376ea047b10f756 (diff)
mlp and td are ready to be compiled as a single file (library); ann.c needs to be changed: we should add #include ann_*.c
svn path=/trunk/externals/ann/; revision=3045
-rwxr-xr-x  src/ann_mlp.c      151
-rwxr-xr-x  src/ann_td.c       146
-rwxr-xr-x  src/makefile.msvc   21
3 files changed, 165 insertions, 153 deletions
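
As the commit message above notes, ann.c is meant to pull the per-object sources into one translation unit, but that file is not touched in this commit. The following is only a rough sketch of the "#include ann_*.c" approach it describes; the ann_som_setup() name and the combined ann_setup() entry point are assumptions, not code from this repository.

/* hypothetical ann.c -- illustration only, not part of this commit */
#include "ann_som.c"   /* assumed to use the same per-object prefixing */
#include "ann_mlp.c"
#include "ann_td.c"

/* one entry point for the combined library; the makefile's
   "link /dll /export:$*_setup" rule would export ann_setup for ann.dll */
void ann_setup(void)
{
    ann_som_setup();
    ann_mlp_setup();
    ann_td_setup();
}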
diff --git a/src/ann_mlp.c b/src/ann_mlp.c
index 6db3238..64ce75f 100755
--- a/src/ann_mlp.c
+++ b/src/ann_mlp.c
@@ -10,8 +10,10 @@
#include "m_pd.h"
#include "fann.h"
+#ifndef VERSION
+#define VERSION "0.2"
+#endif
-#define VERSION "0.03"
#ifndef __DATE__
#define __DATE__ ""
#endif
@@ -36,7 +38,7 @@ typedef struct _ann_mlp {
t_outlet *l_out, *f_out;
} t_ann_mlp;
-static void help(t_ann_mlp *x)
+static void ann_mlp_help(t_ann_mlp *x)
{
post("");
post("ann_mlp: neural nets for PD");
@@ -48,7 +50,7 @@ static void help(t_ann_mlp *x)
}
-static void createFann(t_ann_mlp *x, t_symbol *sl, int argc, t_atom *argv)
+static void ann_mlp_createFann(t_ann_mlp *x, t_symbol *sl, int argc, t_atom *argv)
{
unsigned int num_input = 2;
unsigned int num_output = 1;
@@ -77,13 +79,13 @@ static void createFann(t_ann_mlp *x, t_symbol *sl, int argc, t_atom *argv)
if (num_input>MAXINPUT)
{
- error("too many inputs, maximum allowed is %d",MAXINPUT);
+ error("too many inputs, maximum allowed is MAXINPUT");
return;
}
if (num_output>MAXOUTPUT)
{
- error("too many outputs, maximum allowed is %d", MAXOUTPUT);
+ error("too many outputs, maximum allowed is MAXOUTPUT");
return;
}
@@ -108,7 +110,7 @@ static void createFann(t_ann_mlp *x, t_symbol *sl, int argc, t_atom *argv)
}
}
-static void print_status(t_ann_mlp *x)
+static void ann_mlp_print_status(t_ann_mlp *x)
{
if (x->mode == TRAIN)
post("nn:training");
@@ -116,7 +118,7 @@ static void print_status(t_ann_mlp *x)
post("nn:running");
}
-static void train(t_ann_mlp *x)
+static void ann_mlp_train(t_ann_mlp *x)
{
x->mode=TRAIN;
if (x->ann == 0)
@@ -125,16 +127,16 @@ static void train(t_ann_mlp *x)
return;
}
fann_reset_MSE(x->ann);
- print_status(x);
+ ann_mlp_print_status(x);
}
-static void run(t_ann_mlp *x)
+static void ann_mlp_run(t_ann_mlp *x)
{
x->mode=RUN;
- print_status(x);
+ ann_mlp_print_status(x);
}
-static void set_mode(t_ann_mlp *x, t_symbol *sl, int argc, t_atom *argv)
+static void ann_mlp_set_mode(t_ann_mlp *x, t_symbol *sl, int argc, t_atom *argv)
{
if (argc<1)
{
@@ -143,13 +145,13 @@ static void set_mode(t_ann_mlp *x, t_symbol *sl, int argc, t_atom *argv)
else
{
x->mode = atom_getint(argv++);
- print_status(x);
+ ann_mlp_print_status(x);
}
}
-static void train_on_file(t_ann_mlp *x, t_symbol *sl, int argc, t_atom *argv)
+static void ann_mlp_train_on_file(t_ann_mlp *x, t_symbol *sl, int argc, t_atom *argv)
{
if (x->ann == 0)
{
@@ -171,10 +173,10 @@ static void train_on_file(t_ann_mlp *x, t_symbol *sl, int argc, t_atom *argv)
fann_train_on_file(x->ann, x->filenametrain->s_name, x->max_iterations,
x->iterations_between_reports, x->desired_error);
- post("nn: finished training on file %s", x->filenametrain->s_name);
+ post("ann_mlp: finished training on file %s", x->filenametrain->s_name);
}
-static void set_desired_error(t_ann_mlp *x, t_symbol *sl, int argc, t_atom *argv)
+static void ann_mlp_set_desired_error(t_ann_mlp *x, t_symbol *sl, int argc, t_atom *argv)
{
float desired_error = (float)0.001;
if (0<argc)
@@ -188,7 +190,7 @@ static void set_desired_error(t_ann_mlp *x, t_symbol *sl, int argc, t_atom *argv
}
}
-static void set_max_iterations(t_ann_mlp *x, t_symbol *sl, int argc, t_atom *argv)
+static void ann_mlp_set_max_iterations(t_ann_mlp *x, t_symbol *sl, int argc, t_atom *argv)
{
unsigned int max_iterations = 500000;
if (argc>0)
@@ -202,7 +204,7 @@ static void set_max_iterations(t_ann_mlp *x, t_symbol *sl, int argc, t_atom *arg
}
}
-static void set_iterations_between_reports(t_ann_mlp *x, t_symbol *sl, int argc, t_atom *argv)
+static void ann_mlp_set_iterations_between_reports(t_ann_mlp *x, t_symbol *sl, int argc, t_atom *argv)
{
unsigned int iterations_between_reports = 1000;
@@ -221,7 +223,7 @@ static void set_iterations_between_reports(t_ann_mlp *x, t_symbol *sl, int argc,
// run the ann using floats in list passed to the inlet as input values
// and send result to outlet as list of float
-static void run_the_net(t_ann_mlp *x, t_symbol *sl, int argc, t_atom *argv)
+static void ann_mlp_run_the_net(t_ann_mlp *x, t_symbol *sl, int argc, t_atom *argv)
{
int i=0;
fann_type input[MAXINPUT];
@@ -275,7 +277,7 @@ static void run_the_net(t_ann_mlp *x, t_symbol *sl, int argc, t_atom *argv)
}
-static void train_on_the_fly(t_ann_mlp *x, t_symbol *sl, int argc, t_atom *argv)
+static void ann_mlp_train_on_the_fly(t_ann_mlp *x, t_symbol *sl, int argc, t_atom *argv)
{
int i=0;
fann_type input[MAXINPUT];
@@ -334,17 +336,17 @@ static void train_on_the_fly(t_ann_mlp *x, t_symbol *sl, int argc, t_atom *argv)
}
-static void manage_list(t_ann_mlp *x, t_symbol *sl, int argc, t_atom *argv)
+static void ann_mlp_manage_list(t_ann_mlp *x, t_symbol *sl, int argc, t_atom *argv)
{
if (x->mode)
- run_the_net(x, sl, argc, argv);
+ ann_mlp_run_the_net(x, sl, argc, argv);
else
{
- train_on_the_fly(x, sl, argc, argv);
+ ann_mlp_train_on_the_fly(x, sl, argc, argv);
}
}
-static void set_filename(t_ann_mlp *x, t_symbol *sl, int argc, t_atom *argv)
+static void ann_mlp_set_filename(t_ann_mlp *x, t_symbol *sl, int argc, t_atom *argv)
{
if (argc>0) {
x->filename = atom_gensym(argv);
@@ -355,7 +357,7 @@ static void set_filename(t_ann_mlp *x, t_symbol *sl, int argc, t_atom *argv)
post("nn:filename set to %s", x->filename->s_name);
}
-static void load_ann_from_file(t_ann_mlp *x, t_symbol *sl, int argc, t_atom *argv)
+static void ann_mlp_load_ann_from_file(t_ann_mlp *x, t_symbol *sl, int argc, t_atom *argv)
{
if (argc>0) {
x->filename = atom_gensym(argv);
@@ -367,7 +369,7 @@ static void load_ann_from_file(t_ann_mlp *x, t_symbol *sl, int argc, t_atom *arg
post("nn:ann loaded fom file %s", x->filename->s_name);
}
-static void save_ann_to_file(t_ann_mlp *x, t_symbol *sl, int argc, t_atom *argv)
+static void ann_mlp_save_ann_to_file(t_ann_mlp *x, t_symbol *sl, int argc, t_atom *argv)
{
if (argc>0) {
x->filename = atom_gensym(argv);
@@ -383,7 +385,7 @@ static void save_ann_to_file(t_ann_mlp *x, t_symbol *sl, int argc, t_atom *argv)
}
// functions for training algo:
-static void set_FANN_TRAIN_INCREMENTAL(t_ann_mlp *x)
+static void ann_mlp_set_FANN_TRAIN_INCREMENTAL(t_ann_mlp *x)
{
if (x->ann == 0)
{
@@ -394,7 +396,7 @@ static void set_FANN_TRAIN_INCREMENTAL(t_ann_mlp *x)
post("nn:training algorithm set to FANN_TRAIN_INCREMENTAL");
}
}
-static void set_FANN_TRAIN_BATCH(t_ann_mlp *x)
+static void ann_mlp_set_FANN_TRAIN_BATCH(t_ann_mlp *x)
{
if (x->ann == 0)
{
@@ -405,7 +407,7 @@ static void set_FANN_TRAIN_BATCH(t_ann_mlp *x)
post("nn:training algorithm set to FANN_TRAIN_BATCH");
}
}
-static void set_FANN_TRAIN_RPROP(t_ann_mlp *x)
+static void ann_mlp_set_FANN_TRAIN_RPROP(t_ann_mlp *x)
{
if (x->ann == 0)
{
@@ -416,7 +418,7 @@ static void set_FANN_TRAIN_RPROP(t_ann_mlp *x)
post("nn:training algorithm set to FANN_TRAIN_RPROP");
}
}
-static void set_FANN_TRAIN_QUICKPROP(t_ann_mlp *x)
+static void ann_mlp_set_FANN_TRAIN_QUICKPROP(t_ann_mlp *x)
{
if (x->ann == 0)
{
@@ -428,7 +430,7 @@ static void set_FANN_TRAIN_QUICKPROP(t_ann_mlp *x)
}
}
-static void set_activation_function_output(t_ann_mlp *x, t_symbol *sl, int argc, t_atom *argv)
+static void ann_mlp_set_activation_function_output(t_ann_mlp *x, t_symbol *sl, int argc, t_atom *argv)
{
t_symbol *parametro = 0;
int funzione = 0;
@@ -464,33 +466,33 @@ static void set_activation_function_output(t_ann_mlp *x, t_symbol *sl, int argc,
}
-static void print_ann_details(t_ann_mlp *x)
+static void ann_mlp_print_ann_details(t_ann_mlp *x)
{
if (x->ann == 0)
{
- post("nn:ann is not initialized");
+ post("ann_mlp:ann is not initialized");
} else
{
- post("nn:follows a description of the current ann:");
- post("nn:num_input=%i", x->ann->num_input);
- post("nn:num_output=%i", x->ann->num_output);
- post("nn:learning_rate=%f", x->ann->learning_rate);
- post("nn:connection_rate=%f", x->ann->connection_rate);
- post("nn:total_neurons=%i", x->ann->total_neurons);
- post("nn:total_connections=%i", x->ann->total_connections);
- post("nn:last error=%i", x->ann->errstr);
+ post("follows a description of the current ann:");
+ post("num_input=%i", x->ann->num_input);
+ post("num_output=%i", x->ann->num_output);
+ post("learning_rate=%f", x->ann->learning_rate);
+ post("connection_rate=%f", x->ann->connection_rate);
+ post("total_neurons=%i", x->ann->total_neurons);
+ post("total_connections=%i", x->ann->total_connections);
+ post("last error=%i", x->ann->errstr);
if (x->filename == 0)
{
- post("nn:filename not set");
+ post("ann_mlp:filename not set");
} else
{
- post("nn:filename=%s", x->filename->s_name);
+ post("filename=%s", x->filename->s_name);
}
}
}
-static void *nn_new(t_symbol *s, int argc, t_atom *argv)
+static void *ann_mlp_new(t_symbol *s, int argc, t_atom *argv)
{
t_ann_mlp *x = (t_ann_mlp *)pd_new(ann_mlp_class);
x->l_out = outlet_new(&x->x_obj, &s_list);
@@ -503,13 +505,21 @@ static void *nn_new(t_symbol *s, int argc, t_atom *argv)
if (argc>0) {
x->filename = atom_gensym(argv);
- load_ann_from_file(x, NULL , 0, NULL);
+ ann_mlp_load_ann_from_file(x, NULL , 0, NULL);
}
+
+ post("");
+ post("ann_mlp: neural nets for PD");
+ post("version: "VERSION"");
+ post("compiled: "__DATE__);
+ post("author: Davide Morelli");
+ post("contact: info@davidemorelli.it www.davidemorelli.it");
+
return (void *)x;
}
// free resources
-static void nn_free(t_ann_mlp *x)
+static void ann_mlp_free(t_ann_mlp *x)
{
struct fann *ann = x->ann;
fann_destroy(ann);
@@ -518,45 +528,38 @@ static void nn_free(t_ann_mlp *x)
void ann_mlp_setup(void) {
- post("");
- post("ann_mlp: neural nets for PD");
- post("version: "VERSION"");
- post("compiled: "__DATE__);
- post("author: Davide Morelli");
- post("contact: info@davidemorelli.it www.davidemorelli.it");
-
ann_mlp_class = class_new(gensym("ann_mlp"),
- (t_newmethod)nn_new,
- (t_method)nn_free, sizeof(t_ann_mlp),
+ (t_newmethod)ann_mlp_new,
+ (t_method)ann_mlp_free, sizeof(t_ann_mlp),
CLASS_DEFAULT, A_GIMME, 0);
// general..
- class_addmethod(ann_mlp_class, (t_method)help, gensym("help"), 0);
- class_addmethod(ann_mlp_class, (t_method)createFann, gensym("create"), A_GIMME, 0);
- class_addmethod(ann_mlp_class, (t_method)train, gensym("train"), 0);
- class_addmethod(ann_mlp_class, (t_method)run, gensym("run"), 0);
- class_addmethod(ann_mlp_class, (t_method)set_mode, gensym("setmode"), A_GIMME, 0);
- class_addmethod(ann_mlp_class, (t_method)train_on_file, gensym("train-on-file"), A_GIMME, 0);
- class_addmethod(ann_mlp_class, (t_method)manage_list, gensym("data"), A_GIMME, 0);
- class_addmethod(ann_mlp_class, (t_method)set_filename, gensym("filename"), A_GIMME, 0);
- class_addmethod(ann_mlp_class, (t_method)load_ann_from_file, gensym("load"),A_GIMME, 0);
- class_addmethod(ann_mlp_class, (t_method)save_ann_to_file, gensym("save"),A_GIMME, 0);
- class_addmethod(ann_mlp_class, (t_method)print_ann_details, gensym("details"), 0);
+ class_addmethod(ann_mlp_class, (t_method)ann_mlp_help, gensym("help"), 0);
+ class_addmethod(ann_mlp_class, (t_method)ann_mlp_createFann, gensym("create"), A_GIMME, 0);
+ class_addmethod(ann_mlp_class, (t_method)ann_mlp_train, gensym("train"), 0);
+ class_addmethod(ann_mlp_class, (t_method)ann_mlp_run, gensym("run"), 0);
+ class_addmethod(ann_mlp_class, (t_method)ann_mlp_set_mode, gensym("setmode"), A_GIMME, 0);
+ class_addmethod(ann_mlp_class, (t_method)ann_mlp_train_on_file, gensym("train-on-file"), A_GIMME, 0);
+ class_addmethod(ann_mlp_class, (t_method)ann_mlp_manage_list, gensym("data"), A_GIMME, 0);
+ class_addmethod(ann_mlp_class, (t_method)ann_mlp_set_filename, gensym("filename"), A_GIMME, 0);
+ class_addmethod(ann_mlp_class, (t_method)ann_mlp_load_ann_from_file, gensym("load"),A_GIMME, 0);
+ class_addmethod(ann_mlp_class, (t_method)ann_mlp_save_ann_to_file, gensym("save"),A_GIMME, 0);
+ class_addmethod(ann_mlp_class, (t_method)ann_mlp_print_ann_details, gensym("details"), 0);
// change training parameters
- class_addmethod(ann_mlp_class, (t_method)set_desired_error, gensym("desired_error"),A_GIMME, 0);
- class_addmethod(ann_mlp_class, (t_method)set_max_iterations, gensym("max_iterations"),A_GIMME, 0);
- class_addmethod(ann_mlp_class, (t_method)set_iterations_between_reports, gensym("iterations_between_reports"),A_GIMME, 0);
+ class_addmethod(ann_mlp_class, (t_method)ann_mlp_set_desired_error, gensym("desired_error"),A_GIMME, 0);
+ class_addmethod(ann_mlp_class, (t_method)ann_mlp_set_max_iterations, gensym("max_iterations"),A_GIMME, 0);
+ class_addmethod(ann_mlp_class, (t_method)ann_mlp_set_iterations_between_reports, gensym("iterations_between_reports"),A_GIMME, 0);
// change training and activation algorithms
- class_addmethod(ann_mlp_class, (t_method)set_FANN_TRAIN_INCREMENTAL, gensym("FANN_TRAIN_INCREMENTAL"), 0);
- class_addmethod(ann_mlp_class, (t_method)set_FANN_TRAIN_BATCH, gensym("FANN_TRAIN_BATCH"), 0);
- class_addmethod(ann_mlp_class, (t_method)set_FANN_TRAIN_RPROP, gensym("FANN_TRAIN_RPROP"), 0);
- class_addmethod(ann_mlp_class, (t_method)set_FANN_TRAIN_QUICKPROP, gensym("FANN_TRAIN_QUICKPROP"), 0);
- class_addmethod(ann_mlp_class, (t_method)set_activation_function_output, gensym("set_activation_function_output"),A_GIMME, 0);
+ class_addmethod(ann_mlp_class, (t_method)ann_mlp_set_FANN_TRAIN_INCREMENTAL, gensym("FANN_TRAIN_INCREMENTAL"), 0);
+ class_addmethod(ann_mlp_class, (t_method)ann_mlp_set_FANN_TRAIN_BATCH, gensym("FANN_TRAIN_BATCH"), 0);
+ class_addmethod(ann_mlp_class, (t_method)ann_mlp_set_FANN_TRAIN_RPROP, gensym("FANN_TRAIN_RPROP"), 0);
+ class_addmethod(ann_mlp_class, (t_method)ann_mlp_set_FANN_TRAIN_QUICKPROP, gensym("FANN_TRAIN_QUICKPROP"), 0);
+ class_addmethod(ann_mlp_class, (t_method)ann_mlp_set_activation_function_output, gensym("set_activation_function_output"),A_GIMME, 0);
// the most important one: running the ann
- class_addlist(ann_mlp_class, (t_method)manage_list);
+ class_addlist(ann_mlp_class, (t_method)ann_mlp_manage_list);
// help patch
class_sethelpsymbol(ann_mlp_class, gensym("help-ann_mlp"));
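
The bulk of the ann_mlp.c changes is the ann_mlp_ prefix on every static helper. Once ann_mlp.c and ann_td.c are #included into the same translation unit, identically named statics such as help(), run() or train() would collide, so each file gets a unique prefix. A minimal illustration of the idea (not code from this repository):

/* two formerly separate sources merged into one translation unit:
   the prefixed statics coexist, whereas two plain `static void help(void)`
   definitions would be a redefinition error at compile time */
static void ann_mlp_help(void) { /* mlp help text */ }
static void ann_td_help(void)  { /* td help text  */ }

int main(void)
{
    ann_mlp_help();
    ann_td_help();
    return 0;
}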
diff --git a/src/ann_td.c b/src/ann_td.c
index d7dcf3e..8712e42 100755
--- a/src/ann_td.c
+++ b/src/ann_td.c
@@ -10,8 +10,10 @@
#include "m_pd.h"
#include "fann.h"
+#ifndef VERSION
+#define VERSION "0.2"
+#endif
-#define VERSION "0.01"
#ifndef __DATE__
#define __DATE__ ""
#endif
@@ -40,7 +42,7 @@ typedef struct _ann_td {
t_outlet *l_out, *f_out;
} t_ann_td;
-static void help(t_ann_td *x)
+static void ann_td_help(t_ann_td *x)
{
post("");
post("ann_td:time delay neural networks for PD");
@@ -52,7 +54,7 @@ static void help(t_ann_td *x)
}
-static void deallocate_inputs(t_ann_td *x)
+static void ann_td_deallocate_inputs(t_ann_td *x)
{
if (x->inputs != 0)
{
@@ -61,16 +63,16 @@ static void deallocate_inputs(t_ann_td *x)
}
}
-static void allocate_inputs(t_ann_td *x)
+static void ann_td_allocate_inputs(t_ann_td *x)
{
unsigned int i;
- deallocate_inputs(x);
+ ann_td_deallocate_inputs(x);
// allocate space for inputs array
x->inputs = (t_float *)getbytes((x->frames) * (x->num_input) * sizeof(t_float));
for (i=0; i<(x->frames * x->num_input); i++) x->inputs[i]=0.f;
}
-static void createFann(t_ann_td *x, t_symbol *sl, int argc, t_atom *argv)
+static void ann_td_createFann(t_ann_td *x, t_symbol *sl, int argc, t_atom *argv)
{
unsigned int num_input = 2;
unsigned int num_output = 1;
@@ -126,7 +128,7 @@ static void createFann(t_ann_td *x, t_symbol *sl, int argc, t_atom *argv)
fann_set_activation_function_hidden(x->ann, FANN_SIGMOID_SYMMETRIC);
fann_set_activation_function_output(x->ann, FANN_SIGMOID_SYMMETRIC);
- allocate_inputs(x);
+ ann_td_allocate_inputs(x);
if (x->ann == 0)
{
@@ -144,7 +146,7 @@ static void createFann(t_ann_td *x, t_symbol *sl, int argc, t_atom *argv)
}
}
-static void print_status(t_ann_td *x)
+static void ann_td_print_status(t_ann_td *x)
{
if (x->mode == TRAIN)
post("ann_td:training");
@@ -152,7 +154,7 @@ static void print_status(t_ann_td *x)
post("ann_td:running");
}
-static void train(t_ann_td *x)
+static void ann_td_train(t_ann_td *x)
{
x->mode=TRAIN;
if (x->ann == 0)
@@ -161,16 +163,16 @@ static void train(t_ann_td *x)
return;
}
fann_reset_MSE(x->ann);
- print_status(x);
+ ann_td_print_status(x);
}
-static void run(t_ann_td *x)
+static void ann_td_run(t_ann_td *x)
{
x->mode=RUN;
- print_status(x);
+ ann_td_print_status(x);
}
-static void set_mode(t_ann_td *x, t_symbol *sl, int argc, t_atom *argv)
+static void ann_td_set_mode(t_ann_td *x, t_symbol *sl, int argc, t_atom *argv)
{
if (argc<1)
{
@@ -179,13 +181,13 @@ static void set_mode(t_ann_td *x, t_symbol *sl, int argc, t_atom *argv)
else
{
x->mode = atom_getint(argv++);
- print_status(x);
+ ann_td_print_status(x);
}
}
-static void train_on_file(t_ann_td *x, t_symbol *sl, int argc, t_atom *argv)
+static void ann_td_train_on_file(t_ann_td *x, t_symbol *sl, int argc, t_atom *argv)
{
if (x->ann == 0)
{
@@ -210,7 +212,7 @@ static void train_on_file(t_ann_td *x, t_symbol *sl, int argc, t_atom *argv)
post("ann_td: finished training on file %s", x->filenametrain->s_name);
}
-static void set_desired_error(t_ann_td *x, t_symbol *sl, int argc, t_atom *argv)
+static void ann_td_set_desired_error(t_ann_td *x, t_symbol *sl, int argc, t_atom *argv)
{
float desired_error = (float)0.001;
if (0<argc)
@@ -224,7 +226,7 @@ static void set_desired_error(t_ann_td *x, t_symbol *sl, int argc, t_atom *argv)
}
}
-static void set_max_iterations(t_ann_td *x, t_symbol *sl, int argc, t_atom *argv)
+static void ann_td_set_max_iterations(t_ann_td *x, t_symbol *sl, int argc, t_atom *argv)
{
unsigned int max_iterations = 500000;
if (argc>0)
@@ -238,7 +240,7 @@ static void set_max_iterations(t_ann_td *x, t_symbol *sl, int argc, t_atom *argv
}
}
-static void set_iterations_between_reports(t_ann_td *x, t_symbol *sl, int argc, t_atom *argv)
+static void ann_td_set_iterations_between_reports(t_ann_td *x, t_symbol *sl, int argc, t_atom *argv)
{
unsigned int iterations_between_reports = 1000;
@@ -255,7 +257,7 @@ static void set_iterations_between_reports(t_ann_td *x, t_symbol *sl, int argc,
}
-static void scale_inputs(t_ann_td *x)
+static void ann_td_scale_inputs(t_ann_td *x)
{
unsigned int j;
unsigned int k;
@@ -273,7 +275,7 @@ static void scale_inputs(t_ann_td *x)
// run the ann using floats in list passed to the inlet as input values
// and send result to outlet as list of float
-static void run_the_net(t_ann_td *x, t_symbol *sl, int argc, t_atom *argv)
+static void ann_td_run_the_net(t_ann_td *x, t_symbol *sl, int argc, t_atom *argv)
{
int i=0;
unsigned j=0;
@@ -302,7 +304,7 @@ static void run_the_net(t_ann_td *x, t_symbol *sl, int argc, t_atom *argv)
}
quanti = x->ann->num_output;
- scale_inputs(x);
+ ann_td_scale_inputs(x);
// fill output array with zeros
for (i=0; i<MAXOUTPUT; i++)
@@ -337,7 +339,7 @@ static void run_the_net(t_ann_td *x, t_symbol *sl, int argc, t_atom *argv)
}
-static void train_on_the_fly(t_ann_td *x, t_symbol *sl, int argc, t_atom *argv)
+static void ann_td_train_on_the_fly(t_ann_td *x, t_symbol *sl, int argc, t_atom *argv)
{
int i=0;
unsigned int j=0;
@@ -370,7 +372,7 @@ static void train_on_the_fly(t_ann_td *x, t_symbol *sl, int argc, t_atom *argv)
output[i]=0;
}
- scale_inputs(x);
+ ann_td_scale_inputs(x);
// fill input array with actual data sent to inlet
for (j = 0; j < x->num_input; j++)
@@ -398,17 +400,17 @@ static void train_on_the_fly(t_ann_td *x, t_symbol *sl, int argc, t_atom *argv)
}
-static void manage_list(t_ann_td *x, t_symbol *sl, int argc, t_atom *argv)
+static void ann_td_manage_list(t_ann_td *x, t_symbol *sl, int argc, t_atom *argv)
{
if (x->mode)
- run_the_net(x, sl, argc, argv);
+ ann_td_run_the_net(x, sl, argc, argv);
else
{
- train_on_the_fly(x, sl, argc, argv);
+ ann_td_train_on_the_fly(x, sl, argc, argv);
}
}
-static void set_filename(t_ann_td *x, t_symbol *sl, int argc, t_atom *argv)
+static void ann_td_set_filename(t_ann_td *x, t_symbol *sl, int argc, t_atom *argv)
{
if (argc>0) {
x->filename = atom_gensym(argv);
@@ -419,7 +421,7 @@ static void set_filename(t_ann_td *x, t_symbol *sl, int argc, t_atom *argv)
post("nn:filename set to %s", x->filename->s_name);
}
-static void load_ann_from_file(t_ann_td *x, t_symbol *sl, int argc, t_atom *argv)
+static void ann_td_load_ann_from_file(t_ann_td *x, t_symbol *sl, int argc, t_atom *argv)
{
if (x->ins_frames_set==0)
{
@@ -436,10 +438,10 @@ static void load_ann_from_file(t_ann_td *x, t_symbol *sl, int argc, t_atom *argv
else
post("nn:ann loaded fom file %s", x->filename->s_name);
- allocate_inputs(x);
+ ann_td_allocate_inputs(x);
}
-static void save_ann_to_file(t_ann_td *x, t_symbol *sl, int argc, t_atom *argv)
+static void ann_td_save_ann_to_file(t_ann_td *x, t_symbol *sl, int argc, t_atom *argv)
{
if (argc>0) {
x->filename = atom_gensym(argv);
@@ -455,7 +457,7 @@ static void save_ann_to_file(t_ann_td *x, t_symbol *sl, int argc, t_atom *argv)
}
// functions for training algo:
-static void set_FANN_TRAIN_INCREMENTAL(t_ann_td *x)
+static void ann_td_set_FANN_TRAIN_INCREMENTAL(t_ann_td *x)
{
if (x->ann == 0)
{
@@ -466,7 +468,7 @@ static void set_FANN_TRAIN_INCREMENTAL(t_ann_td *x)
post("nn:training algorithm set to FANN_TRAIN_INCREMENTAL");
}
}
-static void set_FANN_TRAIN_BATCH(t_ann_td *x)
+static void ann_td_set_FANN_TRAIN_BATCH(t_ann_td *x)
{
if (x->ann == 0)
{
@@ -477,7 +479,7 @@ static void set_FANN_TRAIN_BATCH(t_ann_td *x)
post("nn:training algorithm set to FANN_TRAIN_BATCH");
}
}
-static void set_FANN_TRAIN_RPROP(t_ann_td *x)
+static void ann_td_set_FANN_TRAIN_RPROP(t_ann_td *x)
{
if (x->ann == 0)
{
@@ -488,7 +490,7 @@ static void set_FANN_TRAIN_RPROP(t_ann_td *x)
post("nn:training algorithm set to FANN_TRAIN_RPROP");
}
}
-static void set_FANN_TRAIN_QUICKPROP(t_ann_td *x)
+static void ann_td_set_FANN_TRAIN_QUICKPROP(t_ann_td *x)
{
if (x->ann == 0)
{
@@ -500,7 +502,7 @@ static void set_FANN_TRAIN_QUICKPROP(t_ann_td *x)
}
}
-static void set_activation_function_output(t_ann_td *x, t_symbol *sl, int argc, t_atom *argv)
+static void ann_td_set_activation_function_output(t_ann_td *x, t_symbol *sl, int argc, t_atom *argv)
{
t_symbol *parametro = 0;
int funzione = 0;
@@ -536,7 +538,7 @@ static void set_activation_function_output(t_ann_td *x, t_symbol *sl, int argc,
}
-static void print_ann_details(t_ann_td *x)
+static void ann_td_print_ann_details(t_ann_td *x)
{
if (x->ann == 0)
{
@@ -561,14 +563,14 @@ static void print_ann_details(t_ann_td *x)
}
}
-static void set_num_input_frames(t_ann_td *x, t_floatarg ins, t_floatarg frames)
+static void ann_td_set_num_input_frames(t_ann_td *x, t_floatarg ins, t_floatarg frames)
{
x->num_input = ins;
x->frames = frames;
x->ins_frames_set=1;
}
-static void *nn_new(t_symbol *s, int argc, t_atom *argv)
+static void *ann_td_new(t_symbol *s, int argc, t_atom *argv)
{
t_ann_td *x = (t_ann_td *)pd_new(ann_td_class);
x->l_out = outlet_new(&x->x_obj, &s_list);
@@ -593,69 +595,69 @@ static void *nn_new(t_symbol *s, int argc, t_atom *argv)
if (argc>1) {
x->frames = atom_getint(argv++);
x->ins_frames_set=1;
- allocate_inputs(x);
+ ann_td_allocate_inputs(x);
}
if (argc>2) {
x->filename = atom_gensym(argv);
- load_ann_from_file(x, NULL , 0, NULL);
+ ann_td_load_ann_from_file(x, NULL , 0, NULL);
}
+ post("");
+ post("ann_td: time delay neural nets for PD");
+ post("version: "VERSION"");
+ post("compiled: "__DATE__);
+ post("author: Davide Morelli");
+ post("contact: info@davidemorelli.it www.davidemorelli.it");
+
return (void *)x;
}
// free resources
-static void nn_free(t_ann_td *x)
+static void ann_td_free(t_ann_td *x)
{
struct fann *ann = x->ann;
fann_destroy(ann);
- deallocate_inputs(x);
+ ann_td_deallocate_inputs(x);
// TODO: free other resources!
}
void ann_td_setup(void) {
- post("");
- post("ann_td: time delay neural nets for PD");
- post("version: "VERSION"");
- post("compiled: "__DATE__);
- post("author: Davide Morelli");
- post("contact: info@davidemorelli.it www.davidemorelli.it");
-
ann_td_class = class_new(gensym("ann_td"),
- (t_newmethod)nn_new,
- (t_method)nn_free, sizeof(t_ann_td),
+ (t_newmethod)ann_td_new,
+ (t_method)ann_td_free, sizeof(t_ann_td),
CLASS_DEFAULT, A_GIMME, 0);
// general..
- class_addmethod(ann_td_class, (t_method)help, gensym("help"), 0);
- class_addmethod(ann_td_class, (t_method)createFann, gensym("create"), A_GIMME, 0);
- class_addmethod(ann_td_class, (t_method)train, gensym("train"), 0);
- class_addmethod(ann_td_class, (t_method)run, gensym("run"), 0);
- class_addmethod(ann_td_class, (t_method)set_mode, gensym("setmode"), A_GIMME, 0);
- class_addmethod(ann_td_class, (t_method)train_on_file, gensym("train-on-file"), A_GIMME, 0);
- class_addmethod(ann_td_class, (t_method)manage_list, gensym("data"), A_GIMME, 0);
- class_addmethod(ann_td_class, (t_method)set_filename, gensym("filename"), A_GIMME, 0);
- class_addmethod(ann_td_class, (t_method)load_ann_from_file, gensym("load"),A_GIMME, 0);
- class_addmethod(ann_td_class, (t_method)save_ann_to_file, gensym("save"),A_GIMME, 0);
- class_addmethod(ann_td_class, (t_method)print_ann_details, gensym("details"), 0);
+ class_addmethod(ann_td_class, (t_method)ann_td_help, gensym("help"), 0);
+ class_addmethod(ann_td_class, (t_method)ann_td_createFann, gensym("create"), A_GIMME, 0);
+ class_addmethod(ann_td_class, (t_method)ann_td_train, gensym("train"), 0);
+ class_addmethod(ann_td_class, (t_method)ann_td_run, gensym("run"), 0);
+ class_addmethod(ann_td_class, (t_method)ann_td_set_mode, gensym("setmode"), A_GIMME, 0);
+ class_addmethod(ann_td_class, (t_method)ann_td_train_on_file, gensym("train-on-file"), A_GIMME, 0);
+ class_addmethod(ann_td_class, (t_method)ann_td_manage_list, gensym("data"), A_GIMME, 0);
+ class_addmethod(ann_td_class, (t_method)ann_td_set_filename, gensym("filename"), A_GIMME, 0);
+ class_addmethod(ann_td_class, (t_method)ann_td_load_ann_from_file, gensym("load"),A_GIMME, 0);
+ class_addmethod(ann_td_class, (t_method)ann_td_save_ann_to_file, gensym("save"),A_GIMME, 0);
+ class_addmethod(ann_td_class, (t_method)ann_td_print_ann_details, gensym("details"), 0);
// change training parameters
- class_addmethod(ann_td_class, (t_method)set_desired_error, gensym("desired_error"),A_GIMME, 0);
- class_addmethod(ann_td_class, (t_method)set_max_iterations, gensym("max_iterations"),A_GIMME, 0);
- class_addmethod(ann_td_class, (t_method)set_iterations_between_reports, gensym("iterations_between_reports"),A_GIMME, 0);
+ class_addmethod(ann_td_class, (t_method)ann_td_set_desired_error, gensym("desired_error"),A_GIMME, 0);
+ class_addmethod(ann_td_class, (t_method)ann_td_set_max_iterations, gensym("max_iterations"),A_GIMME, 0);
+ class_addmethod(ann_td_class, (t_method)ann_td_set_iterations_between_reports, gensym("iterations_between_reports"),A_GIMME, 0);
// change training and activation algorithms
- class_addmethod(ann_td_class, (t_method)set_FANN_TRAIN_INCREMENTAL, gensym("FANN_TRAIN_INCREMENTAL"), 0);
- class_addmethod(ann_td_class, (t_method)set_FANN_TRAIN_BATCH, gensym("FANN_TRAIN_BATCH"), 0);
- class_addmethod(ann_td_class, (t_method)set_FANN_TRAIN_RPROP, gensym("FANN_TRAIN_RPROP"), 0);
- class_addmethod(ann_td_class, (t_method)set_FANN_TRAIN_QUICKPROP, gensym("FANN_TRAIN_QUICKPROP"), 0);
- class_addmethod(ann_td_class, (t_method)set_activation_function_output, gensym("set_activation_function_output"),A_GIMME, 0);
+ class_addmethod(ann_td_class, (t_method)ann_td_set_FANN_TRAIN_INCREMENTAL, gensym("FANN_TRAIN_INCREMENTAL"), 0);
+ class_addmethod(ann_td_class, (t_method)ann_td_set_FANN_TRAIN_BATCH, gensym("FANN_TRAIN_BATCH"), 0);
+ class_addmethod(ann_td_class, (t_method)ann_td_set_FANN_TRAIN_RPROP, gensym("FANN_TRAIN_RPROP"), 0);
+ class_addmethod(ann_td_class, (t_method)ann_td_set_FANN_TRAIN_QUICKPROP, gensym("FANN_TRAIN_QUICKPROP"), 0);
+ class_addmethod(ann_td_class, (t_method)ann_td_set_activation_function_output, gensym("set_activation_function_output"),A_GIMME, 0);
- class_addmethod(ann_td_class, (t_method)set_num_input_frames, gensym("inputs_frames"),A_DEFFLOAT, A_DEFFLOAT, 0);
+ class_addmethod(ann_td_class, (t_method)ann_td_set_num_input_frames, gensym("inputs_frames"),A_DEFFLOAT, A_DEFFLOAT, 0);
// the most important one: running the ann
- class_addlist(ann_td_class, (t_method)manage_list);
+ class_addlist(ann_td_class, (t_method)ann_td_manage_list);
// help patch
class_sethelpsymbol(ann_td_class, gensym("help-ann_td"));
diff --git a/src/makefile.msvc b/src/makefile.msvc
index ebe696f..130d726 100755
--- a/src/makefile.msvc
+++ b/src/makefile.msvc
@@ -11,9 +11,11 @@ FANNSRC="H:\PureData\FANN\fann-1.2.0\fann-1.2.0\src\include"
FANNLIB="H:\PureData\FANN\fann-1.2.0\fann-1.2.0\MSVC++\Release"
-current: clean pd_nt
+current: clean pd_nt distclean
+
+pd_nt: ann_som.dll ann_mlp.dll ann_td.dll ann.dll
+#pd_nt: ann.dll
-pd_nt: ann_som.dll ann_mlp.dll ann_td.dll
.SUFFIXES: .dll
@@ -27,14 +29,15 @@ PDNTLIB = $(PDNTLDIR)\libc.lib \
$(PDNTLDIR)\oldnames.lib \
$(PDNTLDIR)\kernel32.lib \
$(PDPATH)\bin\pd.lib \
- $(FANNLIB)\libfann.lib
+ $(FANNLIB)\libfann.lib
+# ann_mlp.lib ann_som.lib ann_td.lib
.c.dll:
cl $(PDNTCFLAGS) $(PDNTINCLUDE) /c $*.c
- link /dll /export:$*_setup $*.obj $(PDNTLIB)
- -del *.obj
- -del *.lib
- -del *.exp
+ link /dll /export:$*_setup $*.obj $(PDNTLIB) *.lib
+# -del *.obj
+# -del *.lib
+# -del *.exp
#install:
# copy help-*.pd $(PDPATH)/doc/5.reference/
@@ -46,3 +49,7 @@ clean:
-del *.exp
-del *.dll
+distclean:
+ -del *.obj
+ -del *.lib
+ -del *.exp