#N canvas 117 93 838 448 12;
#N canvas 181 295 623 324 creation 0;
#X obj 52 235 outlet;
#X msg 49 10 create;
#X msg 72 68 create 2 1;
#X msg 81 97 create 3 1;
#X msg 93 128 create 3 2;
#X msg 59 38 create 3 2 3 3 1 0.7;
#X text 121 7 create with default values;
#X text 236 38 specifying all params;
#X text 166 68 2 inputs 1 output;
#X text 176 99 3 inputs 1 output;
#X text 189 128 3 inputs 2 outputs;
#X text 159 222 TIP: don't set the num_layers param too high;
#X text 158 179 params: num_input \, num_output \, num_layers \, num_neurons_hidden
\, connection_rate \, learning_rate;
#X connect 1 0 0 0;
#X connect 2 0 0 0;
#X connect 3 0 0 0;
#X connect 4 0 0 0;
#X connect 5 0 0 0;
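#X text 159 255 e.g. [create 3 2 3 3 1 0.7( should read as: 3 inputs \, 2 outputs
\, 3 layers \, 3 hidden neurons \, connection_rate 1 \, learning_rate 0.7 \,
following the param order listed above;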
#X restore 93 68 pd creation examples;
#N canvas 136 60 724 352 run 0;
#X obj 90 219 outlet;
#X msg 123 69 0 1;
#X msg 124 92 1 0;
#X msg 125 115 1 1;
#X msg 126 140 0 0;
#X text 40 17 now you can run your nn by passing it a list of inputs;
#X text 169 70 send a list of data and watch the console for output
;
#X text 39 35 the output is sent as a list of floats;
#X text 184 134 these inputs are good for a nn like the one in the example1
directory;
#X connect 1 0 0 0;
#X connect 2 0 0 0;
#X connect 3 0 0 0;
#X connect 4 0 0 0;
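#X text 40 250 e.g. send [0 1( while in run mode: the computed outputs come out
of the left outlet as a list of floats (printed by [print out] in the parent
patch);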
#X restore 107 180 pd run the net;
#N canvas 0 0 619 610 other 0;
#X obj 43 401 outlet;
#X msg 102 37 train;
#X msg 103 63 run;
#X msg 152 37 setmode 0;
#X msg 153 63 setmode 1;
#X text 249 40 set training/running mode;
#X text 247 63 training mode is currently not implemented;
#N canvas 265 255 690 335 training 0;
#X obj 71 288 outlet;
#X msg 82 195 FANN_TRAIN_INCREMENTAL;
#X msg 82 216 FANN_TRAIN_BATCH;
#X msg 81 238 FANN_TRAIN_RPROP;
#X msg 81 258 FANN_TRAIN_QUICKPROP;
#X text 40 28 you can set the training algorithm by simply sending a message
with the name of the chosen algorithm. possible values are: FANN_TRAIN_INCREMENTAL
FANN_TRAIN_BATCH FANN_TRAIN_RPROP FANN_TRAIN_QUICKPROP. the default
is FANN_TRAIN_RPROP. see the FANN manual for details on each algorithm:
http://fann.sourceforge.net/html/r1996.html;
#X connect 1 0 0 0;
#X connect 2 0 0 0;
#X connect 3 0 0 0;
#X connect 4 0 0 0;
#X restore 150 153 pd training algorithm;
#X text 360 175 some advanced params;
#N canvas 371 92 698 395 training 0;
#X obj 52 230 outlet;
#X msg 69 118 desired_error 0.01;
#X msg 79 146 max_iterations 500000;
#X msg 90 178 iterations_between_reports 1000;
#X text 58 28 you can change the training parameters. see the FANN manual for
details (http://fann.sourceforge.net);
#X connect 1 0 0 0;
#X connect 2 0 0 0;
#X connect 3 0 0 0;
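#X text 58 270 note: these presumably map to the FANN train-on-file settings
(target error \, maximum epochs \, epochs between reports) - check the FANN
manual or the ann_mlp source if in doubt;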
#X restore 151 179 pd training params;
#N canvas 371 92 694 391 activation 0;
#X obj 49 335 outlet;
#X text 40 28 you can set the output activation function by passing a message
to ann_mlp. see the FANN manual for a description of the algorithms;
#X msg 69 118 set_activation_function_output FANN_THRESHOLD;
#X msg 83 139 set_activation_function_output FANN_THRESHOLD_SYMMETRIC
;
#X msg 95 163 set_activation_function_output FANN_LINEAR;
#X msg 98 184 set_activation_function_output FANN_SIGMOID;
#X msg 106 206 set_activation_function_output FANN_SIGMOID_STEPWISE
;
#X msg 108 233 set_activation_function_output FANN_SIGMOID_SYMMETRIC
;
#X msg 115 256 set_activation_function_output FANN_SIGMOID_SYMMETRIC_STEPWISE
;
#X connect 2 0 0 0;
#X connect 3 0 0 0;
#X connect 4 0 0 0;
#X connect 5 0 0 0;
#X connect 6 0 0 0;
#X connect 7 0 0 0;
#X connect 8 0 0 0;
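#X text 250 300 rough guide (confirm with the FANN manual): FANN_SIGMOID variants
output in the 0..1 range \, the SYMMETRIC variants in -1..1 \, FANN_THRESHOLD
gives hard 0/1 steps and FANN_LINEAR is unbounded;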
#X restore 150 203 pd activation algorithm;
#X msg 151 287 details;
#X text 229 285 details on the current nn;
#X msg 145 333 help;
#X connect 1 0 0 0;
#X connect 2 0 0 0;
#X connect 3 0 0 0;
#X connect 4 0 0 0;
#X connect 7 0 0 0;
#X connect 9 0 0 0;
#X connect 10 0 0 0;
#X connect 11 0 0 0;
#X connect 13 0 0 0;
#X restore 128 258 pd other commands;
#N canvas 0 0 653 513 save 0;
#X obj 39 264 outlet;
#X msg 64 20 filename test.net;
#X msg 66 46 save;
#X msg 82 103 load;
#X text 221 19 set the filename;
#X text 214 42 save the net to the file;
#X text 138 104 you can reload it too;
#X text 144 182 a nn can be loaded from a file at creation time by simply
passing the filename as an argument;
#X msg 68 71 save test.net;
#X msg 93 130 load test.net;
#X text 144 217 like [ann_mlp test.net];
#X connect 1 0 0 0;
#X connect 2 0 0 0;
#X connect 3 0 0 0;
#X connect 8 0 0 0;
#X connect 9 0 0 0;
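#X text 144 290 i.e. either set the name once with [filename test.net( and then
use plain [save( / [load( \, or pass the name directly as in [save test.net(
and [load test.net(;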
#X restore 118 218 pd save the net;
#X text 270 66 create a nn;
#X text 244 179 run your net;
#X text 258 215 save your net;
#N canvas 0 0 712 542 tips 0;
#X text 51 84 for better performance \, input values should be normalized
and all inputs should have the same range (if one input has a larger
range it will be more "important"). the range of each input should
be centered on 0: [-1 \, 1] is good \, [-2 \, 2] is good \, [0 \, 1] is not
so good \, [1 \, 2] is bad. the range should not be too small either ([-0.1 \,
0.1] is bad).;
#X text 41 19 TIPS;
#X text 41 56 inputs;
#X text 39 211 outputs;
#X text 50 235 each class of outputs should have its own output value:
don't use the same output for 2 meanings \, use 2 outputs instead \,
1 for each.;
#X restore 156 303 pd tips;
#X text 270 353 an interface to the FANN library (http://fann.sourceforge.net)
;
#X text 272 371 by Davide Morelli - info@davidemorelli.it;
#N canvas 468 204 572 403 train 0;
#X obj 32 241 outlet;
#N canvas 0 0 458 308 train 0;
#N canvas 8 48 990 509 build 0;
#X obj 65 417 textfile;
#X msg 190 337 clear;
#N canvas 0 0 462 312 alternate 0;
#X obj 103 117 + 1;
#X obj 70 119 f 0;
#X obj 70 171 sel 0 1;
#X obj 70 146 mod 2;
#X msg 95 90 0;
#X obj 68 31 inlet;
#X obj 140 40 inlet;
#X obj 140 63 bang;
#X obj 68 55 bang;
#X obj 65 205 outlet;
#X obj 125 206 outlet;
#X text 59 6 bang;
#X text 139 18 reset to 0 without bang;
#X connect 0 0 1 1;
#X connect 1 0 0 0;
#X connect 1 0 3 0;
#X connect 2 0 9 0;
#X connect 2 1 10 0;
#X connect 3 0 2 0;
#X connect 4 0 1 1;
#X connect 5 0 8 0;
#X connect 6 0 7 0;
#X connect 7 0 4 0;
#X connect 8 0 1 0;
#X restore 58 227 pd alternate;
#X obj 24 81 bng 15 250 50 0 empty empty write-once 0 -6 0 8 -262144
-1 -1;
#X obj 341 183 bng 15 250 50 0 empty empty reset 0 -6 0 8 -262144 -1
-1;
#N canvas 0 0 466 316 inputs 0;
#X obj 61 153 pack s f f;
#X obj 63 200 pack f f;
#X obj 61 176 unpack s f f;
#X msg 66 223 add \$1 \$2;
#X obj 66 257 outlet;
#X text 120 258 to textfile;
#X obj 24 42 inlet;
#X text 23 22 bang;
#X text 66 77 here go the inputs;
#X obj 94 52 r input1;
#X obj 163 52 r input2;
#X connect 0 0 2 0;
#X connect 1 0 3 0;
#X connect 2 1 1 0;
#X connect 2 2 1 1;
#X connect 3 0 4 0;
#X connect 6 0 0 0;
#X connect 9 0 0 1;
#X connect 10 0 0 2;
#X restore 58 306 pd inputs;
#N canvas 0 0 466 316 outputs 0;
#X obj 61 153 pack s f f;
#X obj 63 200 pack f f;
#X obj 61 176 unpack s f f;
#X msg 66 223 add \$1 \$2;
#X obj 66 257 outlet;
#X text 120 258 to textfile;
#X obj 24 42 inlet;
#X text 23 22 bang;
#X text 66 77 here go the outputs;
#X obj 91 51 r output1;
#X obj 166 51 r output2;
#X connect 0 0 2 0;
#X connect 1 0 3 0;
#X connect 2 1 1 0;
#X connect 2 2 1 1;
#X connect 3 0 4 0;
#X connect 6 0 0 0;
#X connect 9 0 0 1;
#X connect 10 0 0 2;
#X restore 149 284 pd outputs;
#X obj 230 223 f 0;
#X obj 260 223 + 1;
#X obj 239 257 nbx 5 14 -1e+037 1e+037 0 0 empty empty how_many_patterns
0 -6 0 10 -262144 -1 -1 0 256;
#X text 156 406 todo: write a header (a line at the beginning of the file
with 3 ints: how many patterns \, num_input \, num_output);
#X obj 122 190 delay 50;
#X obj 115 159 metro 100;
#X floatatom 259 72 5 100 5000 2 msec_between_snapshots - -;
#X obj 127 80 tgl 15 0 empty empty toggle_on-off 0 -6 0 8 -262144 -1
-1 0 1;
#X obj 219 189 / 2;
#X obj 260 16 loadbang;
#X msg 260 36 100;
#X msg 326 342 write test.txt cr;
#N canvas 262 68 647 603 README 0;
#X text 67 432 please help me make this patch more usable: - how can I
add a line at the very beginning of a text file after I have filled
it? - how can I handle inputs and outputs of different sizes without forcing
the user to edit the patch?;
#X text 9 63 how to use: 1) modify [pd inputs] \, inserting
[r] objects to receive your input data and adjusting the [pack]s to handle the
right number of inputs 2) do the same with [pd outputs] 3) click on
reset 4) toggle ON and start collecting data 5) when you are ready \,
toggle OFF 6) edit [write filename cr( with the actual filename you
want for your training data (always keep the cr after the filename)
7) open the file with the training data 8) add a line at the beginning
containing 3 integers: the 1st is the number of training patterns written
(see the "how many patterns" number box) \, the 2nd is how many inputs
your ann has \, the 3rd is how many outputs. e.g. if I collected 100 training
snapshots for an ann with 10 ins and 2 outs \, I write "100 10 2" at
the very beginning of the file. now the training file is ready and can
be read by ann_mlp via the train-on-file command;
#X text 9 7 this tricky sub-patch is useful for writing a file to train
an ann \, and is intended to be used with the ann_mlp external;
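#X text 67 510 for example \, a finished file for a net with 2 ins \, 1 out
and 4 recorded patterns might look like this (one item per line): 4 2 1 /
0 1 / 1 / 1 0 / 1 / 1 1 / 0 / 0 0 / 0 - i.e. the header followed by
alternating input and output lines. this should match what FANN expects
for train-on-file \, but double-check the FANN docs;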
#X restore 25 16 pd README;
#X text 479 210 by davide morelli info@davidemorelli.it;
#X text 106 14 <--readme!;
#X text 242 283 <--edit here!;
#X text 142 308 <--edit here!;
#X text 429 86 usage: read [pd README] \, edit [pd inputs] and [pd
outputs] \, toggle on and record inputs and outputs \, toggle off when
ready \, write to a file \, edit the file adding a line at the beginning
(see README);
#X connect 1 0 0 0;
#X connect 2 0 5 0;
#X connect 2 1 6 0;
#X connect 2 1 7 0;
#X connect 3 0 11 0;
#X connect 3 0 2 0;
#X connect 4 0 2 1;
#X connect 4 0 1 0;
#X connect 5 0 0 0;
#X connect 6 0 0 0;
#X connect 7 0 8 0;
#X connect 7 0 9 0;
#X connect 8 0 7 1;
#X connect 11 0 2 0;
#X connect 12 0 11 0;
#X connect 12 0 2 0;
#X connect 13 0 12 1;
#X connect 13 0 15 0;
#X connect 14 0 12 0;
#X connect 15 0 11 1;
#X connect 16 0 17 0;
#X connect 17 0 13 0;
#X connect 18 0 0 0;
#X restore 86 42 pd build training file;
#X msg 88 74 train-on-file test.txt;
#X text 285 45 build a training file;
#X text 287 74 train the nn with the training file;
#X obj 56 139 outlet;
#X connect 1 0 4 0;
#X restore 79 103 pd train your net using a training file;
#N canvas 100 44 884 550 train 0;
#X obj 57 397 outlet;
#X msg 60 31 train;
#X text 126 33 1- set the train mode;
#X text 116 81 2- build a list with inputs and desired output;
#X text 139 101 be sure you provide the correct number of inputs
and outputs;
#X obj 168 202 pack s f f f;
#X obj 197 248 pack f f f;
#X obj 168 225 unpack s f f f;
#X msg 192 374 run;
#X obj 198 170 tgl 15 0 empty empty in1 0 -6 0 8 -262144 -1 -1 0 1
;
#X obj 228 170 tgl 15 0 empty empty in2 0 -6 0 8 -262144 -1 -1 0 1
;
#X obj 259 170 tgl 15 0 empty empty output 0 -6 0 8 -262144 -1 -1 0
1;
#X obj 148 169 bng 15 250 50 0 empty empty train! 0 -6 0 8 -262144
-1 -1;
#X text 299 183 set the input and output values \, then send the list by clicking
the "train!" bang;
#X msg 316 261 create 2 1;
#X text 229 374 3- when you are done \, switch back to run mode before
exiting;
#X text 315 226 NOTE1: before training with this example you should
have created a nn with 2 ins and 1 out with a command like:;
#X text 311 291 NOTE2: while training \, the right outlet gives you the
mean square error after each training pattern.;
#X connect 1 0 0 0;
#X connect 5 0 7 0;
#X connect 6 0 0 0;
#X connect 7 1 6 0;
#X connect 7 2 6 1;
#X connect 7 3 6 2;
#X connect 8 0 0 0;
#X connect 9 0 5 1;
#X connect 10 0 5 2;
#X connect 11 0 5 3;
#X connect 12 0 5 0;
#X connect 14 0 0 0;
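#X text 311 440 i.e. while in train mode each incoming list is read as the
inputs followed by the desired outputs \, so for this 2-in 1-out example the
list has 3 floats;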
#X restore 68 50 pd train it on the fly;
#X text 62 5 there are 2 ways to train your net;
#X text 253 47 on the fly is simpler;
#X text 88 128 with a training file the net can be more accurate;
#X connect 1 0 0 0;
#X connect 2 0 0 0;
#X restore 115 119 pd train;
#X text 190 118 train a nn;
#X obj 103 345 print mse;
#X obj 52 373 print out;
#X obj 52 313 ann_mlp;
#X text 9 2 ann_mlp: multi-layer perceptron neural networks in Pd
;
#X connect 0 0 14 0;
#X connect 1 0 14 0;
#X connect 2 0 14 0;
#X connect 3 0 14 0;
#X connect 10 0 14 0;
#X connect 14 0 13 0;
#X connect 14 1 12 0;
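#X text 270 400 quick start: create the net \, train it (on the fly or with a
training file) \, then send it input lists in run mode. left outlet = the
output list \, right outlet = mean square error while training;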