mlp_gen.cc
#include <cmath>
#include <cstring>
#include <cstdio>
#include <cstdlib>

#include "mlp_gen.h"
#include "mlp_sigmoide.h"

#ifdef __cplusplus
extern "C" {
#endif

#define NPMAX 100000
#define NLMAX 1000

struct net_ net_ MLP_HIDDEN;
struct learn_ learn_ MLP_HIDDEN;
struct pat_ pat_ MLP_HIDDEN;
struct divers_ divers_ MLP_HIDDEN;
struct stat_ stat_ MLP_HIDDEN;

int MessLang = 0;
int OutputWeights = 100;
int WeightsMemory = 0;
int PatMemory[2] = {0,0};
int BFGSMemory = 0;
int LearnMemory = 0;
int NetMemory = 0;
float MLPfitVersion = (float) 1.40;
dbl LastAlpha = 0;

/* globals used by the training, BFGS and line-search routines below */
dbl ***dir;
dbl *delta;
dbl **BFGSH;
dbl *Gamma;
int *ExamplesIndex;

/* The following lines are needed to use the dgels routine from the LAPACK
   library in Reslin() */

/* Subroutine */ int dgels_(char *trans, int *m, int *n, int *nrhs,
                            double *a, int *lda, double *b, int *ldb,
                            double *work, int *lwork, int *info);

/***********************************************************/
/* MLP_Out */
/* */
/* computes the output of the Neural Network */
/* inputs: double *rrin = inputs of the MLP */
/* outputs: double *rrout = outputs of the MLP */
/* */
/* Author: J.Schwindling 29-Mar-99 */
/* Modified: J.Schwindling 16-Jul-99 unrolled loops */
/* Modified: J.Schwindling 20-Jul-99 fast sigmoid */
/***********************************************************/

/* extern "C"Dllexport */void MLP_Out(type_pat *rrin, dbl *rrout)
{
  int i, il, in, j, m, mp1;
  dbl **deriv1;

/* input layer */

  deriv1 = NET.Deriv1;
  m = NET.Nneur[0]%4;
  if(m==0) goto L10;
  for(j=0;j<m;j++) NET.Outn[0][j] = rrin[j];
L10:
  mp1 = m+1;
  for(i=mp1; i<=NET.Nneur[0]; i+=4)
  {
    NET.Outn[0][i-1] = rrin[i-1];
    NET.Outn[0][i] = rrin[i];
    NET.Outn[0][i+1] = rrin[i+1];
    NET.Outn[0][i+2] = rrin[i+2];
  }

/* hidden and output layers */

  MLP_MatrixVectorBias(NET.vWeights[1],NET.Outn[0],
                       NET.Outn[1],NET.Nneur[1],NET.Nneur[0]);

  for(il=2; il<NET.Nlayer; il++)
  {
    MLP_vSigmoideDeriv(NET.Outn[il-1],
                       deriv1[il-1],NET.Nneur[il-1]);
    MLP_MatrixVectorBias(NET.vWeights[il],NET.Outn[il-1],
                         NET.Outn[il],NET.Nneur[il],
                         NET.Nneur[il-1]);
  }
  for(in=0; in<NET.Nneur[NET.Nlayer-1]; in++)
  {
    deriv1[NET.Nlayer-1][in] = 1;
  }
}
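
/* Note on the loop structure above: the input-layer copy is hand-unrolled
   by 4 (the first Nneur[0]%4 components are copied one by one, the rest in
   blocks of four). The output-layer derivatives are set to 1 because the
   output neurons are linear by default (see SetDefaultFuncs below), while
   hidden-layer derivatives are filled by MLP_vSigmoideDeriv. */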

/***********************************************************/
/* MLP_Out_T */
/* */
/* computes the output of the Neural Network when called */
/* from MLP_Test */
/* inputs: double *rrin = inputs of the MLP */
/* */
/* Author: J.Schwindling 23-Jul-1999 */
/***********************************************************/

/* extern "C"Dllexport */void MLP_Out_T(type_pat *rrin)
{
  int i, il, in, j, ilm1, m, mp1;
  dbl a;

/* input layer */

  m = NET.Nneur[0]%4;
  if(m==0) goto L10;
  for(j=0;j<m;j++) NET.Outn[0][j] = rrin[j];
L10:
  mp1 = m+1;
  for(i=mp1; i<=NET.Nneur[0]; i+=4)
  {
    NET.Outn[0][i-1] = rrin[i-1];
    NET.Outn[0][i] = rrin[i];
    NET.Outn[0][i+1] = rrin[i+1];
    NET.Outn[0][i+2] = rrin[i+2];
  }

/* hidden and output layers */

/* for(in=0;in<NET.Nneur[0]; in++) printf("%e %e\n",
    NET.Outn[0][in],NET.Weights[1][0][in]);
   printf("\n"); */
  for(il=1; il<NET.Nlayer; il++)
  {
    ilm1 = il-1;
    m = NET.Nneur[ilm1]%4;
    for(in=0; in<NET.Nneur[il]; in++)
    {
      a = NET.Weights[il][in][0];
      if(m==0) goto L20;
      for(j=1;j<=m;j++) a +=
        NET.Weights[il][in][j]*NET.Outn[ilm1][j-1];
L20:
      mp1 = m+1;
      for(j=mp1; j<=NET.Nneur[ilm1]; j+=4)
      {
        a +=
          NET.Weights[il][in][j+3]*NET.Outn[ilm1][j+2]+
          NET.Weights[il][in][j+2]*NET.Outn[ilm1][j+1]+
          NET.Weights[il][in][j+1]*NET.Outn[ilm1][j]+
          NET.Weights[il][in][j]*NET.Outn[ilm1][j-1];
      }
      switch(NET.T_func[il][in])
      {
        case 2: NET.Outn[il][in] = MLP_Sigmoide(a);
          break;
        case 1: NET.Outn[il][in] = a;
          break;
        case 0: NET.Outn[il][in] = 0;
          break;
      }
    }
  }
}
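
/* The transfer-function codes used in the switch above follow the
   SetTransFunc convention documented later in this file:
   0 -> y = 0, 1 -> y = x (linear), 2 -> y = 1/(1+exp(-x)) (sigmoid). */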

/***********************************************************/
/* MLP_Out2 */
/* */
/* computes the output of the Neural Network */
/* inputs: double *rrin = inputs of the MLP */
/* outputs: double *rrout = outputs of the MLP */
/* */
/* Author: J.Schwindling 29-Mar-99 */
/* Modified: J.Schwindling 16-Jul-99 unrolled loops */
/* Modified: J.Schwindling 20-Jul-99 fast sigmoid */
/***********************************************************/

/* extern "C"Dllexport */void MLP_Out2(type_pat *rrin)
{
  int il, in, m, mp1;
  int i;
  dbl **rrout, **deriv1;
  dbl *prrout;
  type_pat *prrin;
  int nhid = NET.Nneur[1];
  int nin = NET.Nneur[0];

  rrout = NET.Outn;
  deriv1 = NET.Deriv1;

  m = NET.Nneur[0]%4;
  if(m==0) goto L10;
  if(m==1)
  {
    rrout[0][0] = rrin[1];
    goto L10;
  }
  else if(m==2)
  {
    rrout[0][0] = rrin[1];
    rrout[0][1] = rrin[2];
    goto L10;
  }
  else if(m==3)
  {
    rrout[0][0] = rrin[1];
    rrout[0][1] = rrin[2];
    rrout[0][2] = rrin[3];
    goto L10;
  }
L10:
  mp1 = m+1;
  prrout = &(rrout[0][mp1]);
  prrin = &(rrin[mp1+1]);
  for(i=mp1; i<=NET.Nneur[0]; i+=4, prrout+=4, prrin+=4)
  {
    *(prrout-1) = *(prrin-1);
    *prrout = *prrin;
    *(prrout+1) = *(prrin+1);
    *(prrout+2) = *(prrin+2);
  }

/* first hidden layer */

  MLP_MatrixVectorBias(NET.vWeights[1],NET.Outn[0],
                       NET.Outn[1],nhid,nin);

/* hidden and output layers */

  for(il=2; il<NET.Nlayer; il++)
  {
    MLP_vSigmoideDeriv(NET.Outn[il-1],deriv1[il-1],NET.Nneur[il-1]);
    MLP_MatrixVectorBias(NET.vWeights[il],NET.Outn[il-1],
                         NET.Outn[il],NET.Nneur[il],NET.Nneur[il-1]);
  }
  for(in=0; in<NET.Nneur[NET.Nlayer-1]; in++)
    deriv1[NET.Nlayer-1][in] = 1;
}
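
/* MLP_Out2 reads the packed pattern layout of PAT.vRin: each example
   occupies Nneur[0]+1 consecutive values whose first element is the
   constant used for the bias term, which is why the copies above start
   at rrin[1] and callers pass &PAT.vRin[0][ipat*(Nneur[0]+1)]. */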

/***********************************************************/
/* MLP_Test_MM */
/* */
/* computes the MLP error function using matrix-matrix */
/* multiplications */
/* inputs: int ifile = file number: 0=learn, 1=test */
/* dbl *tmp = a pointer to an array of size */
/* 2 x number of neurons in first */
/* hidden layer */
/* */
/* return value (dbl) = error value */
/* */
/* Author: J.Schwindling 25-Jan-2000 */
/***********************************************************/

/* extern "C"Dllexport */dbl MLP_Test_MM(int ifile, dbl *tmp)
{
  int ipat;
  int npat = PAT.Npat[ifile];
  int nhid = NET.Nneur[1];
  int nin = NET.Nneur[0];
  int jpat, j, il, ilm1, m, in, mp1;
  dbl err, a, rrans;
  dbl *pweights, *ptmp;

  err = 0;
  for(ipat=0; ipat<npat-1; ipat+=2)
  {
    MLP_MM2rows(tmp, &(PAT.vRin[ifile][ipat*(nin+1)]),
                NET.vWeights[1], 2, nhid, nin+1,
                nin+1, nin+1);

    switch(NET.T_func[1][0])
    {
      case 2:
        ptmp = &(tmp[0]);
        MLP_vSigmoide(ptmp,2*nhid);
        break;

      case 1:
        break;

      case 0:
        for(jpat=0; jpat<2; jpat++)
        {
          for(j=0; j<nhid; j++)
          {
            tmp[j+jpat*nhid] = 0;
          }
        }
        break;
    }

    for(jpat=0; jpat<2; jpat++)
    {
      for(in=0; in<nhid; in++)
      {
        NET.Outn[1][in] = tmp[jpat*nhid+in];
      }
      for(il=2; il<NET.Nlayer; il++)
      {
        ilm1 = il-1;
        m = NET.Nneur[ilm1]%4;
        for(in=0; in<NET.Nneur[il]; in++)
        {
          pweights = &(NET.Weights[il][in][0]);
          a = *pweights;
          pweights++;
          if(m==0) goto L20;
          for(j=1;j<=m;j++,pweights++) a +=
            (*pweights)*NET.Outn[ilm1][j-1];
L20:
          mp1 = m+1;
          for(j=mp1; j<=NET.Nneur[ilm1];
              j+=4, pweights+=4)
          {
            a +=
              *(pweights+3)*NET.Outn[ilm1][j+2]+
              *(pweights+2)*NET.Outn[ilm1][j+1]+
              *(pweights+1)*NET.Outn[ilm1][j]+
              *(pweights  )*NET.Outn[ilm1][j-1];
          }
          switch(NET.T_func[il][in])
          {
            case 2: NET.Outn[il][in] = MLP_Sigmoide(a);
              break;
            case 1: NET.Outn[il][in] = a;
              break;
            case 0: NET.Outn[il][in] = 0;
              break;
          }
        }
        if(il == NET.Nlayer-1)
        {
          for(in=0; in<NET.Nneur[NET.Nlayer-1]; in++)
          {
            rrans = (dbl) PAT.Rans[ifile][ipat+jpat][in];
            err += (rrans-NET.Outn[NET.Nlayer-1][in])*
                   (rrans-NET.Outn[NET.Nlayer-1][in])*
                   PAT.Pond[ifile][ipat+jpat];
          }
        }
      }
    }
  }

/* case of odd npat */
  for(/*'ipat' set above*/; ipat<npat; ipat++)
  {
    MLP_MatrixVector(NET.vWeights[1],
                     &(PAT.vRin[ifile][ipat*(nin+1)]),tmp,
                     nhid,nin+1);

    switch(NET.T_func[1][0])
    {
      case 2:
        ptmp = &(tmp[0]);
        MLP_vSigmoide(ptmp,2*nhid);
        break;

      case 1:
        break;

      case 0:
        for(j=0; j<nhid; j++)
        {
          tmp[j] = 0;
        }
        break;
    }

    for(in=0; in<nhid; in++)
    {
      NET.Outn[1][in] = tmp[in];
    }
    for(il=2; il<NET.Nlayer; il++)
    {
      ilm1 = il-1;
      m = NET.Nneur[ilm1]%4;
      for(in=0; in<NET.Nneur[il]; in++)
      {
        pweights = &(NET.Weights[il][in][0]);
        a = *pweights;
        pweights++;
        if(m==0) goto L25;
        for(j=1;j<=m;j++,pweights++) a +=
          (*pweights)*NET.Outn[ilm1][j-1];
L25:
        mp1 = m+1;
        for(j=mp1; j<=NET.Nneur[ilm1];
            j+=4, pweights+=4)
        {
          a +=
            *(pweights+3)*NET.Outn[ilm1][j+2]+
            *(pweights+2)*NET.Outn[ilm1][j+1]+
            *(pweights+1)*NET.Outn[ilm1][j]+
            *(pweights  )*NET.Outn[ilm1][j-1];
        }
        switch(NET.T_func[il][in])
        {
          case 2: NET.Outn[il][in] = MLP_Sigmoide(a);
            break;
          case 1: NET.Outn[il][in] = a;
            break;
          case 0: NET.Outn[il][in] = 0;
            break;
        }
      }
      if(il == NET.Nlayer-1)
      {
        for(in=0; in<NET.Nneur[NET.Nlayer-1]; in++)
        {
          rrans = (dbl) PAT.Rans[ifile][ipat][in];
          err += (rrans-NET.Outn[NET.Nlayer-1][in])*
                 (rrans-NET.Outn[NET.Nlayer-1][in])*
                 PAT.Pond[ifile][ipat];
        }
      }
    }
  }
  return(err);
}
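
/* MLP_Test_MM evaluates the weighted sum-of-squares error
       E = sum_p pond_p * sum_out (rans - out)^2
   processing the examples two at a time through MLP_MM2rows (a 2-row
   matrix-matrix product against the first-layer weights); a single
   leftover example, when npat is odd, goes through MLP_MatrixVector. */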

/***********************************************************/
/* MLP_Test */
/* */
/* computes the MLP error function */
/* inputs: int ifile = file number: 0=learn, 1=test */
/* int regul = 0: no regularisation term */
/* 1: regularisation term */
/* (for hybrid learning method) */
/* */
/* return value (dbl) = error value */
/* */
/* Author: J.Schwindling 31-Mar-99 */
/***********************************************************/

/* extern "C"Dllexport */dbl MLP_Test(int ifile, int regul)
{
  dbl err, rrans;
  int in, jn, ipat, ipati;

  dbl *tmp;

  tmp = (dbl *) malloc(2 * NET.Nneur[1] * sizeof(dbl));
  if(tmp == nullptr) /* not enough memory */
  {
    printf("not enough memory in MLP_Test\n");
    err = 0;
    for(ipat=0; ipat<PAT.Npat[ifile]; ipat++)
    {
      if(ifile==0)
      {
        ipati = ExamplesIndex[ipat];
      }
      else
      {
        ipati = ipat;
      }
      MLP_Out_T(PAT.Rin[ifile][ipati]);
      for(in=0; in<NET.Nneur[NET.Nlayer-1]; in++)
      {
        rrans = (dbl) PAT.Rans[ifile][ipati][in];
        err += (rrans-NET.Outn[NET.Nlayer-1][in])*
               (rrans-NET.Outn[NET.Nlayer-1][in])*
               PAT.Pond[ifile][ipati];
      }
    }

    if(regul>=1)
    {
      for(in=0; in<NET.Nneur[NET.Nlayer-1]; in++)
        for(jn=0; jn<=NET.Nneur[NET.Nlayer-2]; jn++)
        {
          err += LEARN.Alambda*NET.Weights[NET.Nlayer-1][in][jn]*
                 NET.Weights[NET.Nlayer-1][in][jn];
        }
    }
    free(tmp);
    return(err);
  }
  else /* computation using matrix - matrix multiply */
  {
    err = MLP_Test_MM(ifile, tmp);
    if(regul>=1)
    {
      for(in=0; in<NET.Nneur[NET.Nlayer-1]; in++)
        for(jn=0; jn<=NET.Nneur[NET.Nlayer-2]; jn++)
        {
          err += LEARN.Alambda*NET.Weights[NET.Nlayer-1][in][jn]*
                 NET.Weights[NET.Nlayer-1][in][jn];
        }
    }
    free(tmp);
    return(err);
  }
}
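
/* With regul=1 the returned error also contains the hybrid-method
   penalty  LEARN.Alambda * sum w^2  over the last-layer (linear)
   weights, the term whose coefficient is computed by SetLambda below. */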

/***********************************************************/
/* MLP_Stochastic */
/* */
/* one epoch of MLP stochastic training */
/* (optimized for speed) */
/* */
/* Author: J.Schwindling 08-Jul-99 */
/* Modified: J.Schwindling 20-Jul-99 remove unused cases */
/***********************************************************/

/* extern "C"Dllexport */dbl MLP_Stochastic()
{
  int ipat, ii, inm1;
  dbl err = 0;
  int il, in1, in, itest2;
  dbl deriv, deriv1, deriv2, deriv3, deriv4, pond;
  dbl eta, eps;
  dbl a, b, dd, a1, a2, a3, a4;
  dbl *pout, *pdelta, *pw1, *pw2, *pw3, *pw4;
  dbl ***weights;

  if(NET.Debug>=5) printf(" Entry MLP_Stochastic\n");
  weights = NET.Weights;
/* shuffle patterns */
  ShuffleExamples(PAT.Npat[0],ExamplesIndex);

/* reduce learning parameter */
  if(LEARN.Decay<1) EtaDecay();

  eta = -LEARN.eta;
  eps = LEARN.epsilon;

/* loop on the examples */
  for(ipat=0;ipat<PAT.Npat[0];ipat++)
  {
    ii = ExamplesIndex[ipat];
    pond = PAT.Pond[0][ii];

    MLP_Out2(&(PAT.vRin[0][ii*(NET.Nneur[0]+1)]));

/* next lines are equivalent to DeDwSum */
    for(in=0; in<NET.Nneur[NET.Nlayer-1]; in++)
    {
      deriv = NET.Deriv1[NET.Nlayer-1][in];
      a = (dbl) PAT.Rans[0][ii][in];
      b = NET.Outn[NET.Nlayer-1][in]-a;
      err += b*b*pond;
      NET.Delta[NET.Nlayer-1][in] = b*deriv*pond*eta;
    }

    for(il=NET.Nlayer-2; il>0; il--)
    {
      dd = NET.Delta[il+1][0];
      for(in=0; in<NET.Nneur[il]-3; in+=4)
      {
        deriv1 = NET.Deriv1[il][in];
        deriv2 = NET.Deriv1[il][in+1];
        deriv3 = NET.Deriv1[il][in+2];
        deriv4 = NET.Deriv1[il][in+3];
        itest2 = (NET.Nneur[il+1]==1);
        a1 = dd*weights[il+1][0][in+1];
        a2 = dd*weights[il+1][0][in+2];
        a3 = dd*weights[il+1][0][in+3];
        a4 = dd*weights[il+1][0][in+4];
        if(itest2) goto L1;
        pdelta = &(NET.Delta[il+1][1]);
        for(in1=1; in1<NET.Nneur[il+1];
            in1++, pdelta++)
        {
          a1 += *pdelta * weights[il+1][in1][in+1];
          a2 += *pdelta * weights[il+1][in1][in+2];
          a3 += *pdelta * weights[il+1][in1][in+3];
          a4 += *pdelta * weights[il+1][in1][in+4];
        }
L1:     NET.Delta[il][in] = a1*deriv1;
        NET.Delta[il][in+1] = a2*deriv2;
        NET.Delta[il][in+2] = a3*deriv3;
        NET.Delta[il][in+3] = a4*deriv4;
      }
      for(/*'in' set above*/; in<NET.Nneur[il]; in++)
      {
        deriv = NET.Deriv1[il][in];
        itest2 = (NET.Nneur[il+1]==1);
        a = dd*weights[il+1][0][in+1];
        if(itest2) goto L2;
        pdelta = &(NET.Delta[il+1][1]);
        for(in1=1; in1<NET.Nneur[il+1];
            in1++, pdelta++)
        {
          a += *pdelta *
               weights[il+1][in1][in+1];
        }
L2:     NET.Delta[il][in] = a*deriv;
      }

    } /* end of loop on layers */

/* update the weights */
    if(eps==0)
    {
      for(il=1; il<NET.Nlayer; il++)
      {
        inm1 = NET.Nneur[il-1];
        for(in=0; in<NET.Nneur[il]-3; in+=4)
        {
          a1 = NET.Delta[il][in];
          a2 = NET.Delta[il][in+1];
          a3 = NET.Delta[il][in+2];
          a4 = NET.Delta[il][in+3];
          pout = &(NET.Outn[il-1][0]);
          weights[il][in][0] += a1;
          weights[il][in+1][0] += a2;
          weights[il][in+2][0] += a3;
          weights[il][in+3][0] += a4;
          weights[il][in][1] += a1* (*pout);
          weights[il][in+1][1] += a2* (*pout);
          weights[il][in+2][1] += a3* (*pout);
          weights[il][in+3][1] += a4* (*pout);
          pout++;
          pw1 = &(weights[il][in][2]);
          pw2 = &(weights[il][in+1][2]);
          pw3 = &(weights[il][in+2][2]);
          pw4 = &(weights[il][in+3][2]);
          for(in1=2; in1<=inm1;
              ++in1, ++pout, ++pw1, ++pw2,
              ++pw3, ++pw4)
          {
            *pw1 += a1 * *pout;
            *pw2 += a2 * *pout;
            *pw3 += a3 * *pout;
            *pw4 += a4 * *pout;
          }
        }
        for(/*'in' set above*/; in<NET.Nneur[il]; in++)
        {
          a1 = NET.Delta[il][in];
          pout = &(NET.Outn[il-1][0]);
          weights[il][in][0] += a1;
          weights[il][in][1] += a1* (*pout);
          pout++;
          pw1 = &(weights[il][in][2]);
          for(in1=2; in1<=inm1;
              ++in1, ++pout, ++pw1)
          {
            *pw1 += a1 * *pout;
          }
        }
      }
    }
    else
    {
      for(il=1; il<NET.Nlayer; il++)
      {
        for(in=0; in<NET.Nneur[il]; in++)
        {
          a = NET.Delta[il][in];
          LEARN.Odw[il][in][0] = a + eps * LEARN.Odw[il][in][0];
          NET.Weights[il][in][0] += LEARN.Odw[il][in][0];

          b = a*NET.Outn[il-1][0];
          LEARN.Odw[il][in][1] = b + eps*LEARN.Odw[il][in][1];
          NET.Weights[il][in][1] += LEARN.Odw[il][in][1];

          for(in1=2; in1<=NET.Nneur[il-1]; in1++)
          {
            b = a*NET.Outn[il-1][in1-1];
            LEARN.Odw[il][in][in1] = b + eps*LEARN.Odw[il][in][in1];
            NET.Weights[il][in][in1] += LEARN.Odw[il][in][in1];
          }
        }
      }
    }

  } /* end of loop on examples */
  return(err);
}
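
/* Stochastic update implemented above, written out for one weight:
       Delta_out = (out - ans) * deriv * pond * (-eta)
       Odw       = Delta * x + epsilon * Odw    (momentum term)
       w        += Odw
   With epsilon == 0 the momentum buffer is skipped entirely and the
   faster hand-unrolled branch is used. */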

/***********************************************************/
/* MLP_Epoch */
/* */
/* one epoch of MLP training */
/* inputs: int iepoch = epoch number */
/* dbl *alpmin = optimal alpha from Line Search */
/* */
/* return value (dbl) = error value on learning sample */
/* BEFORE changing the weights */
/* */
/* Author: J.Schwindling 31-Mar-99 */
/* Modified: J.Schwindling 09-Apr-99 */
/* re-organize routine */
/* J.Schwindling 13-Apr-99 */
/* remove Quickprop algorithm */
/* implement Ribiere-Polak conjugate grad. */
/***********************************************************/

/* extern "C"Dllexport */dbl MLP_Epoch(int iepoch, dbl *alpmin, int *Ntest)
{
  dbl err, ONorm, beta, prod, ddir;
/* int *index;*/
  int Nweights, Nlinear, ipat, ierr;
  int nn;

  err = 0;
  *alpmin = 0.;

  Nweights = NET.Nweights;
  Nlinear = NET.Nneur[NET.Nlayer-2] + 1;

  if(NET.Debug>=5) printf(" Entry MLP_Epoch\n");
/* stochastic minimization */
  if(LEARN.Meth==1)
  {
    err = MLP_Stochastic();
  }
  else
  {
    if(iepoch==1 && LEARN.Meth==7)
    {
      SetLambda(10000);
      MLP_ResLin();
      if(NET.Debug>=2) PrintWeights();
    }

/* save previous gradient and reset current one */
    DeDwSaveZero();
    if(LEARN.Meth==16)
    {
      ShuffleExamples(PAT.Npat[0],ExamplesIndex);
      nn = PAT.Npat[0];
      PAT.Npat[0] = nn/10;
      for(ipat=0;ipat<nn;ipat++)
      {
        ierr = MLP_Train(&ExamplesIndex[ipat],&err);
        if(ierr!=0) printf("Epoch: ierr= %d\n",ierr);
      }
    }
    else
    {
      for(ipat=0;ipat<PAT.Npat[0];ipat++)
      {
        ierr = MLP_Train(&ipat,&err);
        if(ierr!=0) printf("Epoch: ierr= %d\n",ierr);
      }
    }
    DeDwScale(PAT.Npat[0]);
    if(LEARN.Meth==2) StochStep();
    if(LEARN.Meth==3)
    {
      SteepestDir();
      if(LineSearch(alpmin,Ntest,err)==1) StochStep();
    }

/* Conjugate Gradients Ribiere - Polak */
    if(LEARN.Meth==4)
    {
      if((iepoch-1)%LEARN.Nreset==0)
      {
        LEARN.Norm = DeDwNorm(); /* for next epoch */
        SteepestDir();
      }
      else
      {
        ONorm = LEARN.Norm;
        LEARN.Norm = DeDwNorm();
        prod = DeDwProd();
        beta = (LEARN.Norm-prod)/ONorm;
        CGDir(beta);
      }
      if(LineSearch(alpmin,Ntest,err)==1) StochStep();
    }

/* Conjugate Gradients Fletcher - Reeves */
    if(LEARN.Meth==5)
    {
      if((iepoch-1)%LEARN.Nreset==0)
      {
        LEARN.Norm = DeDwNorm(); /* for next epoch */
        SteepestDir();
      }
      else
      {
        ONorm = LEARN.Norm;
        LEARN.Norm = DeDwNorm();
        beta = LEARN.Norm/ONorm;
        CGDir(beta);
      }
      if(LineSearch(alpmin,Ntest,err)==1) StochStep();
    }
    if(LEARN.Meth==6)
    {
      if((iepoch-1)%LEARN.Nreset==0)
      {
        SteepestDir();
        InitBFGSH(Nweights);
      }
      else
      {
        GetGammaDelta();
        ierr = GetBFGSH(Nweights);
        if(ierr)
        {
          SteepestDir();
          InitBFGSH(Nweights);
        }
        else
        {
          BFGSdir(Nweights);
        }
      }
      ddir = DerivDir();
      if(ddir>0)
      {
        SteepestDir();
        InitBFGSH(Nweights);
        ddir = DerivDir();
      }
      if(LineSearch(alpmin,Ntest,err)==1)
      {
        InitBFGSH(Nweights);
        SteepestDir();
        if(LineSearch(alpmin,Ntest,err)==1)
        {
          printf("Line search fail \n");
        }
      }
    }
    if(LEARN.Meth==7)
    {
      if((iepoch-1)%LEARN.Nreset==0)
      {
        SteepestDir();
        InitBFGSH(Nweights-Nlinear);
      }
      else
      {
        if(NET.Debug>=5) printf("Before GetGammaDelta \n");
        GetGammaDelta();
        if(NET.Debug>=5) printf("After GetGammaDelta \n");
        ierr = GetBFGSH(Nweights-Nlinear);
        if(NET.Debug>=5) printf("After GetBFGSH \n");
        if(ierr)
        {
          SteepestDir();
          InitBFGSH(Nweights-Nlinear);
        }
        else
        {
          BFGSdir(Nweights-Nlinear);
        }
        if(NET.Debug>=5) printf("After BFGSdir \n");
      }
      SetLambda(10000);
      if(LineSearchHyb(alpmin,Ntest)==1)
      {
        InitBFGSH(Nweights-Nlinear);
        SteepestDir();
        if(LineSearchHyb(alpmin,Ntest)==1)
        {
          printf("Line search fail \n");
        }
      }
    }
  }

  if(NET.Debug>=5) printf(" End MLP_Epoch\n");
  return(err);
}
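
/* LEARN.Meth values dispatched above:
     1  stochastic minimization         2  steepest descent, fixed step
     3  steepest descent + line search  4  Polak-Ribiere conjugate gradient
     5  Fletcher-Reeves conjugate grad. 6  BFGS
     7  hybrid BFGS-linear             16  batch gradient with Npat
                                           temporarily set to a tenth of
                                           the (shuffled) sample */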

/***********************************************************/
/* MLP_Train */
/* */
/* Train Network: compute output, update DeDw */
/* inputs: int *ipat = pointer to pattern number */
/* input/output: dbl *err = current error */
/* */
/* return value (int) = error value: 1 wrong pattern number */
/* 2 *ipat < 0 */
/* */
/* Author: J.Schwindling 09-Apr-99 */
/***********************************************************/

/* extern "C"Dllexport */int MLP_Train(int *ipat, dbl *err)
{
  int in;

/* if(*ipat>=PAT.Npat[0]) return(1);*/
  if(*ipat<0) return(2);

/* MLP_Out(PAT.Rin[0][*ipat],NET.Outn[NET.Nlayer-1]);*/
  MLP_Out2(&(PAT.vRin[0][*ipat*(NET.Nneur[0]+1)]));
  for(in=0; in<NET.Nneur[NET.Nlayer-1]; in++)
  {
    *err += ((dbl) PAT.Rans[0][*ipat][in]-NET.Outn[NET.Nlayer-1][in])
            *((dbl) PAT.Rans[0][*ipat][in]-NET.Outn[NET.Nlayer-1][in])*
            PAT.Pond[0][*ipat];
  }
  DeDwSum(PAT.Rans[0][*ipat],NET.Outn[NET.Nlayer-1],*ipat);
  return(0);
}


/***********************************************************/
/* StochStepHyb */
/* */
/* Update the weights according to stochastic minimization */
/* formula (for hybrid methods) */
/* */
/* return value (int) = 0 */
/* */
/* Author: J.Schwindling 09-Apr-99 */
/***********************************************************/

/* extern "C"Dllexport */int StochStepHyb()
{
  int il, in1, in;
  dbl eta, eps;

  eta = LEARN.eta;
  eps = LEARN.epsilon;
  for(il=NET.Nlayer-2; il>0; il--) {

    for(in=0; in<NET.Nneur[il]; in++) {

      /* compute delta weights */
      for(in1=0; in1<=NET.Nneur[il-1]; in1++) {
        LEARN.Odw[il][in][in1] = -eta * LEARN.DeDw[il][in][in1]
                                 + eps * LEARN.Odw[il][in][in1];
      }

      /* update weights */
      for(in1=0; in1<=NET.Nneur[il-1]; in1++) {
        NET.Weights[il][in][in1] += LEARN.Odw[il][in][in1];
      }
    }
  }
  MLP_ResLin();
  return(0);
}


/***********************************************************/
/* StochStep */
/* */
/* Update the weights according to stochastic minimization */
/* formula */
/* */
/* return value (int) = 0 */
/* */
/* Author: J.Schwindling 09-Apr-99 */
/***********************************************************/

/* extern "C"Dllexport */int StochStep()
{
  int il, in1, in;
  dbl eta, eps, epseta;

  eta = -LEARN.eta;
  eps = LEARN.epsilon;
  epseta = eps/eta;
  for(il=NET.Nlayer-1; il>0; il--) {
    for(in1=0; in1<=NET.Nneur[il-1]; in1++) {

      /* compute delta weights */
      for(in=0; in<NET.Nneur[il]; in++) {
        LEARN.Odw[il][in][in1] = eta * (LEARN.DeDw[il][in][in1]
                                 + epseta * LEARN.Odw[il][in][in1]);
        NET.Weights[il][in][in1] += LEARN.Odw[il][in][in1];
      }

    }
  }

  return(0);
}
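
/* StochStep applies, for every weight,
       Odw = -LEARN.eta * DeDw + LEARN.epsilon * Odw
       w  += Odw
   (the code factors this as eta*(DeDw + epseta*Odw) with eta already
   negated and epseta = eps/eta). */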

/***********************************************************/
/* DeDwNorm */
/* */
/* computes the norm of the gradient */
/* */
/* Author: J.Schwindling 31-Mar-99 */
/***********************************************************/

/* extern "C"Dllexport */dbl DeDwNorm()
{
  int il,in,jn;
  dbl dd=0;
  for(il=1; il<NET.Nlayer; il++)
    for(in=0; in<NET.Nneur[il]; in++)
      for(jn=0; jn<=NET.Nneur[il-1]; jn++)
        dd += LEARN.DeDw[il][in][jn]*
              LEARN.DeDw[il][in][jn];
  return(dd);
}


/***********************************************************/
/* DeDwProd */
/* */
/* scalar product between new gradient and previous one */
/* (used in the Polak-Ribiere Conjugate Gradient method) */
/* */
/* Author: J.Schwindling 26-Mar-99 */
/***********************************************************/

/* extern "C"Dllexport */dbl DeDwProd()
{
  int il,in,jn;
  dbl dd=0;
  for(il=1; il<NET.Nlayer; il++)
    for(in=0; in<NET.Nneur[il]; in++)
      for(jn=0; jn<=NET.Nneur[il-1]; jn++)
        dd += LEARN.DeDw[il][in][jn]*
              LEARN.ODeDw[il][in][jn];
  return(dd);
}


/***********************************************************/
/* DeDwZero */
/* */
/* resets the gradient to 0 (should be done before DeDwSum) */
/* */
/* Author: J.Schwindling 31-Mar-99 */
/***********************************************************/

/* extern "C"Dllexport */void DeDwZero()
{
  int il, in, jn;
  for(il=1; il<NET.Nlayer; il++)
    for(in=0; in<NET.Nneur[il]; in++)
      for(jn=0; jn<=NET.Nneur[il-1]; jn++)
        LEARN.DeDw[il][in][jn] = 0;
}


/***********************************************************/
/* DeDwScale */
/* */
/* divides the gradient by the number of examples */
/* inputs: int Nexamples = number of examples */
/* */
/* Author: J.Schwindling 31-Mar-99 */
/***********************************************************/

/* extern "C"Dllexport */void DeDwScale(int Nexamples)
{
  int il, in, jn;
  for(il=1; il<NET.Nlayer; il++)
    for(in=0; in<NET.Nneur[il]; in++)
      for(jn=0; jn<=NET.Nneur[il-1]; jn++)
        LEARN.DeDw[il][in][jn] /= (dbl) Nexamples;
}


/***********************************************************/
/* DeDwSave */
/* */
/* copies the gradient DeDw into ODeDw */
/* */
/* Author: J.Schwindling 31-Mar-99 */
/***********************************************************/

/* extern "C"Dllexport */void DeDwSave()
{
  int il, in, jn;
  for(il=1; il<NET.Nlayer; il++)
    for(in=0; in<NET.Nneur[il]; in++)
      for(jn=0; jn<=NET.Nneur[il-1]; jn++)
        LEARN.ODeDw[il][in][jn] = LEARN.DeDw[il][in][jn];
}


/***********************************************************/
/* DeDwSaveZero */
/* */
/* copies the gradient DeDw into ODeDw */
/* resets DeDw to 0 */
/* */
/* Author: J.Schwindling 23-Apr-99 */
/***********************************************************/

/* extern "C"Dllexport */void DeDwSaveZero()
{
  int il, in, jn;
  for(il=1; il<NET.Nlayer; il++)
    for(in=0; in<NET.Nneur[il]; in++)
      for(jn=0; jn<=NET.Nneur[il-1]; jn++)
      {
        LEARN.ODeDw[il][in][jn] = LEARN.DeDw[il][in][jn];
        LEARN.DeDw[il][in][jn] = 0;
      }
}


/***********************************************************/
/* DeDwSum */
/* */
/* adds the gradient for the current example to the total */
/* gradient */
/* inputs: type_pat *ans = target values for this example */
/* dbl *out = network outputs */
/* int ipat = example number */
/* */
/* Author: J.Schwindling 31-Mar-99 */
/* Modified: B.Mansoulie 23-Apr-99 */
/* faster and still correct way to compute the */
/* sigmoid derivative */
/***********************************************************/

/* extern "C"Dllexport */int DeDwSum(type_pat *ans, dbl *out, int ipat)
{
  int il, in1, in, ii;
/* dbl err[NMAX][4]; */
  dbl deriv;
  dbl *pout, *pdedw, *pdelta;
  dbl a, b;
/* char buf[50];*/

/* output layer */
  b = (dbl) PAT.Pond[0][ipat];
  for(in=0; in<NET.Nneur[NET.Nlayer-1]; in++)
  {
    deriv = NET.Deriv1[NET.Nlayer-1][in];
    NET.Delta[NET.Nlayer-1][in] =
      (out[in] - (dbl) ans[in])*deriv*b;
  }

  for(il=NET.Nlayer-2; il>0; il--)
  {
    for(in=0; in<NET.Nneur[il]; in++)
    {
      deriv = NET.Deriv1[il][in];
      a = NET.Delta[il+1][0] * NET.Weights[il+1][0][in+1];
      pdelta = &(NET.Delta[il+1][1]);
      for(in1=1; in1<NET.Nneur[il+1]; in1++, pdelta++)
      {
        a += *pdelta * NET.Weights[il+1][in1][in+1];
      }
      NET.Delta[il][in] = a * deriv;
    }
  }

  for(il=1; il<NET.Nlayer; il++)
  {
    ii = NET.Nneur[il-1];
    for(in=0; in<NET.Nneur[il]; in++)
    {
      a = NET.Delta[il][in];
      LEARN.DeDw[il][in][0] += a;
      LEARN.DeDw[il][in][1] += a * NET.Outn[il-1][0];
      pout = &(NET.Outn[il-1][1]);
      pdedw = &(LEARN.DeDw[il][in][2]);
      for(in1=1; in1<ii; ++in1, ++pout, ++pdedw)
      {
        (*pdedw) += a * (*pout);
      }
    }
  }

  return(0);
}


/***********************************************************/
/* SetTransFunc */
/* */
/* sets the transfer function of a neuron */
/* inputs: int layer = layer number (1 -> Nlayer) */
/* int neuron = neuron number (1 -> Nneur) */
/* int func = transfer function: */
/* 0: y=0 */
/* 1: y=x */
/* 2: y=1/(1+exp(-x)) */
/* 3: y=tanh(x) */
/* 4: y=delta*x+1/(1+exp(-x)) */
/* 5: y=exp(-x**2) */
/* */
/* return value (int) = error value: */
/* 0: no error */
/* 1: layer > NLMAX */
/* 2: neuron > NMAX */
/* */
/* Author: J.Schwindling 02-Apr-99 */
/***********************************************************/

/* extern "C"Dllexport */int SetTransFunc(int layer, int neuron,
                                          int func)
{
  if(layer>NLMAX) return(1);
/* if(neuron>NMAX) return(2);*/

  NET.T_func[layer-1][neuron-1] = func;

  return(0);
}


/***********************************************************/
/* SetDefaultFuncs */
/* */
/* - sets the default transfer functions to sigmoid for */
/* hidden layers and linear for output layer */
/* - sets temperatures to 1 */
/* */
/* */
/* Author: J.Schwindling 08-Apr-99 */
/***********************************************************/

/* extern "C"Dllexport */void SetDefaultFuncs()
{
  int il,in;
  for(il=0; il<NET.Nlayer; il++) {
    for(in=0; in<NET.Nneur[il]; in++) {
      NET.T_func[il][in] = 2;
      if(il==NET.Nlayer-1) NET.T_func[il][in] = 1;
    }
  }
}

/***********************************************************/
/* SteepestDir */
/* */
/* sets the search direction to steepest descent */
/* */
/* Author: J.Schwindling 02-Apr-99 */
/***********************************************************/

/* extern "C"Dllexport */void SteepestDir()
{
  int il,in,jn;
  for(il=1; il<NET.Nlayer; il++)
    for(in=0; in<NET.Nneur[il]; in++)
      for(jn=0; jn<=NET.Nneur[il-1]; jn++)
        dir[il][in][jn] = -LEARN.DeDw[il][in][jn];
}


/***********************************************************/
/* CGDir */
/* */
/* sets the search direction to conjugate gradient dir */
/* inputs: dbl beta : d(t+1) = -g(t+1) + beta d(t) */
/* beta should be: */
/* ||g(t+1)||^2 / ||g(t)||^2 (Fletcher-Reeves) */
/* g(t+1) (g(t+1)-g(t)) / ||g(t)||^2 (Polak-Ribiere) */
/* */
/* Author: J.Schwindling 02-Apr-99 */
/***********************************************************/

/* extern "C"Dllexport */void CGDir(dbl beta)
{
  int il,in,jn;
  for(il=1; il<NET.Nlayer; il++)
    for(in=0; in<NET.Nneur[il]; in++)
      for(jn=0; jn<=NET.Nneur[il-1]; jn++)
      {
        dir[il][in][jn] = -LEARN.DeDw[il][in][jn]+
                          beta*dir[il][in][jn];
      }
}


/***********************************************************/
/* DerivDir */
/* */
/* scalar product between gradient and direction */
/* = derivative along direction */
/* */
/* Author: J.Schwindling 01-Jul-99 */
/***********************************************************/

/* extern "C"Dllexport */dbl DerivDir()
{
  int il,in,jn;
  dbl ddir = 0;

  for(il=1; il<NET.Nlayer; il++)
    for(in=0; in<NET.Nneur[il]; in++)
      for(jn=0; jn<=NET.Nneur[il-1]; jn++)
      {
        ddir += LEARN.DeDw[il][in][jn]*dir[il][in][jn];
      }
  return(ddir);
}


/***********************************************************/
/* GetGammaDelta */
/* */
/* sets the vectors Gamma (=g(t+1)-g(t)) */
/* and delta (=w(t+1)-w(t)) */
/* (for BFGS and Hybrid learning methods) */
/* */
/* Author: J.Schwindling 02-Apr-99 */
/***********************************************************/

/* extern "C"Dllexport */void GetGammaDelta()
{
  int i=0;
  int il,in,jn;
  for(il=1; il<NET.Nlayer; il++)
    for(in=0; in<NET.Nneur[il]; in++)
      for(jn=0; jn<=NET.Nneur[il-1]; jn++)
      {
        Gamma[i] = LEARN.DeDw[il][in][jn]-
                   LEARN.ODeDw[il][in][jn];
        delta[i] = LEARN.Odw[il][in][jn];
        i++;
      }
}


/***********************************************************/
/* BFGSdir */
/* */
/* sets the search direction to BFGS direction from the */
/* BFGS matrix */
/* */
/* inputs: int Nweights = number of weights */
/* */
/* Author: J.Schwindling 02-Apr-99 */
/***********************************************************/

/* extern "C"Dllexport */void BFGSdir(int Nweights)
{
  dbl *g, *s;
  int kk=0;
  int il,i,j,in,jn;

  g = (dbl*) malloc(NET.Nweights*sizeof(dbl));
  s = (dbl*) malloc(Nweights*sizeof(dbl));

  for(il=1; kk<Nweights; il++)
    for(in=0; in<NET.Nneur[il]; in++)
      for(jn=0; jn<=NET.Nneur[il-1]; jn++)
      {
        g[kk] = LEARN.DeDw[il][in][jn];
        kk++;
      }
  for(i=0; i<Nweights; i++)
  {
    s[i] = 0;
    for(j=0; j<Nweights; j++)
    {
      s[i] += BFGSH[i][j] * g[j];
    }
  }

  kk = 0;
  for(il=1; kk<Nweights; il++)
    for(in=0; in<NET.Nneur[il]; in++)
      for(jn=0; jn<=NET.Nneur[il-1]; jn++)
      {
        dir[il][in][jn] = -s[kk];
        kk++;
      }
  free(g);
  free(s);
}
1377 
1378 /***********************************************************/
1379 /* InitBFGS */
1380 /* */
1381 /* initializes BFGS matrix to identity */
1382 /* */
1383 /* inputs: int Nweights = number of weights */
1384 /* */
1385 /* Author: J.Schwindling 02-Apr-99 */
1386 /***********************************************************/
1387 
1388 /* extern "C"Dllexport */void InitBFGSH(int Nweights)
1389 {
1390  int i,j;
1391  for(i=0; i<Nweights; i++)
1392  for(j=0; j<Nweights; j++)
1393  {
1394  BFGSH[i][j] = 0;
1395  if(i==j) BFGSH[i][j] = 1;
1396  }
1397 }
1398 
1399 
1400 /***********************************************************/
1401 /* GetBFGSH */
1402 /* */
1403 /* sets the BFGS matrix */
1404 /* */
1405 /* inputs: int Nweights = number of weights */
1406 /* */
1407 /* return value (int) = 0 if no problem */
1408 /* 1 is deltaTgamma = 0 -> switch to */
1409 /* steepest dir */
1410 /* */
1411 /* Author: J.Schwindling 02-Apr-99 */
1412 /* Modified: J.Schwindling 04-May-99 */
1413 /* computations as Nw^2 , matrices removed */
1414 /* Modified: J.Schwindling 11-Jun-99 */
1415 /* return value */
1416 /***********************************************************/
1417 
1418 /* extern "C"Dllexport */int GetBFGSH(int Nweights)
1419 {
1420  typedef double dble;
1421  dble deltaTgamma=0;
1422  dble factor=0;
1423  dble *Hgamma;
1424  dble *tmp;
1425  dble a, b;
1426  int i,j;
1427 
1428  Hgamma = (dble *) malloc(Nweights*sizeof(dble));
1429  tmp = (dble *) malloc(Nweights*sizeof(dble));
1430 
1431  for(i=0; i<Nweights; i++)
1432  {
1433  deltaTgamma += (dble) delta[i] * (dble) Gamma[i];
1434  a = 0;
1435  b = 0;
1436  for(j=0; j<Nweights; j++)
1437  {
1438  a += (dble) BFGSH[i][j] * (dble) Gamma[j];
1439  b += (dble) Gamma[j] * (dble) BFGSH[j][i];
1440  }
1441  Hgamma[i] = a;
1442  tmp[i] = b;
1443  factor += (dble) Gamma[i]*Hgamma[i];
1444  }
1445  if(deltaTgamma == 0)
1446  {
1447  free(tmp);
1448  free(Hgamma);
1449  return 1;
1450  }
1451  a = 1 / deltaTgamma;
1452  factor = 1 + factor*a;
1453 
1454  for(i=0; i<Nweights; i++)
1455  {
1456  b = (dble) delta[i];
1457  for(j=0; j<Nweights; j++)
1458  BFGSH[i][j] += (dbl) (factor*b* (dble)
1459  delta[j]-(tmp[j]*b+Hgamma[i]*(dble)delta[j]))*a;
1460  }
1461  free(Hgamma);
1462  free(tmp);
1463  return 0;
1464 }
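
/* GetBFGSH implements the standard BFGS update of the inverse-Hessian
   approximation H, with gamma = g(t+1)-g(t) and delta = w(t+1)-w(t):
       H' = H + (1 + gamma^T H gamma / delta^T gamma)
               * (delta delta^T) / (delta^T gamma)
             - (H gamma delta^T + delta gamma^T H) / (delta^T gamma)
   It only needs the vectors Hgamma = H gamma and tmp = gamma^T H, so
   the update costs O(Nweights^2) instead of explicit matrix products. */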

/***********************************************************/
/* LineSearch */
/* */
/* search along the line defined by dir */
/* */
/* outputs: dbl *alpmin = optimal step length */
/* */
/* Author: B.Mansoulie 01-Jul-98 */
/***********************************************************/

/* extern "C"Dllexport */
int LineSearch(dbl *alpmin, int *Ntest, dbl Err0)
{
  dbl ***w0;
  dbl alpha1, alpha2, alpha3;
  dbl err1, err2, err3;
  dbl tau;
  int icount, il, in, jn;

  tau=LEARN.Tau;

/* store weights before line search */

  *Ntest = 0;
  w0 = (dbl ***) malloc(NET.Nlayer*sizeof(dbl**));
  for(il=1; il<NET.Nlayer; il++)
  {
    w0[il] = (dbl **) malloc(NET.Nneur[il]*sizeof(dbl*));
    for(in=0; in<NET.Nneur[il]; in++)
    {
      w0[il][in] = (dbl *) malloc((NET.Nneur[il-1]+1)*
                                  sizeof(dbl));
      for(jn=0; jn<=NET.Nneur[il-1]; jn++)
      {
        w0[il][in][jn] = NET.Weights[il][in][jn];
      }
    }
  }

/* compute error(w0) */

/* err1 = MLP_Test(0,0);
   (*Ntest) ++;*/
  err1 = Err0;

  if(NET.Debug>=4) printf("starting err= %f\n",err1);

  *alpmin = 0;
  alpha1 = 0;
/* alpha2 = 0.05;
   if(LastAlpha != 0) alpha2 = LastAlpha;*/
  alpha2 = LastAlpha;
  if(alpha2 < 0.01) alpha2 = 0.01;
  if(alpha2 > 2.0) alpha2 = 2.0;
  MLP_Line(w0,alpha2);
  err2 = MLP_Test(0,0);
  (*Ntest) ++;
  if(NET.Debug>=4) printf("alpha, err= %e %e\n",alpha2,err2);

  alpha3 = alpha2;
  err3 = err2;

/* try to find a triplet (alpha1, alpha2, alpha3) such that
   Error(alpha1)>Error(alpha2)<Error(alpha3) */

  if(err1>err2)
  {
    for(icount=1;icount<=100;icount++)
    {
      alpha3 = alpha3*tau;
      MLP_Line(w0,alpha3);
      err3 = MLP_Test(0,0);
      if(NET.Debug>=4) printf("alpha, err= %e %e\n",alpha3,err3);
      (*Ntest) ++;
      if(err3>err2) break;
      alpha1 = alpha2;
      err1 = err2;
      alpha2 = alpha3;
      err2 = err3;
    }
    if(icount>=100) /* line search fails */
    {
      MLP_Line(w0,0); /* reset weights */
      free(w0);
      return(1);
    }
  }
  else
  {
    for(icount=1;icount<=100;icount++)
    {
      alpha2 = alpha2/tau;
      MLP_Line(w0,alpha2);
      err2 = MLP_Test(0,0);
      if(NET.Debug>=4) printf("alpha, err= %e %e\n",alpha2,err2);
      (*Ntest) ++;
      if(err1>err2) break;
      alpha3 = alpha2;
      err3 = err2;
    }
    if(icount>=100) /* line search fails */
    {
      MLP_Line(w0,0); /* reset weights */
      free(w0);
      LastAlpha = 0.05; /* try to be safe */
      return(1);
    }
  }

/* find bottom of parabola */

  *alpmin = 0.5*(alpha1+alpha3-(err3-err1)/((err3-err2)/(alpha3-alpha2)
            -(err2-err1)/(alpha2-alpha1)));
  if(*alpmin>10000) *alpmin=10000;

/* set the weights */
  MLP_Line(w0,*alpmin);
  LastAlpha = *alpmin;

/* store weight changes */
  for(il=1; il<NET.Nlayer; il++)
    for(in=0; in<NET.Nneur[il]; in++)
      for(jn=0; jn<=NET.Nneur[il-1]; jn++)
        LEARN.Odw[il][in][jn] = NET.Weights[il][in][jn]
                                - w0[il][in][jn];

  for(il=1; il<NET.Nlayer; il++)
    for(in=0; in<NET.Nneur[il]; in++)
      free(w0[il][in]);
  for(il=1; il<NET.Nlayer; il++)
    free(w0[il]);
  free(w0);

  return(0);
}
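
/* The "bottom of parabola" step above is the vertex of the parabola
   interpolating (alpha1,err1), (alpha2,err2), (alpha3,err3):
       alpmin = (alpha1+alpha3)/2
              - (err3-err1) / ( (err3-err2)/(alpha3-alpha2)
                              - (err2-err1)/(alpha2-alpha1) ) / 2
   which is well defined here because the bracketing loops guarantee
   err2 < err1 and err2 < err3. */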

/***********************************************************/
/* DecreaseSearch */
/* */
/* search along the line defined by dir for a point where */
/* error is decreased (faster than full line search) */
/* */
/* outputs: dbl *alpmin = step length */
/* */
/* Author: J.Schwindling 11-May-99 */
/***********************************************************/

/* extern "C"Dllexport */
int DecreaseSearch(dbl *alpmin, int *Ntest, dbl Err0)
{
  dbl ***w0;
  dbl alpha2;
  dbl err1, err2;
  dbl tau;
  int icount, il, in, jn;

  tau=LEARN.Tau;

/* store weights before line search */

  *Ntest = 0;
  w0 = (dbl ***) malloc(NET.Nlayer*sizeof(dbl**));
  for(il=1; il<NET.Nlayer; il++)
  {
    w0[il] = (dbl **) malloc(NET.Nneur[il]*sizeof(dbl*));
    for(in=0; in<NET.Nneur[il]; in++)
    {
      w0[il][in] = (dbl *) malloc((NET.Nneur[il-1]+1)*
                                  sizeof(dbl));
      for(jn=0; jn<=NET.Nneur[il-1]; jn++)
      {
        w0[il][in][jn] = NET.Weights[il][in][jn];
      }
    }
  }

/* compute error(w0) */

/* err1 = MLP_Test(0,0);
   (*Ntest) ++;*/
  err1 = Err0;

  if(NET.Debug>=4) printf("starting err= %f\n",err1);

  *alpmin = 0;
  alpha2 = 0.05;
  MLP_Line(w0,alpha2);
  err2 = MLP_Test(0,0);
  (*Ntest) ++;

  if(err2<err1)
  {
    *alpmin = alpha2;
  }
  else
  {
    for(icount=1;icount<=100;icount++)
    {
      alpha2 = alpha2/tau;
      MLP_Line(w0,alpha2);
      err2 = MLP_Test(0,0);
      (*Ntest) ++;
      if(err1>err2) break;
    }
    if(icount>=100) /* line search fails */
    {
      MLP_Line(w0,0); /* reset weights */
      free(w0);
      return(1);
    }
    *alpmin = alpha2;
  }

/* set the weights */
  MLP_Line(w0,*alpmin);

/* store weight changes */
  for(il=1; il<NET.Nlayer; il++)
    for(in=0; in<NET.Nneur[il]; in++)
      for(jn=0; jn<=NET.Nneur[il-1]; jn++)
        LEARN.Odw[il][in][jn] = NET.Weights[il][in][jn]
                                - w0[il][in][jn];

  for(il=1; il<NET.Nlayer; il++)
    for(in=0; in<NET.Nneur[il]; in++)
      free(w0[il][in]);
  for(il=1; il<NET.Nlayer; il++)
    free(w0[il]);
  free(w0);

  return(0);
}


/* extern "C"Dllexport */int FixedStep(dbl alpha)
{
  dbl ***w0;
  int il, in, jn;

  w0 = (dbl ***) malloc(NET.Nlayer*sizeof(dbl**));
  for(il=1; il<NET.Nlayer; il++)
  {
    w0[il] = (dbl **) malloc(NET.Nneur[il]*sizeof(dbl*));
    for(in=0; in<NET.Nneur[il]; in++)
    {
      w0[il][in] = (dbl *) malloc((NET.Nneur[il-1]+1)*
                                  sizeof(dbl));
      for(jn=0; jn<=NET.Nneur[il-1]; jn++)
      {
        w0[il][in][jn] = NET.Weights[il][in][jn];
      }
    }
  }

/* set the weights */
  MLP_Line(w0,alpha);

/* store weight changes */
  for(il=1; il<NET.Nlayer; il++)
    for(in=0; in<NET.Nneur[il]; in++)
      for(jn=0; jn<=NET.Nneur[il-1]; jn++)
        LEARN.Odw[il][in][jn] = NET.Weights[il][in][jn]
                                - w0[il][in][jn];

  for(il=1; il<NET.Nlayer; il++)
    for(in=0; in<NET.Nneur[il]; in++)
      free(w0[il][in]);
  for(il=1; il<NET.Nlayer; il++)
    free(w0[il]);
  free(w0);

  return(0);
}

/***********************************************************/
/* MLP_Line */
/* */
/* sets the weights to a point along a line */
/* */
/* inputs: dbl ***w0 = initial point */
/* dbl alpha = step length */
/* */
/* Author: B.Mansoulie 01-Jul-98 */
/***********************************************************/

/* extern "C"Dllexport */
void MLP_Line(dbl ***w0, dbl alpha)
{
  int il,in,jn;

  for(il=1; il<NET.Nlayer; il++)
    for(in=0; in<NET.Nneur[il]; in++)
      for(jn=0; jn<=NET.Nneur[il-1]; jn++)
        NET.Weights[il][in][jn] = w0[il][in][jn]+
                                  alpha*dir[il][in][jn];
}


/***********************************************************/
/* LineSearchHyb */
/* */
/* search along the line defined by dir */
/* */
/* outputs: dbl *alpmin = optimal step length */
/* */
/* Author: B.Mansoulie 01-Jul-98 */
/***********************************************************/

/* extern "C"Dllexport */
int LineSearchHyb(dbl *alpmin, int *Ntest)
{
  dbl ***w0;
  dbl alpha1, alpha2, alpha3;
  dbl err1, err2, err3;
  dbl tau;
  int icount, il, in, jn;

/* char buf [50];
   sprintf (buf,"entree linesearchhyb\n");
   MessageBoxA (0,buf,"dans FreePatterns",MB_OK);*/

  if(NET.Debug>=4){
    printf(" entry LineSearchHyb \n");
  }
  tau=LEARN.Tau;

/* store weights before line search */

  *Ntest = 0;
  w0 = (dbl ***) malloc((NET.Nlayer-1)*sizeof(dbl**));
  for(il=1; il<NET.Nlayer-1; il++)
  {
    w0[il] = (dbl **) malloc(NET.Nneur[il]*sizeof(dbl*));
    for(in=0; in<NET.Nneur[il]; in++)
    {
      w0[il][in] = (dbl *) malloc((NET.Nneur[il-1]+1)*
                                  sizeof(dbl));
      for(jn=0; jn<=NET.Nneur[il-1]; jn++)
      {
        w0[il][in][jn] = NET.Weights[il][in][jn];
      }
    }
  }

/* compute error(w0) */
  err1 = MLP_Test(0,1);
  (*Ntest) ++;
  if(NET.Debug>=4) printf("LineSearchHyb starting err= %f\n",err1);

  *alpmin = 0;
  alpha1 = 0;
/* alpha2 = 0.05;
   if(LastAlpha != 0) alpha2 = LastAlpha;*/
  alpha2 = LastAlpha;
  if(alpha2 < 0.01) alpha2 = 0.01;
  if(alpha2 > 2.0) alpha2 = 2.0;
  MLP_LineHyb(w0,alpha2);
  err2 = MLP_Test(0,1);
  (*Ntest) ++;

  alpha3 = alpha2;
  err3 = err2;

/* try to find a triplet (alpha1, alpha2, alpha3) such that
   Error(alpha1)>Error(alpha2)<Error(alpha3) */

  if(err1>err2)
  {
    for(icount=1;icount<=100;icount++)
    {
      alpha3 = alpha3*tau;
      MLP_LineHyb(w0,alpha3);
      err3 = MLP_Test(0,1);
      (*Ntest) ++;
      if(err3>err2) break;
      alpha1 = alpha2;
      err1 = err2;
      alpha2 = alpha3;
      err2 = err3;
    }
    if(icount>=100) /* line search fails */
    {
      MLP_LineHyb(w0,0); /* reset weights */
      free(w0);
      return(1);
    }
  }
  else
  {
    for(icount=1;icount<=100;icount++)
    {
      alpha2 = alpha2/tau;
      MLP_LineHyb(w0,alpha2);
      err2 = MLP_Test(0,1);
      (*Ntest) ++;
      if(err1>err2) break;
      alpha3 = alpha2;
      err3 = err2;
    }
    if(icount>=100) /* line search fails */
    {
      MLP_LineHyb(w0,0); /* reset weights */
      free(w0);
      return(1);
    }
  }

/* find bottom of parabola */

  *alpmin = 0.5*(alpha1+alpha3-(err3-err1)/((err3-err2)/(alpha3-alpha2)
            -(err2-err1)/(alpha2-alpha1)));
  if(*alpmin>10000) *alpmin=10000;

/* set the weights */
  MLP_LineHyb(w0,*alpmin);
  LastAlpha = *alpmin;

/* store weight changes */
  for(il=1; il<NET.Nlayer-1; il++)
    for(in=0; in<NET.Nneur[il]; in++)
      for(jn=0; jn<=NET.Nneur[il-1]; jn++)
        LEARN.Odw[il][in][jn] = NET.Weights[il][in][jn]
                                - w0[il][in][jn];

  for(il=1; il<NET.Nlayer-1; il++)
    for(in=0; in<NET.Nneur[il]; in++)
      free(w0[il][in]);
  for(il=1; il<NET.Nlayer-1; il++)
    free(w0[il]);
  free(w0);
  if(NET.Debug>=4){
    printf(" exit LineSearchHyb \n");
  }

  return(0);
}

/***********************************************************/
/* MLP_LineHyb */
/* */
/* sets the weights to a point along a line */
/* (for hybrid methods) */
/* */
/* inputs: dbl ***w0 = initial point */
/* dbl alpha = step length */
/* */
/* Author: B.Mansoulie 01-Jul-98 */
/***********************************************************/

/* extern "C"Dllexport */
void MLP_LineHyb(dbl ***w0, dbl alpha)
{
  int il,in,jn;
  for(il=1; il<NET.Nlayer-1; il++)
    for(in=0; in<NET.Nneur[il]; in++)
      for(jn=0; jn<=NET.Nneur[il-1]; jn++)
      {
        NET.Weights[il][in][jn] = w0[il][in][jn]+
                                  alpha*dir[il][in][jn];
      }
  MLP_ResLin();
}


/***********************************************************/
/* SetLambda */
/* */
/* sets the coefficient of the regularisation term for */
/* the hybrid learning method */
/* */
/* input: double Wmax = typical maximum weight */
/* */
/* Author: J.Schwindling 13-Apr-99 */
/***********************************************************/

/* extern "C"Dllexport */
void SetLambda(double Wmax)
{
  dbl err;
  err = MLP_Test(0,0);
  LEARN.Alambda =
    LEARN.Lambda*err/(Wmax*Wmax*(dbl)(NET.Nneur[NET.Nlayer-2]+1));
}
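
/* i.e.  Alambda = Lambda * E / (Wmax^2 * Nlin)
   where E is the current error and Nlin = Nneur[Nlayer-2]+1 is the
   number of linear weights per output: the penalty is scaled so that
   weights of order Wmax contribute at the level of the error itself. */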

/***********************************************************/
/* MLP_ResLin */
/* */
/* resolution of linear system of equations for hybrid */
/* training method */
/* */
/* */
/* Author: B.Mansoulie end-98 */
/* Modified: J.Schwindling 29-APR-99 */
/* use dgels from LAPACK */
/***********************************************************/

/* extern "C"Dllexport */
void MLP_ResLin()
{
/* dbl rrans[NMAX], rrout[NMAX];*/
/* type_pat rrin[NMAX];*/
  double *HR,*dpat; //,*wlin,*SV;
  double err,lambda,lambda2;
  int Nl,M,Nhr,khr,nrhs,iret,ierr;
  int il, in, inl, ipat;
  /*register dbl a;*/ //a unused
  char Trans = 'N';

/* int rank; */
// double rcond = -1; /* use machine precision */

  lambda2 = LEARN.Alambda;

/* Nl = number of linear weights
   M = number of terms in linear system = number of examples + regularisation */
  Nl = NET.Nneur[NET.Nlayer-2] + 1;
  M = PAT.Npat[0]+Nl;

  int Lwork = 5 * M;
  double *Work = (double*) malloc((int) Lwork*sizeof(double));

/* memory allocation */
  dpat = (double*) malloc((int) M*sizeof(double));
// wlin = (double*) malloc((int) Nl*sizeof(double));
// SV = (double*) malloc((int) Nl*sizeof(double));

  Nhr = M * Nl;
  HR = (double*) malloc((int) Nhr*sizeof(double));
  err = 0.;
  for(ipat=0;ipat<PAT.Npat[0];ipat++)
  {
/* *** Filling dpat and HR *** */
/* for(in=0; in<NET.Nneur[0]; in++)
   {
     rrin[in] = PAT.Rin[0][ipat][in];
   }*/

    MLP_Out(PAT.Rin[0][ipat],NET.Outn[NET.Nlayer-1]);
/* MLP_Out(rrin,rrout);*/
    /*for(in=0; in<NET.Nneur[NET.Nlayer-1]; in++)
    {
      a = (dbl) PAT.Rans[0][ipat][in]; //a was not used
    } */
    il = NET.Nlayer-2;
    dpat[ipat] = (dbl) PAT.Rans[0][ipat][0]*sqrt(PAT.Pond[0][ipat]);
    khr = ipat;
    HR[khr] = (dbl) sqrt(PAT.Pond[0][ipat]);
    for(in=0;in<NET.Nneur[il];in++)
    {
      khr = M *(in+1) + ipat;
      HR[khr] = NET.Outn[il][in]*
                (dbl) sqrt(PAT.Pond[0][ipat]);
    }
  }
  il = NET.Nlayer-2;
  lambda = sqrt(lambda2);
  for(ipat=0;ipat<=NET.Nneur[il];ipat++)
  {
    dpat[ipat+PAT.Npat[0]] = 0;
    for(in=0;in<=NET.Nneur[il];in++)
    {
      khr = M *in + ipat + PAT.Npat[0];
      HR[khr] = 0;
      if(in==ipat) HR[khr]=lambda;
    }
  }
  if(NET.Debug>=4)
  {
    err = MLP_Test(0,0);
    printf("entry ResLin, err=MLP_Test(0,0), err= %f\n",err);
  }
/* */
/* find the linear weights by solving the linear system */
/* */
  nrhs = 1;
  ierr = dgels_(&Trans,&M,&Nl,&nrhs,HR,&M,dpat,&M,Work,
                &Lwork,&iret);
  if(iret != 0) printf("Warning from dgels: iret = %d\n",(int)iret);
  if(ierr != 0) printf("Warning from dgels: ierr = %d\n",(int)ierr);

/* ierr = dgelss_(&M,&Nl,&nrhs,HR,&M,dpat,&M,SV,&rcond,&rank,Work,&Lwork,
     &iret);
   if(iret != 0) printf("Warning from dgelss: iret = %d\n",iret);
   if(ierr != 0) printf("Warning from dgelss: ierr = %d\n",ierr);*/

  il = NET.Nlayer-1;
  for (inl=0; inl<=NET.Nneur[il-1];inl++)
  {
    NET.Weights[il][0][inl] = dpat[inl];
  }
  if(NET.Debug>=4)
  {
    err = MLP_Test(0,0);
    printf("ResLin, after dgels, err= %f\n",err);
  }
  free(Work);
  free(dpat);
// free(wlin);
  free(HR);
// free(SV);
}
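
/* MLP_ResLin fixes the last-layer (linear) weights by least squares:
   it minimizes || HR w - dpat ||^2 where row p of HR holds the hidden
   outputs for example p (first column = 1 for the bias), both scaled
   by sqrt(pond_p), plus Nl extra rows equal to sqrt(Alambda) times the
   identity for the regularisation term; LAPACK's dgels solves the
   system in place, leaving the solution in the first Nl entries of
   dpat, which are then copied into the output-layer weights. */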

/***********************************************************/
/* EtaDecay */
/* */
/* decreases the learning parameter eta by the factor */
/* LEARN.Decay */
/* */
/* Author: J.Schwindling 14-Apr-99 */
/***********************************************************/

void EtaDecay()
{
  LEARN.eta *= LEARN.Decay;
}


/***********************************************************/
/* ShuffleExamples */
/* */
/* Shuffles the learning examples (for stochastic method) */
/* */
/* Author: J.Schwindling 14-Apr-1999 */
/* Modified: J.Schwindling 21-Jul-1999 inline MLP_Rand */
/***********************************************************/

/* extern "C"Dllexport */
int ShuffleExamples(int n, int *index)
{
  int i,ii,itmp;
  dbl a = (dbl) (n-1);

  for(i=0;i<n;i++)
  {
    ii = (int) MLP_Rand(0.,a);
    itmp = index[ii];
    index[ii] = index[i];
    index[i] = itmp;
  }
  return 0;
}
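
/* Each pass swaps element i with an element drawn uniformly over the
   whole range; a strict Fisher-Yates shuffle would draw from [i, n-1]
   instead, but every permutation remains reachable this way and it is
   sufficient for decorrelating the presentation order. */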

/***********************************************************/
/* MLP_Rand */
/* */
/* Random Numbers (to initialize weights) */
/* */
/* inputs: dbl mini, dbl maxi = random number between */
/* mini and maxi */
/* return value: (double) random number */
/* */
/* Author: J.Schwindling 14-Apr-99 */
/***********************************************************/

/* extern "C"Dllexport */
double MLP_Rand(dbl mini, dbl maxi)
{
  return mini+(maxi-mini)*random()/RAND_MAX;
}
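
/* Caution: this relies on random() and RAND_MAX covering the same range
   (true on glibc, where both span 0..2^31-1); on platforms where the
   range of random() differs from RAND_MAX, rand() would be the matching
   choice. */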

/***********************************************************/
/* InitWeights */
/* */
/* initializes the weights to random numbers between */
/* -0.5 and 0.5 */
/* */
/* Author: J.Schwindling 14-Apr-99 */
/***********************************************************/

/* extern "C"Dllexport */
void InitWeights()
{
  int ilayer,ineur,i;

  for(ilayer=1;ilayer<NET.Nlayer;ilayer++)
    for(ineur=0;ineur<NET.Nneur[ilayer];ineur++)
      for(i=0;i<=NET.Nneur[ilayer-1];i++)
        NET.Weights[ilayer][ineur][i]=
          (dbl) MLP_Rand(-0.5, 0.5);
}


/***********************************************************/
/* PrintWeights */
/* */
/* Print weights on the screen */
/* */
/* Author: J.Schwindling 14-Apr-99 */
/***********************************************************/

/* extern "C"Dllexport */
void PrintWeights()
{
  int ilayer,ineur,i;

  for(ilayer=1; ilayer<NET.Nlayer; ilayer++)
  {
    if(MessLang==1)
    {
      printf("Couche %d\n",ilayer);
    }
    else
    {
      printf("Layer %d\n",ilayer);
    }
    for(ineur=0; ineur<NET.Nneur[ilayer]; ineur++)
    {
      if(MessLang==1)
      {
        printf("Neurone %d",ineur);
      }
      else
      {
        printf("Neuron %d",ineur);
      }
      for(i=0; i<=NET.Nneur[ilayer-1]; i++)
      {
        printf(" %f",
               (double) NET.Weights[ilayer][ineur][i]);
      }
      printf("\n");
    }
    printf("\n");
  }
}
2202 
2203 
2204 /***********************************************************/
2205 /* ReadPatterns */
2206 /* */
2207 /* parser for learn.pat or test.pat files */
2208 /* */
2209 /* inputs: char *filename = name of the file to read */
2210 /* int ifile = 0: learning examples */
2211 /* 1: test examples */
2212 /* */
2213 /* outputs: int *inet = 1 if a network is defined */
2214 /* int *ilearn = 1 if a learning method is defined*/
2215 /* int *iexamples = 1 if examples are defined */
2216 /* */
2217 /* return value (int) = 0: no error */
2218 /* = -1: file could not be opened */
2219 /* */
2220 /* Author: J.Schwindling 20-Apr-99 */
2221 /* Modified: J.Schwindling 01-Jun-99 return inet, ilearn */
2222 /* iexamples */
2223 /* J.Schwindling 21-Sep-99 return value = -1 */
2224 /***********************************************************/
2225 
2226 #define CLEN 1024
2227 
2228 /* extern "C"Dllexport */
2229 int ReadPatterns(char *filename, int ifile,
2230  int *inet, int *ilearn, int *iexamples)
2231 {
2232 char s[CLEN], s2[CLEN], cc[6], cc2[6];
2233 char otherfile[CLEN];
2234 double p;
2235 //int line,i,j;
2236 int line,i;
2237 //int l,ll,ipat,nmax,il,in,tf;
2238 int l,ll,ipat,nmax;
2239 int np=0; /* number of examples */
2240 int nin=0; /* number of inputs */
2241 int nout=0; /* number of outputs */
2242 int npon=0;
2243 int ntot, ierr;
2244 //char **ss;
2245 char **ss=nullptr;
2246 FILE *LVQpat;
2247 int nlayer, nneur[NLMAX];
2248 
2249 printf("\nLoading file %s\n",filename);
2250 LVQpat=fopen(filename,"r");
2251 if(LVQpat == nullptr) return -1;
2252 
2253 line=0;
2254 
2255 while(fgets(s,CLEN,LVQpat))
2256  {
2257  if(*s=='N')
2258  {
2259  if(*(s+1)=='N') /* NNEU */
2260  {
2261  printf("Number of neurons %s",s);
2262  *inet = 1;
2263  sscanf(s,"%s %s",cc,s2);
2264  ierr = GetNetStructure(s2,&nlayer,nneur);
2265  if(ierr != 0) return ierr;
2266  ierr = MLP_SetNet(&nlayer,nneur);
2267  if(ierr != 0) return ierr;
2268  }
2269  else
2270  {
2271  sscanf(s,"%s %d",cc,&l);
2272  if(*(cc+1)=='P') /* NPAT */
2273  {
2274  np=l;
2275  printf("Number of patterns %d\n",np);
2276  }
2277  else if(*(cc+1)=='I') /* NINP */
2278  {
2279  nin=l;
2280  PAT.Nin = nin;
2281  printf("Number of inputs %d\n",nin);
2282  }
2283  else if(*(cc+1)=='O' && *(cc+2)=='U') /* NOUT */
2284  {
2285  nout=l;
2286  PAT.Nout = nout;
2287  printf("Number of outputs %d\n",nout);
2288  }
2289  else if(*(cc+1)=='O' && *(cc+2)=='R') /* NORM */
2290  {
2291  DIVERS.Norm=l;
2292  if(l==1) printf("Normalize inputs\n");
2293  }
2294 /* obsolete datacard NLAY */
2295  else if(*(cc+1)=='L')
2296  {
2297  printf("NLAY datacard is no longer needed\n");
2298  }
2299  else if(*(cc+1)=='E') /* NEPO */
2300  {
2301  LEARN.Nepoch=l;
2302  printf("Number of epochs %d\n",l);
2303  }
2304  else if(*(cc+1)=='R') /* NRES */
2305  {
2306  LEARN.Nreset=l;
2307  printf(
2308  "Reset to steepest descent every %d epochs\n",
2309  l);
2310  }
2311  }
2312  }
2313  else if(*s=='L')
2314  {
2315  if(*(s+1)=='P') /* LPAR */
2316  {
2317  sscanf(s,"%s %le",cc,&p);
2318  printf("Learning parameter %f\n",p);
2319  LEARN.eta = (dbl) p;
2320  }
2321  else if(*(s+1)=='M') /* LMET */
2322  {
2323  *ilearn = 1;
2324  sscanf(s,"%s %d",cc,&(LEARN.Meth));
2325  printf("Learning method = ");
2326  switch(LEARN.Meth)
2327  {
2328  case 1: printf("Stochastic Minimization\n");
2329  break;
2330  case 2: printf("Steepest descent with fixed step\n");
2331  break;
2332  case 3: printf("Steepest descent with line search\n"); break;
2333  case 4: printf("Polak-Ribiere Conjugate Gradients\n"); break;
2334  case 5: printf("Fletcher-Reeves Conjugate Gradients\n");
2335  break;
2336  case 6: printf("BFGS\n");
2337  break;
2338  case 7: printf("Hybrid BFGS-linear\n");
2339  break;
2340  default: printf("Error: unknown method\n"); break;
2341  }
2342 
2343  }
2344  else if(*(s+1)=='T') /* LTAU */
2345  {
2346  sscanf(s,"%s %lf",cc,&p);
2347  printf("Tau %f\n",p);
2348  LEARN.Tau = (dbl) p;
2349  }
2350  else if(*(s+1)=='A') /* LAMB */
2351  {
2352  sscanf(s,"%s %lf",cc,&p);
2353  printf("Lambda %f\n",p);
2354  LEARN.Lambda = (dbl) p;
2355  }
2356  }
2357  else if(*s=='F')
2358  {
2359  if(*(s+1)=='S') /* FSPO */
2360  {
2361  sscanf(s,"%s %le",cc,&p);
2362  printf("Flat spot elimination parameter %f\n",p);
2363  LEARN.delta = (dbl) p;
2364  }
2365  else if(*(s+1)=='I') /* FILE */
2366  {
2367  sscanf(s,"%s %s",cc,otherfile);
2368  ierr = ReadPatterns(otherfile,ifile, inet, ilearn, iexamples);
2369  if(ierr != 0) return ierr;
2370  }
2371  }
2372  else if(*s=='M') /* momentum */
2373  {
2374  sscanf(s,"%s %le",cc,&p);
2375  printf("Momentum term %f\n",p);
2376  LEARN.epsilon = (dbl) p;
2377  }
2378  else if(*s=='O') /* OUTx */
2379  {
2380  if(*(s+3)=='W') /* OUTW */
2381  {
2382  sscanf(s,"%s %d",cc,&OutputWeights);
2383  if(OutputWeights == 0)
2384  {
2385  printf("Never write file weights.out\n");
2386  }
2387  else if(OutputWeights == -1)
2388  {
2389  printf("Write weights to output file at the end\n");
2390  }
2391  else
2392  {
2393  printf("Write weights to file every %d epochs\n",
2394  OutputWeights);
2395  }
2396  }
2397  else if(*(s+3)=='F') /* OUTF */
2398  {
2399  sscanf(s,"%s %s",cc,cc2);
2400  if(*cc2=='F' || *cc2=='C')
2401  {
2402  DIVERS.Outf = *cc2;
2403  }
2404  else
2405  {
2406  printf(" *** Error while loading file %s at line %s :",
2407  filename,s);
2408  printf(" unknown language\n");
2409  }
2410  }
2411  else
2412  {
2413  printf(" *** Error while loading file %s at line %s\n",
2414  filename,s);
2415  }
2416  }
2417  else if(*s=='R') /* RDWT */
2418  {
2419  sscanf(s,"%s %d",cc,&(NET.Rdwt));
2420  if(NET.Rdwt == 0)
2421  {
2422  printf("Random weights \n");
2423  }
2424  else
2425  {
2426  printf("Read weights from file weights.in\n");
2427  }
2428  }
2429  else if(*s=='S') /* STAT */
2430  {
2431  sscanf(s,"%s %d",cc,&(DIVERS.Stat));
2432  }
2433 /* else if(*s=='T') TFUN
2434  {
2435  sscanf(s,"%s %d %d %d",cc,&il,&in,&tf);
2436  SetTransFunc(il,in,tf);
2437  } */
2438  else if(*s=='H') /* HESS */
2439  {
2440  sscanf(s,"%s %d",cc,&(DIVERS.Ihess));
2441  }
2442  else if(*s=='D')
2443  {
2444  if(*(s+1)=='C') /* DCAY */
2445  {
2446  sscanf(s,"%s %le",cc,&p);
2447  LEARN.Decay = p;
2448  printf("Learning parameter decay %f\n",
2449  (double) LEARN.Decay);
2450  }
2451  if(*(s+1)=='B') /* DBIN */
2452  {
2453  sscanf(s,"%s %d",cc,&(DIVERS.Dbin));
2454  printf("Fill histogram every %d epochs\n",DIVERS.Dbin);
2455  }
2456  if(*(s+1)=='E') /* DEBU */
2457  {
2458  sscanf(s,"%s %d",cc,&(NET.Debug));
2459  printf("Debug mode %d\n",NET.Debug);
2460  }
2461  }
2462  else if(*s=='P') /* POND */
2463  {
2464  npon = CountLexemes(s);
2465  if(npon==2)
2466  {
2467  sscanf(s,"%s %d",cc,&(PAT.Iponde));
2468  }
2469  else
2470  {
2471  ss = (char**) malloc((npon+1)*sizeof(char*));
2472  for(i=0;i<=npon;i++)
2473  ss[i]=(char*) malloc(40*sizeof(char));
2474  getnLexemes(npon,s,ss);
2475  sscanf(ss[1],"%d",&(PAT.Iponde));
2476  for(i=2;i<npon;i++)
2477  {
2478  sscanf(ss[i],"%le",&(PAT.Ponds[i-2]));
2479  }
2480  }
2481  if(PAT.Iponde==0)
2482  {
2483  npon = 0;
2484  }
2485  else
2486  {
2487  npon = 1;
2488  }
2489  }
2490  else if(*s=='#') /* comments */
2491  {
2492  }
2493  else /* exemple itself */
2494  {
2495  if(np==0) return 1;
2496  if(nin==0) return 2;
2497  if(nout==0) return 3;
2498 
2499 
2500 /* store number of exemples and allocate memory*/
2501  if(line==0)
2502  {
2503  PAT.Npat[ifile] = np;
2504  ierr = AllocPatterns(ifile,np,nin,nout,0);
2505  if(ierr != 0) return ierr;
2506  *iexamples = 1;
2507  }
2508 
2509 /* now get exemple */
2510 
2511  line++;
2512  ll = (line-1)%2;
2513  ipat = (line-1)/2;
2514  /* printf("Loading event \t %d\r",ipat);*/
2515 /* if(ipat>NPMAX)
2516  {
2517  printf("Too many examples in file\n");
2518  printf("Loading %d examples\n",NPMAX);
2519  PAT.Npat[ifile] = NPMAX;
2520  break;
2521  }
2522 */
2523 
2524 /* allocate the number of lines */
2525 
2526  if(line==1)
2527  {
2528 
2529  nmax = nin;
2530  if(nout>nin) nmax=nout;
2531  ss = (char**) malloc((nmax+1)*sizeof(char*));
2532  if(ss == nullptr) return -111;
2533  for(i=0;i<=nmax;i++)
2534  {
2535  ss[i]=(char*) malloc(40*sizeof(char));
2536  if(ss[i] == nullptr) return -111;
2537  }
2538  }
2539 
2540  if(ll==0) /* inputs */
2541  {
2542  getnLexemes(nin,s,ss);
2543  for(i=0;i<nin;i++)
2544  {
2545  sscanf(ss[i],"%le",&p);
2546  PAT.Rin[ifile][ipat][i] = (type_pat) p;
2547  }
2548  }
2549  else /* answers */
2550  {
2551  ntot=nout+npon;
2552  getnLexemes(ntot,s,ss);
2553  for(i=0;i<ntot;i++)
2554  {
2555  sscanf(ss[i],"%le",&p);
2556  if(i<nout)
2557  {
2558  PAT.Rans[ifile][ipat][i] = (type_pat) p;
2559  }
2560  else
2561  {
2562  if(PAT.Iponde==1)
2563  {
2564  PAT.Pond[ifile][ipat] =
2565  (type_pat) p;
2566  }
2567  else
2568  {
2569  PAT.Pond[ifile][ipat] =
2570  (type_pat) PAT.Ponds[(int) p -1];
2571  }
2572  }
2573  }
2574  }
2575  }
2576  }
2577  printf("%d examples loaded \n\n",PAT.Npat[ifile]);
2578  fclose(LVQpat);
2579  return 0;
2580 }
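
For a concrete picture of what this parser accepts, here is a minimal hypothetical learn.pat file (an invented XOR example): datacards first, then for each pattern one line of inputs followed by one line of outputs. Lines starting with # are comments; any other unrecognized line, including a blank one, is consumed as example data, so NPAT, NINP and NOUT must appear before the first example and blank lines must be avoided:

    # hypothetical XOR training file
    NINP 2
    NOUT 1
    NPAT 4
    NNEU 2,3,1
    LMET 7
    LPAR 0.5
    0 0
    0
    0 1
    1
    1 0
    1
    1 1
    0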
2581 
2582 
2583 /* CountLexemes returns the number of lexemes in s separated by blanks.*/
2584 /* extern "C"Dllexport */
2585 int CountLexemes(char *s)
2586 {
2587  char tmp[1024];
2588  int i=0;
2589 
2590  strcpy(tmp,s);
2591  char* saveptr;
2592  if (strtok_r(tmp," ",&saveptr))
2593  {
2594  i=1;
2595  while (strtok_r(nullptr," ",&saveptr)) i++;
2596  }
2597  return i;
2598 }
2599 
2600 /* splits s in substrings ss separated by blanks*/
2601 /* extern "C"Dllexport */
2602 void getnLexemes(int n, char *s, char **ss)
2603 {
2604  char tmp[1024];
2605  int i;
2606  strcpy(tmp,s);
2607  if (n>0)
2608  {
2609  char* saveptr;
2610  strcpy(ss[0],strtok_r(tmp," ",&saveptr));
2611  for (i=1;i<n;i++)
2612  strcpy(ss[i],strtok_r(nullptr," ",&saveptr));
2613  }
2614 }
2615 
2616 /* extern "C"Dllexport */
2617 void getLexemes(char *s,char **ss)
2618 {
2619  char tmp[1024];
2620  int i,n;
2621 
2622  strcpy(tmp,s);
2623  n=CountLexemes(tmp);
2624  if (n>0)
2625  {
2626  char* saveptr;
2627  strcpy(ss[0],strtok_r(tmp," ",&saveptr));
2628  for (i=1;i<n;i++)
2629  strcpy(ss[i],strtok_r(nullptr," ",&saveptr));
2630  }
2631 }
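
A standalone sketch (hypothetical) of the tokenization these helpers perform, using the same strtok_r idiom on a copy of the line:

    #include <stdio.h>
    #include <string.h>

    int main(void)
    {
        char line[] = "POND 1 2.5 0.5";
        char tmp[1024], *tok, *saveptr;
        int n = 0;

        strcpy(tmp, line);                /* strtok_r modifies its argument */
        for (tok = strtok_r(tmp, " ", &saveptr); tok != NULL;
             tok = strtok_r(NULL, " ", &saveptr))
        {
            printf("lexeme %d: %s\n", n, tok);
            n++;
        }
        printf("%d lexemes\n", n);        /* prints: 4 lexemes */
        return 0;
    }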
2632 
2633 
2634 /***********************************************************/
2635 /* LearnFree */
2636 /* */
2637 /* frees memory allocated for learning */
2638 /* */
2639 /* Author: J.Schwindling 28-May-99 */
2640 /***********************************************************/
2641 
2642 /* extern "C"Dllexport */
2643 void LearnFree()
2644 {
2645  int il,in;
2646  if(LearnMemory==0) return;
2647  LearnMemory = 0;
2648  for(il=0; il<NET.Nlayer; il++)
2649  {
2650  for(in=0; in<NET.Nneur[il]; in++)
2651  {
2652  free(dir[il][in]);
2653  }
2654  free(dir[il]);
2655  }
2656  free(dir);
2657  if(BFGSMemory==0) return;
2658  BFGSMemory = 0;
2659  for(il=0; il<NET.Nweights; il++)
2660  {
2661  free(BFGSH[il]);
2662  }
2663  free(BFGSH);
2664  free(Gamma);
2665  free(delta);
2666 
2667 /* if(JacobianMemory == 0) return;
2668  JacobianMemory = 0;
2669  for(il=0; il<PAT.Npat[0]; il++) free(JacobianMatrix[il]);
2670  free(JacobianMatrix); */
2671 }
2672 
2673 
2674 /***********************************************************/
2675 /* LearnAlloc */
2676 /* */
2677 /* memory allocation for vectors and matrices used in */
2678 /* conjugate gradients or BFGS like methods */
2679 /* */
2680 /* return value (int) = error code: 0 no error */
2681 /* -111 no memory */
2682 /* */
2683 /* Author: J.Schwindling 20-Apr-99 */
2684 /* Modified: J.Schwindling 31-Jan-2000 error code */
2685 /***********************************************************/
2686 
2687 /* extern "C"Dllexport */
2688 int LearnAlloc()
2689 {
2690  int il,in,i;
2691  int Nweights = 0;
2692 
2693  if(LearnMemory != 0) LearnFree();
2694  LearnMemory = 1;
2695  dir = (dbl ***) malloc(NET.Nlayer*sizeof(dbl**));
2696  if(dir == nullptr) return -111;
2697 
2698  for(il=0; il<NET.Nlayer; il++)
2699  {
2700  dir[il] = (dbl **) malloc(NET.Nneur[il]*sizeof(dbl*));
2701  if(dir[il] == nullptr) return -111;
2702  for(in=0; in<NET.Nneur[il]; in++)
2703  {
2704  if(il==0)
2705  {
2706 /* TODO: understand implications of hard-coded 101 */
2707  dir[0][in] = (dbl *)
2708  malloc(101*sizeof(dbl));
2709  if(dir[0][in] == nullptr) return -111;
2710  }
2711  else
2712  {
2713  dir[il][in] = (dbl *)
2714  malloc((NET.Nneur[il-1]+1)*sizeof(dbl));
2715  if(dir[il][in] == nullptr) return -111;
2716  Nweights += NET.Nneur[il-1]+1;
2717  }
2718  }
2719  }
2720  NET.Nweights = Nweights;
2721 
2722  if(BFGSMemory==0 && LEARN.Meth>= 6)
2723  {
2724  BFGSMemory = 1;
2725  Gamma = (dbl*) malloc(Nweights*sizeof(dbl));
2726  delta = (dbl*) malloc(Nweights*sizeof(dbl));
2727  BFGSH = (dbl**) malloc(Nweights*sizeof(dbl*));
2728  if(Gamma == nullptr || delta == nullptr || BFGSH == nullptr)
2729  return -111;
2730 
2731  for(i=0; i<Nweights; i++)
2732  {
2733  BFGSH[i] = (dbl*) malloc(Nweights*sizeof(dbl));
2734  if(BFGSH[i] == nullptr) return -111;
2735  }
2736  }
2737 
2738 /* if(JacobianMemory==0)
2739  {
2740  JacobianMemory = 1;
2741  printf("JacobianMemory = %d\n",JacobianMemory);
2742  JacobianMatrix = (dbl **) malloc(PAT.Npat[0]*sizeof(dbl *));
2743  for(i=0; i<PAT.Npat[0]; i++)
2744  JacobianMatrix[i] =
2745  (dbl*) malloc(Nweights*sizeof(dbl));
2746  printf("end memory alloc\n");
2747  }
2748 
2749  if(DIVERS.Ihess==1) HessianAlloc(Nweights);*/
2750 
2751  return 0;
2752 }
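
The Nweights accumulation above counts, for each non-input layer, one weight per neuron of the previous layer plus one bias weight per neuron. A standalone restatement (hypothetical helper name):

    /* total number of weights of a fully connected MLP with biases,
       matching the Nweights accumulation in LearnAlloc */
    int mlp_count_weights(int nlayer, const int *nneur)
    {
        int il, nw = 0;
        for (il = 1; il < nlayer; il++)
            nw += nneur[il] * (nneur[il - 1] + 1);   /* +1 for the bias */
        return nw;
    }

For a 2-3-1 network this gives 3*(2+1) + 1*(3+1) = 13 weights.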
2753 
2754 
2755 /***********************************************************/
2756 /* MLP_PrFFun */
2757 /* */
2758 /* writes the MLP function to file as a fortran function */
2759 /* */
2760 /* inputs : char *filename = name of the output file */
2761 /* */
2762 /* return value (int) = 0: no error */
2763 /* -1: could not open file */
2764 /* */
2765 /* Author: J.Schwindling 20-Apr-99 */
2766 /* Modified: J.Schwindling 05-May-99 */
2767 /* add normalization of inputs */
2768 /* J.Schwindling 30-Nov-99 return code */
2769 /***********************************************************/
2770 
2771 /* extern "C"Dllexport */
2772 int MLP_PrFFun(char *filename)
2773 {
2774  int il,in,jn;
2775  FILE *W;
2776 
2777  W=fopen(filename,"w");
2778  if(W==nullptr) return -1;
2779  fprintf(W," SUBROUTINE RNNFUN(rin,rout)\n");
2780  fprintf(W," DIMENSION RIN(%d)\n",NET.Nneur[0]);
2781  fprintf(W," DIMENSION ROUT(%d)\n",NET.Nneur[NET.Nlayer-1]);
2782  fprintf(W,"C\n");
2783 
2784  for(in=0; in<NET.Nneur[0]; in++)
2785  {
2786  if(DIVERS.Norm==0)
2787  {
2788  fprintf(W," OUT%d = RIN(%d)\n",in+1,in+1);
2789  }
2790  else
2791  {
2792  fprintf(W," OUT%d = (RIN(%d)-%e)/%e\n",in+1,in+1,
2793  STAT.mean[in],STAT.sigma[in]);
2794  }
2795  }
2796  for(il=1; il<NET.Nlayer-1; il++)
2797  {
2798  fprintf(W,"C\n");
2799  fprintf(W,"C layer %d\n",il+1);
2800  for(in=0; in<NET.Nneur[il]; in++)
2801  {
2802  fprintf(W," RIN%d = %e\n",in+1,
2803  (double) NET.Weights[il][in][0]);
2804  for(jn=1;jn<=NET.Nneur[il-1]; jn++)
2805  fprintf(W," > +(%e) * OUT%d\n",
2806  (double) NET.Weights[il][in][jn],jn);
2807  }
2808  fprintf(W,"C\n");
2809  for(in=0; in<NET.Nneur[il]; in++)
2810  {
2811  if(NET.T_func[il][in]==0)
2812  {
2813  fprintf(W," OUT%d = 0\n",in+1);
2814  }
2815  else if(NET.T_func[il][in]==1)
2816  {
2817  fprintf(W," OUT%d = RIN%d\n",in+1,in+1);
2818  }
2819  else if(NET.T_func[il][in]==2)
2820  {
2821  fprintf(W," OUT%d = SIGMOID(RIN%d)\n",
2822  in+1,in+1);
2823  }
2824  }
2825  }
2826  il = NET.Nlayer-1;
2827  fprintf(W,"C\n");
2828  fprintf(W,"C layer %d\n",il+1);
2829  for(in=0; in<NET.Nneur[il]; in++)
2830  {
2831  fprintf(W," RIN%d = %e\n",in+1,
2832  (double) NET.Weights[il][in][0]);
2833  for(jn=1;jn<=NET.Nneur[il-1]; jn++)
2834  fprintf(W," > +(%e) * OUT%d\n",
2835  (double) NET.Weights[il][in][jn],jn);
2836  }
2837  fprintf(W,"C\n");
2838  for(in=0; in<NET.Nneur[il]; in++)
2839  {
2840  if(NET.T_func[il][in]==0)
2841  {
2842  fprintf(W," ROUT(%d) = 0\n",in+1);
2843  }
2844  else if(NET.T_func[il][in]==1)
2845  {
2846  fprintf(W," ROUT(%d) = RIN%d\n",in+1,in+1);
2847  }
2848  else if(NET.T_func[il][in]==2)
2849  {
2850  fprintf(W," ROUT(%d) = SIGMOID(RIN%d)\n",
2851  in+1,in+1);
2852  }
2853  }
2854 
2855  fprintf(W,"C\n");
2856  fprintf(W," END\n");
2857  fprintf(W," REAL FUNCTION SIGMOID(X)\n");
2858  fprintf(W," SIGMOID = 1./(1.+EXP(-X))\n");
2859  fprintf(W," END\n");
2860 
2861  fclose(W);
2862  return 0;
2863 }
2864 
2865 
2866 /***********************************************************/
2867 /* MLP_PrCFun */
2868 /* */
2869 /* writes the MLP function to file as a C function */
2870 /* */
2871 /* inputs : char *filename = name of the output file */
2872 /* */
2873 /* return value (int) = 0: no error */
2874 /* -1: could not open file */
2875 /* */
2876 /* Author: J.Schwindling 23-Apr-99 */
2877 /* Modified: J.Schwindling 30-Nov-99 return code */
2878 /***********************************************************/
2879 
2880 /* extern "C"Dllexport */
2881 int MLP_PrCFun(char *filename)
2882 {
2883  int il,in,jn;
2884  FILE *W;
2885 
2886  W=fopen(filename,"w");
2887  if(W==nullptr) return -1;
2888 
2889  fprintf(W,"double sigmoid(double x)\n");
2890  fprintf(W,"{\n");
2891  fprintf(W,"return 1/(1+exp(-x));\n");
2892  fprintf(W,"}\n");
2893  fprintf(W,"void rnnfun(double *rin,double *rout)\n");
2894  fprintf(W,"{\n");
2895  fprintf(W," double out1[%d];\n",NET.Nneur[0]);
2896  fprintf(W," double out2[%d];\n",NET.Nneur[1]);
2897  if(NET.Nlayer>=3) fprintf(W," double out3[%d];\n",NET.Nneur[2]);
2898  if(NET.Nlayer>=4) fprintf(W," double out4[%d];\n",NET.Nneur[3]);
2899  fprintf(W,"\n");
2900 
2901  for(in=0; in<NET.Nneur[0]; in++)
2902  {
2903  if(DIVERS.Norm==0)
2904  {
2905  fprintf(W," out1[%d] = rin[%d];\n",in,in);
2906  }
2907  else
2908  {
2909  fprintf(W," out1[%d] = (rin[%d]-%e)/%e;\n",
2910  in,in,
2911  STAT.mean[in],STAT.sigma[in]);
2912  }
2913  }
2914 
2915  for(il=1; il<=NET.Nlayer-1; il++)
2916  {
2917  fprintf(W,"\n");
2918  fprintf(W,"/* layer %d */\n",il+1);
2919  for(in=0; in<NET.Nneur[il]; in++)
2920  {
2921  fprintf(W," out%d[%d] = %e\n",il+1,in,
2922  (double) NET.Weights[il][in][0]);
2923  for(jn=1;jn<=NET.Nneur[il-1]; jn++)
2924  fprintf(W," +(%e) * out%d[%d]\n",
2925  (double) NET.Weights[il][in][jn],il,jn-1);
2926  fprintf(W," ;\n");
2927  }
2928  fprintf(W,"\n");
2929  for(in=0; in<NET.Nneur[il]; in++)
2930  {
2931  if(NET.T_func[il][in]==0)
2932  {
2933  fprintf(W," out%d[%d] = 0;\n",il+1,in);
2934  }
2935  else if(NET.T_func[il][in]==1)
2936  {
2937  }
2938  else if(NET.T_func[il][in]==2)
2939  {
2940  fprintf(W," out%d[%d] = sigmoid(out%d[%d]);\n",
2941  il+1,in,il+1,in);
2942  }
2943  }
2944  }
2945  il = NET.Nlayer-1;
2946  for(in=0; in<NET.Nneur[il]; in++)
2947  {
2948  fprintf(W," rout[%d] = out%d[%d];\n",in,il+1,in);
2949  }
2950  fprintf(W,"}\n");
2951  fclose(W);
2952  return 0;
2953 }
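
To make the generated code concrete, this is roughly what MLP_PrCFun would write for a hypothetical 2-2-1 network with sigmoid hidden neurons, a linear output neuron, no input normalization, and invented weight values (when compiled, the generated file additionally needs math.h for exp):

    double sigmoid(double x)
    {
    return 1/(1+exp(-x));
    }
    void rnnfun(double *rin,double *rout)
    {
     double out1[2];
     double out2[2];
     double out3[1];

     out1[0] = rin[0];
     out1[1] = rin[1];

    /* layer 2 */
     out2[0] = 1.000000e-01
     +(2.000000e-01) * out1[0]
     +(3.000000e-01) * out1[1]
     ;
     out2[1] = -1.000000e-01
     +(4.000000e-01) * out1[0]
     +(5.000000e-01) * out1[1]
     ;

     out2[0] = sigmoid(out2[0]);
     out2[1] = sigmoid(out2[1]);

    /* layer 3 */
     out3[0] = 2.000000e-01
     +(6.000000e-01) * out2[0]
     +(7.000000e-01) * out2[1]
     ;

     rout[0] = out3[0];
    }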
2954 
2955 
2956 /***********************************************************/
2957 /* SaveWeights */
2958 /* */
2959 /* writes the weights to file */
2960 /* */
2961 /* inputs : char *filename = name of the output file */
2962 /* int iepoch = epoch number */
2963 /* */
2964 /* return value (int): 0 if OK */
2965 /* -1 if file could not be opened */
2966 /* */
2967 /* Author: J.Schwindling 20-Apr-99 */
2968 /* Modified: J.Schwindling 11-Jun-99 */
2969 /* print net structure in header */
2970 /* Modified: J.Schwindling 05-Nov-99 */
2971 /* return error code */
2972 /***********************************************************/
2973 
2974 /* extern "C"Dllexport */
2975 int SaveWeights(char *filename, int iepoch)
2976 {
2977  FILE *W;
2978  int ilayer,ineur,i;
2979 
2980  W=fopen(filename,"w");
2981  if(W==nullptr) return -1;
2982 
2983  fprintf(W,"# network structure ");
2984  for(ilayer=0; ilayer<NET.Nlayer; ilayer++)
2985  {
2986  fprintf(W,"%d ",NET.Nneur[ilayer]);
2987  }
2988 
2989  fprintf(W,"\n %d\n",iepoch);
2990  for(ilayer=1; ilayer<NET.Nlayer; ilayer++)
2991  {
2992  for(ineur=0; ineur<NET.Nneur[ilayer]; ineur++)
2993  {
2994  for(i=0; i<=NET.Nneur[ilayer-1]; i++)
2995  {
2996  fprintf(W," %1.15e\n",
2997  (double) NET.Weights[ilayer][ineur][i]);
2998  }
2999  }
3000  }
3001  fclose(W);
3002  return 0;
3003 }
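
For reference, the file SaveWeights would write for a hypothetical 2-2-1 network at epoch 100 (weight values invented) looks as follows; it is exactly what LoadWeights expects back, each non-input neuron contributing its bias weight followed by one weight per neuron of the previous layer:

    # network structure 2 2 1 
     100
     1.000000000000000e-01
     2.000000000000000e-01
     3.000000000000000e-01
     -1.000000000000000e-01
     4.000000000000000e-01
     5.000000000000000e-01
     2.000000000000000e-01
     6.000000000000000e-01
     7.000000000000000e-01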
3004 
3005 
3006 /***********************************************************/
3007 /* LoadWeights */
3008 /* */
3009 /* reads the weights from file */
3010 /* */
3011 /* input : char *filename = name of the input file */
3012 /* output : int *iepoch = epoch number */
3013 /* */
3014 /* return value (int): 0 if OK */
3015 /* -1 if file could not be opened */
3016 /* */
3017 /* Author: J.Schwindling 20-Apr-99 */
3018 /* Modified: J.Schwindling 11-Jun-99 */
3019 /* lines starting with # are skipped */
3020 /* Modified: J.Schwindling 05-Nov-99 */
3021 /* return error code */
3022 /***********************************************************/
3023 
3024 /* extern "C"Dllexport */
3025 int LoadWeights(char *filename, int *iepoch)
3026 {
3027  FILE *W;
3028  int ilayer,ineur,i;
3029  double p;
3030  char s[80];
3031 
3032  W=fopen(filename,"r");
3033  if(W==nullptr) return -1;
3034  do
3035  {
3036  if(fgets(s,80,W) == nullptr) { fclose(W); return -1; } /* guard against empty file */
3037  }
3038  while(*s == '#');
3039  sscanf(s," %d",iepoch);
3040  for(ilayer=1; ilayer<NET.Nlayer; ilayer++)
3041  {
3042  for(ineur=0; ineur<NET.Nneur[ilayer]; ineur++)
3043  {
3044  for(i=0; i<=NET.Nneur[ilayer-1]; i++)
3045  {
3046  fscanf(W," %le",&p);
3047  NET.Weights[ilayer][ineur][i] = (dbl) p;
3048  }
3049  }
3050  }
3051 
3052  fclose(W);
3053  return 0;
3054 }
3055 
3056 
3057 /***********************************************************/
3058 /* AllocPatterns */
3059 /* */
3060 /* allocate memory for the examples */
3061 /* */
3062 /* input : int ifile = file number (0 or 1) */
3063 /* int npat = number of examples */
3064 /* int nin = number of inputs */
3065 /* int nout = number of outputs */
3066 /* int iadd = 0: new examples */
3067 /* 1: add to existing ones */
3068 /* */
3069 /* return value (int) = error code: 0 = no error */
3070 /* 1 = wrong file number */
3071 /* -111 = no memory */
3072 /* */
3073 /* Author: J.Schwindling 21-Apr-99 */
3074 /* Modified: J.Schwindling 26-Apr-99 */
3075 /* - frees memory if already booked and iadd=0 */
3076 /* (should remove need to call mlpfree) */
3077 /* - implement iadd = 1 */
3078 /***********************************************************/
3079 
3080 /* extern "C"Dllexport */int AllocPatterns(int ifile, int npat, int nin, int nout, int iadd)
3081 {
3082  int j;
3083  type_pat *tmp, *tmp3;
3084  type_pat **tmp2;
3085  int ntot;
3086 
3087  if(ifile>1 || ifile<0) return(1);
3088 /* scanf("%d",&j); */
3089  if(ExamplesMemory==0)
3090  {
3091  ExamplesMemory=1;
3092  PAT.Pond = (type_pat **) malloc(2*sizeof(dbl*));
3093  PAT.Rin = (type_pat***) malloc(2*sizeof(type_pat**));
3094  PAT.Rans = (type_pat***) malloc(2*sizeof(type_pat**));
3095  PAT.vRin = (type_pat**) malloc(2*sizeof(type_pat*));
3096  if(PAT.Pond == nullptr || PAT.Rin == nullptr
3097  || PAT.Rans == nullptr || PAT.vRin == nullptr) return -111;
3098  }
3099 
3100 
3101 /* if iadd=0, check that memory not already allocated. Otherwise free it */
3102  if(iadd==0 && PatMemory[ifile]!=0)
3103  {
3104  FreePatterns(ifile);
3105  }
3106 
3107 /* allocate memory and initialize ponderations */
3108  if(iadd==0 || PatMemory[ifile]==0)
3109  {
3110  PatMemory[ifile] = 1;
3111  PAT.Pond[ifile] = (type_pat*) malloc(npat*sizeof(type_pat));
3112  if(PAT.Pond[ifile] == nullptr) return -111;
3113  for(j=0; j<npat; j++)
3114  PAT.Pond[ifile][j] = 1;
3115 
3116  PAT.Rin[ifile] = (type_pat**) malloc(npat*sizeof(type_pat*));
3117  if(PAT.Rin[ifile] == nullptr) return -111;
3118  PAT.Rans[ifile] = (type_pat**) malloc(npat*sizeof(type_pat*));
3119  if(PAT.Rans[ifile] == nullptr) return -111;
3120 
3121  PAT.vRin[ifile] = (type_pat *) malloc(npat*(nin+1)*
3122  sizeof(type_pat));
3123  if(PAT.vRin[ifile] == nullptr) return -111;
3124 
3125  for(j=0; j<npat; j++)
3126  {
3127  PAT.Rin[ifile][j] = &(PAT.vRin[ifile][j*(nin+1)+1]);
3128  PAT.vRin[ifile][j*(nin+1)] = 1;
3129  }
3130  for(j=0; j<npat; j++)
3131  {
3132  PAT.Rans[ifile][j] = (type_pat*) malloc(nout*sizeof(type_pat));
3133  if(PAT.Rans[ifile][j] == nullptr) return -111;
3134  }
3135  PAT.Npat[ifile] = npat;
3136 
3137  if(ifile==0)
3138  {
3139  ExamplesIndex = (int *) malloc(npat*sizeof(int));
3140  if(ExamplesIndex == nullptr) return -111;
3141  for(j=0; j<npat; j++) ExamplesIndex[j] = j;
3142  }
3143  }
3144  else /* add examples */
3145  {
3146  ntot = PAT.Npat[ifile]+npat;
3147 
3148 /* event weighting */
3149  tmp = (type_pat *) malloc(ntot*sizeof(type_pat));
3150  if(tmp == nullptr) return -111;
3151 
3152  for(j=0; j<PAT.Npat[ifile]; j++)
3153  {
3154  tmp[j] = PAT.Pond[ifile][j];
3155  }
3156  for(j=PAT.Npat[ifile];j<ntot;j++)
3157  {
3158  tmp[j] = 1;
3159  }
3160  if(PatMemory[ifile]==1) free(PAT.Pond[ifile]);
3161  PAT.Pond[ifile] = tmp;
3162 
3163 /* examples */
3164 /* tmp2 = (type_pat **) malloc(ntot*sizeof(type_pat*));
3165  for(j=0; j<PAT.Npat[ifile]; j++)
3166  {
3167  tmp2[j] = PAT.Rin[ifile][j];
3168  }
3169  for(j=PAT.Npat[ifile];j<ntot;j++)
3170  {
3171  tmp2[j] = (type_pat*) malloc(nin*sizeof(type_pat));
3172  }
3173  if(PatMemory[ifile]==1) free(PAT.Rin[ifile]);
3174  PAT.Rin[ifile] = tmp2; */
3175 
3176  tmp3 = (type_pat *) malloc(ntot*(nin+1)*sizeof(type_pat));
3177  if(tmp3 == nullptr) return -111;
3178 
3179  for(j=0; j<PAT.Npat[ifile]*(nin+1); j++)
3180  {
3181  tmp3[j] = PAT.vRin[ifile][j];
3182  }
3183  if(PatMemory[ifile]==1) free(PAT.vRin[ifile]);
3184  PAT.vRin[ifile] = tmp3;
3185  for(j=0; j<ntot; j++)
3186  {
3187  PAT.Rin[ifile][j] = &(PAT.vRin[ifile][j*(nin+1)+1]);
3188  PAT.vRin[ifile][j*(nin+1)] = 1;
3189  }
3190 
3191  tmp2 = (type_pat **) malloc(ntot*sizeof(type_pat*));
3192  if(tmp2 == nullptr) return -111;
3193  for(j=0; j<PAT.Npat[ifile]; j++)
3194  {
3195  tmp2[j] = PAT.Rans[ifile][j];
3196  }
3197  for(j=PAT.Npat[ifile];j<ntot;j++)
3198  {
3199  tmp2[j] = (type_pat*) malloc(nout*sizeof(type_pat));
3200  if(tmp2[j] == nullptr) return -111;
3201  }
3202  if(PatMemory[ifile]==1) free(PAT.Rans[ifile]);
3203  PAT.Rans[ifile] = tmp2;
3204  PAT.Npat[ifile] = ntot;
3205  PatMemory[ifile] = 1;
3206 
3207 /* indices */
3208  if(ifile==0)
3209  {
3210  free(ExamplesIndex);
3211  ExamplesIndex = (int *) malloc(ntot*sizeof(int));
3212  if(ExamplesIndex == nullptr) return -111;
3213  for(j=0; j<ntot; j++) ExamplesIndex[j] = j;
3214  }
3215  }
3216 
3217  return 0;
3218 }
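
One detail worth noting: vRin stores each example in nin+1 consecutive slots, with a constant 1 in slot 0 that serves as the bias input, and Rin[ifile][j] points one element past it so inputs are indexed from 0. A standalone sketch (hypothetical helper) of the same layout:

    #include <stdlib.h>

    /* allocate npat rows of nin inputs, each row prefixed with a constant
       bias input of 1, as AllocPatterns does with vRin / Rin */
    double **alloc_rows_with_bias(double **flat, int npat, int nin)
    {
        double *v = (double *) malloc((size_t) npat * (nin + 1) * sizeof(double));
        double **rows = (double **) malloc((size_t) npat * sizeof(double *));
        int j;

        if (v == NULL || rows == NULL) { free(v); free(rows); return NULL; }
        for (j = 0; j < npat; j++)
        {
            v[j * (nin + 1)] = 1;              /* bias slot */
            rows[j] = &v[j * (nin + 1) + 1];   /* rows[j][0] = first input */
        }
        *flat = v;                             /* caller frees v and rows */
        return rows;
    }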
3219 
3220 
3221 /***********************************************************/
3222 /* FreePatterns */
3223 /* */
3224 /* frees memory for the examples */
3225 /* */
3226 /* input : int ifile = file number (0 or 1) */
3227 /* */
3228 /* return value (int) = error code: 0 = no error */
3229 /* 1 = wrong file number */
3230 /* 2 = no mem allocated */
3231 /* */
3232 /* Author: J.Schwindling 26-Apr-99 */
3233 /***********************************************************/
3234 
3235 /* extern "C"Dllexport */int FreePatterns(int ifile)
3236 {
3237  int i;
3238 
3239  if(ifile>1 || ifile<0) return 1;
3240 /* printf("%d %d \n",ifile,PatMemory[ifile]);*/
3241  if(PatMemory[ifile]==0) return 2;
3242 
3243  free(PAT.Pond[ifile]);
3244  for(i=0; i<PAT.Npat[ifile]; i++)
3245  {
3246 /* free(PAT.Rin[ifile][i]); */
3247  free(PAT.Rans[ifile][i]);
3248  }
3249  free(PAT.Rin[ifile]);
3250  free(PAT.Rans[ifile]);
3251  free(PAT.vRin[ifile]);
3252  PatMemory[ifile] = 0;
3253  PAT.Npat[ifile] = 0;
3254 
3255  return 0;
3256 }
3257 
3258 
3259 /***********************************************************/
3260 /* MLP_StatInputs */
3261 /* */
3262 /* compute statistics about the inputs: mean, RMS, min, max*/
3263 /* */
3264 /* inputs: int Nexamples = number of examples */
3265 /* int Ninputs = number of quantities */
3266 /* type_pat **inputs = input values */
3267 /* */
3268 /* outputs: dbl *mean = mean value */
3269 /* dbl *sigma = RMS */
3270 /* dbl *minimum = minimum */
3271 /* dbl *maximum = maximum */
3272 /* */
3273 /* return value (int): always = 0 */
3274 /* */
3275 /* Author: J.Schwindling 11-Oct-99 */
3276 /***********************************************************/
3277 
3278 int MLP_StatInputs(int Nexamples, int Ninputs, type_pat **inputs,
3279  dbl *mean, dbl *sigma, dbl *minimum, dbl *maximum)
3280 {
3281  dbl *fmean;
3282  int j, ipat, nmax;
3283 
3284 /* first compute a fast rough mean using the first 100 events */
3285  fmean = (dbl*) malloc(Ninputs*sizeof(dbl));
3286  nmax = 100;
3287  if(Nexamples<100) nmax=Nexamples;
3288 
3289  for(j=0;j<Ninputs;j++)
3290  {
3291  fmean[j] = 0;
3292  for(ipat=0;ipat<nmax;ipat++)
3293  {
3294  fmean[j] += (dbl) inputs[ipat][j];
3295  }
3296  fmean[j] = fmean[j]/(dbl) nmax;
3297 
3298 /* now compute real mean and sigma, min and max */
3299  mean[j] = 0;
3300  sigma[j] = 0;
3301  minimum[j] = 99999;
3302  maximum[j] = -99999;
3303  for(ipat=0;ipat<Nexamples;ipat++)
3304  {
3305  mean[j] += (dbl) inputs[ipat][j];
3306  sigma[j] += ((dbl) inputs[ipat][j]-fmean[j])*
3307  ((dbl) inputs[ipat][j]-fmean[j]);
3308  if((dbl) inputs[ipat][j] > maximum[j])
3309  maximum[j]=(dbl) inputs[ipat][j];
3310  if((dbl) inputs[ipat][j] < minimum[j])
3311  minimum[j]=(dbl) inputs[ipat][j];
3312  }
3313  mean[j] = mean[j]/(dbl) Nexamples;
3314  sigma[j] = sqrt(sigma[j]/ (dbl) Nexamples -
3315  (mean[j]-fmean[j])*
3316  (mean[j]-fmean[j]));
3317  }
3318  free(fmean);
3319  return 0;
3320 }
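
The two-pass scheme rests on the shift identity for the variance: for any fixed offset a,

    Var(x) = (1/N) * Sum_i (x_i - a)^2  -  (mean(x) - a)^2

Taking a to be the rough mean of the first (up to) 100 events keeps the squared terms small, so the final subtraction loses far less precision than the naive E[x^2] - E[x]^2 (i.e. a = 0) would when the mean is large compared to the spread.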
3321 
3322 /***********************************************************/
3323 /* MLP_PrintInputStat */
3324 /* */
3325 /* prints statistics about the inputs: mean, RMS, min, max */
3326 /* */
3327 /* return value (int) = error code: 0 = OK */
3328 /* -111 = could not */
3329 /* allocate memory */
3330 /* */
3331 /* Author: J.Schwindling 11-Oct-99 */
3332 /* Modified: J.Schwindling 31-Jan-2000: return value */
3333 /***********************************************************/
3334 
3335 /* extern "C"Dllexport */int MLP_PrintInputStat()
3336 {
3337  int j;
3338  dbl *mean, *sigma, *minimum, *maximum;
3339 
3340 /* allocate memory */
3341  mean = (dbl *) malloc(NET.Nneur[0]*sizeof(dbl));
3342  sigma = (dbl *) malloc(NET.Nneur[0]*sizeof(dbl));
3343  minimum = (dbl *) malloc(NET.Nneur[0]*sizeof(dbl));
3344  maximum = (dbl *) malloc(NET.Nneur[0]*sizeof(dbl));
3345  int returnCode = -111; // to return if any malloc failed
3346 
3347  if(mean && sigma && minimum && maximum) {
3348 
3349  MLP_StatInputs(PAT.Npat[0],NET.Nneur[0],PAT.Rin[0],mean,sigma,minimum,maximum);
3350 
3351  printf("\t mean \t\t RMS \t\t min \t\t max\n");
3352  for(j=0;j<NET.Nneur[0];j++)
3353  {
3354  printf("var%d \t %e \t %e \t %e \t %e\n",j+1,
3355  mean[j],sigma[j],minimum[j],maximum[j]);
3356  }
3357  returnCode = 0; // everything went fine
3358  }
3359 
3360  free(mean);
3361  free(sigma);
3362  free(minimum);
3363  free(maximum);
3364  printf("\n");
3365  return returnCode;
3366 }
3367 
3368 
3369 /***********************************************************/
3370 /* NormalizeInputs */
3371 /* */
3372 /* normalize the inputs: I' = (I - <I>) / RMS(I) */
3373 /* */
3374 /* return value (int) = error code: 0 = OK */
3375 /* -111 = could not */
3376 /* allocate memory */
3377 /* */
3378 /* Author: J.Schwindling 04-May-1999 */
3379 /* Modified: J.Schwindling 31-Jan-2000: return value */
3380 /***********************************************************/
3381 
3382 /* extern "C"Dllexport */int NormalizeInputs()
3383 {
3384  int j, ipat;
3385  dbl *mean, *sigma, *minimum, *maximum;
3386 
3387 /* allocate memory */
3388  mean = (dbl *) malloc(NET.Nneur[0]*sizeof(dbl));
3389  sigma = (dbl *) malloc(NET.Nneur[0]*sizeof(dbl));
3390  STAT.mean = (dbl *) malloc(NET.Nneur[0]*sizeof(dbl));
3391  STAT.sigma = (dbl *) malloc(NET.Nneur[0]*sizeof(dbl));
3392  minimum = (dbl *) malloc(NET.Nneur[0]*sizeof(dbl));
3393  maximum = (dbl *) malloc(NET.Nneur[0]*sizeof(dbl));
3394  int returnCode = -111; // to return if any malloc failed
3395 
3396  if(mean && sigma && minimum && maximum && STAT.mean && STAT.sigma) {
3397 
3398  MLP_StatInputs(PAT.Npat[0],NET.Nneur[0],PAT.Rin[0],mean,sigma,minimum,maximum);
3399 
3400  if(NET.Debug>=1) printf("\t mean \t\t RMS \t\t min \t\t max\n");
3401  for(j=0;j<NET.Nneur[0];j++)
3402  {
3403  if(NET.Debug>=1)
3404  printf("var%d \t %e \t %e \t %e \t %e\n",j+1,
3405  mean[j],sigma[j],minimum[j],maximum[j]);
3406 
3407  /* store mean and sigma for output function */
3408  STAT.mean[j] = mean[j];
3409  STAT.sigma[j] = sigma[j];
3410 
3411  /* finally apply the normalization */
3412  for(ipat=0;ipat<PAT.Npat[0];ipat++)
3413  {
3414  PAT.Rin[0][ipat][j] =
3415  (PAT.Rin[0][ipat][j]-(float) mean[j])/
3416  (float) sigma[j];
3417  }
3418  for(ipat=0;ipat<PAT.Npat[1];ipat++)
3419  {
3420  PAT.Rin[1][ipat][j] =
3421  (PAT.Rin[1][ipat][j]-(float) mean[j])/
3422  (float) sigma[j];
3423  }
3424  }
3425  returnCode = 0; // everything went fine
3426  }
3427 
3428  free(mean);
3429  free(sigma);
3430  free(minimum);
3431  free(maximum);
3432  if(NET.Debug>=1) printf("\n");
3433  return returnCode;
3434 }
3435 
3436 
3437 /***********************************************************/
3438 /* AllocNetwork */
3439 /* */
3440 /* memory allocation for weights, etc */
3441 /* */
3442 /* inputs: int Nlayer: number of layers */
3443 /* int *Neurons: number of neurons per layer */
3444 /* */
3445 /* return value (int): error = 0: no error */
3446 /* = -111: could not allocate mem*/
3447 /* */
3448 /* Author: J.Schwindling 28-Sep-99 */
3449 /***********************************************************/
3450 
3451 int AllocNetwork(int Nlayer, int *Neurons)
3452 {
3453  int i, j, k, l;
3454 
3455  if(NetMemory != 0) FreeNetwork();
3456  NetMemory = 1;
3457 
3458  NET.Nneur = (int *) malloc(Nlayer*sizeof(int));
3459  if(NET.Nneur == nullptr) return -111;
3460 
3461  NET.T_func = (int **) malloc(Nlayer*sizeof(int *));
3462  NET.Deriv1 = (dbl **) malloc(Nlayer*sizeof(dbl *));
3463  NET.Inn = (dbl **) malloc(Nlayer*sizeof(dbl *));
3464  NET.Outn = (dbl **) malloc(Nlayer*sizeof(dbl *));
3465  NET.Delta = (dbl **) malloc(Nlayer*sizeof(dbl *));
3466  if(NET.T_func == nullptr || NET.Deriv1 == nullptr
3467  || NET.Inn == nullptr || NET.Outn == nullptr
3468  || NET.Delta == nullptr) return -111;
3469 
3470  for(i=0; i<Nlayer; i++)
3471  {
3472  NET.T_func[i] = (int *) malloc(Neurons[i]*sizeof(int));
3473  NET.Deriv1[i] = (dbl *) malloc(Neurons[i]*sizeof(dbl));
3474  NET.Inn[i] = (dbl *) malloc(Neurons[i]*sizeof(dbl));
3475  NET.Outn[i] = (dbl *) malloc(Neurons[i]*sizeof(dbl));
3476  NET.Delta[i] = (dbl *) malloc(Neurons[i]*sizeof(dbl));
3477  if(NET.T_func[i] == nullptr || NET.Deriv1[i] == nullptr
3478  || NET.Inn[i] == nullptr || NET.Outn[i] == nullptr
3479  || NET.Delta[i] ==nullptr ) return -111;
3480  }
3481 
3482  NET.Weights = (dbl ***) malloc(Nlayer*sizeof(dbl **));
3483  NET.vWeights = (dbl **) malloc(Nlayer*sizeof(dbl *));
3484  LEARN.Odw = (dbl ***) malloc(Nlayer*sizeof(dbl **));
3485  LEARN.ODeDw = (dbl ***) malloc(Nlayer*sizeof(dbl **));
3486  LEARN.DeDw = (dbl ***) malloc(Nlayer*sizeof(dbl **));
3487  if(NET.Weights == nullptr || NET.vWeights == nullptr
3488  || LEARN.Odw == nullptr || LEARN.ODeDw == nullptr
3489  || LEARN.DeDw == nullptr) return -111;
3490 
3491  for(i=1; i<Nlayer; i++)
3492  {
3493  k = Neurons[i-1]+1;
3494  NET.vWeights[i] = (dbl *) malloc(k * Neurons[i] *
3495  sizeof(dbl));
3496  NET.Weights[i] = (dbl **) malloc(Neurons[i]*sizeof(dbl *));
3497  LEARN.Odw[i] = (dbl **) malloc(Neurons[i]*sizeof(dbl *));
3498  LEARN.ODeDw[i] = (dbl **) malloc(Neurons[i]*sizeof(dbl *));
3499  LEARN.DeDw[i] = (dbl **) malloc(Neurons[i]*sizeof(dbl *));
3500  if(NET.Weights[i] == nullptr || NET.vWeights[i] == nullptr
3501  || LEARN.Odw[i] == nullptr || LEARN.ODeDw[i] == nullptr
3502  || LEARN.DeDw[i] == nullptr) return -111;
3503 
3504  for(j=0; j<Neurons[i]; j++)
3505  {
3506  NET.Weights[i][j] = &(NET.vWeights[i][j*k]);
3507  LEARN.Odw[i][j] = (dbl *) malloc(k*sizeof(dbl));
3508  LEARN.ODeDw[i][j] = (dbl *) malloc(k*sizeof(dbl));
3509  LEARN.DeDw[i][j] = (dbl *) malloc(k*sizeof(dbl));
3510  if(LEARN.Odw[i][j] == nullptr
3511  || LEARN.ODeDw[i][j] == nullptr
3512  || LEARN.DeDw[i][j] == nullptr) return -111;
3513 
3514  for(l=0; l<k; l++)
3515  {
3516  LEARN.Odw[i][j][l] = 0;
3517  LEARN.ODeDw[i][j][l] = 0;
3518  }
3519  }
3520  }
3521  return 0;
3522 }
3523 
3524 
3525 /***********************************************************/
3526 /* FreeNetwork */
3527 /* */
3528 /* frees the memory allocated for the network */
3529 /* */
3530 /* Author: J.Schwindling 06-Oct-99 */
3531 /***********************************************************/
3532 
3533 void FreeNetwork()
3534 {
3535  int i, j;
3536  for(i=1; i<NET.Nlayer; i++)
3537  {
3538  for(j=0; j<NET.Nneur[i]; j++)
3539  {
3540 /* free(NET.Weights[i][j]); */
3541  free(LEARN.Odw[i][j]);
3542  free(LEARN.ODeDw[i][j]);
3543  free(LEARN.DeDw[i][j]);
3544  }
3545  free(NET.vWeights[i]);
3546  free(NET.Weights[i]);
3547  free(LEARN.Odw[i]);
3548  free(LEARN.ODeDw[i]);
3549  free(LEARN.DeDw[i]);
3550  }
3551  free(NET.Weights);
3552  free(LEARN.Odw);
3553  free(LEARN.ODeDw);
3554  free(LEARN.DeDw);
3555 
3556  free(NET.Nneur);
3557 
3558  for(i=0; i<NET.Nlayer; i++)
3559  {
3560  free(NET.T_func[i]);
3561  free(NET.Deriv1[i]);
3562  free(NET.Inn[i]);
3563  free(NET.Outn[i]);
3564  free(NET.Delta[i]);
3565  }
3566  free(NET.T_func);
3567  free(NET.Deriv1);
3568  free(NET.Inn);
3569  free(NET.Outn);
3570  free(NET.Delta);
3571 
3572  NetMemory = 0;
3573 }
3574 
3575 /***********************************************************/
3576 /* GetNetStructure */
3577 /* */
3578 /* given a string like "3,4,1" returns the network */
3579 /* structure */
3580 /* */
3581 /* inputs: char *s: input string */
3582 /* */
3583 /* outputs: int *Nlayer: number of layers */
3584 /* int *Nneur: number of neurons per layer */
3585 /* */
3586 /* return value (int): error = 0: no error */
3587 /* = -1: s is empty */
3588 /* = -2: s is too long */
3589 /* = -3: too many layers */
3590 /* */
3591 /* Author: J.Schwindling 04-Oct-99 */
3592 /***********************************************************/
3593 
3594 int GetNetStructure(char *s, int *Nlayer, int *Nneur)
3595 {
3596  int i=0;
3597  char tmp[1024];
3598 
3599  if(strlen(s)==0) return -1;
3600  if(strlen(s)>=1024) return -2; /* tmp[1024] needs room for the '\0' */
3601 
3602  strcpy(tmp,s);
3603  char* saveptr;
3604  if (strtok_r(tmp,",",&saveptr))
3605  {
3606  i=1;
3607  while (strtok_r(nullptr,",",&saveptr)) i++;
3608  }
3609  *Nlayer = i;
3610  if(i > NLMAX) return -3;
3611 
3612  strcpy(tmp,s);
3613  if (*Nlayer>0)
3614  {
3615  sscanf(strtok_r(tmp,",",&saveptr),"%d",&(Nneur[0]));
3616  for (i=1;i<*Nlayer;i++)
3617  sscanf(strtok_r(nullptr,",",&saveptr),"%d",&(Nneur[i]));
3618  }
3619 
3620  return 0;
3621 }
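
A minimal usage sketch (hypothetical; declares the two entry points directly rather than assuming a header, and uses 1000 for NLMAX as defined in this file):

    #include <stdio.h>

    extern int GetNetStructure(char *s, int *Nlayer, int *Nneur);
    extern int MLP_SetNet(int *nl, int *nn);

    int main(void)
    {
        int nlayer, nneur[1000];             /* NLMAX in this file */
        char net[] = "3,4,1";

        if (GetNetStructure(net, &nlayer, nneur) != 0) return 1;
        /* nlayer == 3, nneur == {3, 4, 1} */
        printf("%d layers: %d-%d-%d\n", nlayer, nneur[0], nneur[1], nneur[2]);
        return MLP_SetNet(&nlayer, nneur);   /* 0 on success */
    }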
3622 
3623 
3624 /***********************************************************/
3625 /* MLP_SetNet */
3626 /* */
3627 /* to set the structure of a neural network */
3628 /* inputs: int *nl = number of layers */
3629 /* int *nn = number of neurons */
3630 /* */
3631 /* return value (int) = error value: */
3632 /* 0: no error */
3633 /* 1: N layers > NLMAX */
3634 /* 2: N layers < 2 */
3635 /* -111: Error allocating memory */
3636 /* */
3637 /* Author: J.Schwindling 14-Apr-99 */
3638 /* Modified: J.Schwindling 05-Oct-99 allocate memory */
3639 /* Modified: J.Schwindling 29-Nov-99 LearnFree, LearnAlloc */
3640 /***********************************************************/
3641 
3642 int MLP_SetNet(int *nl, int *nn)
3643 {
3644  int il,ierr;
3645 
3646  if((*nl)>NLMAX) return(1);
3647  if((*nl)<2) return(2);
3648 
3649 /* LearnFree(); */
3650 /* allocate memory */
3651  ierr = AllocNetwork(*nl,nn);
3652  if(ierr != 0) return ierr;
3653 
3654 /* set number of layers */
3655  NET.Nlayer = (int) *nl;
3656 
3657 /* set number of neurons */
3658  for(il=0; il<NET.Nlayer; il++) {
3659  NET.Nneur[il] = nn[il];
3660  }
3661 
3662 /* set transfer functions */
3663  SetDefaultFuncs();
3664 /* LearnAlloc(); */
3665 
3666  return(0);
3667 }
3668 
3669 
3670 /***********************************************************/
3671 /* MLP_MatrixVectorBias */
3672 /* */
3673 /* computes a Matrix-Vector product */
3674 /* r[j] = M[j][0] + Sum_i M[j][i] v[i] */
3675 /* */
3676 /* inputs: dbl *M = matrix (n lines, m+1 columns) */
3677 /* dbl *v = vector (dimension m) */
3678 /* dbl *r = resulting vector (dimension n) */
3679 /* int n */
3680 /* int m */
3681 /* */
3682 /* Author: J.Schwindling 24-Jan-00 */
3683 /***********************************************************/
3684 
3685 void MLP_MatrixVectorBias(dbl *M, dbl *v, dbl *r, int n, int m)
3686 {
3687  int i,j;
3688  dbl a1, a2, a3, a4, c, d;
3689  dbl *pM1 = M;
3690  dbl *pM2 = &(M[m+1]);
3691  dbl *pM3 = &(M[2*(m+1)]);
3692  dbl *pM4 = &(M[3*(m+1)]);
3693  dbl *pr = r;
3694  int mp1 = m+1;
3695 
3696  for(i=0; i<n-3;
3697  i+=4, pM1 += 3*mp1, pM2 += 3*mp1, pM3 += 3*mp1, pM4 += 3*mp1,
3698  pr+=4)
3699  {
3700  a1 = *pM1;
3701  a2 = *pM2;
3702  a3 = *pM3;
3703  a4 = *pM4;
3704  pM1++; pM2++; pM3++; pM4++;
3705  for(j=0; j<m-1; j+=2, pM1+=2, pM2+=2, pM3+=2, pM4+=2)
3706  {
3707  c = v[j];
3708  d = v[j+1];
3709  a1 = a1 + *pM1 * c + *(pM1+1) * d;
3710  a2 = a2 + *pM2 * c + *(pM2+1) * d;
3711  a3 = a3 + *pM3 * c + *(pM3+1) * d;
3712  a4 = a4 + *pM4 * c + *(pM4+1) * d;
3713  }
3714  for(/*j set above*/; j<m; j++, pM1++, pM2++, pM3++, pM4++)
3715  {
3716  c = v[j];
3717  a1 = a1 + *pM1 * c;
3718  a2 = a2 + *pM2 * c;
3719  a3 = a3 + *pM3 * c;
3720  a4 = a4 + *pM4 * c;
3721  }
3722  *pr = a1; *(pr+1) = a2; *(pr+2) = a3; *(pr+3) = a4;
3723  }
3724  for(/*i set above*/; i<n; i++)
3725  {
3726  pM1 = &(M[i*(m+1)]);
3727  a1 = *pM1;
3728  pM1++;
3729  for(j=0; j<m; j++, pM1++)
3730  {
3731  a1 = a1 + *pM1 * v[j];
3732  }
3733  r[i] = a1;
3734  }
3735 }
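
The 4-row / 2-column unrolling obscures the arithmetic; stripped down, the routine computes r[i] = M[i][0] + Sum_j M[i][j+1] * v[j] over rows of width m+1. A plain reference version (a sketch, not part of the original; dbl is the typedef from mlp_gen.h):

    void MLP_MatrixVectorBias_ref(dbl *M, dbl *v, dbl *r, int n, int m)
    {
        int i, j;

        for (i = 0; i < n; i++)
        {
            dbl a = M[i * (m + 1)];              /* bias term, column 0 */
            for (j = 0; j < m; j++)
                a += M[i * (m + 1) + 1 + j] * v[j];
            r[i] = a;
        }
    }
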
3736 /***********************************************************/
3737 /* MLP_MatrixVector */
3738 /* */
3739 /* computes a Matrix-Vector product */
3740 /* r[j] = Sum_i M[j][i] v[i] */
3741 /* */
3742 /* inputs: dbl *M = matrix (n lines, m columns) */
3743 /* type_pat *v = vector (dimension m) */
3744 /* dbl *r = resulting vector (dimension n) */
3745 /* int n */
3746 /* int m */
3747 /* */
3748 /* Author: J.Schwindling 24-Jan-00 */
3749 /***********************************************************/
3750 
3751 void MLP_MatrixVector(dbl *M, type_pat *v, dbl *r, int n, int m)
3752 {
3753  int i,j;
3754  dbl a1, a2, a3, a4, c, d;
3755  dbl *pM1 = M;
3756  dbl *pM2 = &(M[m]);
3757  dbl *pM3 = &(M[2*m]);
3758  dbl *pM4 = &(M[3*m]);
3759  dbl *pr = r;
3760  int mp1 = m;
3761 
3762  for(i=0; i<n-3;
3763  i+=4, pM1 += 3*mp1, pM2 += 3*mp1, pM3 += 3*mp1, pM4 += 3*mp1,
3764  pr+=4)
3765  {
3766  a1 = 0;
3767  a2 = 0;
3768  a3 = 0;
3769  a4 = 0;
3770  for(j=0; j<m-1; j+=2, pM1+=2, pM2+=2, pM3+=2, pM4+=2)
3771  {
3772  c = v[j];
3773  d = v[j+1];
3774  a1 = a1 + *pM1 * c + *(pM1+1) * d;
3775  a2 = a2 + *pM2 * c + *(pM2+1) * d;
3776  a3 = a3 + *pM3 * c + *(pM3+1) * d;
3777  a4 = a4 + *pM4 * c + *(pM4+1) * d;
3778  }
3779  for(/*j set above*/; j<m; j++, pM1++, pM2++, pM3++, pM4++)
3780  {
3781  c = v[j];
3782  a1 = a1 + *pM1 * c;
3783  a2 = a2 + *pM2 * c;
3784  a3 = a3 + *pM3 * c;
3785  a4 = a4 + *pM4 * c;
3786  }
3787  *pr = a1; *(pr+1) = a2; *(pr+2) = a3; *(pr+3) = a4;
3788  }
3789  for(/*i set above*/; i<n; i++)
3790  {
3791  pM1 = &(M[i*m]);
3792  a1 = 0;
3793  for(j=0; j<m; j++, pM1++)
3794  {
3795  a1 = a1 + *pM1 * v[j];
3796  }
3797  r[i] = a1;
3798  }
3799 }
3800 
3801 
3802 /***********************************************************/
3803 /* MLP_MM2rows */
3804 /* */
3805 /* computes a Matrix-Matrix product, with the first matrix */
3806 /* having 2 rows: c[i][j] = Sum_k a[i][k] b[j][k], i=0,1 */
3807 /* */
3808 /* inputs: dbl *c = resulting matrix (2 rows of Nj values) */
3809 /* type_pat *a = first matrix (2 rows of Nk values) */
3810 /* dbl *b = second matrix (Nj rows of Nk values) */
3811 /* int Ni (unused) */
3812 /* int Nj */
3813 /* int Nk */
3814 /* int NaOffs = row stride of a */
3815 /* int NbOffs = row stride of b */
3816 /* */
3817 /* Author: J.Schwindling 24-Jan-00 */
3818 /***********************************************************/
3819 
3820 void MLP_MM2rows(dbl *c, type_pat *a, dbl *b,
3821  int Ni, int Nj, int Nk, int NaOffs, int NbOffs)
3822 {
3823 //int i,j,k;
3824 int j,k;
3825 dbl s00,s01,s10,s11;
3826 type_pat *pa0,*pa1;
3827 dbl *pb0,*pb1,*pc0,*pc1;
3828 
3829  for (j=0; j<=Nj-2; j+=2)
3830  {
3831  pc0 = c+j;
3832  pc1 = c+j+Nj;
3833  s00 = 0.0; s01 = 0.0; s10 = 0.0; s11 = 0.0;
3834 
3835  for (k=0,pb0=b+k+NbOffs*j,
3836  pb1=b+k+NbOffs*(j+1),
3837  pa0=a+k,
3838  pa1=a+k+NaOffs;
3839  k<Nk;
3840  k++,pa0++,
3841  pa1++,
3842  pb0++,
3843  pb1++)
3844  {
3845  s00 += (*pa0)*(*pb0);
3846  s01 += (*pa0)*(*pb1);
3847  s10 += (*pa1)*(*pb0);
3848  s11 += (*pa1)*(*pb1);
3849  }
3850  *pc0 = s00; *(pc0+1) = s01; *pc1 = s10; *(pc1+1) = s11;
3851  }
3852  for (/*j set above*/; j<Nj; j++)
3853  {
3854  pc0 = c+j;
3855  pc1 = c+j+Nj;
3856  s00 = 0.0; s10 = 0.0;
3857  for (k=0,pb0=b+k+NbOffs*j,
3858  pa0=a+k,
3859  pa1=a+k+NaOffs;
3860  k<Nk;
3861  k++,pa0++,
3862  pa1++,
3863  pb0++)
3864  {
3865  s00 += (*pa0)*(*pb0);
3866  s10 += (*pa1)*(*pb0);
3867  }
3868  *pc0 = s00; *pc1 = s10;
3869  }
3870 }
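
Stripped of the two-row blocking and pointer arithmetic, the product computed above is c[i][j] = Sum_k a[i][k] * b[j][k] for the two rows i = 0,1, i.e. a product with the transpose of b. A reference sketch (not part of the original; Ni is kept, and ignored, for signature compatibility):

    void MLP_MM2rows_ref(dbl *c, type_pat *a, dbl *b,
            int Ni, int Nj, int Nk, int NaOffs, int NbOffs)
    {
        int i, j, k;
        dbl s;

        for (i = 0; i < 2; i++)                  /* the two rows of a and c */
            for (j = 0; j < Nj; j++)
            {
                s = 0;
                for (k = 0; k < Nk; k++)
                    s += a[i * NaOffs + k] * b[j * NbOffs + k];
                c[i * Nj + j] = s;
            }
    }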
3871 
3872 #ifdef __cplusplus
3873 } // extern "C"
3874 #endif