/* Forward declaration of the LAPACK least-squares driver DGELS
 * (Fortran calling convention: all arguments passed by pointer).
 * Solves over/under-determined linear systems min ||B - A*X|| using
 * a QR or LQ factorization of the M-by-N matrix A.
 *   trans : "N" (solve with A) or "T" (solve with A**T)
 *   m, n  : matrix dimensions; nrhs : number of right-hand sides
 *   a,lda : input matrix and its leading dimension
 *   b,ldb : right-hand sides on entry, solution on exit
 *   work, lwork : workspace and its size; info : LAPACK status code
 * NOTE(review): stray "46"/"47"/"48" tokens in the pasted original were
 * extraction artifacts (embedded source line numbers) and are removed here. */
int dgels_(char *trans, int *m, int *n, int *nrhs,
           double *a, int *lda, double *b, int *ldb,
           double *work, int *lwork, int *info);
64 int i, il,
in, j,
m, mp1;
72 for(j=0;j<
m;j++)
NET.Outn[0][j] = rrin[j];
75 for(i=mp1; i<=
NET.Nneur[0]; i+=4)
77 NET.Outn[0][i-1] = rrin[i-1];
78 NET.Outn[0][
i] = rrin[
i];
79 NET.Outn[0][i+1] = rrin[i+1];
80 NET.Outn[0][i+2] = rrin[i+2];
88 for(il=2; il<
NET.Nlayer; il++)
91 deriv1[il-1],
NET.Nneur[il-1]);
93 NET.Outn[il],
NET.Nneur[il],
96 for(in=0; in<
NET.Nneur[
NET.Nlayer-1]; in++)
98 deriv1[
NET.Nlayer-1][
in] = 1;
115 int i, il,
in, j, ilm1,
m, mp1;
122 for(j=0;j<
m;j++)
NET.Outn[0][j] = rrin[j];
125 for(i=mp1; i<=
NET.Nneur[0]; i+=4)
127 NET.Outn[0][i-1] = rrin[i-1];
128 NET.Outn[0][
i] = rrin[
i];
129 NET.Outn[0][i+1] = rrin[i+1];
130 NET.Outn[0][i+2] = rrin[i+2];
138 for(il=1; il<
NET.Nlayer; il++)
141 m =
NET.Nneur[ilm1]%4;
142 for(in=0; in<
NET.Nneur[il]; in++)
144 a =
NET.Weights[il][
in][0];
146 for(j=1;j<=
m;j++) a +=
147 NET.Weights[il][in][j]*
NET.Outn[ilm1][j-1];
150 for(j=mp1; j<=
NET.Nneur[ilm1]; j+=4)
153 NET.Weights[il][
in][j+3]*
NET.Outn[ilm1][j+2]+
154 NET.Weights[il][
in][j+2]*
NET.Outn[ilm1][j+1]+
155 NET.Weights[il][
in][j+1]*
NET.Outn[ilm1][j]+
156 NET.Weights[il][
in][j]*
NET.Outn[ilm1][j-1];
158 switch(
NET.T_func[il][in])
162 case 1:
NET.Outn[il][
in] =
a;
164 case 0:
NET.Outn[il][
in] = 0;
188 dbl **rrout, **deriv1;
191 int nhid =
NET.Nneur[1];
201 rrout[0][0] = rrin[1];
206 rrout[0][0] = rrin[1];
207 rrout[0][1] = rrin[2];
212 rrout[0][0] = rrin[1];
213 rrout[0][1] = rrin[2];
214 rrout[0][2] = rrin[3];
219 prrout = &(rrout[0][mp1]);
220 prrin = &(rrin[mp1+1]);
221 for(i=mp1; i<=
NET.Nneur[0]; i+=4, prrout+=4, prrin+=4)
223 *(prrout-1) = *(prrin-1);
225 *(prrout+1)= *(prrin+1);
226 *(prrout+2) = *(prrin+2);
232 NET.Outn[1],nhid,nin);
237 for(il=2; il<
NET.Nlayer; il++)
241 NET.Outn[il],
NET.Nneur[il],
NET.Nneur[il-1]);
243 for(in=0; in<
NET.Nneur[
NET.Nlayer-1]; in++)
244 deriv1[
NET.Nlayer-1][in] = 1;
267 int nhid =
NET.Nneur[1];
269 int jpat, j, il, ilm1,
m,
in, mp1;
271 dbl *pweights, *ptmp;
274 for(ipat=0; ipat<npat-1; ipat+=2)
277 NET.vWeights[1], 2, nhid, nin+1,
280 switch(
NET.T_func[1][0])
291 for(jpat=0; jpat<2; jpat++)
293 for(j=0; j<nhid; j++)
295 tmp[j+jpat*nhid] = 0;
301 for(jpat=0; jpat<2; jpat++)
303 for(in=0; in<nhid; in++)
305 NET.Outn[1][
in] = tmp[jpat*nhid+
in];
307 for(il=2; il<
NET.Nlayer; il++)
310 m =
NET.Nneur[ilm1]%4;
311 for(in=0; in<
NET.Nneur[il]; in++)
313 pweights = &(
NET.Weights[il][
in][0]);
317 for(j=1;j<=
m;j++,pweights++) a +=
318 (*pweights)*
NET.Outn[ilm1][j-1];
321 for(j=mp1; j<=
NET.Nneur[ilm1];
325 *(pweights+3)*
NET.Outn[ilm1][j+2]+
326 *(pweights+2)*
NET.Outn[ilm1][j+1]+
327 *(pweights+1)*
NET.Outn[ilm1][j]+
328 *(pweights )*
NET.Outn[ilm1][j-1];
330 switch(
NET.T_func[il][in])
334 case 1:
NET.Outn[il][
in] =
a;
336 case 0:
NET.Outn[il][
in] = 0;
340 if(il ==
NET.Nlayer-1)
342 for(in=0; in<
NET.Nneur[
NET.Nlayer-1]; in++)
344 rrans = (
dbl)
PAT.Rans[ifile][ipat+jpat][in];
345 err += (rrans-
NET.Outn[
NET.Nlayer-1][in])*
347 PAT.Pond[ifile][ipat+jpat];
355 for(; ipat<npat; ipat++)
358 &(
PAT.vRin[ifile][ipat*(nin+1)]),tmp,
361 switch(
NET.T_func[1][0])
372 for(j=0; j<nhid; j++)
379 for(in=0; in<nhid; in++)
383 for(il=2; il<
NET.Nlayer; il++)
386 m =
NET.Nneur[ilm1]%4;
387 for(in=0; in<
NET.Nneur[il]; in++)
389 pweights = &(
NET.Weights[il][
in][0]);
393 for(j=1;j<=
m;j++,pweights++) a +=
394 (*pweights)*
NET.Outn[ilm1][j-1];
397 for(j=mp1; j<=
NET.Nneur[ilm1];
401 *(pweights+3)*
NET.Outn[ilm1][j+2]+
402 *(pweights+2)*
NET.Outn[ilm1][j+1]+
403 *(pweights+1)*
NET.Outn[ilm1][j]+
404 *(pweights )*
NET.Outn[ilm1][j-1];
406 switch(
NET.T_func[il][in])
410 case 1:
NET.Outn[il][
in] =
a;
412 case 0:
NET.Outn[il][
in] = 0;
416 if(il ==
NET.Nlayer-1)
418 for(in=0; in<
NET.Nneur[
NET.Nlayer-1]; in++)
420 rrans = (
dbl)
PAT.Rans[ifile][ipat][in];
421 err += (rrans-
NET.Outn[
NET.Nlayer-1][in])*
423 PAT.Pond[ifile][ipat];
449 int in,jn,ipat,ipati;
453 tmp = (
dbl *) malloc(2 *
NET.Nneur[1] *
sizeof(
dbl));
456 printf(
"not enough memory in MLP_Test\n");
458 for(ipat=0; ipat<
PAT.Npat[
ifile]; ipat++)
462 ipati = ExamplesIndex[ipat];
469 for(in=0; in<
NET.Nneur[
NET.Nlayer-1]; in++)
471 rrans = (
dbl)
PAT.Rans[ifile][ipati][in];
472 err += (rrans-
NET.Outn[
NET.Nlayer-1][in])*
474 PAT.Pond[ifile][ipati];
480 for(in=0; in<
NET.Nneur[
NET.Nlayer-1]; in++)
481 for(jn=0; jn<=
NET.Nneur[
NET.Nlayer-2]; jn++)
495 for(in=0; in<
NET.Nneur[
NET.Nlayer-1]; in++)
496 for(jn=0; jn<=
NET.Nneur[
NET.Nlayer-2]; jn++)
522 int il, in1,
in, itest2;
523 dbl deriv, deriv1, deriv2, deriv3, deriv4, pond;
526 dbl *pout, *pdelta, *pw1, *pw2, *pw3, *pw4;
529 if(
NET.Debug>=5) printf(
" Entry MLP_Stochastic\n");
530 weights =
NET.Weights;
541 for(ipat=0;ipat<
PAT.Npat[0];ipat++)
543 ii = ExamplesIndex[ipat];
544 pond =
PAT.Pond[0][
ii];
549 for(in=0; in<
NET.Nneur[
NET.Nlayer-1]; in++)
551 deriv =
NET.Deriv1[
NET.Nlayer-1][
in];
552 a = (
dbl)
PAT.Rans[0][ii][in];
553 b =
NET.Outn[
NET.Nlayer-1][in]-a;
555 NET.Delta[
NET.Nlayer-1][in] = b*deriv*pond*eta;
558 for(il=
NET.Nlayer-2; il>0; il--)
560 dd =
NET.Delta[il+1][0];
561 for(in=0; in<
NET.Nneur[il]-3; in+=4)
563 deriv1 =
NET.Deriv1[il][
in];
564 deriv2 =
NET.Deriv1[il][in+1];
565 deriv3 =
NET.Deriv1[il][in+2];
566 deriv4 =
NET.Deriv1[il][in+3];
567 itest2 = (
NET.Nneur[il+1]==1);
568 a1 = dd*weights[il+1][0][in+1];
569 a2 = dd*weights[il+1][0][in+2];
570 a3 = dd*weights[il+1][0][in+3];
571 a4 = dd*weights[il+1][0][in+4];
573 pdelta = &(
NET.Delta[il+1][1]);
574 for(in1=1; in1<
NET.Nneur[il+1];
577 a1 += *pdelta * weights[il+1][in1][in+1];
578 a2 += *pdelta * weights[il+1][in1][in+2];
579 a3 += *pdelta * weights[il+1][in1][in+3];
580 a4 += *pdelta * weights[il+1][in1][in+4];
582 L1:
NET.Delta[il][
in] = a1*deriv1;
583 NET.Delta[il][in+1] = a2*deriv2;
584 NET.Delta[il][in+2] = a3*deriv3;
585 NET.Delta[il][in+3] = a4*deriv4;
587 for(; in<
NET.Nneur[il]; in++)
589 deriv =
NET.Deriv1[il][
in];
590 itest2 = (
NET.Nneur[il+1]==1);
591 a = dd*weights[il+1][0][in+1];
593 pdelta = &(
NET.Delta[il+1][1]);
594 for(in1=1; in1<
NET.Nneur[il+1];
598 weights[il+1][in1][in+1];
600 L2:
NET.Delta[il][
in] = a*deriv;
609 for(il=1; il<
NET.Nlayer; il++)
611 inm1 =
NET.Nneur[il-1];
612 for(in=0; in<
NET.Nneur[il]-3; in+=4)
614 a1 =
NET.Delta[il][
in];
615 a2 =
NET.Delta[il][in+1];
616 a3 =
NET.Delta[il][in+2];
617 a4 =
NET.Delta[il][in+3];
618 pout = &(
NET.Outn[il-1][0]);
619 weights[il][
in][0] += a1;
620 weights[il][in+1][0] += a2;
621 weights[il][in+2][0] += a3;
622 weights[il][in+3][0] += a4;
623 weights[il][
in][1] += a1* (*pout);
624 weights[il][in+1][1] += a2* (*pout);
625 weights[il][in+2][1] += a3* (*pout);
626 weights[il][in+3][1] += a4* (*pout);
628 pw1 = &(weights[il][
in][2]);
629 pw2 = &(weights[il][in+1][2]);
630 pw3 = &(weights[il][in+2][2]);
631 pw4 = &(weights[il][in+3][2]);
632 for(in1=2; in1<=inm1;
633 ++in1, ++pout, ++pw1, ++pw2,
642 for(; in<
NET.Nneur[il]; in++)
644 a1 =
NET.Delta[il][
in];
645 pout = &(
NET.Outn[il-1][0]);
646 weights[il][
in][0] += a1;
647 weights[il][
in][1] += a1* (*pout);
649 pw1 = &(weights[il][
in][2]);
650 for(in1=2; in1<=inm1;
651 ++in1, ++pout, ++pw1)
660 for(il=1; il<
NET.Nlayer; il++)
662 for(in=0; in<
NET.Nneur[il]; in++)
665 a =
NET.Delta[il][
in];
669 b = a*
NET.Outn[il-1][0];
673 for(in1=2; in1<=
NET.Nneur[il-1]; in1++)
675 b = a*
NET.Outn[il-1][in1-1];
710 int Nweights, Nlinear, ipat, ierr;
716 Nweights =
NET.Nweights;
717 Nlinear =
NET.Nneur[
NET.Nlayer-2] + 1;
719 if(
NET.Debug>=5) printf(
" Entry MLP_Epoch\n");
729 if(iepoch==1 &&
LEARN.Meth==7)
743 for(ipat=0;ipat<
nn;ipat++)
745 ierr =
MLP_Train(&ExamplesIndex[ipat],&err);
746 if(ierr!=0) printf(
"Epoch: ierr= %d\n",ierr);
751 for(ipat=0;ipat<
PAT.Npat[0];ipat++)
754 if(ierr!=0) printf(
"Epoch: ierr= %d\n",ierr);
768 if((iepoch-1)%
LEARN.Nreset==0)
787 if((iepoch-1)%
LEARN.Nreset==0)
796 beta =
LEARN.Norm/ONorm;
803 if((iepoch-1)%
LEARN.Nreset==0)
835 printf(
"Line search fail \n");
841 if((iepoch-1)%
LEARN.Nreset==0)
848 if(
NET.Debug>=5) printf(
"Before GetGammaDelta \n");
850 if(
NET.Debug>=5) printf(
"After GetGammaDelta \n");
852 if(
NET.Debug>=5) printf(
"After GetBFGSH \n");
862 if(
NET.Debug>=5) printf(
"After BFGSdir \n");
871 printf(
"Line search fail \n");
877 if(
NET.Debug>=5) printf(
" End MLP_Epoch\n");
900 if(*ipat<0)
return(2);
904 for(in=0; in<
NET.Nneur[
NET.Nlayer-1]; in++)
906 *err += ((
dbl)
PAT.Rans[0][*ipat][in]-
NET.Outn[
NET.Nlayer-1][in])
907 *((
dbl)
PAT.Rans[0][*ipat][in]-
NET.Outn[
NET.Nlayer-1][in])*
933 for(il=
NET.Nlayer-2; il>0; il--) {
935 for(in=0; in<
NET.Nneur[il]; in++) {
938 for(in1=0; in1<=
NET.Nneur[il-1]; in1++) {
940 + eps *
LEARN.Odw[il][
in][in1];
944 for(in1=0; in1<=
NET.Nneur[il-1]; in1++) {
973 for(il=
NET.Nlayer-1; il>0; il--) {
974 for(in1=0; in1<=
NET.Nneur[il-1]; in1++) {
977 for(in=0; in<
NET.Nneur[il]; in++) {
979 + epseta *
LEARN.Odw[il][
in][in1]);
1002 for(il=1; il<
NET.Nlayer; il++)
1003 for(in=0; in<
NET.Nneur[il]; in++)
1004 for(jn=0; jn<=
NET.Nneur[il-1]; jn++)
1005 dd +=
LEARN.DeDw[il][in][jn]*
1006 LEARN.DeDw[il][in][jn];
1024 for(il=1; il<
NET.Nlayer; il++)
1025 for(in=0; in<
NET.Nneur[il]; in++)
1026 for(jn=0; jn<=
NET.Nneur[il-1]; jn++)
1027 dd +=
LEARN.DeDw[il][in][jn]*
1028 LEARN.ODeDw[il][in][jn];
1044 for(il=1; il<
NET.Nlayer; il++)
1045 for(in=0; in<
NET.Nneur[il]; in++)
1046 for(jn=0; jn<=
NET.Nneur[il-1]; jn++)
1047 LEARN.DeDw[il][in][jn] = 0;
1063 for(il=1; il<
NET.Nlayer; il++)
1064 for(in=0; in<
NET.Nneur[il]; in++)
1065 for(jn=0; jn<=
NET.Nneur[il-1]; jn++)
1066 LEARN.DeDw[il][in][jn] /= (
dbl) Nexamples;
1081 for(il=1; il<
NET.Nlayer; il++)
1082 for(in=0; in<
NET.Nneur[il]; in++)
1083 for(jn=0; jn<=
NET.Nneur[il-1]; jn++)
1084 LEARN.ODeDw[il][in][jn] =
LEARN.DeDw[il][in][jn];
1100 for(il=1; il<
NET.Nlayer; il++)
1101 for(in=0; in<
NET.Nneur[il]; in++)
1102 for(jn=0; jn<=
NET.Nneur[il-1]; jn++)
1125 int il, in1,
in,
ii;
1128 dbl *pout, *pdedw, *pdelta;
1133 b = (
dbl)
PAT.Pond[0][ipat];
1134 for(in=0; in<
NET.Nneur[
NET.Nlayer-1]; in++)
1136 deriv =
NET.Deriv1[
NET.Nlayer-1][
in];
1138 (out[
in] - (
dbl) ans[in])*deriv*
b;
1141 for(il=
NET.Nlayer-2; il>0; il--)
1144 for(in=0; in<
NET.Nneur[il]; in++)
1146 deriv =
NET.Deriv1[il][
in];
1147 a =
NET.Delta[il+1][0] *
NET.Weights[il+1][0][in+1];
1148 pdelta = &(
NET.Delta[il+1][1]);
1149 for(in1=1; in1<
NET.Nneur[il+1]; in1++, pdelta++)
1151 a += *pdelta *
NET.Weights[il+1][in1][in+1];
1153 NET.Delta[il][
in] = a * deriv;
1157 for(il=1; il<
NET.Nlayer; il++)
1159 ii =
NET.Nneur[il-1];
1160 for(in=0; in<
NET.Nneur[il]; in++)
1162 a =
NET.Delta[il][
in];
1164 LEARN.DeDw[il][
in][1] += a *
NET.Outn[il-1][0];
1165 pout = &(
NET.Outn[il-1][1]);
1166 pdedw = &(
LEARN.DeDw[il][
in][2]);
1167 for(in1=1; in1<
ii; ++in1, ++pout, ++pdedw)
1169 (*pdedw) += a * (*pout);
1203 if(layer>
NLMAX)
return(1);
1206 NET.T_func[layer-1][neuron-1] =
func;
1226 for(il=0; il<
NET.Nlayer; il++) {
1227 for(in=0; in<
NET.Nneur[il]; in++) {
1228 NET.T_func[il][
in] = 2;
1229 if(il==
NET.Nlayer-1)
NET.T_func[il][
in] = 1;
1247 for(il=1; il<
NET.Nlayer; il++)
1248 for(in=0; in<
NET.Nneur[il]; in++)
1249 for(jn=0; jn<=
NET.Nneur[il-1]; jn++)
1250 dir[il][in][jn] = -
LEARN.DeDw[il][in][jn];
1269 for(il=1; il<
NET.Nlayer; il++)
1270 for(in=0; in<
NET.Nneur[il]; in++)
1271 for(jn=0; jn<=
NET.Nneur[il-1]; jn++)
1273 dir[il][
in][jn] = -
LEARN.DeDw[il][
in][jn]+
1274 beta*dir[il][
in][jn];
1293 for(il=1; il<
NET.Nlayer; il++)
1294 for(in=0; in<
NET.Nneur[il]; in++)
1295 for(jn=0; jn<=
NET.Nneur[il-1]; jn++)
1297 ddir +=
LEARN.DeDw[il][
in][jn]*dir[il][
in][jn];
1317 for(il=1; il<
NET.Nlayer; il++)
1318 for(in=0; in<
NET.Nneur[il]; in++)
1319 for(jn=0; jn<=
NET.Nneur[il-1]; jn++)
1346 g = (
dbl*) malloc(
NET.Nweights*
sizeof(
dbl));
1347 s = (
dbl*) malloc(Nweights*
sizeof(
dbl));
1349 for(il=1; kk<Nweights; il++)
1350 for(in=0; in<
NET.Nneur[il]; in++)
1351 for(jn=0; jn<=
NET.Nneur[il-1]; jn++)
1356 for(i=0; i<Nweights; i++)
1359 for(j=0; j<Nweights; j++)
1361 s[
i] += BFGSH[
i][j] * g[j];
1366 for(il=1; kk<Nweights; il++)
1367 for(in=0; in<
NET.Nneur[il]; in++)
1368 for(jn=0; jn<=
NET.Nneur[il-1]; jn++)
1370 dir[il][
in][jn] = -s[
kk];
1391 for(i=0; i<Nweights; i++)
1392 for(j=0; j<Nweights; j++)
1395 if(i==j) BFGSH[
i][j] = 1;
1420 typedef double dble;
1428 Hgamma = (dble *) malloc(Nweights*
sizeof(dble));
1429 tmp = (dble *) malloc(Nweights*
sizeof(dble));
1431 for(i=0; i<Nweights; i++)
1433 deltaTgamma += (dble) delta[i] * (dble) Gamma[
i];
1436 for(j=0; j<Nweights; j++)
1438 a += (dble) BFGSH[i][j] * (dble) Gamma[j];
1439 b += (dble) Gamma[j] * (dble) BFGSH[j][
i];
1443 factor += (dble) Gamma[i]*Hgamma[i];
1445 if(deltaTgamma == 0)
1451 a = 1 / deltaTgamma;
1452 factor = 1 + factor*
a;
1454 for(i=0; i<Nweights; i++)
1456 b = (dble) delta[i];
1457 for(j=0; j<Nweights; j++)
1458 BFGSH[i][j] += (
dbl) (factor*b* (dble)
1459 delta[j]-(tmp[j]*b+Hgamma[i]*(dble)delta[j]))*
a;
1481 dbl alpha1, alpha2, alpha3;
1482 dbl err1, err2, err3;
1484 int icount, il,
in, jn;
1491 w0 = (
dbl ***) malloc(
NET.Nlayer*
sizeof(
dbl**));
1492 for(il=1; il<
NET.Nlayer; il++)
1494 w0[il] = (
dbl **) malloc(
NET.Nneur[il]*
sizeof(
dbl*));
1495 for(in=0; in<
NET.Nneur[il]; in++)
1497 w0[il][
in] = (
dbl *) malloc((
NET.Nneur[il-1]+1)*
1499 for(jn=0; jn<=
NET.Nneur[il-1]; jn++)
1501 w0[il][
in][jn] =
NET.Weights[il][
in][jn];
1512 if(
NET.Debug>=4) printf(
"err depart= %f\n",err1);
1519 if(alpha2 < 0.01) alpha2 = 0.01;
1520 if(alpha2 > 2.0) alpha2 = 2.0;
1524 if(
NET.Debug>=4) printf(
"alpha, err= %e %e\n",alpha2,err2);
1534 for(icount=1;icount<=100;icount++)
1536 alpha3 = alpha3*
tau;
1539 if(
NET.Debug>=4) printf(
"alpha, err= %e %e\n",alpha3,err3);
1541 if(err3>err2)
break;
1556 for(icount=1;icount<=100;icount++)
1558 alpha2 = alpha2/
tau;
1561 if(
NET.Debug>=4) printf(
"alpha, err= %e %e\n",alpha2,err2);
1563 if(err1>err2)
break;
1578 *alpmin = 0.5*(alpha1+alpha3-(err3-err1)/((err3-err2)/(alpha3-alpha2)
1579 -(err2-err1)/(alpha2-alpha1)));
1580 if(*alpmin>10000) *alpmin=10000;
1584 LastAlpha = *alpmin;
1587 for(il=1; il<
NET.Nlayer; il++)
1588 for(in=0; in<
NET.Nneur[il]; in++)
1589 for(jn=0; jn<=
NET.Nneur[il-1]; jn++)
1590 LEARN.Odw[il][in][jn] =
NET.Weights[il][in][jn]
1593 for(il=1; il<
NET.Nlayer; il++)
1594 for(in=0; in<
NET.Nneur[il]; in++)
1596 for(il=1; il<
NET.Nlayer; il++)
1622 int icount, il,
in, jn;
1629 w0 = (
dbl ***) malloc(
NET.Nlayer*
sizeof(
dbl**));
1630 for(il=1; il<
NET.Nlayer; il++)
1632 w0[il] = (
dbl **) malloc(
NET.Nneur[il]*
sizeof(
dbl*));
1633 for(in=0; in<
NET.Nneur[il]; in++)
1635 w0[il][
in] = (
dbl *) malloc((
NET.Nneur[il-1]+1)*
1637 for(jn=0; jn<=
NET.Nneur[il-1]; jn++)
1639 w0[il][
in][jn] =
NET.Weights[il][
in][jn];
1650 if(
NET.Debug>=4) printf(
"err depart= %f\n",err1);
1666 for(icount=1;icount<=100;icount++)
1668 alpha2 = alpha2/
tau;
1672 if(err1>err2)
break;
1687 for(il=1; il<
NET.Nlayer; il++)
1688 for(in=0; in<
NET.Nneur[il]; in++)
1689 for(jn=0; jn<=
NET.Nneur[il-1]; jn++)
1690 LEARN.Odw[il][in][jn] =
NET.Weights[il][in][jn]
1693 for(il=1; il<
NET.Nlayer; il++)
1694 for(in=0; in<
NET.Nneur[il]; in++)
1696 for(il=1; il<
NET.Nlayer; il++)
1709 w0 = (
dbl ***) malloc(
NET.Nlayer*
sizeof(
dbl**));
1710 for(il=1; il<
NET.Nlayer; il++)
1712 w0[il] = (
dbl **) malloc(
NET.Nneur[il]*
sizeof(
dbl*));
1713 for(in=0; in<
NET.Nneur[il]; in++)
1715 w0[il][
in] = (
dbl *) malloc((
NET.Nneur[il-1]+1)*
1717 for(jn=0; jn<=
NET.Nneur[il-1]; jn++)
1719 w0[il][
in][jn] =
NET.Weights[il][
in][jn];
1729 for(il=1; il<
NET.Nlayer; il++)
1730 for(in=0; in<
NET.Nneur[il]; in++)
1731 for(jn=0; jn<=
NET.Nneur[il-1]; jn++)
1732 LEARN.Odw[il][in][jn] =
NET.Weights[il][in][jn]
1735 for(il=1; il<
NET.Nlayer; il++)
1736 for(in=0; in<
NET.Nneur[il]; in++)
1738 for(il=1; il<
NET.Nlayer; il++)
1761 for(il=1; il<
NET.Nlayer; il++)
1762 for(in=0; in<
NET.Nneur[il]; in++)
1763 for(jn=0; jn<=
NET.Nneur[il-1]; jn++)
1764 NET.Weights[il][in][jn] = w0[il][in][jn]+
1765 alpha*dir[il][in][jn];
1784 dbl alpha1, alpha2, alpha3;
1785 dbl err1, err2, err3;
1787 int icount, il,
in, jn;
1794 printf(
" entry LineSearchHyb \n");
1801 w0 = (
dbl ***) malloc((
NET.Nlayer-1)*
sizeof(
dbl**));
1802 for(il=1; il<
NET.Nlayer-1; il++)
1804 w0[il] = (
dbl **) malloc(
NET.Nneur[il]*
sizeof(
dbl*));
1805 for(in=0; in<
NET.Nneur[il]; in++)
1807 w0[il][
in] = (
dbl *) malloc((
NET.Nneur[il-1]+1)*
1809 for(jn=0; jn<=
NET.Nneur[il-1]; jn++)
1811 w0[il][
in][jn] =
NET.Weights[il][
in][jn];
1819 if(
NET.Debug>=4) printf(
"LinesearchHyb err depart= %f\n",err1);
1826 if(alpha2 < 0.01) alpha2 = 0.01;
1827 if(alpha2 > 2.0) alpha2 = 2.0;
1840 for(icount=1;icount<=100;icount++)
1842 alpha3 = alpha3*
tau;
1846 if(err3>err2)
break;
1861 for(icount=1;icount<=100;icount++)
1863 alpha2 = alpha2/
tau;
1867 if(err1>err2)
break;
1881 *alpmin = 0.5*(alpha1+alpha3-(err3-err1)/((err3-err2)/(alpha3-alpha2)
1882 -(err2-err1)/(alpha2-alpha1)));
1883 if(*alpmin>10000) *alpmin=10000;
1887 LastAlpha = *alpmin;
1890 for(il=1; il<
NET.Nlayer-1; il++)
1891 for(in=0; in<
NET.Nneur[il]; in++)
1892 for(jn=0; jn<=
NET.Nneur[il-1]; jn++)
1893 LEARN.Odw[il][in][jn] =
NET.Weights[il][in][jn]
1896 for(il=1; il<
NET.Nlayer-1; il++)
1897 for(in=0; in<
NET.Nneur[il]; in++)
1899 for(il=1; il<
NET.Nlayer-1; il++)
1903 printf(
" exit LineSearchHyb \n");
1926 for(il=1; il<
NET.Nlayer-1; il++)
1927 for(in=0; in<
NET.Nneur[il]; in++)
1928 for(jn=0; jn<=
NET.Nneur[il-1]; jn++)
1930 NET.Weights[il][
in][jn] = w0[il][
in][jn]+
1931 alpha*dir[il][
in][jn];
1976 double err,lambda,lambda2;
1977 int Nl,M,Nhr,khr,nrhs,iret,ierr;
1978 int il,
in, inl, ipat;
1986 lambda2 =
LEARN.Alambda;
1990 Nl =
NET.Nneur[
NET.Nlayer-2] + 1;
1994 double *Work = (
double*) malloc((
int) Lwork*
sizeof(double));
1997 dpat = (
double*) malloc((
int) M*
sizeof(double));
2002 HR = (
double*) malloc((
int) Nhr*
sizeof(double));
2004 for(ipat=0;ipat<
PAT.Npat[0];ipat++)
2019 dpat[ipat] = (
dbl)
PAT.Rans[0][ipat][0]*
sqrt(
PAT.Pond[0][ipat]);
2022 for(in=0;in<
NET.Nneur[il];in++)
2024 khr = M *(in+1) + ipat;
2025 HR[khr] =
NET.Outn[il][
in]*
2030 lambda =
sqrt(lambda2);
2031 for(ipat=0;ipat<=
NET.Nneur[il];ipat++)
2033 dpat[ipat+
PAT.Npat[0]] = 0;
2034 for(in=0;in<=
NET.Nneur[il];in++)
2036 khr = M *in + ipat +
PAT.Npat[0];
2038 if(in==ipat) HR[khr]=lambda;
2044 printf(
"entry ResLin, err=MLP_Test(0,0), err= %f\n",err);
2050 ierr =
dgels_(&Trans,&M,&Nl,&nrhs,HR,&M,dpat,&M,Work,
2052 if(iret != 0) printf(
"Warning from dgels: iret = %d\n",(
int)iret);
2053 if(ierr != 0) printf(
"Warning from dgels: ierr = %d\n",(
int)ierr);
2061 for (inl=0; inl<=
NET.Nneur[il-1];inl++)
2063 NET.Weights[il][0][inl] = dpat[inl];
2068 printf(
"ResLin, apres tlsfor, err= %f\n",err);
2111 index[
ii] = index[
i];
2133 return mini+(maxi-mini)*
random()/RAND_MAX;
2151 for(ilayer=1;ilayer<
NET.Nlayer;ilayer++)
2152 for(ineur=0;ineur<
NET.Nneur[ilayer];ineur++)
2153 for(i=0;i<=
NET.Nneur[ilayer-1];i++)
2154 NET.Weights[ilayer][ineur][i]=
2172 for(ilayer=1; ilayer<
NET.Nlayer; ilayer++)
2176 printf(
"Couche %d\n",ilayer);
2180 printf(
"Layer %d\n",ilayer);
2182 for(ineur=0; ineur<
NET.Nneur[ilayer]; ineur++)
2186 printf(
"Neurone %d",ineur);
2190 printf(
"Neuron %d",ineur);
2192 for(i=0; i<=
NET.Nneur[ilayer-1]; i++)
2195 (
double)
NET.Weights[ilayer][ineur][i]);
2230 int *inet,
int *ilearn,
int *iexamples)
2233 char otherfile[
CLEN];
2247 int nlayer, nneur[
NLMAX];
2249 printf(
"\nLoading file %s\n",filename);
2250 LVQpat=fopen(filename,
"r");
2251 if(LVQpat ==
nullptr)
return -1;
2255 while(fgets(s,
CLEN,LVQpat))
2261 printf(
"Number of neurons %s",s);
2263 sscanf(s,
"%s %s",cc,s2);
2265 if(ierr != 0)
return ierr;
2267 if(ierr != 0)
return ierr;
2271 sscanf(s,
"%s %d",cc,&l);
2275 printf(
"Number of patterns %d\n",np);
2277 else if(*(cc+1)==
'I')
2281 printf(
"Number of inputs %d\n",nin);
2283 else if(*(cc+1)==
'O' && *(cc+2)==
'U')
2287 printf(
"Number of outputs %d\n",nout);
2289 else if(*(cc+1)==
'O' && *(cc+2)==
'R')
2292 if(l==1) printf(
"Normalize inputs\n");
2295 else if(*(cc+1)==
'L')
2297 printf(
"NLAY datacard is no longer needed\n");
2299 else if(*(cc+1)==
'E')
2302 printf(
"Number of epochs %d\n",l);
2304 else if(*(cc+1)==
'R')
2308 "Reset to steepest descent every %d epochs\n",
2317 sscanf(s,
"%s %le",cc,&p);
2318 printf(
"Learning parameter %f\n",p);
2321 else if(*(s+1)==
'M')
2324 sscanf(s,
"%s %d",cc,&(
LEARN.Meth));
2325 printf(
"Learning method = ");
2328 case 1: printf(
"Stochastic Minimization\n");
2330 case 2: printf(
"Steepest descent with fixed step\n");
2332 case 3: printf(
"Steepest descent with line search\n");
break;
2333 case 4: printf(
"Polak-Ribiere Conjugate Gradients\n");
break;
2334 case 5: printf(
"Fletcher-Reeves Conjugate Gradients\n");
2336 case 6: printf(
"BFGS\n");
2338 case 7: printf(
"Hybrid BFGS-linear\n");
2340 default: printf(
"Error: unknown method\n");
break;
2344 else if(*(s+1)==
'T')
2346 sscanf(s,
"%s %lf",cc,&p);
2347 printf(
"Tau %f\n",p);
2350 else if(*(s+1)==
'A')
2352 sscanf(s,
"%s %lf",cc,&p);
2353 printf(
"Lambda %f\n",p);
2361 sscanf(s,
"%s %le",cc,&p);
2362 printf(
"Flat spot elimination parameter %f\n",p);
2365 else if(*(s+1)==
'I')
2367 sscanf(s,
"%s %s",cc,otherfile);
2368 ierr =
ReadPatterns(otherfile,ifile, inet, ilearn, iexamples);
2369 if(ierr != 0)
return ierr;
2374 sscanf(s,
"%s %le",cc,&p);
2375 printf(
"Momentum term %f\n",p);
2382 sscanf(s,
"%s %d",cc,&OutputWeights);
2383 if(OutputWeights == 0)
2385 printf(
"Never write file weights.out\n");
2387 else if(OutputWeights == -1)
2389 printf(
"Write weights to output file at the end\n");
2393 printf(
"Write weights to file every %d epochs\n",
2397 else if(*(s+3)==
'F')
2399 sscanf(s,
"%s %s",cc,cc2);
2400 if(*cc2==
'F' || *cc2==
'C')
2406 printf(
" *** Error while loading file %s at line %s :",
2408 printf(
" unknown language\n");
2413 printf(
" *** Error while loading file %s at line %s\n",
2419 sscanf(s,
"%s %d",cc,&(
NET.Rdwt));
2422 printf(
"Random weights \n");
2426 printf(
"Read weights from file weights.in\n");
2431 sscanf(s,
"%s %d",cc,&(
DIVERS.Stat));
2440 sscanf(s,
"%s %d",cc,&(
DIVERS.Ihess));
2446 sscanf(s,
"%s %le",cc,&p);
2448 printf(
"Learning parameter decay %f\n",
2449 (
double)
LEARN.Decay);
2453 sscanf(s,
"%s %d",cc,&(
DIVERS.Dbin));
2454 printf(
"Fill histogram every %d epochs\n",
DIVERS.Dbin);
2458 sscanf(s,
"%s %d",cc,&(
NET.Debug));
2459 printf(
"Debug mode %d\n",
NET.Debug);
2467 sscanf(s,
"%s %d",cc,&(
PAT.Iponde));
2471 ss = (
char**) malloc((npon+1)*
sizeof(
char*));
2472 for(i=0;i<=npon;i++)
2473 ss[i]=(
char*) malloc(40*
sizeof(
char));
2475 sscanf(ss[1],
"%d",&(
PAT.Iponde));
2478 sscanf(ss[i],
"%le",&(
PAT.Ponds[i-2]));
2496 if(nin==0)
return 2;
2497 if(nout==0)
return 3;
2505 if(ierr != 0)
return ierr;
2530 if(nout>nin) nmax=
nout;
2531 ss = (
char**) malloc((nmax+1)*
sizeof(
char*));
2532 if(ss ==
nullptr)
return -111;
2533 for(i=0;i<=nmax;i++)
2535 ss[
i]=(
char*) malloc(40*
sizeof(
char));
2536 if(ss[i] ==
nullptr)
return -111;
2545 sscanf(ss[i],
"%le",&p);
2555 sscanf(ss[i],
"%le",&p);
2577 printf(
"%d examples loaded \n\n",
PAT.Npat[ifile]);
2592 if (strtok_r(tmp,
" ",&saveptr))
2595 while (strtok_r(
nullptr,
" ",&saveptr)) i++;
2610 strcpy(ss[0],strtok_r(tmp,
" ",&saveptr));
2612 strcpy(ss[i],strtok_r(
nullptr,
" ",&saveptr));
2627 strcpy(ss[0],strtok_r(tmp,
" ",&saveptr));
2629 strcpy(ss[i],strtok_r(
nullptr,
" ",&saveptr));
2646 if(LearnMemory==0)
return;
2648 for(il=0; il<
NET.Nlayer; il++)
2650 for(in=0; in<
NET.Nneur[il]; in++)
2657 if(BFGSMemory==0)
return;
2659 for(il=0; il<
NET.Nweights; il++)
2695 dir = (
dbl ***) malloc(
NET.Nlayer*
sizeof(
dbl**));
2696 if(dir ==
nullptr)
return -111;
2698 for(il=0; il<
NET.Nlayer; il++)
2700 dir[il] = (
dbl **) malloc(
NET.Nneur[il]*
sizeof(
dbl*));
2701 if(dir[il] ==
nullptr)
return -111;
2702 for(in=0; in<
NET.Nneur[il]; in++)
2707 dir[0][
in] = (
dbl *)
2708 malloc(101*
sizeof(
dbl));
2709 if(dir[0][in] ==
nullptr)
return -111;
2713 dir[il][
in] = (
dbl *)
2714 malloc((
NET.Nneur[il-1]+1)*
sizeof(
dbl));
2715 if(dir[il][in] ==
nullptr)
return -111;
2716 Nweights +=
NET.Nneur[il-1]+1;
2720 NET.Nweights = Nweights;
2722 if(BFGSMemory==0 &&
LEARN.Meth>= 6)
2725 Gamma = (
dbl*) malloc(Nweights*
sizeof(
dbl));
2726 delta = (
dbl*) malloc(Nweights*
sizeof(
dbl));
2727 BFGSH = (
dbl**) malloc(Nweights*
sizeof(
dbl*));
2728 if(Gamma ==
nullptr || delta ==
nullptr || BFGSH ==
nullptr)
2731 for(i=0; i<Nweights; i++)
2733 BFGSH[
i] = (
dbl*) malloc(Nweights*
sizeof(
dbl));
2734 if(BFGSH[i] ==
nullptr)
return -111;
2777 W=fopen(filename,
"w");
2778 if(W==
nullptr)
return -1;
2779 fprintf(W,
" SUBROUTINE RNNFUN(rin,rout)\n");
2780 fprintf(W,
" DIMENSION RIN(%d)\n",
NET.Nneur[0]);
2781 fprintf(W,
" DIMENSION ROUT(%d)\n",
NET.Nneur[
NET.Nlayer-1]);
2784 for(in=0; in<
NET.Nneur[0]; in++)
2788 fprintf(W,
" OUT%d = RIN(%d)\n",in+1,in+1);
2792 fprintf(W,
" OUT%d = (RIN(%d)-%e)/%e\n",in+1,in+1,
2796 for(il=1; il<
NET.Nlayer-1; il++)
2799 fprintf(W,
"C layer %d\n",il+1);
2800 for(in=0; in<
NET.Nneur[il]; in++)
2802 fprintf(W,
" RIN%d = %e\n",in+1,
2803 (
double)
NET.Weights[il][in][0]);
2804 for(jn=1;jn<=
NET.Nneur[il-1]; jn++)
2805 fprintf(W,
" > +(%e) * OUT%d\n",
2806 (
double)
NET.Weights[il][in][jn],jn);
2809 for(in=0; in<
NET.Nneur[il]; in++)
2811 if(
NET.T_func[il][in]==0)
2813 fprintf(W,
" OUT%d = 0\n",in+1);
2815 else if(
NET.T_func[il][in]==1)
2817 fprintf(W,
" OUT%d = RIN%d\n",in+1,in+1);
2819 else if(
NET.T_func[il][in]==2)
2821 fprintf(W,
" OUT%d = SIGMOID(RIN%d)\n",
2828 fprintf(W,
"C layer %d\n",il+1);
2829 for(in=0; in<
NET.Nneur[il]; in++)
2831 fprintf(W,
" RIN%d = %e\n",in+1,
2832 (
double)
NET.Weights[il][in][0]);
2833 for(jn=1;jn<=
NET.Nneur[il-1]; jn++)
2834 fprintf(W,
" > +(%e) * OUT%d\n",
2835 (
double)
NET.Weights[il][in][jn],jn);
2838 for(in=0; in<
NET.Nneur[il]; in++)
2840 if(
NET.T_func[il][in]==0)
2842 fprintf(W,
" ROUT(%d) = 0\n",in+1);
2844 else if(
NET.T_func[il][in]==1)
2846 fprintf(W,
" ROUT(%d) = RIN%d\n",in+1,in+1);
2848 else if(
NET.T_func[il][in]==2)
2850 fprintf(W,
" ROUT(%d) = SIGMOID(RIN%d)\n",
2856 fprintf(W,
" END\n");
2857 fprintf(W,
" REAL FUNCTION SIGMOID(X)\n");
2858 fprintf(W,
" SIGMOID = 1./(1.+EXP(-X))\n");
2859 fprintf(W,
" END\n");
2886 W=fopen(filename,
"w");
2887 if(W==
nullptr)
return -1;
2889 fprintf(W,
"double sigmoid(double x)\n");
2891 fprintf(W,
"return 1/(1+exp(-x));\n");
2893 fprintf(W,
"void rnnfun(double *rin,double *rout)\n");
2895 fprintf(W,
" double out1[%d];\n",
NET.Nneur[0]);
2896 fprintf(W,
" double out2[%d];\n",
NET.Nneur[1]);
2897 if(
NET.Nlayer>=3) fprintf(W,
" double out3[%d];\n",
NET.Nneur[2]);
2898 if(
NET.Nlayer>=4) fprintf(W,
" double out4[%d];\n",
NET.Nneur[3]);
2901 for(in=0; in<
NET.Nneur[0]; in++)
2905 fprintf(W,
" out1[%d] = rin[%d];\n",in,in);
2909 fprintf(W,
" out1[%d] = (rin[%d]-%e)/%e;\n",
2915 for(il=1; il<=
NET.Nlayer-1; il++)
2918 fprintf(W,
"/* layer %d */\n",il+1);
2919 for(in=0; in<
NET.Nneur[il]; in++)
2921 fprintf(W,
" out%d[%d] = %e\n",il+1,in,
2922 (
double)
NET.Weights[il][in][0]);
2923 for(jn=1;jn<=
NET.Nneur[il-1]; jn++)
2924 fprintf(W,
" +(%e) * out%d[%d]\n",
2925 (
double)
NET.Weights[il][in][jn],il,jn-1);
2929 for(in=0; in<
NET.Nneur[il]; in++)
2931 if(
NET.T_func[il][in]==0)
2933 fprintf(W,
" out%d[%d] = 0;\n",il+1,in);
2935 else if(
NET.T_func[il][in]==1)
2938 else if(
NET.T_func[il][in]==2)
2940 fprintf(W,
" out%d[%d] = sigmoid(out%d[%d]);\n",
2946 for(in=0; in<
NET.Nneur[il]; in++)
2948 fprintf(W,
" rout[%d] = out%d[%d];\n",in,il+1,in);
2980 W=fopen(filename,
"w");
2981 if(W==
nullptr)
return -1;
2983 fprintf(W,
"# network structure ");
2984 for(ilayer=0; ilayer<
NET.Nlayer; ilayer++)
2986 fprintf(W,
"%d ",
NET.Nneur[ilayer]);
2989 fprintf(W,
"\n %d\n",iepoch);
2990 for(ilayer=1; ilayer<
NET.Nlayer; ilayer++)
2992 for(ineur=0; ineur<
NET.Nneur[ilayer]; ineur++)
2994 for(i=0; i<=
NET.Nneur[ilayer-1]; i++)
2996 fprintf(W,
" %1.15e\n",
2997 (
double)
NET.Weights[ilayer][ineur][i]);
3032 W=fopen(filename,
"r");
3033 if(W==
nullptr)
return -1;
3039 sscanf(s,
" %d",iepoch);
3040 for(ilayer=1; ilayer<
NET.Nlayer; ilayer++)
3042 for(ineur=0; ineur<
NET.Nneur[ilayer]; ineur++)
3044 for(i=0; i<=
NET.Nneur[ilayer-1]; i++)
3046 fscanf(W,
" %le",&p);
3047 NET.Weights[ilayer][ineur][
i] = (
dbl) p;
3087 if(ifile>1 || ifile<0)
return(1);
3089 if(ExamplesMemory==0)
3096 if(
PAT.Pond ==
nullptr ||
PAT.Rin ==
nullptr 3097 ||
PAT.Rans ==
nullptr ||
PAT.vRin ==
nullptr)
return -111;
3102 if(iadd==0 && PatMemory[ifile]!=0)
3108 if(iadd==0 || PatMemory[ifile]==0)
3110 PatMemory[
ifile] = 1;
3112 if(
PAT.Pond[ifile] ==
nullptr)
return -111;
3113 for(j=0; j<npat; j++)
3114 PAT.Pond[ifile][j] = 1;
3117 if(
PAT.Rin[ifile] ==
nullptr)
return -111;
3119 if(
PAT.Rans[ifile] ==
nullptr)
return -111;
3123 if(
PAT.vRin[ifile] ==
nullptr)
return -111;
3125 for(j=0; j<npat; j++)
3130 for(j=0; j<npat; j++)
3133 if(
PAT.Rans[ifile][j] ==
nullptr)
return -111;
3139 ExamplesIndex = (
int *) malloc(npat*
sizeof(
int));
3140 if(ExamplesIndex ==
nullptr)
return -111;
3141 for(j=0; j<npat; j++) ExamplesIndex[j] = j;
3150 if(tmp ==
nullptr)
return -111;
3156 for(j=
PAT.Npat[ifile];j<ntot;j++)
3160 if(PatMemory[ifile]==1) free(
PAT.Pond[ifile]);
3177 if(tmp3 ==
nullptr)
return -111;
3179 for(j=0; j<
PAT.Npat[
ifile]*(nin+1); j++)
3183 if(PatMemory[ifile]==1) free(
PAT.vRin[ifile]);
3185 for(j=0; j<ntot; j++)
3192 if(tmp2 ==
nullptr)
return -111;
3197 for(j=
PAT.Npat[ifile];j<ntot;j++)
3200 if(tmp2[j] ==
nullptr)
return -111;
3202 if(PatMemory[ifile]==1) free(
PAT.Rans[ifile]);
3205 PatMemory[
ifile] = 1;
3210 free(ExamplesIndex);
3211 ExamplesIndex = (
int *) malloc(ntot*
sizeof(
int));
3212 if(ExamplesIndex ==
nullptr)
return -111;
3213 for(j=0; j<ntot; j++) ExamplesIndex[j] = j;
3239 if(ifile>1 || ifile<0)
return 1;
3241 if(PatMemory[ifile]==0)
return 2;
3243 free(
PAT.Pond[ifile]);
3247 free(
PAT.Rans[ifile][i]);
3249 free(
PAT.Rin[ifile]);
3250 free(
PAT.Rans[ifile]);
3251 free(
PAT.vRin[ifile]);
3252 PatMemory[
ifile] = 0;
3285 fmean = (
dbl*) malloc(Ninputs*
sizeof(
dbl));
3287 if(Nexamples<100) nmax=Nexamples;
3289 for(j=0;j<Ninputs;j++)
3292 for(ipat=0;ipat<nmax;ipat++)
3294 fmean[j] += (
dbl) inputs[ipat][j];
3296 fmean[j] = fmean[j]/(
dbl) nmax;
3302 maximum[j] = -99999;
3303 for(ipat=0;ipat<Nexamples;ipat++)
3305 mean[j] += (
dbl) inputs[ipat][j];
3306 sigma[j] += ((
dbl) inputs[ipat][j]-fmean[j])*
3307 ((
dbl) inputs[ipat][j]-fmean[j]);
3308 if((
dbl) inputs[ipat][j] > maximum[j])
3309 maximum[j]=(
dbl) inputs[ipat][j];
3310 if((
dbl) inputs[ipat][j] < minimum[j])
3311 minimum[j]=(
dbl) inputs[ipat][j];
3313 mean[j] = mean[j]/(
dbl) Nexamples;
3314 sigma[j] =
sqrt(sigma[j]/ (
dbl) Nexamples -
3316 (mean[j]-fmean[j]));
3341 mean = (
dbl *) malloc(
NET.Nneur[0]*
sizeof(
dbl));
3342 sigma = (
dbl *) malloc(
NET.Nneur[0]*
sizeof(
dbl));
3343 minimum = (
dbl *) malloc(
NET.Nneur[0]*
sizeof(
dbl));
3344 maximum = (
dbl *) malloc(
NET.Nneur[0]*
sizeof(
dbl));
3347 if(mean && sigma && minimum && maximum) {
3351 printf(
"\t mean \t\t RMS \t\t min \t\t max\n");
3352 for(j=0;j<
NET.Nneur[0];j++)
3354 printf(
"var%d \t %e \t %e \t %e \t %e\n",j+1,
3355 mean[j],sigma[j],minimum[j],maximum[j]);
3388 mean = (
dbl *) malloc(
NET.Nneur[0]*
sizeof(
dbl));
3389 sigma = (
dbl *) malloc(
NET.Nneur[0]*
sizeof(
dbl));
3392 minimum = (
dbl *) malloc(
NET.Nneur[0]*
sizeof(
dbl));
3393 maximum = (
dbl *) malloc(
NET.Nneur[0]*
sizeof(
dbl));
3396 if(mean && sigma && minimum && maximum &&
STAT.mean &&
STAT.sigma) {
3400 if(
NET.Debug>=1) printf(
"\t mean \t\t RMS \t\t min \t\t max\n");
3401 for(j=0;j<
NET.Nneur[0];j++)
3404 printf(
"var%d \t %e \t %e \t %e \t %e\n",j+1,
3405 mean[j],sigma[j],minimum[j],maximum[j]);
3408 STAT.mean[j] = mean[j];
3409 STAT.sigma[j] = sigma[j];
3412 for(ipat=0;ipat<
PAT.Npat[0];ipat++)
3414 PAT.Rin[0][ipat][j] =
3415 (
PAT.Rin[0][ipat][j]-(
float) mean[j])/
3418 for(ipat=0;ipat<
PAT.Npat[1];ipat++)
3420 PAT.Rin[1][ipat][j] =
3421 (
PAT.Rin[1][ipat][j]-(
float) mean[j])/
3432 if(
NET.Debug>=1) printf(
"\n");
/* Fragment of AllocNetwork(Nlayer, Neurons) -- signature per the index
   at the end of this chunk. Allocates every per-layer and per-neuron
   array of the global NET and LEARN structures; each failed malloc
   aborts with return code -111 (caller treats it as out-of-memory).
   Extraction residue: stray "34xx/35xx" tokens are original line
   numbers; gaps in them mean the surrounding braces and some
   statements are missing from this view. */
3458 NET.Nneur = (
int *) malloc(Nlayer*
sizeof(
int));
3459 if(
NET.Nneur ==
nullptr)
return -111;
/* per-layer pointer tables: transfer-function ids, derivatives,
   weighted sums (Inn), activations (Outn) and back-prop deltas */
3461 NET.T_func = (
int **) malloc(Nlayer*
sizeof(
int *));
3462 NET.Deriv1 = (
dbl **) malloc(Nlayer*
sizeof(
dbl *));
3463 NET.Inn = (
dbl **) malloc(Nlayer*
sizeof(
dbl *));
3464 NET.Outn = (
dbl **) malloc(Nlayer*
sizeof(
dbl *));
3465 NET.Delta = (
dbl **) malloc(Nlayer*
sizeof(
dbl *));
3466 if(
NET.T_func ==
nullptr ||
NET.Deriv1 ==
nullptr 3467 ||
NET.Inn ==
nullptr ||
NET.Outn ==
nullptr 3468 ||
NET.Delta ==
nullptr)
return -111;
/* per-neuron rows of the tables above, sized by Neurons[i] */
3470 for(i=0; i<Nlayer; i++)
3472 NET.T_func[
i] = (
int *) malloc(Neurons[i]*
sizeof(
int));
3473 NET.Deriv1[
i] = (
dbl *) malloc(Neurons[i]*
sizeof(
dbl));
3474 NET.Inn[
i] = (
dbl *) malloc(Neurons[i]*
sizeof(
dbl));
3475 NET.Outn[
i] = (
dbl *) malloc(Neurons[i]*
sizeof(
dbl));
3476 NET.Delta[
i] = (
dbl *) malloc(Neurons[i]*
sizeof(
dbl));
3477 if(
NET.T_func[i] ==
nullptr ||
NET.Deriv1[i] ==
nullptr 3478 ||
NET.Inn[i] ==
nullptr ||
NET.Outn[i] ==
nullptr 3479 ||
NET.Delta[i] ==
nullptr )
return -111;
/* weight storage (NET.Weights rows point into the flat NET.vWeights
   block) plus the learning-state mirrors in LEARN */
3482 NET.Weights = (
dbl ***) malloc(Nlayer*
sizeof(
dbl **));
3483 NET.vWeights = (
dbl **) malloc(Nlayer*
sizeof(
dbl *));
3484 LEARN.Odw = (
dbl ***) malloc(Nlayer*
sizeof(
dbl **));
3485 LEARN.ODeDw = (
dbl ***) malloc(Nlayer*
sizeof(
dbl **));
3486 LEARN.DeDw = (
dbl ***) malloc(Nlayer*
sizeof(
dbl **));
3487 if(
NET.Weights ==
nullptr ||
NET.vWeights ==
nullptr 3488 ||
LEARN.Odw ==
nullptr ||
LEARN.ODeDw ==
nullptr 3489 ||
LEARN.DeDw ==
nullptr)
return -111;
/* layer 0 has no incoming weights: weight loop starts at i=1.
   k is set on a missing line (3492/3493); given the row stride j*k
   below it is presumably Neurons[i-1]+1 (inputs plus bias) -- TODO
   confirm. */
3491 for(i=1; i<Nlayer; i++)
3494 NET.vWeights[
i] = (
dbl *) malloc(k * Neurons[i] *
3496 NET.Weights[
i] = (
dbl **) malloc(Neurons[i]*
sizeof(
dbl *));
3497 LEARN.Odw[
i] = (
dbl **) malloc(Neurons[i]*
sizeof(
dbl *));
3498 LEARN.ODeDw[
i] = (
dbl **) malloc(Neurons[i]*
sizeof(
dbl *));
3499 LEARN.DeDw[
i] = (
dbl **) malloc(Neurons[i]*
sizeof(
dbl *));
3500 if(
NET.Weights[i] ==
nullptr ||
NET.vWeights[i] ==
nullptr 3501 ||
LEARN.Odw[i] ==
nullptr ||
LEARN.ODeDw[i] ==
nullptr 3502 ||
LEARN.DeDw[i] ==
nullptr)
return -111;
/* each weight row j is a view into the flat vWeights[i] block at
   offset j*k; the per-row LEARN allocations checked below sit on
   missing lines 3507-3509 */
3504 for(j=0; j<Neurons[
i]; j++)
3506 NET.Weights[
i][j] = &(
NET.vWeights[
i][j*
k]);
3510 if(
LEARN.Odw[i][j] ==
nullptr 3511 ||
LEARN.ODeDw[i][j] ==
nullptr 3512 ||
LEARN.DeDw[i][j] ==
nullptr)
return -111;
/* Fragment of the network-freeing routine, the mirror of AllocNetwork:
   releases per-neuron LEARN rows, then the per-layer weight blocks,
   then the per-layer NET tables. Gaps in the embedded 35xx numbering
   hide some statements (e.g. the expected free(LEARN.Odw[i]) around
   line 3547 and the frees of Inn/Outn/Delta after 3561) -- TODO
   confirm they exist upstream. */
3536 for(i=1; i<
NET.Nlayer; i++)
3538 for(j=0; j<
NET.Nneur[
i]; j++)
3541 free(
LEARN.Odw[i][j]);
3542 free(
LEARN.ODeDw[i][j]);
3543 free(
LEARN.DeDw[i][j]);
/* vWeights[i] is the flat storage; Weights[i] only held row pointers
   into it, so freeing both releases all weight memory of layer i */
3545 free(
NET.vWeights[i]);
3546 free(
NET.Weights[i]);
3548 free(
LEARN.ODeDw[i]);
3549 free(
LEARN.DeDw[i]);
/* per-layer tables exist for every layer including layer 0 */
3558 for(i=0; i<
NET.Nlayer; i++)
3560 free(
NET.T_func[i]);
3561 free(
NET.Deriv1[i]);
/* Fragment of GetNetStructure(s, Nlayer, Nneur) -- signature per the
   index at the end of this chunk. Parses a comma-separated layer
   description string such as "2,10,1": validates length, counts the
   tokens (reusing strtok_r with a thread-safe save pointer), rejects
   more than NLMAX layers, then re-scans to fill Nneur[].
   Error codes: -1 empty string, -2 longer than 1024, -3 too many
   layers. tmp is presumably a mutable copy of s made on a missing
   line -- strtok_r modifies its argument. */
3599 if(strlen(s)==0)
return -1;
3600 if(strlen(s)>1024)
return -2;
/* first pass: count comma-separated tokens */
3604 if (strtok_r(tmp,
",",&saveptr))
3607 while (strtok_r(
nullptr,
",",&saveptr)) i++;
3610 if(i >
NLMAX)
return -3;
/* second pass: convert each token to a neuron count */
3615 sscanf(strtok_r(tmp,
",",&saveptr),
"%d",&(Nneur[0]));
3616 for (i=1;i<*Nlayer;i++)
3617 sscanf(strtok_r(
nullptr,
",",&saveptr),
"%d",&(Nneur[i]));
/* Fragment of MLP_SetNet(nl, nn) -- signature per the index at the end
   of this chunk. Validates the requested layer count (must be between
   2 and NLMAX inclusive; returns 1 / 2 otherwise), propagates an
   allocation error code ierr (produced on a missing line, presumably
   by AllocNetwork), then copies the per-layer neuron counts into the
   global NET structure. */
3646 if((*nl)>
NLMAX)
return(1);
3647 if((*nl)<2)
return(2);
3652 if(ierr != 0)
return ierr;
3658 for(il=0; il<
NET.Nlayer; il++) {
3659 NET.Nneur[il] = nn[il];
/* Fragment of MLP_MatrixVectorBias(M, v, r, n, m) -- signature per the
   index at the end of this chunk. Computes r = M*v where each of the
   n rows of M has m+1 entries, the leading entry being a bias term
   (visible from the m+1 row stride and from &(M[i*(m+1)]) below).
   Hand-unrolled: four rows per outer iteration via pM1..pM4, two
   columns per inner iteration via c and d, with scalar tail loops for
   the leftover columns and (after line 3726) the leftover rows. */
3688 dbl a1, a2, a3, a4,
c,
d;
/* row pointers: pM2..pM4 start one, two, three rows after pM1 */
3690 dbl *pM2 = &(M[m+1]);
3691 dbl *pM3 = &(M[2*(m+1)]);
3692 dbl *pM4 = &(M[3*(m+1)]);
/* outer loop advances all four row pointers by 3 rows + tail (mp1
   is presumably m+1, set on a missing line -- TODO confirm) */
3697 i+=4, pM1 += 3*mp1, pM2 += 3*mp1, pM3 += 3*mp1, pM4 += 3*mp1,
/* skip the bias entry consumed on the missing lines just above */
3704 pM1++; pM2++; pM3++; pM4++;
3705 for(j=0; j<m-1; j+=2, pM1+=2, pM2+=2, pM3+=2, pM4+=2)
3709 a1 = a1 + *pM1 * c + *(pM1+1) * d;
3710 a2 = a2 + *pM2 * c + *(pM2+1) * d;
3711 a3 = a3 + *pM3 * c + *(pM3+1) * d;
3712 a4 = a4 + *pM4 * c + *(pM4+1) * d;
/* tail for odd m: one remaining column */
3714 for(; j<
m; j++, pM1++, pM2++, pM3++, pM4++)
3722 *pr = a1; *(pr+1) = a2; *(pr+2) = a3; *(pr+3) = a4;
/* remaining rows (n not divisible by 4), one at a time */
3726 pM1 = &(M[i*(m+1)]);
3729 for(j=0; j<
m; j++, pM1++)
3731 a1 = a1 + *pM1 * v[j];
/* Fragment of MLP_MatrixVector(M, v, r, n, m) -- signature per the
   index at the end of this chunk. Same 4-row / 2-column unrolled
   matrix-vector product as MLP_MatrixVectorBias, but rows have m
   entries and no leading bias term (row stride m in pM3/pM4 below). */
3754 dbl a1, a2, a3, a4,
c,
d;
/* row pointers two and three rows into M; pM1/pM2 are initialized on
   missing lines 3755-3756 */
3757 dbl *pM3 = &(M[2*
m]);
3758 dbl *pM4 = &(M[3*
m]);
/* outer loop advances all four row pointers by 3 rows + tail (mp1 is
   presumably m+1 here too, matching the bias variant -- TODO confirm
   against the missing declaration lines) */
3763 i+=4, pM1 += 3*mp1, pM2 += 3*mp1, pM3 += 3*mp1, pM4 += 3*mp1,
3770 for(j=0; j<m-1; j+=2, pM1+=2, pM2+=2, pM3+=2, pM4+=2)
3774 a1 = a1 + *pM1 * c + *(pM1+1) * d;
3775 a2 = a2 + *pM2 * c + *(pM2+1) * d;
3776 a3 = a3 + *pM3 * c + *(pM3+1) * d;
3777 a4 = a4 + *pM4 * c + *(pM4+1) * d;
/* tail for odd m: one remaining column */
3779 for(; j<
m; j++, pM1++, pM2++, pM3++, pM4++)
3787 *pr = a1; *(pr+1) = a2; *(pr+2) = a3; *(pr+3) = a4;
/* remaining rows (n not divisible by 4), one at a time */
3793 for(j=0; j<
m; j++, pM1++)
3795 a1 = a1 + *pM1 * v[j];
/* Fragment of MLP_MM2rows(c, a, b, Ni, Nj, Nk, NaOffs, NbOffs) --
   signature per the index at the end of this chunk; the first three
   parameters (dbl *c, type_pat *a, dbl *b) are on a line lost before
   this fragment. Multiplies two rows of a (row stride NaOffs) by b
   (row stride NbOffs), accumulating a 2x2 tile s00/s01/s10/s11 per
   pair of output columns; the second loop handles the last column
   when Nj is odd. pa0/pa1 and pc0/pc1 are set on missing lines. */
3821 int Ni,
int Nj,
int Nk,
int NaOffs,
int NbOffs)
3825 dbl s00,s01,s10,s11;
3827 dbl *pb0,*pb1,*pc0,*pc1;
/* main loop: two output columns (j, j+1) per iteration */
3829 for (j=0; j<=Nj-2; j+=2)
3833 s00 = 0.0; s01 = 0.0; s10 = 0.0; s11 = 0.0;
3835 for (k=0,pb0=b+k+NbOffs*j,
3836 pb1=b+k+NbOffs*(j+1),
/* dot products of both a-rows with both b-columns */
3845 s00 += (*pa0)*(*pb0);
3846 s01 += (*pa0)*(*pb1);
3847 s10 += (*pa1)*(*pb0);
3848 s11 += (*pa1)*(*pb1);
3850 *pc0 = s00; *(pc0+1) = s01; *pc1 = s10; *(pc1+1) = s11;
/* tail: single remaining output column when Nj is odd */
3856 s00 = 0.0; s10 = 0.0;
3857 for (k=0,pb0=b+k+NbOffs*j,
3865 s00 += (*pa0)*(*pb0);
3866 s10 += (*pa1)*(*pb0);
3868 *pc0 = s00; *pc1 = s10;
dbl MLP_Epoch(int iepoch, dbl *alpmin, int *Ntest)
void getnLexemes(int n, char *s, char **ss)
dbl MLP_Test_MM(int ifile, dbl *tmp)
int ReadPatterns(char *filename, int ifile, int *inet, int *ilearn, int *iexamples)
int DecreaseSearch(dbl *alpmin, int *Ntest, dbl Err0)
int SaveWeights(char *filename, int iepoch)
int dgels_(char *trans, int *m, int *n, int *nrhs, double *a, int *lda, double *b, int *ldb, double *work, int *lwork, int *info)
void MLP_Out(type_pat *rrin, dbl *rrout)
int MLP_SetNet(int *nl, int *nn)
The signals that services can subscribe to: this is based on ActivityRegistry and is current as of this writing. Services can connect to the signals distributed by the ActivityRegistry in order to monitor the activity of the application. Each possible callback has a defined signature, which we list here in angle brackets, e.g.
int MLP_PrCFun(char *filename)
int MLP_StatInputs(int Nexamples, int Ninputs, type_pat **inputs, dbl *mean, dbl *sigma, dbl *minimum, dbl *maximum)
void SetLambda(double Wmax)
int GetBFGSH(int Nweights)
int GetNetStructure(char *s, int *Nlayer, int *Nneur)
void MLP_Out_T(type_pat *rrin)
void MLP_vSigmoideDeriv(dbl *x, dbl *dy, int n)
int MLP_Train(int *ipat, dbl *err)
int ShuffleExamples(int n, int *index)
double MLP_Rand(dbl mini, dbl maxi)
int DeDwSum(type_pat *ans, dbl *out, int ipat)
int CountLexemes(char *s)
struct net_ net_ MLP_HIDDEN
int LoadWeights(char *filename, int *iepoch)
void MLP_LineHyb(dbl ***w0, dbl alpha)
int AllocPatterns(int ifile, int npat, int nin, int nout, int iadd)
int SetTransFunc(int layer, int neuron, int func)
void InitBFGSH(int Nweights)
void MLP_MM2rows(dbl *c, type_pat *a, dbl *b, int Ni, int Nj, int Nk, int NaOffs, int NbOffs)
void MLP_MatrixVector(dbl *M, type_pat *v, dbl *r, int n, int m)
void DeDwScale(int Nexamples)
void MLP_Line(dbl ***w0, dbl alpha)
void MLP_MatrixVectorBias(dbl *M, dbl *v, dbl *r, int n, int m)
void getLexemes(char *s, char **ss)
void MLP_vSigmoide(dbl *x, int n)
int LineSearch(dbl *alpmin, int *Ntest, dbl Err0)
std::vector< std::vector< double > > tmp
void BFGSdir(int Nweights)
void MLP_Out2(type_pat *rrin)
int FreePatterns(int ifile)
dbl MLP_Test(int ifile, int regul)
int MLP_PrFFun(char *filename)
int AllocNetwork(int Nlayer, int *Neurons)
int LineSearchHyb(dbl *alpmin, int *Ntest)