47 nrhs,
double *
a,
int *lda,
double *
b,
int *ldb,
48 double *work,
int *lwork,
int *
info);
64 int i, il,
in,
j,
m, mp1;
72 for(j=0;j<
m;j++)
NET.Outn[0][j] = rrin[j];
75 for(i=mp1; i<=
NET.Nneur[0]; i+=4)
77 NET.Outn[0][i-1] = rrin[i-1];
78 NET.Outn[0][
i] = rrin[
i];
79 NET.Outn[0][i+1] = rrin[i+1];
80 NET.Outn[0][i+2] = rrin[i+2];
88 for(il=2; il<
NET.Nlayer; il++)
91 deriv1[il-1],
NET.Nneur[il-1]);
93 NET.Outn[il],
NET.Nneur[il],
96 for(in=0; in<
NET.Nneur[
NET.Nlayer-1]; in++)
98 deriv1[
NET.Nlayer-1][
in] = 1;
115 int i, il,
in,
j, ilm1,
m, mp1;
122 for(j=0;j<
m;j++)
NET.Outn[0][j] = rrin[j];
125 for(i=mp1; i<=
NET.Nneur[0]; i+=4)
127 NET.Outn[0][i-1] = rrin[i-1];
128 NET.Outn[0][
i] = rrin[
i];
129 NET.Outn[0][i+1] = rrin[i+1];
130 NET.Outn[0][i+2] = rrin[i+2];
138 for(il=1; il<
NET.Nlayer; il++)
141 m =
NET.Nneur[ilm1]%4;
142 for(in=0; in<
NET.Nneur[il]; in++)
144 a =
NET.Weights[il][
in][0];
146 for(j=1;j<=
m;j++) a +=
147 NET.Weights[il][in][j]*
NET.Outn[ilm1][j-1];
150 for(j=mp1; j<=
NET.Nneur[ilm1]; j+=4)
153 NET.Weights[il][
in][j+3]*
NET.Outn[ilm1][j+2]+
154 NET.Weights[il][
in][j+2]*
NET.Outn[ilm1][j+1]+
155 NET.Weights[il][
in][j+1]*
NET.Outn[ilm1][
j]+
156 NET.Weights[il][
in][
j]*
NET.Outn[ilm1][j-1];
158 switch(
NET.T_func[il][in])
162 case 1:
NET.Outn[il][
in] =
a;
164 case 0:
NET.Outn[il][
in] = 0;
188 dbl **rrout, **deriv1;
189 register dbl *prrout;
191 int nhid =
NET.Nneur[1];
201 rrout[0][0] = rrin[1];
206 rrout[0][0] = rrin[1];
207 rrout[0][1] = rrin[2];
212 rrout[0][0] = rrin[1];
213 rrout[0][1] = rrin[2];
214 rrout[0][2] = rrin[3];
219 prrout = &(rrout[0][mp1]);
220 prrin = &(rrin[mp1+1]);
221 for(i=mp1; i<=
NET.Nneur[0]; i+=4, prrout+=4, prrin+=4)
223 *(prrout-1) = *(prrin-1);
225 *(prrout+1)= *(prrin+1);
226 *(prrout+2) = *(prrin+2);
232 NET.Outn[1],nhid,nin);
237 for(il=2; il<
NET.Nlayer; il++)
241 NET.Outn[il],
NET.Nneur[il],
NET.Nneur[il-1]);
243 for(in=0; in<
NET.Nneur[
NET.Nlayer-1]; in++)
244 deriv1[
NET.Nlayer-1][in] = 1;
267 int nhid =
NET.Nneur[1];
269 int jpat,
j, il, ilm1,
m,
in, mp1;
271 dbl *pweights, *ptmp;
274 for(ipat=0; ipat<npat-1; ipat+=2)
277 NET.vWeights[1], 2, nhid, nin+1,
280 switch(
NET.T_func[1][0])
291 for(jpat=0; jpat<2; jpat++)
293 for(j=0; j<nhid; j++)
295 tmp[j+jpat*nhid] = 0;
301 for(jpat=0; jpat<2; jpat++)
303 for(in=0; in<nhid; in++)
305 NET.Outn[1][
in] = tmp[jpat*nhid+
in];
307 for(il=2; il<
NET.Nlayer; il++)
310 m =
NET.Nneur[ilm1]%4;
311 for(in=0; in<
NET.Nneur[il]; in++)
313 pweights = &(
NET.Weights[il][
in][0]);
317 for(j=1;j<=
m;j++,pweights++) a +=
318 (*pweights)*
NET.Outn[ilm1][j-1];
321 for(j=mp1; j<=
NET.Nneur[ilm1];
325 *(pweights+3)*
NET.Outn[ilm1][j+2]+
326 *(pweights+2)*
NET.Outn[ilm1][j+1]+
327 *(pweights+1)*
NET.Outn[ilm1][j]+
328 *(pweights )*
NET.Outn[ilm1][j-1];
330 switch(
NET.T_func[il][in])
334 case 1:
NET.Outn[il][
in] =
a;
336 case 0:
NET.Outn[il][
in] = 0;
340 if(il ==
NET.Nlayer-1)
342 for(in=0; in<
NET.Nneur[
NET.Nlayer-1]; in++)
344 rrans = (
dbl)
PAT.Rans[ifile][ipat+jpat][in];
345 err += (rrans-
NET.Outn[
NET.Nlayer-1][in])*
347 PAT.Pond[ifile][ipat+jpat];
355 for(ipat=ipat; ipat<npat; ipat++)
358 &(
PAT.vRin[ifile][ipat*(nin+1)]),tmp,
361 switch(
NET.T_func[1][0])
372 for(j=0; j<nhid; j++)
379 for(in=0; in<nhid; in++)
383 for(il=2; il<
NET.Nlayer; il++)
386 m =
NET.Nneur[ilm1]%4;
387 for(in=0; in<
NET.Nneur[il]; in++)
389 pweights = &(
NET.Weights[il][
in][0]);
393 for(j=1;j<=
m;j++,pweights++) a +=
394 (*pweights)*
NET.Outn[ilm1][j-1];
397 for(j=mp1; j<=
NET.Nneur[ilm1];
401 *(pweights+3)*
NET.Outn[ilm1][j+2]+
402 *(pweights+2)*
NET.Outn[ilm1][j+1]+
403 *(pweights+1)*
NET.Outn[ilm1][j]+
404 *(pweights )*
NET.Outn[ilm1][j-1];
406 switch(
NET.T_func[il][in])
410 case 1:
NET.Outn[il][
in] =
a;
412 case 0:
NET.Outn[il][
in] = 0;
416 if(il ==
NET.Nlayer-1)
418 for(in=0; in<
NET.Nneur[
NET.Nlayer-1]; in++)
420 rrans = (
dbl)
PAT.Rans[ifile][ipat][in];
421 err += (rrans-
NET.Outn[
NET.Nlayer-1][in])*
423 PAT.Pond[ifile][ipat];
449 int in,jn,ipat,ipati;
453 tmp = (
dbl *) malloc(2 *
NET.Nneur[1] *
sizeof(
dbl));
456 printf(
"not enough memory in MLP_Test\n");
458 for(ipat=0; ipat<
PAT.Npat[
ifile]; ipat++)
462 ipati = ExamplesIndex[ipat];
469 for(in=0; in<
NET.Nneur[
NET.Nlayer-1]; in++)
471 rrans = (
dbl)
PAT.Rans[ifile][ipati][in];
472 err += (rrans-
NET.Outn[
NET.Nlayer-1][in])*
474 PAT.Pond[ifile][ipati];
480 for(in=0; in<
NET.Nneur[
NET.Nlayer-1]; in++)
481 for(jn=0; jn<=
NET.Nneur[
NET.Nlayer-2]; jn++)
495 for(in=0; in<
NET.Nneur[
NET.Nlayer-1]; in++)
496 for(jn=0; jn<=
NET.Nneur[
NET.Nlayer-2]; jn++)
522 int il, in1,
in, itest2;
523 dbl deriv, deriv1, deriv2, deriv3, deriv4, pond;
525 register dbl a,
b,
dd, a1, a2, a3, a4;
526 dbl *pout, *pdelta, *pw1, *pw2, *pw3, *pw4;
529 if(
NET.Debug>=5) printf(
" Entry MLP_Stochastic\n");
530 weights =
NET.Weights;
541 for(ipat=0;ipat<
PAT.Npat[0];ipat++)
543 ii = ExamplesIndex[ipat];
544 pond =
PAT.Pond[0][
ii];
549 for(in=0; in<
NET.Nneur[
NET.Nlayer-1]; in++)
551 deriv =
NET.Deriv1[
NET.Nlayer-1][
in];
552 a = (
dbl)
PAT.Rans[0][ii][in];
553 b =
NET.Outn[
NET.Nlayer-1][in]-a;
555 NET.Delta[
NET.Nlayer-1][in] = b*deriv*pond*eta;
558 for(il=
NET.Nlayer-2; il>0; il--)
560 dd =
NET.Delta[il+1][0];
561 for(in=0; in<
NET.Nneur[il]-3; in+=4)
563 deriv1 =
NET.Deriv1[il][
in];
564 deriv2 =
NET.Deriv1[il][in+1];
565 deriv3 =
NET.Deriv1[il][in+2];
566 deriv4 =
NET.Deriv1[il][in+3];
567 itest2 = (
NET.Nneur[il+1]==1);
568 a1 = dd*weights[il+1][0][in+1];
569 a2 = dd*weights[il+1][0][in+2];
570 a3 = dd*weights[il+1][0][in+3];
571 a4 = dd*weights[il+1][0][in+4];
573 pdelta = &(
NET.Delta[il+1][1]);
574 for(in1=1; in1<
NET.Nneur[il+1];
577 a1 += *pdelta * weights[il+1][in1][in+1];
578 a2 += *pdelta * weights[il+1][in1][in+2];
579 a3 += *pdelta * weights[il+1][in1][in+3];
580 a4 += *pdelta * weights[il+1][in1][in+4];
582 L1:
NET.Delta[il][
in] = a1*deriv1;
583 NET.Delta[il][in+1] = a2*deriv2;
584 NET.Delta[il][in+2] = a3*deriv3;
585 NET.Delta[il][in+3] = a4*deriv4;
587 for(in=in; in<
NET.Nneur[il]; in++)
589 deriv =
NET.Deriv1[il][
in];
590 itest2 = (
NET.Nneur[il+1]==1);
591 a = dd*weights[il+1][0][in+1];
593 pdelta = &(
NET.Delta[il+1][1]);
594 for(in1=1; in1<
NET.Nneur[il+1];
598 weights[il+1][in1][in+1];
600 L2:
NET.Delta[il][
in] = a*deriv;
609 for(il=1; il<
NET.Nlayer; il++)
611 inm1 =
NET.Nneur[il-1];
612 for(in=0; in<
NET.Nneur[il]-3; in+=4)
614 a1 =
NET.Delta[il][
in];
615 a2 =
NET.Delta[il][in+1];
616 a3 =
NET.Delta[il][in+2];
617 a4 =
NET.Delta[il][in+3];
618 pout = &(
NET.Outn[il-1][0]);
619 weights[il][
in][0] += a1;
620 weights[il][in+1][0] += a2;
621 weights[il][in+2][0] += a3;
622 weights[il][in+3][0] += a4;
623 weights[il][
in][1] += a1* (*pout);
624 weights[il][in+1][1] += a2* (*pout);
625 weights[il][in+2][1] += a3* (*pout);
626 weights[il][in+3][1] += a4* (*pout);
628 pw1 = &(weights[il][
in][2]);
629 pw2 = &(weights[il][in+1][2]);
630 pw3 = &(weights[il][in+2][2]);
631 pw4 = &(weights[il][in+3][2]);
632 for(in1=2; in1<=inm1;
633 ++in1, ++pout, ++pw1, ++pw2,
642 for(in=in; in<
NET.Nneur[il]; in++)
644 a1 =
NET.Delta[il][
in];
645 pout = &(
NET.Outn[il-1][0]);
646 weights[il][
in][0] += a1;
647 weights[il][
in][1] += a1* (*pout);
649 pw1 = &(weights[il][
in][2]);
650 for(in1=2; in1<=inm1;
651 ++in1, ++pout, ++pw1)
660 for(il=1; il<
NET.Nlayer; il++)
662 for(in=0; in<
NET.Nneur[il]; in++)
665 a =
NET.Delta[il][
in];
669 b = a*
NET.Outn[il-1][0];
673 for(in1=2; in1<=
NET.Nneur[il-1]; in1++)
675 b = a*
NET.Outn[il-1][in1-1];
710 int Nweights, Nlinear, ipat, ierr;
716 Nweights =
NET.Nweights;
717 Nlinear =
NET.Nneur[
NET.Nlayer-2] + 1;
719 if(
NET.Debug>=5) printf(
" Entry MLP_Epoch\n");
729 if(iepoch==1 &&
LEARN.Meth==7)
743 for(ipat=0;ipat<nn;ipat++)
745 ierr =
MLP_Train(&ExamplesIndex[ipat],&err);
746 if(ierr!=0) printf(
"Epoch: ierr= %d\n",ierr);
751 for(ipat=0;ipat<
PAT.Npat[0];ipat++)
754 if(ierr!=0) printf(
"Epoch: ierr= %d\n",ierr);
768 if((iepoch-1)%
LEARN.Nreset==0)
787 if((iepoch-1)%
LEARN.Nreset==0)
796 beta =
LEARN.Norm/ONorm;
803 if((iepoch-1)%
LEARN.Nreset==0)
835 printf(
"Line search fail \n");
841 if((iepoch-1)%
LEARN.Nreset==0)
848 if(
NET.Debug>=5) printf(
"Before GetGammaDelta \n");
850 if(
NET.Debug>=5) printf(
"After GetGammaDelta \n");
852 if(
NET.Debug>=5) printf(
"After GetBFGSH \n");
862 if(
NET.Debug>=5) printf(
"After BFGSdir \n");
871 printf(
"Line search fail \n");
877 if(
NET.Debug>=5) printf(
" End MLP_Epoch\n");
900 if(*ipat<0)
return(2);
904 for(in=0; in<
NET.Nneur[
NET.Nlayer-1]; in++)
906 *err += ((
dbl)
PAT.Rans[0][*ipat][in]-
NET.Outn[
NET.Nlayer-1][in])
907 *((
dbl)
PAT.Rans[0][*ipat][in]-
NET.Outn[
NET.Nlayer-1][in])*
933 for(il=
NET.Nlayer-2; il>0; il--) {
935 for(in=0; in<
NET.Nneur[il]; in++) {
938 for(in1=0; in1<=
NET.Nneur[il-1]; in1++) {
940 + eps *
LEARN.Odw[il][
in][in1];
944 for(in1=0; in1<=
NET.Nneur[il-1]; in1++) {
973 for(il=
NET.Nlayer-1; il>0; il--) {
974 for(in1=0; in1<=
NET.Nneur[il-1]; in1++) {
977 for(in=0; in<
NET.Nneur[il]; in++) {
979 + epseta *
LEARN.Odw[il][
in][in1]);
1002 for(il=1; il<
NET.Nlayer; il++)
1003 for(in=0; in<
NET.Nneur[il]; in++)
1004 for(jn=0; jn<=
NET.Nneur[il-1]; jn++)
1005 dd +=
LEARN.DeDw[il][in][jn]*
1006 LEARN.DeDw[il][in][jn];
1024 for(il=1; il<
NET.Nlayer; il++)
1025 for(in=0; in<
NET.Nneur[il]; in++)
1026 for(jn=0; jn<=
NET.Nneur[il-1]; jn++)
1027 dd +=
LEARN.DeDw[il][in][jn]*
1028 LEARN.ODeDw[il][in][jn];
1044 for(il=1; il<
NET.Nlayer; il++)
1045 for(in=0; in<
NET.Nneur[il]; in++)
1046 for(jn=0; jn<=
NET.Nneur[il-1]; jn++)
1047 LEARN.DeDw[il][in][jn] = 0;
1063 for(il=1; il<
NET.Nlayer; il++)
1064 for(in=0; in<
NET.Nneur[il]; in++)
1065 for(jn=0; jn<=
NET.Nneur[il-1]; jn++)
1066 LEARN.DeDw[il][in][jn] /= (
dbl) Nexamples;
1081 for(il=1; il<
NET.Nlayer; il++)
1082 for(in=0; in<
NET.Nneur[il]; in++)
1083 for(jn=0; jn<=
NET.Nneur[il-1]; jn++)
1084 LEARN.ODeDw[il][in][jn] =
LEARN.DeDw[il][in][jn];
1100 for(il=1; il<
NET.Nlayer; il++)
1101 for(in=0; in<
NET.Nneur[il]; in++)
1102 for(jn=0; jn<=
NET.Nneur[il-1]; jn++)
1125 int il, in1,
in,
ii;
1128 dbl *pout, *pdedw, *pdelta;
1133 b = (
dbl)
PAT.Pond[0][ipat];
1134 for(in=0; in<
NET.Nneur[
NET.Nlayer-1]; in++)
1136 deriv =
NET.Deriv1[
NET.Nlayer-1][
in];
1138 (out[
in] - (
dbl) ans[in])*deriv*
b;
1141 for(il=
NET.Nlayer-2; il>0; il--)
1144 for(in=0; in<
NET.Nneur[il]; in++)
1146 deriv =
NET.Deriv1[il][
in];
1147 a =
NET.Delta[il+1][0] *
NET.Weights[il+1][0][in+1];
1148 pdelta = &(
NET.Delta[il+1][1]);
1149 for(in1=1; in1<
NET.Nneur[il+1]; in1++, pdelta++)
1151 a += *pdelta *
NET.Weights[il+1][in1][in+1];
1153 NET.Delta[il][
in] = a * deriv;
1157 for(il=1; il<
NET.Nlayer; il++)
1159 ii =
NET.Nneur[il-1];
1160 for(in=0; in<
NET.Nneur[il]; in++)
1162 a =
NET.Delta[il][
in];
1164 LEARN.DeDw[il][
in][1] += a *
NET.Outn[il-1][0];
1165 pout = &(
NET.Outn[il-1][1]);
1166 pdedw = &(
LEARN.DeDw[il][
in][2]);
1167 for(in1=1; in1<
ii; ++in1, ++pout, ++pdedw)
1169 (*pdedw) += a * (*pout);
1203 if(layer>
NLMAX)
return(1);
1206 NET.T_func[layer-1][neuron-1] = func;
1226 for(il=0; il<
NET.Nlayer; il++) {
1227 for(in=0; in<
NET.Nneur[il]; in++) {
1228 NET.T_func[il][
in] = 2;
1229 if(il==
NET.Nlayer-1)
NET.T_func[il][
in] = 1;
1247 for(il=1; il<
NET.Nlayer; il++)
1248 for(in=0; in<
NET.Nneur[il]; in++)
1249 for(jn=0; jn<=
NET.Nneur[il-1]; jn++)
1250 dir[il][in][jn] = -
LEARN.DeDw[il][in][jn];
1269 for(il=1; il<
NET.Nlayer; il++)
1270 for(in=0; in<
NET.Nneur[il]; in++)
1271 for(jn=0; jn<=
NET.Nneur[il-1]; jn++)
1273 dir[il][
in][jn] = -
LEARN.DeDw[il][
in][jn]+
1274 beta*dir[il][
in][jn];
1293 for(il=1; il<
NET.Nlayer; il++)
1294 for(in=0; in<
NET.Nneur[il]; in++)
1295 for(jn=0; jn<=
NET.Nneur[il-1]; jn++)
1297 ddir +=
LEARN.DeDw[il][
in][jn]*dir[il][
in][jn];
1317 for(il=1; il<
NET.Nlayer; il++)
1318 for(in=0; in<
NET.Nneur[il]; in++)
1319 for(jn=0; jn<=
NET.Nneur[il-1]; jn++)
1346 g = (
dbl*) malloc(
NET.Nweights*
sizeof(
dbl));
1347 s = (
dbl*) malloc(Nweights*
sizeof(
dbl));
1349 for(il=1; kk<Nweights; il++)
1350 for(in=0; in<
NET.Nneur[il]; in++)
1351 for(jn=0; jn<=
NET.Nneur[il-1]; jn++)
1356 for(i=0; i<Nweights; i++)
1359 for(j=0; j<Nweights; j++)
1361 s[
i] += BFGSH[
i][
j] * g[
j];
1366 for(il=1; kk<Nweights; il++)
1367 for(in=0; in<
NET.Nneur[il]; in++)
1368 for(jn=0; jn<=
NET.Nneur[il-1]; jn++)
1370 dir[il][
in][jn] = -s[
kk];
1391 for(i=0; i<Nweights; i++)
1392 for(j=0; j<Nweights; j++)
1395 if(i==j) BFGSH[
i][
j] = 1;
1420 typedef double dble;
1428 Hgamma = (dble *) malloc(Nweights*
sizeof(dble));
1429 tmp = (dble *) malloc(Nweights*
sizeof(dble));
1431 for(i=0; i<Nweights; i++)
1433 deltaTgamma += (dble) delta[i] * (dble) Gamma[
i];
1436 for(j=0; j<Nweights; j++)
1438 a += (dble) BFGSH[i][j] * (dble) Gamma[
j];
1439 b += (dble) Gamma[j] * (dble) BFGSH[
j][
i];
1443 factor += (dble) Gamma[i]*Hgamma[i];
1445 if(deltaTgamma == 0)
1451 a = 1 / deltaTgamma;
1452 factor = 1 + factor*
a;
1454 for(i=0; i<Nweights; i++)
1456 b = (dble) delta[i];
1457 for(j=0; j<Nweights; j++)
1458 BFGSH[i][j] += (
dbl) (factor*b* (dble)
1459 delta[j]-(tmp[j]*b+Hgamma[i]*(dble)delta[
j]))*
a;
1481 dbl alpha1, alpha2, alpha3;
1482 dbl err1, err2, err3;
1484 int icount, il,
in, jn;
1491 w0 = (
dbl ***) malloc(
NET.Nlayer*
sizeof(
dbl**));
1492 for(il=1; il<
NET.Nlayer; il++)
1494 w0[il] = (
dbl **) malloc(
NET.Nneur[il]*
sizeof(
dbl*));
1495 for(in=0; in<
NET.Nneur[il]; in++)
1497 w0[il][
in] = (
dbl *) malloc((
NET.Nneur[il-1]+1)*
1499 for(jn=0; jn<=
NET.Nneur[il-1]; jn++)
1501 w0[il][
in][jn] =
NET.Weights[il][
in][jn];
1512 if(
NET.Debug>=4) printf(
"err depart= %f\n",err1);
1519 if(alpha2 < 0.01) alpha2 = 0.01;
1520 if(alpha2 > 2.0) alpha2 = 2.0;
1524 if(
NET.Debug>=4) printf(
"alpha, err= %e %e\n",alpha2,err2);
1534 for(icount=1;icount<=100;icount++)
1536 alpha3 = alpha3*
tau;
1539 if(
NET.Debug>=4) printf(
"alpha, err= %e %e\n",alpha3,err3);
1541 if(err3>err2)
break;
1556 for(icount=1;icount<=100;icount++)
1558 alpha2 = alpha2/
tau;
1561 if(
NET.Debug>=4) printf(
"alpha, err= %e %e\n",alpha2,err2);
1563 if(err1>err2)
break;
1578 *alpmin = 0.5*(alpha1+alpha3-(err3-err1)/((err3-err2)/(alpha3-alpha2)
1579 -(err2-err1)/(alpha2-alpha1)));
1580 if(*alpmin>10000) *alpmin=10000;
1584 LastAlpha = *alpmin;
1587 for(il=1; il<
NET.Nlayer; il++)
1588 for(in=0; in<
NET.Nneur[il]; in++)
1589 for(jn=0; jn<=
NET.Nneur[il-1]; jn++)
1590 LEARN.Odw[il][in][jn] =
NET.Weights[il][in][jn]
1593 for(il=1; il<
NET.Nlayer; il++)
1594 for(in=0; in<
NET.Nneur[il]; in++)
1596 for(il=1; il<
NET.Nlayer; il++)
1622 int icount, il,
in, jn;
1629 w0 = (
dbl ***) malloc(
NET.Nlayer*
sizeof(
dbl**));
1630 for(il=1; il<
NET.Nlayer; il++)
1632 w0[il] = (
dbl **) malloc(
NET.Nneur[il]*
sizeof(
dbl*));
1633 for(in=0; in<
NET.Nneur[il]; in++)
1635 w0[il][
in] = (
dbl *) malloc((
NET.Nneur[il-1]+1)*
1637 for(jn=0; jn<=
NET.Nneur[il-1]; jn++)
1639 w0[il][
in][jn] =
NET.Weights[il][
in][jn];
1650 if(
NET.Debug>=4) printf(
"err depart= %f\n",err1);
1666 for(icount=1;icount<=100;icount++)
1668 alpha2 = alpha2/
tau;
1672 if(err1>err2)
break;
1687 for(il=1; il<
NET.Nlayer; il++)
1688 for(in=0; in<
NET.Nneur[il]; in++)
1689 for(jn=0; jn<=
NET.Nneur[il-1]; jn++)
1690 LEARN.Odw[il][in][jn] =
NET.Weights[il][in][jn]
1693 for(il=1; il<
NET.Nlayer; il++)
1694 for(in=0; in<
NET.Nneur[il]; in++)
1696 for(il=1; il<
NET.Nlayer; il++)
1709 w0 = (
dbl ***) malloc(
NET.Nlayer*
sizeof(
dbl**));
1710 for(il=1; il<
NET.Nlayer; il++)
1712 w0[il] = (
dbl **) malloc(
NET.Nneur[il]*
sizeof(
dbl*));
1713 for(in=0; in<
NET.Nneur[il]; in++)
1715 w0[il][
in] = (
dbl *) malloc((
NET.Nneur[il-1]+1)*
1717 for(jn=0; jn<=
NET.Nneur[il-1]; jn++)
1719 w0[il][
in][jn] =
NET.Weights[il][
in][jn];
1729 for(il=1; il<
NET.Nlayer; il++)
1730 for(in=0; in<
NET.Nneur[il]; in++)
1731 for(jn=0; jn<=
NET.Nneur[il-1]; jn++)
1732 LEARN.Odw[il][in][jn] =
NET.Weights[il][in][jn]
1735 for(il=1; il<
NET.Nlayer; il++)
1736 for(in=0; in<
NET.Nneur[il]; in++)
1738 for(il=1; il<
NET.Nlayer; il++)
1759 register int il,
in,jn;
1761 for(il=1; il<
NET.Nlayer; il++)
1762 for(in=0; in<
NET.Nneur[il]; in++)
1763 for(jn=0; jn<=
NET.Nneur[il-1]; jn++)
1764 NET.Weights[il][in][jn] = w0[il][in][jn]+
1765 alpha*dir[il][in][jn];
1784 dbl alpha1, alpha2, alpha3;
1785 dbl err1, err2, err3;
1787 int icount, il,
in, jn;
1794 printf(
" entry LineSearchHyb \n");
1801 w0 = (
dbl ***) malloc((
NET.Nlayer-1)*
sizeof(
dbl**));
1802 for(il=1; il<
NET.Nlayer-1; il++)
1804 w0[il] = (
dbl **) malloc(
NET.Nneur[il]*
sizeof(
dbl*));
1805 for(in=0; in<
NET.Nneur[il]; in++)
1807 w0[il][
in] = (
dbl *) malloc((
NET.Nneur[il-1]+1)*
1809 for(jn=0; jn<=
NET.Nneur[il-1]; jn++)
1811 w0[il][
in][jn] =
NET.Weights[il][
in][jn];
1819 if(
NET.Debug>=4) printf(
"LinesearchHyb err depart= %f\n",err1);
1826 if(alpha2 < 0.01) alpha2 = 0.01;
1827 if(alpha2 > 2.0) alpha2 = 2.0;
1840 for(icount=1;icount<=100;icount++)
1842 alpha3 = alpha3*
tau;
1846 if(err3>err2)
break;
1861 for(icount=1;icount<=100;icount++)
1863 alpha2 = alpha2/
tau;
1867 if(err1>err2)
break;
1881 *alpmin = 0.5*(alpha1+alpha3-(err3-err1)/((err3-err2)/(alpha3-alpha2)
1882 -(err2-err1)/(alpha2-alpha1)));
1883 if(*alpmin>10000) *alpmin=10000;
1887 LastAlpha = *alpmin;
1890 for(il=1; il<
NET.Nlayer-1; il++)
1891 for(in=0; in<
NET.Nneur[il]; in++)
1892 for(jn=0; jn<=
NET.Nneur[il-1]; jn++)
1893 LEARN.Odw[il][in][jn] =
NET.Weights[il][in][jn]
1896 for(il=1; il<
NET.Nlayer-1; il++)
1897 for(in=0; in<
NET.Nneur[il]; in++)
1899 for(il=1; il<
NET.Nlayer-1; il++)
1903 printf(
" exit LineSearchHyb \n");
1926 for(il=1; il<
NET.Nlayer-1; il++)
1927 for(in=0; in<
NET.Nneur[il]; in++)
1928 for(jn=0; jn<=
NET.Nneur[il-1]; jn++)
1930 NET.Weights[il][
in][jn] = w0[il][
in][jn]+
1931 alpha*dir[il][
in][jn];
1976 double err,lambda,lambda2;
1977 int Nl,M,Nhr,khr,nrhs,iret,ierr;
1978 int il,
in, inl, ipat;
1986 lambda2 =
LEARN.Alambda;
1990 Nl =
NET.Nneur[
NET.Nlayer-2] + 1;
1994 double *Work = (
double*) malloc((
int) Lwork*
sizeof(double));
1997 dpat = (
double*) malloc((
int) M*
sizeof(double));
2002 HR = (
double*) malloc((
int) Nhr*
sizeof(double));
2004 for(ipat=0;ipat<
PAT.Npat[0];ipat++)
2019 dpat[ipat] = (
dbl)
PAT.Rans[0][ipat][0]*
sqrt(
PAT.Pond[0][ipat]);
2022 for(in=0;in<
NET.Nneur[il];in++)
2024 khr = M *(in+1) + ipat;
2025 HR[khr] =
NET.Outn[il][
in]*
2030 lambda =
sqrt(lambda2);
2031 for(ipat=0;ipat<=
NET.Nneur[il];ipat++)
2033 dpat[ipat+
PAT.Npat[0]] = 0;
2034 for(in=0;in<=
NET.Nneur[il];in++)
2036 khr = M *in + ipat +
PAT.Npat[0];
2038 if(in==ipat) HR[khr]=lambda;
2044 printf(
"entry ResLin, err=MLP_Test(0,0), err= %f\n",err);
2050 ierr =
dgels_(&Trans,&M,&Nl,&nrhs,HR,&M,dpat,&M,Work,
2052 if(iret != 0) printf(
"Warning from dgels: iret = %d\n",(
int)iret);
2053 if(ierr != 0) printf(
"Warning from dgels: ierr = %d\n",(
int)ierr);
2061 for (inl=0; inl<=
NET.Nneur[il-1];inl++)
2063 NET.Weights[il][0][inl] = dpat[inl];
2068 printf(
"ResLin, apres tlsfor, err= %f\n",err);
2111 index[
ii] = index[
i];
2133 return mini+(maxi-mini)*
random()/RAND_MAX;
2151 for(ilayer=1;ilayer<
NET.Nlayer;ilayer++)
2152 for(ineur=0;ineur<
NET.Nneur[ilayer];ineur++)
2153 for(i=0;i<=
NET.Nneur[ilayer-1];i++)
2154 NET.Weights[ilayer][ineur][i]=
2172 for(ilayer=1; ilayer<
NET.Nlayer; ilayer++)
2176 printf(
"Couche %d\n",ilayer);
2180 printf(
"Layer %d\n",ilayer);
2182 for(ineur=0; ineur<
NET.Nneur[ilayer]; ineur++)
2186 printf(
"Neurone %d",ineur);
2190 printf(
"Neuron %d",ineur);
2192 for(i=0; i<=
NET.Nneur[ilayer-1]; i++)
2195 (
double)
NET.Weights[ilayer][ineur][i]);
2230 int *inet,
int *ilearn,
int *iexamples)
2233 char otherfile[
CLEN];
2247 int nlayer, nneur[
NLMAX];
2249 printf(
"\nLoading file %s\n",filename);
2250 LVQpat=fopen(filename,
"r");
2251 if(LVQpat == 0)
return -1;
2255 while(fgets(s,
CLEN,LVQpat))
2261 printf(
"Number of neurons %s",s);
2263 sscanf(s,
"%s %s",cc,s2);
2265 if(ierr != 0)
return ierr;
2267 if(ierr != 0)
return ierr;
2271 sscanf(s,
"%s %d",cc,&l);
2275 printf(
"Number of patterns %d\n",np);
2277 else if(*(cc+1)==
'I')
2281 printf(
"Number of inputs %d\n",nin);
2283 else if(*(cc+1)==
'O' && *(cc+2)==
'U')
2287 printf(
"Number of outputs %d\n",nout);
2289 else if(*(cc+1)==
'O' && *(cc+2)==
'R')
2292 if(l==1) printf(
"Normalize inputs\n");
2295 else if(*(cc+1)==
'L')
2297 printf(
"NLAY datacard is no longer needed\n");
2299 else if(*(cc+1)==
'E')
2302 printf(
"Number of epochs %d\n",l);
2304 else if(*(cc+1)==
'R')
2308 "Reset to steepest descent every %d epochs\n",
2317 sscanf(s,
"%s %le",cc,&p);
2318 printf(
"Learning parameter %f\n",p);
2321 else if(*(s+1)==
'M')
2324 sscanf(s,
"%s %d",cc,&(
LEARN.Meth));
2325 printf(
"Learning method = ");
2328 case 1: printf(
"Stochastic Minimization\n");
2330 case 2: printf(
"Steepest descent with fixed step\n");
2332 case 3: printf(
"Steepest descent with line search\n");
break;
2333 case 4: printf(
"Polak-Ribiere Conjugate Gradients\n");
break;
2334 case 5: printf(
"Fletcher-Reeves Conjugate Gradients\n");
2336 case 6: printf(
"BFGS\n");
2338 case 7: printf(
"Hybrid BFGS-linear\n");
2340 default: printf(
"Error: unknown method\n");
break;
2344 else if(*(s+1)==
'T')
2346 sscanf(s,
"%s %lf",cc,&p);
2347 printf(
"Tau %f\n",p);
2350 else if(*(s+1)==
'A')
2352 sscanf(s,
"%s %lf",cc,&p);
2353 printf(
"Lambda %f\n",p);
2361 sscanf(s,
"%s %le",cc,&p);
2362 printf(
"Flat spot elimination parameter %f\n",p);
2365 else if(*(s+1)==
'I')
2367 sscanf(s,
"%s %s",cc,otherfile);
2368 ierr =
ReadPatterns(otherfile,ifile, inet, ilearn, iexamples);
2369 if(ierr != 0)
return ierr;
2374 sscanf(s,
"%s %le",cc,&p);
2375 printf(
"Momentum term %f\n",p);
2382 sscanf(s,
"%s %d",cc,&OutputWeights);
2383 if(OutputWeights == 0)
2385 printf(
"Never write file weights.out\n");
2387 else if(OutputWeights == -1)
2389 printf(
"Write weights to output file at the end\n");
2393 printf(
"Write weights to file every %d epochs\n",
2397 else if(*(s+3)==
'F')
2399 sscanf(s,
"%s %s",cc,cc2);
2400 if(*cc2==
'F' || *cc2==
'C')
2406 printf(
" *** Error while loading file %s at line %s :",
2408 printf(
" unknown language\n");
2413 printf(
" *** Error while loading file %s at line %s\n",
2419 sscanf(s,
"%s %d",cc,&(
NET.Rdwt));
2422 printf(
"Random weights \n");
2426 printf(
"Read weights from file weights.in\n");
2431 sscanf(s,
"%s %d",cc,&(
DIVERS.Stat));
2440 sscanf(s,
"%s %d",cc,&(
DIVERS.Ihess));
2446 sscanf(s,
"%s %le",cc,&p);
2448 printf(
"Learning parameter decay %f\n",
2449 (
double)
LEARN.Decay);
2453 sscanf(s,
"%s %d",cc,&(
DIVERS.Dbin));
2454 printf(
"Fill histogram every %d epochs\n",
DIVERS.Dbin);
2458 sscanf(s,
"%s %d",cc,&(
NET.Debug));
2459 printf(
"Debug mode %d\n",
NET.Debug);
2467 sscanf(s,
"%s %d",cc,&(
PAT.Iponde));
2471 ss = (
char**) malloc((npon+1)*
sizeof(
char*));
2472 for(i=0;i<=npon;i++)
2473 ss[i]=(
char*) malloc(40*
sizeof(
char));
2475 sscanf(ss[1],
"%d",&(
PAT.Iponde));
2478 sscanf(ss[i],
"%le",&(
PAT.Ponds[i-2]));
2496 if(nin==0)
return 2;
2497 if(nout==0)
return 3;
2505 if(ierr != 0)
return ierr;
2530 if(nout>nin) nmax=
nout;
2531 ss = (
char**) malloc((nmax+1)*
sizeof(
char*));
2532 if(ss == 0)
return -111;
2533 for(i=0;i<=nmax;i++)
2535 ss[
i]=(
char*) malloc(40*
sizeof(
char));
2536 if(ss[i] == 0)
return -111;
2545 sscanf(ss[i],
"%le",&p);
2555 sscanf(ss[i],
"%le",&p);
2577 printf(
"%d examples loaded \n\n",
PAT.Npat[ifile]);
2591 if (strtok(tmp,
" "))
2594 while (strtok(
NULL,
" ")) i++;
2608 strcpy(ss[0],strtok(tmp,
" "));
2610 strcpy(ss[i],strtok(
NULL,
" "));
2624 strcpy(ss[0],strtok(tmp,
" "));
2626 strcpy(ss[i],strtok(
NULL,
" "));
2643 if(LearnMemory==0)
return;
2645 for(il=0; il<
NET.Nlayer; il++)
2647 for(in=0; in<
NET.Nneur[il]; in++)
2654 if(BFGSMemory==0)
return;
2656 for(il=0; il<
NET.Nweights; il++)
2692 dir = (
dbl ***) malloc(
NET.Nlayer*
sizeof(
dbl**));
2693 if(dir == 0)
return -111;
2695 for(il=0; il<
NET.Nlayer; il++)
2697 dir[il] = (
dbl **) malloc(
NET.Nneur[il]*
sizeof(
dbl*));
2698 if(dir[il] == 0)
return -111;
2699 for(in=0; in<
NET.Nneur[il]; in++)
2704 dir[0][
in] = (
dbl *)
2705 malloc(101*
sizeof(
dbl));
2706 if(dir[0][in] == 0)
return -111;
2710 dir[il][
in] = (
dbl *)
2711 malloc((
NET.Nneur[il-1]+1)*
sizeof(
dbl));
2712 if(dir[il][in] == 0)
return -111;
2713 Nweights +=
NET.Nneur[il-1]+1;
2717 NET.Nweights = Nweights;
2719 if(BFGSMemory==0 &&
LEARN.Meth>= 6)
2722 Gamma = (
dbl*) malloc(Nweights*
sizeof(
dbl));
2723 delta = (
dbl*) malloc(Nweights*
sizeof(
dbl));
2724 BFGSH = (
dbl**) malloc(Nweights*
sizeof(
dbl*));
2725 if(Gamma == 0 || delta == 0 || BFGSH == 0)
2728 for(i=0; i<Nweights; i++)
2730 BFGSH[
i] = (
dbl*) malloc(Nweights*
sizeof(
dbl));
2731 if(BFGSH[i] == 0)
return -111;
2774 W=fopen(filename,
"w");
2776 fprintf(W,
" SUBROUTINE RNNFUN(rin,rout)\n");
2777 fprintf(W,
" DIMENSION RIN(%d)\n",
NET.Nneur[0]);
2778 fprintf(W,
" DIMENSION ROUT(%d)\n",
NET.Nneur[
NET.Nlayer-1]);
2781 for(in=0; in<
NET.Nneur[0]; in++)
2785 fprintf(W,
" OUT%d = RIN(%d)\n",in+1,in+1);
2789 fprintf(W,
" OUT%d = (RIN(%d)-%e)/%e\n",in+1,in+1,
2793 for(il=1; il<
NET.Nlayer-1; il++)
2796 fprintf(W,
"C layer %d\n",il+1);
2797 for(in=0; in<
NET.Nneur[il]; in++)
2799 fprintf(W,
" RIN%d = %e\n",in+1,
2800 (
double)
NET.Weights[il][in][0]);
2801 for(jn=1;jn<=
NET.Nneur[il-1]; jn++)
2802 fprintf(W,
" > +(%e) * OUT%d\n",
2803 (
double)
NET.Weights[il][in][jn],jn);
2806 for(in=0; in<
NET.Nneur[il]; in++)
2808 if(
NET.T_func[il][in]==0)
2810 fprintf(W,
" OUT%d = 0\n",in+1);
2812 else if(
NET.T_func[il][in]==1)
2814 fprintf(W,
" OUT%d = RIN%d\n",in+1,in+1);
2816 else if(
NET.T_func[il][in]==2)
2818 fprintf(W,
" OUT%d = SIGMOID(RIN%d)\n",
2825 fprintf(W,
"C layer %d\n",il+1);
2826 for(in=0; in<
NET.Nneur[il]; in++)
2828 fprintf(W,
" RIN%d = %e\n",in+1,
2829 (
double)
NET.Weights[il][in][0]);
2830 for(jn=1;jn<=
NET.Nneur[il-1]; jn++)
2831 fprintf(W,
" > +(%e) * OUT%d\n",
2832 (
double)
NET.Weights[il][in][jn],jn);
2835 for(in=0; in<
NET.Nneur[il]; in++)
2837 if(
NET.T_func[il][in]==0)
2839 fprintf(W,
" ROUT(%d) = 0\n",in+1);
2841 else if(
NET.T_func[il][in]==1)
2843 fprintf(W,
" ROUT(%d) = RIN%d\n",in+1,in+1);
2845 else if(
NET.T_func[il][in]==2)
2847 fprintf(W,
" ROUT(%d) = SIGMOID(RIN%d)\n",
2853 fprintf(W,
" END\n");
2854 fprintf(W,
" REAL FUNCTION SIGMOID(X)\n");
2855 fprintf(W,
" SIGMOID = 1./(1.+EXP(-X))\n");
2856 fprintf(W,
" END\n");
2883 W=fopen(filename,
"w");
2886 fprintf(W,
"double sigmoid(double x)\n");
2888 fprintf(W,
"return 1/(1+exp(-x));\n");
2890 fprintf(W,
"void rnnfun(double *rin,double *rout)\n");
2892 fprintf(W,
" double out1[%d];\n",
NET.Nneur[0]);
2893 fprintf(W,
" double out2[%d];\n",
NET.Nneur[1]);
2894 if(
NET.Nlayer>=3) fprintf(W,
" double out3[%d];\n",
NET.Nneur[2]);
2895 if(
NET.Nlayer>=4) fprintf(W,
" double out4[%d];\n",
NET.Nneur[3]);
2898 for(in=0; in<
NET.Nneur[0]; in++)
2902 fprintf(W,
" out1[%d] = rin[%d];\n",in,in);
2906 fprintf(W,
" out1[%d] = (rin[%d]-%e)/%e;\n",
2912 for(il=1; il<=
NET.Nlayer-1; il++)
2915 fprintf(W,
"/* layer %d */\n",il+1);
2916 for(in=0; in<
NET.Nneur[il]; in++)
2918 fprintf(W,
" out%d[%d] = %e\n",il+1,in,
2919 (
double)
NET.Weights[il][in][0]);
2920 for(jn=1;jn<=
NET.Nneur[il-1]; jn++)
2921 fprintf(W,
" +(%e) * out%d[%d]\n",
2922 (
double)
NET.Weights[il][in][jn],il,jn-1);
2926 for(in=0; in<
NET.Nneur[il]; in++)
2928 if(
NET.T_func[il][in]==0)
2930 fprintf(W,
" out%d[%d] = 0;\n",il+1,in);
2932 else if(
NET.T_func[il][in]==1)
2935 else if(
NET.T_func[il][in]==2)
2937 fprintf(W,
" out%d[%d] = sigmoid(out%d[%d]);\n",
2943 for(in=0; in<
NET.Nneur[il]; in++)
2945 fprintf(W,
" rout[%d] = out%d[%d];\n",in,il+1,in);
2977 W=fopen(filename,
"w");
2980 fprintf(W,
"# network structure ");
2981 for(ilayer=0; ilayer<
NET.Nlayer; ilayer++)
2983 fprintf(W,
"%d ",
NET.Nneur[ilayer]);
2986 fprintf(W,
"\n %d\n",iepoch);
2987 for(ilayer=1; ilayer<
NET.Nlayer; ilayer++)
2989 for(ineur=0; ineur<
NET.Nneur[ilayer]; ineur++)
2991 for(i=0; i<=
NET.Nneur[ilayer-1]; i++)
2993 fprintf(W,
" %1.15e\n",
2994 (
double)
NET.Weights[ilayer][ineur][i]);
3029 W=fopen(filename,
"r");
3036 sscanf(s,
" %d",iepoch);
3037 for(ilayer=1; ilayer<
NET.Nlayer; ilayer++)
3039 for(ineur=0; ineur<
NET.Nneur[ilayer]; ineur++)
3041 for(i=0; i<=
NET.Nneur[ilayer-1]; i++)
3043 fscanf(W,
" %le",&p);
3044 NET.Weights[ilayer][ineur][
i] = (
dbl) p;
3084 if(ifile>1 || ifile<0)
return(1);
3086 if(ExamplesMemory==0)
3093 if(
PAT.Pond == 0 ||
PAT.Rin == 0
3094 ||
PAT.Rans == 0 ||
PAT.vRin == 0)
return -111;
3099 if(iadd==0 && PatMemory[ifile]!=0)
3105 if(iadd==0 || PatMemory[ifile]==0)
3107 PatMemory[
ifile] = 1;
3109 if(
PAT.Pond[ifile] == 0)
return -111;
3110 for(j=0; j<npat; j++)
3111 PAT.Pond[ifile][j] = 1;
3114 if(
PAT.Rin[ifile] == 0)
return -111;
3116 if(
PAT.Rans[ifile] == 0)
return -111;
3120 if(
PAT.vRin[ifile] == 0)
return -111;
3122 for(j=0; j<npat; j++)
3127 for(j=0; j<npat; j++)
3130 if(
PAT.Rans[ifile][j] == 0)
return -111;
3136 ExamplesIndex = (
int *) malloc(npat*
sizeof(
int));
3137 if(ExamplesIndex == 0)
return -111;
3138 for(j=0; j<npat; j++) ExamplesIndex[j] = j;
3147 if(tmp == 0)
return -111;
3153 for(j=
PAT.Npat[ifile];j<ntot;j++)
3157 if(PatMemory[ifile]==1) free(
PAT.Pond[ifile]);
3174 if(tmp3 == 0)
return -111;
3176 for(j=0; j<
PAT.Npat[
ifile]*(nin+1); j++)
3180 if(PatMemory[ifile]==1) free(
PAT.vRin[ifile]);
3182 for(j=0; j<ntot; j++)
3189 if(tmp2 == 0)
return -111;
3194 for(j=
PAT.Npat[ifile];j<ntot;j++)
3197 if(tmp2[j] == 0)
return -111;
3199 if(PatMemory[ifile]==1) free(
PAT.Rans[ifile]);
3202 PatMemory[
ifile] = 1;
3207 free(ExamplesIndex);
3208 ExamplesIndex = (
int *) malloc(ntot*
sizeof(
int));
3209 if(ExamplesIndex == 0)
return -111;
3210 for(j=0; j<ntot; j++) ExamplesIndex[j] = j;
3236 if(ifile>1 || ifile<0)
return 1;
3238 if(PatMemory[ifile]==0)
return 2;
3240 free(
PAT.Pond[ifile]);
3244 free(
PAT.Rans[ifile][i]);
3246 free(
PAT.Rin[ifile]);
3247 free(
PAT.Rans[ifile]);
3248 free(
PAT.vRin[ifile]);
3249 PatMemory[
ifile] = 0;
3282 fmean = (
dbl*) malloc(Ninputs*
sizeof(
dbl));
3284 if(Nexamples<100) nmax=Nexamples;
3286 for(j=0;j<Ninputs;j++)
3289 for(ipat=0;ipat<nmax;ipat++)
3291 fmean[
j] += (
dbl) inputs[ipat][j];
3293 fmean[
j] = fmean[
j]/(
dbl) nmax;
3299 maximum[
j] = -99999;
3300 for(ipat=0;ipat<Nexamples;ipat++)
3302 mean[
j] += (
dbl) inputs[ipat][j];
3303 sigma[
j] += ((
dbl) inputs[ipat][j]-fmean[j])*
3304 ((
dbl) inputs[ipat][j]-fmean[j]);
3305 if((
dbl) inputs[ipat][
j] > maximum[
j])
3306 maximum[j]=(
dbl) inputs[ipat][
j];
3307 if((
dbl) inputs[ipat][
j] < minimum[
j])
3308 minimum[j]=(
dbl) inputs[ipat][
j];
3310 mean[
j] = mean[
j]/(
dbl) Nexamples;
3311 sigma[
j] =
sqrt(sigma[j]/ (
dbl) Nexamples -
3313 (mean[j]-fmean[j]));
3338 mean = (
dbl *) malloc(
NET.Nneur[0]*
sizeof(
dbl));
3339 sigma = (
dbl *) malloc(
NET.Nneur[0]*
sizeof(
dbl));
3340 minimum = (
dbl *) malloc(
NET.Nneur[0]*
sizeof(
dbl));
3341 maximum = (
dbl *) malloc(
NET.Nneur[0]*
sizeof(
dbl));
3343 if(mean == 0 || sigma == 0 || minimum == 0
3344 || maximum == 0)
return -111;
3347 mean,sigma,minimum,maximum);
3349 printf(
"\t mean \t\t RMS \t\t min \t\t max\n");
3350 for(j=0;j<
NET.Nneur[0];j++)
3352 printf(
"var%d \t %e \t %e \t %e \t %e\n",j+1,
3353 mean[j],sigma[j],minimum[j],maximum[j]);
3384 mean = (
dbl *) malloc(
NET.Nneur[0]*
sizeof(
dbl));
3385 if (mean == 0)
return -111;
3386 sigma = (
dbl *) malloc(
NET.Nneur[0]*
sizeof(
dbl));
3387 if (sigma == 0)
return -111;
3389 if (
STAT.mean == 0)
return -111;
3391 if (
STAT.sigma == 0)
return -111;
3392 minimum = (
dbl *) malloc(
NET.Nneur[0]*
sizeof(
dbl));
3393 if (minimum == 0)
return -111;
3394 maximum = (
dbl *) malloc(
NET.Nneur[0]*
sizeof(
dbl));
3395 if (maximum == 0)
return -111;
3398 mean,sigma,minimum,maximum);
3400 if(
NET.Debug>=1) printf(
"\t mean \t\t RMS \t\t min \t\t max\n");
3401 for(j=0;j<
NET.Nneur[0];j++)
3404 printf(
"var%d \t %e \t %e \t %e \t %e\n",j+1,
3405 mean[j],sigma[j],minimum[j],maximum[j]);
3409 STAT.sigma[
j] = sigma[
j];
3412 for(ipat=0;ipat<
PAT.Npat[0];ipat++)
3414 PAT.Rin[0][ipat][
j] =
3415 (
PAT.Rin[0][ipat][
j]-(float) mean[j])/
3418 for(ipat=0;ipat<
PAT.Npat[1];ipat++)
3420 PAT.Rin[1][ipat][
j] =
3421 (
PAT.Rin[1][ipat][
j]-(float) mean[j])/
3430 if(
NET.Debug>=1) printf(
"\n");
3456 NET.Nneur = (
int *) malloc(Nlayer*
sizeof(
int));
3457 if(
NET.Nneur == 0)
return -111;
3459 NET.T_func = (
int **) malloc(Nlayer*
sizeof(
int *));
3460 NET.Deriv1 = (
dbl **) malloc(Nlayer*
sizeof(
dbl *));
3461 NET.Inn = (
dbl **) malloc(Nlayer*
sizeof(
dbl *));
3462 NET.Outn = (
dbl **) malloc(Nlayer*
sizeof(
dbl *));
3463 NET.Delta = (
dbl **) malloc(Nlayer*
sizeof(
dbl *));
3464 if(
NET.T_func == 0 ||
NET.Deriv1 == 0
3465 ||
NET.Inn == 0 ||
NET.Outn == 0
3466 ||
NET.Delta == 0)
return -111;
3468 for(i=0; i<Nlayer; i++)
3470 NET.T_func[
i] = (
int *) malloc(Neurons[i]*
sizeof(
int));
3471 NET.Deriv1[
i] = (
dbl *) malloc(Neurons[i]*
sizeof(
dbl));
3472 NET.Inn[
i] = (
dbl *) malloc(Neurons[i]*
sizeof(
dbl));
3473 NET.Outn[
i] = (
dbl *) malloc(Neurons[i]*
sizeof(
dbl));
3474 NET.Delta[
i] = (
dbl *) malloc(Neurons[i]*
sizeof(
dbl));
3475 if(
NET.T_func[i] == 0 ||
NET.Deriv1[i] == 0
3476 ||
NET.Inn[i] == 0 ||
NET.Outn[i] == 0
3477 ||
NET.Delta[i] ==0 )
return -111;
3480 NET.Weights = (
dbl ***) malloc(Nlayer*
sizeof(
dbl **));
3481 NET.vWeights = (
dbl **) malloc(Nlayer*
sizeof(
dbl *));
3482 LEARN.Odw = (
dbl ***) malloc(Nlayer*
sizeof(
dbl **));
3483 LEARN.ODeDw = (
dbl ***) malloc(Nlayer*
sizeof(
dbl **));
3484 LEARN.DeDw = (
dbl ***) malloc(Nlayer*
sizeof(
dbl **));
3485 if(
NET.Weights == 0 ||
NET.vWeights == 0
3487 ||
LEARN.DeDw == 0)
return -111;
3489 for(i=1; i<Nlayer; i++)
3492 NET.vWeights[
i] = (
dbl *) malloc(k * Neurons[i] *
3494 NET.Weights[
i] = (
dbl **) malloc(Neurons[i]*
sizeof(
dbl *));
3495 LEARN.Odw[
i] = (
dbl **) malloc(Neurons[i]*
sizeof(
dbl *));
3496 LEARN.ODeDw[
i] = (
dbl **) malloc(Neurons[i]*
sizeof(
dbl *));
3497 LEARN.DeDw[
i] = (
dbl **) malloc(Neurons[i]*
sizeof(
dbl *));
3498 if(
NET.Weights[i] == 0 ||
NET.vWeights[i] == 0
3500 ||
LEARN.DeDw[i] == 0)
return -111;
3502 for(j=0; j<Neurons[
i]; j++)
3508 if(
LEARN.Odw[i][j] == 0
3509 ||
LEARN.ODeDw[i][j] == 0
3510 ||
LEARN.DeDw[i][j] == 0)
return -111;
3534 for(i=1; i<
NET.Nlayer; i++)
3536 for(j=0; j<
NET.Nneur[
i]; j++)
3539 free(
LEARN.Odw[i][j]);
3540 free(
LEARN.ODeDw[i][j]);
3541 free(
LEARN.DeDw[i][j]);
3543 free(
NET.vWeights[i]);
3544 free(
NET.Weights[i]);
3546 free(
LEARN.ODeDw[i]);
3547 free(
LEARN.DeDw[i]);
3556 for(i=0; i<
NET.Nlayer; i++)
3558 free(
NET.T_func[i]);
3559 free(
NET.Deriv1[i]);
3597 if(strlen(s)==0)
return -1;
3598 if(strlen(s)>1024)
return -2;
3601 if (strtok(tmp,
","))
3604 while (strtok(
NULL,
",")) i++;
3607 if(i >
NLMAX)
return -3;
3612 sscanf(strtok(tmp,
","),
"%d",&(Nneur[0]));
3613 for (i=1;i<*Nlayer;i++)
3614 sscanf(strtok(
NULL,
","),
"%d",&(Nneur[i]));
3643 if((*nl)>
NLMAX)
return(1);
3644 if((*nl)<2)
return(2);
3649 if(ierr != 0)
return ierr;
3652 NET.Nlayer = (int) *nl;
3655 for(il=0; il<
NET.Nlayer; il++) {
3656 NET.Nneur[il] = nn[il];
3685 register dbl a1, a2, a3, a4,
c,
d;
3687 dbl *pM2 = &(M[m+1]);
3688 dbl *pM3 = &(M[2*(m+1)]);
3689 dbl *pM4 = &(M[3*(m+1)]);
3694 i+=4, pM1 += 3*mp1, pM2 += 3*mp1, pM3 += 3*mp1, pM4 += 3*mp1,
3701 pM1++; pM2++; pM3++; pM4++;
3702 for(j=0; j<m-1; j+=2, pM1+=2, pM2+=2, pM3+=2, pM4+=2)
3706 a1 = a1 + *pM1 * c + *(pM1+1) * d;
3707 a2 = a2 + *pM2 * c + *(pM2+1) * d;
3708 a3 = a3 + *pM3 * c + *(pM3+1) * d;
3709 a4 = a4 + *pM4 * c + *(pM4+1) * d;
3711 for(j=j; j<
m; j++, pM1++, pM2++, pM3++, pM4++)
3719 *pr = a1; *(pr+1) = a2; *(pr+2) = a3; *(pr+3) = a4;
3723 pM1 = &(M[i*(m+1)]);
3726 for(j=0; j<
m; j++, pM1++)
3728 a1 = a1 + *pM1 * v[
j];
3751 register dbl a1, a2, a3, a4,
c,
d;
3754 dbl *pM3 = &(M[2*
m]);
3755 dbl *pM4 = &(M[3*
m]);
3760 i+=4, pM1 += 3*mp1, pM2 += 3*mp1, pM3 += 3*mp1, pM4 += 3*mp1,
3767 for(j=0; j<m-1; j+=2, pM1+=2, pM2+=2, pM3+=2, pM4+=2)
3771 a1 = a1 + *pM1 * c + *(pM1+1) * d;
3772 a2 = a2 + *pM2 * c + *(pM2+1) * d;
3773 a3 = a3 + *pM3 * c + *(pM3+1) * d;
3774 a4 = a4 + *pM4 * c + *(pM4+1) * d;
3776 for(j=j; j<
m; j++, pM1++, pM2++, pM3++, pM4++)
3784 *pr = a1; *(pr+1) = a2; *(pr+2) = a3; *(pr+3) = a4;
3790 for(j=0; j<
m; j++, pM1++)
3792 a1 = a1 + *pM1 * v[
j];
3818 int Ni,
int Nj,
int Nk,
int NaOffs,
int NbOffs)
3822 dbl s00,s01,s10,s11;
3824 dbl *pb0,*pb1,*pc0,*pc1;
3826 for (j=0; j<=Nj-2; j+=2)
3830 s00 = 0.0; s01 = 0.0; s10 = 0.0; s11 = 0.0;
3832 for (k=0,pb0=b+k+NbOffs*j,
3833 pb1=b+k+NbOffs*(j+1),
3842 s00 += (*pa0)*(*pb0);
3843 s01 += (*pa0)*(*pb1);
3844 s10 += (*pa1)*(*pb0);
3845 s11 += (*pa1)*(*pb1);
3847 *pc0 = s00; *(pc0+1) = s01; *pc1 = s10; *(pc1+1) = s11;
3849 for (j=j; j<Nj; j++)
3853 s00 = 0.0; s10 = 0.0;
3854 for (k=0,pb0=b+k+NbOffs*j,
3862 s00 += (*pa0)*(*pb0);
3863 s10 += (*pa1)*(*pb0);
3865 *pc0 = s00; *pc1 = s10;
dbl MLP_Epoch(int iepoch, dbl *alpmin, int *Ntest)
void getnLexemes(int n, char *s, char **ss)
dbl MLP_Test_MM(int ifile, dbl *tmp)
int ReadPatterns(char *filename, int ifile, int *inet, int *ilearn, int *iexamples)
int DecreaseSearch(dbl *alpmin, int *Ntest, dbl Err0)
int SaveWeights(char *filename, int iepoch)
int dgels_(char *trans, int *m, int *n, int *nrhs, double *a, int *lda, double *b, int *ldb, double *work, int *lwork, int *info)
void MLP_Out(type_pat *rrin, dbl *rrout)
int MLP_SetNet(int *nl, int *nn)
The signals that services can subscribe to. This is based on ActivityRegistry and is current as of the present revision. Services can connect to the signals distributed by the ActivityRegistry in order to monitor the activity of the application. Each possible callback has a defined signature, which we list here in angle brackets, e.g.
int MLP_PrCFun(char *filename)
int MLP_StatInputs(int Nexamples, int Ninputs, type_pat **inputs, dbl *mean, dbl *sigma, dbl *minimum, dbl *maximum)
void SetLambda(double Wmax)
int GetBFGSH(int Nweights)
int GetNetStructure(char *s, int *Nlayer, int *Nneur)
void MLP_Out_T(type_pat *rrin)
void MLP_vSigmoideDeriv(dbl *x, dbl *dy, int n)
int MLP_Train(int *ipat, dbl *err)
int ShuffleExamples(int n, int *index)
double MLP_Rand(dbl mini, dbl maxi)
int DeDwSum(type_pat *ans, dbl *out, int ipat)
int CountLexemes(char *s)
struct net_ net_ MLP_HIDDEN
int LoadWeights(char *filename, int *iepoch)
void MLP_LineHyb(dbl ***w0, dbl alpha)
int AllocPatterns(int ifile, int npat, int nin, int nout, int iadd)
int SetTransFunc(int layer, int neuron, int func)
void InitBFGSH(int Nweights)
void MLP_MM2rows(dbl *c, type_pat *a, dbl *b, int Ni, int Nj, int Nk, int NaOffs, int NbOffs)
void MLP_MatrixVector(dbl *M, type_pat *v, dbl *r, int n, int m)
void DeDwScale(int Nexamples)
void MLP_Line(dbl ***w0, dbl alpha)
void MLP_MatrixVectorBias(dbl *M, dbl *v, dbl *r, int n, int m)
void getLexemes(char *s, char **ss)
void MLP_vSigmoide(dbl *x, int n)
int LineSearch(dbl *alpmin, int *Ntest, dbl Err0)
std::vector< std::vector< double > > tmp
void BFGSdir(int Nweights)
void MLP_Out2(type_pat *rrin)
int FreePatterns(int ifile)
dbl MLP_Test(int ifile, int regul)
int MLP_PrFFun(char *filename)
int AllocNetwork(int Nlayer, int *Neurons)
int LineSearchHyb(dbl *alpmin, int *Ntest)