66 static int i, il,
in,
j,
m, mp1;
74 for(j=0;j<
m;j++)
NET.Outn[0][j] = rrin[j];
77 for(i=mp1; i<=
NET.Nneur[0]; i+=4)
79 NET.Outn[0][i-1] = rrin[i-1];
80 NET.Outn[0][
i] = rrin[
i];
81 NET.Outn[0][i+1] = rrin[i+1];
82 NET.Outn[0][i+2] = rrin[i+2];
90 for(il=2; il<
NET.Nlayer; il++)
93 deriv1[il-1],
NET.Nneur[il-1]);
95 NET.Outn[il],
NET.Nneur[il],
98 for(in=0; in<
NET.Nneur[
NET.Nlayer-1]; in++)
100 deriv1[
NET.Nlayer-1][
in] = 1;
117 static int i, il,
in,
j, ilm1,
m, mp1;
124 for(j=0;j<
m;j++)
NET.Outn[0][j] = rrin[j];
127 for(i=mp1; i<=
NET.Nneur[0]; i+=4)
129 NET.Outn[0][i-1] = rrin[i-1];
130 NET.Outn[0][
i] = rrin[
i];
131 NET.Outn[0][i+1] = rrin[i+1];
132 NET.Outn[0][i+2] = rrin[i+2];
140 for(il=1; il<
NET.Nlayer; il++)
143 m =
NET.Nneur[ilm1]%4;
144 for(in=0; in<
NET.Nneur[il]; in++)
146 a =
NET.Weights[il][
in][0];
148 for(j=1;j<=
m;j++) a +=
149 NET.Weights[il][in][j]*
NET.Outn[ilm1][j-1];
152 for(j=mp1; j<=
NET.Nneur[ilm1]; j+=4)
155 NET.Weights[il][
in][j+3]*
NET.Outn[ilm1][j+2]+
156 NET.Weights[il][
in][j+2]*
NET.Outn[ilm1][j+1]+
157 NET.Weights[il][
in][j+1]*
NET.Outn[ilm1][
j]+
158 NET.Weights[il][
in][
j]*
NET.Outn[ilm1][j-1];
160 switch(
NET.T_func[il][in])
164 case 1:
NET.Outn[il][
in] =
a;
166 case 0:
NET.Outn[il][
in] = 0;
189 static int il,
in,
m, mp1;
191 dbl **rrout, **deriv1;
192 register dbl *prrout;
194 int nhid =
NET.Nneur[1];
195 int nin =
NET.Nneur[0];
204 rrout[0][0] = rrin[1];
209 rrout[0][0] = rrin[1];
210 rrout[0][1] = rrin[2];
215 rrout[0][0] = rrin[1];
216 rrout[0][1] = rrin[2];
217 rrout[0][2] = rrin[3];
222 prrout = &(rrout[0][mp1]);
223 prrin = &(rrin[mp1+1]);
224 for(i=mp1; i<=
NET.Nneur[0]; i+=4, prrout+=4, prrin+=4)
226 *(prrout-1) = *(prrin-1);
228 *(prrout+1)= *(prrin+1);
229 *(prrout+2) = *(prrin+2);
235 NET.Outn[1],nhid,nin);
240 for(il=2; il<
NET.Nlayer; il++)
244 NET.Outn[il],
NET.Nneur[il],
NET.Nneur[il-1]);
246 for(in=0; in<
NET.Nneur[
NET.Nlayer-1]; in++)
247 deriv1[
NET.Nlayer-1][in] = 1;
270 int nhid =
NET.Nneur[1];
271 int nin =
NET.Nneur[0];
272 int jpat,
j, il, ilm1,
m,
in, mp1;
274 dbl *pweights, *ptmp;
277 for(ipat=0; ipat<npat-1; ipat+=2)
280 NET.vWeights[1], 2, nhid, nin+1,
283 switch(
NET.T_func[1][0])
294 for(jpat=0; jpat<2; jpat++)
296 for(j=0; j<nhid; j++)
298 tmp[j+jpat*nhid] = 0;
304 for(jpat=0; jpat<2; jpat++)
306 for(in=0; in<nhid; in++)
308 NET.Outn[1][
in] = tmp[jpat*nhid+
in];
310 for(il=2; il<
NET.Nlayer; il++)
313 m =
NET.Nneur[ilm1]%4;
314 for(in=0; in<
NET.Nneur[il]; in++)
316 pweights = &(
NET.Weights[il][
in][0]);
320 for(j=1;j<=
m;j++,pweights++) a +=
321 (*pweights)*
NET.Outn[ilm1][j-1];
324 for(j=mp1; j<=
NET.Nneur[ilm1];
328 *(pweights+3)*
NET.Outn[ilm1][j+2]+
329 *(pweights+2)*
NET.Outn[ilm1][j+1]+
330 *(pweights+1)*
NET.Outn[ilm1][j]+
331 *(pweights )*
NET.Outn[ilm1][j-1];
333 switch(
NET.T_func[il][in])
337 case 1:
NET.Outn[il][
in] =
a;
339 case 0:
NET.Outn[il][
in] = 0;
343 if(il ==
NET.Nlayer-1)
345 for(in=0; in<
NET.Nneur[
NET.Nlayer-1]; in++)
347 rrans = (
dbl)
PAT.Rans[ifile][ipat+jpat][in];
348 err += (rrans-
NET.Outn[
NET.Nlayer-1][in])*
350 PAT.Pond[ifile][ipat+jpat];
358 for(ipat=ipat; ipat<npat; ipat++)
361 &(
PAT.vRin[ifile][ipat*(nin+1)]),tmp,
364 switch(
NET.T_func[1][0])
375 for(j=0; j<nhid; j++)
382 for(in=0; in<nhid; in++)
386 for(il=2; il<
NET.Nlayer; il++)
389 m =
NET.Nneur[ilm1]%4;
390 for(in=0; in<
NET.Nneur[il]; in++)
392 pweights = &(
NET.Weights[il][
in][0]);
396 for(j=1;j<=
m;j++,pweights++) a +=
397 (*pweights)*
NET.Outn[ilm1][j-1];
400 for(j=mp1; j<=
NET.Nneur[ilm1];
404 *(pweights+3)*
NET.Outn[ilm1][j+2]+
405 *(pweights+2)*
NET.Outn[ilm1][j+1]+
406 *(pweights+1)*
NET.Outn[ilm1][j]+
407 *(pweights )*
NET.Outn[ilm1][j-1];
409 switch(
NET.T_func[il][in])
413 case 1:
NET.Outn[il][
in] =
a;
415 case 0:
NET.Outn[il][
in] = 0;
419 if(il ==
NET.Nlayer-1)
421 for(in=0; in<
NET.Nneur[
NET.Nlayer-1]; in++)
423 rrans = (
dbl)
PAT.Rans[ifile][ipat][in];
424 err += (rrans-
NET.Outn[
NET.Nlayer-1][in])*
426 PAT.Pond[ifile][ipat];
452 int in,jn,ipat,ipati;
456 tmp = (
dbl *) malloc(2 *
NET.Nneur[1] *
sizeof(
dbl));
459 printf(
"not enough memory in MLP_Test\n");
461 for(ipat=0; ipat<
PAT.Npat[
ifile]; ipat++)
465 ipati = ExamplesIndex[ipat];
472 for(in=0; in<
NET.Nneur[
NET.Nlayer-1]; in++)
474 rrans = (
dbl)
PAT.Rans[ifile][ipati][in];
475 err += (rrans-
NET.Outn[
NET.Nlayer-1][in])*
477 PAT.Pond[ifile][ipati];
483 for(in=0; in<
NET.Nneur[
NET.Nlayer-1]; in++)
484 for(jn=0; jn<=
NET.Nneur[
NET.Nlayer-2]; jn++)
498 for(in=0; in<
NET.Nneur[
NET.Nlayer-1]; in++)
499 for(jn=0; jn<=
NET.Nneur[
NET.Nlayer-2]; jn++)
525 int il, in1,
in, itest2;
526 dbl deriv, deriv1, deriv2, deriv3, deriv4, pond;
528 register dbl a,
b,
dd, a1, a2, a3, a4;
529 dbl *pout, *pdelta, *pw1, *pw2, *pw3, *pw4;
532 if(
NET.Debug>=5) printf(
" Entry MLP_Stochastic\n");
533 weights =
NET.Weights;
545 for(ipat=0;ipat<
PAT.Npat[0];ipat++)
547 ii = ExamplesIndex[ipat];
548 pond =
PAT.Pond[0][ii];
553 for(in=0; in<
NET.Nneur[
NET.Nlayer-1]; in++)
555 deriv =
NET.Deriv1[
NET.Nlayer-1][
in];
556 a = (
dbl)
PAT.Rans[0][ii][in];
557 b =
NET.Outn[
NET.Nlayer-1][in]-a;
559 NET.Delta[
NET.Nlayer-1][in] = b*deriv*pond*eta;
562 for(il=
NET.Nlayer-2; il>0; il--)
564 dd =
NET.Delta[il+1][0];
565 for(in=0; in<
NET.Nneur[il]-3; in+=4)
567 deriv1 =
NET.Deriv1[il][
in];
568 deriv2 =
NET.Deriv1[il][in+1];
569 deriv3 =
NET.Deriv1[il][in+2];
570 deriv4 =
NET.Deriv1[il][in+3];
571 itest2 = (
NET.Nneur[il+1]==1);
572 a1 = dd*weights[il+1][0][in+1];
573 a2 = dd*weights[il+1][0][in+2];
574 a3 = dd*weights[il+1][0][in+3];
575 a4 = dd*weights[il+1][0][in+4];
577 pdelta = &(
NET.Delta[il+1][1]);
578 for(in1=1; in1<
NET.Nneur[il+1];
581 a1 += *pdelta * weights[il+1][in1][in+1];
582 a2 += *pdelta * weights[il+1][in1][in+2];
583 a3 += *pdelta * weights[il+1][in1][in+3];
584 a4 += *pdelta * weights[il+1][in1][in+4];
586 L1:
NET.Delta[il][
in] = a1*deriv1;
587 NET.Delta[il][in+1] = a2*deriv2;
588 NET.Delta[il][in+2] = a3*deriv3;
589 NET.Delta[il][in+3] = a4*deriv4;
591 for(in=in; in<
NET.Nneur[il]; in++)
593 deriv =
NET.Deriv1[il][
in];
594 itest2 = (
NET.Nneur[il+1]==1);
595 a = dd*weights[il+1][0][in+1];
597 pdelta = &(
NET.Delta[il+1][1]);
598 for(in1=1; in1<
NET.Nneur[il+1];
602 weights[il+1][in1][in+1];
604 L2:
NET.Delta[il][
in] = a*deriv;
613 for(il=1; il<
NET.Nlayer; il++)
615 inm1 =
NET.Nneur[il-1];
616 for(in=0; in<
NET.Nneur[il]-3; in+=4)
618 a1 =
NET.Delta[il][
in];
619 a2 =
NET.Delta[il][in+1];
620 a3 =
NET.Delta[il][in+2];
621 a4 =
NET.Delta[il][in+3];
622 pout = &(
NET.Outn[il-1][0]);
623 weights[il][
in][0] += a1;
624 weights[il][in+1][0] += a2;
625 weights[il][in+2][0] += a3;
626 weights[il][in+3][0] += a4;
627 weights[il][
in][1] += a1* (*pout);
628 weights[il][in+1][1] += a2* (*pout);
629 weights[il][in+2][1] += a3* (*pout);
630 weights[il][in+3][1] += a4* (*pout);
632 pw1 = &(weights[il][
in][2]);
633 pw2 = &(weights[il][in+1][2]);
634 pw3 = &(weights[il][in+2][2]);
635 pw4 = &(weights[il][in+3][2]);
636 for(in1=2; in1<=inm1;
637 ++in1, ++pout, ++pw1, ++pw2,
646 for(in=in; in<
NET.Nneur[il]; in++)
648 a1 =
NET.Delta[il][
in];
649 pout = &(
NET.Outn[il-1][0]);
650 weights[il][
in][0] += a1;
651 weights[il][
in][1] += a1* (*pout);
653 pw1 = &(weights[il][
in][2]);
654 for(in1=2; in1<=inm1;
655 ++in1, ++pout, ++pw1)
664 for(il=1; il<
NET.Nlayer; il++)
666 for(in=0; in<
NET.Nneur[il]; in++)
669 a =
NET.Delta[il][
in];
673 b = a*
NET.Outn[il-1][0];
677 for(in1=2; in1<=
NET.Nneur[il-1]; in1++)
679 b = a*
NET.Outn[il-1][in1-1];
714 int Nweights, Nlinear, ipat, ierr;
721 Nweights =
NET.Nweights;
722 Nlinear =
NET.Nneur[
NET.Nlayer-2] + 1;
724 if(
NET.Debug>=5) printf(
" Entry MLP_Epoch\n");
734 if(iepoch==1 &&
LEARN.Meth==7)
748 for(ipat=0;ipat<nn;ipat++)
750 ierr =
MLP_Train(&ExamplesIndex[ipat],&err);
751 if(ierr!=0) printf(
"Epoch: ierr= %d\n",ierr);
756 for(ipat=0;ipat<
PAT.Npat[0];ipat++)
759 if(ierr!=0) printf(
"Epoch: ierr= %d\n",ierr);
773 if((iepoch-1)%
LEARN.Nreset==0)
792 if((iepoch-1)%
LEARN.Nreset==0)
801 beta =
LEARN.Norm/ONorm;
808 if((iepoch-1)%
LEARN.Nreset==0)
840 printf(
"Line search fail \n");
846 if((iepoch-1)%
LEARN.Nreset==0)
853 if(
NET.Debug>=5) printf(
"Before GetGammaDelta \n");
855 if(
NET.Debug>=5) printf(
"After GetGammaDelta \n");
857 if(
NET.Debug>=5) printf(
"After GetBFGSH \n");
867 if(
NET.Debug>=5) printf(
"After BFGSdir \n");
876 printf(
"Line search fail \n");
882 if(
NET.Debug>=5) printf(
" End MLP_Epoch\n");
905 if(*ipat<0)
return(2);
909 for(in=0; in<
NET.Nneur[
NET.Nlayer-1]; in++)
911 *err += ((
dbl)
PAT.Rans[0][*ipat][in]-
NET.Outn[
NET.Nlayer-1][in])
912 *((
dbl)
PAT.Rans[0][*ipat][in]-
NET.Outn[
NET.Nlayer-1][in])*
938 for(il=
NET.Nlayer-2; il>0; il--) {
940 for(in=0; in<
NET.Nneur[il]; in++) {
943 for(in1=0; in1<=
NET.Nneur[il-1]; in1++) {
945 + eps *
LEARN.Odw[il][
in][in1];
949 for(in1=0; in1<=
NET.Nneur[il-1]; in1++) {
978 for(il=
NET.Nlayer-1; il>0; il--) {
979 for(in1=0; in1<=
NET.Nneur[il-1]; in1++) {
982 for(in=0; in<
NET.Nneur[il]; in++) {
984 + epseta *
LEARN.Odw[il][
in][in1]);
1007 for(il=1; il<
NET.Nlayer; il++)
1008 for(in=0; in<
NET.Nneur[il]; in++)
1009 for(jn=0; jn<=
NET.Nneur[il-1]; jn++)
1010 dd +=
LEARN.DeDw[il][in][jn]*
1011 LEARN.DeDw[il][in][jn];
1029 for(il=1; il<
NET.Nlayer; il++)
1030 for(in=0; in<
NET.Nneur[il]; in++)
1031 for(jn=0; jn<=
NET.Nneur[il-1]; jn++)
1032 dd +=
LEARN.DeDw[il][in][jn]*
1033 LEARN.ODeDw[il][in][jn];
1049 for(il=1; il<
NET.Nlayer; il++)
1050 for(in=0; in<
NET.Nneur[il]; in++)
1051 for(jn=0; jn<=
NET.Nneur[il-1]; jn++)
1052 LEARN.DeDw[il][in][jn] = 0;
1068 for(il=1; il<
NET.Nlayer; il++)
1069 for(in=0; in<
NET.Nneur[il]; in++)
1070 for(jn=0; jn<=
NET.Nneur[il-1]; jn++)
1071 LEARN.DeDw[il][in][jn] /= (
dbl) Nexamples;
1086 for(il=1; il<
NET.Nlayer; il++)
1087 for(in=0; in<
NET.Nneur[il]; in++)
1088 for(jn=0; jn<=
NET.Nneur[il-1]; jn++)
1089 LEARN.ODeDw[il][in][jn] =
LEARN.DeDw[il][in][jn];
1105 for(il=1; il<
NET.Nlayer; il++)
1106 for(in=0; in<
NET.Nneur[il]; in++)
1107 for(jn=0; jn<=
NET.Nneur[il-1]; jn++)
1130 int il, in1,
in, ii;
1133 dbl *pout, *pdedw, *pdelta;
1138 b = (
dbl)
PAT.Pond[0][ipat];
1139 for(in=0; in<
NET.Nneur[
NET.Nlayer-1]; in++)
1141 deriv =
NET.Deriv1[
NET.Nlayer-1][
in];
1143 (out[
in] - (
dbl) ans[in])*deriv*
b;
1146 for(il=
NET.Nlayer-2; il>0; il--)
1149 for(in=0; in<
NET.Nneur[il]; in++)
1151 deriv =
NET.Deriv1[il][
in];
1152 a =
NET.Delta[il+1][0] *
NET.Weights[il+1][0][in+1];
1153 pdelta = &(
NET.Delta[il+1][1]);
1154 for(in1=1; in1<
NET.Nneur[il+1]; in1++, pdelta++)
1156 a += *pdelta *
NET.Weights[il+1][in1][in+1];
1158 NET.Delta[il][
in] = a * deriv;
1162 for(il=1; il<
NET.Nlayer; il++)
1164 ii =
NET.Nneur[il-1];
1165 for(in=0; in<
NET.Nneur[il]; in++)
1167 a =
NET.Delta[il][
in];
1169 LEARN.DeDw[il][
in][1] += a *
NET.Outn[il-1][0];
1170 pout = &(
NET.Outn[il-1][1]);
1171 pdedw = &(
LEARN.DeDw[il][
in][2]);
1172 for(in1=1; in1<ii; ++in1, ++pout, ++pdedw)
1174 (*pdedw) += a * (*pout);
1208 if(layer>
NLMAX)
return(1);
1211 NET.T_func[layer-1][neuron-1] = func;
1231 for(il=0; il<
NET.Nlayer; il++) {
1232 for(in=0; in<
NET.Nneur[il]; in++) {
1233 NET.T_func[il][
in] = 2;
1234 if(il==
NET.Nlayer-1)
NET.T_func[il][
in] = 1;
1252 for(il=1; il<
NET.Nlayer; il++)
1253 for(in=0; in<
NET.Nneur[il]; in++)
1254 for(jn=0; jn<=
NET.Nneur[il-1]; jn++)
1255 dir[il][in][jn] = -
LEARN.DeDw[il][in][jn];
1274 for(il=1; il<
NET.Nlayer; il++)
1275 for(in=0; in<
NET.Nneur[il]; in++)
1276 for(jn=0; jn<=
NET.Nneur[il-1]; jn++)
1278 dir[il][
in][jn] = -
LEARN.DeDw[il][
in][jn]+
1279 beta*dir[il][
in][jn];
1298 for(il=1; il<
NET.Nlayer; il++)
1299 for(in=0; in<
NET.Nneur[il]; in++)
1300 for(jn=0; jn<=
NET.Nneur[il-1]; jn++)
1302 ddir +=
LEARN.DeDw[il][
in][jn]*dir[il][
in][jn];
1322 for(il=1; il<
NET.Nlayer; il++)
1323 for(in=0; in<
NET.Nneur[il]; in++)
1324 for(jn=0; jn<=
NET.Nneur[il-1]; jn++)
1351 g = (
dbl*) malloc(
NET.Nweights*
sizeof(
dbl));
1352 s = (
dbl*) malloc(Nweights*
sizeof(
dbl));
1354 for(il=1; kk<Nweights; il++)
1355 for(in=0; in<
NET.Nneur[il]; in++)
1356 for(jn=0; jn<=
NET.Nneur[il-1]; jn++)
1358 g[kk] =
LEARN.DeDw[il][
in][jn];
1361 for(i=0; i<Nweights; i++)
1364 for(j=0; j<Nweights; j++)
1366 s[
i] += BFGSH[
i][
j] * g[
j];
1371 for(il=1; kk<Nweights; il++)
1372 for(in=0; in<
NET.Nneur[il]; in++)
1373 for(jn=0; jn<=
NET.Nneur[il-1]; jn++)
1375 dir[il][
in][jn] = -s[kk];
1396 for(i=0; i<Nweights; i++)
1397 for(j=0; j<Nweights; j++)
1400 if(i==j) BFGSH[
i][
j] = 1;
1425 typedef double dble;
1433 Hgamma = (dble *) malloc(Nweights*
sizeof(dble));
1434 tmp = (dble *) malloc(Nweights*
sizeof(dble));
1436 for(i=0; i<Nweights; i++)
1438 deltaTgamma += (dble) delta[i] * (dble) Gamma[
i];
1441 for(j=0; j<Nweights; j++)
1443 a += (dble) BFGSH[i][j] * (dble) Gamma[
j];
1444 b += (dble) Gamma[j] * (dble) BFGSH[
j][
i];
1448 factor += (dble) Gamma[i]*Hgamma[i];
1450 if(deltaTgamma == 0)
return 1;
1451 a = 1 / deltaTgamma;
1452 factor = 1 + factor*
a;
1454 for(i=0; i<Nweights; i++)
1456 b = (dble) delta[i];
1457 for(j=0; j<Nweights; j++)
1458 BFGSH[i][j] += (
dbl) (factor*b* (dble)
1459 delta[j]-(tmp[j]*b+Hgamma[i]*(dble)delta[
j]))*
a;
1481 dbl alpha1, alpha2, alpha3;
1482 dbl err1, err2, err3;
1484 int icount, il,
in, jn;
1491 w0 = (
dbl ***) malloc(
NET.Nlayer*
sizeof(
dbl**));
1492 for(il=1; il<
NET.Nlayer; il++)
1494 w0[il] = (
dbl **) malloc(
NET.Nneur[il]*
sizeof(
dbl*));
1495 for(in=0; in<
NET.Nneur[il]; in++)
1497 w0[il][
in] = (
dbl *) malloc((
NET.Nneur[il-1]+1)*
1499 for(jn=0; jn<=
NET.Nneur[il-1]; jn++)
1501 w0[il][
in][jn] =
NET.Weights[il][
in][jn];
1512 if(
NET.Debug>=4) printf(
"err depart= %f\n",err1);
1519 if(alpha2 < 0.01) alpha2 = 0.01;
1520 if(alpha2 > 2.0) alpha2 = 2.0;
1524 if(
NET.Debug>=4) printf(
"alpha, err= %e %e\n",alpha2,err2);
1534 for(icount=1;icount<=100;icount++)
1536 alpha3 = alpha3*
tau;
1539 if(
NET.Debug>=4) printf(
"alpha, err= %e %e\n",alpha3,err3);
1541 if(err3>err2)
break;
1556 for(icount=1;icount<=100;icount++)
1558 alpha2 = alpha2/
tau;
1561 if(
NET.Debug>=4) printf(
"alpha, err= %e %e\n",alpha2,err2);
1563 if(err1>err2)
break;
1578 *alpmin = 0.5*(alpha1+alpha3-(err3-err1)/((err3-err2)/(alpha3-alpha2)
1579 -(err2-err1)/(alpha2-alpha1)));
1580 if(*alpmin>10000) *alpmin=10000;
1584 LastAlpha = *alpmin;
1587 for(il=1; il<
NET.Nlayer; il++)
1588 for(in=0; in<
NET.Nneur[il]; in++)
1589 for(jn=0; jn<=
NET.Nneur[il-1]; jn++)
1590 LEARN.Odw[il][in][jn] =
NET.Weights[il][in][jn]
1593 for(il=1; il<
NET.Nlayer; il++)
1594 for(in=0; in<
NET.Nneur[il]; in++)
1596 for(il=1; il<
NET.Nlayer; il++)
1619 dbl alpha1, alpha2, alpha3;
1620 dbl err1, err2, err3;
1622 int icount, il,
in, jn;
1629 w0 = (
dbl ***) malloc(
NET.Nlayer*
sizeof(
dbl**));
1630 for(il=1; il<
NET.Nlayer; il++)
1632 w0[il] = (
dbl **) malloc(
NET.Nneur[il]*
sizeof(
dbl*));
1633 for(in=0; in<
NET.Nneur[il]; in++)
1635 w0[il][
in] = (
dbl *) malloc((
NET.Nneur[il-1]+1)*
1637 for(jn=0; jn<=
NET.Nneur[il-1]; jn++)
1639 w0[il][
in][jn] =
NET.Weights[il][
in][jn];
1650 if(
NET.Debug>=4) printf(
"err depart= %f\n",err1);
1669 for(icount=1;icount<=100;icount++)
1671 alpha2 = alpha2/
tau;
1675 if(err1>err2)
break;
1692 for(il=1; il<
NET.Nlayer; il++)
1693 for(in=0; in<
NET.Nneur[il]; in++)
1694 for(jn=0; jn<=
NET.Nneur[il-1]; jn++)
1695 LEARN.Odw[il][in][jn] =
NET.Weights[il][in][jn]
1698 for(il=1; il<
NET.Nlayer; il++)
1699 for(in=0; in<
NET.Nneur[il]; in++)
1701 for(il=1; il<
NET.Nlayer; il++)
1714 w0 = (
dbl ***) malloc(
NET.Nlayer*
sizeof(
dbl**));
1715 for(il=1; il<
NET.Nlayer; il++)
1717 w0[il] = (
dbl **) malloc(
NET.Nneur[il]*
sizeof(
dbl*));
1718 for(in=0; in<
NET.Nneur[il]; in++)
1720 w0[il][
in] = (
dbl *) malloc((
NET.Nneur[il-1]+1)*
1722 for(jn=0; jn<=
NET.Nneur[il-1]; jn++)
1724 w0[il][
in][jn] =
NET.Weights[il][
in][jn];
1734 for(il=1; il<
NET.Nlayer; il++)
1735 for(in=0; in<
NET.Nneur[il]; in++)
1736 for(jn=0; jn<=
NET.Nneur[il-1]; jn++)
1737 LEARN.Odw[il][in][jn] =
NET.Weights[il][in][jn]
1740 for(il=1; il<
NET.Nlayer; il++)
1741 for(in=0; in<
NET.Nneur[il]; in++)
1743 for(il=1; il<
NET.Nlayer; il++)
1764 register int il,
in,jn;
1766 for(il=1; il<
NET.Nlayer; il++)
1767 for(in=0; in<
NET.Nneur[il]; in++)
1768 for(jn=0; jn<=
NET.Nneur[il-1]; jn++)
1769 NET.Weights[il][in][jn] = w0[il][in][jn]+
1770 alpha*dir[il][in][jn];
1789 dbl alpha1, alpha2, alpha3;
1790 dbl err1, err2, err3;
1792 int icount, il,
in, jn;
1799 printf(
" entry LineSearchHyb \n");
1806 w0 = (
dbl ***) malloc((
NET.Nlayer-1)*
sizeof(
dbl**));
1807 for(il=1; il<
NET.Nlayer-1; il++)
1809 w0[il] = (
dbl **) malloc(
NET.Nneur[il]*
sizeof(
dbl*));
1810 for(in=0; in<
NET.Nneur[il]; in++)
1812 w0[il][
in] = (
dbl *) malloc((
NET.Nneur[il-1]+1)*
1814 for(jn=0; jn<=
NET.Nneur[il-1]; jn++)
1816 w0[il][
in][jn] =
NET.Weights[il][
in][jn];
1824 if(
NET.Debug>=4) printf(
"LinesearchHyb err depart= %f\n",err1);
1831 if(alpha2 < 0.01) alpha2 = 0.01;
1832 if(alpha2 > 2.0) alpha2 = 2.0;
1845 for(icount=1;icount<=100;icount++)
1847 alpha3 = alpha3*
tau;
1851 if(err3>err2)
break;
1866 for(icount=1;icount<=100;icount++)
1868 alpha2 = alpha2/
tau;
1872 if(err1>err2)
break;
1886 *alpmin = 0.5*(alpha1+alpha3-(err3-err1)/((err3-err2)/(alpha3-alpha2)
1887 -(err2-err1)/(alpha2-alpha1)));
1888 if(*alpmin>10000) *alpmin=10000;
1892 LastAlpha = *alpmin;
1895 for(il=1; il<
NET.Nlayer-1; il++)
1896 for(in=0; in<
NET.Nneur[il]; in++)
1897 for(jn=0; jn<=
NET.Nneur[il-1]; jn++)
1898 LEARN.Odw[il][in][jn] =
NET.Weights[il][in][jn]
1901 for(il=1; il<
NET.Nlayer-1; il++)
1902 for(in=0; in<
NET.Nneur[il]; in++)
1904 for(il=1; il<
NET.Nlayer-1; il++)
1908 printf(
" exit LineSearchHyb \n");
1931 for(il=1; il<
NET.Nlayer-1; il++)
1932 for(in=0; in<
NET.Nneur[il]; in++)
1933 for(jn=0; jn<=
NET.Nneur[il-1]; jn++)
1935 NET.Weights[il][
in][jn] = w0[il][
in][jn]+
1936 alpha*dir[il][
in][jn];
1981 double err,lambda,lambda2;
1982 integer Nl,M,Nhr,khr,nrhs,iret,ierr;
1983 int il,
in, inl, ipat;
1991 lambda2 =
LEARN.Alambda;
1995 Nl =
NET.Nneur[
NET.Nlayer-2] + 1;
2009 for(ipat=0;ipat<
PAT.Npat[0];ipat++)
2019 for(in=0; in<
NET.Nneur[
NET.Nlayer-1]; in++)
2021 a = (
dbl)
PAT.Rans[0][ipat][in];
2024 dpat[ipat] = (
dbl)
PAT.Rans[0][ipat][0]*
sqrt(
PAT.Pond[0][ipat]);
2027 for(in=0;in<
NET.Nneur[il];in++)
2029 khr = M *(in+1) + ipat;
2030 HR[khr] =
NET.Outn[il][
in]*
2035 lambda =
sqrt(lambda2);
2036 for(ipat=0;ipat<=
NET.Nneur[il];ipat++)
2038 dpat[ipat+
PAT.Npat[0]] = 0;
2039 for(in=0;in<=
NET.Nneur[il];in++)
2041 khr = M *in + ipat +
PAT.Npat[0];
2043 if(in==ipat) HR[khr]=lambda;
2049 printf(
"entry ResLin, err=MLP_Test(0,0), err= %f\n",err);
2055 ierr =
dgels_(&Trans,&M,&Nl,&nrhs,HR,&M,dpat,&M,Work,
2057 if(iret != 0) printf(
"Warning from dgels: iret = %d\n",(
int)iret);
2058 if(ierr != 0) printf(
"Warning from dgels: ierr = %d\n",(
int)ierr);
2066 for (inl=0; inl<=
NET.Nneur[il-1];inl++)
2068 NET.Weights[il][0][inl] = dpat[inl];
2073 printf(
"ResLin, apres tlsfor, err= %f\n",err);
2116 index[ii] = index[
i];
2138 return mini+(maxi-mini)*
random()/RAND_MAX;
2156 for(ilayer=1;ilayer<
NET.Nlayer;ilayer++)
2157 for(ineur=0;ineur<
NET.Nneur[ilayer];ineur++)
2158 for(i=0;i<=
NET.Nneur[ilayer-1];i++)
2159 NET.Weights[ilayer][ineur][i]=
2177 for(ilayer=1; ilayer<
NET.Nlayer; ilayer++)
2181 printf(
"Couche %d\n",ilayer);
2185 printf(
"Layer %d\n",ilayer);
2187 for(ineur=0; ineur<
NET.Nneur[ilayer]; ineur++)
2191 printf(
"Neurone %d",ineur);
2195 printf(
"Neuron %d",ineur);
2197 for(i=0; i<=
NET.Nneur[ilayer-1]; i++)
2200 (
double)
NET.Weights[ilayer][ineur][i]);
2235 int *inet,
int *ilearn,
int *iexamples)
2238 char otherfile[
CLEN];
2252 int nlayer, nneur[
NLMAX];
2254 printf(
"\nLoading file %s\n",filename);
2255 LVQpat=fopen(filename,
"r");
2256 if(LVQpat == 0)
return -1;
2260 while(fgets(s,
CLEN,LVQpat))
2266 printf(
"Number of neurons %s",s);
2268 sscanf(s,
"%s %s",cc,s2);
2270 if(ierr != 0)
return ierr;
2272 if(ierr != 0)
return ierr;
2276 sscanf(s,
"%s %d",cc,&l);
2280 printf(
"Number of patterns %d\n",np);
2282 else if(*(cc+1)==
'I')
2286 printf(
"Number of inputs %d\n",nin);
2288 else if(*(cc+1)==
'O' && *(cc+2)==
'U')
2292 printf(
"Number of outputs %d\n",nout);
2294 else if(*(cc+1)==
'O' && *(cc+2)==
'R')
2297 if(l==1) printf(
"Normalize inputs\n");
2300 else if(*(cc+1)==
'L')
2302 printf(
"NLAY datacard is no longer needed\n");
2304 else if(*(cc+1)==
'E')
2307 printf(
"Number of epochs %d\n",l);
2309 else if(*(cc+1)==
'R')
2313 "Reset to steepest descent every %d epochs\n",
2322 sscanf(s,
"%s %le",cc,&p);
2323 printf(
"Learning parameter %f\n",p);
2326 else if(*(s+1)==
'M')
2329 sscanf(s,
"%s %d",cc,&(
LEARN.Meth));
2330 printf(
"Learning method = ");
2333 case 1: printf(
"Stochastic Minimization\n");
2335 case 2: printf(
"Steepest descent with fixed step\n");
2337 case 3: printf(
"Steepest descent with line search\n");
break;
2338 case 4: printf(
"Polak-Ribiere Conjugate Gradients\n");
break;
2339 case 5: printf(
"Fletcher-Reeves Conjugate Gradients\n");
2341 case 6: printf(
"BFGS\n");
2343 case 7: printf(
"Hybrid BFGS-linear\n");
2345 default: printf(
"Error: unknown method\n");
break;
2349 else if(*(s+1)==
'T')
2351 sscanf(s,
"%s %lf",cc,&p);
2352 printf(
"Tau %f\n",p);
2355 else if(*(s+1)==
'A')
2357 sscanf(s,
"%s %lf",cc,&p);
2358 printf(
"Lambda %f\n",p);
2366 sscanf(s,
"%s %le",cc,&p);
2367 printf(
"Flat spot elimination parameter %f\n",p);
2370 else if(*(s+1)==
'I')
2372 sscanf(s,
"%s %s",cc,otherfile);
2373 ierr =
ReadPatterns(otherfile,ifile, inet, ilearn, iexamples);
2374 if(ierr != 0)
return ierr;
2379 sscanf(s,
"%s %le",cc,&p);
2380 printf(
"Momentum term %f\n",p);
2387 sscanf(s,
"%s %d",cc,&OutputWeights);
2388 if(OutputWeights == 0)
2390 printf(
"Never write file weights.out\n");
2392 else if(OutputWeights == -1)
2394 printf(
"Write weights to output file at the end\n");
2398 printf(
"Write weights to file every %d epochs\n",
2402 else if(*(s+3)==
'F')
2404 sscanf(s,
"%s %s",cc,cc2);
2405 if(*cc2==
'F' || *cc2==
'C')
2411 printf(
" *** Error while loading file %s at line %s :",
2413 printf(
" unknown language\n");
2418 printf(
" *** Error while loading file %s at line %s\n",
2424 sscanf(s,
"%s %d",cc,&(
NET.Rdwt));
2427 printf(
"Random weights \n");
2431 printf(
"Read weights from file weights.in\n");
2436 sscanf(s,
"%s %d",cc,&(
DIVERS.Stat));
2445 sscanf(s,
"%s %d",cc,&(
DIVERS.Ihess));
2451 sscanf(s,
"%s %le",cc,&p);
2453 printf(
"Learning parameter decay %f\n",
2454 (
double)
LEARN.Decay);
2458 sscanf(s,
"%s %d",cc,&(
DIVERS.Dbin));
2459 printf(
"Fill histogram every %d epochs\n",
DIVERS.Dbin);
2463 sscanf(s,
"%s %d",cc,&(
NET.Debug));
2464 printf(
"Debug mode %d\n",
NET.Debug);
2472 sscanf(s,
"%s %d",cc,&(
PAT.Iponde));
2476 ss = (
char**) malloc((npon+1)*
sizeof(
char*));
2477 for(i=0;i<=npon;i++)
2478 ss[i]=(
char*) malloc(40*
sizeof(
char));
2480 sscanf(ss[1],
"%d",&(
PAT.Iponde));
2483 sscanf(ss[i],
"%le",&(
PAT.Ponds[i-2]));
2501 if(nin==0)
return 2;
2502 if(nout==0)
return 3;
2510 if(ierr != 0)
return ierr;
2535 if(nout>nin) nmax=nout;
2536 ss = (
char**) malloc((nmax+1)*
sizeof(
char*));
2537 if(ss == 0)
return -111;
2538 for(i=0;i<=nmax;i++)
2540 ss[
i]=(
char*) malloc(40*
sizeof(
char));
2541 if(ss[i] == 0)
return -111;
2550 sscanf(ss[i],
"%le",&p);
2560 sscanf(ss[i],
"%le",&p);
2582 printf(
"%d examples loaded \n\n",
PAT.Npat[ifile]);
2596 if (strtok(tmp,
" "))
2599 while (strtok(
NULL,
" ")) i++;
2613 strcpy(ss[0],strtok(tmp,
" "));
2615 strcpy(ss[i],strtok(
NULL,
" "));
2629 strcpy(ss[0],strtok(tmp,
" "));
2631 strcpy(ss[i],strtok(
NULL,
" "));
2648 if(LearnMemory==0)
return;
2650 for(il=0; il<
NET.Nlayer; il++)
2652 for(in=0; in<
NET.Nneur[il]; in++)
2659 if(BFGSMemory==0)
return;
2661 for(il=0; il<
NET.Nweights; il++)
2697 dir = (
dbl ***) malloc(
NET.Nlayer*
sizeof(
dbl**));
2698 if(dir == 0)
return -111;
2700 for(il=0; il<
NET.Nlayer; il++)
2702 dir[il] = (
dbl **) malloc(
NET.Nneur[il]*
sizeof(
dbl*));
2703 if(dir[il] == 0)
return -111;
2704 for(in=0; in<
NET.Nneur[il]; in++)
2709 dir[0][
in] = (
dbl *)
2710 malloc(101*
sizeof(
dbl));
2711 if(dir[0][in] == 0)
return -111;
2715 dir[il][
in] = (
dbl *)
2716 malloc((
NET.Nneur[il-1]+1)*
sizeof(
dbl));
2717 if(dir[il][in] == 0)
return -111;
2718 Nweights +=
NET.Nneur[il-1]+1;
2722 NET.Nweights = Nweights;
2724 if(BFGSMemory==0 &&
LEARN.Meth>= 6)
2727 Gamma = (
dbl*) malloc(Nweights*
sizeof(
dbl));
2728 delta = (
dbl*) malloc(Nweights*
sizeof(
dbl));
2729 BFGSH = (
dbl**) malloc(Nweights*
sizeof(
dbl*));
2730 if(Gamma == 0 || delta == 0 || BFGSH == 0)
2733 for(i=0; i<Nweights; i++)
2735 BFGSH[
i] = (
dbl*) malloc(Nweights*
sizeof(
dbl));
2736 if(BFGSH[i] == 0)
return -111;
2779 W=fopen(filename,
"w");
2781 fprintf(W,
" SUBROUTINE RNNFUN(rin,rout)\n");
2782 fprintf(W,
" DIMENSION RIN(%d)\n",
NET.Nneur[0]);
2783 fprintf(W,
" DIMENSION ROUT(%d)\n",
NET.Nneur[
NET.Nlayer-1]);
2786 for(in=0; in<
NET.Nneur[0]; in++)
2790 fprintf(W,
" OUT%d = RIN(%d)\n",in+1,in+1);
2794 fprintf(W,
" OUT%d = (RIN(%d)-%e)/%e\n",in+1,in+1,
2798 for(il=1; il<
NET.Nlayer-1; il++)
2801 fprintf(W,
"C layer %d\n",il+1);
2802 for(in=0; in<
NET.Nneur[il]; in++)
2804 fprintf(W,
" RIN%d = %e\n",in+1,
2805 (
double)
NET.Weights[il][in][0]);
2806 for(jn=1;jn<=
NET.Nneur[il-1]; jn++)
2807 fprintf(W,
" > +(%e) * OUT%d\n",
2808 (
double)
NET.Weights[il][in][jn],jn);
2811 for(in=0; in<
NET.Nneur[il]; in++)
2813 if(
NET.T_func[il][in]==0)
2815 fprintf(W,
" OUT%d = 0\n",in+1);
2817 else if(
NET.T_func[il][in]==1)
2819 fprintf(W,
" OUT%d = RIN%d\n",in+1,in+1);
2821 else if(
NET.T_func[il][in]==2)
2823 fprintf(W,
" OUT%d = SIGMOID(RIN%d)\n",
2830 fprintf(W,
"C layer %d\n",il+1);
2831 for(in=0; in<
NET.Nneur[il]; in++)
2833 fprintf(W,
" RIN%d = %e\n",in+1,
2834 (
double)
NET.Weights[il][in][0]);
2835 for(jn=1;jn<=
NET.Nneur[il-1]; jn++)
2836 fprintf(W,
" > +(%e) * OUT%d\n",
2837 (
double)
NET.Weights[il][in][jn],jn);
2840 for(in=0; in<
NET.Nneur[il]; in++)
2842 if(
NET.T_func[il][in]==0)
2844 fprintf(W,
" ROUT(%d) = 0\n",in+1);
2846 else if(
NET.T_func[il][in]==1)
2848 fprintf(W,
" ROUT(%d) = RIN%d\n",in+1,in+1);
2850 else if(
NET.T_func[il][in]==2)
2852 fprintf(W,
" ROUT(%d) = SIGMOID(RIN%d)\n",
2858 fprintf(W,
" END\n");
2859 fprintf(W,
" REAL FUNCTION SIGMOID(X)\n");
2860 fprintf(W,
" SIGMOID = 1./(1.+EXP(-X))\n");
2861 fprintf(W,
" END\n");
2888 W=fopen(filename,
"w");
2891 fprintf(W,
"double sigmoid(double x)\n");
2893 fprintf(W,
"return 1/(1+exp(-x));\n");
2895 fprintf(W,
"void rnnfun(double *rin,double *rout)\n");
2897 fprintf(W,
" double out1[%d];\n",
NET.Nneur[0]);
2898 fprintf(W,
" double out2[%d];\n",
NET.Nneur[1]);
2899 if(
NET.Nlayer>=3) fprintf(W,
" double out3[%d];\n",
NET.Nneur[2]);
2900 if(
NET.Nlayer>=4) fprintf(W,
" double out4[%d];\n",
NET.Nneur[3]);
2903 for(in=0; in<
NET.Nneur[0]; in++)
2907 fprintf(W,
" out1[%d] = rin[%d];\n",in,in);
2911 fprintf(W,
" out1[%d] = (rin[%d]-%e)/%e;\n",
2917 for(il=1; il<=
NET.Nlayer-1; il++)
2920 fprintf(W,
"/* layer %d */\n",il+1);
2921 for(in=0; in<
NET.Nneur[il]; in++)
2923 fprintf(W,
" out%d[%d] = %e\n",il+1,in,
2924 (
double)
NET.Weights[il][in][0]);
2925 for(jn=1;jn<=
NET.Nneur[il-1]; jn++)
2926 fprintf(W,
" +(%e) * out%d[%d]\n",
2927 (
double)
NET.Weights[il][in][jn],il,jn-1);
2931 for(in=0; in<
NET.Nneur[il]; in++)
2933 if(
NET.T_func[il][in]==0)
2935 fprintf(W,
" out%d[%d] = 0;\n",il+1,in);
2937 else if(
NET.T_func[il][in]==1)
2940 else if(
NET.T_func[il][in]==2)
2942 fprintf(W,
" out%d[%d] = sigmoid(out%d[%d]);\n",
2948 for(in=0; in<
NET.Nneur[il]; in++)
2950 fprintf(W,
" rout[%d] = out%d[%d];\n",in,il+1,in);
2982 W=fopen(filename,
"w");
2985 fprintf(W,
"# network structure ");
2986 for(ilayer=0; ilayer<
NET.Nlayer; ilayer++)
2988 fprintf(W,
"%d ",
NET.Nneur[ilayer]);
2991 fprintf(W,
"\n %d\n",iepoch);
2992 for(ilayer=1; ilayer<
NET.Nlayer; ilayer++)
2994 for(ineur=0; ineur<
NET.Nneur[ilayer]; ineur++)
2996 for(i=0; i<=
NET.Nneur[ilayer-1]; i++)
2998 fprintf(W,
" %1.15e\n",
2999 (
double)
NET.Weights[ilayer][ineur][i]);
3034 W=fopen(filename,
"r");
3041 sscanf(s,
" %d",iepoch);
3042 for(ilayer=1; ilayer<
NET.Nlayer; ilayer++)
3044 for(ineur=0; ineur<
NET.Nneur[ilayer]; ineur++)
3046 for(i=0; i<=
NET.Nneur[ilayer-1]; i++)
3048 fscanf(W,
" %le",&p);
3049 NET.Weights[ilayer][ineur][
i] = (
dbl) p;
3089 if(ifile>1 || ifile<0)
return(1);
3091 if(ExamplesMemory==0)
3098 if(
PAT.Pond == 0 ||
PAT.Rin == 0
3099 ||
PAT.Rans == 0 ||
PAT.vRin == 0)
return -111;
3104 if(iadd==0 && PatMemory[ifile]!=0)
3110 if(iadd==0 || PatMemory[ifile]==0)
3112 PatMemory[
ifile] = 1;
3114 if(
PAT.Pond[ifile] == 0)
return -111;
3115 for(j=0; j<npat; j++)
3116 PAT.Pond[ifile][j] = 1;
3119 if(
PAT.Rin[ifile] == 0)
return -111;
3121 if(
PAT.Rans[ifile] == 0)
return -111;
3125 if(
PAT.vRin[ifile] == 0)
return -111;
3127 for(j=0; j<npat; j++)
3132 for(j=0; j<npat; j++)
3135 if(
PAT.Rans[ifile][j] == 0)
return -111;
3141 ExamplesIndex = (
int *) malloc(npat*
sizeof(
int));
3142 if(ExamplesIndex == 0)
return -111;
3143 for(j=0; j<npat; j++) ExamplesIndex[j] = j;
3152 if(tmp == 0)
return -111;
3158 for(j=
PAT.Npat[ifile];j<ntot;j++)
3162 if(PatMemory[ifile]==1) free(
PAT.Pond[ifile]);
3179 if(tmp3 == 0)
return -111;
3181 for(j=0; j<
PAT.Npat[
ifile]*(nin+1); j++)
3185 if(PatMemory[ifile]==1) free(
PAT.vRin[ifile]);
3187 for(j=0; j<ntot; j++)
3194 if(tmp2 == 0)
return -111;
3199 for(j=
PAT.Npat[ifile];j<ntot;j++)
3202 if(tmp2[j] == 0)
return -111;
3204 if(PatMemory[ifile]==1) free(
PAT.Rans[ifile]);
3207 PatMemory[
ifile] = 1;
3212 free(ExamplesIndex);
3213 ExamplesIndex = (
int *) malloc(ntot*
sizeof(
int));
3214 if(ExamplesIndex == 0)
return -111;
3215 for(j=0; j<ntot; j++) ExamplesIndex[j] = j;
3241 if(ifile>1 || ifile<0)
return 1;
3243 if(PatMemory[ifile]==0)
return 2;
3245 free(
PAT.Pond[ifile]);
3249 free(
PAT.Rans[ifile][i]);
3251 free(
PAT.Rin[ifile]);
3252 free(
PAT.Rans[ifile]);
3253 free(
PAT.vRin[ifile]);
3254 PatMemory[
ifile] = 0;
3287 fmean = (
dbl*) malloc(Ninputs*
sizeof(
dbl));
3289 if(Nexamples<100) nmax=Nexamples;
3291 for(j=0;j<Ninputs;j++)
3294 for(ipat=0;ipat<nmax;ipat++)
3296 fmean[
j] += (
dbl) inputs[ipat][j];
3298 fmean[
j] = fmean[
j]/(
dbl) nmax;
3304 maximum[
j] = -99999;
3305 for(ipat=0;ipat<Nexamples;ipat++)
3307 mean[
j] += (
dbl) inputs[ipat][j];
3308 sigma[
j] += ((
dbl) inputs[ipat][j]-fmean[j])*
3309 ((
dbl) inputs[ipat][j]-fmean[j]);
3310 if((
dbl) inputs[ipat][
j] > maximum[
j])
3311 maximum[j]=(
dbl) inputs[ipat][
j];
3312 if((
dbl) inputs[ipat][
j] < minimum[
j])
3313 minimum[j]=(
dbl) inputs[ipat][
j];
3315 mean[
j] = mean[
j]/(
dbl) Nexamples;
3316 sigma[
j] =
sqrt(sigma[j]/ (
dbl) Nexamples -
3318 (mean[j]-fmean[j]));
3343 mean = (
dbl *) malloc(
NET.Nneur[0]*
sizeof(
dbl));
3344 sigma = (
dbl *) malloc(
NET.Nneur[0]*
sizeof(
dbl));
3345 minimum = (
dbl *) malloc(
NET.Nneur[0]*
sizeof(
dbl));
3346 maximum = (
dbl *) malloc(
NET.Nneur[0]*
sizeof(
dbl));
3348 if(mean == 0 || sigma == 0 || minimum == 0
3349 || maximum == 0)
return -111;
3352 mean,sigma,minimum,maximum);
3354 printf(
"\t mean \t\t RMS \t\t min \t\t max\n");
3355 for(j=0;j<
NET.Nneur[0];j++)
3357 printf(
"var%d \t %e \t %e \t %e \t %e\n",j+1,
3358 mean[j],sigma[j],minimum[j],maximum[j]);
3389 mean = (
dbl *) malloc(
NET.Nneur[0]*
sizeof(
dbl));
3390 sigma = (
dbl *) malloc(
NET.Nneur[0]*
sizeof(
dbl));
3393 minimum = (
dbl *) malloc(
NET.Nneur[0]*
sizeof(
dbl));
3394 maximum = (
dbl *) malloc(
NET.Nneur[0]*
sizeof(
dbl));
3396 if(mean == 0 || sigma == 0 || minimum == 0
3397 || maximum == 0 ||
STAT.mean == 0 ||
3398 STAT.sigma == 0)
return -111;
3401 mean,sigma,minimum,maximum);
3403 if(
NET.Debug>=1) printf(
"\t mean \t\t RMS \t\t min \t\t max\n");
3404 for(j=0;j<
NET.Nneur[0];j++)
3407 printf(
"var%d \t %e \t %e \t %e \t %e\n",j+1,
3408 mean[j],sigma[j],minimum[j],maximum[j]);
3412 STAT.sigma[
j] = sigma[
j];
3415 for(ipat=0;ipat<
PAT.Npat[0];ipat++)
3417 PAT.Rin[0][ipat][
j] =
3418 (
PAT.Rin[0][ipat][
j]-(float) mean[j])/
3421 for(ipat=0;ipat<
PAT.Npat[1];ipat++)
3423 PAT.Rin[1][ipat][
j] =
3424 (
PAT.Rin[1][ipat][
j]-(float) mean[j])/
3433 if(
NET.Debug>=1) printf(
"\n");
3459 NET.Nneur = (
int *) malloc(Nlayer*
sizeof(
int));
3460 if(
NET.Nneur == 0)
return -111;
3462 NET.T_func = (
int **) malloc(Nlayer*
sizeof(
int *));
3463 NET.Deriv1 = (
dbl **) malloc(Nlayer*
sizeof(
dbl *));
3464 NET.Inn = (
dbl **) malloc(Nlayer*
sizeof(
dbl *));
3465 NET.Outn = (
dbl **) malloc(Nlayer*
sizeof(
dbl *));
3466 NET.Delta = (
dbl **) malloc(Nlayer*
sizeof(
dbl *));
3467 if(
NET.T_func == 0 ||
NET.Deriv1 == 0
3468 ||
NET.Inn == 0 ||
NET.Outn == 0
3469 ||
NET.Delta == 0)
return -111;
3471 for(i=0; i<Nlayer; i++)
3473 NET.T_func[
i] = (
int *) malloc(Neurons[i]*
sizeof(
int));
3474 NET.Deriv1[
i] = (
dbl *) malloc(Neurons[i]*
sizeof(
dbl));
3475 NET.Inn[
i] = (
dbl *) malloc(Neurons[i]*
sizeof(
dbl));
3476 NET.Outn[
i] = (
dbl *) malloc(Neurons[i]*
sizeof(
dbl));
3477 NET.Delta[
i] = (
dbl *) malloc(Neurons[i]*
sizeof(
dbl));
3478 if(
NET.T_func[i] == 0 ||
NET.Deriv1[i] == 0
3479 ||
NET.Inn[i] == 0 ||
NET.Outn[i] == 0
3480 ||
NET.Delta[i] ==0 )
return -111;
3483 NET.Weights = (
dbl ***) malloc(Nlayer*
sizeof(
dbl **));
3484 NET.vWeights = (
dbl **) malloc(Nlayer*
sizeof(
dbl *));
3485 LEARN.Odw = (
dbl ***) malloc(Nlayer*
sizeof(
dbl **));
3486 LEARN.ODeDw = (
dbl ***) malloc(Nlayer*
sizeof(
dbl **));
3487 LEARN.DeDw = (
dbl ***) malloc(Nlayer*
sizeof(
dbl **));
3488 if(
NET.Weights == 0 ||
NET.vWeights == 0
3490 ||
LEARN.DeDw == 0)
return -111;
3492 for(i=1; i<Nlayer; i++)
3495 NET.vWeights[
i] = (
dbl *) malloc(k * Neurons[i] *
3497 NET.Weights[
i] = (
dbl **) malloc(Neurons[i]*
sizeof(
dbl *));
3498 LEARN.Odw[
i] = (
dbl **) malloc(Neurons[i]*
sizeof(
dbl *));
3499 LEARN.ODeDw[
i] = (
dbl **) malloc(Neurons[i]*
sizeof(
dbl *));
3500 LEARN.DeDw[
i] = (
dbl **) malloc(Neurons[i]*
sizeof(
dbl *));
3501 if(
NET.Weights[i] == 0 ||
NET.vWeights[i] == 0
3503 ||
LEARN.DeDw[i] == 0)
return -111;
3505 for(j=0; j<Neurons[
i]; j++)
3511 if(
LEARN.Odw[i][j] == 0
3512 ||
LEARN.ODeDw[i][j] == 0
3513 ||
LEARN.DeDw[i][j] == 0)
return -111;
3537 for(i=1; i<
NET.Nlayer; i++)
3539 for(j=0; j<
NET.Nneur[
i]; j++)
3542 free(
LEARN.Odw[i][j]);
3543 free(
LEARN.ODeDw[i][j]);
3544 free(
LEARN.DeDw[i][j]);
3546 free(
NET.vWeights[i]);
3547 free(
NET.Weights[i]);
3549 free(
LEARN.ODeDw[i]);
3550 free(
LEARN.DeDw[i]);
3559 for(i=0; i<
NET.Nlayer; i++)
3561 free(
NET.T_func[i]);
3562 free(
NET.Deriv1[i]);
3600 if(strlen(s)==0)
return -1;
3601 if(strlen(s)>1024)
return -2;
3604 if (strtok(tmp,
","))
3607 while (strtok(
NULL,
",")) i++;
3610 if(i >
NLMAX)
return -3;
3615 sscanf(strtok(tmp,
","),
"%d",&(Nneur[0]));
3616 for (i=1;i<*Nlayer;i++)
3617 sscanf(strtok(
NULL,
","),
"%d",&(Nneur[i]));
3646 if((*nl)>
NLMAX)
return(1);
3647 if((*nl)<2)
return(2);
3652 if(ierr != 0)
return ierr;
3655 NET.Nlayer = (int) *nl;
3658 for(il=0; il<
NET.Nlayer; il++) {
3659 NET.Nneur[il] = nn[il];
3688 register dbl a1, a2, a3, a4,
c, d;
3690 dbl *pM2 = &(M[m+1]);
3691 dbl *pM3 = &(M[2*(m+1)]);
3692 dbl *pM4 = &(M[3*(m+1)]);
3697 i+=4, pM1 += 3*mp1, pM2 += 3*mp1, pM3 += 3*mp1, pM4 += 3*mp1,
3704 pM1++; pM2++; pM3++; pM4++;
3705 for(j=0; j<m-1; j+=2, pM1+=2, pM2+=2, pM3+=2, pM4+=2)
3709 a1 = a1 + *pM1 * c + *(pM1+1) * d;
3710 a2 = a2 + *pM2 * c + *(pM2+1) * d;
3711 a3 = a3 + *pM3 * c + *(pM3+1) * d;
3712 a4 = a4 + *pM4 * c + *(pM4+1) * d;
3714 for(j=j; j<
m; j++, pM1++, pM2++, pM3++, pM4++)
3722 *pr = a1; *(pr+1) = a2; *(pr+2) = a3; *(pr+3) = a4;
3726 pM1 = &(M[i*(m+1)]);
3729 for(j=0; j<
m; j++, pM1++)
3731 a1 = a1 + *pM1 * v[
j];
3754 register dbl a1, a2, a3, a4,
c, d;
3757 dbl *pM3 = &(M[2*
m]);
3758 dbl *pM4 = &(M[3*
m]);
3763 i+=4, pM1 += 3*mp1, pM2 += 3*mp1, pM3 += 3*mp1, pM4 += 3*mp1,
3770 for(j=0; j<m-1; j+=2, pM1+=2, pM2+=2, pM3+=2, pM4+=2)
3774 a1 = a1 + *pM1 * c + *(pM1+1) * d;
3775 a2 = a2 + *pM2 * c + *(pM2+1) * d;
3776 a3 = a3 + *pM3 * c + *(pM3+1) * d;
3777 a4 = a4 + *pM4 * c + *(pM4+1) * d;
3779 for(j=j; j<
m; j++, pM1++, pM2++, pM3++, pM4++)
3787 *pr = a1; *(pr+1) = a2; *(pr+2) = a3; *(pr+3) = a4;
3793 for(j=0; j<
m; j++, pM1++)
3795 a1 = a1 + *pM1 * v[
j];
3821 int Ni,
int Nj,
int Nk,
int NaOffs,
int NbOffs)
3825 dbl s00,s01,s10,s11;
3827 dbl *pb0,*pb1,*pc0,*pc1;
3829 for (j=0; j<=Nj-2; j+=2)
3833 s00 = 0.0; s01 = 0.0; s10 = 0.0; s11 = 0.0;
3835 for (k=0,pb0=b+k+NbOffs*j,
3836 pb1=b+k+NbOffs*(j+1),
3845 s00 += (*pa0)*(*pb0);
3846 s01 += (*pa0)*(*pb1);
3847 s10 += (*pa1)*(*pb0);
3848 s11 += (*pa1)*(*pb1);
3850 *pc0 = s00; *(pc0+1) = s01; *pc1 = s10; *(pc1+1) = s11;
3852 for (j=j; j<Nj; j++)
3856 s00 = 0.0; s10 = 0.0;
3857 for (k=0,pb0=b+k+NbOffs*j,
3865 s00 += (*pa0)*(*pb0);
3866 s10 += (*pa1)*(*pb0);
3868 *pc0 = s00; *pc1 = s10;
dbl MLP_Epoch(int iepoch, dbl *alpmin, int *Ntest)
void getnLexemes(int n, char *s, char **ss)
dbl MLP_Test_MM(int ifile, dbl *tmp)
int ReadPatterns(char *filename, int ifile, int *inet, int *ilearn, int *iexamples)
int DecreaseSearch(dbl *alpmin, int *Ntest, dbl Err0)
int SaveWeights(char *filename, int iepoch)
void MLP_Out(type_pat *rrin, dbl *rrout)
int MLP_SetNet(int *nl, int *nn)
The Signals That Services Can Subscribe To. This is based on ActivityRegistry and is current as of its present interface. Services can connect to the signals distributed by the ActivityRegistry in order to monitor the activity of the application. Each possible callback has a defined signature, which we list here in angle brackets, e.g.
int MLP_PrCFun(char *filename)
int MLP_StatInputs(int Nexamples, int Ninputs, type_pat **inputs, dbl *mean, dbl *sigma, dbl *minimum, dbl *maximum)
void SetLambda(double Wmax)
int GetBFGSH(int Nweights)
int GetNetStructure(char *s, int *Nlayer, int *Nneur)
void MLP_Out_T(type_pat *rrin)
void MLP_vSigmoideDeriv(dbl *x, dbl *dy, int n)
int MLP_Train(int *ipat, dbl *err)
int ShuffleExamples(int n, int *index)
double MLP_Rand(dbl mini, dbl maxi)
int DeDwSum(type_pat *ans, dbl *out, int ipat)
int CountLexemes(char *s)
struct net_ net_ MLP_HIDDEN
int LoadWeights(char *filename, int *iepoch)
void MLP_LineHyb(dbl ***w0, dbl alpha)
int AllocPatterns(int ifile, int npat, int nin, int nout, int iadd)
int SetTransFunc(int layer, int neuron, int func)
void InitBFGSH(int Nweights)
void MLP_MM2rows(dbl *c, type_pat *a, dbl *b, int Ni, int Nj, int Nk, int NaOffs, int NbOffs)
void MLP_MatrixVector(dbl *M, type_pat *v, dbl *r, int n, int m)
void DeDwScale(int Nexamples)
void MLP_Line(dbl ***w0, dbl alpha)
void MLP_MatrixVectorBias(dbl *M, dbl *v, dbl *r, int n, int m)
void getLexemes(char *s, char **ss)
int dgels_(char *trans, integer *m, integer *n, integer *nrhs, doublereal *a, integer *lda, doublereal *b, integer *ldb, doublereal *work, integer *lwork, integer *info)
void MLP_vSigmoide(dbl *x, int n)
int LineSearch(dbl *alpmin, int *Ntest, dbl Err0)
std::vector< std::vector< double > > tmp
void BFGSdir(int Nweights)
void MLP_Out2(type_pat *rrin)
int FreePatterns(int ifile)
dbl MLP_Test(int ifile, int regul)
int MLP_PrFFun(char *filename)
int AllocNetwork(int Nlayer, int *Neurons)
int LineSearchHyb(dbl *alpmin, int *Ntest)