#include <iostream>
#include <cstdlib>
#include <cstring>
#include <sstream>
#include <string>
#include <vector>

#include "FWCore/Utilities/interface/Exception.h"

#include "MLP.h"

#include "mlp_gen.h"

namespace PhysicsTools {

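// MLPfit keeps the network, learning and pattern state in global
// variables (NET, LEARN, PAT), so at most one MLP instance may
// exist at a time; this flag enforces that.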
bool MLP::inUse = false;

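// Split "line" into tokens at each occurrence of "delim".  An empty
// input yields no tokens; a trailing delimiter yields a trailing
// empty token.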
static std::vector<std::string> split(const std::string &line, char delim)
{
    const char *p = line.c_str();

    std::vector<std::string> tokens;

    if (line.empty())
        return tokens;

    while (p) {
        const char *q = std::strchr(p, delim);

        if (!q) {
            tokens.push_back(std::string(p));
            p = 0;
        } else {
            tokens.push_back(std::string(p, q - p));
            p = q + 1;
        }
    }

    return tokens;
}

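// Build a network with nIn input and nOut output nodes.  The hidden
// layers are given as a colon-separated list of node counts, e.g.
// "10:5" describes two hidden layers with 10 and 5 nodes.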
MLP::MLP(unsigned int nIn, unsigned int nOut, const std::string layout_) :
    initialized(false), layers(0), layout(0), epoch(0)
{
    if (inUse)
        throw cms::Exception("MLP")
            << "mlpfit doesn't support more than one instance."
            << std::endl;

    std::vector<std::string> parsed = split(layout_, ':');
    if (parsed.empty())
        throw cms::Exception("MLP")
            << "Invalid layout." << std::endl;

    // layout[] holds the node count per layer: input, hidden..., output
    layout = new int[parsed.size() + 2];

    layers = (int)parsed.size();
    layout[0] = (int)nIn;
    for (int i = 0; i < layers; i++) {
        std::istringstream ss(parsed[i]);
        int nodes;
        if (!(ss >> nodes) || nodes < 1) {
            delete[] layout;
            layout = 0;
            throw cms::Exception("MLP")
                << "Invalid layout." << std::endl;
        }

        layout[i + 1] = nodes;
    }
    layout[layers + 1] = (int)nOut;
    layers += 2;

    inUse = true;

    MLP_SetNet(&layers, layout);
    setLearn();
    LearnAlloc();
    InitWeights();
}

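// Tear down: free pattern and learning buffers, release the
// single-instance lock and the layout array.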
MLP::~MLP()
{
    clear();

    LearnFree();
    inUse = false;
    delete[] layout;
}

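// Release the pattern storage allocated by init(), if any.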
void MLP::clear()
{
    if (!initialized)
        return;
    initialized = false;

    FreePatterns(0);
    free(PAT.Rin);
    free(PAT.Rans);
    free(PAT.Pond);
}

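// Configure MLPfit's global learning parameters: the learning method
// (Meth = 7, MLPfit's hybrid method) and its tuning parameters such
// as the step size eta and the weight-decay factor.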
void MLP::setLearn(void)
{
    LEARN.Meth = 7;
    LEARN.Nreset = 50;
    LEARN.Tau = 1.5;
    LEARN.Decay = 1.0;
    LEARN.eta = 0.1;
    LEARN.Lambda = 1.0;
    LEARN.delta = 0.0;
    LEARN.epsilon = 0.2;
}

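// Declare the pattern counts: all "size" patterns go into MLPfit's
// pattern set 0; set 1, which MLPfit would use as a test set, stays
// empty.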
void MLP::setNPattern(unsigned int size)
{
    PAT.Npat[0] = (int)size;
    PAT.Npat[1] = 0;
    PAT.Nin = layout[0];
    PAT.Nout = layout[layers - 1];
}

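// Allocate MLPfit storage for "rows" patterns in pattern set 0.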
void MLP::init(unsigned int rows)
{
    setNPattern(rows);
    AllocPatterns(0, rows, layout[0], layout[layers - 1], 0);
    initialized = true;
}

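// Fill one training pattern: input vector, target vector and event
// weight.  Each row of PAT.vRin holds nIn + 1 doubles; the first
// entry of a row is maintained by MLPfit itself, hence the offset
// of 1 when copying the inputs.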
void MLP::set(unsigned int row, double *data, double *target, double weight)
{
    int nIn = layout[0];
    int nOut = layout[layers - 1];

    std::memcpy(&PAT.vRin[0][row * (nIn + 1) + 1], data, sizeof(double) * nIn);
    std::memcpy(&PAT.Rans[0][row][0], target, sizeof(double) * nOut);
    PAT.Pond[0][row] = weight;
}

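// Run one training epoch over the loaded patterns and return the
// error reported by MLP_Epoch; alpMin and nTest are diagnostic
// outputs that are not used here.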
double MLP::train()
{
    double alpMin;
    int nTest;

    return MLP_Epoch(++epoch, &alpMin, &nTest);
}

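// Propagate one input vector through the network.  The returned
// pointer refers to MLPfit's output-layer buffer and is only valid
// until the next evaluation.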
const double *MLP::eval(double *data) const
{
    MLP_Out_T(data);

    return &NET.Outn[layers - 1][0];
}

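// Write the current weights (together with the epoch number) to
// "file"; throws if the file cannot be opened for writing.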
void MLP::save(const std::string file) const
{
    if (SaveWeights(const_cast<char*>(file.c_str()), (int)epoch) < 0)
        throw cms::Exception("MLP")
            << "Error opening \"" << file << "\"." << std::endl;
}

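// Restore weights (and the epoch counter) previously written by
// save(); throws if the file cannot be opened.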
void MLP::load(const std::string file)
{
    int epoch_ = 0;
    if (LoadWeights(const_cast<char*>(file.c_str()), &epoch_) < 0)
        throw cms::Exception("MLP")
            << "Error opening \"" << file << "\"." << std::endl;
    epoch = (unsigned int)epoch_;
}

} // namespace PhysicsTools