#include "lwtnn/LightweightNeuralNetwork.hh"
#include "lwtnn/parse_json.hh"
// ...
#include <boost/algorithm/string.hpp>
// ...

class NeuralNetworkAndConstants {
public:
  NeuralNetworkAndConstants(const edm::ParameterSet&);

  std::unique_ptr<const lwt::LightweightNeuralNetwork> const& neural_network() const { return neural_network_; }
  vector<string> const& outputs() const { return outputs_; }
  bool check_sv_for_defaults() const { return check_sv_for_defaults_; }
  map<string, string> const& toadd() const { return toadd_; }
  vector<MVAVar> const& variables() const { return variables_; }

private:
  std::unique_ptr<const lwt::LightweightNeuralNetwork> neural_network_;
  vector<string> outputs_;
  bool check_sv_for_defaults_;
  map<string, string> toadd_;
  vector<MVAVar> variables_;
};
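The class above stores a vector of MVAVar helpers whose definition is not part of this extract. As a hedged sketch only, its fields can be inferred from how `var` is used in the constructor below; the exact types in the file may differ.

#include <string>
#include "DataFormats/BTauReco/interface/TaggingVariable.h"

// Hedged sketch, inferred from usage (var.name, var.id, var.index, var.default_value).
struct MVAVar {
  std::string name;                    // NN input name, e.g. "Jet_JP"
  reco::btau::TaggingVariableName id;  // tagging variable handed to vars.get()/getList()
  int index;                           // element of the per-jet list, or -1 for single-valued inputs
  float default_value;                 // value used when the tagging variable is absent
};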
  // ... (members of class DeepFlavourJetTagsProducer)
  static std::unique_ptr<NeuralNetworkAndConstants> initializeGlobalCache(const edm::ParameterSet& iConfig) {
    return std::make_unique<NeuralNetworkAndConstants>(iConfig);
  }

  static void globalEndJob(NeuralNetworkAndConstants*) {}

  typedef std::vector<reco::ShallowTagInfo> INFOS;
  // ...
  void endStream() override {}
  // ...
  lwt::ValueMap inputs_;
NeuralNetworkAndConstants::NeuralNetworkAndConstants(const edm::ParameterSet& iConfig)
    : check_sv_for_defaults_(iConfig.getParameter<bool>("checkSVForDefaults")) {
  bool mean_padding = iConfig.getParameter<bool>("meanPadding");
  // ...
  ifstream jsonfile(nnconfig.fullPath());
  // ...
  neural_network_ =
      std::make_unique<const lwt::LightweightNeuralNetwork>(config.inputs, config.layers, config.outputs);

  outputs_ = config.outputs;
  set<string> outset(outputs_.begin(), outputs_.end());
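For orientation, the lwtnn pieces used here (parse_json on a std::ifstream, a LightweightNeuralNetwork built from the parsed inputs/layers/outputs, and compute() mapping named inputs to named outputs) can be exercised standalone roughly as in the sketch below; the JSON path and the zero-filled input values are placeholders, not values from this producer.

#include <fstream>
#include "lwtnn/LightweightNeuralNetwork.hh"
#include "lwtnn/parse_json.hh"

int main() {
  std::ifstream jsonfile("my_network.json");  // placeholder path to an lwtnn JSON dump
  lwt::JSONConfig config = lwt::parse_json(jsonfile);

  lwt::LightweightNeuralNetwork nn(config.inputs, config.layers, config.outputs);

  lwt::ValueMap in;                   // lwt::ValueMap is a map<string, double>
  for (auto const& input : config.inputs)
    in[input.name] = 0.;              // fill with real feature values in practice
  lwt::ValueMap out = nn.compute(in); // one entry per configured output node
  return out.empty() ? 1 : 0;
}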
  for (auto const& output : toaddPSet.getParameterNamesForType<string>()) {
    // ...
    if (outset.find(output) == outset.end())
      throw cms::Exception("RuntimeError") << "The required output: " << output << " to be added to " << target
                                           << " could not be found among the NN outputs" << endl;
    if (outset.find(target) == outset.end())
      throw cms::Exception("RuntimeError") << "The required output: " << target << ", target of addition of "
                                           << output << " could not be found among the NN outputs" << endl;
    // ...
  }
  // one MVAVar per NN input feature
  for (auto const& input : config.inputs) {
    MVAVar var;
    var.name = input.name;

    // two naming paradigms: a few inputs are used verbatim, the rest are "<tagvar>_<index>"
    vector<string> tokens;
    if (var.name != "Jet_JP" && var.name != "Jet_JBP" && var.name != "Jet_SoftMu" && var.name != "Jet_SoftEl") {
      boost::split(tokens, var.name, boost::is_any_of("_"));
    } else {
      tokens.push_back(var.name);
    }
    // ...
      throw cms::Exception("RuntimeError") << "I could not parse properly " << input.name << " as input feature"
                                           << std::endl;
    // ...
      throw cms::Exception("ValueError") << "I could not find the TaggingVariable named " << tokens.at(0)
                                         << " from the NN input variable: " << input.name
                                         << ". Please check the spelling" << std::endl;
    // ...
    var.index = (tokens.size() == 2) ? stoi(tokens.at(1)) : -1;
    var.default_value = (mean_padding) ? 0. : -1 * input.offset;
    // ...
    variables_.push_back(var);
  }
}
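The tokenisation above follows a simple convention: the four Jet_* names are used verbatim, while every other input is split on "_" into a tagging-variable name plus an optional list index. A small standalone illustration follows; the input name is an invented example, not taken from the actual network configuration.

#include <iostream>
#include <string>
#include <vector>
#include <boost/algorithm/string.hpp>

int main() {
  std::vector<std::string> tokens;
  std::string name = "trackSip3dSig_0";  // hypothetical NN input name
  boost::split(tokens, name, boost::is_any_of("_"));
  int index = (tokens.size() == 2) ? std::stoi(tokens.at(1)) : -1;
  std::cout << tokens.at(0) << " -> element " << index << "\n";  // prints "trackSip3dSig -> element 0"
  return 0;
}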
DeepFlavourJetTagsProducer::DeepFlavourJetTagsProducer(const edm::ParameterSet& iConfig,
                                                       NeuralNetworkAndConstants const* gc)
    : src_(consumes<INFOS>(iConfig.getParameter<edm::InputTag>("src"))) /* ... */ {
  // book one JetTagCollection per NN output node, unless that node is only added into another one
  for (auto const& outnode : gc->outputs()) {
    if (gc->toadd().find(outnode) == gc->toadd().end()) {
      produces<JetTagCollection>(outnode);
    }
  }
}

DeepFlavourJetTagsProducer::~DeepFlavourJetTagsProducer() {}
  // ... (inside DeepFlavourJetTagsProducer::produce)
  NeuralNetworkAndConstants const* gc = globalCache();
  vector<string> const& outputs = gc->outputs();
  map<string, string> const& toadd = gc->toadd();
  // ...
  vector<std::unique_ptr<JetTagCollection>> output_tags;
  output_tags.reserve(outputs.size());
  for (size_t i = 0; i < outputs.size(); ++i) {
    if (!taginfos->empty()) {
      // ...
      output_tags.push_back(std::make_unique<JetTagCollection>(/* ... */));
    } else {
      output_tags.push_back(std::make_unique<JetTagCollection>());
    }
  }
  // loop over the tag infos (one per jet)
  for (auto& info : *(taginfos)) {
    // ...
    bool defaulted = (gc->check_sv_for_defaults()) ? (notracks && novtx) : notracks;
    // ...
    // fill the NN input map
    for (auto const& var : gc->variables()) {
      // ...
      // indexed feature: pick the requested element of the list, or the default if the list is too short
      std::vector<float> vals = vars.getList(var.id, false);
      inputs_[var.name] = (((int)vals.size()) > var.index) ? vals.at(var.index) : var.default_value;
      // ...
      // single-valued feature
      inputs_[var.name] = vars.get(var.id, var.default_value);
      // ...
    }

    // evaluate the NN on the filled input map
    nnout = gc->neural_network()->compute(inputs_);

    // add the "toAdd" outputs onto their targets
    for (auto const& entry : toadd) {
      // ...
    }

    // count nan outputs
    for (const auto& entry : nnout) {
      // ...
    }
    // ...
    // store one discriminator value per output node (defaulted jets get -1)
    for (size_t i = 0; i < outputs.size(); ++i) {
      (*output_tags[i])[key] = (defaulted) ? -1 : nnout[outputs[i]];
    }
  }
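The bodies of the two short loops above (the toadd merge and the nan count over nnout) are not part of this extract. The following is a hedged, standalone reconstruction of what they presumably do, with invented output names and scores.

#include <cmath>
#include <map>
#include <string>

int main() {
  // hypothetical output -> target map and NN scores, for illustration only
  std::map<std::string, std::string> toadd = {{"probcc", "probc"}};
  std::map<std::string, double> nnout = {{"probc", 0.1}, {"probcc", 0.2}, {"probudsg", 0.7}};

  for (auto const& entry : toadd)
    nnout[entry.second] += nnout[entry.first];  // fold the source score into its target (probc becomes 0.3)

  int nanoutput = 0;
  for (const auto& entry : nnout)
    if (std::isnan(entry.second))
      ++nanoutput;                              // counts non-numeric NN outputs for the warning below
  return nanoutput;
}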
  if (naninput + nanoutput > 0) {
    edm::LogWarning("ValueError") << "The NN encountered " << naninput << " nan input TagInfo values and produced "
                                  << nanoutput << " nan output values";
  }

  // put the outputs into the event, skipping nodes that were only added into others
  for (size_t i = 0; i < outputs.size(); ++i) {
    if (toadd.find(outputs[i]) == toadd.end()) {
      // ...
    }
  }
}