#include <stdexcept>

#include "../interface/Asymptotic.h"
#include <RooRealVar.h>
#include <RooArgSet.h>
#include <RooAbsPdf.h>
#include <RooFitResult.h>
#include <RooRandom.h>
#include <RooStats/ModelConfig.h>
#include <Math/DistFuncMathCore.h>
#include "../interface/Combine.h"
#include "../interface/CloseCoutSentry.h"
#include "../interface/RooFitGlobalKillSentry.h"
#include "../interface/ProfiledLikelihoodRatioTestStatExt.h"
#include "../interface/ToyMCSamplerOpt.h"
#include "../interface/ProfileLikelihood.h"
#include "../interface/CascadeMinimizer.h"
#include "../interface/utils.h"
#include "../interface/AsimovUtils.h"

using namespace RooStats;

double Asymptotic::rAbsAccuracy_ = 0.0005;
double Asymptotic::rRelAccuracy_ = 0.005;
std::string Asymptotic::what_ = "both";
bool Asymptotic::qtilde_ = true;
bool Asymptotic::picky_ = false;
bool Asymptotic::noFitAsimov_ = false;
bool Asymptotic::newExpected_ = false;
std::string Asymptotic::minosAlgo_ = "stepping";
std::string Asymptotic::minimizerAlgo_ = "Minuit2";
float Asymptotic::minimizerTolerance_ = 0.01;
int Asymptotic::minimizerStrategy_ = 0;
double Asymptotic::rValue_ = 1.0;


Asymptotic::Asymptotic() :
    LimitAlgo("Asymptotic specific options") {
    options_.add_options()
        ("rAbsAcc", boost::program_options::value<double>(&rAbsAccuracy_)->default_value(rAbsAccuracy_), "Absolute accuracy on r to reach to terminate the scan")
        ("rRelAcc", boost::program_options::value<double>(&rRelAccuracy_)->default_value(rRelAccuracy_), "Relative accuracy on r to reach to terminate the scan")
        ("run", boost::program_options::value<std::string>(&what_)->default_value(what_), "What to run: both (default), observed, expected, blind.")
        ("singlePoint", boost::program_options::value<double>(&rValue_), "Just compute CLs for the given value of r")
        ("minimizerAlgo", boost::program_options::value<std::string>(&minimizerAlgo_)->default_value(minimizerAlgo_), "Choice of minimizer used for profiling (Minuit vs Minuit2)")
        ("minimizerTolerance", boost::program_options::value<float>(&minimizerTolerance_)->default_value(minimizerTolerance_), "Tolerance for minimizer used for profiling")
        ("minimizerStrategy", boost::program_options::value<int>(&minimizerStrategy_)->default_value(minimizerStrategy_), "Strategy for minimizer")
        ("qtilde", boost::program_options::value<bool>(&qtilde_)->default_value(qtilde_), "Allow only non-negative signal strengths (default is true).")
        ("picky", "Abort on fit failures")
        ("noFitAsimov", "Use the pre-fit Asimov dataset")
        ("newExpected", "Use the new formula for expected limits")
        ("minosAlgo", boost::program_options::value<std::string>(&minosAlgo_)->default_value(minosAlgo_), "Algorithm to use to get the median expected limit: 'minos' (fastest), 'bisection', 'stepping' (default, most robust)")
    ;
}

void Asymptotic::applyOptions(const boost::program_options::variables_map &vm) {
    if (vm.count("singlePoint") && !vm["singlePoint"].defaulted()) {
        if (!vm["run"].defaulted()) throw std::invalid_argument("Asymptotic: when using --singlePoint you can't use --run (at least for now)");
        what_ = "singlePoint";
    } else {
        if (what_ != "observed" && what_ != "expected" && what_ != "both" && what_ != "blind")
            throw std::invalid_argument("Asymptotic: option 'run' can only be 'observed', 'expected', 'both' (the default), or 'blind' (a-priori expected)");
    }
    picky_ = vm.count("picky");
    noFitAsimov_ = vm.count("noFitAsimov");
    newExpected_ = vm.count("newExpected");
    if (what_ == "blind") { what_ = "expected"; noFitAsimov_ = true; }
    if (noFitAsimov_) std::cout << "Will use the a-priori expected background instead of the a-posteriori one." << std::endl;
}

void Asymptotic::applyDefaultOptions() {
    what_ = "observed"; noFitAsimov_ = true;
}

bool Asymptotic::run(RooWorkspace *w, RooStats::ModelConfig *mc_s, RooStats::ModelConfig *mc_b, RooAbsData &data, double &limit, double &limitErr, const double *hint) {
    RooFitGlobalKillSentry silence(verbose <= 1 ? RooFit::WARNING : RooFit::DEBUG);
    ProfileLikelihood::MinimizerSentry minimizerConfig(minimizerAlgo_, minimizerTolerance_);
    if (verbose > 0) std::cout << "Will compute " << what_ << " limit(s) using minimizer " << minimizerAlgo_
                               << " with strategy " << minimizerStrategy_ << " and tolerance " << minimizerTolerance_ << std::endl;

    bool ret = false;
    std::vector<std::pair<float,float> > expected;
    if (what_ == "both" || what_ == "expected") expected = runLimitExpected(w, mc_s, mc_b, data, limit, limitErr, hint);
    if (what_ != "expected") ret = runLimit(w, mc_s, mc_b, data, limit, limitErr, hint);

    if (verbose >= 0) {
        const char *rname = mc_s->GetParametersOfInterest()->first()->GetName();
        std::cout << "\n -- Asymptotic -- " << "\n";
        if (what_ == "singlePoint") {
            printf("Observed CLs for %s = %.1f: %6.4f \n", rname, rValue_, limit);
        } else if (ret && what_ != "expected") {
            printf("Observed Limit: %s < %6.4f\n", rname, limit);
        }
        for (std::vector<std::pair<float,float> >::const_iterator it = expected.begin(), ed = expected.end(); it != ed; ++it) {
            printf("Expected %4.1f%%: %s < %6.4f\n", it->first*100, rname, it->second);
        }
        std::cout << std::endl;
    }

    return ret;
}

bool Asymptotic::runLimit(RooWorkspace *w, RooStats::ModelConfig *mc_s, RooStats::ModelConfig *mc_b, RooAbsData &data, double &limit, double &limitErr, const double *hint) {
    RooRealVar *r = dynamic_cast<RooRealVar *>(mc_s->GetParametersOfInterest()->first());
    w->loadSnapshot("clean");
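    // Build (or fetch the cached) background-only Asimov dataset. As a side effect this also
    // snapshots the global observables used below: snapGlobalObsData for fits to the observed
    // data and, when the Asimov is built with a fit, snapGlobalObsAsimov.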
    RooAbsData &asimov = *asimovDataset(w, mc_s, mc_b, data);

    r->setConstant(false);
    r->setVal(0.1*r->getMax());
    r->setMin(qtilde_ ? 0 : -r->getMax());

    if (params_.get() == 0) params_.reset(mc_s->GetPdf()->getParameters(data));

    hasFloatParams_ = false;
    std::auto_ptr<TIterator> itparam(params_->createIterator());
    for (RooAbsArg *a = (RooAbsArg *) itparam->Next(); a != 0; a = (RooAbsArg *) itparam->Next()) {
        RooRealVar *rrv = dynamic_cast<RooRealVar *>(a);
        if (rrv != 0 && rrv != r && rrv->isConstant() == false) { hasFloatParams_ = true; break; }
    }

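    // Build the two profile-likelihood NLLs used throughout: one for the observed data (to
    // compute q_mu) and one for the background-only Asimov dataset (to compute q_A), with the
    // nuisance-parameter constraint terms included when systematics are enabled.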
    RooArgSet constraints; if (withSystematics) constraints.add(*mc_s->GetNuisanceParameters());
    nllD_.reset(mc_s->GetPdf()->createNLL(data, RooFit::Constrain(constraints)));
    nllA_.reset(mc_s->GetPdf()->createNLL(asimov, RooFit::Constrain(constraints)));

    if (verbose > 0) std::cout << (qtilde_ ? "Restricting" : "Not restricting") << " " << r->GetName() << " to positive values." << std::endl;
    if (verbose > 1) params_->Print("V");

    if (verbose > 0) std::cout << "\nMake global fit of real data" << std::endl;
    {
        CloseCoutSentry sentry(verbose < 3);
        *params_ = snapGlobalObsData;
        CascadeMinimizer minim(*nllD_, CascadeMinimizer::Unconstrained, r);
        minim.setStrategy(minimizerStrategy_);
        minim.minimize(verbose-2);
        fitFreeD_.readFrom(*params_);
        minNllD_ = nllD_->getVal();
    }
    if (verbose > 0) std::cout << "NLL at global minimum of data: " << minNllD_ << " (" << r->GetName() << " = " << r->getVal() << ")" << std::endl;
    double rErr = std::max<double>(r->getError(), 0.02 * (r->getMax() - r->getMin()));

    r->setMin(0);

    if (verbose > 1) fitFreeD_.Print("V");
    if (verbose > 0) std::cout << "\nMake global fit of asimov data" << std::endl;
    {
        CloseCoutSentry sentry(verbose < 3);
        *params_ = snapGlobalObsAsimov;
        CascadeMinimizer minim(*nllA_, CascadeMinimizer::Unconstrained, r);
        minim.setStrategy(minimizerStrategy_);
        minim.minimize(verbose-2);
        fitFreeA_.readFrom(*params_);
        minNllA_ = nllA_->getVal();
    }
    if (verbose > 0) std::cout << "NLL at global minimum of asimov: " << minNllA_ << " (" << r->GetName() << " = " << r->getVal() << ")" << std::endl;
    if (verbose > 1) fitFreeA_.Print("V");

    fitFreeD_.writeTo(*params_);
    r->setConstant(true);

    if (what_ == "singlePoint") {
        limit = getCLs(*r, rValue_, true, &limit, &limitErr);
        return true;
    }

    double clsTarget = 1-cl;
    double rMin = std::max<double>(0, r->getVal()), rMax = rMin + 3 * rErr;
    double clsMax = 1, clsMin = 0;
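    // Bracket the upper limit: starting from rMax = best-fit r + 3*sigma_r, keep doubling rMax
    // (at most 5 times) until CLs(rMax) falls below the target 1-CL, so that the crossing is
    // contained in [rMin, rMax].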
    for (int tries = 0; tries < 5; ++tries) {
        double cls = getCLs(*r, rMax);
        if (cls == -999) { std::cerr << "Minimization failed in an unrecoverable way" << std::endl; break; }
        if (cls < clsTarget) { clsMin = cls; break; }
        rMax *= 2;
    }

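    // Refine the crossing: when both bracket endpoints are reasonably close to the target,
    // interpolate assuming CLs falls roughly exponentially with r, nudging the interpolated
    // point 20% towards the farther end of the bracket; otherwise fall back to plain bisection.
    // Stop once the half-width of the bracket satisfies the requested accuracy on r.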
    do {
        if (clsMax < 3*clsTarget && clsMin > 0.3*clsTarget) {
            double rCross = rMin + (rMax-rMin)*log(clsMax/clsTarget)/log(clsMax/clsMin);
            if ((rCross-rMin) < (rMax - rCross)) {
                limit = 0.8*rCross + 0.2*rMax;
            } else {
                limit = 0.8*rCross + 0.2*rMin;
            }
            limitErr = 0.5*(rMax - rMin);
        } else {
            limit = 0.5*(rMin + rMax);
            limitErr = 0.5*(rMax - rMin);
        }
        double cls = getCLs(*r, limit);
        if (cls == -999) { std::cerr << "Minimization failed in an unrecoverable way" << std::endl; break; }
        if (cls > clsTarget) {
            clsMax = cls;
            rMin = limit;
        } else {
            clsMin = cls;
            rMax = limit;
        }
    } while (limitErr > std::max(rRelAccuracy_ * limit, rAbsAccuracy_));

    return true;
}

double Asymptotic::getCLs(RooRealVar &r, double rVal, bool getAlsoExpected, double *limit, double *limitErr) {
    r.setMax(1.1 * rVal);
    r.setConstant(true);

    CloseCoutSentry sentry(verbose < 3);

    CascadeMinimizer minimD(*nllD_, CascadeMinimizer::Constrained, &r);
    minimD.setStrategy(minimizerStrategy_);

    (!fitFixD_.empty() ? fitFixD_ : fitFreeD_).writeTo(*params_);
    *params_ = snapGlobalObsData;
    r.setVal(rVal);
    r.setConstant(true);
    if (hasFloatParams_) {
        if (!minimD.improve(verbose-2) && picky_) return -999;
        fitFixD_.readFrom(*params_);
        if (verbose >= 2) fitFixD_.Print("V");
    }
    double qmu = 2*(nllD_->getVal() - minNllD_); if (qmu < 0) qmu = 0;

    CascadeMinimizer minimA(*nllA_, CascadeMinimizer::Constrained, &r);
    minimA.setStrategy(minimizerStrategy_);

    (!fitFixA_.empty() ? fitFixA_ : fitFreeA_).writeTo(*params_);
    *params_ = snapGlobalObsAsimov;
    r.setVal(rVal);
    r.setConstant(true);
    if (hasFloatParams_) {
        if (!minimA.improve(verbose-2) && picky_) return -999;
        fitFixA_.readFrom(*params_);
        if (verbose >= 2) fitFixA_.Print("V");
    }
    double qA = 2*(nllA_->getVal() - minNllA_); if (qA < 0) qA = 0;

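    // Asymptotic formulae for the one-sided profile-likelihood test statistic
    // (cf. Cowan, Cranmer, Gross, Vitells, arXiv:1007.1727):
    //   CLs+b = 1 - Phi(sqrt(q_mu)),   CLb = Phi(sqrt(q_A) - sqrt(q_mu)).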
    double CLsb = ROOT::Math::normal_cdf_c(sqrt(qmu));
    double CLb = ROOT::Math::normal_cdf(sqrt(qA)-sqrt(qmu));
    if (qtilde_ && qmu > qA) {
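        // q_mu > q_A: with the qtilde statistic the formulae above no longer apply; switch to
        // the corresponding expressions of the same reference, with sigma estimated from the
        // Asimov value as sigma ~= rVal/sqrt(q_A).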
        double mos = sqrt(qA);
        CLsb = ROOT::Math::normal_cdf_c( (qmu + qA)/(2*mos) );
        CLb = ROOT::Math::normal_cdf_c( (qmu - qA)/(2*mos) );
    }
    double CLs = (CLb == 0 ? 0 : CLsb/CLb);
    sentry.clear();
    if (verbose > 0) printf("At %s = %f:\tq_mu = %.5f\tq_A = %.5f\tCLsb = %7.5f\tCLb = %7.5f\tCLs = %7.5f\n", r.GetName(), rVal, qmu, qA, CLsb, CLb, CLs);

    if (getAlsoExpected) {
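        // Expected CLs bands computed directly from the Asimov test statistic: for the quantile
        // q (N = Phi^-1(q) standard deviations), CLb = q and CLs+b = 1 - Phi(sqrt(q_A) - N).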
        const double quantiles[5] = { 0.025, 0.16, 0.50, 0.84, 0.975 };
        for (int iq = 0; iq < 5; ++iq) {
            double N = ROOT::Math::normal_quantile(quantiles[iq], 1.0);
            double clb = quantiles[iq];
            double clsplusb = ROOT::Math::normal_cdf_c( sqrt(qA) - N, 1.);
            *limit = (clb != 0 ? clsplusb/clb : 0); *limitErr = 0;
            Combine::commitPoint(true, quantiles[iq]);
            if (verbose > 0) printf("Expected %4.1f%%: CLsb = %.5f CLb = %.5f CLs = %.5f\n", quantiles[iq]*100, clsplusb, clb, clsplusb/clb);
        }
    }
    return CLs;
}

std::vector<std::pair<float,float> > Asymptotic::runLimitExpected(RooWorkspace *w, RooStats::ModelConfig *mc_s, RooStats::ModelConfig *mc_b, RooAbsData &data, double &limit, double &limitErr, const double *hint) {
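    // Expected limits from the background-only Asimov dataset:
    //  1) build the Asimov dataset and fit it (the best-fit r should be compatible with zero);
    //  2) find the median expected limit as the value of r at which the Asimov profile
    //     likelihood crosses the threshold corresponding to the requested CL;
    //  3) derive the effective sigma from the median and compute the +/-1,2 sigma bands, either
    //     with the asymptotic band formula or, with --newExpected, from the actual crossings.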
    RooArgSet poi(*mc_s->GetParametersOfInterest());
    RooRealVar *r = dynamic_cast<RooRealVar *>(poi.first());

    RooAbsData *asimov = asimovDataset(w, mc_s, mc_b, data);

    if (params_.get() == 0) params_.reset(mc_s->GetPdf()->getParameters(data));
    *params_ = snapGlobalObsAsimov;

    r->setConstant(false);
    r->setMin(qtilde_ ? 0 : -r->getMax());
    r->setVal(0.01*r->getMax());
    r->setError(0.1*r->getMax());

    std::auto_ptr<RooAbsReal> nll(mc_s->GetPdf()->createNLL(*asimov, RooFit::Constrain(*mc_s->GetNuisanceParameters())));
    CascadeMinimizer minim(*nll, CascadeMinimizer::Unconstrained, r);
    minim.setStrategy(minimizerStrategy_);
    minim.setErrorLevel(0.5*pow(ROOT::Math::normal_quantile(1-0.5*(1-cl),1.0), 2));

    CloseCoutSentry sentry(verbose < 3);
    minim.minimize(verbose-2);
    sentry.clear();
    if (verbose > 1) {
        std::cout << "Fit to asimov dataset:" << std::endl;
        std::auto_ptr<RooFitResult> res(minim.save());
        res->Print("V");
    }
    if (r->getVal()/r->getMax() > 1e-3) {
        if (verbose) printf("WARNING: Best fit of asimov dataset is at %s = %f (%f times %sMax), while it should be at zero\n",
                            r->GetName(), r->getVal(), r->getVal()/r->getMax(), r->GetName());
    }

    double nll0 = nll->getVal();
    double median = findExpectedLimitFromCrossing(*nll, r, r->getMin(), r->getMax(), nll0, 0.5);
    double sigma = median / ROOT::Math::normal_quantile(1-0.5*(1-cl),1.0);
    double alpha = 1-cl;
    if (verbose > 0) {
        std::cout << "Median for expected limits: " << median << std::endl;
        std::cout << "Sigma for expected limits: " << sigma << std::endl;
    }

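    // For each quantile, translate into N = Phi^-1(q) standard deviations. With --newExpected,
    // each band (except the median) is obtained from the actual crossing of the Asimov NLL at
    // the corresponding level; otherwise the asymptotic CLs band formula
    // limit(N) = sigma * (Phi^-1(1 - alpha*q) + N) is used.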
    std::vector<std::pair<float,float> > expected;
    const double quantiles[5] = { 0.025, 0.16, 0.50, 0.84, 0.975 };
    for (int iq = 0; iq < 5; ++iq) {
        double N = ROOT::Math::normal_quantile(quantiles[iq], 1.0);
        if (newExpected_ && iq != 2) {
            std::string minosAlgoBackup = minosAlgo_;
            if (minosAlgo_ == "stepping") minosAlgo_ = "bisection";
            switch (iq) {
                case 0: limit = findExpectedLimitFromCrossing(*nll, r, r->getMin(), median, nll0, quantiles[iq]); break;
                case 1: limit = findExpectedLimitFromCrossing(*nll, r, expected.back().second, median, nll0, quantiles[iq]); break;
                case 3: limit = findExpectedLimitFromCrossing(*nll, r, expected.back().second, median+2*sigma, nll0, quantiles[iq]); break;
                case 4: limit = findExpectedLimitFromCrossing(*nll, r, expected.back().second, median+4*sigma, nll0, quantiles[iq]); break;
            }
            minosAlgo_ = minosAlgoBackup;
            if (std::isnan(limit)) { expected.clear(); break; }
        } else {
            limit = sigma*(ROOT::Math::normal_quantile(1 - alpha * quantiles[iq], 1.0) + N);
        }
        limitErr = 0;
        Combine::commitPoint(true, quantiles[iq]);
        expected.push_back(std::pair<float,float>(quantiles[iq], limit));
    }
    return expected;
}

float Asymptotic::findExpectedLimitFromCrossing(RooAbsReal &nll, RooRealVar *r, double rMin, double rMax, double nll0, double clb) {
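    // Find the value of r at which the Asimov profile likelihood crosses the threshold for the
    // band at CLb = clb (clb = 0.5 gives the median expected limit). The error level is
    // 0.5*(N + Phi^-1(1 - clb*(1-CL)))^2 with N = Phi^-1(clb), and the crossing is located with
    // the algorithm selected by --minosAlgo: 'minos', 'bisection', 'stepping', or the 'new'
    // mode described below.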
    double N = ROOT::Math::normal_quantile(clb, 1.0);
    double errorlevel = 0.5 * pow(N+ROOT::Math::normal_quantile_c(clb*(1-cl),1.0), 2);
    int minosStat = -1;
    if (minosAlgo_ == "minos") {
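        // Use MINOS with a custom error level, so that r_best plus the positive MINOS error is
        // the requested crossing. If the crossing gets too close to the upper bound of the range,
        // enlarge the range and retry (up to a factor 100); if MINOS fails, retry with a higher
        // strategy and, as a last resort, with the other minimizer.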
        double rMax0 = r->getMax();

        CascadeMinimizer minim(nll, CascadeMinimizer::Unconstrained, r);
        minim.setStrategy(minimizerStrategy_);
        minim.setErrorLevel(errorlevel);
        CloseCoutSentry sentry(verbose < 3);
        minim.minimize(verbose-2);
        sentry.clear();
        for (int tries = 0; tries < 3; ++tries) {
            minosStat = minim.minimizer().minos(RooArgSet(*r));
            if (minosStat != -1) {
                while ((minosStat != -1) && (r->getVal()+r->getAsymErrorHi())/r->getMax() > 0.9) {
                    if (r->getMax() >= 100*rMax0) { minosStat = -1; break; }
                    r->setMax(2*r->getMax());
                    CascadeMinimizer minim2(nll, CascadeMinimizer::Unconstrained, r);
                    minim2.setStrategy(minimizerStrategy_);
                    minim2.setErrorLevel(errorlevel);
                    minim2.minimize(verbose-2);
                    minosStat = minim2.minimizer().minos(RooArgSet(*r));
                }
                break;
            }
            minim.setStrategy(2);
            if (tries == 1) {
                if (minimizerAlgo_.find("Minuit2") != std::string::npos) {
                    minim.minimizer().minimize("Minuit","minimize");
                } else {
                    minim.minimizer().minimize("Minuit2","minimize");
                }
            }
        }
        if (minosStat != -1) return r->getVal()+r->getAsymErrorHi();
    } else {
        double threshold = nll0 + errorlevel;
        double rCross = 0.5*(rMin+rMax), rErr = 0.5*(rMax-rMin);
        r->setVal(rCross); r->setConstant(true);
        CascadeMinimizer minim2(nll, CascadeMinimizer::Constrained);
        minim2.setStrategy(minimizerStrategy_);
        if (minosAlgo_ == "bisection") {
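            // Plain bisection in r on the profiled NLL: halve the [rMin, rMax] interval until the
            // NLL is within 0.05*tolerance of the threshold or the interval meets the requested
            // accuracy on r.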
            if (verbose > 1) printf("Will search for NLL crossing by bisection\n");
            while (rErr > std::max(rRelAccuracy_*rCross, rAbsAccuracy_)) {
                if (rCross >= r->getMax()) r->setMax(rCross*1.1);
                r->setVal(rCross);
                bool ok = true;
                {
                    CloseCoutSentry sentry2(verbose < 3);
                    ok = minim2.improve(verbose-2);
                }
                if (!ok && picky_) break; else minosStat = 0;
                double here = nll.getVal();
                if (verbose > 1) printf("At %s = %f:\tdelta(nll) = %.5f\n", r->GetName(), rCross, here-nll0);
                if (fabs(here - threshold) < 0.05*minimizerTolerance_) break;
                if (here < threshold) rMin = rCross; else rMax = rCross;
                rCross = 0.5*(rMin+rMax); rErr = 0.5*(rMax-rMin);
            }
        } else if (minosAlgo_ == "stepping") {
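            // Stepping search: start at 5% of rMax and walk upwards in r, halving the stride as
            // the threshold is approached or overstepped; once the crossing has been overstepped
            // the stride itself becomes the uncertainty on the crossing.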
            if (verbose > 1) printf("Will search for NLL crossing by stepping.\n");
            rCross = 0.05 * rMax; rErr = rMax;
            double stride = rCross; bool overstepped = false;
            while (rErr > std::max(rRelAccuracy_*rCross, rAbsAccuracy_)) {
                if (rCross >= r->getMax()) r->setMax(rCross*1.1);
                double there = nll.getVal();
                r->setVal(rCross);
                bool ok = true;
                {
                    CloseCoutSentry sentry2(verbose < 3);
                    ok = minim2.improve(verbose-2);
                }
                if (!ok && picky_) break; else minosStat = 0;
                double here = nll.getVal();
                if (verbose > 1) printf("At %s = %f:\tdelta(nll) = %.5f\n", r->GetName(), rCross, here-nll0);
                if (fabs(here - threshold) < 0.05*minimizerTolerance_) break;
                if (here < threshold) {
                    if ((threshold-here) < 0.5*fabs(threshold-there)) stride *= 0.5;
                    rCross += stride;
                } else {
                    stride *= 0.5; overstepped = true;
                    rCross -= stride;
                }
                if (overstepped) rErr = stride;
            }
        } else if (minosAlgo_ == "new") {
            if (verbose > 1) printf("Will search for NLL crossing with new algorithm.\n");
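            // Two-stage search that tries to minimise the number of (expensive) profiled fits.
            // Stage 1: step r upwards evaluating only the unprofiled NLL (nuisances frozen at the
            // values profiled at r_0), corrected by a quadratic term kappa*(r-r_0)^2 that
            // approximates the gain from profiling; re-profile only when the corrected NLL
            // appears to cross the threshold or the NLL jumps by more than 0.5.
            // Stage 2: bisect between the last two profiled points using the kappa-corrected
            // unprofiled NLL, re-profiling at each candidate crossing and updating kappa, until
            // the profiled NLL matches the threshold within the requested accuracy.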
            double r_0 = rMin;
            r->setVal(rMin);
            double nll_0 = nll.getVal();
            double rMax0 = rMax*100;
            double kappa = 0;

            double rStep = 0.05 * (rMax - r_0);
            double r_1 = r_0, nll_1 = nll_0;
            do {
                r_1 += rStep;
                if (r_1 >= r->getMax()) r->setMax(r_1*1.1);
                r->setVal(r_1);
                nll_1 = nll.getVal();

                bool bigNLLChange = (nll_1 < threshold && nll_1 - nll_0 > 0.5);
                bool aboveThresh = (nll_1 > threshold + kappa*std::pow(r_1-r_0,2));
                if (bigNLLChange || aboveThresh) {
                    if (verbose > 1) printf("At %s = %f:\tdelta(nll unprof) = %.5f\t \tkappa=%.5f\n", r->GetName(), r_1, nll_1-nll0, kappa);
                    {
                        CloseCoutSentry sentry2(verbose < 3);
                        bool ok = minim2.improve(verbose-2);
                        if (!ok && picky_) return std::numeric_limits<float>::quiet_NaN();
                    }
                    double nll_1_prof = nll.getVal();
                    kappa = (nll_1 - nll_1_prof) / std::pow(r_1 - r_0,2);
                    if (verbose > 1) printf("At %s = %f:\tdelta(nll unprof) = %.5f\tdelta(nll prof) = %.5f\tkappa=%.5f\n", r->GetName(), r_1, nll_1-nll0, nll.getVal()-nll0, kappa);
                    if (nll_1_prof > threshold) {
                        nll_1 = nll_1_prof;
                        break;
                    } else {
                        r_0 = r_1;
                        nll_0 = nll_1_prof;
                        if (aboveThresh) rStep *= 2;
                    }
                } else {
                    if (verbose > 1) printf("At %s = %f:\tdelta(nll unprof) = %.5f\t \tkappa=%.5f\n", r->GetName(), r_1, nll_1-nll0, kappa);
                }
                if (r_1 > rMax0) return std::numeric_limits<float>::quiet_NaN();
            } while (true);

            if (verbose > 1) printf("At %s = %f:\t \tdelta(nll prof) = %.5f\tkappa=%.5f\n", r->GetName(), r_0, nll_0-nll0, kappa);
            if (verbose > 1) printf("At %s = %f:\t \tdelta(nll prof) = %.5f\tkappa=%.5f\n", r->GetName(), r_1, nll_1-nll0, kappa);
            minosStat = 0;
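            // Stage 2: the crossing is now bracketed between r_0 and r_1. Bisect using the
            // unprofiled NLL corrected by -kappa*(r-r_1)^2 as a cheap approximation of the
            // profiled NLL, then re-profile at the candidate crossing, update kappa from the
            // difference, and shrink the bracket; stop when the profiled NLL matches the
            // threshold or the bracket satisfies the accuracy requirement on r.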
            do {
                double rEps = 0.2*std::max(rRelAccuracy_*r_1, rAbsAccuracy_);

                double r_lo = std::min(r_0,r_1), r_hi = std::max(r_1,r_0);
                while (r_hi - r_lo > rEps) {
                    double r_2 = 0.5*(r_hi+r_lo);
                    r->setVal(r_2);
                    double y0 = nll.getVal(), y = y0 - kappa*std::pow(r_2-r_1,2);
                    if (verbose > 1) printf("At %s = %f:\tdelta(nll unprof) = %.5f\tdelta(nll appr) = %.5f\tkappa=%.5f\n", r->GetName(), r_2, y0-nll0, y-nll0, kappa);
                    if (y < threshold) { r_lo = r_2; } else { r_hi = r_2; }
                }

                rCross = r->getVal();
                double nll_unprof = nll.getVal();
                bool ok = true;
                {
                    CloseCoutSentry sentry2(verbose < 3);
                    ok = minim2.improve(verbose-2);
                }
                if (!ok && picky_) return std::numeric_limits<float>::quiet_NaN();
                double nll_prof = nll.getVal();
                if (verbose > 1) printf("At %s = %f:\tdelta(nll unprof) = %.5f\tdelta(nll prof) = %.5f\tdelta(nll appr) = %.5f\n", r->GetName(), rCross, nll_unprof-nll0, nll_prof-nll0, nll_unprof-nll0 - kappa*std::pow(rCross-r_1,2));
                if (fabs(nll_prof - threshold) < 0.1*minimizerTolerance_) { break; }

                kappa = (nll_unprof - nll_prof)/std::pow(rCross-r_1,2);

                if ((nll_prof < threshold) == (nll_0 < threshold)) {
                    r_0 = r_1;
                    nll_0 = nll_1;
                } else {
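                    // new point is on the opposite side of the threshold from r_0:
                    // keep r_0 as the other end of the bracket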
                }
                r_1 = rCross; nll_1 = nll_prof;
            } while (fabs(r_1-r_0) > std::max(rRelAccuracy_*rCross, rAbsAccuracy_));
        }
        if (minosStat != -1) return rCross;
    }
    return std::numeric_limits<float>::quiet_NaN();
}

RooAbsData * Asymptotic::asimovDataset(RooWorkspace *w, RooStats::ModelConfig *mc_s, RooStats::ModelConfig *mc_b, RooAbsData &data) {
    if (w->data("_Asymptotic_asimovDataset_") != 0) {
        return w->data("_Asymptotic_asimovDataset_");
    }

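    // Snapshot the global observables (held constant) for later use in the fits to the observed
    // data; the snapshot for the Asimov fits (snapGlobalObsAsimov) is filled by
    // asimovDatasetWithFit below when the Asimov dataset is built with a fit to the data.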
    RooArgSet gobs;
    if (withSystematics && mc_s->GetGlobalObservables()) {
        gobs.add(*mc_s->GetGlobalObservables());
        snapGlobalObsData.removeAll();
        utils::setAllConstant(gobs, true);
        gobs.snapshot(snapGlobalObsData);
    }

    RooAbsData *asimovData = (noFitAsimov_ ? asimovutils::asimovDatasetNominal(mc_s, 0.0, verbose) :
                                             asimovutils::asimovDatasetWithFit(mc_s, data, snapGlobalObsAsimov, 0.0, verbose));
    asimovData->SetName("_Asymptotic_asimovDataset_");
    w->import(*asimovData);
    delete asimovData;
    return w->data("_Asymptotic_asimovDataset_");
}