from __future__ import print_function
from __future__ import absolute_import
from builtins import range
import fnmatch
import subprocess
import FWCore.ParameterSet.Config as cms
import FWCore.ParameterSet.VarParsing as VarParsing
from .dqmPythonTypes import *
from Configuration.Applications.ConfigBuilder import filesFromDASQuery
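# This fragment builds a file-based input source for DQM clients: it collects
# the input files for one run (either from a local dataset_cfi.py or via a DAS
# query), restricts them to a range of lumisections, and exposes 'source',
# 'maxEvents' and 'runType' for the importing configuration.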
options = VarParsing.VarParsing('analysis')

options.register('runkey',
                 'pp_run',
                 VarParsing.VarParsing.multiplicity.singleton,
                 VarParsing.VarParsing.varType.string,
                 "Run Keys of CMS")
options.register('runUniqueKey',
                 'InValid',
                 VarParsing.VarParsing.multiplicity.singleton,
                 VarParsing.VarParsing.varType.string,
                 "Unique run key from RCMS for Frontier")
options.register('runNumber',
                 286520,  # example default; must be present in the selected dataset
                 VarParsing.VarParsing.multiplicity.singleton,
                 VarParsing.VarParsing.varType.int,
                 "Run number. This run number has to be present in the dataset configured with the dataset option.")
options.register('maxLumi',
                 2000,
                 VarParsing.VarParsing.multiplicity.singleton,
                 VarParsing.VarParsing.varType.int,
                 "Only lumisections up to maxLumi are processed.")
options.register('minLumi',
                 1,
                 VarParsing.VarParsing.multiplicity.singleton,
                 VarParsing.VarParsing.varType.int,
                 "Only lumisections starting from minLumi are processed.")
options.register('lumiPattern',
                 '*',  # match any lumisection number by default
                 VarParsing.VarParsing.multiplicity.singleton,
                 VarParsing.VarParsing.varType.string,
                 "Only lumisections with numbers matching lumiPattern are processed.")
options.register('dataset',
                 'auto',
                 VarParsing.VarParsing.multiplicity.singleton,
                 VarParsing.VarParsing.varType.string,
                 "Dataset name like '/ExpressPhysicsPA/PARun2016D-Express-v1/FEVT', or 'auto' to guess it with a DAS query. A dataset_cfi.py that defines 'readFiles' and 'secFiles' (like a DAS Python snippet) will override this, to avoid DAS queries.")
options.register('noDB',
                 True,  # by default, do not upload the BeamSpot conditions
                 VarParsing.VarParsing.multiplicity.singleton,
                 VarParsing.VarParsing.varType.bool,
                 "Don't upload the BeamSpot conditions to the DB")
options.parseArguments()
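# The options above are standard VarParsing options, so they can be supplied as
# key=value pairs on the cmsRun command line of whatever configuration loads
# this fragment. A hypothetical invocation (file name and values are
# illustrative only) could look like:
#
#   cmsRun dqm_client_cfg.py runNumber=286520 dataset=auto minLumi=1 maxLumi=200 lumiPattern='*'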
try:
    # A local dataset_cfi.py defining 'readFiles' and 'secFiles' (e.g. a DAS
    # Python snippet) takes precedence and avoids any DAS query.
    from dataset_cfi import readFiles, secFiles
    print("Using filenames from dataset_cfi.py.")
except ImportError:
    if options.dataset == 'auto':
        print("Querying DAS for a dataset...")
        out = subprocess.check_output(
            "dasgoclient --query 'dataset run=%d dataset=/*Express*/*/*FEVT*'" % options.runNumber,
            shell=True)
        # check_output returns bytes under Python 3; decode before formatting.
        dataset = out.decode().splitlines()[-1]
        print("Using dataset=%s." % dataset)
    else:
        dataset = options.dataset

    print("Querying DAS for files...")
    readFiles = cms.untracked.vstring()
    secFiles = cms.untracked.vstring()
    read, sec = filesFromDASQuery(
        "file run=%d dataset=%s" % (options.runNumber, dataset),
        option=" --limit 10000 ")
    readFiles.extend(read)
    secFiles.extend(sec)
print("Got %d files." % len(readFiles))
runstr = str(options.runNumber)
runpattern = "*" + runstr[0:3] + "/" + runstr[3:] + "*"
readFiles = cms.untracked.vstring([f for f in readFiles if fnmatch.fnmatch(f, runpattern)])
secFiles = cms.untracked.vstring([f for f in secFiles if fnmatch.fnmatch(f, runpattern)])

# Select the lumisections to process: minLumi..maxLumi, further restricted to
# lumisection numbers matching lumiPattern.
lumirange = cms.untracked.VLuminosityBlockRange(
    [str(options.runNumber) + ":" + str(ls)
     for ls in range(options.minLumi, options.maxLumi + 1)
     if fnmatch.fnmatch(str(ls), options.lumiPattern)]
)

print("Selected %d files and %d LS." % (len(readFiles), len(lumirange)))
source = cms.Source("PoolSource",
                    fileNames = readFiles,
                    secondaryFileNames = secFiles,
                    lumisToProcess = lumirange)

maxEvents = cms.untracked.PSet(
    input = cms.untracked.int32(-1)
)
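# A client configuration importing this fragment is expected to attach these
# objects to its own process; a minimal sketch (the importing module's name is
# illustrative only):
#
#   process.source = source
#   process.maxEvents = maxEvents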
# Translate the runkey option into the DQM run type, falling back to the
# default key if an empty one was passed on the command line.
runType = RunType()
if not options.runkey.strip():
    options.runkey = "pp_run"

runType.setRunType(options.runkey.strip())