from __future__ import print_function
from __future__ import absolute_import
from builtins import range
import FWCore.ParameterSet.Config as cms

import FWCore.ParameterSet.VarParsing as VarParsing
import fnmatch      # used below for run/lumisection pattern matching
import subprocess   # used below for the dasgoclient call

from .dqmPythonTypes import *
from Configuration.Applications.ConfigBuilder import filesFromDASQuery
options = VarParsing.VarParsing("analysis")

options.register('runkey',
                 'pp_run',  # reconstructed default; the fallback at the bottom of this file also uses "pp_run"
                 VarParsing.VarParsing.multiplicity.singleton,
                 VarParsing.VarParsing.varType.string,
                 "Run key (run type) passed to RunType below.")

options.register('runUniqueKey',
                 'InValid',  # assumed default
                 VarParsing.VarParsing.multiplicity.singleton,
                 VarParsing.VarParsing.varType.string,
                 "Unique run key from RCMS for Frontier")
options.register('runNumber',
                 111,  # dummy default; pass the real run number on the command line
                 VarParsing.VarParsing.multiplicity.singleton,
                 VarParsing.VarParsing.varType.int,
                 "Run number. This run number has to be present in the dataset configured with the dataset option.")
options.register('maxLumi',
                 2000,  # assumed default upper bound
                 VarParsing.VarParsing.multiplicity.singleton,
                 VarParsing.VarParsing.varType.int,
                 "Only lumisections up to maxLumi are processed.")
options.register('minLumi',
                 1,  # assumed default lower bound
                 VarParsing.VarParsing.multiplicity.singleton,
                 VarParsing.VarParsing.varType.int,
                 "Only lumisections starting from minLumi are processed.")
options.register('lumiPattern',
                 '*',  # default: accept all lumisections
                 VarParsing.VarParsing.multiplicity.singleton,
                 VarParsing.VarParsing.varType.string,
                 "Only lumisections with numbers matching lumiPattern are processed.")
options.register('dataset',
                 'auto',  # default: guess the dataset with a DAS query
                 VarParsing.VarParsing.multiplicity.singleton,
                 VarParsing.VarParsing.varType.string,
                 "Dataset name like '/ExpressPhysicsPA/PARun2016D-Express-v1/FEVT', or 'auto' to guess it with a DAS query. A dataset_cfi.py that defines 'readFiles' and 'secFiles' (like a DAS Python snippet) will override this, to avoid DAS queries.")
options.register('transDelay',
                 0,  # assumed default: no delay
                 VarParsing.VarParsing.multiplicity.singleton,
                 VarParsing.VarParsing.varType.int,
                 "Delay in seconds for the commit of the DB transaction.")
options.register('noDB',
                 True,  # assumed default: do not upload to the DB
                 VarParsing.VarParsing.multiplicity.singleton,
                 VarParsing.VarParsing.varType.bool,
                 "Don't upload the BeamSpot conditions to the DB")
options.parseArguments()
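# Hypothetical usage sketch (the configuration file name and option values below are
# illustrative, not taken from this package); any option registered above can be
# overridden on the cmsRun command line:
#   cmsRun my_dqm_job_cfg.py runNumber=316700 dataset=auto minLumi=1 maxLumi=200 lumiPattern='*0'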
try:
    # A dataset_cfi.py (e.g. a DAS Python snippet) in the job directory overrides the DAS queries below.
    from dataset_cfi import readFiles, secFiles
    print("Using filenames from dataset_cfi.py.")
except ImportError:
    if options.dataset == 'auto':
        print("Querying DAS for a dataset...")
        out = subprocess.check_output(
            "dasgoclient --query 'dataset run=%d dataset=/*Express*/*/*FEVT*'" % options.runNumber,
            shell=True)
        # take the last line of the query output; decode so this also works under Python 3
        dataset = out.decode().splitlines()[-1]
        print("Using dataset=%s." % dataset)
    else:
        dataset = options.dataset

    print("Querying DAS for files...")
    readFiles = cms.untracked.vstring()
    secFiles = cms.untracked.vstring()
    read, sec = filesFromDASQuery(
        "file run=%d dataset=%s" % (options.runNumber, dataset),
        option=" --limit 10000 ")
    readFiles.extend(read)
    secFiles.extend(sec)
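# Minimal sketch of a dataset_cfi.py that would be picked up by the try-import above
# (the file name inside the vstring is a placeholder):
#   import FWCore.ParameterSet.Config as cms
#   readFiles = cms.untracked.vstring('/store/express/Run2018A/ExpressPhysics/FEVT/Express-v1/000/316/700/00000/placeholder.root')
#   secFiles = cms.untracked.vstring()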
print("Got %d files." % len(readFiles))
runstr = str(options.runNumber)
runpattern = "*" + runstr[0:3] + "/" + runstr[3:] + "*"
readFiles = cms.untracked.vstring([f for f in readFiles if fnmatch.fnmatch(f, runpattern)])
secFiles = cms.untracked.vstring([f for f in secFiles if fnmatch.fnmatch(f, runpattern)])
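# Worked example (assuming the usual /store/.../000/XXX/YYY/... LFN layout):
# runNumber=316700 gives runpattern "*316/700*", which keeps files under
# ".../000/316/700/..." and drops files belonging to other runs.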
lumirange = cms.untracked.VLuminosityBlockRange(
    [str(options.runNumber) + ":" + str(ls)
     for ls in range(options.minLumi, options.maxLumi+1)
     if fnmatch.fnmatch(str(ls), options.lumiPattern)]
)
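# Example of the resulting selection (hypothetical values): runNumber=316700, minLumi=1,
# maxLumi=3, lumiPattern='*' yields the entries "316700:1", "316700:2", "316700:3".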
print("Selected %d files and %d LS." % (len(readFiles), len(lumirange)))
source = cms.Source("PoolSource",
                    fileNames = readFiles,
                    secondaryFileNames = secFiles,
                    lumisToProcess = lumirange)

maxEvents = cms.untracked.PSet(
    input = cms.untracked.int32(-1)
)
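# Sketch of how a driving configuration could pick up these objects (the module
# name below is a placeholder for wherever this fragment is installed):
#   from fileinputsource_cfi import source, maxEvents
#   process.source = source
#   process.maxEvents = maxEvents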
# RunType comes from the dqmPythonTypes import above
runType = RunType()
if not options.runkey.strip():
    options.runkey = "pp_run"

runType.setRunType(options.runkey.strip())