dataset.Dataset Class Reference

Public Member Functions

def __init__
 
def convertTimeToRun
 
def datasetSnippet
 
def dataType
 
def dump_cff
 
def fileInfoList
 
def fileList
 
def name
 
def predefined
 
def runList
 

Private Member Functions

def __chunks
 
def __createSnippet
 
def __find_ge
 
def __find_lt
 
def __getData
 
def __getDataType
 
def __getFileInfoList
 
def __getRunList
 

Private Attributes

 __dasLimit
 
 __dataType
 
 __fileInfoList
 
 __fileList
 
 __name
 
 __predefined
 
 __runList
 

Static Private Attributes

tuple __dummy_source_template
 

Detailed Description

Definition at line 13 of file dataset.py.
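In brief: a Dataset object wraps either a dataset registered in DAS (any name matching the /PrimaryDataset/ProcessedDataset/DataTier scheme) or a predefined dataset stored as a <name>_cff.py file in Alignment/OfflineValidation. File, run, and data-type information is fetched from DAS on demand, cached, and rendered into CMSSW configuration snippets by datasetSnippet() and dump_cff().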

Constructor & Destructor Documentation

def dataset.Dataset.__init__ (   self,
  datasetName,
  dasLimit = 0 
)

Definition at line 14 of file dataset.py.

14
15  def __init__( self, datasetName, dasLimit = 0 ):
16      self.__name = datasetName
17      # check whether the dataset name matches the CMS dataset naming scheme
18      if re.match( r'/.+/.+/.+', self.__name ):
19          self.__dataType = self.__getDataType()
20          self.__predefined = False
21      else:
22          fileName = self.__name + "_cff.py"
23          searchPath1 = os.path.join( os.environ["CMSSW_BASE"], "python",
24                                      "Alignment", "OfflineValidation",
25                                      fileName )
26          searchPath2 = os.path.join( os.environ["CMSSW_BASE"], "src",
27                                      "Alignment", "OfflineValidation",
28                                      "python", fileName )
29          searchPath3 = os.path.join( os.environ["CMSSW_RELEASE_BASE"],
30                                      "python", "Alignment",
31                                      "OfflineValidation", fileName )
32          if os.path.exists( searchPath1 ):
33              pass
34          elif os.path.exists( searchPath2 ):
35              msg = ("The predefined dataset '%s' does exist in '%s', but "
36                     "you need to run 'scram b' first."
37                     %( self.__name, searchPath2 ))
38              raise AllInOneError( msg )
39          elif os.path.exists( searchPath3 ):
40              pass
41          else:
42              msg = ("The predefined dataset '%s' does not exist. Please "
43                     "create it first or check for typos."%( self.__name ))
44              raise AllInOneError( msg )
45          self.__dataType = "unknown"
46          self.__predefined = True
47      self.__dasLimit = dasLimit
48      self.__fileList = None
49      self.__fileInfoList = None
50      self.__runList = None
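A minimal usage sketch (the DAS dataset name is a placeholder; a set-up CMSSW environment with CMSSW_BASE defined and DAS access is assumed):

    from dataset import Dataset

    # A name matching the /.../.../... scheme is treated as a DAS dataset;
    # its data type is looked up right away via a DAS query.
    data = Dataset( "/MinimumBias/Run2012A-TkAlMinBias-v1/ALCARECO" )

    # Any other name is interpreted as a predefined <name>_cff.py snippet,
    # which must already exist in Alignment/OfflineValidation.
    predefined = Dataset( "MyPredefinedDataset" )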

Member Function Documentation

def dataset.Dataset.__chunks (   self,
  theList,
  n 
)
private
Yield successive n-sized chunks from theList.

Definition at line 51 of file dataset.py.

References dataset.Dataset.__createSnippet().

Referenced by dataset.Dataset.__createSnippet().

51
52  def __chunks( self, theList, n ):
53      """ Yield successive n-sized chunks from theList.
54      """
55      for i in xrange( 0, len( theList ), n ):
56          yield theList[i:i+n]
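__createSnippet() uses this generator to split the file and lumi lists into chunks of 255 entries before rendering them into extend() calls. A standalone illustration of the chunking logic (pure Python; the original uses Python 2's xrange):

    def chunks( theList, n ):
        """ Yield successive n-sized chunks from theList. """
        for i in range( 0, len( theList ), n ):
            yield theList[i:i+n]

    print( list( chunks( [ "a", "b", "c", "d", "e" ], 2 ) ) )
    # [['a', 'b'], ['c', 'd'], ['e']]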
def dataset.Dataset.__createSnippet (   self,
  jsonPath = None,
  begin = None,
  end = None,
  firstRun = None,
  lastRun = None,
  repMap = None,
  crab = False 
)
private

Definition at line 59 of file dataset.py.

References dataset.Dataset.__chunks(), dataset.Dataset.__dummy_source_template, dataset.Dataset.__getRunList(), dataset.Dataset.convertTimeToRun(), and dataset.Dataset.fileList().

Referenced by dataset.Dataset.__chunks(), dataset.Dataset.datasetSnippet(), and dataset.Dataset.dump_cff().

59
60                          crab = False ):
61      if firstRun:
62          firstRun = int( firstRun )
63      if lastRun:
64          lastRun = int( lastRun )
65      if ( begin and firstRun ) or ( end and lastRun ):
66          msg = ( "The usage of "
67                  + "'begin' & 'firstRun' " * int( bool( begin and
68                                                         firstRun ) )
69                  + "and " * int( bool( ( begin and firstRun ) and
70                                        ( end and lastRun ) ) )
71                  + "'end' & 'lastRun' " * int( bool( end and lastRun ) )
72                  + "is ambiguous." )
73          raise AllInOneError( msg )
74      if begin or end:
75          ( firstRun, lastRun ) = self.convertTimeToRun(
76              begin = begin, end = end, firstRun = firstRun,
77              lastRun = lastRun )
78      if ( firstRun and lastRun ) and ( firstRun > lastRun ):
79          msg = ( "The lower time/runrange limit ('begin'/'firstRun') "
80                  "chosen is greater than the upper time/runrange limit "
81                  "('end'/'lastRun')." )
82          raise AllInOneError( msg )
83      goodLumiSecStr = ""
84      lumiStr = ""
85      lumiSecExtend = ""
86      if firstRun or lastRun:
87          goodLumiSecStr = ( "lumiSecs = cms.untracked."
88                             "VLuminosityBlockRange()\n" )
89          lumiStr = " lumisToProcess = lumiSecs,\n"
90          if not jsonPath:
91              selectedRunList = self.__getRunList()
92              if firstRun:
93                  selectedRunList = [ run for run in selectedRunList \
94                                      if run["run_number"] >= firstRun ]
95              if lastRun:
96                  selectedRunList = [ run for run in selectedRunList \
97                                      if run["run_number"] <= lastRun ]
98              lumiList = [ str( run["run_number"] ) + ":1-" \
99                           + str( run["run_number"] ) + ":max" \
100                          for run in selectedRunList ]
101             splitLumiList = list( self.__chunks( lumiList, 255 ) )
102         else:
103             theLumiList = LumiList( filename = jsonPath )
104             allRuns = theLumiList.getRuns()
105             runsToRemove = []
106             for run in allRuns:
107                 if firstRun and int( run ) < firstRun:
108                     runsToRemove.append( run )
109                 if lastRun and int( run ) > lastRun:
110                     runsToRemove.append( run )
111             theLumiList.removeRuns( runsToRemove )
112             splitLumiList = list( self.__chunks(
113                 theLumiList.getCMSSWString().split(','), 255 ) )
114         if not len(splitLumiList[0][0]) == 0:
115             lumiSecStr = [ "',\n'".join( lumis ) \
116                            for lumis in splitLumiList ]
117             lumiSecStr = [ "lumiSecs.extend( [\n'" + lumis + "'\n] )" \
118                            for lumis in lumiSecStr ]
119             lumiSecExtend = "\n".join( lumiSecStr )
120     elif jsonPath:
121         goodLumiSecStr = ( "goodLumiSecs = LumiList.LumiList(filename"
122                            "= '%(json)s').getCMSSWString().split(',')\n"
123                            "lumiSecs = cms.untracked"
124                            ".VLuminosityBlockRange()\n"
125                            )
126         lumiStr = " lumisToProcess = lumiSecs,\n"
127         lumiSecExtend = "lumiSecs.extend(goodLumiSecs)\n"
128     if crab:
129         files = ""
130     else:
131         splitFileList = list( self.__chunks( self.fileList(), 255 ) )
132         fileStr = [ "',\n'".join( files ) for files in splitFileList ]
133         fileStr = [ "readFiles.extend( [\n'" + files + "'\n] )" \
134                     for files in fileStr ]
135         files = "\n".join( fileStr )
136     theMap = repMap
137     theMap["files"] = files
138     theMap["json"] = jsonPath
139     theMap["lumiStr"] = lumiStr
140     theMap["goodLumiSecStr"] = goodLumiSecStr%( theMap )
141     theMap["lumiSecExtend"] = lumiSecExtend
142     if crab:
143         dataset_snippet = self.__dummy_source_template%( theMap )
144     else:
145         dataset_snippet = self.__source_template%( theMap )
146     return dataset_snippet
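The returned snippet is produced by %-interpolating a source template with the repMap dictionary assembled above. A standalone sketch of that mechanism, using a trimmed stand-in template rather than the class's real __source_template (the file name is a placeholder):

    template = ( "%(importCms)s"
                 "readFiles = cms.untracked.vstring()\n"
                 "%(process)ssource = cms.Source(\"PoolSource\",\n"
                 "%(tab)s fileNames = readFiles )\n"
                 "%(files)s\n" )
    repMap = { "importCms": "",
               "process": "process.",
               "tab": " " * len( "process." ),
               "files": "readFiles.extend( [\n'/store/placeholder.root'\n] )" }
    print( template % repMap )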
def dataset.Dataset.__find_ge (   self,
  a,
  x 
)
private

Definition at line 165 of file dataset.py.

Referenced by dataset.Dataset.convertTimeToRun().

166  def __find_ge( self, a, x ):
167      'Find leftmost item greater than or equal to x'
168      i = bisect.bisect_left( a, x )
169      if i != len( a ):
170          return i
171      raise ValueError
def dataset.Dataset.__find_lt (   self,
  a,
  x 
)
private

Definition at line 158 of file dataset.py.

Referenced by dataset.Dataset.convertTimeToRun().

159  def __find_lt( self, a, x ):
160      'Find rightmost value less than x'
161      i = bisect.bisect_left( a, x )
162      if i:
163          return i-1
164      raise ValueError
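Both helpers are thin wrappers around bisect.bisect_left; convertTimeToRun() uses them to map 'begin'/'end' times onto indices in the time-sorted run list. A standalone illustration (the creation times are placeholder integers; any sorted, comparable values work):

    import bisect

    def find_ge( a, x ):
        'Find leftmost item greater than or equal to x'
        i = bisect.bisect_left( a, x )
        if i != len( a ):
            return i
        raise ValueError

    def find_lt( a, x ):
        'Find rightmost value less than x'
        i = bisect.bisect_left( a, x )
        if i:
            return i - 1
        raise ValueError

    creationTimes = [ 100, 200, 300 ]
    print( find_ge( creationTimes, 150 ) )  # 1: first run created at or after x
    print( find_lt( creationTimes, 150 ) )  # 0: last run created before x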
def dataset.Dataset.__getData (   self,
  dasQuery,
  dasLimit = 0 
)
private

Definition at line 172 of file dataset.py.

Referenced by dataset.Dataset.__getDataType(), dataset.Dataset.__getFileInfoList(), and dataset.Dataset.__getRunList().

173  def __getData( self, dasQuery, dasLimit = 0 ):
174      dasData = das_client.get_data( 'https://cmsweb.cern.ch',
175                                     dasQuery, 0, dasLimit, False )
176      jsondict = json.loads( dasData )
177      # check whether the DAS query failed
178      if jsondict["status"] != 'ok':
179          msg = "Status not 'ok', but: %s"%( jsondict["status"] )
180          raise AllInOneError( msg )
181      return jsondict["data"]
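A sketch of the same DAS round trip outside the class (assumes the das_client module shipped with CMSSW, network access to cmsweb.cern.ch, and a placeholder dataset name):

    import json
    import das_client

    dasData = das_client.get_data( 'https://cmsweb.cern.ch',
                                   'run dataset=/A/B/C', 0, 10, False )
    jsondict = json.loads( dasData )
    if jsondict["status"] != 'ok':
        raise RuntimeError( "Status not 'ok', but: %s"%( jsondict["status"] ) )
    for entry in jsondict["data"]:
        print( entry )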
def dataset.Dataset.__getDataType (   self)
private

Definition at line 182 of file dataset.py.

References dataset.Dataset.__getData(), and dataset.Dataset.__name.

183  def __getDataType( self ):
184      dasQuery_type = ( 'dataset dataset=%s | grep dataset.datatype,'
185                        'dataset.name'%( self.__name ) )
186      data = self.__getData( dasQuery_type )
187      return data[0]["dataset"][0]["datatype"]
def dataset.Dataset.__getFileInfoList (   self,
  dasLimit 
)
private

Definition at line 188 of file dataset.py.

References dataset.Dataset.__fileInfoList, dataset.Dataset.__getData(), dataset.Dataset.__name, and dataset.Dataset.name().

Referenced by dataset.Dataset.fileInfoList().

189  def __getFileInfoList( self, dasLimit ):
190      if self.__fileInfoList:
191          return self.__fileInfoList
192      dasQuery_files = ( 'file dataset=%s | grep file.name, file.nevents, '
193                         'file.creation_time, '
194                         'file.modification_time'%( self.__name ) )
195      print "Requesting file information for '%s' from DAS..."%( self.__name ),
196      data = self.__getData( dasQuery_files, dasLimit )
197      print "Done."
198      data = [ entry["file"] for entry in data ]
199      if len( data ) == 0:
200          msg = ("No files are available for the dataset '%s'. This can be "
201                 "due to a typo or due to a DAS problem. Please check the "
202                 "spelling of the dataset and/or retry running "
203                 "'validateAlignments.py'."%( self.name() ))
204          raise AllInOneError( msg )
205      fileInformationList = []
206      for file in data:
207          fileName = file[0]["name"]
208          fileCreationTime = file[0]["creation_time"]
209          for ii in range(3):
210              try:
211                  fileNEvents = file[ii]["nevents"]
212              except KeyError:
213                  continue
214              break
215          # select only non-empty files
216          if fileNEvents == 0:
217              continue
218          fileDict = { "name": fileName,
219                       "creation_time": fileCreationTime,
220                       "nevents": fileNEvents
221                       }
222          fileInformationList.append( fileDict )
223      fileInformationList.sort( key=lambda info: info["name"] )
224      return fileInformationList
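Each entry of the returned list is a dictionary of the following shape (the values here are illustrative, not taken from a real DAS response):

    fileDict = { "name": "/store/data/Run2012A/SomeDataset/file.root",  # placeholder LFN
                 "creation_time": 1349791366,                           # placeholder timestamp
                 "nevents": 24025 }                                     # placeholder event count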
def dataset.Dataset.__getRunList (   self)
private

Definition at line 225 of file dataset.py.

References dataset.Dataset.__getData(), dataset.Dataset.__name, dataset.Dataset.__runList, and dataset.Dataset.convertTimeToRun().

Referenced by dataset.Dataset.__createSnippet(), dataset.Dataset.convertTimeToRun(), and dataset.Dataset.runList().

226  def __getRunList( self ):
227      if self.__runList:
228          return self.__runList
229      dasQuery_runs = ( 'run dataset=%s | grep run.run_number,'
230                        'run.creation_time'%( self.__name ) )
231      print "Requesting run information for '%s' from DAS..."%( self.__name ),
232      data = self.__getData( dasQuery_runs )
233      print "Done."
234      data = [ entry["run"][0] for entry in data ]
235      data.sort( key = lambda run: run["creation_time"] )
236      self.__runList = data
237      return data
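The cached run list is sorted by creation time and holds one dictionary per run, e.g. (illustrative values):

    runList = [ { "run_number": 190456, "creation_time": 1333640400 },
                { "run_number": 190459, "creation_time": 1333644000 } ]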
def dataset.Dataset.convertTimeToRun (   self,
  begin = None,
  end = None,
  firstRun = None,
  lastRun = None,
  shortTuple = True 
)

Definition at line 256 of file dataset.py.

References dataset.Dataset.__find_ge(), dataset.Dataset.__find_lt(), dataset.Dataset.__getRunList(), and dataset.Dataset.__name.

Referenced by dataset.Dataset.__createSnippet(), and dataset.Dataset.__getRunList().

257                        shortTuple = True ):
258      if ( begin and firstRun ) or ( end and lastRun ):
259          msg = ( "The usage of "
260                  + "'begin' & 'firstRun' " * int( bool( begin and
261                                                         firstRun ) )
262                  + "and " * int( bool( ( begin and firstRun ) and
263                                        ( end and lastRun ) ) )
264                  + "'end' & 'lastRun' " * int( bool( end and lastRun ) )
265                  + "is ambiguous." )
266          raise AllInOneError( msg )
267
268      runList = [ run["run_number"] for run in self.__getRunList() ]
269      runTimeList = [ run["creation_time"] for run in self.__getRunList() ]
270      if begin:
271          try:
272              runIndex = self.__find_ge( runTimeList, begin )
273          except ValueError:
274              msg = ( "Your 'begin' is after the creation time of the last "
275                      "run in the dataset\n'%s'"%( self.__name ) )
276              raise AllInOneError( msg )
277          firstRun = runList[runIndex]
278          begin = None
279      if end:
280          try:
281              runIndex = self.__find_lt( runTimeList, end )
282          except ValueError:
283              msg = ( "Your 'end' is before the creation time of the first "
284                      "run in the dataset\n'%s'"%( self.__name ) )
285              raise AllInOneError( msg )
286          lastRun = runList[runIndex]
287          end = None
288      if shortTuple:
289          return firstRun, lastRun
290      else:
291          return begin, end, firstRun, lastRun
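A hypothetical call resolving a time window into a run range (data is a Dataset instance as sketched under the constructor; the bounds are placeholders in whatever format DAS reports run creation times):

    begin, end = 1333640400, 1333644000   # placeholder creation-time bounds
    firstRun, lastRun = data.convertTimeToRun( begin = begin, end = end )
    print( firstRun, lastRun )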
def dataset.Dataset.datasetSnippet (   self,
  jsonPath = None,
  begin = None,
  end = None,
  firstRun = None,
  lastRun = None,
  nEvents = None,
  crab = False 
)

Definition at line 297 of file dataset.py.

References dataset.Dataset.__createSnippet(), dataset.Dataset.__name, dataset.Dataset.__predefined, and dataset.Dataset.dump_cff().

Referenced by dataset.Dataset.dataType().

298                      crab = False ):
299      if self.__predefined:
300          return ("process.load(\"Alignment.OfflineValidation.%s_cff\")\n"
301                  "process.maxEvents = cms.untracked.PSet(\n"
302                  " input = cms.untracked.int32(%s)\n"
303                  ")"
304                  %( self.__name, nEvents ))
305      theMap = { "process": "process.",
306                 "tab": " " * len( "process." ),
307                 "nEvents": str( nEvents ),
308                 "importCms": ""
309                 }
310      datasetSnippet = self.__createSnippet( jsonPath = jsonPath,
311                                             begin = begin,
312                                             end = end,
313                                             firstRun = firstRun,
314                                             lastRun = lastRun,
315                                             repMap = theMap,
316                                             crab = crab )
317      return datasetSnippet
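Typical use (run numbers and event count are placeholders). For a predefined dataset the method short-circuits to a process.load(...) statement; otherwise a full PoolSource snippet is generated via __createSnippet():

    snippet = data.datasetSnippet( firstRun = 190456,
                                   lastRun = 190700,
                                   nEvents = 10000 )
    print( snippet )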
def dataset.Dataset.dataType (   self)

Definition at line 292 of file dataset.py.

References dataset.Dataset.__dataType, and dataset.Dataset.datasetSnippet().

293  def dataType( self ):
294      return self.__dataType
def dataset.Dataset.dump_cff (   self,
  outName = None,
  jsonPath = None,
  begin = None,
  end = None,
  firstRun = None,
  lastRun = None 
)

Definition at line 319 of file dataset.py.

References dataset.Dataset.__createSnippet().

Referenced by dataset.Dataset.datasetSnippet().

320               end = None, firstRun = None, lastRun = None ):
321      if outName == None:
322          outName = "Dataset"
323      packageName = os.path.join( "Alignment", "OfflineValidation" )
324      if not os.path.exists( os.path.join(
325              os.environ["CMSSW_BASE"], "src", packageName ) ):
326          msg = ("You are trying to store the predefined dataset '%s'.\n"
327                 "For that you need to check out the package '%s' into your "
328                 "private release area in\n"%( outName, packageName )
329                 + os.environ["CMSSW_BASE"] )
330          raise AllInOneError( msg )
331      theMap = { "process": "",
332                 "tab": "",
333                 "nEvents": str( -1 ),
334                 "importCms": "import FWCore.ParameterSet.Config as cms\n" }
335      dataset_cff = self.__createSnippet( jsonPath = jsonPath,
336                                          begin = begin,
337                                          end = end,
338                                          firstRun = firstRun,
339                                          lastRun = lastRun,
340                                          repMap = theMap )
341      filePath = os.path.join( os.environ["CMSSW_BASE"], "src", packageName,
342                               "python", outName + "_cff.py" )
343      if os.path.exists( filePath ):
344          existMsg = "The predefined dataset '%s' already exists.\n"%( outName )
345          askString = "Do you want to overwrite it? [y/n]\n"
346          inputQuery = existMsg + askString
347          while True:
348              userInput = raw_input( inputQuery ).lower()
349              if userInput == "y":
350                  break
351              elif userInput == "n":
352                  return
353              else:
354                  inputQuery = askString
355      print ( "The predefined dataset '%s' will be stored in the file\n"
356              %( outName )
357              + filePath +
358              "\nFor future use you have to run 'scram b'." )
359      print
360      theFile = open( filePath, "w" )
361      theFile.write( dataset_cff )
362      theFile.close()
363      return
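A hypothetical call writing a predefined dataset (requires the Alignment/OfflineValidation package checked out in CMSSW_BASE; remember to run 'scram b' afterwards):

    data.dump_cff( outName = "MyDataset",
                   firstRun = 190456, lastRun = 190700 )
    # writes $CMSSW_BASE/src/Alignment/OfflineValidation/python/MyDataset_cff.py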
def dataset.Dataset.fileInfoList (   self)

Definition at line 372 of file dataset.py.

References dataset.Dataset.__dasLimit, and dataset.Dataset.__getFileInfoList().

Referenced by dataset.Dataset.fileList().

373  def fileInfoList( self ):
374      return self.__getFileInfoList( self.__dasLimit )
def dataset.Dataset.fileList (   self)

Definition at line 364 of file dataset.py.

References dataset.Dataset.__fileList, and dataset.Dataset.fileInfoList().

Referenced by dataset.Dataset.__createSnippet().

365  def fileList( self ):
366      if self.__fileList:
367          return self.__fileList
368      fileList = [ fileInfo["name"] \
369                   for fileInfo in self.fileInfoList() ]
370      self.__fileList = fileList
371      return fileList
def dataset.Dataset.name (   self)

Definition at line 375 of file dataset.py.

References dataset.Dataset.__name.

Referenced by dataset.Dataset.__getFileInfoList().

376  def name( self ):
377      return self.__name
def dataset.Dataset.predefined (   self)

Definition at line 378 of file dataset.py.

References dataset.Dataset.__predefined.

379  def predefined( self ):
380      return self.__predefined
def dataset.Dataset.runList (   self)

Definition at line 381 of file dataset.py.

References dataset.Dataset.__getRunList(), and dataset.Dataset.__runList.

382  def runList( self ):
383      if self.__runList:
384          return self.__runList
385      return self.__getRunList()
386

Member Data Documentation

dataset.Dataset.__dasLimit
private

Definition at line 46 of file dataset.py.

Referenced by dataset.Dataset.fileInfoList().

dataset.Dataset.__dataType
private

Definition at line 18 of file dataset.py.

Referenced by dataset.Dataset.dataType().

tuple dataset.Dataset.__dummy_source_template
staticprivate
Initial value:
1 = ("%(process)smaxEvents = cms.untracked.PSet( "
2  "input = cms.untracked.int32(%(nEvents)s) )\n"
3  "readFiles = cms.untracked.vstring()\n"
4  "secFiles = cms.untracked.vstring()\n"
5  "%(process)ssource = cms.Source(\"PoolSource\",\n"
6  "%(tab)s secondaryFileNames ="
7  "secFiles,\n"
8  "%(tab)s fileNames = readFiles\n"
9  ")\n"
10  "readFiles.extend(['dummy_File.root'])\n")

Definition at line 147 of file dataset.py.

Referenced by dataset.Dataset.__createSnippet().
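Interpolated with the repMap built in datasetSnippet() (i.e. %(process)s = "process.", %(tab)s = eight spaces, and %(nEvents)s = 10000 as a placeholder), the template expands to:

    process.maxEvents = cms.untracked.PSet( input = cms.untracked.int32(10000) )
    readFiles = cms.untracked.vstring()
    secFiles = cms.untracked.vstring()
    process.source = cms.Source("PoolSource",
             secondaryFileNames =secFiles,
             fileNames = readFiles
    )
    readFiles.extend(['dummy_File.root'])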

dataset.Dataset.__fileInfoList
private

Definition at line 48 of file dataset.py.

Referenced by dataset.Dataset.__getFileInfoList().

dataset.Dataset.__fileList
private

Definition at line 47 of file dataset.py.

Referenced by dataset.Dataset.fileList().

dataset.Dataset.__name
private

Definition at line 15 of file dataset.py.

Referenced by dataset.Dataset.__getDataType(), dataset.Dataset.__getFileInfoList(), dataset.Dataset.__getRunList(), dataset.Dataset.convertTimeToRun(), dataset.Dataset.datasetSnippet(), and dataset.Dataset.name().

dataset.Dataset.__predefined
private

Definition at line 19 of file dataset.py.

Referenced by dataset.Dataset.datasetSnippet(), and dataset.Dataset.predefined().

dataset.Dataset.__runList
private

Definition at line 49 of file dataset.py.

Referenced by dataset.Dataset.__getRunList(), and dataset.Dataset.runList().