MatrixInjector.py
import sys
import json
import os
import copy
import multiprocessing

def performInjectionOptionTest(opt):
    if opt.show:
        print 'Not injecting to wmagent in --show mode. Need to run the workflows.'
        sys.exit(-1)
    if opt.wmcontrol=='init':
        #init means it'll be in test mode
        opt.nProcs=0
    if opt.wmcontrol=='test':
        #means the wf were created already, and we just dryRun it.
        opt.dryRun=True
    if opt.wmcontrol=='submit' and opt.nProcs==0:
        print 'Not injecting to wmagent in -j 0 mode. Need to run the workflows.'
        sys.exit(-1)
    if opt.wmcontrol=='force':
        print "This is an expert setting, you'd better know what you're doing"
        opt.dryRun=True
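
# Quick reference for the --wmcontrol modes handled above (summarized from the
# checks in performInjectionOptionTest; the opt object itself comes from the
# caller's option parser, outside this file):
#   init   -> test mode, uploads disabled (nProcs=0)
#   test   -> workflows already created, dry-run only (dryRun=True)
#   submit -> real injection, refused when running with -j 0 (no processes)
#   force  -> expert mode, though it still sets dryRun=True here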

def upload_to_couch_oneArg(arguments):
    from modules.wma import upload_to_couch
    (filePath,labelInCouch,user,group,where) = arguments
    cacheId=upload_to_couch(filePath,
                            labelInCouch,
                            user,
                            group,
                            test_mode=False,
                            url=where)
    return cacheId
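
# Note on the helper above: multiprocessing.Pool.map needs a picklable,
# module-level callable taking a single argument, which is presumably why the
# couch upload is wrapped here instead of being called as a bound method. A
# minimal usage sketch, mirroring the call made in MatrixInjector.uploadConf
# below (variable names are illustrative):
#
#   pool = multiprocessing.Pool(1)
#   cacheIds = pool.map(upload_to_couch_oneArg,
#                       [(filePath, labelInCouch, user, group, where)])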

class MatrixInjector(object):

    def __init__(self,opt,mode='init',options=''):
        self.count=1040

        self.dqmgui=None
        self.wmagent=None
        for k in options.split(','):
            if k.startswith('dqm:'):
                self.dqmgui=k.split(':',1)[-1]
            elif k.startswith('wma:'):
                self.wmagent=k.split(':',1)[-1]

        self.testMode=((mode!='submit') and (mode!='force'))
        self.version=1
        self.keep = opt.keep
        self.memoryOffset = opt.memoryOffset
        self.memPerCore = opt.memPerCore

        #wmagent stuff
        if not self.wmagent:
            self.wmagent=os.getenv('WMAGENT_REQMGR')
        if not self.wmagent:
            if not opt.testbed:
                self.wmagent = 'cmsweb.cern.ch'
                self.DbsUrl = "https://"+self.wmagent+"/dbs/prod/global/DBSReader"
            else:
                self.wmagent = 'cmsweb-testbed.cern.ch'
                self.DbsUrl = "https://"+self.wmagent+"/dbs/int/global/DBSReader"

        if not self.dqmgui:
            self.dqmgui="https://cmsweb.cern.ch/dqm/relval"
        #couch stuff
        self.couch = 'https://'+self.wmagent+'/couchdb'
#        self.couchDB = 'reqmgr_config_cache'
        self.couchCache={} # so that we do not upload like crazy, and recycle cfgs
        self.user = os.getenv('USER')
        self.group = 'ppd'
        self.label = 'RelValSet_'+os.getenv('CMSSW_VERSION').replace('-','')+'_v'+str(self.version)
        self.speciallabel=''
        if opt.label:
            self.speciallabel='_'+opt.label

        if not os.getenv('WMCORE_ROOT'):
            print '\n\twmclient is not set up properly. Will not be able to upload or submit requests.\n'
            if not self.testMode:
                print '\n\t QUIT\n'
                sys.exit(-18)
        else:
            print '\n\tFound wmclient\n'
        self.defaultChain={
            "RequestType" : "TaskChain",                #this is how we handle relvals
            "SubRequestType" : "RelVal",                #this is how we handle relvals, now that TaskChain is also used for central MC production
            "RequestPriority": 500000,
            "Requestor": self.user,                     #Person responsible
            "Group": self.group,                        #group for the request
            "CMSSWVersion": os.getenv('CMSSW_VERSION'), #CMSSW Version (used for all tasks in chain)
            "Campaign": os.getenv('CMSSW_VERSION'),     # = AcquisitionEra, will be reset later to the one of the first task; both will be the CMSSW_VERSION
            "ScramArch": os.getenv('SCRAM_ARCH'),       #Scram Arch (used for all tasks in chain)
            "ProcessingVersion": self.version,          #Processing Version (used for all tasks in chain)
            "GlobalTag": None,                          #Global Tag (overridden per task)
            "CouchURL": self.couch,                     #URL of CouchDB containing Config Cache
            "ConfigCacheURL": self.couch,               #URL of CouchDB containing Config Cache
            "DbsUrl": self.DbsUrl,
            #- Will contain all configs for all Tasks
            #"SiteWhitelist" : ["T2_CH_CERN", "T1_US_FNAL"], #Site whitelist
            "TaskChain" : None,                         #Define number of tasks in chain.
            "nowmTasklist" : [],                        #a list of tasks as we put them in
            "unmergedLFNBase" : "/store/unmerged",
            "mergedLFNBase" : "/store/relval",
            "dashboardActivity" : "relval",
            "Multicore" : 1,                            # do not set multicore for the whole chain
            "Memory" : 3000,
            "SizePerEvent" : 1234,
            "TimePerEvent" : 0.1
        }

        self.defaultHarvest={
            "EnableHarvesting" : "True",
            "DQMUploadUrl" : self.dqmgui,
            "DQMConfigCacheID" : None,
            "Multicore" : 1                             # hardcode Multicore to be 1 for Harvest
        }

        self.defaultScratch={
            "TaskName" : None,                          #Task Name
            "ConfigCacheID" : None,                     #Generator Config id
            "GlobalTag": None,
            "SplittingAlgo" : "EventBased",             #Splitting Algorithm
            "EventsPerJob" : None,                      #Size of jobs in terms of splitting algorithm
            "RequestNumEvents" : None,                  #Total number of events to generate
            "Seeding" : "AutomaticSeeding",             #Random seeding method
            "PrimaryDataset" : None,                    #Primary Dataset to be created
            "nowmIO": {},
            "Multicore" : opt.nThreads,                 # this is the per-taskchain Multicore; it's the default assigned to a task if it has no value specified
            "KeepOutput" : False
        }
        self.defaultInput={
            "TaskName" : "DigiHLT",                     #Task Name
            "ConfigCacheID" : None,                     #Processing Config id
            "GlobalTag": None,
            "InputDataset" : None,                      #Input Dataset to be processed
            "SplittingAlgo" : "LumiBased",              #Splitting Algorithm
            "LumisPerJob" : 10,                         #Size of jobs in terms of splitting algorithm
            "nowmIO": {},
            "Multicore" : opt.nThreads,                 # this is the per-taskchain Multicore; it's the default assigned to a task if it has no value specified
            "KeepOutput" : False
        }
        self.defaultTask={
            "TaskName" : None,                          #Task Name
            "InputTask" : None,                         #Input Task Name (Task Name field of a previous Task entry)
            "InputFromOutputModule" : None,             #OutputModule name in the input task that will provide files to process
            "ConfigCacheID" : None,                     #Processing Config id
            "GlobalTag": None,
            "SplittingAlgo" : "LumiBased",              #Splitting Algorithm
            "LumisPerJob" : 10,                         #Size of jobs in terms of splitting algorithm
            "nowmIO": {},
            "Multicore" : opt.nThreads,                 # this is the per-taskchain Multicore; it's the default assigned to a task if it has no value specified
            "KeepOutput" : False
        }

        self.chainDicts={}

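    # How the template dicts above fit together (a reading aid inferred from
    # prepare() below, not from external documentation):
    #   - defaultChain   : request-level TaskChain skeleton
    #   - defaultScratch : first task generated from scratch (EventBased split)
    #   - defaultInput   : first task reading an input dataset (LumiBased split)
    #   - defaultTask    : any subsequent task chained to a previous one
    #   - defaultHarvest : merged into the request when a HARVEST* task appears
    # The "nowm"-prefixed bookkeeping keys (nowmIO, nowmTasklist) are internal
    # and are popped before injection.
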
    def prepare(self,mReader, directories, mode='init'):
        try:
            #from Configuration.PyReleaseValidation.relval_steps import wmsplit
            wmsplit = {}
            wmsplit['DIGIHI']=5
            wmsplit['RECOHI']=5
            wmsplit['HLTD']=5
            wmsplit['RECODreHLT']=2
            wmsplit['DIGIPU']=4
            wmsplit['DIGIPU1']=4
            wmsplit['RECOPU1']=1
            wmsplit['DIGIUP15_PU50']=1
            wmsplit['RECOUP15_PU50']=1
            wmsplit['DIGIUP15_PU25']=1
            wmsplit['RECOUP15_PU25']=1
            wmsplit['DIGIUP15_PU25HS']=1
            wmsplit['RECOUP15_PU25HS']=1
            wmsplit['DIGIHIMIX']=5
            wmsplit['RECOHIMIX']=5
            wmsplit['RECODSplit']=1
            wmsplit['SingleMuPt10_UP15_ID']=1
            wmsplit['DIGIUP15_ID']=1
            wmsplit['RECOUP15_ID']=1
            wmsplit['TTbar_13_ID']=1
            wmsplit['SingleMuPt10FS_ID']=1
            wmsplit['TTbarFS_ID']=1
            wmsplit['RECODR2_50nsreHLT']=5
            wmsplit['RECODR2_25nsreHLT']=5
            wmsplit['RECODR2_2016reHLT']=5
            wmsplit['RECODR2_50nsreHLT_HIPM']=5
            wmsplit['RECODR2_25nsreHLT_HIPM']=5
            wmsplit['RECODR2_2016reHLT_HIPM']=5
            wmsplit['RECODR2_2016reHLT_skimSingleMu']=5
            wmsplit['RECODR2_2016reHLT_skimDoubleEG']=5
            wmsplit['RECODR2_2016reHLT_skimMuonEG']=5
            wmsplit['RECODR2_2016reHLT_skimJetHT']=5
            wmsplit['RECODR2_2016reHLT_skimMET']=5
            wmsplit['RECODR2_2016reHLT_skimSinglePh']=5
            wmsplit['RECODR2_2016reHLT_skimMuOnia']=5
            wmsplit['RECODR2_2016reHLT_skimSingleMu_HIPM']=5
            wmsplit['RECODR2_2016reHLT_skimDoubleEG_HIPM']=5
            wmsplit['RECODR2_2016reHLT_skimMuonEG_HIPM']=5
            wmsplit['RECODR2_2016reHLT_skimJetHT_HIPM']=5
            wmsplit['RECODR2_2016reHLT_skimMET_HIPM']=5
            wmsplit['RECODR2_2016reHLT_skimSinglePh_HIPM']=5
            wmsplit['RECODR2_2016reHLT_skimMuOnia_HIPM']=5
            wmsplit['HLTDR2_50ns']=1
            wmsplit['HLTDR2_25ns']=1
            wmsplit['HLTDR2_2016']=1
            wmsplit['Hadronizer']=1
            wmsplit['DIGIUP15']=1
            wmsplit['RECOUP15']=1
            wmsplit['RECOAODUP15']=5
            wmsplit['DBLMINIAODMCUP15NODQM']=5
            wmsplit['DigiFull']=5
            wmsplit['RecoFull']=5
            wmsplit['DigiFullPU']=1
            wmsplit['RecoFullPU']=1

            #import pprint
            #pprint.pprint(wmsplit)
        except:
            print "Not set up for step splitting"
            wmsplit={}

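        # wmsplit maps a step name to the LumisPerJob value that step should
        # use, overriding the default of 10 from the task templates; it is
        # consulted below via "if step in wmsplit". For example, a step named
        # 'RECODreHLT' is split at 2 lumis per job.
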
        acqEra=False
        for (n,dir) in directories.items():
            chainDict=copy.deepcopy(self.defaultChain)
            print "inspecting",dir
            nextHasDSInput=None
            for (x,s) in mReader.workFlowSteps.items():
                #x has the format (num, prefix)
                #s has the format (num, name, commands, stepList)
                if x[0]==n:
                    #print "found",n,s[3]
                    #chainDict['RequestString']='RV'+chainDict['CMSSWVersion']+s[1].split('+')[0]
                    index=0
                    splitForThisWf=None
                    thisLabel=self.speciallabel
                    #if 'HARVESTGEN' in s[3]:
                    if len( [step for step in s[3] if "HARVESTGEN" in step] )>0:
                        chainDict['TimePerEvent']=0.01
                        thisLabel=thisLabel+"_gen"
                    # for double miniAOD test
                    if len( [step for step in s[3] if "DBLMINIAODMCUP15NODQM" in step] )>0:
                        thisLabel=thisLabel+"_dblMiniAOD"
                    processStrPrefix=''
                    setPrimaryDs=None
                    for step in s[3]:

                        if 'INPUT' in step or (not isinstance(s[2][index],str)):
                            nextHasDSInput=s[2][index]

                        else:

                            if (index==0):
                                #first step and not input -> gen part
                                chainDict['nowmTasklist'].append(copy.deepcopy(self.defaultScratch))
                                try:
                                    chainDict['nowmTasklist'][-1]['nowmIO']=json.loads(open('%s/%s.io'%(dir,step)).read())
                                except:
                                    print "Failed to find",'%s/%s.io'%(dir,step),". The workflows were probably not run or the cfg was not created."
                                    return -15

                                chainDict['nowmTasklist'][-1]['PrimaryDataset']='RelVal'+s[1].split('+')[0]
                                if not '--relval' in s[2][index]:
                                    print 'Cannot create a task from scratch without the --relval splitting information'
                                    return -12
                                else:
                                    arg=s[2][index].split()
                                    ns=map(int,arg[arg.index('--relval')+1].split(','))
                                    chainDict['nowmTasklist'][-1]['RequestNumEvents'] = ns[0]
                                    chainDict['nowmTasklist'][-1]['EventsPerJob'] = ns[1]
                                if 'FASTSIM' in s[2][index] or '--fast' in s[2][index]:
                                    thisLabel+='_FastSim'
                                if 'lhe' in s[2][index]:
                                    chainDict['nowmTasklist'][-1]['LheInputFiles'] =True

                            elif nextHasDSInput:
                                chainDict['nowmTasklist'].append(copy.deepcopy(self.defaultInput))
                                try:
                                    chainDict['nowmTasklist'][-1]['nowmIO']=json.loads(open('%s/%s.io'%(dir,step)).read())
                                except:
                                    print "Failed to find",'%s/%s.io'%(dir,step),". The workflows were probably not run or the cfg was not created."
                                    return -15
                                chainDict['nowmTasklist'][-1]['InputDataset']=nextHasDSInput.dataSet
                                splitForThisWf=nextHasDSInput.split
                                chainDict['nowmTasklist'][-1]['LumisPerJob']=splitForThisWf
                                if step in wmsplit:
                                    chainDict['nowmTasklist'][-1]['LumisPerJob']=wmsplit[step]
                                # get the run numbers or #events
                                if len(nextHasDSInput.run):
                                    chainDict['nowmTasklist'][-1]['RunWhitelist']=nextHasDSInput.run
                                if len(nextHasDSInput.ls):
                                    chainDict['nowmTasklist'][-1]['LumiList']=nextHasDSInput.ls
                                #print "what is s",s[2][index]
                                if '--data' in s[2][index] and nextHasDSInput.label:
                                    thisLabel+='_RelVal_%s'%nextHasDSInput.label
                                if 'filter' in chainDict['nowmTasklist'][-1]['nowmIO']:
                                    print "This has an input DS and a filter sequence: very likely to be the PyQuen sample"
                                    processStrPrefix='PU_'
                                    setPrimaryDs = 'RelVal'+s[1].split('+')[0]
                                    if setPrimaryDs:
                                        chainDict['nowmTasklist'][-1]['PrimaryDataset']=setPrimaryDs
                                nextHasDSInput=None
                            else:
                                #not first step and no inputDS
                                chainDict['nowmTasklist'].append(copy.deepcopy(self.defaultTask))
                                try:
                                    chainDict['nowmTasklist'][-1]['nowmIO']=json.loads(open('%s/%s.io'%(dir,step)).read())
                                except:
                                    print "Failed to find",'%s/%s.io'%(dir,step),". The workflows were probably not run or the cfg was not created."
                                    return -15
                                if splitForThisWf:
                                    chainDict['nowmTasklist'][-1]['LumisPerJob']=splitForThisWf
                                if step in wmsplit:
                                    chainDict['nowmTasklist'][-1]['LumisPerJob']=wmsplit[step]

                            # change LumisPerJob for Hadronizer steps.
                            if 'Hadronizer' in step:
                                chainDict['nowmTasklist'][-1]['LumisPerJob']=wmsplit['Hadronizer']

                            #print step
                            chainDict['nowmTasklist'][-1]['TaskName']=step
                            if setPrimaryDs:
                                chainDict['nowmTasklist'][-1]['PrimaryDataset']=setPrimaryDs
                            chainDict['nowmTasklist'][-1]['ConfigCacheID']='%s/%s.py'%(dir,step)
                            chainDict['nowmTasklist'][-1]['GlobalTag']=chainDict['nowmTasklist'][-1]['nowmIO']['GT'] # copy to the proper parameter name
                            chainDict['GlobalTag']=chainDict['nowmTasklist'][-1]['nowmIO']['GT'] #set in general to the last one of the chain
                            if 'pileup' in chainDict['nowmTasklist'][-1]['nowmIO']:
                                chainDict['nowmTasklist'][-1]['MCPileup']=chainDict['nowmTasklist'][-1]['nowmIO']['pileup']
                            if '--pileup ' in s[2][index]: # catch --pileup (scenario) and not --pileup_ (dataset to be mixed) => works also when making PRE-MIXed datasets
                                processStrPrefix='PU_'     # take care of pu overlay done with GEN-SIM mixing
                                if ( s[2][index].split()[ s[2][index].split().index('--pileup')+1 ] ).find('25ns') > 0 :
                                    processStrPrefix='PU25ns_'
                                elif ( s[2][index].split()[ s[2][index].split().index('--pileup')+1 ] ).find('50ns') > 0 :
                                    processStrPrefix='PU50ns_'
                            if 'DIGIPREMIX_S2' in s[2][index] : # take care of pu overlay done with DIGI mixing of premixed events
                                if s[2][index].split()[ s[2][index].split().index('--pileup_input')+1 ].find('25ns') > 0 :
                                    processStrPrefix='PUpmx25ns_'
                                elif s[2][index].split()[ s[2][index].split().index('--pileup_input')+1 ].find('50ns') > 0 :
                                    processStrPrefix='PUpmx50ns_'

                            if acqEra:
                                #chainDict['AcquisitionEra'][step]=(chainDict['CMSSWVersion']+'-PU_'+chainDict['nowmTasklist'][-1]['GlobalTag']).replace('::All','')+thisLabel
                                chainDict['AcquisitionEra'][step]=chainDict['CMSSWVersion']
                                chainDict['ProcessingString'][step]=processStrPrefix+chainDict['nowmTasklist'][-1]['GlobalTag'].replace('::All','')+thisLabel
                            else:
                                #chainDict['nowmTasklist'][-1]['AcquisitionEra']=(chainDict['CMSSWVersion']+'-PU_'+chainDict['nowmTasklist'][-1]['GlobalTag']).replace('::All','')+thisLabel
                                chainDict['nowmTasklist'][-1]['AcquisitionEra']=chainDict['CMSSWVersion']
                                chainDict['nowmTasklist'][-1]['ProcessingString']=processStrPrefix+chainDict['nowmTasklist'][-1]['GlobalTag'].replace('::All','')+thisLabel

                            # specify a different ProcessingString for the double miniAOD dataset
                            if ('DBLMINIAODMCUP15NODQM' in step):
                                chainDict['nowmTasklist'][-1]['ProcessingString']=chainDict['nowmTasklist'][-1]['ProcessingString']+'_miniAOD'

                            if( chainDict['nowmTasklist'][-1]['Multicore'] ):
                                # the scaling factor of 1.2GB / thread is empirical and measured on a SECOND round of tests with PU samples
                                # the number of threads is NO LONGER assumed to be the same for all tasks
                                # https://hypernews.cern.ch/HyperNews/CMS/get/edmFramework/3509/1/1/1.html
                                # now changed to 1.5GB / additional thread according to discussion:
                                # https://hypernews.cern.ch/HyperNews/CMS/get/relval/4817/1/1.html
#                               chainDict['nowmTasklist'][-1]['Memory'] = 3000 + int( chainDict['nowmTasklist'][-1]['Multicore'] -1 )*1500
                                chainDict['nowmTasklist'][-1]['Memory'] = self.memoryOffset + int( chainDict['nowmTasklist'][-1]['Multicore'] -1 ) * self.memPerCore

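                                # Worked example of the line above, using the
                                # values from the commented-out default (3000 MB
                                # offset, 1500 MB per extra core): a task with
                                # Multicore=4 gets 3000 + (4-1)*1500 = 7500 MB.
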
                        index+=1
                    #end of loop through steps
                    chainDict['RequestString']='RV'+chainDict['CMSSWVersion']+s[1].split('+')[0]
                    if processStrPrefix or thisLabel:
                        chainDict['RequestString']+='_'+processStrPrefix+thisLabel

            #wrap up for this one
            import pprint
            #print 'wrapping up'
            #pprint.pprint(chainDict)
            #loop on the task list
            for i_second in reversed(range(len(chainDict['nowmTasklist']))):
                t_second=chainDict['nowmTasklist'][i_second]
                #print "t_second taskname", t_second['TaskName']
                if 'primary' in t_second['nowmIO']:
                    #print t_second['nowmIO']['primary']
                    primary=t_second['nowmIO']['primary'][0].replace('file:','')
                    for i_input in reversed(range(0,i_second)):
                        t_input=chainDict['nowmTasklist'][i_input]
                        for (om,o) in t_input['nowmIO'].items():
                            if primary in o:
                                #print "found",primary,"produced by",om,"of",t_input['TaskName']
                                t_second['InputTask'] = t_input['TaskName']
                                t_second['InputFromOutputModule'] = om
                                #print 't_second',pprint.pformat(t_second)
                                if t_second['TaskName'].startswith('HARVEST'):
                                    chainDict.update(copy.deepcopy(self.defaultHarvest))
                                    chainDict['DQMConfigCacheID']=t_second['ConfigCacheID']
                                    ## the info is not in the task-specific dict but in the general dict
                                    #t_input.update(copy.deepcopy(self.defaultHarvest))
                                    #t_input['DQMConfigCacheID']=t_second['ConfigCacheID']
                                break

            # agreed changes for wm injection:
            # - Campaign: *optional* string during creation. It will default to the AcqEra value if possible.
            #   Otherwise it will be empty.
            # - AcquisitionEra: *mandatory* string at request level during creation. *optional* string
            #   at task level during creation. *optional* during assignment.
            # - ProcessingString: *mandatory* string at request level during creation. *optional* string
            #   at task level during creation. *optional* during assignment.
            # - ProcessingVersion: *optional* during creation (default 1). *optional* during assignment.
            #
            # Which requires the following changes here:
            # - reset the global AcquisitionEra and ProcessingString to those of the first task
            # - and also set Campaign to always be the same as the AcquisitionEra

            if acqEra:
                chainDict['AcquisitionEra'] = chainDict['AcquisitionEra'].values()[0]
                chainDict['ProcessingString'] = chainDict['ProcessingString'].values()[0]
            else:
                chainDict['AcquisitionEra'] = chainDict['nowmTasklist'][0]['AcquisitionEra']
                chainDict['ProcessingString'] = chainDict['nowmTasklist'][0]['ProcessingString']

            chainDict['Campaign'] = chainDict['AcquisitionEra']

            ## clean things up now
            itask=0
            if self.keep:
                for i in self.keep:
                    if type(i)==int and i < len(chainDict['nowmTasklist']):
                        chainDict['nowmTasklist'][i]['KeepOutput']=True
            for (i,t) in enumerate(chainDict['nowmTasklist']):
                if t['TaskName'].startswith('HARVEST'):
                    continue
                if not self.keep:
                    t['KeepOutput']=True
                elif t['TaskName'] in self.keep:
                    t['KeepOutput']=True
                t.pop('nowmIO')
                itask+=1
                chainDict['Task%d'%(itask)]=t

            ## provide the number of tasks
            chainDict['TaskChain']=itask #len(chainDict['nowmTasklist'])

            chainDict.pop('nowmTasklist')
            self.chainDicts[n]=chainDict

        return 0

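    # Shape of the prepared request, as assembled at the end of prepare():
    # the retained tasks are flattened into keys 'Task1'..'TaskN', 'TaskChain'
    # is set to N, and 'nowmTasklist' is popped. Illustrative layout only,
    # with made-up task names:
    #
    #   {
    #     "RequestType": "TaskChain",
    #     "TaskChain": 2,
    #     "Task1": {"TaskName": "GenSimFull", ...},
    #     "Task2": {"TaskName": "DigiFull", "InputTask": "GenSimFull", ...},
    #     ...
    #   }
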
    def uploadConf(self,filePath,label,where):
        labelInCouch=self.label+'_'+label
        cacheName=filePath.split('/')[-1]
        if self.testMode:
            self.count+=1
            print '\tFake upload of',filePath,'to couch with label',labelInCouch
            return self.count
        else:
            try:
                from modules.wma import upload_to_couch,DATABASE_NAME
            except:
                print '\n\tUnable to find wmcontrol modules. Please include it in your python path\n'
                print '\n\t QUIT\n'
                sys.exit(-16)

            if cacheName in self.couchCache:
                print "Not re-uploading",filePath,"to",where,"for",label
                cacheId=self.couchCache[cacheName]
            else:
                print "Loading",filePath,"to",where,"for",label
                ## totally fork the upload to couch to prevent cross loading of process configurations
                pool = multiprocessing.Pool(1)
                cacheIds = pool.map( upload_to_couch_oneArg, [(filePath,labelInCouch,self.user,self.group,where)] )
                cacheId = cacheIds[0]
                self.couchCache[cacheName]=cacheId
            return cacheId

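    # uploadConf memoizes on the cfg file's basename via self.couchCache, so a
    # configuration shared by several workflows is uploaded only once per
    # session; in test mode it returns an incrementing fake ID (self.count)
    # without contacting couch at all.
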
    def upload(self):
        for (n,d) in self.chainDicts.items():
            for it in d:
                if it.startswith("Task") and it!='TaskChain':
                    #upload
                    couchID=self.uploadConf(d[it]['ConfigCacheID'],
                                            str(n)+d[it]['TaskName'],
                                            d['CouchURL']
                                            )
                    print d[it]['ConfigCacheID']," uploaded to couchDB for",str(n),"with ID",couchID
                    d[it]['ConfigCacheID']=couchID
                if it=='DQMConfigCacheID':
                    couchID=self.uploadConf(d['DQMConfigCacheID'],
                                            str(n)+'harvesting',
                                            d['CouchURL']
                                            )
                    print d['DQMConfigCacheID'],"uploaded to couchDB for",str(n),"with ID",couchID
                    d['DQMConfigCacheID']=couchID

    def submit(self):
        try:
            from modules.wma import makeRequest,approveRequest
            from wmcontrol import random_sleep
            print '\n\tFound wmcontrol\n'
        except:
            print '\n\tUnable to find wmcontrol modules. Please include it in your python path\n'
            if not self.testMode:
                print '\n\t QUIT\n'
                sys.exit(-17)

        import pprint
        for (n,d) in self.chainDicts.items():
            if self.testMode:
                print "Only viewing request",n
                pprint.pprint(d)
            else:
                #submit each dict to wmagent
                print "For eyes before submitting",n
                pprint.pprint(d)
                print "Submitting",n,"..........."
                workFlow=makeRequest(self.wmagent,d,encodeDict=True)
                approveRequest(self.wmagent,workFlow)
                print "...........",n,"submitted"
                random_sleep()

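
# A minimal driver sketch, assuming an "opt" object carrying the attributes
# read in __init__ (keep, memoryOffset, memPerCore, nThreads, testbed, label,
# ...) and a matrix reader "mReader" with its "directories" map, as provided
# by the runTheMatrix machinery that instantiates this class:
#
#   injector = MatrixInjector(opt, mode='init')
#   if injector.prepare(mReader, directories) == 0:
#       injector.upload()
#       injector.submit()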