CMS 3D CMS Logo

 All Classes Namespaces Files Functions Variables Typedefs Enumerations Enumerator Properties Friends Macros Pages
Functions | Variables
models Namespace Reference

Functions

def date_args_to_days
 
def generate
 
def status_full_name
 
def to_timestamp
 

Variables

 authentication
 
 global_tag_object
 
 session
 
 tags_to_use
 

Detailed Description

Using Audrius' models from the Flask-based browser.

This file contains models that are used with SQLAlchemy.

Note: some things done in methods written in classes rely on the querying module adding extra information to classes,
      so these will not work in a normal context outside the framework.

Function Documentation

def models.date_args_to_days (   radius)

Definition at line 31 of file models.py.

Referenced by generate().

def date_args_to_days(**radius):
    """Convert date-offset keyword arguments into a total number of days.

    Accepted keywords: days, weeks (7 days each), months (28 days each),
    years (365 days each). Missing keywords contribute 0 days.

    Returns the total day count as an int.
    """
    # Treat a missing "days" as 0 instead of crashing on None += ...
    days = radius.get("days") if radius.get("days") is not None else 0
    days += radius.get("weeks")*7 if radius.get("weeks") is not None else 0
    days += radius.get("months")*28 if radius.get("months") is not None else 0
    # Bug fix: years must be multiplied by 365, not added to 365.
    days += radius.get("years")*365 if radius.get("years") is not None else 0
    return days
def date_args_to_days
Definition: models.py:31
def models.generate ( )

Definition at line 38 of file models.py.

References data_formats._dicts_to_orm_objects(), Vispa.Plugins.EdmBrowser.EdmDataAccessor.all(), CustomConfigs.Base(), configurableAnalysis.Candidate, date_args_to_days(), cmsPerfStripChart.dict, alcazmumu_cfi.filter, join(), MessageLogger_cff.limit, data_sources.json_data_node.make(), o2o.tags, data_formats.to_datatables(), to_timestamp(), and ComparisonHelper.zip().

Referenced by data_sources.json_list.as_table(), and condformats_serialization_generate.main().

38 
39 def generate():
40 
41  Base = declarative_base()
42 
43  class GlobalTag(Base):
44  __tablename__ = 'GLOBAL_TAG'
45 
46  headers = ["name", "validity", "description", "release", "insertion_time", "snapshot_time", "scenario", "workflow", "type"]
47 
48  name = Column(String(100), unique=True, nullable=False, primary_key=True)
49  validity = Column(Integer, nullable=False)
50  description = Column(String(4000), nullable=False)
51  release = Column(String(100), nullable=False)
52  insertion_time = Column(DateTime, nullable=False)
53  snapshot_time = Column(DateTime, nullable=False)
54  scenario = Column(String(100))
55  workflow = Column(String(100))
56  type = Column(String(1))
57  tag_map = relationship('GlobalTagMap', backref='global_tag')
58 
59  def __init__(self, dictionary={}, convert_timestamps=True):
60  # assign each entry in a kwargs
61  for key in dictionary:
62  try:
63  if convert_timestamps:
64  self.__dict__[key] = to_timestamp(dictionary[key])
65  else:
66  self.__dict__[key] = dictionary[key]
67  except KeyError as k:
68  continue
69 
70  def __repr__(self):
71  return '<GlobalTag %r>' % self.name
72 
73  def as_dicts(self):
74  json_gt = {
75  'name': self.name,
76  'validity': self.validity,
77  'description': self.description,
78  'release': self.release,
79  'insertion_time': self.insertion_time,
80  'snapshot_time': self.snapshot_time,
81  'scenario': self.scenario,
82  'workflow': self.workflow,
83  'type': self.type
84  }
85  return json_gt
86 
87  def to_array(self):
88  return [self.name, self.release, to_timestamp(self.insertion_time), to_timestamp(self.snapshot_time), self.description]
89 
90  @staticmethod
91  def to_datatables(global_tags):
92  gt_data = {
93  'headers': ['Global Tag', 'Release', 'Insertion Time', 'Snapshot Time', 'Description'],
94  'data': [ g.to_array() for g in global_tags ],
95  }
96  return gt_data
97 
98  # get all global tags
99  def all(self, amount=10):
100  gts = data_sources.json_data_node.make(self.session.query(GlobalTag).order_by(GlobalTag.name).limit(amount).all())
101  return gts
102 
103  def tags(self, amount=10):
104  """gt_maps = self.session.query(GlobalTagMap).filter(GlobalTagMap.global_tag_name == self.name).limit(amount).subquery()
105  all_tags = self.session.query(gt_maps.c.record, gt_maps.c.label,\
106  Tag.name, Tag.time_type, Tag.object_type,\
107  Tag.synchronization, Tag.end_of_validity, Tag.description,\
108  Tag.last_validated_time, Tag.insertion_time,\
109  Tag.modification_time)\
110  .join(gt_maps, Tag.name == gt_maps.c.tag_name).order_by(Tag.name.asc()).limit(amount).all()"""
111  all_tags = self.session.query(GlobalTagMap.global_tag_name, GlobalTagMap.record, GlobalTagMap.label, GlobalTagMap.tag_name)\
112  .filter(GlobalTagMap.global_tag_name == self.name)\
113  .order_by(GlobalTagMap.tag_name).limit(amount).all()
114  column_names = ["global_tag_name", "record", "label", "tag_name"]
115  all_tags = map(lambda row : dict(zip(column_names, map(to_timestamp, row))), all_tags)
116  all_tags = data_formats._dicts_to_orm_objects(GlobalTagMap, all_tags)
117  return data_sources.json_data_node.make(all_tags)
118 
119  # inefficient
120  def tags_full(self, amount=10):
121  tags = self.session.query(Tag).order_by(Tag.name).subquery()
122  all_tags = self.session.query(GlobalTagMap.global_tag_name,\
123  GlobalTagMap.record,\
124  GlobalTagMap.label,\
125  tags.c.name, tags.c.time_type, tags.c.object_type,\
126  tags.c.synchronization, tags.c.end_of_validity, tags.c.description,\
127  tags.c.last_validated_time, tags.c.insertion_time,\
128  tags.c.modification_time)\
129  .join(tags, GlobalTagMap.tag_name == tags.c.name).filter(GlobalTagMap.global_tag_name == self.name)
130  if amount != None:
131  all_tags = all_tags.limit(amount)
132  all_tags = all_tags.all()
133  column_names = ["global_tag_name", "record", "label", "name", "time_type", "object_type", "synchronization",\
134  "end_of_validity", "description", "last_validated_time", "insertion_time", "modification_time"]
135  all_tags = map(lambda row : dict(zip(column_names, map(to_timestamp, row))), all_tags)
136  all_tags = data_formats._dicts_to_orm_objects(Tag, all_tags)
137  return data_sources.json_data_node.make(all_tags)
138 
139  # insertion_time is a datetime.datetime string, radius the time to add on each side
140  # note radius is a list of keyword arguments, and will be passed to datetime.timedelta
141  def insertion_time_interval(self, insertion_time, **radius):
142  # convert all arguments in radius into day scale
143  # may need to change this to add the number of days in each month in the interval
144  days = date_args_to_days(**radius)
145  minus = insertion_time - datetime.timedelta(days=days)
146  plus = insertion_time + datetime.timedelta(days=days)
147  gts = self.session.query(GlobalTag).filter(and_(GlobalTag.insertion_time >= minus, GlobalTag.insertion_time <= plus)).order_by(GlobalTag.name).all()
149 
150  def snapshot_time_interval(self, snapshot_time, **radius):
151  days = date_args_to_days(**radius)
152  minus = snapshot_time - datetime.timedelta(days=days)
153  plus = snapshot_time + datetime.timedelta(days=days)
154  gts = self.session.query(GlobalTag).filter(and_(GlobalTag.snapshot_time >= minus, GlobalTag.snapshot_time <= plus)).order_by(GlobalTag.name).all()
156 
157  # gets all iovs belonging to this global tag with insertion times <= this global tag's snapshot time
158  def iovs(self, amount=10, valid=False):
159  # join global_tag_map onto iov (where insertion time <= gt snapshot) by tag_name + return results
160  valid_iovs_all_tags = self.session.query(IOV)
161  if valid:
162  valid_iovs_all_tags = valid_iovs_all_tags.filter(IOV.insertion_time < self.snapshot_time)
163  valid_iovs_all_tags = valid_iovs_all_tags.subquery()
164  valid_iovs_gt_tags = self.session.query(GlobalTagMap.tag_name, valid_iovs_all_tags.c.since,\
165  valid_iovs_all_tags.c.payload_hash, valid_iovs_all_tags.c.insertion_time)\
166  .join(valid_iovs_all_tags, GlobalTagMap.tag_name == valid_iovs_all_tags.c.tag_name)\
167  .filter(GlobalTagMap.global_tag_name == self.name)\
168  .order_by(valid_iovs_all_tags.c.insertion_time).limit(amount).all()
169  column_names = ["tag_name", "since", "payload_hash", "insertion_time"]
170  all_iovs = map(lambda row : dict(zip(column_names, map(to_timestamp, row))), valid_iovs_gt_tags)
171  all_iovs = data_formats._dicts_to_orm_objects(IOV, all_iovs)
172  return data_sources.json_data_node.make(all_iovs)
173 
174  def pending_tag_requests(self):
175  if self.empty:
176  return None
177  # get a json_list of all global_tag_map requests associated with this global tag
178  gt_map_requests = self.session.query(GlobalTagMapRequest.queue, GlobalTagMapRequest.record, GlobalTagMapRequest.label,\
179  GlobalTagMapRequest.tag, GlobalTagMapRequest.status)\
180  .filter(and_(GlobalTagMapRequest.queue == self.name, GlobalTagMapRequest.status.in_(["P", "R"]))).all()
181  #column_names = ["queue", "tag", "record", "label", "status", "description", "submitter_id", "time_submitted", "last_edited"]
182  column_names = ["queue", "record", "label", "tag", "status"]
183  gt_map_requests = map(lambda row : dict(zip(column_names, map(to_timestamp, row))), gt_map_requests)
184  gt_map_requests = data_formats._dicts_to_orm_objects(GlobalTagMapRequest, gt_map_requests)
185  return data_sources.json_data_node.make(gt_map_requests)
186 
187  # creates and returns a new candidate object
188  def candidate(self, gt_map_requests):
189  if self.empty:
190  return None
191  new_candidate = Candidate(self, gt_map_requests)
192  return new_candidate
193 
194  # not an ORM class, but corresponds to a table
195  class Candidate():
196  global_tag_object = None
197  tags_to_use = None
198  authentication = None
199 
200  def __init__(self, queue, gt_map_requests):
202  self.session = queue.session
203  self.authentication = queue.authentication
204 
205  # verify that queue is in fact a queue
206  if queue.type != "Q":
207  return None
208  else:
209  self.global_tag_object = queue
210 
211  # validate the list of tags - make sure the list of tags contains unique (record, label) pairs
212  found_record_label_pairs = []
213  # whether gt_map_requests is a list or a json_list, it is iterable
214  for gt_map in gt_map_requests:
215  if (gt_map.record, gt_map.label) in found_record_label_pairs:
216  # reset iterator before we return
217  if gt_map_requests.__class__.__name__ == "json_list":
218  gt_map_requests.reset()
219  return None
220  else:
221  found_record_label_pairs.append((gt_map.record, gt_map.label))
222  # reset iterator
223  if gt_map_requests.__class__.__name__ == "json_list":
224  gt_map_requests.reset()
225 
226  # if we're here, the tags list is valid
227  self.tags_to_use = gt_map_requests
228 
229  # write the candidate to the database, and catch any errors
230  # Note: errors may be thrown if the user does not have write permissions for the database
231  def cut(self):
232  CANDIDATE_TIME_FORMAT = "%Y_%m_%d_%H_%M_%S"
233  TIME_FORMAT = "%Y-%m-%d %H:%M:%S"
234  # send a post request to dbAccess service to write the new candidate
235  candidate_name = self.global_tag_object.name.replace("Queue", "Candidate")
236  candidate_name += "_%s" % datetime.datetime.now().strftime(CANDIDATE_TIME_FORMAT)
237  time_now = datetime.datetime.now().strftime(TIME_FORMAT)
238  candidate_release = self.global_tag_object.release
239  candidate_description = "Candidate created from the queue: '%s' at: '%s'" % (self.global_tag_object.name, time_now)
240 
241  extra_records = data_formats._objects_as_dicts(self.tags_to_use)
242  for record in extra_records:
243  for key in ["submitter_id", "description", "time_submitted", "last_edited"]:
244  del record[key]
245 
246  params = {
247  "c_name" : candidate_name,
248  "snapshot_time" : time_now,
249  "from_gt" : self.global_tag_object.name,
250  "release" : candidate_release,
251  "desc" : candidate_description,
252  "validity" : 18446744073709551615,
253  "extra_records" : json.dumps(extra_records.data())
254  }
255 
256  # send http request to dbAccess
257  # get username and password from netrc
258  credentials = self.authentication.authenticators("dbAccess")
259  print(credentials)
260  #headers = {"user":credentials[0], "password":credentials[2]}
261 
262  auth = base64.encodestring("%s:%s" % (credentials[0], credentials[1])).replace('\n', '')
263  print(auth)
264 
265  params = urllib.urlencode(params)
266  print(params)
267 
268  # send http request to dbAccess once requests library is installed in cmssw
269 
270  class GlobalTagMap(Base):
271  __tablename__ = 'GLOBAL_TAG_MAP'
272 
273  headers = ["global_tag_name", "record", "label", "tag_name"]
274 
275  global_tag_name = Column(String(100), ForeignKey('GLOBAL_TAG.name'), primary_key=True, nullable=False)
276  record = Column(String(100), ForeignKey('RECORDS.record'), primary_key=True, nullable=False)
277  label = Column(String(100), primary_key=True, nullable=False)
278  tag_name = Column(String(100), ForeignKey('TAG.name'), nullable=False)
279 
280  def __init__(self, dictionary={}, convert_timestamps=True):
281  # assign each entry in a kwargs
282  for key in dictionary:
283  try:
284  if convert_timestamps:
285  self.__dict__[key] = to_timestamp(dictionary[key])
286  else:
287  self.__dict__[key] = dictionary[key]
288  except KeyError as k:
289  continue
290 
291  def __repr__(self):
292  return '<GlobalTagMap %r>' % self.global_tag_name
293 
294  def as_dicts(self):
295  json_gtm = {
296  "global_tag_name" : str(self.global_tag_name),
297  "record" : str(self.record),
298  "label" : str(self.label),
299  "tag_name" : str(self.tag_name)
300  }
301  return json_gtm
302 
303 
304  class GlobalTagMapRequest(Base):
305  __tablename__ = 'GLOBAL_TAG_MAP_REQUEST'
306 
307  queue = Column(String(100), primary_key=True, nullable=False)
308  tag = Column(String(100), ForeignKey('TAG.name'), primary_key=True, nullable=False)
309  record = Column(String(100), ForeignKey('RECORDS.record'), primary_key=True, nullable=False)
310  label = Column(String(100), primary_key=True, nullable=False)
311  status = Column(String(1), nullable=False)
312  description = Column(String(4000), nullable=False)
313  submitter_id = Column(Integer, nullable=False)
314  time_submitted = Column(DateTime, nullable=False)
315  last_edited = Column(DateTime, nullable=False)
316 
317  def __init__(self, dictionary={}, convert_timestamps=True):
318  # assign each entry in a kwargs
319  for key in dictionary:
320  try:
321  if convert_timestamps:
322  self.__dict__[key] = to_timestamp(dictionary[key])
323  else:
324  self.__dict__[key] = dictionary[key]
325  except KeyError as k:
326  continue
327 
328  headers = ["queue", "tag", "record", "label", "status", "description", "submitter_id", "time_submitted", "last_edited"]
329 
330  def as_dicts(self):
331  return {
332  "queue" : self.queue,
333  "tag" : self.tag,
334  "record" : self.record,
335  "label" : self.label,
336  "status" : self.status,
337  "description" : self.description,
338  "submitter_id" : self.submitter_id,
339  "time_submitted" : self.time_submitted,
340  "last_edited" : self.last_edited
341  }
342 
343  def __repr__(self):
344  return '<GlobalTagMapRequest %r>' % self.queue
345 
346  def to_array(self):
347  return [self.queue, self.tag, self.record, self.label, status_full_name(self.status), to_timestamp(self.time_submitted), to_timestamp(self.last_edited)]
348 
349  @staticmethod
350  def to_datatables(requests):
351  user_requests = {
352  'headers': ['Queue', 'Tag', 'Record', 'Label', 'Status', 'Submitted', 'Modified'],
353  'data': [ r.to_array() for r in requests ],
354  }
355  return user_requests
356 
357  class IOV(Base):
358  __tablename__ = 'IOV'
359 
360  headers = ["tag_name", "since", "payload_hash", "insertion_time"]
361 
362  tag_name = Column(String(4000), ForeignKey('TAG.name'), primary_key=True, nullable=False)
363  since = Column(Integer, primary_key=True, nullable=False)
364  payload_hash = Column(String(40), ForeignKey('PAYLOAD.hash'), primary_key=True, nullable=False)
365  insertion_time = Column(DateTime, nullable=False)
366 
367  def __init__(self, dictionary={}, convert_timestamps=True):
368  # assign each entry in a kwargs
369  for key in dictionary:
370  try:
371  if convert_timestamps:
372  self.__dict__[key] = to_timestamp(dictionary[key])
373  else:
374  self.__dict__[key] = dictionary[key]
375  except KeyError as k:
376  continue
377 
378  def as_dicts(self):
379  return {
380  "tag_name" : self.tag_name,
381  "since" : self.since,
382  "payload_hash" : self.payload_hash,
383  "insertion_time" : self.insertion_time
384  }
385 
386  def __repr__(self):
387  return '<IOV %r>' % self.tag_name
388 
389  def to_array(self):
390  return [self.since, to_timestamp(self.insertion_time), self.payload_hash]
391 
392  @staticmethod
393  def to_datatables(iovs):
394  iovs_data = {
395  'headers': ['Since', 'Insertion Time', 'Payload'],
396  'data': [ i.to_array() for i in iovs ],
397  }
398  return iovs_data
399 
400  def all(self, amount=10):
401  return data_sources.json_data_node.make(self.session.query(IOV).order_by(IOV.tag_name).limit(amount).all())
402 
403 
404  class Payload(Base):
405  __tablename__ = 'PAYLOAD'
406 
407  headers = ["hash", "object_type", "version", "insertion_time"]
408 
409  hash = Column(String(40), primary_key=True, nullable=False)
410  object_type = Column(String(4000), nullable=False)
411  version = Column(String(4000), nullable=False)
412  insertion_time = Column(DateTime, nullable=False)
413 
414  def __init__(self, dictionary={}, convert_timestamps=True):
415  # assign each entry in a kwargs
416  for key in dictionary:
417  try:
418  if convert_timestamps:
419  self.__dict__[key] = to_timestamp(dictionary[key])
420  else:
421  self.__dict__[key] = dictionary[key]
422  except KeyError as k:
423  continue
424 
425  def as_dicts(self):
426  return {
427  "hash" : self.hash,
428  "object_type" : self.object_type,
429  "version" : self.version,
430  "insertion_time" : self.insertion_time
431  }
432 
433  def __repr__(self):
434  return '<Payload %r>' % self.hash
435 
436  def to_array(self):
437  return [self.hash, self.object_type, self.version, to_timestamp(self.insertion_time)]
438 
439  @staticmethod
440  def to_datatables(payloads):
441  payloads_data = {
442  'headers': ["Payload", "Object Type", "Version", "Insertion Time"],
443  'data': [ p.to_array() for p in payloads ],
444  }
445  return payloads_data
446 
447  def parent_tags(self):
448  # check if this payload is empty
449  if self.empty:
450  return None
451  else:
452  tag_names = map(lambda entry : entry[0],\
453  self.session.query(IOV.tag_name).filter(IOV.payload_hash == self.hash).all())
454  tags = self.session.query(Tag).filter(Tag.name.in_(tag_names)).order_by(Tag.name).all()
456 
457  def all(self, amount=10):
458  return data_sources.json_data_node.make(self.session.query(Payload).order_by(Payload.hash).limit(amount).all())
459 
460 
461  class Record(Base):
462  __tablename__ = 'RECORDS'
463 
464  headers = ["record", "object", "type"]
465 
466  record = Column(String(100), primary_key=True, nullable=False)
467  object = Column(String(200), nullable=False)
468  type = Column(String(20), nullable=False)
469 
470  def as_dicts(self):
471  return {
472  "record" : self.record,
473  "object" : self.object,
474  "type" : self.type
475  }
476 
477  def __repr__(self):
478  return '<Record %r>' % self.record
479 
480  def to_array(self):
481  return [self.record, self.object]
482 
483  @staticmethod
484  def to_datatables(records):
485  records_data = {
486  'headers': ["Record", "Object"],
487  'data': [ r.to_array() for r in records ],
488  }
489  return records_data
490 
491  def all(self, amount=10):
492  return data_sources.json_data_node.make(self.session.query(Record).order_by(Record.record).limit(amount).all())
493 
494 
495  class RecordReleases(Base):
496  __tablename__ = 'RECORD_RELEASES'
497 
498  record = Column(String(100), ForeignKey('RECORDS.record'), nullable=False)
499  release_cycle = Column(String(100), primary_key=True, nullable=False)
500  release = Column(String(100), nullable=False)
501  release_int = Column(String(100), nullable=False)
502 
503  def as_dicts(self):
504  return {
505  "release_cycle" : self.release_cycle,
506  "release" : self.release,
507  "release_int" : self.release_int
508  }
509 
510  def __repr__(self):
511  return '<RecordReleases %r>' % self.record
512 
513  def to_array(self):
514  return [self.release_cycle, self.release, self.release_int]
515 
516  @staticmethod
517  def to_datatables(recordReleases):
518  record_releases_data = {
519  'headers': ["Release Cycle", "Starting Release", "Starting Release Number"],
520  'data': [ r.to_array() for r in recordReleases ],
521  }
522  return record_releases_data
523 
524 
525  class ParsedReleases(Base):
526  __tablename__ = 'PARSED_RELEASES'
527 
528  release_cycle = Column(String(100), primary_key=True, nullable=False)
529  release = Column(String(100), nullable=False)
530  release_int = Column(String(100), nullable=False)
531 
532  def as_dicts(self):
533  return {
534  "release_cycle" : self.release_cycle,
535  "release" : self.release,
536  "release_int" : self.release_int
537  }
538 
539  def __repr__(self):
540  return '<ParsedReleases %r>' % self.release_cycle
541 
542  def to_array(self):
543  return [self.release_cycle, self.release, self.release_int]
544 
545  @staticmethod
546  def to_datatables(parsedReleases):
547  parsed_releases_data = {
548  'headers': ["Release Cycle", "Starting Release", "Starting Release Number"],
549  'data': [ p.to_array() for p in parsedReleases ],
550  }
551  return parsed_releases_data
552 
553 
554  class Tag(Base):
555  __tablename__ = 'TAG'
556 
557  headers = ["name", "time_type", "object_type", "synchronization", "end_of_validity",\
558  "description", "last_validated_time", "insertion_time", "modification_time"]
559 
560  name = Column(String(4000), primary_key=True, nullable=False)
561  time_type = Column(String(4000), nullable=False)
562  object_type = Column(String(4000), nullable=False)
563  synchronization = Column(String(4000), nullable=False)
564  end_of_validity = Column(Integer, nullable=False)
565  description = Column(String(4000), nullable=False)
566  last_validated_time = Column(Integer, nullable=False)
567  insertion_time = Column(DateTime, nullable=False)
568  modification_time = Column(DateTime, nullable=False)
569 
570  record = None
571  label = None
572 
573  def __init__(self, dictionary={}, convert_timestamps=True):
574  # assign each entry in a kwargs
575  for key in dictionary:
576  try:
577  if convert_timestamps:
578  self.__dict__[key] = to_timestamp(dictionary[key])
579  else:
580  self.__dict__[key] = dictionary[key]
581  except KeyError as k:
582  continue
583 
584  def as_dicts(self):
585  return {
586  "name" : self.name,
587  "time_type" : self.time_type,
588  "object_type" : self.object_type,
589  "synchronization" : self.synchronization,
590  "end_of_validity" : self.end_of_validity,
591  "description" : self.description,
592  "last_validated_time" : self.last_validated_time,
593  "insertion_time" : self.insertion_time,
594  "modification_time" : self.modification_time,
595  "record" : self.record,
596  "label" : self.label
597  }
598 
599  def __repr__(self):
600  return '<Tag %r>' % self.name
601 
602  def to_array(self):
603  return [self.name, self.time_type, self.object_type, self.synchronization, to_timestamp(self.insertion_time), self.description]
604 
605  @staticmethod
606  def to_datatables(tags):
607  tags_data = {
608  'headers': ["Tag", "Time Type", "Object Type", "Synchronization", "Insertion Time", "Description"],
609  'data': [ t.to_array() for t in tags ],
610  }
611  return tags_data
612 
613  def parent_global_tags(self):
614  if self.empty:
615  return None
616  else:
617  global_tag_names = map(lambda entry : entry[0], self.session.query(GlobalTagMap.global_tag_name).filter(GlobalTagMap.tag_name == self.name).all())
618  if len(global_tag_names) != 0:
619  global_tags = self.session.query(GlobalTag).filter(GlobalTag.name.in_(global_tag_names)).order_by(GlobalTag.name).all()
620  else:
621  global_tags = []
622  return data_sources.json_data_node.make(global_tags)
623 
624  def all(self, amount=10):
625  return data_sources.json_data_node.make(self.session.query(Tag).order_by(Tag.name).limit(amount).all())
626 
627  def insertion_time_interval(self, insertion_time, **radius):
628  days = date_args_to_days(**radius)
629  minus = insertion_time - datetime.timedelta(days=days)
630  plus = insertion_time + datetime.timedelta(days=days)
631  tags = self.session.query(Tag).filter(and_(Tag.insertion_time >= minus, Tag.insertion_time <= plus)).order_by(Tag.name).all()
633 
634  def modification_time_interval(self, modification_time, **radius):
635  days = date_args_to_days(**radius)
636  minus = modification_time - datetime.timedelta(days=days)
637  plus = modification_time + datetime.timedelta(days=days)
638  tags = self.session.query(Tag).filter(and_(Tag.modification_time >= minus, Tag.modification_time <= plus)).order_by(Tag.name).all()
640 
641  # Note: setting pretty to true changes the return type of the method
642  def iovs(self, pretty=False):
643  # get iovs in this tag
644  iovs = self.session.query(IOV).filter(IOV.tag_name == self.name).all()
645  if pretty:
646  iovs = data_formats._objects_to_dicts(iovs).data()
647  for n in range(0, len(iovs)):
648  iovs[n]["since"] = "{:>6}".format(str(iovs[n]["since"])) + " - " + ("{:<6}".format(str(iovs[n+1]["since"]-1)) if n != len(iovs)-1 else "")
649 
651 
652  return {"globaltag" : GlobalTag, "candidate" : Candidate, "globaltagmap" : GlobalTagMap, "globaltagmaprequest" : GlobalTagMapRequest, "iov" : IOV,\
"payload" : Payload, "record" : Record, "recordreleases" : RecordReleases, "parsedreleases" : ParsedReleases, "tag" : Tag, "Base" : Base}
std::string print(const Track &, edm::Verbosity=edm::Concise)
Track print utility.
Definition: print.cc:10
def to_timestamp
Definition: models.py:20
def _dicts_to_orm_objects
OutputIterator zip(InputIterator1 first1, InputIterator1 last1, InputIterator2 first2, InputIterator2 last2, OutputIterator result, Compare comp)
def _objects_to_dicts
Definition: data_formats.py:96
def generate
Definition: models.py:38
tuple tags
Definition: o2o.py:248
static std::string join(char **cmd)
Definition: RemoteFile.cc:18
def date_args_to_days
Definition: models.py:31
def status_full_name
Definition: models.py:23
char data[epos_bytes_allocation]
Definition: EPOS_Wrapper.h:82
def models.status_full_name (   status)

Definition at line 23 of file models.py.

def status_full_name(status):
    """Expand a one-letter status code ('P', 'R' or 'A') to its full name.

    Raises KeyError for any unrecognised code.
    """
    return {
        'P': 'Pending',
        'R': 'Rejected',
        'A': 'Accepted'
    }[status]
def status_full_name
Definition: models.py:23
def models.to_timestamp (   obj)

Definition at line 20 of file models.py.

Referenced by generate().

def to_timestamp(obj):
    """Render a datetime as 'YYYY-MM-DD HH:MM:SS,ffffff'; pass anything else through unchanged."""
    if isinstance(obj, datetime.datetime):
        return obj.strftime('%Y-%m-%d %H:%M:%S,%f')
    return obj
def to_timestamp
Definition: models.py:20

Variable Documentation

models.authentication

Definition at line 202 of file models.py.

models.global_tag_object

Definition at line 208 of file models.py.

models.session

Definition at line 201 of file models.py.

Referenced by LumiProducer.beginRun(), ora::SchemaUtils.cleanUp(), RPCDBCom.connect(), TestBase.connect(), ora::ConnectionPool.connect(), popcon::L1RPCHwConfigSourceHandler.ConnectOnlineDB(), popcon::RPCEMapSourceHandler.ConnectOnlineDB(), RPCFw.createFEB(), RPCFw.createGAS(), RPCFw.createIDMAP(), RPCFw.createIMON(), RPCFw.createMix(), cond::CredentialStore.createSchema(), RPCFw.createSTATUS(), RPCFw.createT(), RPCFw.createUXC(), RPCFw.createVMON(), lumi::service::DBService.disconnect(), popcon::L1RPCHwConfigSourceHandler.DisconnectOnlineDB(), popcon::RPCEMapSourceHandler.DisconnectOnlineDB(), cond::CredentialStore.drop(), L1TriggerScalerRead.dropTable(), cond::CredentialStore.exportAll(), DIPLumiProducer.filldetailcache(), ExpressLumiProducer.fillLSCache(), LumiProducer.fillLSCache(), LumiCorrectionSource.fillparamcache(), DIPLumiProducer.fillsummarycache(), CondDBESSource.fillTagCollectionFromGT(), FillInfoPopConSourceHandler.getNewObjects(), cond::CredentialStore.importForPrincipal(), cond::CredentialStore.installAdmin(), cond::CredentialStore.listConnections(), cond::CredentialStore.listPrincipals(), l1t::DataWriter.payloadToken(), l1t::DataWriterExt.payloadToken(), L1RCTOmdsFedVectorProducer.produce(), RunInfoRead.readData(), DQMSummaryReader.readData(), RunSummaryRead.readData(), L1TriggerScalerRead.readData(), popcon::RPCEMapSourceHandler.readEMap1(), popcon::L1RPCHwConfigSourceHandler.readHwConfig1(), l1t::DataWriterExt.readObject(), l1t::DataWriter.readObject(), cond::CredentialStore.removeConnection(), cond::CredentialStore.removePrincipal(), lumi::CMSRunSummaryDummy2DB.retrieveData(), lumi::HLTDummy2DB.retrieveData(), lumi::HLTConfDummy2DB.retrieveData(), lumi::TRGDummy2DB.retrieveData(), lumi::LumiDummy2DB.retrieveData(), lumi::Lumi2DB.retrieveData(), l1t::WriterProxyT< Record, Type >.save(), cond::CredentialStore.selectForUser(), cond::CredentialStore.selectPermissions(), cond::CredentialStore.setPermission(), cond::CredentialStore.unsetPermission(), 
cond::CredentialStore.updateConnection(), cond::CredentialStore.updatePrincipal(), l1t::DataWriterExt.writeKeyList(), and l1t::DataWriter.writeKeyList().

models.tags_to_use

Definition at line 226 of file models.py.