models.py
1 """
2 
3 Using Audrius' models from flask browser.
4 
5 This file contains models that are used with SQLAlchemy.
6 
7 Note: some things done in methods written in classes rely on the querying module adding extra information to classes,
8  so these will not work in a normal context outside the framework.
9 
10 """
import json
import datetime
from sqlalchemy.orm import relationship, backref
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy import Column, String, Integer, DateTime, ForeignKey, and_
import data_sources, data_formats
import urllib, urllib2, base64
from copy import deepcopy

def to_timestamp(obj):
    return obj.strftime('%Y-%m-%d %H:%M:%S,%f') if isinstance(obj, datetime.datetime) else obj

def status_full_name(status):
    full_status = {
        'P': 'Pending',
        'R': 'Rejected',
        'A': 'Accepted'
    }
    return full_status[status]

def date_args_to_days(**radius):
    # collapse the days/weeks/months/years keyword arguments into a single number of days
    days = radius.get("days") if radius.get("days") != None else 0
    days += radius.get("weeks")*7 if radius.get("weeks") != None else 0
    days += radius.get("months")*28 if radius.get("months") != None else 0
    days += radius.get("years")*365 if radius.get("years") != None else 0
    return days

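# A quick illustration of date_args_to_days, using the 7-day week, 28-day month
# and 365-day year approximations above (values purely illustrative):
#   date_args_to_days(days=1, weeks=2)   -> 1 + 2*7       = 15
#   date_args_to_days(months=1, years=1) -> 1*28 + 1*365  = 393
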
def generate():

    Base = declarative_base()

    class GlobalTag(Base):
        __tablename__ = 'GLOBAL_TAG'

        headers = ["name", "validity", "description", "release", "insertion_time", "snapshot_time", "scenario", "workflow", "type"]

        name = Column(String(100), unique=True, nullable=False, primary_key=True)
        validity = Column(Integer, nullable=False)
        description = Column(String(4000), nullable=False)
        release = Column(String(100), nullable=False)
        insertion_time = Column(DateTime, nullable=False)
        snapshot_time = Column(DateTime, nullable=False)
        scenario = Column(String(100))
        workflow = Column(String(100))
        type = Column(String(1))
        tag_map = relationship('GlobalTagMap', backref='global_tag')

        def __init__(self, dictionary={}, convert_timestamps=True):
            # assign each entry of the dictionary as an attribute on this object
            for key in dictionary:
                try:
                    if convert_timestamps:
                        self.__dict__[key] = to_timestamp(dictionary[key])
                    else:
                        self.__dict__[key] = dictionary[key]
                except KeyError as k:
                    continue

        def __repr__(self):
            return '<GlobalTag %r>' % self.name

        def as_dicts(self):
            json_gt = {
                'name': self.name,
                'validity': self.validity,
                'description': self.description,
                'release': self.release,
                'insertion_time': self.insertion_time,
                'snapshot_time': self.snapshot_time,
                'scenario': self.scenario,
                'workflow': self.workflow,
                'type': self.type
            }
            return json_gt

        def to_array(self):
            return [self.name, self.release, to_timestamp(self.insertion_time), to_timestamp(self.snapshot_time), self.description]

        @staticmethod
        def to_datatables(global_tags):
            gt_data = {
                'headers': ['Global Tag', 'Release', 'Insertion Time', 'Snapshot Time', 'Description'],
                'data': [ g.to_array() for g in global_tags ],
            }
            return gt_data
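        # Shape of the dictionary returned by to_datatables (values purely illustrative):
        # {
        #     'headers': ['Global Tag', 'Release', 'Insertion Time', 'Snapshot Time', 'Description'],
        #     'data': [['SOME_GT', 'CMSSW_X_Y_Z', '2015-01-01 12:00:00,0', '2015-01-02 12:00:00,0', 'example description']]
        # }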

        # get all global tags
        def all(self, amount=10):
            gts = data_sources.json_data_node.make(self.session.query(GlobalTag).order_by(GlobalTag.name).limit(amount).all())
            return gts

        def tags(self, amount=10):
            """gt_maps = self.session.query(GlobalTagMap).filter(GlobalTagMap.global_tag_name == self.name).limit(amount).subquery()
            all_tags = self.session.query(gt_maps.c.record, gt_maps.c.label,\
                                            Tag.name, Tag.time_type, Tag.object_type,\
                                            Tag.synchronization, Tag.end_of_validity, Tag.description,\
                                            Tag.last_validated_time, Tag.insertion_time,\
                                            Tag.modification_time)\
                            .join(gt_maps, Tag.name == gt_maps.c.tag_name).order_by(Tag.name.asc()).limit(amount).all()"""
            all_tags = self.session.query(GlobalTagMap.global_tag_name, GlobalTagMap.record, GlobalTagMap.label, GlobalTagMap.tag_name)\
                            .filter(GlobalTagMap.global_tag_name == self.name)\
                            .order_by(GlobalTagMap.tag_name).limit(amount).all()
            column_names = ["global_tag_name", "record", "label", "tag_name"]
            all_tags = map(lambda row : dict(zip(column_names, map(to_timestamp, row))), all_tags)
            all_tags = data_formats._dicts_to_orm_objects(GlobalTagMap, all_tags)
            return data_sources.json_data_node.make(all_tags)

        # inefficient
        def tags_full(self, amount=10):
            tags = self.session.query(Tag).order_by(Tag.name).subquery()
            all_tags = self.session.query(GlobalTagMap.global_tag_name,\
                                            GlobalTagMap.record,\
                                            GlobalTagMap.label,\
                                            tags.c.name, tags.c.time_type, tags.c.object_type,\
                                            tags.c.synchronization, tags.c.end_of_validity, tags.c.description,\
                                            tags.c.last_validated_time, tags.c.insertion_time,\
                                            tags.c.modification_time)\
                            .join(tags, GlobalTagMap.tag_name == tags.c.name).filter(GlobalTagMap.global_tag_name == self.name)
            if amount != None:
                all_tags = all_tags.limit(amount)
            all_tags = all_tags.all()
            column_names = ["global_tag_name", "record", "label", "name", "time_type", "object_type", "synchronization",\
                            "end_of_validity", "description", "last_validated_time", "insertion_time", "modification_time"]
            all_tags = map(lambda row : dict(zip(column_names, map(to_timestamp, row))), all_tags)
            all_tags = data_formats._dicts_to_orm_objects(Tag, all_tags)
            return data_sources.json_data_node.make(all_tags)

        # insertion_time is a datetime.datetime object, radius the time to add on each side
        # note radius is a dictionary of keyword arguments (days/weeks/months/years) that is converted
        # into a day count for datetime.timedelta
        def insertion_time_interval(self, insertion_time, **radius):
            # convert all arguments in radius into a day count
            # may need to change this to add the number of days in each month in the interval
            days = date_args_to_days(**radius)
            minus = insertion_time - datetime.timedelta(days=days)
            plus = insertion_time + datetime.timedelta(days=days)
            gts = self.session.query(GlobalTag).filter(and_(GlobalTag.insertion_time >= minus, GlobalTag.insertion_time <= plus)).order_by(GlobalTag.name).all()
            return data_sources.json_data_node.make(gts)

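        # Usage sketch for the interval helpers (illustrative only; assumes an instance with a
        # session attached by the querying framework):
        #   gt.insertion_time_interval(datetime.datetime(2015, 7, 1), weeks=2)
        # selects global tags inserted within 14 days on either side of 1 July 2015;
        # snapshot_time_interval below behaves the same way for snapshot_time.
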
        def snapshot_time_interval(self, snapshot_time, **radius):
            days = date_args_to_days(**radius)
            minus = snapshot_time - datetime.timedelta(days=days)
            plus = snapshot_time + datetime.timedelta(days=days)
            gts = self.session.query(GlobalTag).filter(and_(GlobalTag.snapshot_time >= minus, GlobalTag.snapshot_time <= plus)).order_by(GlobalTag.name).all()
            return data_sources.json_data_node.make(gts)

        # gets all iovs belonging to this global tag with insertion times <= this global tag's snapshot time
        def iovs(self, amount=10, valid=False):
            # join global_tag_map onto iov (where insertion time <= gt snapshot) by tag_name + return results
            valid_iovs_all_tags = self.session.query(IOV)
            if valid:
                valid_iovs_all_tags = valid_iovs_all_tags.filter(IOV.insertion_time < self.snapshot_time)
            valid_iovs_all_tags = valid_iovs_all_tags.subquery()
            valid_iovs_gt_tags = self.session.query(GlobalTagMap.tag_name, valid_iovs_all_tags.c.since,\
                                                    valid_iovs_all_tags.c.payload_hash, valid_iovs_all_tags.c.insertion_time)\
                                    .join(valid_iovs_all_tags, GlobalTagMap.tag_name == valid_iovs_all_tags.c.tag_name)\
                                    .filter(GlobalTagMap.global_tag_name == self.name)\
                                    .order_by(valid_iovs_all_tags.c.insertion_time).limit(amount).all()
            column_names = ["tag_name", "since", "payload_hash", "insertion_time"]
            all_iovs = map(lambda row : dict(zip(column_names, map(to_timestamp, row))), valid_iovs_gt_tags)
            all_iovs = data_formats._dicts_to_orm_objects(IOV, all_iovs)
            return data_sources.json_data_node.make(all_iovs)

        def pending_tag_requests(self):
            if self.empty:
                return None
            # get a json_list of all global_tag_map requests associated with this global tag
            gt_map_requests = self.session.query(GlobalTagMapRequest.queue, GlobalTagMapRequest.record, GlobalTagMapRequest.label,\
                                                    GlobalTagMapRequest.tag, GlobalTagMapRequest.status)\
                                .filter(and_(GlobalTagMapRequest.queue == self.name, GlobalTagMapRequest.status.in_(["P", "R"]))).all()
            #column_names = ["queue", "tag", "record", "label", "status", "description", "submitter_id", "time_submitted", "last_edited"]
            column_names = ["queue", "record", "label", "tag", "status"]
            gt_map_requests = map(lambda row : dict(zip(column_names, map(to_timestamp, row))), gt_map_requests)
            gt_map_requests = data_formats._dicts_to_orm_objects(GlobalTagMapRequest, gt_map_requests)
            return data_sources.json_data_node.make(gt_map_requests)

        # creates and returns a new candidate object
        def candidate(self, gt_map_requests):
            if self.empty:
                return None
            new_candidate = Candidate(self, gt_map_requests)
            return new_candidate
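        # Illustrative flow (assumes a queue-type GlobalTag bound to a session by the
        # querying framework), combining the methods defined in this module:
        #   requests = queue.pending_tag_requests()
        #   cand = queue.candidate(requests)
        #   if cand != None:
        #       cand.cut()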

    # not an ORM class, but corresponds to a table
    class Candidate():
        global_tag_object = None
        tags_to_use = None
        authentication = None

        def __init__(self, queue, gt_map_requests):

            self.session = queue.session
            self.authentication = queue.authentication

            # verify that queue is in fact a queue
            if queue.type != "Q":
                return None
            else:
                self.global_tag_object = queue

            # validate the list of tags - make sure it contains unique (record, label) pairs
            found_record_label_pairs = []
            # whether gt_map_requests is a plain list or a json_list, it is iterable
            for gt_map in gt_map_requests:
                if (gt_map.record, gt_map.label) in found_record_label_pairs:
                    # reset iterator before we return
                    if gt_map_requests.__class__.__name__ == "json_list":
                        gt_map_requests.reset()
                    return None
                else:
                    found_record_label_pairs.append((gt_map.record, gt_map.label))
            # reset iterator
            if gt_map_requests.__class__.__name__ == "json_list":
                gt_map_requests.reset()

            # if we're here, the tags list is valid
            self.tags_to_use = gt_map_requests

        # write the candidate to the database, and catch any errors
        # Note: errors may be thrown if the user does not have write permissions for the database
        def cut(self):
            CANDIDATE_TIME_FORMAT = "%Y_%m_%d_%H_%M_%S"
            TIME_FORMAT = "%Y-%m-%d %H:%M:%S"
            # send a post request to the dbAccess service to write the new candidate
            candidate_name = self.global_tag_object.name.replace("Queue", "Candidate")
            candidate_name += "_%s" % datetime.datetime.now().strftime(CANDIDATE_TIME_FORMAT)
            time_now = datetime.datetime.now().strftime(TIME_FORMAT)
            candidate_release = self.global_tag_object.release
            candidate_description = "Candidate created from the queue: '%s' at: '%s'" % (self.global_tag_object.name, time_now)

            extra_records = data_formats._objects_as_dicts(self.tags_to_use)
            for record in extra_records:
                for key in ["submitter_id", "description", "time_submitted", "last_edited"]:
                    del record[key]

            params = {
                "c_name" : candidate_name,
                "snapshot_time" : time_now,
                "from_gt" : self.global_tag_object.name,
                "release" : candidate_release,
                "desc" : candidate_description,
                "validity" : 18446744073709551615,
                "extra_records" : json.dumps(extra_records.data())
            }

            # send http request to dbAccess
            # get username and password from netrc
            credentials = self.authentication.authenticators("dbAccess")
            print(credentials)
            #headers = {"user":credentials[0], "password":credentials[2]}

            auth = base64.encodestring("%s:%s" % (credentials[0], credentials[1])).replace('\n', '')
            print(auth)

            params = urllib.urlencode(params)
            print(params)

            # send http request to dbAccess once the requests library is installed in cmssw
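            # Illustrative sketch only (commented out, not executed): once an endpoint is
            # available, the POST could be sent with urllib2, which is already imported,
            # reusing the basic-auth string built above. "service_url" is a placeholder,
            # not the real dbAccess address.
            #
            #   request = urllib2.Request(service_url, data=params)
            #   request.add_header("Authorization", "Basic %s" % auth)
            #   response = urllib2.urlopen(request)
            #   print(response.read())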

    class GlobalTagMap(Base):
        __tablename__ = 'GLOBAL_TAG_MAP'

        headers = ["global_tag_name", "record", "label", "tag_name"]

        global_tag_name = Column(String(100), ForeignKey('GLOBAL_TAG.name'), primary_key=True, nullable=False)
        record = Column(String(100), ForeignKey('RECORDS.record'), primary_key=True, nullable=False)
        label = Column(String(100), primary_key=True, nullable=False)
        tag_name = Column(String(100), ForeignKey('TAG.name'), nullable=False)

        def __init__(self, dictionary={}, convert_timestamps=True):
            # assign each entry of the dictionary as an attribute on this object
            for key in dictionary:
                try:
                    if convert_timestamps:
                        self.__dict__[key] = to_timestamp(dictionary[key])
                    else:
                        self.__dict__[key] = dictionary[key]
                except KeyError as k:
                    continue

        def __repr__(self):
            return '<GlobalTagMap %r>' % self.global_tag_name

        def as_dicts(self):
            json_gtm = {
                "global_tag_name" : str(self.global_tag_name),
                "record" : str(self.record),
                "label" : str(self.label),
                "tag_name" : str(self.tag_name)
            }
            return json_gtm


    class GlobalTagMapRequest(Base):
        __tablename__ = 'GLOBAL_TAG_MAP_REQUEST'

        queue = Column(String(100), primary_key=True, nullable=False)
        tag = Column(String(100), ForeignKey('TAG.name'), primary_key=True, nullable=False)
        record = Column(String(100), ForeignKey('RECORDS.record'), primary_key=True, nullable=False)
        label = Column(String(100), primary_key=True, nullable=False)
        status = Column(String(1), nullable=False)
        description = Column(String(4000), nullable=False)
        submitter_id = Column(Integer, nullable=False)
        time_submitted = Column(DateTime, nullable=False)
        last_edited = Column(DateTime, nullable=False)

        def __init__(self, dictionary={}, convert_timestamps=True):
            # assign each entry of the dictionary as an attribute on this object
            for key in dictionary:
                try:
                    if convert_timestamps:
                        self.__dict__[key] = to_timestamp(dictionary[key])
                    else:
                        self.__dict__[key] = dictionary[key]
                except KeyError as k:
                    continue

        headers = ["queue", "tag", "record", "label", "status", "description", "submitter_id", "time_submitted", "last_edited"]

        def as_dicts(self):
            return {
                "queue" : self.queue,
                "tag" : self.tag,
                "record" : self.record,
                "label" : self.label,
                "status" : self.status,
                "description" : self.description,
                "submitter_id" : self.submitter_id,
                "time_submitted" : self.time_submitted,
                "last_edited" : self.last_edited
            }

        def __repr__(self):
            return '<GlobalTagMapRequest %r>' % self.queue

        def to_array(self):
            return [self.queue, self.tag, self.record, self.label, status_full_name(self.status), to_timestamp(self.time_submitted), to_timestamp(self.last_edited)]

        @staticmethod
        def to_datatables(requests):
            user_requests = {
                'headers': ['Queue', 'Tag', 'Record', 'Label', 'Status', 'Submitted', 'Modified'],
                'data': [ r.to_array() for r in requests ],
            }
            return user_requests

    class IOV(Base):
        __tablename__ = 'IOV'

        headers = ["tag_name", "since", "payload_hash", "insertion_time"]

        tag_name = Column(String(4000), ForeignKey('TAG.name'), primary_key=True, nullable=False)
        since = Column(Integer, primary_key=True, nullable=False)
        payload_hash = Column(String(40), ForeignKey('PAYLOAD.hash'), primary_key=True, nullable=False)
        insertion_time = Column(DateTime, nullable=False)

        def __init__(self, dictionary={}, convert_timestamps=True):
            # assign each entry of the dictionary as an attribute on this object
            for key in dictionary:
                try:
                    if convert_timestamps:
                        self.__dict__[key] = to_timestamp(dictionary[key])
                    else:
                        self.__dict__[key] = dictionary[key]
                except KeyError as k:
                    continue

        def as_dicts(self):
            return {
                "tag_name" : self.tag_name,
                "since" : self.since,
                "payload_hash" : self.payload_hash,
                "insertion_time" : self.insertion_time
            }

        def __repr__(self):
            return '<IOV %r>' % self.tag_name

        def to_array(self):
            return [self.since, to_timestamp(self.insertion_time), self.payload_hash]

        @staticmethod
        def to_datatables(iovs):
            iovs_data = {
                'headers': ['Since', 'Insertion Time', 'Payload'],
                'data': [ i.to_array() for i in iovs ],
            }
            return iovs_data

        def all(self, amount=10):
            return data_sources.json_data_node.make(self.session.query(IOV).order_by(IOV.tag_name).limit(amount).all())


    class Payload(Base):
        __tablename__ = 'PAYLOAD'

        headers = ["hash", "object_type", "version", "insertion_time"]

        hash = Column(String(40), primary_key=True, nullable=False)
        object_type = Column(String(4000), nullable=False)
        version = Column(String(4000), nullable=False)
        insertion_time = Column(DateTime, nullable=False)

        def __init__(self, dictionary={}, convert_timestamps=True):
            # assign each entry of the dictionary as an attribute on this object
            for key in dictionary:
                try:
                    if convert_timestamps:
                        self.__dict__[key] = to_timestamp(dictionary[key])
                    else:
                        self.__dict__[key] = dictionary[key]
                except KeyError as k:
                    continue

        def as_dicts(self):
            return {
                "hash" : self.hash,
                "object_type" : self.object_type,
                "version" : self.version,
                "insertion_time" : self.insertion_time
            }

        def __repr__(self):
            return '<Payload %r>' % self.hash

        def to_array(self):
            return [self.hash, self.object_type, self.version, to_timestamp(self.insertion_time)]

        @staticmethod
        def to_datatables(payloads):
            payloads_data = {
                'headers': ["Payload", "Object Type", "Version", "Insertion Time"],
                'data': [ p.to_array() for p in payloads ],
            }
            return payloads_data

        def parent_tags(self):
            # check if this payload is empty
            if self.empty:
                return None
            else:
                tag_names = map(lambda entry : entry[0],\
                                self.session.query(IOV.tag_name).filter(IOV.payload_hash == self.hash).all())
                tags = self.session.query(Tag).filter(Tag.name.in_(tag_names)).order_by(Tag.name).all()
                return data_sources.json_data_node.make(tags)

        def all(self, amount=10):
            return data_sources.json_data_node.make(self.session.query(Payload).order_by(Payload.hash).limit(amount).all())


    class Record(Base):
        __tablename__ = 'RECORDS'

        headers = ["record", "object", "type"]

        record = Column(String(100), primary_key=True, nullable=False)
        object = Column(String(200), nullable=False)
        type = Column(String(20), nullable=False)

        def as_dicts(self):
            return {
                "record" : self.record,
                "object" : self.object,
                "type" : self.type
            }

        def __repr__(self):
            return '<Record %r>' % self.record

        def to_array(self):
            return [self.record, self.object]

        @staticmethod
        def to_datatables(records):
            records_data = {
                'headers': ["Record", "Object"],
                'data': [ r.to_array() for r in records ],
            }
            return records_data

        def all(self, amount=10):
            return data_sources.json_data_node.make(self.session.query(Record).order_by(Record.record).limit(amount).all())


    class RecordReleases(Base):
        __tablename__ = 'RECORD_RELEASES'

        record = Column(String(100), ForeignKey('RECORDS.record'), nullable=False)
        release_cycle = Column(String(100), primary_key=True, nullable=False)
        release = Column(String(100), nullable=False)
        release_int = Column(String(100), nullable=False)

        def as_dicts(self):
            return {
                "release_cycle" : self.release_cycle,
                "release" : self.release,
                "release_int" : self.release_int
            }

        def __repr__(self):
            return '<RecordReleases %r>' % self.record

        def to_array(self):
            return [self.release_cycle, self.release, self.release_int]

        @staticmethod
        def to_datatables(recordReleases):
            record_releases_data = {
                'headers': ["Release Cycle", "Starting Release", "Starting Release Number"],
                'data': [ r.to_array() for r in recordReleases ],
            }
            return record_releases_data


    class ParsedReleases(Base):
        __tablename__ = 'PARSED_RELEASES'

        release_cycle = Column(String(100), primary_key=True, nullable=False)
        release = Column(String(100), nullable=False)
        release_int = Column(String(100), nullable=False)

        def as_dicts(self):
            return {
                "release_cycle" : self.release_cycle,
                "release" : self.release,
                "release_int" : self.release_int
            }

        def __repr__(self):
            return '<ParsedReleases %r>' % self.release_cycle

        def to_array(self):
            return [self.release_cycle, self.release, self.release_int]

        @staticmethod
        def to_datatables(parsedReleases):
            parsed_releases_data = {
                'headers': ["Release Cycle", "Starting Release", "Starting Release Number"],
                'data': [ p.to_array() for p in parsedReleases ],
            }
            return parsed_releases_data


    class Tag(Base):
        __tablename__ = 'TAG'

        headers = ["name", "time_type", "object_type", "synchronization", "end_of_validity",\
                    "description", "last_validated_time", "insertion_time", "modification_time"]

        name = Column(String(4000), primary_key=True, nullable=False)
        time_type = Column(String(4000), nullable=False)
        object_type = Column(String(4000), nullable=False)
        synchronization = Column(String(4000), nullable=False)
        end_of_validity = Column(Integer, nullable=False)
        description = Column(String(4000), nullable=False)
        last_validated_time = Column(Integer, nullable=False)
        insertion_time = Column(DateTime, nullable=False)
        modification_time = Column(DateTime, nullable=False)

        record = None
        label = None

        def __init__(self, dictionary={}, convert_timestamps=True):
            # assign each entry of the dictionary as an attribute on this object
            for key in dictionary:
                try:
                    if convert_timestamps:
                        self.__dict__[key] = to_timestamp(dictionary[key])
                    else:
                        self.__dict__[key] = dictionary[key]
                except KeyError as k:
                    continue

        def as_dicts(self):
            return {
                "name" : self.name,
                "time_type" : self.time_type,
                "object_type" : self.object_type,
                "synchronization" : self.synchronization,
                "end_of_validity" : self.end_of_validity,
                "description" : self.description,
                "last_validated_time" : self.last_validated_time,
                "insertion_time" : self.insertion_time,
                "modification_time" : self.modification_time,
                "record" : self.record,
                "label" : self.label
            }

        def __repr__(self):
            return '<Tag %r>' % self.name

        def to_array(self):
            return [self.name, self.time_type, self.object_type, self.synchronization, to_timestamp(self.insertion_time), self.description]

        @staticmethod
        def to_datatables(tags):
            tags_data = {
                'headers': ["Tag", "Time Type", "Object Type", "Synchronization", "Insertion Time", "Description"],
                'data': [ t.to_array() for t in tags ],
            }
            return tags_data

        def parent_global_tags(self):
            if self.empty:
                return None
            else:
                global_tag_names = map(lambda entry : entry[0], self.session.query(GlobalTagMap.global_tag_name).filter(GlobalTagMap.tag_name == self.name).all())
                if len(global_tag_names) != 0:
                    global_tags = self.session.query(GlobalTag).filter(GlobalTag.name.in_(global_tag_names)).order_by(GlobalTag.name).all()
                else:
                    global_tags = []
                return data_sources.json_data_node.make(global_tags)

        def all(self, amount=10):
            return data_sources.json_data_node.make(self.session.query(Tag).order_by(Tag.name).limit(amount).all())

        def insertion_time_interval(self, insertion_time, **radius):
            days = date_args_to_days(**radius)
            minus = insertion_time - datetime.timedelta(days=days)
            plus = insertion_time + datetime.timedelta(days=days)
            tags = self.session.query(Tag).filter(and_(Tag.insertion_time >= minus, Tag.insertion_time <= plus)).order_by(Tag.name).all()
            return data_sources.json_data_node.make(tags)

        def modification_time_interval(self, modification_time, **radius):
            days = date_args_to_days(**radius)
            minus = modification_time - datetime.timedelta(days=days)
            plus = modification_time + datetime.timedelta(days=days)
            tags = self.session.query(Tag).filter(and_(Tag.modification_time >= minus, Tag.modification_time <= plus)).order_by(Tag.name).all()
            return data_sources.json_data_node.make(tags)

        # Note: setting pretty to true changes the return type of the method
        def iovs(self, pretty=False):
            # get iovs in this tag
            iovs = self.session.query(IOV).filter(IOV.tag_name == self.name).all()
            if pretty:
                iovs = data_formats._objects_to_dicts(iovs).data()
                for n in range(0, len(iovs)):
                    iovs[n]["since"] = "{:>6}".format(str(iovs[n]["since"])) + " - " + ("{:<6}".format(str(iovs[n+1]["since"]-1)) if n != len(iovs)-1 else "")

            return data_sources.json_data_node.make(iovs)

    return {"globaltag" : GlobalTag, "candidate" : Candidate, "globaltagmap" : GlobalTagMap, "globaltagmaprequest" : GlobalTagMapRequest, "iov" : IOV,\
            "payload" : Payload, "record" : Record, "recordreleases" : RecordReleases, "parsedreleases" : ParsedReleases, "tag" : Tag, "Base" : Base}