CMS 3D CMS Logo

models.py
Go to the documentation of this file.
1 """
2 
3 Using Audrius' models from flask browser.
4 
5 This file contains models that are used with SQLAlchemy.
6 
7 Note: some things done in methods written in classes rely on the querying module adding extra information to classes,
8  so these will not work in a normal context outside the framework.
9 
10 """
11 import json
12 import datetime
13 
14 try:
15  import sqlalchemy
16  from sqlalchemy.orm import relationship, backref
17  from sqlalchemy.ext.declarative import declarative_base
18  # Note: Binary is only used for blobs, if they are mapped
19  from sqlalchemy import Column, String, Integer, DateTime, Binary, ForeignKey, BigInteger, and_
20 except ImportError:
21  print("You must be working inside a CMSSW environment. Try running 'cmsenv'.")
22  exit()
23 
import re
import urllib, urllib2, base64
from copy import deepcopy

import data_sources, data_formats

# get utility functions
from utils import to_timestamp, to_datetime, friendly_since
30 
def session_independent_object(object, schema=None):
    """
    Build a copy of *object* that is not attached to any SQLAlchemy session,
    so SQLAlchemy will not track changes made to it.
    (Code originally taken from the write method in querying.)
    """
    # Payload is the only mapped class that may carry blob columns.
    map_blobs = object.blobs_mapped if object.__class__.__name__.lower() == "payload" else False

    # need to change this to only generate the required class - can be slow...
    # derive the lowercase class key from the object's class name
    cls = object.__class__
    class_name = class_name_to_column(cls).lower()
    new_class = generate(map_blobs=map_blobs, class_name=class_name)
    new_class.__table__.schema = schema
    # rebuild from the dictionary form; timestamps are kept as-is
    return new_class(object.as_dicts(), convert_timestamps=False)
49 
def session_independent(objects):
    """
    Detach the given object, or list of objects, from any SQLAlchemy session.

    objects : a single mapped object or a list of them.
    Returns a detached copy (or list of detached copies).
    """
    if isinstance(objects, list):
        # list comprehension instead of map(): identical result in python 2,
        # and still a list (not an iterator) under python 3
        return [session_independent_object(obj) for obj in objects]
    # assume objects is a single object (not a list)
    return session_independent_object(objects)
56 
def class_name_to_column(cls):
    """
    Convert a CamelCase class name into its snake_case table/column form,
    e.g. GlobalTagMap -> global_tag_map.  All-uppercase names (acronyms
    such as IOV) are returned unchanged.
    """
    class_name = cls.__name__
    # bug fix: the original loop overwrote the flag on every character, so only
    # the LAST character was actually tested; str.isupper() checks them all
    if class_name.isupper():
        return class_name
    # walk the original index range; the string grows as underscores are
    # inserted, but later uppercase letters are still reached because each
    # insertion shifts them right by exactly the number of characters added
    for n in range(0, len(class_name)):
        if class_name[n].isupper() and n != 0:
            class_name = class_name[0:n] + "_" + class_name[n].lower() + class_name[n+1:]
        elif class_name[n].isupper() and n == 0:
            class_name = class_name[n].lower() + class_name[n+1:]
    return class_name
70 
def status_full_name(status):
    """
    Expand a single-character status code ('P'/'R'/'A') into its full name.
    Raises KeyError for any other code.
    """
    return {
        'P': 'Pending',
        'R': 'Rejected',
        'A': 'Accepted'
    }[status]
78 
def date_args_to_days(**radius):
    """
    Convert keyword arguments (days, weeks, months, years) into a total
    number of days.  A week counts as 7 days, a month as 28, a year as 365.
    Missing keywords contribute 0.
    """
    # bug fix: previously started from radius.get("days"), which is None when
    # "days" is not given, making the arithmetic below fail (or returning None)
    days = radius.get("days")*1 if radius.get("days") != None else 0
    days += radius.get("weeks")*7 if radius.get("weeks") != None else 0
    days += radius.get("months")*28 if radius.get("months") != None else 0
    # bug fix: was "years + 365" (a typo) - each year contributes 365 days
    days += radius.get("years")*365 if radius.get("years") != None else 0
    return days
85 
class ContainerType(object):
    """
    Base class for Radius and Range - used for type checking by the
    apply_filter function.  Subclasses must set self._start and self._end.
    """

    def __init__(self):
        pass

    def get_start(self):
        # lower bound of the interval
        return self._start

    def get_end(self):
        # upper bound of the interval
        return self._end


class Radius(ContainerType):
    """
    Used to tell proxy methods that a range of values defined by a centre and a radius
    should be queried for - special case of filter clauses.
    """
    def __init__(self, centre, radius):
        """
        centre and radius should be objects that can be added and subtracted.
        eg, centre could be a datetime.datetime object, and radius could be datetime.timedelta

        Radius and Range objects are assigned to properties of querying.connection objects,
        hence are given the database type.
        """
        self._centre = centre
        self._radius = radius
        # precompute the interval bounds used by get_start()/get_end()
        self._start = self._centre - self._radius
        self._end = self._centre + self._radius


class Range(ContainerType):
    """
    Used to tell proxy methods that a range of values defined by a start and end point
    should be queried for - special case of filter clauses.
    """
    def __init__(self, start, end):
        """
        start and end should be comparable objects of the column's type,
        eg datetime.datetime objects or integer sinces.

        Radius and Range objects are assigned to properties of querying.connection objects,
        hence are given the database type.
        """
        self._start = start
        self._end = end
129 
class RegExp(object):
    """
    Used to tell proxy methods that a regular expression should be used to query the column.

    NOTE(review): instances rely on the querying framework attaching
    `database_type` and `connection_object` attributes (see module docstring);
    they are not set in __init__.
    """
    def __init__(self, regexp):
        # the raw regular expression string to match column values against
        self._regexp = regexp

    def get_regexp(self):
        # accessor for the raw regular expression string
        return self._regexp

    def apply(self):
        # uses code from conddb tool
        # NOTE(review): this method looks broken as written - `field` and `regexp`
        # are undefined names (NameError at runtime), and `re` is used without being
        # imported at module level.  apply_filter() below contains a corrected
        # version of this logic using the filter attribute and self.get_regexp().
        if self.database_type in ["oracle", "frontier"]:
            return sqlalchemy.func.regexp_like(field, regexp)
        elif self.database_type == "sqlite":
            # Relies on being a SingletonThreadPool: register a python-level
            # regexp() function on the single sqlite connection
            self.connection_object.engine.pool.connect().create_function('regexp', 2, lambda data, regexp: re.search(regexp, data) is not None)

            return sqlalchemy.func.regexp(field, regexp)
        else:
            # NOTE(review): raising NotImplemented (a constant, not an exception class)
            # is itself a TypeError - this should be NotImplementedError
            raise NotImplemented("Can only apply regular expression search to Oracle, Frontier and SQLite.")
151 
def apply_filter(orm_query, orm_class, attribute, value):
    """
    Apply a single filter clause to an SQLAlchemy query.

    orm_query : the query to extend.
    orm_class : the mapped class whose column `attribute` names.
    value     : dispatched on its exact type -
                list / data_sources.json_list -> IN clause,
                Range / Radius               -> bounded interval,
                RegExp                       -> dialect-specific regexp match,
                anything else                -> equality.
    Returns the extended query.
    """
    filter_attribute = getattr(orm_class, attribute)
    # note: exact type checks (not isinstance) are kept deliberately, to
    # preserve the original dispatch for any subclasses of these types
    if type(value) == list:
        orm_query = orm_query.filter(filter_attribute.in_(value))
    elif type(value) == data_sources.json_list:
        orm_query = orm_query.filter(filter_attribute.in_(value.data()))
    elif type(value) in [Range, Radius]:

        # interval filter: start <= attribute <= end
        minus = value.get_start()
        plus = value.get_end()
        orm_query = orm_query.filter(and_(filter_attribute >= minus, filter_attribute <= plus))

    elif type(value) == RegExp:

        # regular expression filter - dialect dependent
        if value.database_type in ["oracle", "frontier"]:
            regexp = sqlalchemy.func.regexp_like(filter_attribute, value.get_regexp())
        elif value.database_type == "sqlite":
            # Relies on being a SingletonThreadPool: register a python-level
            # regexp() function on the single sqlite connection
            value.connection_object.engine.pool.connect().create_function('regexp', 2, lambda data, regexp: re.search(regexp, data) is not None)
            regexp = sqlalchemy.func.regexp(filter_attribute, value.get_regexp())
        else:
            # bug fix: was "raise NotImplemented(...)" - NotImplemented is a constant,
            # not an exception class, so raising it is itself a TypeError
            raise NotImplementedError("Can only apply regular expression search to Oracle, Frontier and SQLite.")
        orm_query = orm_query.filter(regexp)

    else:
        # plain equality filter
        orm_query = orm_query.filter(filter_attribute == value)
    return orm_query
180 
def apply_filters(orm_query, orm_class, **filters):
    """
    Apply every keyword filter in *filters* to the given query via apply_filter.
    The reserved key "amount" is a row-limit hint, not a column, so it is skipped.
    Returns the extended query.
    """
    for key, value in filters.items():
        if key == "amount":
            # "amount" limits the number of results - it is not a column to filter on
            continue
        orm_query = apply_filter(orm_query, orm_class, key, value)
    return orm_query
186 
187 def generate(map_blobs=False, class_name=None):
188 
189  Base = declarative_base()
190 
191  class GlobalTag(Base):
192  __tablename__ = 'GLOBAL_TAG'
193 
194  headers = ["name", "validity", "description", "release", "insertion_time", "snapshot_time", "scenario", "workflow", "type"]
195 
196  name = Column(String(100), unique=True, nullable=False, primary_key=True)
197  validity = Column(Integer, nullable=False)
198  description = Column(String(4000), nullable=False)
199  release = Column(String(100), nullable=False)
200  insertion_time = Column(DateTime, nullable=False)
201  snapshot_time = Column(DateTime, nullable=False)
202  scenario = Column(String(100))
203  workflow = Column(String(100))
204  type = Column(String(1))
205  tag_map = relationship('GlobalTagMap', backref='global_tag')
206 
207  def __init__(self, dictionary={}, convert_timestamps=True):
208  # assign each entry in a kwargs
209  for key in dictionary:
210  try:
211  if convert_timestamps:
212  self.__dict__[key] = to_timestamp(dictionary[key])
213  else:
214  self.__dict__[key] = dictionary[key]
215  except KeyError as k:
216  continue
217 
218  def __repr__(self):
219  return '<GlobalTag %r>' % self.name
220 
221  def as_dicts(self, convert_timestamps=False):
222  """
223  Returns dictionary form of Global Tag object.
224  """
225  json_gt = {
226  'name': self.name,
227  'validity': self.validity,
228  'description': self.description,
229  'release': self.release,
230  'insertion_time': to_timestamp(self.insertion_time) if convert_timestamps else self.insertion_time,
231  'snapshot_time': to_timestamp(self.snapshot_time) if convert_timestamps else self.snapshot_time,
232  'scenario': self.scenario,
233  'workflow': self.workflow,
234  'type': self.type
235  }
236  return json_gt
237 
238  def to_array(self):
239  return [self.name, self.release, to_timestamp(self.insertion_time), to_timestamp(self.snapshot_time), self.description]
240 
241  def all(self, **kwargs):
242  """
243  Returns `amount` Global Tags ordered by Global Tag name.
244  """
245  query = self.session.query(GlobalTag)
246  query = apply_filters(query, self.__class__, **kwargs)
247  amount = kwargs["amount"] if "amount" in kwargs.keys() else None
248  query_result = query.order_by(GlobalTag.name).limit(amount).all()
249  gts = data_sources.json_data_node.make(query_result)
250  return gts
251 
252  def tags(self, **kwargs):
253  """
254  Returns `amount` *Global Tag Maps* belonging to this Global Tag.
255  """
256  kwargs["global_tag_name"] = self.name
257  all_tags = self.session.query(GlobalTagMap.global_tag_name, GlobalTagMap.record, GlobalTagMap.label, GlobalTagMap.tag_name)
258  all_tags = apply_filters(all_tags, GlobalTagMap, **kwargs)
259  amount = kwargs["amount"] if "amount" in kwargs.keys() else None
260  all_tags = all_tags.order_by(GlobalTagMap.tag_name).limit(amount).all()
261  column_names = ["global_tag_name", "record", "label", "tag_name"]
262  all_tags = map(lambda row : dict(zip(column_names, map(to_timestamp, row))), all_tags)
263  all_tags = data_formats._dicts_to_orm_objects(GlobalTagMap, all_tags)
264  return data_sources.json_data_node.make(all_tags)
265 
266  def iovs(self, **kwargs):
267  """
268  Returns `amount` IOVs belonging to all Tags held in this Global Tag.
269  For large Global Tags (which is most of them), VERY slow.
270  Highly recommended to instead used `tags().get_members("tag_name").data()` to get a `list` of tag names,
271  and then get IOVs from each Tag name.
272 
273  At some point, this method may replace the method currently used.
274  """
275  # join global_tag_map onto iov (where insertion time <= gt snapshot) by tag_name + return results
276  # first get only the IOVs that belong to Tags that are contained by this Global Tag
277 
278  # get IOVs belonging to a Tag contained by this Global Tag
279  tag_names = self.tags().get_members("tag_name").data()
280  iovs_all_tags = self.session.query(IOV).filter(IOV.tag_name.in_(tag_names))
281  iovs_all_tags = apply_filters(iovs_all_tags, IOV, **kwargs)
282  amount = kwargs["amount"] if "amount" in kwargs.keys() else None
283  iovs_all_tags = iovs_all_tags.limit(amount).subquery()
284 
285  # now, join Global Tag Map table onto IOVs
286  iovs_gt_tags = self.session.query(GlobalTagMap.tag_name, iovs_all_tags.c.since,\
287  iovs_all_tags.c.payload_hash, iovs_all_tags.c.insertion_time)\
288  .filter(GlobalTagMap.global_tag_name == self.name)\
289  .join(iovs_all_tags, GlobalTagMap.tag_name == iovs_all_tags.c.tag_name)
290 
291  iovs_gt_tags = iovs_gt_tags.order_by(iovs_all_tags.c.since).all()
292 
293  column_names = ["tag_name", "since", "payload_hash", "insertion_time"]
294  all_iovs = map(lambda row : dict(zip(column_names, row)), iovs_gt_tags)
295  all_iovs = data_formats._dicts_to_orm_objects(IOV, all_iovs)
296 
297  return data_sources.json_data_node.make(all_iovs)
298 
299  def __sub__(self, other):
300  """
301  Allows Global Tag objects to be used with the "-" arithmetic operator to find their difference.
302  Note: gt1 - gt2 = gt1.diff(gt2) ( = gt2 - gt1 = gt2.diff(gt1))
303  """
304  return self.diff(other)
305 
306  def diff(self, gt):
307  """
308  Returns the json_list of differences in the form of tuples:
309 
310  (record, label, tag name of gt1 (self), tag name of gt2 (gt))
311  """
312 
313  record_label_to_tag_name1 = dict([((gt_map.record, gt_map.label), gt_map.tag_name) for gt_map in self.tags().data()])
314  record_label_to_tag_name2 = dict([((gt_map.record, gt_map.label), gt_map.tag_name) for gt_map in gt.tags().data()])
315 
316  record_label_pairs = sorted(set(record_label_to_tag_name1) | set(record_label_to_tag_name2))
317 
318  table = []
319  tags_pairs_with_differences = []
320 
321  for record_label in record_label_pairs:
322  tag_name1 = record_label_to_tag_name1.get(record_label)
323  tag_name2 = record_label_to_tag_name2.get(record_label)
324 
325  if tag_name1 == None or tag_name2 == None or tag_name1 != tag_name2:
326  table.append({
327  "Record" : record_label[0],
328  "Label" : record_label[1],
329  ("%s Tag" % self.name) : tag_name1,
330  ("%s Tag" % gt.name) : tag_name2
331  })
332 
334 
    class GlobalTagMap(Base):
        """
        Declarative mapping of the GLOBAL_TAG_MAP table: associates the
        (record, label) pair of a Global Tag with the name of a Tag.
        """
        __tablename__ = 'GLOBAL_TAG_MAP'

        # column order used when rendering query results as tables
        headers = ["global_tag_name", "record", "label", "tag_name"]

        global_tag_name = Column(String(100), ForeignKey('GLOBAL_TAG.name'), primary_key=True, nullable=False)
        record = Column(String(100), ForeignKey('RECORDS.record'), primary_key=True, nullable=False)
        label = Column(String(100), primary_key=True, nullable=False)
        tag_name = Column(String(100), ForeignKey('TAG.name'), nullable=False)

        def __init__(self, dictionary={}, convert_timestamps=True):
            """
            Populate attributes from *dictionary*; when convert_timestamps is
            True, values are passed through to_timestamp().
            """
            # assign each entry in a kwargs
            for key in dictionary:
                try:
                    if convert_timestamps:
                        # NOTE(review): to_timestamp is applied to every value, not only
                        # datetimes - presumably non-date values pass through; confirm in utils
                        self.__dict__[key] = to_timestamp(dictionary[key])
                    else:
                        self.__dict__[key] = dictionary[key]
                except KeyError as k:
                    # skip values that fail conversion
                    continue

        def __repr__(self):
            return '<GlobalTagMap %r>' % self.global_tag_name

        def as_dicts(self, convert_timestamps=False):
            """
            Returns dictionary form of this Global Tag Map.
            (convert_timestamps is unused here - kept for interface consistency
            with the other mapped classes, which all have date columns.)
            """
            json_gtm = {
                "global_tag_name" : str(self.global_tag_name),
                "record" : str(self.record),
                "label" : str(self.label),
                "tag_name" : str(self.tag_name)
            }
            return json_gtm
370 
371 
    class GlobalTagMapRequest(Base):
        """
        Declarative mapping of the GLOBAL_TAG_MAP_REQUEST table: a queued
        request to associate a Tag with a (record, label) pair, carrying a
        single-character approval status (see status_full_name).
        """
        __tablename__ = 'GLOBAL_TAG_MAP_REQUEST'

        queue = Column(String(100), primary_key=True, nullable=False)
        tag = Column(String(100), ForeignKey('TAG.name'), primary_key=True, nullable=False)
        record = Column(String(100), ForeignKey('RECORDS.record'), primary_key=True, nullable=False)
        label = Column(String(100), primary_key=True, nullable=False)
        status = Column(String(1), nullable=False)
        description = Column(String(4000), nullable=False)
        submitter_id = Column(Integer, nullable=False)
        time_submitted = Column(DateTime, nullable=False)
        last_edited = Column(DateTime, nullable=False)

        def __init__(self, dictionary={}, convert_timestamps=True):
            """
            Populate attributes from *dictionary*; when convert_timestamps is
            True, values are passed through to_timestamp().
            """
            # assign each entry in a kwargs
            for key in dictionary:
                try:
                    if convert_timestamps:
                        # NOTE(review): to_timestamp is applied to every value, not only
                        # datetimes - presumably non-date values pass through; confirm in utils
                        self.__dict__[key] = to_timestamp(dictionary[key])
                    else:
                        self.__dict__[key] = dictionary[key]
                except KeyError as k:
                    # skip values that fail conversion
                    continue

        # column order used when rendering query results as tables
        headers = ["queue", "tag", "record", "label", "status", "description", "submitter_id", "time_submitted", "last_edited"]

        def as_dicts(self):
            """
            Returns dictionary form of this Global Tag Map Request.
            """
            return {
                "queue" : self.queue,
                "tag" : self.tag,
                "record" : self.record,
                "label" : self.label,
                "status" : self.status,
                "description" : self.description,
                "submitter_id" : self.submitter_id,
                "time_submitted" : self.time_submitted,
                "last_edited" : self.last_edited
            }

        def __repr__(self):
            return '<GlobalTagMapRequest %r>' % self.queue

        def to_array(self):
            # row form for table rendering; status is expanded to its full name,
            # and description/submitter_id are omitted
            return [self.queue, self.tag, self.record, self.label, status_full_name(self.status), to_timestamp(self.time_submitted), to_timestamp(self.last_edited)]
419 
    class IOV(Base):
        """
        Declarative mapping of the IOV (interval of validity) table: attaches a
        Payload (by hash) to a Tag from a given `since` onwards.
        """
        __tablename__ = 'IOV'

        # column order used when rendering query results as tables
        headers = ["tag_name", "since", "payload_hash", "insertion_time"]

        tag_name = Column(String(4000), ForeignKey('TAG.name'), primary_key=True, nullable=False)
        since = Column(Integer, primary_key=True, nullable=False)
        payload_hash = Column(String(40), ForeignKey('PAYLOAD.hash'), nullable=False)
        insertion_time = Column(DateTime, primary_key=True, nullable=False)

        def __init__(self, dictionary={}, convert_timestamps=True):
            """
            Populate attributes from *dictionary*; when convert_timestamps is
            True, values are passed through to_timestamp().
            """
            # assign each entry in a kwargs
            for key in dictionary:
                try:
                    if convert_timestamps:
                        # NOTE(review): to_timestamp is applied to every value, not only
                        # datetimes - presumably non-date values pass through; confirm in utils
                        self.__dict__[key] = to_timestamp(dictionary[key])
                    else:
                        self.__dict__[key] = dictionary[key]
                except KeyError as k:
                    # skip values that fail conversion
                    continue

        def as_dicts(self, convert_timestamps=False):
            """
            Returns dictionary form of this IOV.
            """
            return {
                "tag_name" : self.tag_name,
                "since" : self.since,
                "payload_hash" : self.payload_hash,
                "insertion_time" : to_timestamp(self.insertion_time) if convert_timestamps else self.insertion_time
            }

        def __repr__(self):
            return '<IOV %r>' % self.tag_name

        def to_array(self):
            # row form used for table rendering (tag_name omitted)
            return [self.since, to_timestamp(self.insertion_time), self.payload_hash]

        def all(self, **kwargs):
            """
            Returns `amount` IOVs ordered by since.
            """
            # self.session is attached to the class by the querying framework
            # (see module docstring)
            query = self.session.query(IOV)
            query = apply_filters(query, IOV, **kwargs)
            amount = kwargs["amount"] if "amount" in kwargs.keys() else None
            # ordered by tag_name first, then since within each tag
            query_result = query.order_by(IOV.tag_name).order_by(IOV.since).limit(amount).all()
            return data_sources.json_data_node.make(query_result)
467 
468 
469  class Payload(Base):
470  __tablename__ = 'PAYLOAD'
471 
472  headers = ["hash", "object_type", "version", "insertion_time"]
473 
474  hash = Column(String(40), primary_key=True, nullable=False)
475  object_type = Column(String(4000), nullable=False)
476  version = Column(String(4000), nullable=False)
477  insertion_time = Column(DateTime, nullable=False)
478  if map_blobs:
479  data = Column(Binary, nullable=False)
480  streamer_info = Column(Binary, nullable=False)
481  blobs_mapped = map_blobs
482 
483  def __init__(self, dictionary={}, convert_timestamps=True):
484  # assign each entry in a kwargs
485  for key in dictionary:
486  try:
487  if convert_timestamps:
488  self.__dict__[key] = to_timestamp(dictionary[key])
489  else:
490  self.__dict__[key] = dictionary[key]
491  except KeyError as k:
492  continue
493 
494  if map_blobs:
495  def as_dicts(self, convert_timestamps=False):
496  """
497  Returns dictionary form of this Payload's metadata (not the actual Payload).
498  """
499  return {
500  "hash" : self.hash,
501  "object_type" : self.object_type,
502  "version" : self.version,
503  "insertion_time" : to_timestamp(self.insertion_time) if convert_timestamps else self.insertion_time,
504  "data" : self.data,
505  "streamer_info" : self.streamer_info
506  }
507  else:
508  def as_dicts(self, convert_timestamps=False):
509  """
510  Returns dictionary form of this Payload's metadata (not the actual Payload).
511  """
512  return {
513  "hash" : self.hash,
514  "object_type" : self.object_type,
515  "version" : self.version,
516  "insertion_time" : to_timestamp(self.insertion_time) if convert_timestamps else self.insertion_time
517  }
518 
519  def __repr__(self):
520  return '<Payload %r>' % self.hash
521 
522  def to_array(self):
523  return [self.hash, self.object_type, self.version, to_timestamp(self.insertion_time)]
524 
525  def parent_tags(self, **kwargs):
526  """
527  Returns `amount` parent Tags ordered by Tag name.
528  """
529  # check if this payload is empty
530  if self.empty:
531  return None
532  else:
533  kwargs["payload_hash"] = self.hash
534  query = self.session.query(IOV.tag_name)
535  query = apply_filters(query, IOV, **kwargs)
536  query_result = query.all()
537  tag_names = map(lambda entry : entry[0], query_result)
538  amount = kwargs["amount"] if "amount" in kwargs.keys() else None
539  tags = self.session.query(Tag).filter(Tag.name.in_(tag_names)).order_by(Tag.name).limit(amount).all()
541 
542  def all(self, **kwargs):
543  """
544  Returns `amount` Payloads ordered by Payload hash.
545  """
546  query = self.session.query(Payload)
547  query = apply_filters(query, Payload, **kwargs)
548  amount = kwargs["amount"] if "amount" in kwargs.keys() else None
549  query_result = query.order_by(Payload.hash).limit(amount).all()
550  return data_sources.json_data_node.make(query_result)
551 
552 
553  class Record(Base):
554  __tablename__ = 'RECORDS'
555 
556  headers = ["record", "object", "type"]
557 
558  record = Column(String(100), primary_key=True, nullable=False)
559  object = Column(String(200), nullable=False)
560  type = Column(String(20), nullable=False)
561 
562  def as_dicts(self):
563  """
564  Returns dictionary form of this Record.
565  """
566  return {
567  "record" : self.record,
568  "object" : self.object,
569  "type" : self.type
570  }
571 
572  def __repr__(self):
573  return '<Record %r>' % self.record
574 
575  def to_array(self):
576  return [self.record, self.object]
577 
578  def all(self, **kwargs):
579  """
580  Returns `amount` Records ordered by Record record.
581  """
582  query = self.session.query(Record)
583  query = apply_filters(query, Record, kwargs)
584  amount = kwargs["amount"] if "amount" in kwargs.keys() else None
585  query_result = query.order_by(Record.record).limit(amount).all()
586  return data_sources.json_data_node.make(query_result)
587 
588 
589  class Tag(Base):
590  __tablename__ = 'TAG'
591 
592  headers = ["name", "time_type", "object_type", "synchronization", "end_of_validity",\
593  "description", "last_validated_time", "insertion_time", "modification_time"]
594 
595  name = Column(String(4000), primary_key=True, nullable=False)
596  time_type = Column(String(4000), nullable=False)
597  object_type = Column(String(4000), nullable=False)
598  synchronization = Column(String(4000), nullable=False)
599  end_of_validity = Column(Integer, nullable=False)
600  description = Column(String(4000), nullable=False)
601  last_validated_time = Column(BigInteger, nullable=False)
602  insertion_time = Column(DateTime, nullable=False)
603  modification_time = Column(DateTime, nullable=False)
604 
605  record = None
606  label = None
607 
608  iovs_list = relationship('IOV', backref='tag')
609 
610  def __init__(self, dictionary={}, convert_timestamps=True):
611  # assign each entry in a kwargs
612  for key in dictionary:
613  try:
614  if convert_timestamps:
615  self.__dict__[key] = to_timestamp(dictionary[key])
616  else:
617  self.__dict__[key] = dictionary[key]
618  except KeyError as k:
619  continue
620 
621  def as_dicts(self, convert_timestamps=False):
622  """
623  Returns dictionary form of this Tag.
624  """
625  return {
626  "name" : self.name,
627  "time_type" : self.time_type,
628  "object_type" : self.object_type,
629  "synchronization" : self.synchronization,
630  "end_of_validity" : self.end_of_validity,
631  "description" : self.description,
632  "last_validated_time" : self.last_validated_time,
633  "insertion_time" : to_timestamp(self.insertion_time) if convert_timestamps else self.insertion_time,
634  "modification_time" : to_timestamp(self.modification_time) if convert_timestamps else self.modification_time,
635  "record" : self.record,
636  "label" : self.label
637  }
638 
639  def __repr__(self):
640  return '<Tag %r>' % self.name
641 
642  def to_array(self):
643  return [self.name, self.time_type, self.object_type, self.synchronization, to_timestamp(self.insertion_time), self.description]
644 
645  def parent_global_tags(self, **kwargs):
646  """
647  Returns `amount` Global Tags that contain this Tag.
648  """
649  if self.empty:
650  return None
651  else:
652  kwargs["tag_name"] = self.name
653  query = self.session.query(GlobalTagMap.global_tag_name)
654  query = apply_filters(query, GlobalTagMap, **kwargs)
655  query_result = query.all()
656  if len(query_result) != 0:
657  global_tag_names = map(lambda entry : entry[0], query_result)
658  amount = kwargs["amount"] if "amount" in kwargs.keys() else None
659  global_tags = self.session.query(GlobalTag).filter(GlobalTag.name.in_(global_tag_names)).order_by(GlobalTag.name).limit(amount).all()
660  else:
661  global_tags = None
662  return data_sources.json_data_node.make(global_tags)
663 
664  def all(self, **kwargs):
665  """
666  Returns `amount` Tags ordered by Tag name.
667  """
668  query = self.session.query(Tag)
669  query = apply_filters(query, Tag, **kwargs)
670  amount = kwargs["amount"] if "amount" in kwargs.keys() else None
671  query_result = query.order_by(Tag.name).limit(amount).all()
672  return data_sources.json_data_node.make(query_result)
673 
674  def iovs(self, **kwargs):
675  """
676  Returns `amount` IOVs that belong to this Tag ordered by IOV since.
677  """
678  # filter_params contains a list of columns to filter the iovs by
679  iovs_query = self.session.query(IOV).filter(IOV.tag_name == self.name)
680  iovs_query = apply_filters(iovs_query, IOV, **kwargs)
681  amount = kwargs["amount"] if "amount" in kwargs.keys() else None
682  iovs = iovs_query.order_by(IOV.since).limit(amount).all()
684 
685  def latest_iov(self):
686  """
687  Returns the single highest since held by this Tag.
688  Insertion times do not matter - if there are two IOVs at since > all others, both have the highest since.
689  """
690  iov = self.session.query(IOV).filter(IOV.tag_name == self.name).order_by(IOV.since.desc()).first()
691  return iov
692 
693  def __sub__(self, other):
694  """
695  Allows the arithmetic operator "-" to be applied to find the difference between two tags.
696  Note: diff() is symmetric, hence tag1 - tag2 = tag2 - tag1.
697  """
698  return self.diff(other)
699 
700  def diff(self, tag, short=False):
701  """
702  Returns the `diff` of the first Tag, and the Tag given.
703  Summary of algorithm:
704 
705  Compute the ordered set of iov sinces from both tags, and construct a list of triples, (since, tag1 hash, tag2 hash).
706  Set previous_payload1 and previous_payload2 to be the first hash values from each tag for the first since in the merged list.
707  Note: depending on where each Tag's IOVs start, 1 or both of these values can be None.
708  Set the first_since_in_equality_range = -1, which holds the since at which the last hashes were equal in the Tags.
709  For each triple (since, hash1, hash2),
710 
711  If the first_since_in_equality_range = None,
712  We are at the first since in the merged list, so set first_since... = since
713  Note: this is so set the previous... values for the second row, since the first row will never result in a print because
714  a row is only printed when past iovs have been processed.
715 
716  If either hash1 or hash2 is None, set it to the previous hash found
717  Note: if a Tag defines a hash for one since and then not another for n rows, the last defined hash will be carried through because of this.
718 
719  If the previous found hashes were equal, that means we have equality on the range [first_since_in_equality_range, since)
720  Note: we CANNOT conclude anything about the hashes corresponding to sinces >= since
721  because we have no looked forward, but we do know about the previous hashes.
722 
723  If hash1 != hash2,
724  The region of equality has ended, and so we have that [first_since_in_equality_range, since) is equal for both Tags
725  Hence, print that for this range we have equal hashes denoted by "=" in each hash column.
726 
727  Else:
728 
729  The previous hashes were not equal, BUT we must check that ths hashes on this row are not identical...
730  If the hashes on this row are the same as the hashes above (hash1 == previous_payload1 and hash2 == previous_payload2),
731  then we have not found the end of a region of equality!
732  If the hashes have changed, print a row.
733 
734  """
735  if tag.__class__.__name__ != "Tag":
736  raise TypeError("Tag given must be a CondDBFW Tag object.")
737 
738  # get lists of iovs
739  iovs1 = dict(map(lambda iov : (iov.since, iov.payload_hash), self.iovs().data()))
740  iovs2 = dict(map(lambda iov : (iov.since, iov.payload_hash), tag.iovs().data()))
741 
742  iovs = [(x, iovs1.get(x), iovs2.get(x)) for x in sorted(set(iovs1) | set(iovs2))]
743  iovs.append(("Infinity", 1, 2))
744  table = []
745 
746  previous_hash1 = None
747  previous_hash2 = None
748  first_since_in_equality_range = None
749  previous_equal = False
750 
751  for since, hash1, hash2 in iovs:
752 
753  if first_since_in_equality_range == None:
754  # if no start of a region of equality has been found,
755  # set it to the first since in the merged list
756  # then set the previous hashes and equality status to the current
757  # and continue to the next iteration of the loop
758  first_since_in_equality_range = since
759  previous_hash1 = hash1
760  previous_hash2 = hash2
761  previous_equal = hash1 == hash2
762  continue
763 
764  # if previous_payload1 is also None, comparisons still matters
765  # eg, if hash1 = None and hash2 != None, they are different and so should be shown in the table
766  if hash1 == None:
767  hash1 = previous_hash1
768  if hash2 == None:
769  hash2 = previous_hash2
770 
771  if previous_equal:
772  # previous hashes were equal, but only say they were if we have found an end of the region of equality
773  if hash1 != hash2:
774  table.append({"since" : "[%s, %s)" % (first_since_in_equality_range, since), self.name : "=", tag.name : "="})
775  # this is the start of a new equality range - might only be one row if the next row has unequal hashes!
776  first_since_in_equality_range = since
777  else:
778  # if the payloads are not equal, the equality range has ended and we should print a row
779  # we only print if EITHER hash has changed
780  # if both hashes are equal to the previous row, skip to the next row to try to find the beginning
781  # of a region of equality
782  if not(hash1 == previous_hash1 and hash2 == previous_hash2):
783  table.append({"since" : "[%s, %s)" % (first_since_in_equality_range, since), self.name : previous_hash1, tag.name : previous_hash2})
784  first_since_in_equality_range = since
785 
786  previous_hash1 = hash1
787  previous_hash2 = hash2
788  previous_equal = hash1 == hash2
789 
790  final_list = data_sources.json_data_node.make(table)
791  return final_list
792 
793  def merge_into(self, tag, range_object):
794  """
795  Given another connection, apply the 'merge' algorithm to merge the IOVs from this Tag
796  into the IOVs of the other Tag.
797 
798  tag : CondDBFW Tag object that the IOVs from this Tag should be merged into.
799 
800  range_object : CondDBFW.data_sources.Range object to describe the subset of IOVs that should be copied
801  from the database this Tag belongs to.
802 
803  Script originally written by Joshua Dawes,
804  and adapted by Giacomo Govi, Gianluca Cerminara and Giovanni Franzoni.
805  """
806 
807  oracle_tag = self
808  merged_tag_name = oracle_tag.name + "_merged"
809 
810  #since_range = Range(6285191841738391552,6286157702573850624)
811  since_range = range_object
812 
813  #sqlite = shell.connect("sqlite://EcallaserTag_80X_2016_prompt_corr20160519_2.db")
814 
815  #sqlite_tag = sqlite.tag().all().data()[0]
816  sqlite_tag = tag
817  if sqlite_tag == None:
818  raise TypeError("Tag to be merged cannot be None.")
819 
820  sqlite_iovs = sqlite_tag.iovs().data()
821  sqlite_tag.iovs().as_table()
822 
823  new_tag = self.connection.models["tag"](sqlite_tag.as_dicts(convert_timestamps=False), convert_timestamps=False)
824  new_tag.name = merged_tag_name
825 
826  imported_iovs = oracle_tag.iovs(since=since_range).data()
827 
828  for i in range(0, len(imported_iovs)):
829  imported_iovs[i].source = "oracle"
830 
831  sqlite_iovs_sinces=[]
832  for i in range(0, len(sqlite_iovs)):
833  sqlite_iovs[i].source = "sqlite"
834  sqlite_iovs_sinces.append(sqlite_iovs[i].since)
835 
836 
837  print sqlite_iovs_sinces
838 
839  new_iovs_list = imported_iovs + sqlite_iovs
840  new_iovs_list = sorted(new_iovs_list, key=lambda iov : iov.since)
841 
842  for (n, iov) in enumerate(new_iovs_list):
843  # if iov is from oracle, change its hash
844  if iov.source == "oracle":
845  if new_iovs_list[n].since in sqlite_iovs_sinces:
846  # if its since is already defined in the target iovs
847  # ignore it
848  iov.source = "tobedeleted"
849  else:
850  # otherwise, iterate down from n to find the last sqlite iov,
851  # and assign that hash
852  for i in reversed(range(0,n)):
853  if new_iovs_list[i].source == "sqlite":
854  print("change %s to %s at since %d" % (iov.payload_hash, new_iovs_list[i].payload_hash, iov.since))
855  iov.payload_hash = new_iovs_list[i].payload_hash
856  break
857 
858 
859  new_iov_list_copied = []
860 
861  for iov in new_iovs_list:
862  # only append IOVs that are not already defined in the target tag
863  if iov.source != "tobedeleted":
864  new_iov_list_copied.append(iov)
865 
866  new_iov_list_copied = sorted(new_iov_list_copied, key=lambda iov : iov.since)
867 
868  now = datetime.datetime.now()
869 
870  new_iovs = []
871  for iov in new_iov_list_copied:
872  new_iovs.append( self.connection.models["iov"](iov.as_dicts(convert_timestamps=False), convert_timestamps=False) )
873  for iov in new_iovs:
874  iov.insertion_time = now
875  iov.tag_name = merged_tag_name
876 
877  new_tag.iovs_list = new_iovs
878 
879  return new_tag
880  #sqlite.write_and_commit(new_iovs)
881 
882  classes = {"globaltag" : GlobalTag, "iov" : IOV, "globaltagmap" : GlobalTagMap,\
883  "payload" : Payload, "tag" : Tag, "Base" : Base}
884 
885  if class_name == None:
886  return classes
887  else:
888  return classes[class_name]
def _dicts_to_orm_objects(model, data)
def to_timestamp(obj)
Definition: utils.py:6
def get_start(self)
Definition: models.py:94
def session_independent_object(object, schema=None)
Definition: models.py:31
def Base(process)
S & print(S &os, JobReport::InputFile const &f)
Definition: JobReport.cc:65
def apply(self)
Definition: models.py:140
def get_regexp(self)
Definition: models.py:137
def generate(map_blobs=False, class_name=None)
Definition: models.py:187
def __init__(self)
Definition: models.py:91
def class_name_to_column(cls)
Definition: models.py:57
def __init__(self, regexp)
Definition: models.py:134
OutputIterator zip(InputIterator1 first1, InputIterator1 last1, InputIterator2 first2, InputIterator2 last2, OutputIterator result, Compare comp)
def __init__(self, start, end)
Definition: models.py:120
static std::string join(char **cmd)
Definition: RemoteFile.cc:18
def session_independent(objects)
Definition: models.py:50
char data[epos_bytes_allocation]
Definition: EPOS_Wrapper.h:82
def apply_filters(orm_query, orm_class, filters)
Definition: models.py:181
def status_full_name(status)
Definition: models.py:71
def __init__(self, centre, radius)
Definition: models.py:104
def date_args_to_days(radius)
Definition: models.py:79
def get_end(self)
Definition: models.py:97
def apply_filter(orm_query, orm_class, attribute, value)
Definition: models.py:152