CMS 3D CMS Logo

models.py
Go to the documentation of this file.
1 """
2 
3 Using Audrius' models from flask browser.
4 
5 This file contains models that are used with SQLAlchemy.
6 
7 Note: some things done in methods written in classes rely on the querying module adding extra information to classes,
8  so these will not work in a normal context outside the framework.
9 
10 """
11 from __future__ import print_function
12 import json
13 import datetime
14 
15 try:
16  import sqlalchemy
17  from sqlalchemy.orm import relationship, backref
18  from sqlalchemy.ext.declarative import declarative_base
19  # Note: Binary is only used for blobs, if they are mapped
20  from sqlalchemy import Column, String, Integer, DateTime, Binary, ForeignKey, BigInteger, and_
21 except ImportError:
22  print("You must be working inside a CMSSW environment. Try running 'cmsenv'.")
23  exit()
24 
import base64
import re
import urllib
import urllib2
from copy import deepcopy

import data_sources
import data_formats

# get utility functions
from utils import to_timestamp, to_datetime, friendly_since
31 
def session_independent_object(object, schema=None):
    """
    Return a copy of `object` that is not attached to any SQLAlchemy session,
    so SQLAlchemy will not track changes made to it.
    (Code originally taken from the write method in the querying module.)
    """
    # Payloads may carry mapped blob columns; propagate that flag so the
    # regenerated class has the same column set as the original.
    map_blobs = object.blobs_mapped if object.__class__.__name__.lower() == "payload" else False

    # extract the class name of the object and regenerate that single class
    # need to change this to only generate the required class - can be slow...
    cls = object.__class__
    class_name = class_name_to_column(cls).lower()
    new_class = generate(map_blobs=map_blobs, class_name=class_name)
    new_class.__table__.schema = schema

    # rebuild the object from its dict form; timestamps are kept as-is
    return new_class(object.as_dicts(), convert_timestamps=False)
50 
def session_independent(objects):
    """
    Detach `objects` (a single ORM object, or a list of them) from their session.
    """
    if not isinstance(objects, list):
        # a single object (not a list)
        return session_independent_object(objects)
    return map(session_independent_object, objects)
57 
def class_name_to_column(cls):
    """
    Convert a CamelCase ORM class name to its snake_case column/table form.

    Names that are entirely upper-case (eg "IOV") are returned unchanged;
    otherwise each non-initial upper-case letter becomes "_" + lower-case
    (eg "GlobalTagMap" -> "global_tag_map").

    Note: the `def` line of this function was lost in extraction and has been
    restored (signature confirmed by the module's own callers).
    """
    class_name = cls.__name__
    # bug fix: the old loop (`all_upper_case = character.isupper()` per char)
    # only tested the LAST character, so any name merely ENDING in an
    # upper-case letter was returned unconverted
    if class_name.isupper():
        return class_name
    fragments = []
    for n, character in enumerate(class_name):
        if character.isupper():
            # initial capital is lowered without a leading underscore
            if n != 0:
                fragments.append("_")
            fragments.append(character.lower())
        else:
            fragments.append(character)
    return "".join(fragments)
71 
def status_full_name(status):
    """
    Expand a single-character status code into its full name.
    Raises KeyError for unknown codes.
    """
    return {
        'P': 'Pending',
        'R': 'Rejected',
        'A': 'Accepted',
    }[status]
79 
def date_args_to_days(**radius):
    """
    Convert keyword time-span arguments into a single number of days.

    Accepted keywords: days, weeks, months, years.
    Note: months are approximated as 28 days and years as 365 days.
    """
    # bug fix: radius.get("days") is None when "days" is absent, which made
    # the following `+=` raise TypeError - default to 0 instead
    days = radius.get("days") or 0
    days += radius.get("weeks")*7 if radius.get("weeks") != None else 0
    days += radius.get("months")*28 if radius.get("months") != None else 0
    # bug fix: this used to ADD 365 (`years+365`) instead of multiplying
    days += radius.get("years")*365 if radius.get("years") != None else 0
    return days
86 
88  """
89  Base class for Radius and Range - used for checking by apply_filter function
90  """
91 
92  def __init__(self):
93  pass
94 
95  def get_start(self):
96  return self._start
97 
98  def get_end(self):
99  return self._end
100 
102  """
103  Used to tell proxy methods that a range of values defined by a centre and a radius should be queried for - special case of filter clauses.
104  """
105  def __init__(self, centre, radius):
106  """
107  centre and radius should be objects that can be added and subtracted.
108  eg, centre could be a datetime.datetime object, and radius could be datetime.timedelta
109 
110  Radius and Range objects are assigned to properties of querying.connection objects, hence are given the database type.
111  """
112  self._centre = centre
113  self._radius = radius
114  self._start = self._centre - self._radius
115  self._end = self._centre + self._radius
116 
118  """
119  Used to tell proxy methods that a range of values defined by a start and end point should be queried for - special case of filter clauses.
120  """
121  def __init__(self, start, end):
122  """
123  centre and radius should be objects that can be added and subtracted.
124  eg, centre could be a datetime.datetime object, and radius could be datetime.timedelta
125 
126  Radius and Range objects are assigned to properties of querying.connection objects, hence are given the database type.
127  """
128  self._start = start
129  self._end = end
130 
class RegExp(object):
    """
    Used to tell proxy methods that a regular expression should be used to query the column.
    """
    def __init__(self, regexp):
        # the raw regular expression string
        self._regexp = regexp

    def get_regexp(self):
        # accessor used by apply_filter when building the SQL clause
        return self._regexp

    def apply(self):
        # uses code from conddb tool
        # NOTE(review): `field` and `regexp` are undefined in this scope, and
        # `self.database_type` / `self.connection_object` are attached to the
        # instance externally by the querying framework - this method looks
        # broken/unused as written; apply_filter() below implements the
        # working equivalent.  Confirm before relying on it.
        if self.database_type in ["oracle", "frontier"]:
            return sqlalchemy.func.regexp_like(field, regexp)
        elif self.database_type == "sqlite":
            # Relies on being a SingletonThreadPool
            self.connection_object.engine.pool.connect().create_function('regexp', 2, lambda data, regexp: re.search(regexp, data) is not None)

            return sqlalchemy.func.regexp(field, regexp)
        else:
            # bug fix: NotImplemented is a singleton, not an exception type -
            # `raise NotImplemented(...)` itself raised a TypeError
            raise NotImplementedError("Can only apply regular expression search to Oracle, Frontier and SQLite.")
152 
def apply_filter(orm_query, orm_class, attribute, value):
    """
    Apply a single filter clause on `orm_class.attribute` to `orm_query`.

    value may be:
      - a plain value            -> equality filter
      - a list / json_list       -> IN clause
      - a Range or Radius object -> inclusive BETWEEN-style clause
      - a RegExp object          -> database-specific regexp clause
    Returns the filtered query.
    """
    filter_attribute = getattr(orm_class, attribute)
    if isinstance(value, list):
        orm_query = orm_query.filter(filter_attribute.in_(value))
    elif isinstance(value, data_sources.json_list):
        orm_query = orm_query.filter(filter_attribute.in_(value.data()))
    elif type(value) in [Range, Radius]:
        # inclusive interval [start, end]
        minus = value.get_start()
        plus = value.get_end()
        orm_query = orm_query.filter(and_(filter_attribute >= minus, filter_attribute <= plus))

    elif isinstance(value, RegExp):

        # Relies on being a SingletonThreadPool

        if value.database_type in ["oracle", "frontier"]:
            regexp = sqlalchemy.func.regexp_like(filter_attribute, value.get_regexp())
        elif value.database_type == "sqlite":
            # register a python-level regexp() function on the sqlite connection
            value.connection_object.engine.pool.connect().create_function('regexp', 2, lambda data, regexp: re.search(regexp, data) is not None)
            regexp = sqlalchemy.func.regexp(filter_attribute, value.get_regexp())
        else:
            # bug fix: NotImplemented is a singleton, not an exception type -
            # `raise NotImplemented(...)` itself raised a TypeError
            raise NotImplementedError("Can only apply regular expression search to Oracle, Frontier and SQLite.")
        orm_query = orm_query.filter(regexp)

    else:
        orm_query = orm_query.filter(filter_attribute == value)
    return orm_query
181 
def apply_filters(orm_query, orm_class, **filters):
    """
    Apply every keyword filter to `orm_query` via apply_filter.
    The "amount" keyword is a result-limit, not a column filter, so it is skipped.
    """
    for attribute, value in filters.items():
        if attribute != "amount":
            orm_query = apply_filter(orm_query, orm_class, attribute, value)
    return orm_query
187 
def generate(map_blobs=False, class_name=None):
    """
    Build and return the SQLAlchemy declarative model classes used by CondDBFW.

    map_blobs : if True, the Payload class maps its blob columns (data, streamer_info).
    class_name : if given, return only that class (keys: "globaltag", "iov",
                 "globaltagmap", "payload", "tag", "Base"); otherwise return
                 the dict of all generated classes.
    """
    # a fresh declarative base per call, so each generated set of classes is independent
    Base = declarative_base()
191 
192  class GlobalTag(Base):
193  __tablename__ = 'GLOBAL_TAG'
194 
195  headers = ["name", "validity", "description", "release", "insertion_time", "snapshot_time", "scenario", "workflow", "type"]
196 
197  name = Column(String(100), unique=True, nullable=False, primary_key=True)
198  validity = Column(Integer, nullable=False)
199  description = Column(String(4000), nullable=False)
200  release = Column(String(100), nullable=False)
201  insertion_time = Column(DateTime, nullable=False)
202  snapshot_time = Column(DateTime, nullable=False)
203  scenario = Column(String(100))
204  workflow = Column(String(100))
205  type = Column(String(1))
206  tag_map = relationship('GlobalTagMap', backref='global_tag')
207 
208  def __init__(self, dictionary={}, convert_timestamps=True):
209  # assign each entry in a kwargs
210  for key in dictionary:
211  try:
212  if convert_timestamps:
213  self.__dict__[key] = to_timestamp(dictionary[key])
214  else:
215  self.__dict__[key] = dictionary[key]
216  except KeyError as k:
217  continue
218 
219  def __repr__(self):
220  return '<GlobalTag %r>' % self.name
221 
222  def as_dicts(self, convert_timestamps=False):
223  """
224  Returns dictionary form of Global Tag object.
225  """
226  json_gt = {
227  'name': self.name,
228  'validity': self.validity,
229  'description': self.description,
230  'release': self.release,
231  'insertion_time': to_timestamp(self.insertion_time) if convert_timestamps else self.insertion_time,
232  'snapshot_time': to_timestamp(self.snapshot_time) if convert_timestamps else self.snapshot_time,
233  'scenario': self.scenario,
234  'workflow': self.workflow,
235  'type': self.type
236  }
237  return json_gt
238 
239  def to_array(self):
240  return [self.name, self.release, to_timestamp(self.insertion_time), to_timestamp(self.snapshot_time), self.description]
241 
242  def all(self, **kwargs):
243  """
244  Returns `amount` Global Tags ordered by Global Tag name.
245  """
246  query = self.session.query(GlobalTag)
247  query = apply_filters(query, self.__class__, **kwargs)
248  amount = kwargs["amount"] if "amount" in kwargs.keys() else None
249  query_result = query.order_by(GlobalTag.name).limit(amount).all()
250  gts = data_sources.json_data_node.make(query_result)
251  return gts
252 
253  def tags(self, **kwargs):
254  """
255  Returns `amount` *Global Tag Maps* belonging to this Global Tag.
256  """
257  kwargs["global_tag_name"] = self.name
258  all_tags = self.session.query(GlobalTagMap.global_tag_name, GlobalTagMap.record, GlobalTagMap.label, GlobalTagMap.tag_name)
259  all_tags = apply_filters(all_tags, GlobalTagMap, **kwargs)
260  amount = kwargs["amount"] if "amount" in kwargs.keys() else None
261  all_tags = all_tags.order_by(GlobalTagMap.tag_name).limit(amount).all()
262  column_names = ["global_tag_name", "record", "label", "tag_name"]
263  all_tags = map(lambda row : dict(zip(column_names, map(to_timestamp, row))), all_tags)
264  all_tags = data_formats._dicts_to_orm_objects(GlobalTagMap, all_tags)
265  return data_sources.json_data_node.make(all_tags)
266 
267  def iovs(self, **kwargs):
268  """
269  Returns `amount` IOVs belonging to all Tags held in this Global Tag.
270  For large Global Tags (which is most of them), VERY slow.
271  Highly recommended to instead used `tags().get_members("tag_name").data()` to get a `list` of tag names,
272  and then get IOVs from each Tag name.
273 
274  At some point, this method may replace the method currently used.
275  """
276  # join global_tag_map onto iov (where insertion time <= gt snapshot) by tag_name + return results
277  # first get only the IOVs that belong to Tags that are contained by this Global Tag
278 
279  # get IOVs belonging to a Tag contained by this Global Tag
280  tag_names = self.tags().get_members("tag_name").data()
281  iovs_all_tags = self.session.query(IOV).filter(IOV.tag_name.in_(tag_names))
282  iovs_all_tags = apply_filters(iovs_all_tags, IOV, **kwargs)
283  amount = kwargs["amount"] if "amount" in kwargs.keys() else None
284  iovs_all_tags = iovs_all_tags.limit(amount).subquery()
285 
286  # now, join Global Tag Map table onto IOVs
287  iovs_gt_tags = self.session.query(GlobalTagMap.tag_name, iovs_all_tags.c.since,\
288  iovs_all_tags.c.payload_hash, iovs_all_tags.c.insertion_time)\
289  .filter(GlobalTagMap.global_tag_name == self.name)\
290  .join(iovs_all_tags, GlobalTagMap.tag_name == iovs_all_tags.c.tag_name)
291 
292  iovs_gt_tags = iovs_gt_tags.order_by(iovs_all_tags.c.since).all()
293 
294  column_names = ["tag_name", "since", "payload_hash", "insertion_time"]
295  all_iovs = map(lambda row : dict(zip(column_names, row)), iovs_gt_tags)
296  all_iovs = data_formats._dicts_to_orm_objects(IOV, all_iovs)
297 
298  return data_sources.json_data_node.make(all_iovs)
299 
300  def __sub__(self, other):
301  """
302  Allows Global Tag objects to be used with the "-" arithmetic operator to find their difference.
303  Note: gt1 - gt2 = gt1.diff(gt2) ( = gt2 - gt1 = gt2.diff(gt1))
304  """
305  return self.diff(other)
306 
307  def diff(self, gt):
308  """
309  Returns the json_list of differences in the form of tuples:
310 
311  (record, label, tag name of gt1 (self), tag name of gt2 (gt))
312  """
313 
314  record_label_to_tag_name1 = dict([((gt_map.record, gt_map.label), gt_map.tag_name) for gt_map in self.tags().data()])
315  record_label_to_tag_name2 = dict([((gt_map.record, gt_map.label), gt_map.tag_name) for gt_map in gt.tags().data()])
316 
317  record_label_pairs = sorted(set(record_label_to_tag_name1) | set(record_label_to_tag_name2))
318 
319  table = []
320  tags_pairs_with_differences = []
321 
322  for record_label in record_label_pairs:
323  tag_name1 = record_label_to_tag_name1.get(record_label)
324  tag_name2 = record_label_to_tag_name2.get(record_label)
325 
326  if tag_name1 == None or tag_name2 == None or tag_name1 != tag_name2:
327  table.append({
328  "Record" : record_label[0],
329  "Label" : record_label[1],
330  ("%s Tag" % self.name) : tag_name1,
331  ("%s Tag" % gt.name) : tag_name2
332  })
333 
335 
    class GlobalTagMap(Base):
        """
        ORM model for the GLOBAL_TAG_MAP table: associates a (record, label)
        pair within a Global Tag with a Tag name.
        """
        __tablename__ = 'GLOBAL_TAG_MAP'

        # column order used when rendering rows
        headers = ["global_tag_name", "record", "label", "tag_name"]

        global_tag_name = Column(String(100), ForeignKey('GLOBAL_TAG.name'), primary_key=True, nullable=False)
        record = Column(String(100), ForeignKey('RECORDS.record'), primary_key=True, nullable=False)
        label = Column(String(100), primary_key=True, nullable=False)
        tag_name = Column(String(100), ForeignKey('TAG.name'), nullable=False)

        def __init__(self, dictionary={}, convert_timestamps=True):
            # assign each entry of the dictionary as an instance attribute;
            # to_timestamp is applied to every value when convert_timestamps is True
            for key in dictionary:
                try:
                    if convert_timestamps:
                        self.__dict__[key] = to_timestamp(dictionary[key])
                    else:
                        self.__dict__[key] = dictionary[key]
                except KeyError as k:
                    # NOTE(review): the dict iteration/lookup here cannot raise
                    # KeyError itself; presumably this guards to_timestamp - confirm
                    continue

        def __repr__(self):
            return '<GlobalTagMap %r>' % self.global_tag_name

        def as_dicts(self, convert_timestamps=False):
            """
            Returns dictionary form of this Global Tag Map.
            """
            json_gtm = {
                "global_tag_name" : str(self.global_tag_name),
                "record" : str(self.record),
                "label" : str(self.label),
                "tag_name" : str(self.tag_name)
            }
            return json_gtm
371 
372 
    class GlobalTagMapRequest(Base):
        """
        ORM model for the GLOBAL_TAG_MAP_REQUEST table: a queued request to add
        a (record, label) -> tag association, with an approval status
        ('P'/'R'/'A', see status_full_name).
        """
        __tablename__ = 'GLOBAL_TAG_MAP_REQUEST'

        queue = Column(String(100), primary_key=True, nullable=False)
        tag = Column(String(100), ForeignKey('TAG.name'), primary_key=True, nullable=False)
        record = Column(String(100), ForeignKey('RECORDS.record'), primary_key=True, nullable=False)
        label = Column(String(100), primary_key=True, nullable=False)
        status = Column(String(1), nullable=False)
        description = Column(String(4000), nullable=False)
        submitter_id = Column(Integer, nullable=False)
        time_submitted = Column(DateTime, nullable=False)
        last_edited = Column(DateTime, nullable=False)

        def __init__(self, dictionary={}, convert_timestamps=True):
            # assign each entry of the dictionary as an instance attribute;
            # to_timestamp is applied to every value when convert_timestamps is True
            for key in dictionary:
                try:
                    if convert_timestamps:
                        self.__dict__[key] = to_timestamp(dictionary[key])
                    else:
                        self.__dict__[key] = dictionary[key]
                except KeyError as k:
                    continue

        # column order used when rendering rows
        headers = ["queue", "tag", "record", "label", "status", "description", "submitter_id", "time_submitted", "last_edited"]

        def as_dicts(self):
            """
            Returns dictionary form of this Global Tag Map Request.
            """
            return {
                "queue" : self.queue,
                "tag" : self.tag,
                "record" : self.record,
                "label" : self.label,
                "status" : self.status,
                "description" : self.description,
                "submitter_id" : self.submitter_id,
                "time_submitted" : self.time_submitted,
                "last_edited" : self.last_edited
            }

        def __repr__(self):
            return '<GlobalTagMapRequest %r>' % self.queue

        def to_array(self):
            # row form used for table rendering; status is expanded to its full name
            return [self.queue, self.tag, self.record, self.label, status_full_name(self.status), to_timestamp(self.time_submitted), to_timestamp(self.last_edited)]
420 
    class IOV(Base):
        """
        ORM model for the IOV table: maps a (tag_name, since, insertion_time)
        to a Payload hash.
        """
        __tablename__ = 'IOV'

        # column order used when rendering rows
        headers = ["tag_name", "since", "payload_hash", "insertion_time"]

        tag_name = Column(String(4000), ForeignKey('TAG.name'), primary_key=True, nullable=False)
        # NOTE(review): since values can exceed 32-bit range (see the example
        # since 6285191841738391552 in Tag.merge_into) - confirm Integer vs
        # BigInteger is correct for the target databases
        since = Column(Integer, primary_key=True, nullable=False)
        payload_hash = Column(String(40), ForeignKey('PAYLOAD.hash'), nullable=False)
        insertion_time = Column(DateTime, primary_key=True, nullable=False)

        def __init__(self, dictionary={}, convert_timestamps=True):
            # assign each entry of the dictionary as an instance attribute;
            # to_timestamp is applied to every value when convert_timestamps is True
            for key in dictionary:
                try:
                    if convert_timestamps:
                        self.__dict__[key] = to_timestamp(dictionary[key])
                    else:
                        self.__dict__[key] = dictionary[key]
                except KeyError as k:
                    continue

        def as_dicts(self, convert_timestamps=False):
            """
            Returns dictionary form of this IOV.
            """
            return {
                "tag_name" : self.tag_name,
                "since" : self.since,
                "payload_hash" : self.payload_hash,
                "insertion_time" : to_timestamp(self.insertion_time) if convert_timestamps else self.insertion_time
            }

        def __repr__(self):
            return '<IOV %r>' % self.tag_name

        def to_array(self):
            # row form used for table rendering
            return [self.since, to_timestamp(self.insertion_time), self.payload_hash]

        def all(self, **kwargs):
            """
            Returns `amount` IOVs ordered by since.
            Note: relies on `self.session` being attached by the querying framework.
            """
            query = self.session.query(IOV)
            query = apply_filters(query, IOV, **kwargs)
            amount = kwargs["amount"] if "amount" in kwargs.keys() else None
            query_result = query.order_by(IOV.tag_name).order_by(IOV.since).limit(amount).all()
            return data_sources.json_data_node.make(query_result)
468 
469 
470  class Payload(Base):
471  __tablename__ = 'PAYLOAD'
472 
473  headers = ["hash", "object_type", "version", "insertion_time"]
474 
475  hash = Column(String(40), primary_key=True, nullable=False)
476  object_type = Column(String(4000), nullable=False)
477  version = Column(String(4000), nullable=False)
478  insertion_time = Column(DateTime, nullable=False)
479  if map_blobs:
480  data = Column(Binary, nullable=False)
481  streamer_info = Column(Binary, nullable=False)
482  blobs_mapped = map_blobs
483 
484  def __init__(self, dictionary={}, convert_timestamps=True):
485  # assign each entry in a kwargs
486  for key in dictionary:
487  try:
488  if convert_timestamps:
489  self.__dict__[key] = to_timestamp(dictionary[key])
490  else:
491  self.__dict__[key] = dictionary[key]
492  except KeyError as k:
493  continue
494 
495  if map_blobs:
496  def as_dicts(self, convert_timestamps=False):
497  """
498  Returns dictionary form of this Payload's metadata (not the actual Payload).
499  """
500  return {
501  "hash" : self.hash,
502  "object_type" : self.object_type,
503  "version" : self.version,
504  "insertion_time" : to_timestamp(self.insertion_time) if convert_timestamps else self.insertion_time,
505  "data" : self.data,
506  "streamer_info" : self.streamer_info
507  }
508  else:
509  def as_dicts(self, convert_timestamps=False):
510  """
511  Returns dictionary form of this Payload's metadata (not the actual Payload).
512  """
513  return {
514  "hash" : self.hash,
515  "object_type" : self.object_type,
516  "version" : self.version,
517  "insertion_time" : to_timestamp(self.insertion_time) if convert_timestamps else self.insertion_time
518  }
519 
520  def __repr__(self):
521  return '<Payload %r>' % self.hash
522 
523  def to_array(self):
524  return [self.hash, self.object_type, self.version, to_timestamp(self.insertion_time)]
525 
526  def parent_tags(self, **kwargs):
527  """
528  Returns `amount` parent Tags ordered by Tag name.
529  """
530  # check if this payload is empty
531  if self.empty:
532  return None
533  else:
534  kwargs["payload_hash"] = self.hash
535  query = self.session.query(IOV.tag_name)
536  query = apply_filters(query, IOV, **kwargs)
537  query_result = query.all()
538  tag_names = map(lambda entry : entry[0], query_result)
539  amount = kwargs["amount"] if "amount" in kwargs.keys() else None
540  tags = self.session.query(Tag).filter(Tag.name.in_(tag_names)).order_by(Tag.name).limit(amount).all()
542 
543  def all(self, **kwargs):
544  """
545  Returns `amount` Payloads ordered by Payload hash.
546  """
547  query = self.session.query(Payload)
548  query = apply_filters(query, Payload, **kwargs)
549  amount = kwargs["amount"] if "amount" in kwargs.keys() else None
550  query_result = query.order_by(Payload.hash).limit(amount).all()
551  return data_sources.json_data_node.make(query_result)
552 
553 
554  class Record(Base):
555  __tablename__ = 'RECORDS'
556 
557  headers = ["record", "object", "type"]
558 
559  record = Column(String(100), primary_key=True, nullable=False)
560  object = Column(String(200), nullable=False)
561  type = Column(String(20), nullable=False)
562 
563  def as_dicts(self):
564  """
565  Returns dictionary form of this Record.
566  """
567  return {
568  "record" : self.record,
569  "object" : self.object,
570  "type" : self.type
571  }
572 
573  def __repr__(self):
574  return '<Record %r>' % self.record
575 
576  def to_array(self):
577  return [self.record, self.object]
578 
579  def all(self, **kwargs):
580  """
581  Returns `amount` Records ordered by Record record.
582  """
583  query = self.session.query(Record)
584  query = apply_filters(query, Record, kwargs)
585  amount = kwargs["amount"] if "amount" in kwargs.keys() else None
586  query_result = query.order_by(Record.record).limit(amount).all()
587  return data_sources.json_data_node.make(query_result)
588 
589 
590  class Tag(Base):
591  __tablename__ = 'TAG'
592 
593  headers = ["name", "time_type", "object_type", "synchronization", "end_of_validity",\
594  "description", "last_validated_time", "insertion_time", "modification_time"]
595 
596  name = Column(String(4000), primary_key=True, nullable=False)
597  time_type = Column(String(4000), nullable=False)
598  object_type = Column(String(4000), nullable=False)
599  synchronization = Column(String(4000), nullable=False)
600  end_of_validity = Column(Integer, nullable=False)
601  description = Column(String(4000), nullable=False)
602  last_validated_time = Column(BigInteger, nullable=False)
603  insertion_time = Column(DateTime, nullable=False)
604  modification_time = Column(DateTime, nullable=False)
605 
606  record = None
607  label = None
608 
609  iovs_list = relationship('IOV', backref='tag')
610 
611  def __init__(self, dictionary={}, convert_timestamps=True):
612  # assign each entry in a kwargs
613  for key in dictionary:
614  try:
615  if convert_timestamps:
616  self.__dict__[key] = to_timestamp(dictionary[key])
617  else:
618  self.__dict__[key] = dictionary[key]
619  except KeyError as k:
620  continue
621 
622  def as_dicts(self, convert_timestamps=False):
623  """
624  Returns dictionary form of this Tag.
625  """
626  return {
627  "name" : self.name,
628  "time_type" : self.time_type,
629  "object_type" : self.object_type,
630  "synchronization" : self.synchronization,
631  "end_of_validity" : self.end_of_validity,
632  "description" : self.description,
633  "last_validated_time" : self.last_validated_time,
634  "insertion_time" : to_timestamp(self.insertion_time) if convert_timestamps else self.insertion_time,
635  "modification_time" : to_timestamp(self.modification_time) if convert_timestamps else self.modification_time,
636  "record" : self.record,
637  "label" : self.label
638  }
639 
640  def __repr__(self):
641  return '<Tag %r>' % self.name
642 
643  def to_array(self):
644  return [self.name, self.time_type, self.object_type, self.synchronization, to_timestamp(self.insertion_time), self.description]
645 
646  def parent_global_tags(self, **kwargs):
647  """
648  Returns `amount` Global Tags that contain this Tag.
649  """
650  if self.empty:
651  return None
652  else:
653  kwargs["tag_name"] = self.name
654  query = self.session.query(GlobalTagMap.global_tag_name)
655  query = apply_filters(query, GlobalTagMap, **kwargs)
656  query_result = query.all()
657  if len(query_result) != 0:
658  global_tag_names = map(lambda entry : entry[0], query_result)
659  amount = kwargs["amount"] if "amount" in kwargs.keys() else None
660  global_tags = self.session.query(GlobalTag).filter(GlobalTag.name.in_(global_tag_names)).order_by(GlobalTag.name).limit(amount).all()
661  else:
662  global_tags = None
663  return data_sources.json_data_node.make(global_tags)
664 
665  def all(self, **kwargs):
666  """
667  Returns `amount` Tags ordered by Tag name.
668  """
669  query = self.session.query(Tag)
670  query = apply_filters(query, Tag, **kwargs)
671  amount = kwargs["amount"] if "amount" in kwargs.keys() else None
672  query_result = query.order_by(Tag.name).limit(amount).all()
673  return data_sources.json_data_node.make(query_result)
674 
675  def iovs(self, **kwargs):
676  """
677  Returns `amount` IOVs that belong to this Tag ordered by IOV since.
678  """
679  # filter_params contains a list of columns to filter the iovs by
680  iovs_query = self.session.query(IOV).filter(IOV.tag_name == self.name)
681  iovs_query = apply_filters(iovs_query, IOV, **kwargs)
682  amount = kwargs["amount"] if "amount" in kwargs.keys() else None
683  iovs = iovs_query.order_by(IOV.since).limit(amount).all()
685 
686  def latest_iov(self):
687  """
688  Returns the single highest since held by this Tag.
689  Insertion times do not matter - if there are two IOVs at since > all others, both have the highest since.
690  """
691  iov = self.session.query(IOV).filter(IOV.tag_name == self.name).order_by(IOV.since.desc()).first()
692  return iov
693 
694  def __sub__(self, other):
695  """
696  Allows the arithmetic operator "-" to be applied to find the difference between two tags.
697  Note: diff() is symmetric, hence tag1 - tag2 = tag2 - tag1.
698  """
699  return self.diff(other)
700 
701  def diff(self, tag, short=False):
702  """
703  Returns the `diff` of the first Tag, and the Tag given.
704  Summary of algorithm:
705 
706  Compute the ordered set of iov sinces from both tags, and construct a list of triples, (since, tag1 hash, tag2 hash).
707  Set previous_payload1 and previous_payload2 to be the first hash values from each tag for the first since in the merged list.
708  Note: depending on where each Tag's IOVs start, 1 or both of these values can be None.
709  Set the first_since_in_equality_range = -1, which holds the since at which the last hashes were equal in the Tags.
710  For each triple (since, hash1, hash2),
711 
712  If the first_since_in_equality_range = None,
713  We are at the first since in the merged list, so set first_since... = since
714  Note: this is so set the previous... values for the second row, since the first row will never result in a print because
715  a row is only printed when past iovs have been processed.
716 
717  If either hash1 or hash2 is None, set it to the previous hash found
718  Note: if a Tag defines a hash for one since and then not another for n rows, the last defined hash will be carried through because of this.
719 
720  If the previous found hashes were equal, that means we have equality on the range [first_since_in_equality_range, since)
721  Note: we CANNOT conclude anything about the hashes corresponding to sinces >= since
722  because we have no looked forward, but we do know about the previous hashes.
723 
724  If hash1 != hash2,
725  The region of equality has ended, and so we have that [first_since_in_equality_range, since) is equal for both Tags
726  Hence, print that for this range we have equal hashes denoted by "=" in each hash column.
727 
728  Else:
729 
730  The previous hashes were not equal, BUT we must check that ths hashes on this row are not identical...
731  If the hashes on this row are the same as the hashes above (hash1 == previous_payload1 and hash2 == previous_payload2),
732  then we have not found the end of a region of equality!
733  If the hashes have changed, print a row.
734 
735  """
736  if tag.__class__.__name__ != "Tag":
737  raise TypeError("Tag given must be a CondDBFW Tag object.")
738 
739  # get lists of iovs
740  iovs1 = dict(map(lambda iov : (iov.since, iov.payload_hash), self.iovs().data()))
741  iovs2 = dict(map(lambda iov : (iov.since, iov.payload_hash), tag.iovs().data()))
742 
743  iovs = [(x, iovs1.get(x), iovs2.get(x)) for x in sorted(set(iovs1) | set(iovs2))]
744  iovs.append(("Infinity", 1, 2))
745  table = []
746 
747  previous_hash1 = None
748  previous_hash2 = None
749  first_since_in_equality_range = None
750  previous_equal = False
751 
752  for since, hash1, hash2 in iovs:
753 
754  if first_since_in_equality_range == None:
755  # if no start of a region of equality has been found,
756  # set it to the first since in the merged list
757  # then set the previous hashes and equality status to the current
758  # and continue to the next iteration of the loop
759  first_since_in_equality_range = since
760  previous_hash1 = hash1
761  previous_hash2 = hash2
762  previous_equal = hash1 == hash2
763  continue
764 
765  # if previous_payload1 is also None, comparisons still matters
766  # eg, if hash1 = None and hash2 != None, they are different and so should be shown in the table
767  if hash1 == None:
768  hash1 = previous_hash1
769  if hash2 == None:
770  hash2 = previous_hash2
771 
772  if previous_equal:
773  # previous hashes were equal, but only say they were if we have found an end of the region of equality
774  if hash1 != hash2:
775  table.append({"since" : "[%s, %s)" % (first_since_in_equality_range, since), self.name : "=", tag.name : "="})
776  # this is the start of a new equality range - might only be one row if the next row has unequal hashes!
777  first_since_in_equality_range = since
778  else:
779  # if the payloads are not equal, the equality range has ended and we should print a row
780  # we only print if EITHER hash has changed
781  # if both hashes are equal to the previous row, skip to the next row to try to find the beginning
782  # of a region of equality
783  if not(hash1 == previous_hash1 and hash2 == previous_hash2):
784  table.append({"since" : "[%s, %s)" % (first_since_in_equality_range, since), self.name : previous_hash1, tag.name : previous_hash2})
785  first_since_in_equality_range = since
786 
787  previous_hash1 = hash1
788  previous_hash2 = hash2
789  previous_equal = hash1 == hash2
790 
791  final_list = data_sources.json_data_node.make(table)
792  return final_list
793 
794  def merge_into(self, tag, range_object):
795  """
796  Given another connection, apply the 'merge' algorithm to merge the IOVs from this Tag
797  into the IOVs of the other Tag.
798 
799  tag : CondDBFW Tag object that the IOVs from this Tag should be merged into.
800 
801  range_object : CondDBFW.data_sources.Range object to describe the subset of IOVs that should be copied
802  from the database this Tag belongs to.
803 
804  Script originally written by Joshua Dawes,
805  and adapted by Giacomo Govi, Gianluca Cerminara and Giovanni Franzoni.
806  """
807 
808  oracle_tag = self
809  merged_tag_name = oracle_tag.name + "_merged"
810 
811  #since_range = Range(6285191841738391552,6286157702573850624)
812  since_range = range_object
813 
814  #sqlite = shell.connect("sqlite://EcallaserTag_80X_2016_prompt_corr20160519_2.db")
815 
816  #sqlite_tag = sqlite.tag().all().data()[0]
817  sqlite_tag = tag
818  if sqlite_tag == None:
819  raise TypeError("Tag to be merged cannot be None.")
820 
821  sqlite_iovs = sqlite_tag.iovs().data()
822  sqlite_tag.iovs().as_table()
823 
824  new_tag = self.connection.models["tag"](sqlite_tag.as_dicts(convert_timestamps=False), convert_timestamps=False)
825  new_tag.name = merged_tag_name
826 
827  imported_iovs = oracle_tag.iovs(since=since_range).data()
828 
829  for i in range(0, len(imported_iovs)):
830  imported_iovs[i].source = "oracle"
831 
832  sqlite_iovs_sinces=[]
833  for i in range(0, len(sqlite_iovs)):
834  sqlite_iovs[i].source = "sqlite"
835  sqlite_iovs_sinces.append(sqlite_iovs[i].since)
836 
837 
838  print(sqlite_iovs_sinces)
839 
840  new_iovs_list = imported_iovs + sqlite_iovs
841  new_iovs_list = sorted(new_iovs_list, key=lambda iov : iov.since)
842 
843  for (n, iov) in enumerate(new_iovs_list):
844  # if iov is from oracle, change its hash
845  if iov.source == "oracle":
846  if new_iovs_list[n].since in sqlite_iovs_sinces:
847  # if its since is already defined in the target iovs
848  # ignore it
849  iov.source = "tobedeleted"
850  else:
851  # otherwise, iterate down from n to find the last sqlite iov,
852  # and assign that hash
853  for i in reversed(range(0,n)):
854  if new_iovs_list[i].source == "sqlite":
855  print("change %s to %s at since %d" % (iov.payload_hash, new_iovs_list[i].payload_hash, iov.since))
856  iov.payload_hash = new_iovs_list[i].payload_hash
857  break
858 
859 
860  new_iov_list_copied = []
861 
862  for iov in new_iovs_list:
863  # only append IOVs that are not already defined in the target tag
864  if iov.source != "tobedeleted":
865  new_iov_list_copied.append(iov)
866 
867  new_iov_list_copied = sorted(new_iov_list_copied, key=lambda iov : iov.since)
868 
869  now = datetime.datetime.now()
870 
871  new_iovs = []
872  for iov in new_iov_list_copied:
873  new_iovs.append( self.connection.models["iov"](iov.as_dicts(convert_timestamps=False), convert_timestamps=False) )
874  for iov in new_iovs:
875  iov.insertion_time = now
876  iov.tag_name = merged_tag_name
877 
878  new_tag.iovs_list = new_iovs
879 
880  return new_tag
881  #sqlite.write_and_commit(new_iovs)
882 
883  classes = {"globaltag" : GlobalTag, "iov" : IOV, "globaltagmap" : GlobalTagMap,\
884  "payload" : Payload, "tag" : Tag, "Base" : Base}
885 
886  if class_name == None:
887  return classes
888  else:
889  return classes[class_name]
def _dicts_to_orm_objects(model, data)
def to_timestamp(obj)
Definition: utils.py:6
def get_start(self)
Definition: models.py:95
def session_independent_object(object, schema=None)
Definition: models.py:32
def Base(process)
S & print(S &os, JobReport::InputFile const &f)
Definition: JobReport.cc:65
def apply(self)
Definition: models.py:141
def get_regexp(self)
Definition: models.py:138
def generate(map_blobs=False, class_name=None)
Definition: models.py:188
def __init__(self)
Definition: models.py:92
def class_name_to_column(cls)
Definition: models.py:58
def __init__(self, regexp)
Definition: models.py:135
OutputIterator zip(InputIterator1 first1, InputIterator1 last1, InputIterator2 first2, InputIterator2 last2, OutputIterator result, Compare comp)
def __init__(self, start, end)
Definition: models.py:121
static std::string join(char **cmd)
Definition: RemoteFile.cc:18
def session_independent(objects)
Definition: models.py:51
char data[epos_bytes_allocation]
Definition: EPOS_Wrapper.h:82
def apply_filters(orm_query, orm_class, filters)
Definition: models.py:182
def status_full_name(status)
Definition: models.py:72
def __init__(self, centre, radius)
Definition: models.py:105
#define str(s)
def date_args_to_days(radius)
Definition: models.py:80
def get_end(self)
Definition: models.py:98
def apply_filter(orm_query, orm_class, attribute, value)
Definition: models.py:153