models.py
1 """
2 
3 Using Audrius' models from flask browser.
4 
5 This file contains models that are used with SQLAlchemy.
6 
7 Note: some things done in methods written in classes rely on the querying module adding extra information to classes,
8  so these will not work in a normal context outside the framework.
9 
10 """
from __future__ import print_function
from __future__ import absolute_import
import json
import datetime
import re  # needed for the SQLite regexp fallback used below

try:
    import sqlalchemy
    from sqlalchemy.orm import relationship, backref
    from sqlalchemy.ext.declarative import declarative_base
    # Note: Binary is only used for blobs, if they are mapped
    from sqlalchemy import Column, String, Integer, DateTime, Binary, ForeignKey, BigInteger, and_
except ImportError:
    print("You must be working inside a CMSSW environment. Try running 'cmsenv'.")
    exit()

from . import data_sources, data_formats
import urllib, urllib2, base64
from copy import deepcopy

# get utility functions
from .utils import to_timestamp, to_datetime, friendly_since

def session_independent_object(object, schema=None):
    # code originally taken from the write method in querying
    # will result in a new object that isn't attached to any session
    # hence, SQLAlchemy won't track changes

    if object.__class__.__name__.lower() == "payload":
        map_blobs = object.blobs_mapped
    else:
        map_blobs = False
    # need to change this to only generate the required class - can be slow...
    # extract class name of object
    cls = object.__class__
    class_name = class_name_to_column(cls).lower()
    new_class = generate(map_blobs=map_blobs, class_name=class_name)
    new_class.__table__.schema = schema
    new_object = new_class(object.as_dicts(), convert_timestamps=False)

    return new_object

def session_independent(objects):
    if isinstance(objects, list):
        return map(session_independent_object, objects)
    else:
        # assume objects is a single object (not a list)
        return session_independent_object(objects)

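# Illustrative sketch (not part of the original file): assuming `tag` is a Tag object
# obtained through a CondDBFW querying connection, detaching it yields a copy that no
# session tracks, so edits stay local:
#
#     detached = session_independent_object(tag)
#     detached.description = "edited offline"  # change is not flushed to the database
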
def class_name_to_column(cls):
    class_name = cls.__name__
    # a name that is entirely upper case (eg "IOV") is left untouched
    all_upper_case = all(character.isupper() for character in class_name)
    if all_upper_case:
        return class_name
    for n in range(0, len(class_name)):
        if class_name[n].isupper() and n != 0:
            class_name = str(class_name[0:n]) + "".join(["_", class_name[n].lower()]) + str(class_name[n+1:])
        elif class_name[n].isupper() and n == 0:
            class_name = str(class_name[0:n]) + "".join([class_name[n].lower()]) + str(class_name[n+1:])
    return class_name

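# For illustration (not in the original file), the conversion produces, for example:
#
#     class_name_to_column(GlobalTag)     ->  "global_tag"
#     class_name_to_column(GlobalTagMap)  ->  "global_tag_map"
#     class_name_to_column(IOV)           ->  "IOV"   (all upper case is left as-is)
#
# Callers such as session_independent_object() then apply .lower() to obtain the
# class name expected by generate().
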
def status_full_name(status):
    full_status = {
        'P': 'Pending',
        'R': 'Rejected',
        'A': 'Accepted'
    }
    return full_status[status]

def date_args_to_days(**radius):
    days = radius.get("days") if radius.get("days") != None else 0
    days += radius.get("weeks")*7 if radius.get("weeks") != None else 0
    # months and years are approximated as 28 and 365 days respectively
    days += radius.get("months")*28 if radius.get("months") != None else 0
    days += radius.get("years")*365 if radius.get("years") != None else 0
    return days

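# Quick check of the conversion (illustrative, not part of the original file):
#
#     date_args_to_days(days=3)                    ->  3
#     date_args_to_days(weeks=2, days=1)           ->  15
#     date_args_to_days(years=1, months=1, days=1) ->  394  (365 + 28 + 1)
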
class ContinuousRange(object):
    """
    Base class for Radius and Range - used for checking by the apply_filter function.
    """

    def __init__(self):
        pass

    def get_start(self):
        return self._start

    def get_end(self):
        return self._end

class Radius(ContinuousRange):
    """
    Used to tell proxy methods that a range of values defined by a centre and a radius should be queried for - special case of filter clauses.
    """
    def __init__(self, centre, radius):
        """
        centre and radius should be objects that can be added and subtracted.
        eg, centre could be a datetime.datetime object, and radius could be a datetime.timedelta.

        Radius and Range objects are assigned to properties of querying.connection objects, hence are given the database type.
        """
        self._centre = centre
        self._radius = radius
        self._start = self._centre - self._radius
        self._end = self._centre + self._radius

class Range(ContinuousRange):
    """
    Used to tell proxy methods that a range of values defined by a start and end point should be queried for - special case of filter clauses.
    """
    def __init__(self, start, end):
        """
        start and end should be comparable objects of the same type as the column being filtered,
        eg datetime.datetime objects or integer sinces.

        Radius and Range objects are assigned to properties of querying.connection objects, hence are given the database type.
        """
        self._start = start
        self._end = end

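# Illustrative sketch (not in the original file): these objects are typically passed as
# filter values so that apply_filter() below builds a between-style clause, eg
#
#     within_a_day = Radius(datetime.datetime(2016, 5, 19), datetime.timedelta(days=1))
#     within_a_day.get_start()   # datetime.datetime(2016, 5, 18, 0, 0)
#     within_a_day.get_end()     # datetime.datetime(2016, 5, 20, 0, 0)
#
#     since_window = Range(6285191841738391552, 6286157702573850624)
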
class RegExp(object):
    """
    Used to tell proxy methods that a regular expression should be used to query the column.
    """
    def __init__(self, regexp):
        self._regexp = regexp

    def get_regexp(self):
        return self._regexp

    def apply(self, field):
        # uses code from the conddb tool
        # database_type and connection_object are attached to this object by the querying module
        if self.database_type in ["oracle", "frontier"]:
            return sqlalchemy.func.regexp_like(field, self._regexp)
        elif self.database_type == "sqlite":
            # relies on the engine pool being a SingletonThreadPool
            self.connection_object.engine.pool.connect().create_function('regexp', 2, lambda data, regexp: re.search(regexp, data) is not None)
            return sqlalchemy.func.regexp(field, self._regexp)
        else:
            raise NotImplementedError("Can only apply regular expression search to Oracle, Frontier and SQLite.")

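# Illustrative sketch (not in the original file): a RegExp value is handled by
# apply_filter() below, which turns it into a database-specific regexp clause, eg
#
#     query = apply_filter(query, Tag, "name", RegExp(".*Ideal.*"))
#
# database_type and connection_object are assumed to have been attached to the RegExp
# instance by the querying module before it is applied.
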
def apply_filter(orm_query, orm_class, attribute, value):
    filter_attribute = getattr(orm_class, attribute)
    if isinstance(value, list):
        orm_query = orm_query.filter(filter_attribute.in_(value))
    elif isinstance(value, data_sources.json_list):
        orm_query = orm_query.filter(filter_attribute.in_(value.data()))
    elif type(value) in [Range, Radius]:

        minus = value.get_start()
        plus = value.get_end()
        orm_query = orm_query.filter(and_(filter_attribute >= minus, filter_attribute <= plus))

    elif isinstance(value, RegExp):

        # relies on the engine pool being a SingletonThreadPool

        if value.database_type in ["oracle", "frontier"]:
            regexp = sqlalchemy.func.regexp_like(filter_attribute, value.get_regexp())
        elif value.database_type == "sqlite":
            value.connection_object.engine.pool.connect().create_function('regexp', 2, lambda data, regexp: re.search(regexp, data) is not None)
            regexp = sqlalchemy.func.regexp(filter_attribute, value.get_regexp())
        else:
            raise NotImplementedError("Can only apply regular expression search to Oracle, Frontier and SQLite.")
        orm_query = orm_query.filter(regexp)

    else:
        orm_query = orm_query.filter(filter_attribute == value)
    return orm_query

def apply_filters(orm_query, orm_class, **filters):
    for (key, value) in filters.items():
        if key not in ["amount"]:
            orm_query = apply_filter(orm_query, orm_class, key, value)
    return orm_query

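# Illustrative sketch (not in the original file): assuming `session` comes from the
# querying module and IOV is one of the classes produced by generate() below, filters
# are applied by keyword, with "amount" reserved for limiting the result set, eg
#
#     query = session.query(IOV)
#     query = apply_filters(query, IOV, tag_name="SomeTag", since=Range(1, 1000))
#     iovs = query.order_by(IOV.since).limit(10).all()
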
def generate(map_blobs=False, class_name=None):

    Base = declarative_base()

    class GlobalTag(Base):
        __tablename__ = 'GLOBAL_TAG'

        headers = ["name", "validity", "description", "release", "insertion_time", "snapshot_time", "scenario", "workflow", "type"]

        name = Column(String(100), unique=True, nullable=False, primary_key=True)
        validity = Column(Integer, nullable=False)
        description = Column(String(4000), nullable=False)
        release = Column(String(100), nullable=False)
        insertion_time = Column(DateTime, nullable=False)
        snapshot_time = Column(DateTime, nullable=False)
        scenario = Column(String(100))
        workflow = Column(String(100))
        type = Column(String(1))
        tag_map = relationship('GlobalTagMap', backref='global_tag')

        def __init__(self, dictionary={}, convert_timestamps=True):
            # assign each entry in the given dictionary to an attribute
            for key in dictionary:
                try:
                    if convert_timestamps:
                        self.__dict__[key] = to_timestamp(dictionary[key])
                    else:
                        self.__dict__[key] = dictionary[key]
                except KeyError as k:
                    continue

        def __repr__(self):
            return '<GlobalTag %r>' % self.name

        def as_dicts(self, convert_timestamps=False):
            """
            Returns dictionary form of Global Tag object.
            """
            json_gt = {
                'name': self.name,
                'validity': self.validity,
                'description': self.description,
                'release': self.release,
                'insertion_time': to_timestamp(self.insertion_time) if convert_timestamps else self.insertion_time,
                'snapshot_time': to_timestamp(self.snapshot_time) if convert_timestamps else self.snapshot_time,
                'scenario': self.scenario,
                'workflow': self.workflow,
                'type': self.type
            }
            return json_gt

        def to_array(self):
            return [self.name, self.release, to_timestamp(self.insertion_time), to_timestamp(self.snapshot_time), self.description]

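        # Illustrative sketch (not part of the original file): these ORM classes can be built
        # directly from a dictionary, which is how session_independent_object() re-creates
        # detached copies, eg
        #
        #     gt_copy = GlobalTag(existing_gt.as_dicts(), convert_timestamps=False)
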
        def all(self, **kwargs):
            """
            Returns `amount` Global Tags ordered by Global Tag name.
            """
            query = self.session.query(GlobalTag)
            query = apply_filters(query, self.__class__, **kwargs)
            amount = kwargs["amount"] if "amount" in kwargs.keys() else None
            query_result = query.order_by(GlobalTag.name).limit(amount).all()
            gts = data_sources.json_data_node.make(query_result)
            return gts

        def tags(self, **kwargs):
            """
            Returns `amount` *Global Tag Maps* belonging to this Global Tag.
            """
            kwargs["global_tag_name"] = self.name
            all_tags = self.session.query(GlobalTagMap.global_tag_name, GlobalTagMap.record, GlobalTagMap.label, GlobalTagMap.tag_name)
            all_tags = apply_filters(all_tags, GlobalTagMap, **kwargs)
            amount = kwargs["amount"] if "amount" in kwargs.keys() else None
            all_tags = all_tags.order_by(GlobalTagMap.tag_name).limit(amount).all()
            column_names = ["global_tag_name", "record", "label", "tag_name"]
            all_tags = map(lambda row : dict(zip(column_names, map(to_timestamp, row))), all_tags)
            all_tags = data_formats._dicts_to_orm_objects(GlobalTagMap, all_tags)
            return data_sources.json_data_node.make(all_tags)

        def iovs(self, **kwargs):
            """
            Returns `amount` IOVs belonging to all Tags held in this Global Tag.
            For large Global Tags (which is most of them), this is VERY slow.
            It is highly recommended to instead use `tags().get_members("tag_name").data()` to get a `list` of tag names,
            and then get the IOVs from each Tag by name.

            At some point, this method may replace the method currently used.
            """
            # join global_tag_map onto iov (where insertion time <= gt snapshot) by tag_name + return results
            # first get only the IOVs that belong to Tags that are contained by this Global Tag

            # get IOVs belonging to a Tag contained by this Global Tag
            tag_names = self.tags().get_members("tag_name").data()
            iovs_all_tags = self.session.query(IOV).filter(IOV.tag_name.in_(tag_names))
            iovs_all_tags = apply_filters(iovs_all_tags, IOV, **kwargs)
            amount = kwargs["amount"] if "amount" in kwargs.keys() else None
            iovs_all_tags = iovs_all_tags.limit(amount).subquery()

            # now, join Global Tag Map table onto IOVs
            iovs_gt_tags = self.session.query(GlobalTagMap.tag_name, iovs_all_tags.c.since,\
                                              iovs_all_tags.c.payload_hash, iovs_all_tags.c.insertion_time)\
                                        .filter(GlobalTagMap.global_tag_name == self.name)\
                                        .join(iovs_all_tags, GlobalTagMap.tag_name == iovs_all_tags.c.tag_name)

            iovs_gt_tags = iovs_gt_tags.order_by(iovs_all_tags.c.since).all()

            column_names = ["tag_name", "since", "payload_hash", "insertion_time"]
            all_iovs = map(lambda row : dict(zip(column_names, row)), iovs_gt_tags)
            all_iovs = data_formats._dicts_to_orm_objects(IOV, all_iovs)

            return data_sources.json_data_node.make(all_iovs)

        def __sub__(self, other):
            """
            Allows Global Tag objects to be used with the "-" arithmetic operator to find their difference.
            Note: gt1 - gt2 = gt1.diff(gt2) ( = gt2 - gt1 = gt2.diff(gt1))
            """
            return self.diff(other)

        def diff(self, gt):
            """
            Returns the json_list of differences in the form of tuples:

            (record, label, tag name of gt1 (self), tag name of gt2 (gt))
            """

            record_label_to_tag_name1 = dict([((gt_map.record, gt_map.label), gt_map.tag_name) for gt_map in self.tags().data()])
            record_label_to_tag_name2 = dict([((gt_map.record, gt_map.label), gt_map.tag_name) for gt_map in gt.tags().data()])

            record_label_pairs = sorted(set(record_label_to_tag_name1) | set(record_label_to_tag_name2))

            table = []
            tags_pairs_with_differences = []

            for record_label in record_label_pairs:
                tag_name1 = record_label_to_tag_name1.get(record_label)
                tag_name2 = record_label_to_tag_name2.get(record_label)

                if tag_name1 == None or tag_name2 == None or tag_name1 != tag_name2:
                    table.append({
                        "Record" : record_label[0],
                        "Label" : record_label[1],
                        ("%s Tag" % self.name) : tag_name1,
                        ("%s Tag" % gt.name) : tag_name2
                    })

            return data_sources.json_data_node.make(table)

    class GlobalTagMap(Base):
        __tablename__ = 'GLOBAL_TAG_MAP'

        headers = ["global_tag_name", "record", "label", "tag_name"]

        global_tag_name = Column(String(100), ForeignKey('GLOBAL_TAG.name'), primary_key=True, nullable=False)
        record = Column(String(100), ForeignKey('RECORDS.record'), primary_key=True, nullable=False)
        label = Column(String(100), primary_key=True, nullable=False)
        tag_name = Column(String(100), ForeignKey('TAG.name'), nullable=False)

        def __init__(self, dictionary={}, convert_timestamps=True):
            # assign each entry in the given dictionary to an attribute
            for key in dictionary:
                try:
                    if convert_timestamps:
                        self.__dict__[key] = to_timestamp(dictionary[key])
                    else:
                        self.__dict__[key] = dictionary[key]
                except KeyError as k:
                    continue

        def __repr__(self):
            return '<GlobalTagMap %r>' % self.global_tag_name

        def as_dicts(self, convert_timestamps=False):
            """
            Returns dictionary form of this Global Tag Map.
            """
            json_gtm = {
                "global_tag_name" : str(self.global_tag_name),
                "record" : str(self.record),
                "label" : str(self.label),
                "tag_name" : str(self.tag_name)
            }
            return json_gtm

    class GlobalTagMapRequest(Base):
        __tablename__ = 'GLOBAL_TAG_MAP_REQUEST'

        queue = Column(String(100), primary_key=True, nullable=False)
        tag = Column(String(100), ForeignKey('TAG.name'), primary_key=True, nullable=False)
        record = Column(String(100), ForeignKey('RECORDS.record'), primary_key=True, nullable=False)
        label = Column(String(100), primary_key=True, nullable=False)
        status = Column(String(1), nullable=False)
        description = Column(String(4000), nullable=False)
        submitter_id = Column(Integer, nullable=False)
        time_submitted = Column(DateTime, nullable=False)
        last_edited = Column(DateTime, nullable=False)

        def __init__(self, dictionary={}, convert_timestamps=True):
            # assign each entry in the given dictionary to an attribute
            for key in dictionary:
                try:
                    if convert_timestamps:
                        self.__dict__[key] = to_timestamp(dictionary[key])
                    else:
                        self.__dict__[key] = dictionary[key]
                except KeyError as k:
                    continue

        headers = ["queue", "tag", "record", "label", "status", "description", "submitter_id", "time_submitted", "last_edited"]

        def as_dicts(self):
            """
            Returns dictionary form of this Global Tag Map Request.
            """
            return {
                "queue" : self.queue,
                "tag" : self.tag,
                "record" : self.record,
                "label" : self.label,
                "status" : self.status,
                "description" : self.description,
                "submitter_id" : self.submitter_id,
                "time_submitted" : self.time_submitted,
                "last_edited" : self.last_edited
            }

        def __repr__(self):
            return '<GlobalTagMapRequest %r>' % self.queue

        def to_array(self):
            return [self.queue, self.tag, self.record, self.label, status_full_name(self.status), to_timestamp(self.time_submitted), to_timestamp(self.last_edited)]

    class IOV(Base):
        __tablename__ = 'IOV'

        headers = ["tag_name", "since", "payload_hash", "insertion_time"]

        tag_name = Column(String(4000), ForeignKey('TAG.name'), primary_key=True, nullable=False)
        since = Column(Integer, primary_key=True, nullable=False)
        payload_hash = Column(String(40), ForeignKey('PAYLOAD.hash'), nullable=False)
        insertion_time = Column(DateTime, primary_key=True, nullable=False)

        def __init__(self, dictionary={}, convert_timestamps=True):
            # assign each entry in the given dictionary to an attribute
            for key in dictionary:
                try:
                    if convert_timestamps:
                        self.__dict__[key] = to_timestamp(dictionary[key])
                    else:
                        self.__dict__[key] = dictionary[key]
                except KeyError as k:
                    continue

        def as_dicts(self, convert_timestamps=False):
            """
            Returns dictionary form of this IOV.
            """
            return {
                "tag_name" : self.tag_name,
                "since" : self.since,
                "payload_hash" : self.payload_hash,
                "insertion_time" : to_timestamp(self.insertion_time) if convert_timestamps else self.insertion_time
            }

        def __repr__(self):
            return '<IOV %r>' % self.tag_name

        def to_array(self):
            return [self.since, to_timestamp(self.insertion_time), self.payload_hash]

        def all(self, **kwargs):
            """
            Returns `amount` IOVs ordered by since.
            """
            query = self.session.query(IOV)
            query = apply_filters(query, IOV, **kwargs)
            amount = kwargs["amount"] if "amount" in kwargs.keys() else None
            query_result = query.order_by(IOV.tag_name).order_by(IOV.since).limit(amount).all()
            return data_sources.json_data_node.make(query_result)

    class Payload(Base):
        __tablename__ = 'PAYLOAD'

        headers = ["hash", "object_type", "version", "insertion_time"]

        hash = Column(String(40), primary_key=True, nullable=False)
        object_type = Column(String(4000), nullable=False)
        version = Column(String(4000), nullable=False)
        insertion_time = Column(DateTime, nullable=False)
        if map_blobs:
            data = Column(Binary, nullable=False)
            streamer_info = Column(Binary, nullable=False)
        blobs_mapped = map_blobs

        def __init__(self, dictionary={}, convert_timestamps=True):
            # assign each entry in the given dictionary to an attribute
            for key in dictionary:
                try:
                    if convert_timestamps:
                        self.__dict__[key] = to_timestamp(dictionary[key])
                    else:
                        self.__dict__[key] = dictionary[key]
                except KeyError as k:
                    continue

        if map_blobs:
            def as_dicts(self, convert_timestamps=False):
                """
                Returns dictionary form of this Payload's metadata (not the actual Payload).
                """
                return {
                    "hash" : self.hash,
                    "object_type" : self.object_type,
                    "version" : self.version,
                    "insertion_time" : to_timestamp(self.insertion_time) if convert_timestamps else self.insertion_time,
                    "data" : self.data,
                    "streamer_info" : self.streamer_info
                }
        else:
            def as_dicts(self, convert_timestamps=False):
                """
                Returns dictionary form of this Payload's metadata (not the actual Payload).
                """
                return {
                    "hash" : self.hash,
                    "object_type" : self.object_type,
                    "version" : self.version,
                    "insertion_time" : to_timestamp(self.insertion_time) if convert_timestamps else self.insertion_time
                }

        def __repr__(self):
            return '<Payload %r>' % self.hash

        def to_array(self):
            return [self.hash, self.object_type, self.version, to_timestamp(self.insertion_time)]

        def parent_tags(self, **kwargs):
            """
            Returns `amount` parent Tags ordered by Tag name.
            """
            # check if this payload is empty
            if self.empty:
                return None
            else:
                kwargs["payload_hash"] = self.hash
                query = self.session.query(IOV.tag_name)
                query = apply_filters(query, IOV, **kwargs)
                query_result = query.all()
                tag_names = map(lambda entry : entry[0], query_result)
                amount = kwargs["amount"] if "amount" in kwargs.keys() else None
                tags = self.session.query(Tag).filter(Tag.name.in_(tag_names)).order_by(Tag.name).limit(amount).all()
                return data_sources.json_data_node.make(tags)

        def all(self, **kwargs):
            """
            Returns `amount` Payloads ordered by Payload hash.
            """
            query = self.session.query(Payload)
            query = apply_filters(query, Payload, **kwargs)
            amount = kwargs["amount"] if "amount" in kwargs.keys() else None
            query_result = query.order_by(Payload.hash).limit(amount).all()
            return data_sources.json_data_node.make(query_result)

    class Record(Base):
        __tablename__ = 'RECORDS'

        headers = ["record", "object", "type"]

        record = Column(String(100), primary_key=True, nullable=False)
        object = Column(String(200), nullable=False)
        type = Column(String(20), nullable=False)

        def as_dicts(self):
            """
            Returns dictionary form of this Record.
            """
            return {
                "record" : self.record,
                "object" : self.object,
                "type" : self.type
            }

        def __repr__(self):
            return '<Record %r>' % self.record

        def to_array(self):
            return [self.record, self.object]

        def all(self, **kwargs):
            """
            Returns `amount` Records ordered by the record column.
            """
            query = self.session.query(Record)
            query = apply_filters(query, Record, **kwargs)
            amount = kwargs["amount"] if "amount" in kwargs.keys() else None
            query_result = query.order_by(Record.record).limit(amount).all()
            return data_sources.json_data_node.make(query_result)

    class Tag(Base):
        __tablename__ = 'TAG'

        headers = ["name", "time_type", "object_type", "synchronization", "end_of_validity",\
                   "description", "last_validated_time", "insertion_time", "modification_time"]

        name = Column(String(4000), primary_key=True, nullable=False)
        time_type = Column(String(4000), nullable=False)
        object_type = Column(String(4000), nullable=False)
        synchronization = Column(String(4000), nullable=False)
        end_of_validity = Column(Integer, nullable=False)
        description = Column(String(4000), nullable=False)
        last_validated_time = Column(BigInteger, nullable=False)
        insertion_time = Column(DateTime, nullable=False)
        modification_time = Column(DateTime, nullable=False)

        record = None
        label = None

        iovs_list = relationship('IOV', backref='tag')

        def __init__(self, dictionary={}, convert_timestamps=True):
            # assign each entry in the given dictionary to an attribute
            for key in dictionary:
                try:
                    if convert_timestamps:
                        self.__dict__[key] = to_timestamp(dictionary[key])
                    else:
                        self.__dict__[key] = dictionary[key]
                except KeyError as k:
                    continue

        def as_dicts(self, convert_timestamps=False):
            """
            Returns dictionary form of this Tag.
            """
            return {
                "name" : self.name,
                "time_type" : self.time_type,
                "object_type" : self.object_type,
                "synchronization" : self.synchronization,
                "end_of_validity" : self.end_of_validity,
                "description" : self.description,
                "last_validated_time" : self.last_validated_time,
                "insertion_time" : to_timestamp(self.insertion_time) if convert_timestamps else self.insertion_time,
                "modification_time" : to_timestamp(self.modification_time) if convert_timestamps else self.modification_time,
                "record" : self.record,
                "label" : self.label
            }

        def __repr__(self):
            return '<Tag %r>' % self.name

        def to_array(self):
            return [self.name, self.time_type, self.object_type, self.synchronization, to_timestamp(self.insertion_time), self.description]

        def parent_global_tags(self, **kwargs):
            """
            Returns `amount` Global Tags that contain this Tag.
            """
            if self.empty:
                return None
            else:
                kwargs["tag_name"] = self.name
                query = self.session.query(GlobalTagMap.global_tag_name)
                query = apply_filters(query, GlobalTagMap, **kwargs)
                query_result = query.all()
                if len(query_result) != 0:
                    global_tag_names = map(lambda entry : entry[0], query_result)
                    amount = kwargs["amount"] if "amount" in kwargs.keys() else None
                    global_tags = self.session.query(GlobalTag).filter(GlobalTag.name.in_(global_tag_names)).order_by(GlobalTag.name).limit(amount).all()
                else:
                    global_tags = None
                return data_sources.json_data_node.make(global_tags)

        def all(self, **kwargs):
            """
            Returns `amount` Tags ordered by Tag name.
            """
            query = self.session.query(Tag)
            query = apply_filters(query, Tag, **kwargs)
            amount = kwargs["amount"] if "amount" in kwargs.keys() else None
            query_result = query.order_by(Tag.name).limit(amount).all()
            return data_sources.json_data_node.make(query_result)

        def iovs(self, **kwargs):
            """
            Returns `amount` IOVs that belong to this Tag ordered by IOV since.
            """
            # kwargs contains the columns to filter the IOVs by
            iovs_query = self.session.query(IOV).filter(IOV.tag_name == self.name)
            iovs_query = apply_filters(iovs_query, IOV, **kwargs)
            amount = kwargs["amount"] if "amount" in kwargs.keys() else None
            iovs = iovs_query.order_by(IOV.since).limit(amount).all()
            return data_sources.json_data_node.make(iovs)

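        # Illustrative sketch (not part of the original file): IOVs for a Tag can be
        # restricted with the filter objects defined above, eg
        #
        #     recent = some_tag.iovs(since=Range(1000, 2000), amount=10).data()
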
        def latest_iov(self):
            """
            Returns the single highest since held by this Tag.
            Insertion times do not matter - if there are two IOVs at since > all others, both have the highest since.
            """
            iov = self.session.query(IOV).filter(IOV.tag_name == self.name).order_by(IOV.since.desc()).first()
            return iov

        def __sub__(self, other):
            """
            Allows the arithmetic operator "-" to be applied to find the difference between two tags.
            Note: diff() is symmetric, hence tag1 - tag2 = tag2 - tag1.
            """
            return self.diff(other)

        def diff(self, tag, short=False):
            """
            Returns the `diff` of the first Tag, and the Tag given.
            Summary of algorithm:

            Compute the ordered set of IOV sinces from both Tags, and construct a list of triples, (since, tag1 hash, tag2 hash).
            Set previous_hash1 and previous_hash2 to be the first hash values from each Tag for the first since in the merged list.
            Note: depending on where each Tag's IOVs start, one or both of these values can be None.
            Set first_since_in_equality_range = None; it will hold the since at which the current range of equal (or unequal) hashes started.
            For each triple (since, hash1, hash2):

                If first_since_in_equality_range == None,
                    we are at the first since in the merged list, so set first_since_in_equality_range = since.
                    Note: this is done so that the previous_... values are set for the second row, since the first row can never
                    result in a printed row - a row is only printed once past IOVs have been processed.

                If either hash1 or hash2 is None, set it to the previous hash found.
                Note: if a Tag defines a hash for one since and then not another for n rows, the last defined hash is carried through because of this.

                If the previously found hashes were equal, we have equality on the range [first_since_in_equality_range, since).
                Note: we CANNOT conclude anything about the hashes corresponding to sinces >= since
                because we have not looked forward, but we do know about the previous hashes.

                    If hash1 != hash2,
                        the region of equality has ended, so [first_since_in_equality_range, since) is equal for both Tags.
                        Hence, print that for this range we have equal hashes, denoted by "=" in each hash column.

                Else:

                    the previous hashes were not equal, BUT we must check that the hashes on this row are not identical.
                    If the hashes on this row are the same as the hashes above (hash1 == previous_hash1 and hash2 == previous_hash2),
                    then we have not found the end of a region of equality!
                    If the hashes have changed, print a row.

            """
            if tag.__class__.__name__ != "Tag":
                raise TypeError("Tag given must be a CondDBFW Tag object.")

            # get lists of iovs
            iovs1 = dict(map(lambda iov : (iov.since, iov.payload_hash), self.iovs().data()))
            iovs2 = dict(map(lambda iov : (iov.since, iov.payload_hash), tag.iovs().data()))

            iovs = [(x, iovs1.get(x), iovs2.get(x)) for x in sorted(set(iovs1) | set(iovs2))]
            iovs.append(("Infinity", 1, 2))
            table = []

            previous_hash1 = None
            previous_hash2 = None
            first_since_in_equality_range = None
            previous_equal = False

            for since, hash1, hash2 in iovs:

                if first_since_in_equality_range == None:
                    # if no start of a region of equality has been found,
                    # set it to the first since in the merged list
                    # then set the previous hashes and equality status to the current
                    # and continue to the next iteration of the loop
                    first_since_in_equality_range = since
                    previous_hash1 = hash1
                    previous_hash2 = hash2
                    previous_equal = hash1 == hash2
                    continue

                # even if previous_hash1 is also None, the comparison still matters
                # eg, if hash1 = None and hash2 != None, they are different and so should be shown in the table
                if hash1 == None:
                    hash1 = previous_hash1
                if hash2 == None:
                    hash2 = previous_hash2

                if previous_equal:
                    # previous hashes were equal, but only say they were if we have found an end of the region of equality
                    if hash1 != hash2:
                        table.append({"since" : "[%s, %s)" % (first_since_in_equality_range, since), self.name : "=", tag.name : "="})
                        # this is the start of a new equality range - might only be one row if the next row has unequal hashes!
                        first_since_in_equality_range = since
                else:
                    # if the payloads are not equal, the equality range has ended and we should print a row
                    # we only print if EITHER hash has changed
                    # if both hashes are equal to the previous row, skip to the next row to try to find the beginning
                    # of a region of equality
                    if not(hash1 == previous_hash1 and hash2 == previous_hash2):
                        table.append({"since" : "[%s, %s)" % (first_since_in_equality_range, since), self.name : previous_hash1, tag.name : previous_hash2})
                        first_since_in_equality_range = since

                previous_hash1 = hash1
                previous_hash2 = hash2
                previous_equal = hash1 == hash2

            final_list = data_sources.json_data_node.make(table)
            return final_list

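        # Worked example (illustrative, not part of the original file): if tag1 has IOVs
        # {1: "A", 10: "B"} and tag2 has IOVs {1: "A", 5: "C", 10: "B"}, then
        # tag1.diff(tag2) yields rows equivalent to
        #
        #     since            tag1    tag2
        #     [1, 5)           =       =
        #     [5, 10)          A       C
        #     [10, Infinity)   =       =
        #
        # ie, the Tags only differ on the since range [5, 10), where tag1 provides payload
        # hash "A" and tag2 provides payload hash "C".
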
        def merge_into(self, tag, range_object):
            """
            Given another connection, apply the 'merge' algorithm to merge the IOVs from this Tag
            into the IOVs of the other Tag.

            tag : CondDBFW Tag object that the IOVs from this Tag should be merged into.

            range_object : CondDBFW.data_sources.Range object to describe the subset of IOVs that should be copied
            from the database this Tag belongs to.

            Script originally written by Joshua Dawes,
            and adapted by Giacomo Govi, Gianluca Cerminara and Giovanni Franzoni.
            """

            oracle_tag = self
            merged_tag_name = oracle_tag.name + "_merged"

            #since_range = Range(6285191841738391552,6286157702573850624)
            since_range = range_object

            #sqlite = shell.connect("sqlite://EcallaserTag_80X_2016_prompt_corr20160519_2.db")

            #sqlite_tag = sqlite.tag().all().data()[0]
            sqlite_tag = tag
            if sqlite_tag == None:
                raise TypeError("Tag to be merged cannot be None.")

            sqlite_iovs = sqlite_tag.iovs().data()
            sqlite_tag.iovs().as_table()

            new_tag = self.connection.models["tag"](sqlite_tag.as_dicts(convert_timestamps=False), convert_timestamps=False)
            new_tag.name = merged_tag_name

            imported_iovs = oracle_tag.iovs(since=since_range).data()

            for i in range(0, len(imported_iovs)):
                imported_iovs[i].source = "oracle"

            sqlite_iovs_sinces = []
            for i in range(0, len(sqlite_iovs)):
                sqlite_iovs[i].source = "sqlite"
                sqlite_iovs_sinces.append(sqlite_iovs[i].since)

            print(sqlite_iovs_sinces)

            new_iovs_list = imported_iovs + sqlite_iovs
            new_iovs_list = sorted(new_iovs_list, key=lambda iov : iov.since)

            for (n, iov) in enumerate(new_iovs_list):
                # if iov is from oracle, change its hash
                if iov.source == "oracle":
                    if new_iovs_list[n].since in sqlite_iovs_sinces:
                        # if its since is already defined in the target iovs
                        # ignore it
                        iov.source = "tobedeleted"
                    else:
                        # otherwise, iterate down from n to find the last sqlite iov,
                        # and assign that hash
                        for i in reversed(range(0, n)):
                            if new_iovs_list[i].source == "sqlite":
                                print("change %s to %s at since %d" % (iov.payload_hash, new_iovs_list[i].payload_hash, iov.since))
                                iov.payload_hash = new_iovs_list[i].payload_hash
                                break

            new_iov_list_copied = []

            for iov in new_iovs_list:
                # only append IOVs that are not already defined in the target tag
                if iov.source != "tobedeleted":
                    new_iov_list_copied.append(iov)

            new_iov_list_copied = sorted(new_iov_list_copied, key=lambda iov : iov.since)

            now = datetime.datetime.now()

            new_iovs = []
            for iov in new_iov_list_copied:
                new_iovs.append( self.connection.models["iov"](iov.as_dicts(convert_timestamps=False), convert_timestamps=False) )
            for iov in new_iovs:
                iov.insertion_time = now
                iov.tag_name = merged_tag_name

            new_tag.iovs_list = new_iovs

            return new_tag
            #sqlite.write_and_commit(new_iovs)

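    # Worked sketch of the merge step above (illustrative, not part of the original file):
    # suppose the oracle Tag provides IOVs [(100, "o1"), (200, "o2"), (300, "o3")] and the
    # sqlite Tag provides [(200, "s1"), (250, "s2")]. Then:
    #
    #   - since 100 has no earlier sqlite IOV, so it keeps hash "o1"
    #   - since 200 already exists in the sqlite Tag, so the oracle IOV is dropped ("tobedeleted")
    #   - since 300 is re-pointed at the closest earlier sqlite hash, "s2"
    #
    # giving merged IOVs [(100, "o1"), (200, "s1"), (250, "s2"), (300, "s2")] under the
    # new Tag "<oracle tag name>_merged".
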
    classes = {"globaltag" : GlobalTag, "iov" : IOV, "globaltagmap" : GlobalTagMap,\
               "payload" : Payload, "tag" : Tag, "Base" : Base}

    if class_name == None:
        return classes
    else:
        return classes[class_name]
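
# Illustrative sketch (not part of the original file): generate() builds a fresh set of
# declarative classes, optionally restricted to a single class by its column-style name
# (the lower-cased output of class_name_to_column):
#
#     classes = generate()                                  # dict of all generated ORM classes
#     Payload = generate(map_blobs=True, class_name="payload")
#     # this Payload class also maps the data and streamer_info blob columns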