
models Namespace Reference

Classes

class  ContinuousRange
 
class  Radius
 
class  Range
 
class  RegExp
 

Functions

def apply_filter (orm_query, orm_class, attribute, value)
 
def apply_filters (orm_query, orm_class, filters)
 
def class_name_to_column (cls)
 
def date_args_to_days (radius)
 
def generate (map_blobs=False, class_name=None)
 
def session_independent (objects)
 
def session_independent_object (object, schema=None)
 
def status_full_name (status)
 

Detailed Description

Based on Audrius' models from the Flask-based browser.

This file contains models that are used with SQLAlchemy.

Note: some of the methods defined on these classes rely on the querying module adding extra information to the classes at runtime,
      so they will not work in a normal context outside the framework.

Function Documentation

◆ apply_filter()

def models.apply_filter (   orm_query,
  orm_class,
  attribute,
  value 
)

Definition at line 152 of file models.py.

Referenced by apply_filters().

152 def apply_filter(orm_query, orm_class, attribute, value):
153  filter_attribute = getattr(orm_class, attribute)
154  if type(value) == list:
155  orm_query = orm_query.filter(filter_attribute.in_(value))
156  elif type(value) == data_sources.json_list:
157  orm_query = orm_query.filter(filter_attribute.in_(value.data()))
158  elif type(value) in [Range, Radius]:
159 
160  minus = value.get_start()
161  plus = value.get_end()
162  orm_query = orm_query.filter(and_(filter_attribute >= minus, filter_attribute <= plus))
163 
164  elif type(value) == RegExp:
165 
166  # Relies on the engine pool being a SingletonThreadPool, so the regexp function is registered on the same connection the query runs on
167 
168  if value.database_type in ["oracle", "frontier"]:
169  regexp = sqlalchemy.func.regexp_like(filter_attribute, value.get_regexp())
170  elif value.database_type == "sqlite":
171  value.connection_object.engine.pool.connect().create_function('regexp', 2, lambda data, regexp: re.search(regexp, data) is not None)
172  regexp = sqlalchemy.func.regexp(filter_attribute, value.get_regexp())
173  else:
174  raise NotImplementedError("Can only apply regular expression search to Oracle, Frontier and SQLite.")
175  orm_query = orm_query.filter(regexp)
176 
177  else:
178  orm_query = orm_query.filter(filter_attribute == value)
179  return orm_query
180 
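For illustration only, a minimal usage sketch (the `session` object, the generated `GlobalTag` class and the `Range(start, end)` constructor are assumptions here, not taken from the listing above):

# Minimal sketch: apply_filter() translates the value type into the matching SQLAlchemy filter.
query = session.query(GlobalTag)

# plain value -> equality filter
query = apply_filter(query, GlobalTag, "release", "CMSSW_8_0_1")

# list -> IN filter
query = apply_filter(query, GlobalTag, "name", ["gt_a", "gt_b"])

# Range (or Radius) -> bounded filter between get_start() and get_end()
query = apply_filter(query, GlobalTag, "validity", Range(0, 1000))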

◆ apply_filters()

def models.apply_filters (   orm_query,
  orm_class,
  filters 
)

Definition at line 181 of file models.py.

References apply_filter().

Referenced by generate(), and querying.factory.object().

181 def apply_filters(orm_query, orm_class, **filters):
182  for (key, value) in list(filters.items()):
183  if not(key in ["amount"]):
184  orm_query = apply_filter(orm_query, orm_class, key, value)
185  return orm_query
186 
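A sketch of the intended call pattern (again assuming `session` and a generated `Tag` class): every keyword except the reserved "amount" key is forwarded to apply_filter().

# Minimal sketch: each keyword becomes one apply_filter() call.
query = session.query(Tag)
query = apply_filters(query, Tag,
                      time_type="Run",      # equality filter
                      end_of_validity=-1,   # equality filter
                      amount=10)            # reserved key: skipped here, used later with limit()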

◆ class_name_to_column()

def models.class_name_to_column (   cls)

Definition at line 57 of file models.py.

References join(), FastTimerService_cff.range, and str.

Referenced by data_sources.json_list.as_table(), session_independent_object(), and data_formats_tests.data_formats_tests.test_dicts_to_orm_objects().

57 def class_name_to_column(cls):
58  class_name = cls.__name__
59  all_upper_case = True
60  for character in class_name:
61  all_upper_case = all_upper_case and character.isupper()
62  if all_upper_case:
63  return class_name
64  for n in range(0, len(class_name)):
65  if class_name[n].isupper() and n != 0:
66  class_name = str(class_name[0:n]) + "".join(["_", class_name[n].lower()]) + str(class_name[n+1:])
67  elif class_name[n].isupper() and n == 0:
68  class_name = str(class_name[0:n]) + "".join([class_name[n].lower()]) + str(class_name[n+1:])
69  return class_name
70 
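The conversion lowercases CamelCase class names and inserts underscores before interior capitals; names that are entirely upper case are returned unchanged. Illustrative values, assuming the classes produced by generate():

class_name_to_column(GlobalTag)      # -> "global_tag"
class_name_to_column(GlobalTagMap)   # -> "global_tag_map"
class_name_to_column(Payload)        # -> "payload"
class_name_to_column(IOV)            # -> "IOV"   (all upper case, returned as-is)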

◆ date_args_to_days()

def models.date_args_to_days (   radius)

Definition at line 79 of file models.py.

79 def date_args_to_days(**radius):
80  days = radius.get("days")
81  days += radius.get("weeks")*7 if radius.get("weeks") != None else 0
82  days += radius.get("months")*28 if radius.get("months") != None else 0
83  days += radius.get("years")+365 if radius.get("years") != None else 0
84  return days
85 
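The keyword arguments are collapsed into an approximate day count: weeks count as 7 days and months as 28. Note that `days` must always be supplied, since the other contributions are added onto it. For example:

date_args_to_days(days=3)                      # -> 3
date_args_to_days(days=1, weeks=2)             # -> 1 + 2*7 = 15
date_args_to_days(days=0, weeks=1, months=2)   # -> 0 + 7 + 2*28 = 63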

◆ generate()

def models.generate (   map_blobs = False,
  class_name = None 
)

Definition at line 187 of file models.py.

References submitPVValidationJobs.__init__(), data_formats._dicts_to_orm_objects(), python.cmstools.all(), apply_filters(), CustomConfigs.Base(), data, change_name.diff, ALCARECOTkAlBeamHalo_cff.filter, dqmdumpme.first, join(), remoteMonitoring_LASER_era2018_cfg.limit, data_sources.json_data_node.make(), genParticles_cff.map, mergeResourcesJson.merge_into(), print(), FastTimerService_cff.range, status_full_name(), str, triggerMatcherToHLTDebug_cfi.tags, conddb_time.to_timestamp(), and reco.zip().

Referenced by data_sources.json_list.as_table(), and session_independent_object().

187 def generate(map_blobs=False, class_name=None):
188 
189  Base = declarative_base()
190  schema = {"schema" : "CMS_CONDITIONS"}
191  fk_schema_prefix = ("%s." % schema["schema"]) if schema else ""
192 
193  class GlobalTag(Base):
194  __table_args__ = schema
195  __tablename__ = 'GLOBAL_TAG'
196 
197  headers = ["name", "validity", "description", "release", "insertion_time", "snapshot_time", "scenario", "workflow", "type"]
198 
199  name = Column(String(100), unique=True, nullable=False, primary_key=True)
200  validity = Column(Integer, nullable=False)
201  description = Column(String(4000), nullable=False)
202  release = Column(String(100), nullable=False)
203  insertion_time = Column(DateTime, nullable=False)
204  snapshot_time = Column(DateTime, nullable=False)
205  scenario = Column(String(100))
206  workflow = Column(String(100))
207  type = Column(String(1))
208  tag_map = relationship('GlobalTagMap', backref='global_tag')
209 
210  def __init__(self, dictionary={}, convert_timestamps=True):
211  # assign each entry of the dictionary as an instance attribute
212  for key in dictionary:
213  try:
214  if convert_timestamps:
215  self.__dict__[key] = to_timestamp(dictionary[key])
216  else:
217  self.__dict__[key] = dictionary[key]
218  except KeyError as k:
219  continue
220 
221  def __repr__(self):
222  return '<GlobalTag %r>' % self.name
223 
224  def as_dicts(self, convert_timestamps=False):
225  """
226  Returns dictionary form of Global Tag object.
227  """
228  json_gt = {
229  'name': self.name,
230  'validity': self.validity,
231  'description': self.description,
232  'release': self.release,
233  'insertion_time': to_timestamp(self.insertion_time) if convert_timestamps else self.insertion_time,
234  'snapshot_time': to_timestamp(self.snapshot_time) if convert_timestamps else self.snapshot_time,
235  'scenario': self.scenario,
236  'workflow': self.workflow,
237  'type': self.type
238  }
239  return json_gt
240 
241  def to_array(self):
242  return [self.name, self.release, to_timestamp(self.insertion_time), to_timestamp(self.snapshot_time), self.description]
243 
244  def all(self, **kwargs):
245  """
246  Returns `amount` Global Tags ordered by Global Tag name.
247  """
248  query = self.session.query(GlobalTag)
249  query = apply_filters(query, self.__class__, **kwargs)
250  amount = kwargs["amount"] if "amount" in list(kwargs.keys()) else None
251  query_result = query.order_by(GlobalTag.name).limit(amount).all()
252  gts = data_sources.json_data_node.make(query_result)
253  return gts
254 
255  def tags(self, **kwargs):
256  """
257  Returns `amount` *Global Tag Maps* belonging to this Global Tag.
258  """
259  kwargs["global_tag_name"] = self.name
260  all_tags = self.session.query(GlobalTagMap.global_tag_name, GlobalTagMap.record, GlobalTagMap.label, GlobalTagMap.tag_name)
261  all_tags = apply_filters(all_tags, GlobalTagMap, **kwargs)
262  amount = kwargs["amount"] if "amount" in list(kwargs.keys()) else None
263  all_tags = all_tags.order_by(GlobalTagMap.tag_name).limit(amount).all()
264  column_names = ["global_tag_name", "record", "label", "tag_name"]
265  all_tags = [dict(list(zip(column_names, list(map(to_timestamp, row))))) for row in all_tags]
266  all_tags = data_formats._dicts_to_orm_objects(GlobalTagMap, all_tags)
267  return data_sources.json_data_node.make(all_tags)
268 
269  def iovs(self, **kwargs):
270  """
271  Returns `amount` IOVs belonging to all Tags held in this Global Tag.
272  For large Global Tags (which is most of them), VERY slow.
273  Highly recommended to instead use `tags().get_members("tag_name").data()` to get a `list` of tag names,
274  and then get IOVs from each Tag name.
275 
276  At some point, this method may replace the method currently used.
277  """
278  # join global_tag_map onto iov (where insertion time <= gt snapshot) by tag_name + return results
279  # first get only the IOVs that belong to Tags that are contained by this Global Tag
280 
281  # get IOVs belonging to a Tag contained by this Global Tag
282  tag_names = self.tags().get_members("tag_name").data()
283  iovs_all_tags = self.session.query(IOV).filter(IOV.tag_name.in_(tag_names))
284  iovs_all_tags = apply_filters(iovs_all_tags, IOV, **kwargs)
285  amount = kwargs["amount"] if "amount" in list(kwargs.keys()) else None
286  iovs_all_tags = iovs_all_tags.limit(amount).subquery()
287 
288  # now, join Global Tag Map table onto IOVs
289  iovs_gt_tags = self.session.query(GlobalTagMap.tag_name, iovs_all_tags.c.since,\
290  iovs_all_tags.c.payload_hash, iovs_all_tags.c.insertion_time)\
291  .filter(GlobalTagMap.global_tag_name == self.name)\
292  .join(iovs_all_tags, GlobalTagMap.tag_name == iovs_all_tags.c.tag_name)
293 
294  iovs_gt_tags = iovs_gt_tags.order_by(iovs_all_tags.c.since).all()
295 
296  column_names = ["tag_name", "since", "payload_hash", "insertion_time"]
297  all_iovs = [dict(list(zip(column_names, row))) for row in iovs_gt_tags]
298  all_iovs = data_formats._dicts_to_orm_objects(IOV, all_iovs)
299 
300  return data_sources.json_data_node.make(all_iovs)
301 
302  def __sub__(self, other):
303  """
304  Allows Global Tag objects to be used with the "-" arithmetic operator to find their difference.
305  Note: gt1 - gt2 = gt1.diff(gt2) ( = gt2 - gt1 = gt2.diff(gt1))
306  """
307  return self.diff(other)
308 
309  def diff(self, gt):
310  """
311  Returns the json_list of differences in the form of tuples:
312 
313  (record, label, tag name of gt1 (self), tag name of gt2 (gt))
314  """
315 
316  record_label_to_tag_name1 = dict([((gt_map.record, gt_map.label), gt_map.tag_name) for gt_map in self.tags().data()])
317  record_label_to_tag_name2 = dict([((gt_map.record, gt_map.label), gt_map.tag_name) for gt_map in gt.tags().data()])
318 
319  record_label_pairs = sorted(set(record_label_to_tag_name1) | set(record_label_to_tag_name2))
320 
321  table = []
322  tags_pairs_with_differences = []
323 
324  for record_label in record_label_pairs:
325  tag_name1 = record_label_to_tag_name1.get(record_label)
326  tag_name2 = record_label_to_tag_name2.get(record_label)
327 
328  if tag_name1 == None or tag_name2 == None or tag_name1 != tag_name2:
329  table.append({
330  "Record" : record_label[0],
331  "Label" : record_label[1],
332  ("%s Tag" % self.name) : tag_name1,
333  ("%s Tag" % gt.name) : tag_name2
334  })
335 
336  return data_sources.json_data_node.make(table)
337 
338  class GlobalTagMap(Base):
339  __table_args__ = schema
340  __tablename__ = 'GLOBAL_TAG_MAP'
341 
342  headers = ["global_tag_name", "record", "label", "tag_name"]
343 
344  global_tag_name = Column(String(100), ForeignKey(fk_schema_prefix + 'GLOBAL_TAG.name'), primary_key=True, nullable=False)
345  record = Column(String(100), ForeignKey(fk_schema_prefix + 'RECORDS.record'), primary_key=True, nullable=False)
346  label = Column(String(100), primary_key=True, nullable=False)
347  tag_name = Column(String(100), ForeignKey(fk_schema_prefix + 'TAG.name'), nullable=False)
348 
349  def __init__(self, dictionary={}, convert_timestamps=True):
350  # assign each entry of the dictionary as an instance attribute
351  for key in dictionary:
352  try:
353  if convert_timestamps:
354  self.__dict__[key] = to_timestamp(dictionary[key])
355  else:
356  self.__dict__[key] = dictionary[key]
357  except KeyError as k:
358  continue
359 
360  def __repr__(self):
361  return '<GlobalTagMap %r>' % self.global_tag_name
362 
363  def as_dicts(self, convert_timestamps=False):
364  """
365  Returns dictionary form of this Global Tag Map.
366  """
367  json_gtm = {
368  "global_tag_name" : str(self.global_tag_name),
369  "record" : str(self.record),
370  "label" : str(self.label),
371  "tag_name" : str(self.tag_name)
372  }
373  return json_gtm
374 
375 
376  class GlobalTagMapRequest(Base):
377  __table_args__ = schema
378  __tablename__ = 'GLOBAL_TAG_MAP_REQUEST'
379 
380  queue = Column(String(100), primary_key=True, nullable=False)
381  tag = Column(String(100), ForeignKey(fk_schema_prefix + 'TAG.name'), primary_key=True, nullable=False)
382  record = Column(String(100), ForeignKey(fk_schema_prefix + 'RECORDS.record'), primary_key=True, nullable=False)
383  label = Column(String(100), primary_key=True, nullable=False)
384  status = Column(String(1), nullable=False)
385  description = Column(String(4000), nullable=False)
386  submitter_id = Column(Integer, nullable=False)
387  time_submitted = Column(DateTime, nullable=False)
388  last_edited = Column(DateTime, nullable=False)
389 
390  def __init__(self, dictionary={}, convert_timestamps=True):
391  # assign each entry of the dictionary as an instance attribute
392  for key in dictionary:
393  try:
394  if convert_timestamps:
395  self.__dict__[key] = to_timestamp(dictionary[key])
396  else:
397  self.__dict__[key] = dictionary[key]
398  except KeyError as k:
399  continue
400 
401  headers = ["queue", "tag", "record", "label", "status", "description", "submitter_id", "time_submitted", "last_edited"]
402 
403  def as_dicts(self):
404  """
405  Returns dictionary form of this Global Tag Map Request.
406  """
407  return {
408  "queue" : self.queue,
409  "tag" : self.tag,
410  "record" : self.record,
411  "label" : self.label,
412  "status" : self.status,
413  "description" : self.description,
414  "submitter_id" : self.submitter_id,
415  "time_submitted" : self.time_submitted,
416  "last_edited" : self.last_edited
417  }
418 
419  def __repr__(self):
420  return '<GlobalTagMapRequest %r>' % self.queue
421 
422  def to_array(self):
423  return [self.queue, self.tag, self.record, self.label, status_full_name(self.status), to_timestamp(self.time_submitted), to_timestamp(self.last_edited)]
424 
425  class IOV(Base):
426  __table_args__ = schema
427  __tablename__ = 'IOV'
428 
429  headers = ["tag_name", "since", "payload_hash", "insertion_time"]
430 
431  tag_name = Column(String(4000), ForeignKey(fk_schema_prefix + 'TAG.name'), primary_key=True, nullable=False)
432  since = Column(Integer, primary_key=True, nullable=False)
433  payload_hash = Column(String(40), ForeignKey(fk_schema_prefix + 'PAYLOAD.hash'), nullable=False)
434  insertion_time = Column(DateTime, primary_key=True, nullable=False)
435 
436  def __init__(self, dictionary={}, convert_timestamps=True):
437  # assign each entry of the dictionary as an instance attribute
438  for key in dictionary:
439  try:
440  if convert_timestamps:
441  self.__dict__[key] = to_timestamp(dictionary[key])
442  else:
443  self.__dict__[key] = dictionary[key]
444  except KeyError as k:
445  continue
446 
447  def as_dicts(self, convert_timestamps=False):
448  """
449  Returns dictionary form of this IOV.
450  """
451  return {
452  "tag_name" : self.tag_name,
453  "since" : self.since,
454  "payload_hash" : self.payload_hash,
455  "insertion_time" : to_timestamp(self.insertion_time) if convert_timestamps else self.insertion_time
456  }
457 
458  def __repr__(self):
459  return '<IOV %r>' % self.tag_name
460 
461  def to_array(self):
462  return [self.since, to_timestamp(self.insertion_time), self.payload_hash]
463 
464  def all(self, **kwargs):
465  """
466  Returns `amount` IOVs ordered by since.
467  """
468  query = self.session.query(IOV)
469  query = apply_filters(query, IOV, **kwargs)
470  amount = kwargs["amount"] if "amount" in list(kwargs.keys()) else None
471  query_result = query.order_by(IOV.tag_name).order_by(IOV.since).limit(amount).all()
472  return data_sources.json_data_node.make(query_result)
473 
474 
475  class Payload(Base):
476  __table_args__ = schema
477  __tablename__ = 'PAYLOAD'
478 
479  headers = ["hash", "object_type", "version", "insertion_time"]
480 
481  hash = Column(String(40), primary_key=True, nullable=False)
482  object_type = Column(String(4000), nullable=False)
483  version = Column(String(4000), nullable=False)
484  insertion_time = Column(DateTime, nullable=False)
485  if map_blobs:
486  data = Column(Binary, nullable=False)
487  streamer_info = Column(Binary, nullable=False)
488  blobs_mapped = map_blobs
489 
490  def __init__(self, dictionary={}, convert_timestamps=True):
491  # assign each entry of the dictionary as an instance attribute
492  for key in dictionary:
493  try:
494  if convert_timestamps:
495  self.__dict__[key] = to_timestamp(dictionary[key])
496  else:
497  self.__dict__[key] = dictionary[key]
498  except KeyError as k:
499  continue
500 
501  if map_blobs:
502  def as_dicts(self, convert_timestamps=False):
503  """
504  Returns dictionary form of this Payload's metadata (not the actual Payload).
505  """
506  return {
507  "hash" : self.hash,
508  "object_type" : self.object_type,
509  "version" : self.version,
510  "insertion_time" : to_timestamp(self.insertion_time) if convert_timestamps else self.insertion_time,
511  "data" : self.data,
512  "streamer_info" : self.streamer_info
513  }
514  else:
515  def as_dicts(self, convert_timestamps=False):
516  """
517  Returns dictionary form of this Payload's metadata (not the actual Payload).
518  """
519  return {
520  "hash" : self.hash,
521  "object_type" : self.object_type,
522  "version" : self.version,
523  "insertion_time" : to_timestamp(self.insertion_time) if convert_timestamps else self.insertion_time
524  }
525 
526  def __repr__(self):
527  return '<Payload %r>' % self.hash
528 
529  def to_array(self):
530  return [self.hash, self.object_type, self.version, to_timestamp(self.insertion_time)]
531 
532  def parent_tags(self, **kwargs):
533  """
534  Returns `amount` parent Tags ordered by Tag name.
535  """
536  # check if this payload is empty
537  if self.empty:
538  return None
539  else:
540  kwargs["payload_hash"] = self.hash
541  query = self.session.query(IOV.tag_name)
542  query = apply_filters(query, IOV, **kwargs)
543  query_result = query.all()
544  tag_names = [entry[0] for entry in query_result]
545  amount = kwargs["amount"] if "amount" in list(kwargs.keys()) else None
546  tags = self.session.query(Tag).filter(Tag.name.in_(tag_names)).order_by(Tag.name).limit(amount).all()
547  return data_sources.json_data_node.make(tags)
548 
549  def all(self, **kwargs):
550  """
551  Returns `amount` Payloads ordered by Payload hash.
552  """
553  query = self.session.query(Payload)
554  query = apply_filters(query, Payload, **kwargs)
555  amount = kwargs["amount"] if "amount" in list(kwargs.keys()) else None
556  query_result = query.order_by(Payload.hash).limit(amount).all()
557  return data_sources.json_data_node.make(query_result)
558 
559 
560  class Record(Base):
561  __table_args__ = schema
562  __tablename__ = 'RECORDS'
563 
564  headers = ["record", "object", "type"]
565 
566  record = Column(String(100), primary_key=True, nullable=False)
567  object = Column(String(200), nullable=False)
568  type = Column(String(20), nullable=False)
569 
570  def as_dicts(self):
571  """
572  Returns dictionary form of this Record.
573  """
574  return {
575  "record" : self.record,
576  "object" : self.object,
577  "type" : self.type
578  }
579 
580  def __repr__(self):
581  return '<Record %r>' % self.record
582 
583  def to_array(self):
584  return [self.record, self.object]
585 
586  def all(self, **kwargs):
587  """
588  Returns `amount` Records ordered by record name.
589  """
590  query = self.session.query(Record)
591  query = apply_filters(query, Record, **kwargs)
592  amount = kwargs["amount"] if "amount" in list(kwargs.keys()) else None
593  query_result = query.order_by(Record.record).limit(amount).all()
594  return data_sources.json_data_node.make(query_result)
595 
596 
597  class Tag(Base):
598  __table_args__ = schema
599  __tablename__ = 'TAG'
600 
601  headers = ["name", "time_type", "object_type", "synchronization", "end_of_validity",\
602  "description", "last_validated_time", "insertion_time", "modification_time", "protection_code"]
603 
604  name = Column(String(4000), primary_key=True, nullable=False)
605  time_type = Column(String(4000), nullable=False)
606  object_type = Column(String(4000), nullable=False)
607  synchronization = Column(String(4000), nullable=False)
608  end_of_validity = Column(Integer, nullable=False)
609  description = Column(String(4000), nullable=False)
610  last_validated_time = Column(BigInteger, nullable=False)
611  insertion_time = Column(DateTime, nullable=False)
612  modification_time = Column(DateTime, nullable=False)
613  protection_code = Column(Integer, nullable=False)
614 
615  record = None
616  label = None
617 
618  iovs_list = relationship('IOV', backref='tag')
619 
620  def __init__(self, dictionary={}, convert_timestamps=True):
621  # assign each entry of the dictionary as an instance attribute
622  for key in dictionary:
623  try:
624  if convert_timestamps:
625  self.__dict__[key] = to_timestamp(dictionary[key])
626  else:
627  self.__dict__[key] = dictionary[key]
628  except KeyError as k:
629  continue
630 
631  def as_dicts(self, convert_timestamps=False):
632  """
633  Returns dictionary form of this Tag.
634  """
635  return {
636  "name" : self.name,
637  "time_type" : self.time_type,
638  "object_type" : self.object_type,
639  "synchronization" : self.synchronization,
640  "end_of_validity" : self.end_of_validity,
641  "description" : self.description,
642  "last_validated_time" : self.last_validated_time,
643  "insertion_time" : to_timestamp(self.insertion_time) if convert_timestamps else self.insertion_time,
644  "modification_time" : to_timestamp(self.modification_time) if convert_timestamps else self.modification_time,
645  "record" : self.record,
646  "label" : self.label
647  }
648 
649  def __repr__(self):
650  return '<Tag %r>' % self.name
651 
652  def to_array(self):
653  return [self.name, self.time_type, self.object_type, self.synchronization, to_timestamp(self.insertion_time), self.description]
654 
655  def parent_global_tags(self, **kwargs):
656  """
657  Returns `amount` Global Tags that contain this Tag.
658  """
659  if self.empty:
660  return None
661  else:
662  kwargs["tag_name"] = self.name
663  query = self.session.query(GlobalTagMap.global_tag_name)
664  query = apply_filters(query, GlobalTagMap, **kwargs)
665  query_result = query.all()
666  if len(query_result) != 0:
667  global_tag_names = [entry[0] for entry in query_result]
668  amount = kwargs["amount"] if "amount" in list(kwargs.keys()) else None
669  global_tags = self.session.query(GlobalTag).filter(GlobalTag.name.in_(global_tag_names)).order_by(GlobalTag.name).limit(amount).all()
670  else:
671  global_tags = None
672  return data_sources.json_data_node.make(global_tags)
673 
674  def all(self, **kwargs):
675  """
676  Returns `amount` Tags ordered by Tag name.
677  """
678  query = self.session.query(Tag)
679  query = apply_filters(query, Tag, **kwargs)
680  amount = kwargs["amount"] if "amount" in list(kwargs.keys()) else None
681  query_result = query.order_by(Tag.name).limit(amount).all()
682  return data_sources.json_data_node.make(query_result)
683 
684  def iovs(self, **kwargs):
685  """
686  Returns `amount` IOVs that belong to this Tag ordered by IOV since.
687  """
688  # filter_params contains a list of columns to filter the iovs by
689  iovs_query = self.session.query(IOV).filter(IOV.tag_name == self.name)
690  iovs_query = apply_filters(iovs_query, IOV, **kwargs)
691  amount = kwargs["amount"] if "amount" in list(kwargs.keys()) else None
692  iovs = iovs_query.order_by(IOV.since).limit(amount).all()
693  return data_sources.json_data_node.make(iovs)
694 
695  def latest_iov(self):
696  """
697  Returns the single highest since held by this Tag.
698  Insertion times do not matter - if two IOVs share the highest since, both count as the latest.
699  """
700  iov = self.session.query(IOV).filter(IOV.tag_name == self.name).order_by(IOV.since.desc()).first()
701  return iov
702 
703  def __sub__(self, other):
704  """
705  Allows the arithmetic operator "-" to be applied to find the difference between two tags.
706  Note: diff() is symmetric, hence tag1 - tag2 = tag2 - tag1.
707  """
708  return self.diff(other)
709 
710  def diff(self, tag, short=False):
711  """
712  Returns the `diff` of the first Tag, and the Tag given.
713  Summary of algorithm:
714 
715  Compute the ordered set of iov sinces from both tags, and construct a list of triples, (since, tag1 hash, tag2 hash).
716  Set previous_payload1 and previous_payload2 to be the first hash values from each tag for the first since in the merged list.
717  Note: depending on where each Tag's IOVs start, one or both of these values can be None.
718  Set the first_since_in_equality_range = -1, which holds the since at which the last hashes were equal in the Tags.
719  For each triple (since, hash1, hash2),
720 
721  If the first_since_in_equality_range = None,
722  We are at the first since in the merged list, so set first_since... = since
723  Note: this is to set the previous... values for the second row, since the first row will never result in a print because
724  a row is only printed when past iovs have been processed.
725 
726  If either hash1 or hash2 is None, set it to the previous hash found
727  Note: if a Tag defines a hash for one since and then not another for n rows, the last defined hash will be carried through because of this.
728 
729  If the previous found hashes were equal, that means we have equality on the range [first_since_in_equality_range, since)
730  Note: we CANNOT conclude anything about the hashes corresponding to sinces >= since
731  because we have not looked forward, but we do know about the previous hashes.
732 
733  If hash1 != hash2,
734  The region of equality has ended, and so we have that [first_since_in_equality_range, since) is equal for both Tags
735  Hence, print that for this range we have equal hashes denoted by "=" in each hash column.
736 
737  Else:
738 
739  The previous hashes were not equal, BUT we must check that the hashes on this row are not identical...
740  If the hashes on this row are the same as the hashes above (hash1 == previous_payload1 and hash2 == previous_payload2),
741  then we have not found the end of a region of equality!
742  If the hashes have changed, print a row.
743 
744  """
745  if tag.__class__.__name__ != "Tag":
746  raise TypeError("Tag given must be a CondDBFW Tag object.")
747 
748  # get lists of iovs
749  iovs1 = dict([(iov.since, iov.payload_hash) for iov in self.iovs().data()])
750  iovs2 = dict([(iov.since, iov.payload_hash) for iov in tag.iovs().data()])
751 
752  iovs = [(x, iovs1.get(x), iovs2.get(x)) for x in sorted(set(iovs1) | set(iovs2))]
753  iovs.append(("Infinity", 1, 2))
754  table = []
755 
756  previous_hash1 = None
757  previous_hash2 = None
758  first_since_in_equality_range = None
759  previous_equal = False
760 
761  for since, hash1, hash2 in iovs:
762 
763  if first_since_in_equality_range == None:
764  # if no start of a region of equality has been found,
765  # set it to the first since in the merged list
766  # then set the previous hashes and equality status to the current
767  # and continue to the next iteration of the loop
768  first_since_in_equality_range = since
769  previous_hash1 = hash1
770  previous_hash2 = hash2
771  previous_equal = hash1 == hash2
772  continue
773 
774  # if previous_payload1 is also None, comparisons still matter
775  # eg, if hash1 = None and hash2 != None, they are different and so should be shown in the table
776  if hash1 == None:
777  hash1 = previous_hash1
778  if hash2 == None:
779  hash2 = previous_hash2
780 
781  if previous_equal:
782  # previous hashes were equal, but only say they were if we have found an end of the region of equality
783  if hash1 != hash2:
784  table.append({"since" : "[%s, %s)" % (first_since_in_equality_range, since), self.name : "=", tag.name : "="})
785  # this is the start of a new equality range - might only be one row if the next row has unequal hashes!
786  first_since_in_equality_range = since
787  else:
788  # if the payloads are not equal, the equality range has ended and we should print a row
789  # we only print if EITHER hash has changed
790  # if both hashes are equal to the previous row, skip to the next row to try to find the beginning
791  # of a region of equality
792  if not(hash1 == previous_hash1 and hash2 == previous_hash2):
793  table.append({"since" : "[%s, %s)" % (first_since_in_equality_range, since), self.name : previous_hash1, tag.name : previous_hash2})
794  first_since_in_equality_range = since
795 
796  previous_hash1 = hash1
797  previous_hash2 = hash2
798  previous_equal = hash1 == hash2
799 
800  final_list = data_sources.json_data_node.make(table)
801  return final_list
802 
803  def merge_into(self, tag, range_object):
804  """
805  Given another connection, apply the 'merge' algorithm to merge the IOVs from this Tag
806  into the IOVs of the other Tag.
807 
808  tag : CondDBFW Tag object that the IOVs from this Tag should be merged into.
809 
810  range_object : CondDBFW.data_sources.Range object to describe the subset of IOVs that should be copied
811  from the database this Tag belongs to.
812 
813  Script originally written by Joshua Dawes,
814  and adapted by Giacomo Govi, Gianluca Cerminara and Giovanni Franzoni.
815  """
816 
817  oracle_tag = self
818  merged_tag_name = oracle_tag.name + "_merged"
819 
820  #since_range = Range(6285191841738391552,6286157702573850624)
821  since_range = range_object
822 
823  #sqlite = shell.connect("sqlite://EcallaserTag_80X_2016_prompt_corr20160519_2.db")
824 
825  #sqlite_tag = sqlite.tag().all().data()[0]
826  sqlite_tag = tag
827  if sqlite_tag == None:
828  raise TypeError("Tag to be merged cannot be None.")
829 
830  sqlite_iovs = sqlite_tag.iovs().data()
831  sqlite_tag.iovs().as_table()
832 
833  new_tag = self.connection.models["tag"](sqlite_tag.as_dicts(convert_timestamps=False), convert_timestamps=False)
834  new_tag.name = merged_tag_name
835 
836  imported_iovs = oracle_tag.iovs(since=since_range).data()
837 
838  for i in range(0, len(imported_iovs)):
839  imported_iovs[i].source = "oracle"
840 
841  sqlite_iovs_sinces=[]
842  for i in range(0, len(sqlite_iovs)):
843  sqlite_iovs[i].source = "sqlite"
844  sqlite_iovs_sinces.append(sqlite_iovs[i].since)
845 
846 
847  print(sqlite_iovs_sinces)
848 
849  new_iovs_list = imported_iovs + sqlite_iovs
850  new_iovs_list = sorted(new_iovs_list, key=lambda iov : iov.since)
851 
852  for (n, iov) in enumerate(new_iovs_list):
853  # if iov is from oracle, change its hash
854  if iov.source == "oracle":
855  if new_iovs_list[n].since in sqlite_iovs_sinces:
856  # if its since is already defined in the target iovs
857  # ignore it
858  iov.source = "tobedeleted"
859  else:
860  # otherwise, iterate down from n to find the last sqlite iov,
861  # and assign that hash
862  for i in reversed(list(range(0,n))):
863  if new_iovs_list[i].source == "sqlite":
864  print("change %s to %s at since %d" % (iov.payload_hash, new_iovs_list[i].payload_hash, iov.since))
865  iov.payload_hash = new_iovs_list[i].payload_hash
866  break
867 
868 
869  new_iov_list_copied = []
870 
871  for iov in new_iovs_list:
872  # only append IOVs that are not already defined in the target tag
873  if iov.source != "tobedeleted":
874  new_iov_list_copied.append(iov)
875 
876  new_iov_list_copied = sorted(new_iov_list_copied, key=lambda iov : iov.since)
877 
878  now = datetime.datetime.utcnow()
879 
880  new_iovs = []
881  for iov in new_iov_list_copied:
882  new_iovs.append( self.connection.models["iov"](iov.as_dicts(convert_timestamps=False), convert_timestamps=False) )
883  for iov in new_iovs:
884  iov.insertion_time = now
885  iov.tag_name = merged_tag_name
886 
887  new_tag.iovs_list = new_iovs
888 
889  return new_tag
890  #sqlite.write_and_commit(new_iovs)
891 
892 
893  class TagAuthorization(Base):
894  __table_args__ = schema
895  __tablename__ = 'TAG_AUTHORIZATION'
896 
897  headers = ["tag_name", "access_type", "credential", "credential_type"]
898 
899  tag_name = Column(String(100), ForeignKey(fk_schema_prefix + 'TAG.name'), primary_key=True, nullable=False)
900  access_type = Column(Integer, nullable=False)
901  credential = Column(String(100), primary_key=True, nullable=False)
902  credential_type = Column(Integer, nullable=False)
903 
904  def as_dicts(self):
905  """
906  Returns dictionary form of this Tag Authorization.
907  """
908  return {
909  "tag_name" : self.tag_name,
910  "access_type" : self.access_type,
911  "credential" : self.credential,
912  "credential_type" : self.credential_type
913  }
914 
915  def __repr__(self):
916  return '<TagAuthorization %s %s %s %s>' % (self.tag_name, self.access_type, self.credential, self.credential_type)
917 
918  def to_array(self):
919  return [self.tag_name, self.access_type, self.credential, self.credential_type]
920 
921  def all(self, **kwargs):
922  """
923  Returns `amount` Tag Authorizations ordered by tag name.
924  """
925  query = self.session.query(TagAuthorization)
926  query = apply_filters(query, TagAuthorization, **kwargs)
927  amount = kwargs["amount"] if "amount" in list(kwargs.keys()) else None
928  query_result = query.order_by(TagAuthorization.tag_name).limit(amount).all()
929  return data_sources.json_data_node.make(query_result)
930 
931  classes = {"globaltag" : GlobalTag, "iov" : IOV, "globaltagmap" : GlobalTagMap,\
932  "payload" : Payload, "tag" : Tag, "TagAuthorization": TagAuthorization, "Base" : Base}
933 
934  if class_name == None:
935  return classes
936  else:
937  return classes[class_name]
938 
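A usage sketch (how the querying module attaches a session to the returned classes is assumed, not shown in this listing): generate() returns either the full dictionary of mapped classes or, when class_name is given, a single class.

# Minimal sketch of generate() usage.
classes = generate()                    # dict: {"globaltag": ..., "iov": ..., "payload": ..., ...}
GlobalTag = classes["globaltag"]
IOV = classes["iov"]

Payload = generate(map_blobs=True, class_name="payload")   # single class, with blob columns mapped

# The convenience methods (all(), tags(), iovs(), diff(), "-", ...) additionally require the
# querying framework to set a session on the instances; outside that framework they will not work.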

◆ session_independent()

def models.session_independent (   objects)

Definition at line 50 of file models.py.

References genParticles_cff.map, and session_independent_object().

50 def session_independent(objects):
51  if type(objects) == list:
52  return list(map(session_independent_object, objects))
53  else:
54  # assume objects is a single object (not a list)
55  return session_independent_object(objects)
56 
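A sketch of the two accepted inputs (the `tag` object and `iov_list` are assumed to be ORM objects previously obtained through the framework):

detached_tag  = session_independent(tag)        # single object -> single detached copy
detached_iovs = session_independent(iov_list)   # list of objects -> list of detached copies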

◆ session_independent_object()

def models.session_independent_object (   object,
  schema = None 
)

Definition at line 31 of file models.py.

References class_name_to_column(), and generate().

Referenced by session_independent(), and querying.connection.write().

31 def session_independent_object(object, schema=None):
32  # code originally taken from the write method in querying
33  # will result in a new object that isn't attached to any session
34  # hence, SQLAlchemy won't track changes
35 
36  if object.__class__.__name__.lower() == "payload":
37  map_blobs = object.blobs_mapped
38  else:
39  map_blobs = False
40  # need to change this to only generate the required class - can be slow...
41  # extract class name of object
42  cls = object.__class__
43  class_name = class_name_to_column(cls).lower()
44  new_class = generate(map_blobs=map_blobs, class_name=class_name)
45  new_class.__table__.schema = schema
46  new_object = new_class(object.as_dicts(), convert_timestamps=False)
47 
48  return new_object
49 
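Illustration (the queried `tag` object and the schema value are assumptions): the copy is rebuilt from as_dicts() on a freshly generated class, so it is not attached to any session and SQLAlchemy will not track changes to it.

detached = session_independent_object(tag)                                    # schema left as None
detached_oracle = session_independent_object(tag, schema="CMS_CONDITIONS")    # override the table schema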

◆ status_full_name()

def models.status_full_name (   status)

Definition at line 71 of file models.py.

Referenced by generate().

71 def status_full_name(status):
72  full_status = {
73  'P': 'Pending',
74  'R': 'Rejected', 'A': 'Accepted'
75  }
76  return full_status[status]
77 
78 
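The single-character status codes stored in GLOBAL_TAG_MAP_REQUEST.status map to full names as follows:

status_full_name("P")   # -> "Pending"
status_full_name("A")   # -> "Accepted"
status_full_name("R")   # -> "Rejected"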