Classes | Functions
models Namespace Reference

Classes

class  ContinuousRange
 
class  Radius
 
class  Range
 
class  RegExp
 

Functions

def apply_filter
 
def apply_filters
 
def class_name_to_column
 
def date_args_to_days
 
def generate
 
def session_independent
 
def session_independent_object
 
def status_full_name
 

Detailed Description

Using Audrius' models from flask browser.

This file contains models that are used with SQLAlchemy.

Note: some of the methods defined in these classes rely on the querying module adding extra information to the classes,
      so they will not work in a normal context outside the framework.
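For orientation, the session-dependent methods defined inside generate() (all(), iovs(), tags(), parent_tags(), ...) expect a session attribute that the querying module attaches to each object. A minimal, hedged sketch of how these models are normally reached (the import path, connection string and accessor names are assumptions; the shell.connect call mirrors the commented-out example in Tag.merge_into below):

    # Illustrative only: obtain session-attached model objects through the framework,
    # rather than instantiating the classes in this file directly.
    from CondCore.Utilities.CondDBFW import shell   # import path assumed

    connection = shell.connect("sqlite://conditions.db")   # connection string is illustrative
    tag = connection.tag(name="SomeTagName")               # session-attached Tag object (accessor assumed)
    print(tag.iovs(amount=10).data())                      # works because the session is present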

Function Documentation

def models.apply_filter (   orm_query,
  orm_class,
  attribute,
  value 
)

Definition at line 152 of file models.py.

Referenced by apply_filters().

153 def apply_filter(orm_query, orm_class, attribute, value):
154     filter_attribute = getattr(orm_class, attribute)
155     if type(value) == list:
156         orm_query = orm_query.filter(filter_attribute.in_(value))
157     elif type(value) == data_sources.json_list:
158         orm_query = orm_query.filter(filter_attribute.in_(value.data()))
159     elif type(value) in [Range, Radius]:
160 
161         minus = value.get_start()
162         plus = value.get_end()
163         orm_query = orm_query.filter(and_(filter_attribute >= minus, filter_attribute <= plus))
164 
165     elif type(value) == RegExp:
166 
167         # Relies on being a SingletonThreadPool
168 
169         if value.database_type in ["oracle", "frontier"]:
170             regexp = sqlalchemy.func.regexp_like(filter_attribute, value.get_regexp())
171         elif value.database_type == "sqlite":
172             value.connection_object.engine.pool.connect().create_function('regexp', 2, lambda data, regexp: re.search(regexp, data) is not None)
173             regexp = sqlalchemy.func.regexp(filter_attribute, value.get_regexp())
174         else:
175             raise NotImplementedError("Can only apply regular expression search to Oracle, Frontier and SQLite.")
176         orm_query = orm_query.filter(regexp)
177 
178     else:
179         orm_query = orm_query.filter(filter_attribute == value)
180     return orm_query
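A hedged illustration of the dispatch on the value's type (session, lo and hi are placeholders; IOV is one of the classes built by generate() below, and Range is the models.Range helper listed under Classes):

    # Illustrative only: the type of `value` decides how the filter is built.
    query = session.query(IOV)
    query = apply_filter(query, IOV, "tag_name", ["TagA", "TagB"])   # list          -> IN (...)
    query = apply_filter(query, IOV, "since", Range(lo, hi))         # Range/Radius  -> bounded by get_start()/get_end()
    query = apply_filter(query, IOV, "payload_hash", "abc123")       # anything else -> simple equality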
def models.apply_filters (   orm_query,
  orm_class,
  filters 
)

Definition at line 181 of file models.py.

References apply_filter().

Referenced by generate(), and querying.factory.object().

182 def apply_filters(orm_query, orm_class, **filters):
183     for (key, value) in list(filters.items()):
184         if not(key in ["amount"]):
185             orm_query = apply_filter(orm_query, orm_class, key, value)
186     return orm_query
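The "amount" key is skipped deliberately: it is not a column filter, and the all()-style methods generated below read it from kwargs and pass it to .limit() themselves. A small hedged sketch:

    # Illustrative only: "amount" never becomes a WHERE clause.
    query = session.query(Payload)
    query = apply_filters(query, Payload, object_type="EcalCondObject", amount=5)
    result = query.order_by(Payload.hash).limit(5).all()   # the caller applies the limit separately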
def models.class_name_to_column (   cls)

Definition at line 57 of file models.py.

References join(), sistrip::SpyUtilities.range(), and str.

Referenced by data_sources.json_list.as_table(), session_independent_object(), and data_formats_tests.data_formats_tests.test_dicts_to_orm_objects().

57 
58 def class_name_to_column(cls):
59     class_name = cls.__name__
60     all_upper_case = True
61     for character in class_name:
62         all_upper_case = all_upper_case and character.isupper()  # stay False once a lower-case character is seen
63     if all_upper_case:
64         return class_name
65     for n in range(0, len(class_name)):
66         if class_name[n].isupper() and n != 0:
67             class_name = str(class_name[0:n]) + "".join(["_", class_name[n].lower()]) + str(class_name[n+1:])
68         elif class_name[n].isupper() and n == 0:
69             class_name = str(class_name[0:n]) + "".join([class_name[n].lower()]) + str(class_name[n+1:])
70     return class_name
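Worked examples of the conversion, using class names from generate() below:

    class_name_to_column(GlobalTagMap)   # -> "global_tag_map"
    class_name_to_column(Payload)        # -> "payload"
    class_name_to_column(IOV)            # -> "IOV"  (an all-upper-case name is returned unchanged)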
def models.date_args_to_days (   radius)

Definition at line 79 of file models.py.

79 
80 def date_args_to_days(**radius):
81     days = radius.get("days")
82     days += radius.get("weeks")*7 if radius.get("weeks") != None else 0
83     days += radius.get("months")*28 if radius.get("months") != None else 0
84     days += radius.get("years")*365 if radius.get("years") != None else 0
85     return days
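A quick worked example (the 28-day month is an approximation made by the function itself):

    date_args_to_days(days=3, weeks=2)             # 3 + 2*7          = 17
    date_args_to_days(days=0, months=1, years=1)   # 0 + 1*28 + 1*365 = 393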
def models.generate (   map_blobs = False,
  class_name = None 
)

Definition at line 187 of file models.py.

References submitPVValidationJobs.__init__(), data_formats._dicts_to_orm_objects(), python.cmstools.all(), apply_filters(), CustomConfigs.Base(), data, change_name.diff, alcazmumu_cfi.filter, first, run_AlCaRecoTriggerBitsUpdateWorkflow.IOV, run_AlCaRecoTriggerBitsUpdateWorkflow.iovs, join(), MessageLogger_cff.limit, data_sources.json_data_node.make(), print(), sistrip::SpyUtilities.range(), status_full_name(), str, getPayloadData.tags, conddb_time.to_timestamp(), and ComparisonHelper.zip().

Referenced by data_sources.json_list.as_table(), and session_independent_object().

188 def generate(map_blobs=False, class_name=None):
189 
190  Base = declarative_base()
191  schema = {"schema" : "CMS_CONDITIONS"}
192  fk_schema_prefix = ("%s." % schema["schema"]) if schema else ""
193 
194  class GlobalTag(Base):
195  __table_args__ = schema
196  __tablename__ = 'GLOBAL_TAG'
197 
198  headers = ["name", "validity", "description", "release", "insertion_time", "snapshot_time", "scenario", "workflow", "type"]
199 
200  name = Column(String(100), unique=True, nullable=False, primary_key=True)
201  validity = Column(Integer, nullable=False)
202  description = Column(String(4000), nullable=False)
203  release = Column(String(100), nullable=False)
204  insertion_time = Column(DateTime, nullable=False)
205  snapshot_time = Column(DateTime, nullable=False)
206  scenario = Column(String(100))
207  workflow = Column(String(100))
208  type = Column(String(1))
209  tag_map = relationship('GlobalTagMap', backref='global_tag')
210 
211  def __init__(self, dictionary={}, convert_timestamps=True):
212  # assign each entry in a kwargs
213  for key in dictionary:
214  try:
215  if convert_timestamps:
216  self.__dict__[key] = to_timestamp(dictionary[key])
217  else:
218  self.__dict__[key] = dictionary[key]
219  except KeyError as k:
220  continue
221 
222  def __repr__(self):
223  return '<GlobalTag %r>' % self.name
224 
225  def as_dicts(self, convert_timestamps=False):
226  """
227  Returns dictionary form of Global Tag object.
228  """
229  json_gt = {
230  'name': self.name,
231  'validity': self.validity,
232  'description': self.description,
233  'release': self.release,
234  'insertion_time': to_timestamp(self.insertion_time) if convert_timestamps else self.insertion_time,
235  'snapshot_time': to_timestamp(self.snapshot_time) if convert_timestamps else self.snapshot_time,
236  'scenario': self.scenario,
237  'workflow': self.workflow,
238  'type': self.type
239  }
240  return json_gt
241 
242  def to_array(self):
243  return [self.name, self.release, to_timestamp(self.insertion_time), to_timestamp(self.snapshot_time), self.description]
244 
245  def all(self, **kwargs):
246  """
247  Returns `amount` Global Tags ordered by Global Tag name.
248  """
249  query = self.session.query(GlobalTag)
250  query = apply_filters(query, self.__class__, **kwargs)
251  amount = kwargs["amount"] if "amount" in list(kwargs.keys()) else None
252  query_result = query.order_by(GlobalTag.name).limit(amount).all()
253  gts = data_sources.json_data_node.make(query_result)
254  return gts
255 
256  def tags(self, **kwargs):
257  """
258  Returns `amount` *Global Tag Maps* belonging to this Global Tag.
259  """
260  kwargs["global_tag_name"] = self.name
261  all_tags = self.session.query(GlobalTagMap.global_tag_name, GlobalTagMap.record, GlobalTagMap.label, GlobalTagMap.tag_name)
262  all_tags = apply_filters(all_tags, GlobalTagMap, **kwargs)
263  amount = kwargs["amount"] if "amount" in list(kwargs.keys()) else None
264  all_tags = all_tags.order_by(GlobalTagMap.tag_name).limit(amount).all()
265  column_names = ["global_tag_name", "record", "label", "tag_name"]
266  all_tags = [dict(list(zip(column_names, list(map(to_timestamp, row))))) for row in all_tags]
267  all_tags = data_formats._dicts_to_orm_objects(GlobalTagMap, all_tags)
268  return data_sources.json_data_node.make(all_tags)
269 
270  def iovs(self, **kwargs):
271  """
272  Returns `amount` IOVs belonging to all Tags held in this Global Tag.
273  For large Global Tags (which is most of them), VERY slow.
274  Highly recommended to instead use `tags().get_members("tag_name").data()` to get a `list` of tag names,
275  and then get IOVs from each Tag name.
276 
277  At some point, this method may replace the method currently used.
278  """
279  # join global_tag_map onto iov (where insertion time <= gt snapshot) by tag_name + return results
280  # first get only the IOVs that belong to Tags that are contained by this Global Tag
281 
282  # get IOVs belonging to a Tag contained by this Global Tag
283  tag_names = self.tags().get_members("tag_name").data()
284  iovs_all_tags = self.session.query(IOV).filter(IOV.tag_name.in_(tag_names))
285  iovs_all_tags = apply_filters(iovs_all_tags, IOV, **kwargs)
286  amount = kwargs["amount"] if "amount" in list(kwargs.keys()) else None
287  iovs_all_tags = iovs_all_tags.limit(amount).subquery()
288 
289  # now, join Global Tag Map table onto IOVs
290  iovs_gt_tags = self.session.query(GlobalTagMap.tag_name, iovs_all_tags.c.since,\
291  iovs_all_tags.c.payload_hash, iovs_all_tags.c.insertion_time)\
292  .filter(GlobalTagMap.global_tag_name == self.name)\
293  .join(iovs_all_tags, GlobalTagMap.tag_name == iovs_all_tags.c.tag_name)
294 
295  iovs_gt_tags = iovs_gt_tags.order_by(iovs_all_tags.c.since).all()
296 
297  column_names = ["tag_name", "since", "payload_hash", "insertion_time"]
298  all_iovs = [dict(list(zip(column_names, row))) for row in iovs_gt_tags]
299  all_iovs = data_formats._dicts_to_orm_objects(IOV, all_iovs)
300 
301  return data_sources.json_data_node.make(all_iovs)
302 
303  def __sub__(self, other):
304  """
305  Allows Global Tag objects to be used with the "-" arithmetic operator to find their difference.
306  Note: gt1 - gt2 = gt1.diff(gt2) ( = gt2 - gt1 = gt2.diff(gt1))
307  """
308  return self.diff(other)
309 
310  def diff(self, gt):
311  """
312  Returns the json_list of differences in the form of tuples:
313 
314  (record, label, tag name of gt1 (self), tag name of gt2 (gt))
315  """
316 
317  record_label_to_tag_name1 = dict([((gt_map.record, gt_map.label), gt_map.tag_name) for gt_map in self.tags().data()])
318  record_label_to_tag_name2 = dict([((gt_map.record, gt_map.label), gt_map.tag_name) for gt_map in gt.tags().data()])
319 
320  record_label_pairs = sorted(set(record_label_to_tag_name1) | set(record_label_to_tag_name2))
321 
322  table = []
323  tags_pairs_with_differences = []
324 
325  for record_label in record_label_pairs:
326  tag_name1 = record_label_to_tag_name1.get(record_label)
327  tag_name2 = record_label_to_tag_name2.get(record_label)
328 
329  if tag_name1 == None or tag_name2 == None or tag_name1 != tag_name2:
330  table.append({
331  "Record" : record_label[0],
332  "Label" : record_label[1],
333  ("%s Tag" % self.name) : tag_name1,
334  ("%s Tag" % gt.name) : tag_name2
335  })
336 
337  return data_sources.json_data_node.make(table)
338 
339  class GlobalTagMap(Base):
340  __table_args__ = schema
341  __tablename__ = 'GLOBAL_TAG_MAP'
342 
343  headers = ["global_tag_name", "record", "label", "tag_name"]
344 
345  global_tag_name = Column(String(100), ForeignKey(fk_schema_prefix + 'GLOBAL_TAG.name'), primary_key=True, nullable=False)
346  record = Column(String(100), ForeignKey(fk_schema_prefix + 'RECORDS.record'), primary_key=True, nullable=False)
347  label = Column(String(100), primary_key=True, nullable=False)
348  tag_name = Column(String(100), ForeignKey(fk_schema_prefix + 'TAG.name'), nullable=False)
349 
350  def __init__(self, dictionary={}, convert_timestamps=True):
351  # assign each entry in a kwargs
352  for key in dictionary:
353  try:
354  if convert_timestamps:
355  self.__dict__[key] = to_timestamp(dictionary[key])
356  else:
357  self.__dict__[key] = dictionary[key]
358  except KeyError as k:
359  continue
360 
361  def __repr__(self):
362  return '<GlobalTagMap %r>' % self.global_tag_name
363 
364  def as_dicts(self, convert_timestamps=False):
365  """
366  Returns dictionary form of this Global Tag Map.
367  """
368  json_gtm = {
369  "global_tag_name" : str(self.global_tag_name),
370  "record" : str(self.record),
371  "label" : str(self.label),
372  "tag_name" : str(self.tag_name)
373  }
374  return json_gtm
375 
376 
377  class GlobalTagMapRequest(Base):
378  __table_args__ = schema
379  __tablename__ = 'GLOBAL_TAG_MAP_REQUEST'
380 
381  queue = Column(String(100), primary_key=True, nullable=False)
382  tag = Column(String(100), ForeignKey(fk_schema_prefix + 'TAG.name'), primary_key=True, nullable=False)
383  record = Column(String(100), ForeignKey(fk_schema_prefix + 'RECORDS.record'), primary_key=True, nullable=False)
384  label = Column(String(100), primary_key=True, nullable=False)
385  status = Column(String(1), nullable=False)
386  description = Column(String(4000), nullable=False)
387  submitter_id = Column(Integer, nullable=False)
388  time_submitted = Column(DateTime, nullable=False)
389  last_edited = Column(DateTime, nullable=False)
390 
391  def __init__(self, dictionary={}, convert_timestamps=True):
392  # assign each entry in a kwargs
393  for key in dictionary:
394  try:
395  if convert_timestamps:
396  self.__dict__[key] = to_timestamp(dictionary[key])
397  else:
398  self.__dict__[key] = dictionary[key]
399  except KeyError as k:
400  continue
401 
402  headers = ["queue", "tag", "record", "label", "status", "description", "submitter_id", "time_submitted", "last_edited"]
403 
404  def as_dicts(self):
405  """
406  Returns dictionary form of this Global Tag Map Request.
407  """
408  return {
409  "queue" : self.queue,
410  "tag" : self.tag,
411  "record" : self.record,
412  "label" : self.label,
413  "status" : self.status,
414  "description" : self.description,
415  "submitter_id" : self.submitter_id,
416  "time_submitted" : self.time_submitted,
417  "last_edited" : self.last_edited
418  }
419 
420  def __repr__(self):
421  return '<GlobalTagMapRequest %r>' % self.queue
422 
423  def to_array(self):
424  return [self.queue, self.tag, self.record, self.label, status_full_name(self.status), to_timestamp(self.time_submitted), to_timestamp(self.last_edited)]
425 
426  class IOV(Base):
427  __table_args__ = schema
428  __tablename__ = 'IOV'
429 
430  headers = ["tag_name", "since", "payload_hash", "insertion_time"]
431 
432  tag_name = Column(String(4000), ForeignKey(fk_schema_prefix + 'TAG.name'), primary_key=True, nullable=False)
433  since = Column(Integer, primary_key=True, nullable=False)
434  payload_hash = Column(String(40), ForeignKey(fk_schema_prefix + 'PAYLOAD.hash'), nullable=False)
435  insertion_time = Column(DateTime, primary_key=True, nullable=False)
436 
437  def __init__(self, dictionary={}, convert_timestamps=True):
438  # assign each entry in a kwargs
439  for key in dictionary:
440  try:
441  if convert_timestamps:
442  self.__dict__[key] = to_timestamp(dictionary[key])
443  else:
444  self.__dict__[key] = dictionary[key]
445  except KeyError as k:
446  continue
447 
448  def as_dicts(self, convert_timestamps=False):
449  """
450  Returns dictionary form of this IOV.
451  """
452  return {
453  "tag_name" : self.tag_name,
454  "since" : self.since,
455  "payload_hash" : self.payload_hash,
456  "insertion_time" : to_timestamp(self.insertion_time) if convert_timestamps else self.insertion_time
457  }
458 
459  def __repr__(self):
460  return '<IOV %r>' % self.tag_name
461 
462  def to_array(self):
463  return [self.since, to_timestamp(self.insertion_time), self.payload_hash]
464 
465  def all(self, **kwargs):
466  """
467  Returns `amount` IOVs ordered by since.
468  """
469  query = self.session.query(IOV)
470  query = apply_filters(query, IOV, **kwargs)
471  amount = kwargs["amount"] if "amount" in list(kwargs.keys()) else None
472  query_result = query.order_by(IOV.tag_name).order_by(IOV.since).limit(amount).all()
473  return data_sources.json_data_node.make(query_result)
474 
475 
476  class Payload(Base):
477  __table_args__ = schema
478  __tablename__ = 'PAYLOAD'
479 
480  headers = ["hash", "object_type", "version", "insertion_time"]
481 
482  hash = Column(String(40), primary_key=True, nullable=False)
483  object_type = Column(String(4000), nullable=False)
484  version = Column(String(4000), nullable=False)
485  insertion_time = Column(DateTime, nullable=False)
486  if map_blobs:
487  data = Column(Binary, nullable=False)
488  streamer_info = Column(Binary, nullable=False)
489  blobs_mapped = map_blobs
490 
491  def __init__(self, dictionary={}, convert_timestamps=True):
492  # assign each entry in a kwargs
493  for key in dictionary:
494  try:
495  if convert_timestamps:
496  self.__dict__[key] = to_timestamp(dictionary[key])
497  else:
498  self.__dict__[key] = dictionary[key]
499  except KeyError as k:
500  continue
501 
502  if map_blobs:
503  def as_dicts(self, convert_timestamps=False):
504  """
505  Returns dictionary form of this Payload's metadata (not the actual Payload).
506  """
507  return {
508  "hash" : self.hash,
509  "object_type" : self.object_type,
510  "version" : self.version,
511  "insertion_time" : to_timestamp(self.insertion_time) if convert_timestamps else self.insertion_time,
512  "data" : self.data,
513  "streamer_info" : self.streamer_info
514  }
515  else:
516  def as_dicts(self, convert_timestamps=False):
517  """
518  Returns dictionary form of this Payload's metadata (not the actual Payload).
519  """
520  return {
521  "hash" : self.hash,
522  "object_type" : self.object_type,
523  "version" : self.version,
524  "insertion_time" : to_timestamp(self.insertion_time) if convert_timestamps else self.insertion_time
525  }
526 
527  def __repr__(self):
528  return '<Payload %r>' % self.hash
529 
530  def to_array(self):
531  return [self.hash, self.object_type, self.version, to_timestamp(self.insertion_time)]
532 
533  def parent_tags(self, **kwargs):
534  """
535  Returns `amount` parent Tags ordered by Tag name.
536  """
537  # check if this payload is empty
538  if self.empty:
539  return None
540  else:
541  kwargs["payload_hash"] = self.hash
542  query = self.session.query(IOV.tag_name)
543  query = apply_filters(query, IOV, **kwargs)
544  query_result = query.all()
545  tag_names = [entry[0] for entry in query_result]
546  amount = kwargs["amount"] if "amount" in list(kwargs.keys()) else None
547  tags = self.session.query(Tag).filter(Tag.name.in_(tag_names)).order_by(Tag.name).limit(amount).all()
548  return data_sources.json_data_node.make(tags)
549 
550  def all(self, **kwargs):
551  """
552  Returns `amount` Payloads ordered by Payload hash.
553  """
554  query = self.session.query(Payload)
555  query = apply_filters(query, Payload, **kwargs)
556  amount = kwargs["amount"] if "amount" in list(kwargs.keys()) else None
557  query_result = query.order_by(Payload.hash).limit(amount).all()
558  return data_sources.json_data_node.make(query_result)
559 
560 
561  class Record(Base):
562  __table_args__ = schema
563  __tablename__ = 'RECORDS'
564 
565  headers = ["record", "object", "type"]
566 
567  record = Column(String(100), primary_key=True, nullable=False)
568  object = Column(String(200), nullable=False)
569  type = Column(String(20), nullable=False)
570 
571  def as_dicts(self):
572  """
573  Returns dictionary form of this Record.
574  """
575  return {
576  "record" : self.record,
577  "object" : self.object,
578  "type" : self.type
579  }
580 
581  def __repr__(self):
582  return '<Record %r>' % self.record
583 
584  def to_array(self):
585  return [self.record, self.object]
586 
587  def all(self, **kwargs):
588  """
589  Returns `amount` Records ordered by the record column.
590  """
591  query = self.session.query(Record)
592  query = apply_filters(query, Record, **kwargs)
593  amount = kwargs["amount"] if "amount" in list(kwargs.keys()) else None
594  query_result = query.order_by(Record.record).limit(amount).all()
595  return data_sources.json_data_node.make(query_result)
596 
597 
598  class Tag(Base):
599  __table_args__ = schema
600  __tablename__ = 'TAG'
601 
602  headers = ["name", "time_type", "object_type", "synchronization", "end_of_validity",\
603  "description", "last_validated_time", "insertion_time", "modification_time", "protection_code"]
604 
605  name = Column(String(4000), primary_key=True, nullable=False)
606  time_type = Column(String(4000), nullable=False)
607  object_type = Column(String(4000), nullable=False)
608  synchronization = Column(String(4000), nullable=False)
609  end_of_validity = Column(Integer, nullable=False)
610  description = Column(String(4000), nullable=False)
611  last_validated_time = Column(BigInteger, nullable=False)
612  insertion_time = Column(DateTime, nullable=False)
613  modification_time = Column(DateTime, nullable=False)
614  protection_code = Column(Integer, nullable=False)
615 
616  record = None
617  label = None
618 
619  iovs_list = relationship('IOV', backref='tag')
620 
621  def __init__(self, dictionary={}, convert_timestamps=True):
622  # assign each entry in a kwargs
623  for key in dictionary:
624  try:
625  if convert_timestamps:
626  self.__dict__[key] = to_timestamp(dictionary[key])
627  else:
628  self.__dict__[key] = dictionary[key]
629  except KeyError as k:
630  continue
631 
632  def as_dicts(self, convert_timestamps=False):
633  """
634  Returns dictionary form of this Tag.
635  """
636  return {
637  "name" : self.name,
638  "time_type" : self.time_type,
639  "object_type" : self.object_type,
640  "synchronization" : self.synchronization,
641  "end_of_validity" : self.end_of_validity,
642  "description" : self.description,
643  "last_validated_time" : self.last_validated_time,
644  "insertion_time" : to_timestamp(self.insertion_time) if convert_timestamps else self.insertion_time,
645  "modification_time" : to_timestamp(self.modification_time) if convert_timestamps else self.modification_time,
646  "record" : self.record,
647  "label" : self.label
648  }
649 
650  def __repr__(self):
651  return '<Tag %r>' % self.name
652 
653  def to_array(self):
654  return [self.name, self.time_type, self.object_type, self.synchronization, to_timestamp(self.insertion_time), self.description]
655 
656  def parent_global_tags(self, **kwargs):
657  """
658  Returns `amount` Global Tags that contain this Tag.
659  """
660  if self.empty:
661  return None
662  else:
663  kwargs["tag_name"] = self.name
664  query = self.session.query(GlobalTagMap.global_tag_name)
665  query = apply_filters(query, GlobalTagMap, **kwargs)
666  query_result = query.all()
667  if len(query_result) != 0:
668  global_tag_names = [entry[0] for entry in query_result]
669  amount = kwargs["amount"] if "amount" in list(kwargs.keys()) else None
670  global_tags = self.session.query(GlobalTag).filter(GlobalTag.name.in_(global_tag_names)).order_by(GlobalTag.name).limit(amount).all()
671  else:
672  global_tags = None
673  return data_sources.json_data_node.make(global_tags)
674 
675  def all(self, **kwargs):
676  """
677  Returns `amount` Tags ordered by Tag name.
678  """
679  query = self.session.query(Tag)
680  query = apply_filters(query, Tag, **kwargs)
681  amount = kwargs["amount"] if "amount" in list(kwargs.keys()) else None
682  query_result = query.order_by(Tag.name).limit(amount).all()
683  return data_sources.json_data_node.make(query_result)
684 
685  def iovs(self, **kwargs):
686  """
687  Returns `amount` IOVs that belong to this Tag ordered by IOV since.
688  """
689  # filter_params contains a list of columns to filter the iovs by
690  iovs_query = self.session.query(IOV).filter(IOV.tag_name == self.name)
691  iovs_query = apply_filters(iovs_query, IOV, **kwargs)
692  amount = kwargs["amount"] if "amount" in list(kwargs.keys()) else None
693  iovs = iovs_query.order_by(IOV.since).limit(amount).all()
694  return data_sources.json_data_node.make(iovs)
695 
696  def latest_iov(self):
697  """
698  Returns the single highest since held by this Tag.
699  Insertion times do not matter - if there are two IOVs at since > all others, both have the highest since.
700  """
701  iov = self.session.query(IOV).filter(IOV.tag_name == self.name).order_by(IOV.since.desc()).first()
702  return iov
703 
704  def __sub__(self, other):
705  """
706  Allows the arithmetic operator "-" to be applied to find the difference between two tags.
707  Note: diff() is symmetric, hence tag1 - tag2 = tag2 - tag1.
708  """
709  return self.diff(other)
710 
711  def diff(self, tag, short=False):
712  """
713  Returns the `diff` of the first Tag, and the Tag given.
714  Summary of algorithm:
715 
716  Compute the ordered set of iov sinces from both tags, and construct a list of triples, (since, tag1 hash, tag2 hash).
717  Set previous_payload1 and previous_payload2 to be the first hash values from each tag for the first since in the merged list.
718  Note: depending on where each Tag's IOVs start, 1 or both of these values can be None.
719  Set the first_since_in_equality_range = -1, which holds the since at which the last hashes were equal in the Tags.
720  For each triple (since, hash1, hash2),
721 
722  If the first_since_in_equality_range = None,
723  We are at the first since in the merged list, so set first_since... = since
724  Note: this is to set the previous... values for the second row, since the first row will never result in a print because
725  a row is only printed when past iovs have been processed.
726 
727  If either hash1 or hash2 is None, set it to the previous hash found
728  Note: if a Tag defines a hash for one since and then not another for n rows, the last defined hash will be carried through because of this.
729 
730  If the previous found hashes were equal, that means we have equality on the range [first_since_in_equality_range, since)
731  Note: we CANNOT conclude anything about the hashes corresponding to sinces >= since
732  because we have not looked forward, but we do know about the previous hashes.
733 
734  If hash1 != hash2,
735  The region of equality has ended, and so we have that [first_since_in_equality_range, since) is equal for both Tags
736  Hence, print that for this range we have equal hashes denoted by "=" in each hash column.
737 
738  Else:
739 
740  The previous hashes were not equal, BUT we must check that the hashes on this row are not identical...
741  If the hashes on this row are the same as the hashes above (hash1 == previous_payload1 and hash2 == previous_payload2),
742  then we have not found the end of a region of equality!
743  If the hashes have changed, print a row.
744 
745  """
746  if tag.__class__.__name__ != "Tag":
747  raise TypeError("Tag given must be a CondDBFW Tag object.")
748 
749  # get lists of iovs
750  iovs1 = dict([(iov.since, iov.payload_hash) for iov in self.iovs().data()])
751  iovs2 = dict([(iov.since, iov.payload_hash) for iov in tag.iovs().data()])
752 
753  iovs = [(x, iovs1.get(x), iovs2.get(x)) for x in sorted(set(iovs1) | set(iovs2))]
754  iovs.append(("Infinity", 1, 2))
755  table = []
756 
757  previous_hash1 = None
758  previous_hash2 = None
759  first_since_in_equality_range = None
760  previous_equal = False
761 
762  for since, hash1, hash2 in iovs:
763 
764  if first_since_in_equality_range == None:
765  # if no start of a region of equality has been found,
766  # set it to the first since in the merged list
767  # then set the previous hashes and equality status to the current
768  # and continue to the next iteration of the loop
769  first_since_in_equality_range = since
770  previous_hash1 = hash1
771  previous_hash2 = hash2
772  previous_equal = hash1 == hash2
773  continue
774 
775  # if previous_hash1 is also None, the comparison still matters
776  # eg, if hash1 = None and hash2 != None, they are different and so should be shown in the table
777  if hash1 == None:
778  hash1 = previous_hash1
779  if hash2 == None:
780  hash2 = previous_hash2
781 
782  if previous_equal:
783  # previous hashes were equal, but only say they were if we have found an end of the region of equality
784  if hash1 != hash2:
785  table.append({"since" : "[%s, %s)" % (first_since_in_equality_range, since), self.name : "=", tag.name : "="})
786  # this is the start of a new equality range - might only be one row if the next row has unequal hashes!
787  first_since_in_equality_range = since
788  else:
789  # if the payloads are not equal, the equality range has ended and we should print a row
790  # we only print if EITHER hash has changed
791  # if both hashes are equal to the previous row, skip to the next row to try to find the beginning
792  # of a region of equality
793  if not(hash1 == previous_hash1 and hash2 == previous_hash2):
794  table.append({"since" : "[%s, %s)" % (first_since_in_equality_range, since), self.name : previous_hash1, tag.name : previous_hash2})
795  first_since_in_equality_range = since
796 
797  previous_hash1 = hash1
798  previous_hash2 = hash2
799  previous_equal = hash1 == hash2
800 
801  final_list = data_sources.json_data_node.make(table)
802  return final_list
803 
804  def merge_into(self, tag, range_object):
805  """
806  Given another connection, apply the 'merge' algorithm to merge the IOVs from this Tag
807  into the IOVs of the other Tag.
808 
809  tag : CondDBFW Tag object that the IOVs from this Tag should be merged into.
810 
811  range_object : CondDBFW.data_sources.Range object to describe the subset of IOVs that should be copied
812  from the database this Tag belongs to.
813 
814  Script originally written by Joshua Dawes,
815  and adapted by Giacomo Govi, Gianluca Cerminara and Giovanni Franzoni.
816  """
817 
818  oracle_tag = self
819  merged_tag_name = oracle_tag.name + "_merged"
820 
821  #since_range = Range(6285191841738391552,6286157702573850624)
822  since_range = range_object
823 
824  #sqlite = shell.connect("sqlite://EcallaserTag_80X_2016_prompt_corr20160519_2.db")
825 
826  #sqlite_tag = sqlite.tag().all().data()[0]
827  sqlite_tag = tag
828  if sqlite_tag == None:
829  raise TypeError("Tag to be merged cannot be None.")
830 
831  sqlite_iovs = sqlite_tag.iovs().data()
832  sqlite_tag.iovs().as_table()
833 
834  new_tag = self.connection.models["tag"](sqlite_tag.as_dicts(convert_timestamps=False), convert_timestamps=False)
835  new_tag.name = merged_tag_name
836 
837  imported_iovs = oracle_tag.iovs(since=since_range).data()
838 
839  for i in range(0, len(imported_iovs)):
840  imported_iovs[i].source = "oracle"
841 
842  sqlite_iovs_sinces=[]
843  for i in range(0, len(sqlite_iovs)):
844  sqlite_iovs[i].source = "sqlite"
845  sqlite_iovs_sinces.append(sqlite_iovs[i].since)
846 
847 
848  print(sqlite_iovs_sinces)
849 
850  new_iovs_list = imported_iovs + sqlite_iovs
851  new_iovs_list = sorted(new_iovs_list, key=lambda iov : iov.since)
852 
853  for (n, iov) in enumerate(new_iovs_list):
854  # if iov is from oracle, change its hash
855  if iov.source == "oracle":
856  if new_iovs_list[n].since in sqlite_iovs_sinces:
857  # if its since is already defined in the target iovs
858  # ignore it
859  iov.source = "tobedeleted"
860  else:
861  # otherwise, iterate down from n to find the last sqlite iov,
862  # and assign that hash
863  for i in reversed(list(range(0,n))):
864  if new_iovs_list[i].source == "sqlite":
865  print("change %s to %s at since %d" % (iov.payload_hash, new_iovs_list[i].payload_hash, iov.since))
866  iov.payload_hash = new_iovs_list[i].payload_hash
867  break
868 
869 
870  new_iov_list_copied = []
871 
872  for iov in new_iovs_list:
873  # only append IOVs that are not already defined in the target tag
874  if iov.source != "tobedeleted":
875  new_iov_list_copied.append(iov)
876 
877  new_iov_list_copied = sorted(new_iov_list_copied, key=lambda iov : iov.since)
878 
879  now = datetime.datetime.utcnow()
880 
881  new_iovs = []
882  for iov in new_iov_list_copied:
883  new_iovs.append( self.connection.models["iov"](iov.as_dicts(convert_timestamps=False), convert_timestamps=False) )
884  for iov in new_iovs:
885  iov.insertion_time = now
886  iov.tag_name = merged_tag_name
887 
888  new_tag.iovs_list = new_iovs
889 
890  return new_tag
891  #sqlite.write_and_commit(new_iovs)
892 
893 
894  class TagAuthorization(Base):
895  __table_args__ = schema
896  __tablename__ = 'TAG_AUTHORIZATION'
897 
898  headers = ["tag_name", "access_type", "credential", "credential_type"]
899 
900  tag_name = Column(String(100), ForeignKey(fk_schema_prefix + 'TAG.name'), primary_key=True, nullable=False)
901  access_type = Column(Integer, nullable=False)
902  credential = Column(String(100), primary_key=True, nullable=False)
903  credential_type = Column(Integer, nullable=False)
904 
905  def as_dicts(self):
906  """
907  Returns dictionary form of this Tag Authorization.
908  """
909  return {
910  "tag_name" : self.tag_name,
911  "access_type" : self.access_type,
912  "credential" : self.credential,
913  "credential_type" : self.credential_type
914  }
915 
916  def __repr__(self):
917  return '<TagAuthorization %s %s %s %s>' % (self.tag_name, self.access_type, self.credential, self.credential_type)
918 
919  def to_array(self):
920  return [self.tag_name, self.access_type, self.credential, self.credential_type]
921 
922  def all(self, **kwargs):
923  """
924  Returns `amount` Tag Authorization entries ordered by tag name.
925  """
926  query = self.session.query(TagAuthorization)
927  query = apply_filters(query, TagAuthorization, **kwargs)
928  amount = kwargs["amount"] if "amount" in list(kwargs.keys()) else None
929  query_result = query.order_by(TagAuthorization.tag_name).limit(amount).all()
930  return data_sources.json_data_node.make(query_result)
931 
932  classes = {"globaltag" : GlobalTag, "iov" : IOV, "globaltagmap" : GlobalTagMap,\
933  "payload" : Payload, "tag" : Tag, "TagAuthorization": TagAuthorization, "Base" : Base}
934 
935  if class_name == None:
936  return classes
937  else:
938  return classes[class_name]
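A minimal sketch of using generate() outside the web framework (the import path is an assumption; the dictionary keys follow the classes mapping at the end of the listing):

    # Illustrative only: build the ORM classes and pick out the ones needed.
    from CondCore.Utilities.CondDBFW import models   # import path assumed

    classes = models.generate()                  # dict of all ORM classes, without blob columns
    GlobalTag = classes["globaltag"]
    IOV = classes["iov"]

    Payload = models.generate(map_blobs=True, class_name="payload")   # single class, with data/streamer_info columns
    print(GlobalTag.headers)                     # column names used when rendering tables

To make the equality-range logic described in the Tag.diff() docstring concrete, here is a small worked example with illustrative sinces and payload hashes:

    # tag1 IOVs: {1: "A", 100: "B"}        tag2 IOVs: {1: "A", 50: "C", 100: "B"}
    # tag1.diff(tag2) would report:
    #   {"since": "[1, 50)",         tag1.name: "=", tag2.name: "="}   # hashes agree on [1, 50)
    #   {"since": "[50, 100)",       tag1.name: "A", tag2.name: "C"}   # they differ on [50, 100)
    #   {"since": "[100, Infinity)", tag1.name: "=", tag2.name: "="}   # equal again from 100 onward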
def models.session_independent (   objects)

Definition at line 50 of file models.py.

References session_independent_object().

50 
51 def session_independent(objects):
52     if type(objects) == list:
53         return list(map(session_independent_object, objects))
54     else:
55         # assume objects is a single object (not a list)
56         return session_independent_object(objects)
def models.session_independent_object (   object,
  schema = None 
)

Definition at line 31 of file models.py.

References class_name_to_column(), and generate().

Referenced by session_independent(), and querying.connection.write().

31 
32 def session_independent_object(object, schema=None):
33     # code originally taken from the write method in querying
34     # will result in a new object that isn't attached to any session
35     # hence, SQLAlchemy won't track changes
36 
37     if object.__class__.__name__.lower() == "payload":
38         map_blobs = object.blobs_mapped
39     else:
40         map_blobs = False
41     # need to change this to only generate the required class - can be slow...
42     # extract class name of object
43     cls = object.__class__
44     class_name = class_name_to_column(cls).lower()
45     new_class = generate(map_blobs=map_blobs, class_name=class_name)
46     new_class.__table__.schema = schema
47     new_object = new_class(object.as_dicts(), convert_timestamps=False)
48 
49     return new_object
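A hedged sketch of detaching objects in practice (attached_payload, attached_tag and attached_iov stand for objects obtained through a framework session; session_independent() applies the same treatment to a whole list):

    # Illustrative only: copy session-attached objects into detached ones.
    detached = session_independent_object(attached_payload)
    detached_list = session_independent([attached_tag, attached_iov])
    # The detached copies carry the same column data, but SQLAlchemy no longer
    # tracks changes to them, so they can outlive the originating session.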
def models.status_full_name (   status)

Definition at line 71 of file models.py.

Referenced by generate().

71 
72 def status_full_name(status):
73     full_status = {
74         'P': 'Pending',
75         'R': 'Rejected',
76         'A': 'Accepted'
77     }
78     return full_status[status]
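For example:

    status_full_name("P")   # -> "Pending"
    status_full_name("A")   # -> "Accepted"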