"""
Using Audrius' models from the flask browser.

This file contains models that are used with SQLAlchemy.

Note: some things done in methods written in classes rely on the querying module adding extra information to classes,
so these will not work in a normal context outside the framework.
"""

from __future__
import print_function
17 from sqlalchemy.orm
import relationship, backref
18 from sqlalchemy.ext.declarative
import declarative_base
20 from sqlalchemy
import Column, String, Integer, DateTime, Binary, ForeignKey, BigInteger, and_
22 print(
"You must be working inside a CMSSW environment. Try running 'cmsenv'.")
25 import data_sources, data_formats
26 import urllib, urllib2, base64
27 from copy
import deepcopy
30 from utils
import to_timestamp, to_datetime, friendly_since
def session_independent_object(object, schema=None):
    """
    Build a copy of `object` detached from any SQLAlchemy session.

    The copy is an instance of a freshly generated model class, so SQLAlchemy
    will not track changes made to it.

    object : a CondDBFW model instance (GlobalTag, Tag, IOV, Payload, ...).
    schema : optional schema name to attach to the generated class's table.
    """
    # Only Payload instances know whether their blob columns were mapped;
    # every other model is generated without blob mapping.
    if object.__class__.__name__.lower() == "payload":
        map_blobs = object.blobs_mapped
    else:
        map_blobs = False
    cls = object.__class__
    class_name = class_name_to_column(cls).lower()
    new_class = generate(map_blobs=map_blobs, class_name=class_name)
    new_class.__table__.schema = schema
    # convert_timestamps=False: the source object's values are copied through
    # unchanged rather than being re-converted.
    new_object = new_class(object.as_dicts(), convert_timestamps=False)
    return new_object
def session_independent(objects):
    """
    Detach a model instance, or a list of them, from any SQLAlchemy session.

    Returns a list when given a list, otherwise a single detached object.
    """
    if isinstance(objects, list):
        # List comprehension instead of map(): identical under Python 2,
        # and still returns a list (not a lazy iterator) under Python 3.
        return [session_independent_object(o) for o in objects]
    else:
        # assume objects is a single model instance
        return session_independent_object(objects)
def class_name_to_column(cls):
    """
    Convert a CamelCase class name to its snake_case column/table form.

    Names that are entirely upper-case (e.g. "IOV") are returned unchanged.
    """
    class_name = cls.__name__
    # The original recomputed an all_upper_case flag per character, keeping
    # only the LAST character's result; str.isupper() checks the whole name.
    if class_name.isupper():
        return class_name
    # Build the snake_case form in one pass instead of repeatedly slicing
    # and re-concatenating the string (which also shifted later indices).
    pieces = []
    for index, character in enumerate(class_name):
        if character.isupper():
            if index != 0:
                pieces.append("_")
            pieces.append(character.lower())
        else:
            pieces.append(character)
    return "".join(pieces)
def status_full_name(status):
    """
    Expand a single-character status code into its full name.

    Raises KeyError for unknown codes.
    """
    # NOTE(review): the mapping literal was lost in the mangled source;
    # reconstructed from the P/R/A codes used by GlobalTagMapRequest.status.
    full_status = {
        'P': 'Pending',
        'R': 'Rejected',
        'A': 'Accepted'
    }
    return full_status[status]
def date_args_to_days(**radius):
    """
    Convert keyword time arguments into a total number of days.

    Accepted keywords: days, weeks, months, years.  A month counts as
    28 days and a year as 365 days.
    """
    # default 0 so callers may omit "days" (the original crashed on None)
    days = radius.get("days", 0)
    days += radius.get("weeks")*7 if radius.get("weeks") is not None else 0
    days += radius.get("months")*28 if radius.get("months") is not None else 0
    # fixed: the original ADDED 365 instead of multiplying by the year count
    days += radius.get("years")*365 if radius.get("years") is not None else 0
    return days
class ContinuousRange(object):
    """
    Base class for Radius and Range - used for type checking by the
    apply_filter function.
    """
    def __init__(self):
        pass

class Radius(ContinuousRange):
    """
    Used to tell proxy methods that a range of values defined by a centre and
    a radius should be queried for - special case of filter clauses.
    """
    def __init__(self, centre, radius):
        """
        centre and radius should be objects that can be added and subtracted.
        eg, centre could be a datetime.datetime object, and radius could be a
        datetime.timedelta.

        Radius and Range objects are assigned to properties of
        querying.connection objects, hence are given the database type.
        """
        self.centre = centre
        self.radius = radius

    def get_start(self):
        return self.centre - self.radius

    def get_end(self):
        return self.centre + self.radius

class Range(ContinuousRange):
    """
    Used to tell proxy methods that a range of values defined by a start and
    end point should be queried for - special case of filter clauses.
    """
    def __init__(self, start, end):
        """
        start and end should be comparable with the column being filtered
        (e.g. datetimes or integers).
        """
        self.start = start
        self.end = end

    def get_start(self):
        return self.start

    def get_end(self):
        return self.end

class RegExp(object):
    """
    Used to tell proxy methods that a regular expression should be used to
    query the column.
    """
    def __init__(self, regexp):
        self.regexp = regexp

    def get_regexp(self):
        return self.regexp

    def apply(self):
        # uses code from the conddb tool
        # NOTE(review): `field` and `regexp` are undefined in this scope in
        # the original as well - this method appears dead/broken and is kept
        # only for interface compatibility; apply_filter() holds the working
        # version of this logic.  TODO: confirm intended variables.
        if self.database_type in ["oracle", "frontier"]:
            return sqlalchemy.func.regexp_like(field, regexp)
        elif self.database_type == "sqlite":
            self.connection_object.engine.pool.connect().create_function('regexp', 2, lambda data, regexp: re.search(regexp, data) is not None)
            return sqlalchemy.func.regexp(field, regexp)
        else:
            # fixed: NotImplemented is a value, not an exception type
            raise NotImplementedError("Can only apply regular expression search to Oracle, Frontier and SQLite.")
def apply_filter(orm_query, orm_class, attribute, value):
    """
    Add a filter clause for `attribute` of `orm_class` to `orm_query`.

    The clause depends on the type of `value`:
      - list                   -> IN clause
      - data_sources.json_list -> IN clause over its data()
      - Range / Radius         -> bounded clause (start <= column <= end)
      - RegExp                 -> database-specific regular expression match
      - anything else          -> simple equality
    Returns the extended query.
    """
    filter_attribute = getattr(orm_class, attribute)
    if isinstance(value, list):
        orm_query = orm_query.filter(filter_attribute.in_(value))
    elif isinstance(value, data_sources.json_list):
        orm_query = orm_query.filter(filter_attribute.in_(value.data()))
    elif type(value) in [Range, Radius]:
        minus = value.get_start()
        plus = value.get_end()
        orm_query = orm_query.filter(and_(filter_attribute >= minus, filter_attribute <= plus))
    elif isinstance(value, RegExp):
        # regular expression support is database specific
        if value.database_type in ["oracle", "frontier"]:
            regexp = sqlalchemy.func.regexp_like(filter_attribute, value.get_regexp())
        elif value.database_type == "sqlite":
            # SQLite has no built-in REGEXP - register a python callback
            value.connection_object.engine.pool.connect().create_function('regexp', 2, lambda data, regexp: re.search(regexp, data) is not None)
            regexp = sqlalchemy.func.regexp(filter_attribute, value.get_regexp())
        else:
            # fixed: raise a real exception (NotImplemented is not one)
            raise NotImplementedError("Can only apply regular expression search to Oracle, Frontier and SQLite.")
        orm_query = orm_query.filter(regexp)
    else:
        orm_query = orm_query.filter(filter_attribute == value)
    return orm_query
def apply_filters(orm_query, orm_class, **filters):
    """
    Apply every entry of `filters` to `orm_query` via apply_filter.

    The "amount" keyword is a paging hint consumed elsewhere, not a column
    filter, so it is skipped here.
    """
    for attribute, value in filters.items():
        if attribute == "amount":
            continue
        orm_query = apply_filter(orm_query, orm_class, attribute, value)
    return orm_query
Base = declarative_base()

class GlobalTag(Base):
    """ORM model for the GLOBAL_TAG table."""

    __tablename__ = 'GLOBAL_TAG'

    # column order used when query results are rendered as tables
    headers = ["name", "validity", "description", "release", "insertion_time", "snapshot_time", "scenario", "workflow", "type"]

    name = Column(String(100), unique=True, nullable=False, primary_key=True)
    validity = Column(Integer, nullable=False)
    description = Column(String(4000), nullable=False)
    release = Column(String(100), nullable=False)
    insertion_time = Column(DateTime, nullable=False)
    snapshot_time = Column(DateTime, nullable=False)
    scenario = Column(String(100))
    workflow = Column(String(100))
    type = Column(String(1))
    tag_map = relationship('GlobalTagMap', backref='global_tag')

    def __init__(self, dictionary=None, convert_timestamps=True):
        """
        Populate attributes from `dictionary`.

        convert_timestamps : when True, values are passed through
        to_timestamp (which leaves non-datetime values unchanged).
        """
        # None default instead of the original mutable default argument
        if dictionary is None:
            dictionary = {}
        for key in dictionary:
            try:
                if convert_timestamps:
                    self.__dict__[key] = to_timestamp(dictionary[key])
                else:
                    self.__dict__[key] = dictionary[key]
            except KeyError:
                # skip entries that cannot be assigned
                continue

    def __repr__(self):
        return '<GlobalTag %r>' % self.name

    def as_dicts(self, convert_timestamps=False):
        """
        Returns dictionary form of Global Tag object.
        """
        return {
            'name': self.name,
            'validity': self.validity,
            'description': self.description,
            'release': self.release,
            'insertion_time': to_timestamp(self.insertion_time) if convert_timestamps else self.insertion_time,
            'snapshot_time': to_timestamp(self.snapshot_time) if convert_timestamps else self.snapshot_time,
            'scenario': self.scenario,
            'workflow': self.workflow,
            'type': self.type
        }

    def to_array(self):
        # row form used by table printers
        return [self.name, self.release, to_timestamp(self.insertion_time), to_timestamp(self.snapshot_time), self.description]

    def all(self, **kwargs):
        """
        Returns `amount` Global Tags ordered by Global Tag name.
        """
        query = self.session.query(GlobalTag)
        query = apply_filters(query, self.__class__, **kwargs)
        amount = kwargs.get("amount")
        query_result = query.order_by(GlobalTag.name).limit(amount).all()
        return data_sources.json_data_node.make(query_result)

    def tags(self, **kwargs):
        """
        Returns `amount` Global Tag Maps belonging to this Global Tag.
        """
        kwargs["global_tag_name"] = self.name
        all_tags = self.session.query(GlobalTagMap.global_tag_name, GlobalTagMap.record, GlobalTagMap.label, GlobalTagMap.tag_name)
        all_tags = apply_filters(all_tags, GlobalTagMap, **kwargs)
        amount = kwargs.get("amount")
        all_tags = all_tags.order_by(GlobalTagMap.tag_name).limit(amount).all()
        column_names = ["global_tag_name", "record", "label", "tag_name"]
        # to_timestamp passes non-datetime values through untouched, so it
        # is safe to apply it to every column of the row
        all_tags = [dict(zip(column_names, [to_timestamp(item) for item in row])) for row in all_tags]
        all_tags = data_formats._dicts_to_orm_objects(GlobalTagMap, all_tags)
        return data_sources.json_data_node.make(all_tags)

    def iovs(self, **kwargs):
        """
        Returns `amount` IOVs belonging to all Tags held in this Global Tag.
        For large Global Tags (which is most of them), VERY slow.
        Highly recommended to instead use
        `tags().get_members("tag_name").data()` to get a list of tag names,
        and then get IOVs from each Tag name.

        At some point, this method may replace the method currently used.
        """
        # subquery of IOVs belonging to any tag of this global tag
        tag_names = self.tags().get_members("tag_name").data()
        iovs_all_tags = self.session.query(IOV).filter(IOV.tag_name.in_(tag_names))
        iovs_all_tags = apply_filters(iovs_all_tags, IOV, **kwargs)
        amount = kwargs.get("amount")
        iovs_all_tags = iovs_all_tags.limit(amount).subquery()

        # join the global tag map onto the IOV subquery
        iovs_gt_tags = self.session.query(GlobalTagMap.tag_name, iovs_all_tags.c.since,
                                          iovs_all_tags.c.payload_hash, iovs_all_tags.c.insertion_time)\
                            .filter(GlobalTagMap.global_tag_name == self.name)\
                            .join(iovs_all_tags, GlobalTagMap.tag_name == iovs_all_tags.c.tag_name)

        iovs_gt_tags = iovs_gt_tags.order_by(iovs_all_tags.c.since).all()

        column_names = ["tag_name", "since", "payload_hash", "insertion_time"]
        all_iovs = [dict(zip(column_names, row)) for row in iovs_gt_tags]
        all_iovs = data_formats._dicts_to_orm_objects(IOV, all_iovs)
        return data_sources.json_data_node.make(all_iovs)

    def __sub__(self, other):
        """
        Allows Global Tag objects to be used with the "-" operator.
        Note: gt1 - gt2 = gt1.diff(gt2) ( = gt2 - gt1 = gt2.diff(gt1))
        """
        return self.diff(other)

    def diff(self, gt):
        """
        Returns the json_list of differences in the form of tuples:

        (record, label, tag name of gt1 (self), tag name of gt2 (gt))
        """
        record_label_to_tag_name1 = dict([((gt_map.record, gt_map.label), gt_map.tag_name) for gt_map in self.tags().data()])
        record_label_to_tag_name2 = dict([((gt_map.record, gt_map.label), gt_map.tag_name) for gt_map in gt.tags().data()])

        record_label_pairs = sorted(set(record_label_to_tag_name1) | set(record_label_to_tag_name2))

        table = []
        for record_label in record_label_pairs:
            tag_name1 = record_label_to_tag_name1.get(record_label)
            tag_name2 = record_label_to_tag_name2.get(record_label)

            # a row is a difference if either side is missing or they disagree
            if tag_name1 is None or tag_name2 is None or tag_name1 != tag_name2:
                table.append({
                    "Record" : record_label[0],
                    "Label" : record_label[1],
                    ("%s Tag" % self.name) : tag_name1,
                    ("%s Tag" % gt.name) : tag_name2
                })

        return data_sources.json_data_node.make(table)
class GlobalTagMap(Base):
    """ORM model for the GLOBAL_TAG_MAP table (global tag -> tag mapping)."""

    __tablename__ = 'GLOBAL_TAG_MAP'

    headers = ["global_tag_name", "record", "label", "tag_name"]

    global_tag_name = Column(String(100), ForeignKey('GLOBAL_TAG.name'), primary_key=True, nullable=False)
    record = Column(String(100), ForeignKey('RECORDS.record'), primary_key=True, nullable=False)
    label = Column(String(100), primary_key=True, nullable=False)
    tag_name = Column(String(100), ForeignKey('TAG.name'), nullable=False)

    def __init__(self, dictionary=None, convert_timestamps=True):
        """
        Populate attributes from `dictionary`; to_timestamp leaves
        non-datetime values unchanged when convert_timestamps is True.
        """
        # None default instead of the original mutable default argument
        if dictionary is None:
            dictionary = {}
        for key in dictionary:
            try:
                if convert_timestamps:
                    self.__dict__[key] = to_timestamp(dictionary[key])
                else:
                    self.__dict__[key] = dictionary[key]
            except KeyError:
                continue

    def __repr__(self):
        return '<GlobalTagMap %r>' % self.global_tag_name

    def as_dicts(self, convert_timestamps=False):
        """
        Returns dictionary form of this Global Tag Map.
        """
        return {
            "global_tag_name" : str(self.global_tag_name),
            "record" : str(self.record),
            "label" : str(self.label),
            "tag_name" : str(self.tag_name)
        }
class GlobalTagMapRequest(Base):
    """ORM model for the GLOBAL_TAG_MAP_REQUEST queue table."""

    __tablename__ = 'GLOBAL_TAG_MAP_REQUEST'

    queue = Column(String(100), primary_key=True, nullable=False)
    tag = Column(String(100), ForeignKey('TAG.name'), primary_key=True, nullable=False)
    record = Column(String(100), ForeignKey('RECORDS.record'), primary_key=True, nullable=False)
    label = Column(String(100), primary_key=True, nullable=False)
    status = Column(String(1), nullable=False)
    description = Column(String(4000), nullable=False)
    submitter_id = Column(Integer, nullable=False)
    time_submitted = Column(DateTime, nullable=False)
    last_edited = Column(DateTime, nullable=False)

    def __init__(self, dictionary=None, convert_timestamps=True):
        """
        Populate attributes from `dictionary`; to_timestamp leaves
        non-datetime values unchanged when convert_timestamps is True.
        """
        # None default instead of the original mutable default argument
        if dictionary is None:
            dictionary = {}
        for key in dictionary:
            try:
                if convert_timestamps:
                    self.__dict__[key] = to_timestamp(dictionary[key])
                else:
                    self.__dict__[key] = dictionary[key]
            except KeyError:
                continue

    headers = ["queue", "tag", "record", "label", "status", "description", "submitter_id", "time_submitted", "last_edited"]

    def as_dicts(self):
        """
        Returns dictionary form of this Global Tag Map Request.
        """
        return {
            "queue" : self.queue,
            "tag" : self.tag,
            "record" : self.record,
            "label" : self.label,
            "status" : self.status,
            "description" : self.description,
            "submitter_id" : self.submitter_id,
            "time_submitted" : self.time_submitted,
            "last_edited" : self.last_edited
        }

    def __repr__(self):
        return '<GlobalTagMapRequest %r>' % self.queue
class IOV(Base):
    """ORM model for the IOV table (interval-of-validity rows of a Tag)."""

    __tablename__ = 'IOV'

    headers = ["tag_name", "since", "payload_hash", "insertion_time"]

    tag_name = Column(String(4000), ForeignKey('TAG.name'), primary_key=True, nullable=False)
    since = Column(Integer, primary_key=True, nullable=False)
    payload_hash = Column(String(40), ForeignKey('PAYLOAD.hash'), nullable=False)
    insertion_time = Column(DateTime, primary_key=True, nullable=False)

    def __init__(self, dictionary=None, convert_timestamps=True):
        """
        Populate attributes from `dictionary`; to_timestamp leaves
        non-datetime values unchanged when convert_timestamps is True.
        """
        # None default instead of the original mutable default argument
        if dictionary is None:
            dictionary = {}
        for key in dictionary:
            try:
                if convert_timestamps:
                    self.__dict__[key] = to_timestamp(dictionary[key])
                else:
                    self.__dict__[key] = dictionary[key]
            except KeyError:
                continue

    def as_dicts(self, convert_timestamps=False):
        """
        Returns dictionary form of this IOV.
        """
        return {
            "tag_name" : self.tag_name,
            "since" : self.since,
            "payload_hash" : self.payload_hash,
            "insertion_time" : to_timestamp(self.insertion_time) if convert_timestamps else self.insertion_time
        }

    def __repr__(self):
        return '<IOV %r>' % self.tag_name

    def to_array(self):
        # row form used by table printers
        return [self.since, to_timestamp(self.insertion_time), self.payload_hash]

    def all(self, **kwargs):
        """
        Returns `amount` IOVs ordered by tag name, then since.
        """
        query = self.session.query(IOV)
        query = apply_filters(query, IOV, **kwargs)
        amount = kwargs.get("amount")
        query_result = query.order_by(IOV.tag_name).order_by(IOV.since).limit(amount).all()
        return data_sources.json_data_node.make(query_result)
class Payload(Base):
    """ORM model for the PAYLOAD table (payload metadata, optionally blobs)."""

    __tablename__ = 'PAYLOAD'

    headers = ["hash", "object_type", "version", "insertion_time"]

    hash = Column(String(40), primary_key=True, nullable=False)
    object_type = Column(String(4000), nullable=False)
    version = Column(String(4000), nullable=False)
    insertion_time = Column(DateTime, nullable=False)
    if map_blobs:
        # map_blobs is a closure variable of the enclosing generate()
        # factory: blob columns are only mapped when requested
        data = Column(Binary, nullable=False)
        streamer_info = Column(Binary, nullable=False)
    blobs_mapped = map_blobs

    def __init__(self, dictionary=None, convert_timestamps=True):
        """
        Populate attributes from `dictionary`; to_timestamp leaves
        non-datetime values unchanged when convert_timestamps is True.
        """
        # None default instead of the original mutable default argument
        if dictionary is None:
            dictionary = {}
        for key in dictionary:
            try:
                if convert_timestamps:
                    self.__dict__[key] = to_timestamp(dictionary[key])
                else:
                    self.__dict__[key] = dictionary[key]
            except KeyError:
                continue

    # the two as_dicts variants below are selected at class-generation time
    # depending on whether the blob columns were mapped
    if map_blobs:
        def as_dicts(self, convert_timestamps=False):
            """
            Returns dictionary form of this Payload, including blob columns.
            """
            return {
                "hash" : self.hash,
                "object_type" : self.object_type,
                "version" : self.version,
                "insertion_time" : to_timestamp(self.insertion_time) if convert_timestamps else self.insertion_time,
                "data" : self.data,
                "streamer_info" : self.streamer_info
            }
    else:
        def as_dicts(self, convert_timestamps=False):
            """
            Returns dictionary form of this Payload's metadata (not the actual Payload).
            """
            return {
                "hash" : self.hash,
                "object_type" : self.object_type,
                "version" : self.version,
                "insertion_time" : to_timestamp(self.insertion_time) if convert_timestamps else self.insertion_time
            }

    def __repr__(self):
        return '<Payload %r>' % self.hash

    def to_array(self):
        # row form used by table printers
        return [self.hash, self.object_type, self.version, to_timestamp(self.insertion_time)]

    def parent_tags(self, **kwargs):
        """
        Returns `amount` parent Tags ordered by Tag name.
        """
        # select tag names of IOVs that reference this payload hash
        kwargs["payload_hash"] = self.hash
        query = self.session.query(IOV.tag_name)
        query = apply_filters(query, IOV, **kwargs)
        query_result = query.all()
        tag_names = [entry[0] for entry in query_result]
        amount = kwargs.get("amount")
        tags = self.session.query(Tag).filter(Tag.name.in_(tag_names)).order_by(Tag.name).limit(amount).all()
        return data_sources.json_data_node.make(tags)

    def all(self, **kwargs):
        """
        Returns `amount` Payloads ordered by Payload hash.
        """
        query = self.session.query(Payload)
        query = apply_filters(query, self.__class__, **kwargs)
        amount = kwargs.get("amount")
        query_result = query.order_by(Payload.hash).limit(amount).all()
        return data_sources.json_data_node.make(query_result)
class Record(Base):
    """ORM model for the RECORDS table."""

    __tablename__ = 'RECORDS'

    headers = ["record", "object", "type"]

    record = Column(String(100), primary_key=True, nullable=False)
    object = Column(String(200), nullable=False)
    type = Column(String(20), nullable=False)

    def as_dicts(self):
        """
        Returns dictionary form of this Record.
        """
        return {
            "record" : self.record,
            "object" : self.object,
            # NOTE(review): "type" not visible in the original snippet -
            # included since the column exists; confirm against callers
            "type" : self.type
        }

    def __repr__(self):
        return '<Record %r>' % self.record

    def to_array(self):
        # row form used by table printers
        return [self.record, self.object]

    def all(self, **kwargs):
        """
        Returns `amount` Records ordered by record name.
        """
        query = self.session.query(Record)
        query = apply_filters(query, self.__class__, **kwargs)
        amount = kwargs.get("amount")
        query_result = query.order_by(Record.record).limit(amount).all()
        return data_sources.json_data_node.make(query_result)
class Tag(Base):
    """ORM model for the TAG table, plus diff/merge helpers."""

    __tablename__ = 'TAG'

    headers = ["name", "time_type", "object_type", "synchronization", "end_of_validity",
               "description", "last_validated_time", "insertion_time", "modification_time"]

    name = Column(String(4000), primary_key=True, nullable=False)
    time_type = Column(String(4000), nullable=False)
    object_type = Column(String(4000), nullable=False)
    synchronization = Column(String(4000), nullable=False)
    end_of_validity = Column(Integer, nullable=False)
    description = Column(String(4000), nullable=False)
    last_validated_time = Column(BigInteger, nullable=False)
    insertion_time = Column(DateTime, nullable=False)
    modification_time = Column(DateTime, nullable=False)

    # record and label are not TAG columns - the querying framework attaches
    # them to Tag objects (see module docstring note)
    record = None
    label = None

    iovs_list = relationship('IOV', backref='tag')

    def __init__(self, dictionary=None, convert_timestamps=True):
        """
        Populate attributes from `dictionary`; to_timestamp leaves
        non-datetime values unchanged when convert_timestamps is True.
        """
        # None default instead of the original mutable default argument
        if dictionary is None:
            dictionary = {}
        for key in dictionary:
            try:
                if convert_timestamps:
                    self.__dict__[key] = to_timestamp(dictionary[key])
                else:
                    self.__dict__[key] = dictionary[key]
            except KeyError:
                continue

    def as_dicts(self, convert_timestamps=False):
        """
        Returns dictionary form of this Tag.
        """
        return {
            "name" : self.name,
            "time_type" : self.time_type,
            "object_type" : self.object_type,
            "synchronization" : self.synchronization,
            "end_of_validity" : self.end_of_validity,
            "description" : self.description,
            "last_validated_time" : self.last_validated_time,
            "insertion_time" : to_timestamp(self.insertion_time) if convert_timestamps else self.insertion_time,
            "modification_time" : to_timestamp(self.modification_time) if convert_timestamps else self.modification_time,
            "record" : self.record,
            "label" : self.label
        }

    def __repr__(self):
        return '<Tag %r>' % self.name

    def to_array(self):
        # row form used by table printers
        return [self.name, self.time_type, self.object_type, self.synchronization, to_timestamp(self.insertion_time), self.description]

    def parent_global_tags(self, **kwargs):
        """
        Returns `amount` Global Tags that contain this Tag.
        """
        kwargs["tag_name"] = self.name
        query = self.session.query(GlobalTagMap.global_tag_name)
        query = apply_filters(query, GlobalTagMap, **kwargs)
        query_result = query.all()
        if len(query_result) != 0:
            global_tag_names = [entry[0] for entry in query_result]
            amount = kwargs.get("amount")
            global_tags = self.session.query(GlobalTag).filter(GlobalTag.name.in_(global_tag_names)).order_by(GlobalTag.name).limit(amount).all()
        else:
            global_tags = None
        return data_sources.json_data_node.make(global_tags)

    def all(self, **kwargs):
        """
        Returns `amount` Tags ordered by Tag name.
        """
        query = self.session.query(Tag)
        query = apply_filters(query, self.__class__, **kwargs)
        amount = kwargs.get("amount")
        query_result = query.order_by(Tag.name).limit(amount).all()
        return data_sources.json_data_node.make(query_result)

    def iovs(self, **kwargs):
        """
        Returns `amount` IOVs that belong to this Tag ordered by IOV since.
        """
        iovs_query = self.session.query(IOV).filter(IOV.tag_name == self.name)
        iovs_query = apply_filters(iovs_query, IOV, **kwargs)
        amount = kwargs.get("amount")
        iovs = iovs_query.order_by(IOV.since).limit(amount).all()
        return data_sources.json_data_node.make(iovs)

    def latest_iov(self):
        """
        Returns the single highest since held by this Tag.
        Insertion times do not matter - if there are two IOVs at since > all
        others, both have the highest since.
        """
        iov = self.session.query(IOV).filter(IOV.tag_name == self.name).order_by(IOV.since.desc()).first()
        return iov

    def __sub__(self, other):
        """
        Allows the "-" operator to find the difference between two tags.
        Note: diff() is symmetric, hence tag1 - tag2 = tag2 - tag1.
        """
        return self.diff(other)

    def diff(self, tag, short=False):
        """
        Returns the `diff` of this Tag against the Tag given, as a json_list
        of rows keyed by since ranges.

        Algorithm: walk the merged, ordered set of sinces from both tags,
        carrying each tag's last defined hash through gaps, and emit a row
        every time a range of equal (or of unchanged, differing) hashes ends.
        A sentinel ("Infinity") row flushes the final pending range.
        """
        if tag.__class__.__name__ != "Tag":
            raise TypeError("Tag given must be a CondDBFW Tag object.")

        # map since -> payload hash for each tag
        iovs1 = dict([(iov.since, iov.payload_hash) for iov in self.iovs().data()])
        iovs2 = dict([(iov.since, iov.payload_hash) for iov in tag.iovs().data()])

        iovs = [(x, iovs1.get(x), iovs2.get(x)) for x in sorted(set(iovs1) | set(iovs2))]
        iovs.append(("Infinity", 1, 2))
        table = []

        previous_hash1 = None
        previous_hash2 = None
        first_since_in_equality_range = None
        previous_equal = False

        for since, hash1, hash2 in iovs:
            if first_since_in_equality_range is None:
                # first since in the merged list - just seed the state; a row
                # is only emitted once past sinces have been processed
                first_since_in_equality_range = since
                previous_hash1 = hash1
                previous_hash2 = hash2
                previous_equal = hash1 == hash2
                continue

            # carry the last defined hash through gaps in either tag
            if hash1 is None:
                hash1 = previous_hash1
            if hash2 is None:
                hash2 = previous_hash2

            if previous_equal:
                # equality held on [first_since_in_equality_range, since);
                # nothing is known yet about this row's hashes onwards
                if hash1 != hash2:
                    if not short:
                        table.append({"since" : "[%s, %s)" % (first_since_in_equality_range, since), self.name : "=", tag.name : "="})
                    first_since_in_equality_range = since
            else:
                # a difference range ends only when this row's hashes change
                if not(hash1 == previous_hash1 and hash2 == previous_hash2):
                    table.append({"since" : "[%s, %s)" % (first_since_in_equality_range, since), self.name : previous_hash1, tag.name : previous_hash2})
                    first_since_in_equality_range = since

            previous_hash1 = hash1
            previous_hash2 = hash2
            previous_equal = hash1 == hash2

        return data_sources.json_data_node.make(table)

    def merge_into(self, tag, range_object):
        """
        Given another connection, apply the 'merge' algorithm to merge the
        IOVs from this Tag into the IOVs of the other Tag.

        tag : CondDBFW Tag object that the IOVs from this Tag should be merged into.
        range_object : CondDBFW.data_sources.Range object to describe the
        subset of IOVs that should be copied from the database this Tag
        belongs to.

        Script originally written by Joshua Dawes,
        and adapted by Giacomo Govi, Gianluca Cerminara and Giovanni Franzoni.
        """
        oracle_tag = self
        merged_tag_name = oracle_tag.name + "_merged"
        since_range = range_object

        # NOTE(review): the target tag is assumed to be the sqlite side of
        # the merge - confirm against the original call sites
        sqlite_tag = tag
        if sqlite_tag is None:
            raise TypeError("Tag to be merged cannot be None.")

        sqlite_iovs = sqlite_tag.iovs().data()
        sqlite_tag.iovs().as_table()

        # the merged tag takes its metadata from the sqlite tag
        new_tag = self.connection.models["tag"](sqlite_tag.as_dicts(convert_timestamps=False), convert_timestamps=False)
        new_tag.name = merged_tag_name

        imported_iovs = oracle_tag.iovs(since=since_range).data()

        for i in range(0, len(imported_iovs)):
            imported_iovs[i].source = "oracle"

        sqlite_iovs_sinces = []
        for i in range(0, len(sqlite_iovs)):
            sqlite_iovs[i].source = "sqlite"
            sqlite_iovs_sinces.append(sqlite_iovs[i].since)

        print(sqlite_iovs_sinces)

        new_iovs_list = imported_iovs + sqlite_iovs
        new_iovs_list = sorted(new_iovs_list, key=lambda iov : iov.since)

        for (n, iov) in enumerate(new_iovs_list):
            if iov.source == "oracle":
                if new_iovs_list[n].since in sqlite_iovs_sinces:
                    # an sqlite IOV at the same since takes precedence -
                    # mark the oracle one for removal
                    iov.source = "tobedeleted"
                else:
                    # otherwise take the payload of the nearest preceding
                    # sqlite IOV
                    for i in reversed(range(0, n)):
                        if new_iovs_list[i].source == "sqlite":
                            print("change %s to %s at since %d" % (iov.payload_hash, new_iovs_list[i].payload_hash, iov.since))
                            iov.payload_hash = new_iovs_list[i].payload_hash
                            break

        new_iov_list_copied = []
        for iov in new_iovs_list:
            # drop the IOVs that were superseded by sqlite ones
            if iov.source != "tobedeleted":
                new_iov_list_copied.append(iov)

        new_iov_list_copied = sorted(new_iov_list_copied, key=lambda iov : iov.since)

        now = datetime.datetime.now()

        new_iovs = []
        for iov in new_iov_list_copied:
            new_iovs.append(self.connection.models["iov"](iov.as_dicts(convert_timestamps=False), convert_timestamps=False))
            # NOTE(review): these assignments mutate the SOURCE iov, not the
            # copy just appended, as in the original - confirm whether the
            # copy should receive insertion_time/tag_name instead
            iov.insertion_time = now
            iov.tag_name = merged_tag_name

        new_tag.iovs_list = new_iovs
        return new_tag
883 classes = {
"globaltag" : GlobalTag,
"iov" : IOV,
"globaltagmap" : GlobalTagMap,\
884 "payload" : Payload,
"tag" : Tag,
"Base" : Base}
886 if class_name ==
None:
889 return classes[class_name]
def session_independent_object(object, schema=None)
S & print(S &os, JobReport::InputFile const &f)
def generate(map_blobs=False, class_name=None)
def class_name_to_column(cls)
def __init__(self, regexp)
OutputIterator zip(InputIterator1 first1, InputIterator1 last1, InputIterator2 first2, InputIterator2 last2, OutputIterator result, Compare comp)
def __init__(self, start, end)
static std::string join(char **cmd)
def session_independent(objects)
char data[epos_bytes_allocation]
def apply_filters(orm_query, orm_class, filters)
def status_full_name(status)
def __init__(self, centre, radius)
def date_args_to_days(radius)
def apply_filter(orm_query, orm_class, attribute, value)