"""
Using Audrius' models from flask browser.

This file contains models that are used with SQLAlchemy.

Note: some things done in methods written in classes rely on the querying module adding extra information to classes,
      so these will not work in a normal context outside the framework.
"""

try:
    import sqlalchemy
    from sqlalchemy.orm import relationship, backref
    from sqlalchemy.ext.declarative import declarative_base
    from sqlalchemy import Column, String, Integer, DateTime, Binary, ForeignKey, BigInteger, and_
except ImportError:
    print("You must be working inside a CMSSW environment. Try running 'cmsenv'.")
    exit()

# re and datetime are needed by the regexp filters and the merge algorithm below
import re
import datetime

import data_sources, data_formats
import urllib, urllib2, base64
from copy import deepcopy
from utils import to_timestamp, to_datetime, friendly_since

def session_independent_object(object, schema=None):
    # make an ORM object session-independent by rebuilding it from a freshly generated class
    map_blobs = False
    if object.__class__.__name__.lower() == "payload":
        map_blobs = object.blobs_mapped

    cls = object.__class__
    class_name = class_name_to_column(cls).lower()
    new_class = generate(map_blobs=map_blobs, class_name=class_name)
    new_class.__table__.schema = schema
    new_object = new_class(object.as_dicts(), convert_timestamps=False)

    return new_object

def session_independent(objects):
    if type(objects) == list:
        return map(session_independent_object, objects)
    else:
        # assume a single ORM object was given
        return session_independent_object(objects)

def class_name_to_column(cls):
    # convert a CamelCase ORM class name to its lower-case, underscore-separated form
    class_name = cls.__name__
    all_upper_case = True
    for character in class_name:
        all_upper_case = character.isupper()
    if all_upper_case:
        return class_name
    for n in range(0, len(class_name)):
        if class_name[n].isupper() and n != 0:
            class_name = str(class_name[0:n]) + "".join(["_", class_name[n].lower()]) + str(class_name[n+1:])
        elif class_name[n].isupper() and n == 0:
            class_name = str(class_name[0:n]) + "".join([class_name[n].lower()]) + str(class_name[n+1:])
    return class_name
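
# Worked example of the conversion above (illustrative only, class names are arbitrary):
#
#   class GlobalTag(object): pass
#   class IOV(object): pass
#
#   class_name_to_column(GlobalTag)   # -> "global_tag"
#   class_name_to_column(IOV)         # -> "IOV" (all-upper-case names are left unchanged)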

def status_full_name(status):
    # expand a one-character status code into its full name
    # (the mapping below is an assumed reconstruction; the original dictionary is not in this listing)
    full_status = {
        "P": "Pending",
        "R": "Rejected",
        "A": "Accepted"
    }
    return full_status[status]

def date_args_to_days(radius):
    # convert a dictionary of {"days", "weeks", "months", "years"} into a number of days
    # (months are counted as 28 days and years as 365 days)
    days = radius.get("days") if radius.get("days") != None else 0
    days += radius.get("weeks")*7 if radius.get("weeks") != None else 0
    days += radius.get("months")*28 if radius.get("months") != None else 0
    days += radius.get("years")*365 if radius.get("years") != None else 0
    return days
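
# Quick illustration of the conversion above - months count as 28 days and years as 365:
#
#   date_args_to_days({"days" : 3, "weeks" : 2})     # -> 17
#   date_args_to_days({"months" : 1, "years" : 1})   # -> 393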

class ContinuousRange(object):
    """
    Base class for Radius and Range - used for checking by apply_filter function
    """
    # Note: only the docstring above survives in this listing; the class name
    # "ContinuousRange" and the stub methods are assumed.

    def __init__(self):
        pass

    def get_start(self):
        pass

    def get_end(self):
        pass


class Radius(ContinuousRange):
    """
    Used to tell proxy methods that a range of values defined by a centre and a radius should be queried for - special case of filter clauses.
    """

    def __init__(self, centre, radius):
        """
        centre and radius should be objects that can be added and subtracted.
        eg, centre could be a datetime.datetime object, and radius could be a datetime.timedelta

        Radius and Range objects are assigned to properties of querying.connection objects, hence are given the database type.
        """
        self.centre = centre
        self.radius = radius

    def get_start(self):
        return self.centre - self.radius

    def get_end(self):
        return self.centre + self.radius


class Range(ContinuousRange):
    """
    Used to tell proxy methods that a range of values defined by a start and end point should be queried for - special case of filter clauses.
    """

    def __init__(self, start, end):
        """
        start and end should be objects that can be added and subtracted.
        eg, they could be datetime.datetime objects, or plain since values.

        Radius and Range objects are assigned to properties of querying.connection objects, hence are given the database type.
        """
        self.start = start
        self.end = end

    def get_start(self):
        return self.start

    def get_end(self):
        return self.end


class RegExp(object):
    """
    Used to tell proxy methods that a regular expression should be used to query the column.
    """

    def __init__(self, regexp):
        self.regexp = regexp
        # database_type and connection_object are attached by the querying module
        # when the RegExp is used as a filter value on a connection
        self.database_type = None
        self.connection_object = None

    def get_regexp(self):
        return self.regexp

    def apply(self, field):
        # apply this regular expression to the given column, using the
        # regexp dialect of the backend database
        regexp = self.get_regexp()
        if self.database_type in ["oracle", "frontier"]:
            return sqlalchemy.func.regexp_like(field, regexp)
        elif self.database_type == "sqlite":
            # sqlite has no built-in regexp function, so register one on the raw connection
            self.connection_object.engine.pool.connect().create_function('regexp', 2, lambda data, regexp: re.search(regexp, data) is not None)
            return sqlalchemy.func.regexp(field, regexp)
        else:
            raise NotImplementedError("Can only apply regular expression search to Oracle, Frontier and SQLite.")

def apply_filter(orm_query, orm_class, attribute, value):
    filter_attribute = getattr(orm_class, attribute)
    if type(value) == list:
        orm_query = orm_query.filter(filter_attribute.in_(value))
    elif type(value) == data_sources.json_list:
        orm_query = orm_query.filter(filter_attribute.in_(value.data()))
    elif type(value) in [Range, Radius]:

        minus = value.get_start()
        plus = value.get_end()
        orm_query = orm_query.filter(and_(filter_attribute >= minus, filter_attribute <= plus))

    elif type(value) == RegExp:

        # regular expression queries are only possible where the backend provides a regexp function
        if value.database_type in ["oracle", "frontier"]:
            regexp = sqlalchemy.func.regexp_like(filter_attribute, value.get_regexp())
        elif value.database_type == "sqlite":
            value.connection_object.engine.pool.connect().create_function('regexp', 2, lambda data, regexp: re.search(regexp, data) is not None)
            regexp = sqlalchemy.func.regexp(filter_attribute, value.get_regexp())
        else:
            raise NotImplementedError("Can only apply regular expression search to Oracle, Frontier and SQLite.")
        orm_query = orm_query.filter(regexp)

    else:
        orm_query = orm_query.filter(filter_attribute == value)
    return orm_query

def apply_filters(orm_query, orm_class, **filters):
    for (key, value) in filters.items():
        if not(key in ["amount"]):
            orm_query = apply_filter(orm_query, orm_class, key, value)
    return orm_query
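
# Sketch of what the helpers above produce, assuming a SQLAlchemy `session` and the
# generated IOV class (both are normally supplied by the querying module):
#
#   query = session.query(IOV)
#   # a Range value becomes a BETWEEN-style clause built with and_():
#   query = apply_filter(query, IOV, "since", Range(100, 200))
#   # a list value becomes an IN clause:
#   query = apply_filter(query, IOV, "tag_name", ["TagA", "TagB"])
#   # apply_filters skips the "amount" key, since it is a row limit rather than a column filter:
#   query = apply_filters(query, IOV, tag_name=["TagA", "TagB"], amount=10)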

def generate(map_blobs=False, class_name=None):

    Base = declarative_base()

    class GlobalTag(Base):
        __tablename__ = 'GLOBAL_TAG'

        headers = ["name", "validity", "description", "release", "insertion_time", "snapshot_time", "scenario", "workflow", "type"]

        name = Column(String(100), unique=True, nullable=False, primary_key=True)
        validity = Column(Integer, nullable=False)
        description = Column(String(4000), nullable=False)
        release = Column(String(100), nullable=False)
        insertion_time = Column(DateTime, nullable=False)
        snapshot_time = Column(DateTime, nullable=False)
        scenario = Column(String(100))
        workflow = Column(String(100))
        type = Column(String(1))
        tag_map = relationship('GlobalTagMap', backref='global_tag')

        def __init__(self, dictionary={}, convert_timestamps=True):
            # assign each entry in the dictionary to an attribute of the same name,
            # converting datetimes to timestamps if requested
            for key in dictionary:
                try:
                    if convert_timestamps:
                        self.__dict__[key] = to_timestamp(dictionary[key])
                    else:
                        self.__dict__[key] = dictionary[key]
                except KeyError as k:
                    continue

        def __repr__(self):
            return '<GlobalTag %r>' % self.name

        def as_dicts(self, convert_timestamps=False):
            """
            Returns dictionary form of Global Tag object.
            """
            json_gt = {
                'name': self.name,
                'validity': self.validity,
                'description': self.description,
                'release': self.release,
                'insertion_time': to_timestamp(self.insertion_time) if convert_timestamps else self.insertion_time,
                'snapshot_time': to_timestamp(self.snapshot_time) if convert_timestamps else self.snapshot_time,
                'scenario': self.scenario,
                'workflow': self.workflow,
                'type': self.type
            }
            return json_gt

        def to_list(self):
            return [self.name, self.release, to_timestamp(self.insertion_time), to_timestamp(self.snapshot_time), self.description]

        def all(self, **kwargs):
            """
            Returns `amount` Global Tags ordered by Global Tag name.
            """
            query = self.session.query(GlobalTag)
            query = apply_filters(query, GlobalTag, **kwargs)
            amount = kwargs["amount"] if "amount" in kwargs.keys() else None
            query_result = query.order_by(GlobalTag.name).limit(amount).all()
            return data_sources.json_data_node.make(query_result)

        def tags(self, **kwargs):
            """
            Returns `amount` Global Tag Maps belonging to this Global Tag.
            """
            kwargs["global_tag_name"] = self.name
            all_tags = self.session.query(GlobalTagMap.global_tag_name, GlobalTagMap.record, GlobalTagMap.label, GlobalTagMap.tag_name)
            all_tags = apply_filters(all_tags, GlobalTagMap, **kwargs)
            amount = kwargs["amount"] if "amount" in kwargs.keys() else None
            all_tags = all_tags.order_by(GlobalTagMap.tag_name).limit(amount).all()
            column_names = ["global_tag_name", "record", "label", "tag_name"]
            all_tags = map(lambda row : dict(zip(column_names, map(to_timestamp, row))), all_tags)
            all_tags = data_formats._dicts_to_orm_objects(GlobalTagMap, all_tags)
            return data_sources.json_data_node.make(all_tags)

        def iovs(self, **kwargs):
            """
            Returns `amount` IOVs belonging to all Tags held in this Global Tag.
            For large Global Tags (which is most of them), VERY slow.
            Highly recommended to instead use `tags().get_members("tag_name").data()` to get a `list` of tag names,
            and then get IOVs from each Tag name.

            At some point, this method may replace the method currently used.
            """
            # first select the IOVs belonging to every Tag in this Global Tag
            tag_names = self.tags().get_members("tag_name").data()
            iovs_all_tags = self.session.query(IOV).filter(IOV.tag_name.in_(tag_names))
            iovs_all_tags = apply_filters(iovs_all_tags, IOV, **kwargs)
            amount = kwargs["amount"] if "amount" in kwargs.keys() else None
            iovs_all_tags = iovs_all_tags.limit(amount).subquery()

            # now join the Global Tag Map table onto that subquery by tag name
            iovs_gt_tags = self.session.query(GlobalTagMap.tag_name, iovs_all_tags.c.since,
                                              iovs_all_tags.c.payload_hash, iovs_all_tags.c.insertion_time)\
                               .filter(GlobalTagMap.global_tag_name == self.name)\
                               .join(iovs_all_tags, GlobalTagMap.tag_name == iovs_all_tags.c.tag_name)

            iovs_gt_tags = iovs_gt_tags.order_by(iovs_all_tags.c.since).all()

            column_names = ["tag_name", "since", "payload_hash", "insertion_time"]
            all_iovs = map(lambda row : dict(zip(column_names, row)), iovs_gt_tags)
            all_iovs = data_formats._dicts_to_orm_objects(IOV, all_iovs)

            return data_sources.json_data_node.make(all_iovs)
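
        # The pattern recommended in the docstring above, as a sketch. `gt` is a
        # session-bound GlobalTag and `con` a connection from the querying module;
        # con.tag(...) is assumed to be the querying proxy for fetching a Tag by name.
        #
        #   tag_names = gt.tags().get_members("tag_name").data()
        #   for tag_name in tag_names:
        #       iovs = con.tag(name=tag_name).iovs(amount=10).data()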

        def __sub__(self, other):
            """
            Allows Global Tag objects to be used with the "-" arithmetic operator to find their difference.
            Note: gt1 - gt2 = gt1.diff(gt2) ( = gt2 - gt1 = gt2.diff(gt1))
            """
            return self.diff(other)

        def diff(self, gt):
            """
            Returns the json_list of differences in the form of tuples:

            (record, label, tag name of gt1 (self), tag name of gt2 (gt))
            """

            record_label_to_tag_name1 = dict([((gt_map.record, gt_map.label), gt_map.tag_name) for gt_map in self.tags().data()])
            record_label_to_tag_name2 = dict([((gt_map.record, gt_map.label), gt_map.tag_name) for gt_map in gt.tags().data()])

            record_label_pairs = sorted(set(record_label_to_tag_name1) | set(record_label_to_tag_name2))

            tags_pairs_with_differences = []

            for record_label in record_label_pairs:
                tag_name1 = record_label_to_tag_name1.get(record_label)
                tag_name2 = record_label_to_tag_name2.get(record_label)

                if tag_name1 == None or tag_name2 == None or tag_name1 != tag_name2:
                    tags_pairs_with_differences.append({
                        "Record" : record_label[0],
                        "Label" : record_label[1],
                        ("%s Tag" % self.name) : tag_name1,
                        ("%s Tag" % gt.name) : tag_name2
                    })

            return data_sources.json_data_node.make(tags_pairs_with_differences)

    class GlobalTagMap(Base):
        __tablename__ = 'GLOBAL_TAG_MAP'

        headers = ["global_tag_name", "record", "label", "tag_name"]

        global_tag_name = Column(String(100), ForeignKey('GLOBAL_TAG.name'), primary_key=True, nullable=False)
        record = Column(String(100), ForeignKey('RECORDS.record'), primary_key=True, nullable=False)
        label = Column(String(100), primary_key=True, nullable=False)
        tag_name = Column(String(100), ForeignKey('TAG.name'), nullable=False)

        def __init__(self, dictionary={}, convert_timestamps=True):
            # assign each entry in the dictionary to an attribute of the same name,
            # converting datetimes to timestamps if requested
            for key in dictionary:
                try:
                    if convert_timestamps:
                        self.__dict__[key] = to_timestamp(dictionary[key])
                    else:
                        self.__dict__[key] = dictionary[key]
                except KeyError as k:
                    continue

        def __repr__(self):
            return '<GlobalTagMap %r>' % self.global_tag_name

        def as_dicts(self, convert_timestamps=False):
            """
            Returns dictionary form of this Global Tag Map.
            """
            json_gtm = {
                "global_tag_name" : str(self.global_tag_name),
                "record" : str(self.record),
                "label" : str(self.label),
                "tag_name" : str(self.tag_name)
            }
            return json_gtm

    class GlobalTagMapRequest(Base):
        __tablename__ = 'GLOBAL_TAG_MAP_REQUEST'

        queue = Column(String(100), primary_key=True, nullable=False)
        tag = Column(String(100), ForeignKey('TAG.name'), primary_key=True, nullable=False)
        record = Column(String(100), ForeignKey('RECORDS.record'), primary_key=True, nullable=False)
        label = Column(String(100), primary_key=True, nullable=False)
        status = Column(String(1), nullable=False)
        description = Column(String(4000), nullable=False)
        submitter_id = Column(Integer, nullable=False)
        time_submitted = Column(DateTime, nullable=False)
        last_edited = Column(DateTime, nullable=False)

        def __init__(self, dictionary={}, convert_timestamps=True):
            # assign each entry in the dictionary to an attribute of the same name,
            # converting datetimes to timestamps if requested
            for key in dictionary:
                try:
                    if convert_timestamps:
                        self.__dict__[key] = to_timestamp(dictionary[key])
                    else:
                        self.__dict__[key] = dictionary[key]
                except KeyError as k:
                    continue

        headers = ["queue", "tag", "record", "label", "status", "description", "submitter_id", "time_submitted", "last_edited"]

        def as_dicts(self):
            """
            Returns dictionary form of this Global Tag Map Request.
            """
            return {
                "queue" : self.queue,
                "tag" : self.tag,
                "record" : self.record,
                "label" : self.label,
                "status" : self.status,
                "description" : self.description,
                "submitter_id" : self.submitter_id,
                "time_submitted" : self.time_submitted,
                "last_edited" : self.last_edited
            }

        def __repr__(self):
            return '<GlobalTagMapRequest %r>' % self.queue

    class IOV(Base):
        __tablename__ = 'IOV'

        headers = ["tag_name", "since", "payload_hash", "insertion_time"]

        tag_name = Column(String(4000), ForeignKey('TAG.name'), primary_key=True, nullable=False)
        since = Column(Integer, primary_key=True, nullable=False)
        payload_hash = Column(String(40), ForeignKey('PAYLOAD.hash'), nullable=False)
        insertion_time = Column(DateTime, primary_key=True, nullable=False)

        def __init__(self, dictionary={}, convert_timestamps=True):
            # assign each entry in the dictionary to an attribute of the same name,
            # converting datetimes to timestamps if requested
            for key in dictionary:
                try:
                    if convert_timestamps:
                        self.__dict__[key] = to_timestamp(dictionary[key])
                    else:
                        self.__dict__[key] = dictionary[key]
                except KeyError as k:
                    continue

        def as_dicts(self, convert_timestamps=False):
            """
            Returns dictionary form of this IOV.
            """
            return {
                "tag_name" : self.tag_name,
                "since" : self.since,
                "payload_hash" : self.payload_hash,
                "insertion_time" : to_timestamp(self.insertion_time) if convert_timestamps else self.insertion_time
            }

        def __repr__(self):
            return '<IOV %r>' % self.tag_name

        def to_list(self):
            return [self.since, to_timestamp(self.insertion_time), self.payload_hash]

        def all(self, **kwargs):
            """
            Returns `amount` IOVs ordered by since.
            """
            query = self.session.query(IOV)
            query = apply_filters(query, IOV, **kwargs)
            amount = kwargs["amount"] if "amount" in kwargs.keys() else None
            query_result = query.order_by(IOV.tag_name).order_by(IOV.since).limit(amount).all()
            return data_sources.json_data_node.make(query_result)

    class Payload(Base):
        __tablename__ = 'PAYLOAD'

        headers = ["hash", "object_type", "version", "insertion_time"]

        hash = Column(String(40), primary_key=True, nullable=False)
        object_type = Column(String(4000), nullable=False)
        version = Column(String(4000), nullable=False)
        insertion_time = Column(DateTime, nullable=False)
        # only map the blob columns if the generated class was asked for them
        if map_blobs:
            data = Column(Binary, nullable=False)
            streamer_info = Column(Binary, nullable=False)
        blobs_mapped = map_blobs

        def __init__(self, dictionary={}, convert_timestamps=True):
            # assign each entry in the dictionary to an attribute of the same name,
            # converting datetimes to timestamps if requested
            for key in dictionary:
                try:
                    if convert_timestamps:
                        self.__dict__[key] = to_timestamp(dictionary[key])
                    else:
                        self.__dict__[key] = dictionary[key]
                except KeyError as k:
                    continue

        if map_blobs:
            def as_dicts(self, convert_timestamps=False):
                """
                Returns dictionary form of this Payload's metadata (not the actual Payload).
                """
                return {
                    "hash" : self.hash,
                    "object_type" : self.object_type,
                    "version" : self.version,
                    "insertion_time" : to_timestamp(self.insertion_time) if convert_timestamps else self.insertion_time,
                    "data" : self.data,
                    "streamer_info" : self.streamer_info
                }
        else:
            def as_dicts(self, convert_timestamps=False):
                """
                Returns dictionary form of this Payload's metadata (not the actual Payload).
                """
                return {
                    "hash" : self.hash,
                    "object_type" : self.object_type,
                    "version" : self.version,
                    "insertion_time" : to_timestamp(self.insertion_time) if convert_timestamps else self.insertion_time
                }

        def __repr__(self):
            return '<Payload %r>' % self.hash

        def to_list(self):
            return [self.hash, self.object_type, self.version, to_timestamp(self.insertion_time)]

        def parent_tags(self, **kwargs):
            """
            Returns `amount` parent Tags ordered by Tag name.
            """
            # select the names of Tags whose IOVs refer to this Payload's hash,
            # then retrieve the Tag objects for those names
            kwargs["payload_hash"] = self.hash
            query = self.session.query(IOV.tag_name)
            query = apply_filters(query, IOV, **kwargs)
            query_result = query.all()
            tag_names = map(lambda entry : entry[0], query_result)
            amount = kwargs["amount"] if "amount" in kwargs.keys() else None
            tags = self.session.query(Tag).filter(Tag.name.in_(tag_names)).order_by(Tag.name).limit(amount).all()
            return data_sources.json_data_node.make(tags)

        def all(self, **kwargs):
            """
            Returns `amount` Payloads ordered by Payload hash.
            """
            query = self.session.query(Payload)
            query = apply_filters(query, Payload, **kwargs)
            amount = kwargs["amount"] if "amount" in kwargs.keys() else None
            query_result = query.order_by(Payload.hash).limit(amount).all()
            return data_sources.json_data_node.make(query_result)

    class Record(Base):
        __tablename__ = 'RECORDS'

        headers = ["record", "object", "type"]

        record = Column(String(100), primary_key=True, nullable=False)
        object = Column(String(200), nullable=False)
        type = Column(String(20), nullable=False)

        def as_dicts(self):
            """
            Returns dictionary form of this Record.
            """
            return {
                "record" : self.record,
                "object" : self.object
            }

        def __repr__(self):
            return '<Record %r>' % self.record

        def to_list(self):
            return [self.record, self.object]

        def all(self, **kwargs):
            """
            Returns `amount` Records ordered by record name.
            """
            query = self.session.query(Record)
            query = apply_filters(query, Record, **kwargs)
            amount = kwargs["amount"] if "amount" in kwargs.keys() else None
            query_result = query.order_by(Record.record).limit(amount).all()
            return data_sources.json_data_node.make(query_result)

    class Tag(Base):
        __tablename__ = 'TAG'

        headers = ["name", "time_type", "object_type", "synchronization", "end_of_validity",
                   "description", "last_validated_time", "insertion_time", "modification_time"]

        name = Column(String(4000), primary_key=True, nullable=False)
        time_type = Column(String(4000), nullable=False)
        object_type = Column(String(4000), nullable=False)
        synchronization = Column(String(4000), nullable=False)
        end_of_validity = Column(Integer, nullable=False)
        description = Column(String(4000), nullable=False)
        last_validated_time = Column(BigInteger, nullable=False)
        insertion_time = Column(DateTime, nullable=False)
        modification_time = Column(DateTime, nullable=False)

        # record and label are not columns of the TAG table; they are filled in at
        # query time when a Tag is taken from a Global Tag map
        record = None
        label = None

        iovs_list = relationship('IOV', backref='tag')

        def __init__(self, dictionary={}, convert_timestamps=True):
            # assign each entry in the dictionary to an attribute of the same name,
            # converting datetimes to timestamps if requested
            for key in dictionary:
                try:
                    if convert_timestamps:
                        self.__dict__[key] = to_timestamp(dictionary[key])
                    else:
                        self.__dict__[key] = dictionary[key]
                except KeyError as k:
                    continue

        def as_dicts(self, convert_timestamps=False):
            """
            Returns dictionary form of this Tag.
            """
            return {
                "name" : self.name,
                "time_type" : self.time_type,
                "object_type" : self.object_type,
                "synchronization" : self.synchronization,
                "end_of_validity" : self.end_of_validity,
                "description" : self.description,
                "last_validated_time" : self.last_validated_time,
                "insertion_time" : to_timestamp(self.insertion_time) if convert_timestamps else self.insertion_time,
                "modification_time" : to_timestamp(self.modification_time) if convert_timestamps else self.modification_time,
                "record" : self.record,
                "label" : self.label
            }

        def __repr__(self):
            return '<Tag %r>' % self.name

        def to_list(self):
            return [self.name, self.time_type, self.object_type, self.synchronization, to_timestamp(self.insertion_time), self.description]

        def parent_global_tags(self, **kwargs):
            """
            Returns `amount` Global Tags that contain this Tag.
            """
            # find the names of all Global Tags that map to this Tag, then fetch those Global Tag objects
            kwargs["tag_name"] = self.name
            query = self.session.query(GlobalTagMap.global_tag_name)
            query = apply_filters(query, GlobalTagMap, **kwargs)
            query_result = query.all()
            if len(query_result) != 0:
                global_tag_names = map(lambda entry : entry[0], query_result)
                amount = kwargs["amount"] if "amount" in kwargs.keys() else None
                global_tags = self.session.query(GlobalTag).filter(GlobalTag.name.in_(global_tag_names)).order_by(GlobalTag.name).limit(amount).all()
            else:
                global_tags = None
            return data_sources.json_data_node.make(global_tags)

        def all(self, **kwargs):
            """
            Returns `amount` Tags ordered by Tag name.
            """
            query = self.session.query(Tag)
            query = apply_filters(query, Tag, **kwargs)
            amount = kwargs["amount"] if "amount" in kwargs.keys() else None
            query_result = query.order_by(Tag.name).limit(amount).all()
            return data_sources.json_data_node.make(query_result)

        def iovs(self, **kwargs):
            """
            Returns `amount` IOVs that belong to this Tag ordered by IOV since.
            """
            # restrict to IOVs belonging to this Tag, then apply any extra filters given
            iovs_query = self.session.query(IOV).filter(IOV.tag_name == self.name)
            iovs_query = apply_filters(iovs_query, IOV, **kwargs)
            amount = kwargs["amount"] if "amount" in kwargs.keys() else None
            iovs = iovs_query.order_by(IOV.since).limit(amount).all()
            return data_sources.json_data_node.make(iovs)
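
        # Example filters for the method above (a session-bound Tag `tag` is assumed;
        # the RegExp filter additionally needs the querying module to attach the database type):
        #
        #   tag.iovs(amount=5)                        # first 5 IOVs ordered by since
        #   tag.iovs(since=Range(1, 1000))            # IOVs with 1 <= since <= 1000
        #   tag.iovs(payload_hash=RegExp("^abc"))     # IOVs whose payload hash matches a pattern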

        def latest_iov(self):
            """
            Returns the single highest since held by this Tag.
            Insertion times do not matter - if two IOVs share a since greater than all others, both have the highest since.
            """
            iov = self.session.query(IOV).filter(IOV.tag_name == self.name).order_by(IOV.since.desc()).first()
            return iov

        def __sub__(self, other):
            """
            Allows the arithmetic operator "-" to be applied to find the difference between two Tags.
            Note: diff() is symmetric, hence tag1 - tag2 = tag2 - tag1.
            """
            return self.diff(other)

        def diff(self, tag, short=False):
            """
            Returns the `diff` of the first Tag, and the Tag given.
            Summary of algorithm:

            Compute the ordered set of iov sinces from both tags, and construct a list of triples, (since, tag1 hash, tag2 hash).
            Set previous_payload1 and previous_payload2 to be the first hash values from each tag for the first since in the merged list.
            Note: depending on where each Tag's IOVs start, 1 or both of these values can be None.
            Set first_since_in_equality_range = None, which holds the since at which the last hashes were equal in the Tags.
            For each triple (since, hash1, hash2):

                If first_since_in_equality_range == None,
                    we are at the first since in the merged list, so set first_since_in_equality_range = since.
                    Note: this is to set the previous... values for the second row, since the first row will never result in a printed row because
                    a row is only printed when past iovs have been processed.

                If either hash1 or hash2 is None, set it to the previous hash found.
                Note: if a Tag defines a hash for one since and then not another for n rows, the last defined hash will be carried through because of this.

                If the previously found hashes were equal, we have equality on the range [first_since_in_equality_range, since).
                Note: we CANNOT conclude anything about the hashes corresponding to sinces >= since
                because we have not looked forward, but we do know about the previous hashes.

                    If the hashes on this row differ, the region of equality has ended, and [first_since_in_equality_range, since) is equal for both Tags.
                    Hence, print that for this range we have equal hashes denoted by "=" in each hash column.

                If the previous hashes were not equal, we must still check that the hashes on this row are not identical:
                    if the hashes on this row are the same as the hashes above (hash1 == previous_payload1 and hash2 == previous_payload2),
                    then we have not found the end of a region of equality;
                    if the hashes have changed, print a row.
            """

            if tag.__class__.__name__ != "Tag":
                raise TypeError("Tag given must be a CondDBFW Tag object.")

            # build {since : payload_hash} maps for both Tags
            iovs1 = dict(map(lambda iov : (iov.since, iov.payload_hash), self.iovs().data()))
            iovs2 = dict(map(lambda iov : (iov.since, iov.payload_hash), tag.iovs().data()))

            iovs = [(x, iovs1.get(x), iovs2.get(x)) for x in sorted(set(iovs1) | set(iovs2))]
            # add a sentinel row so the final real range is closed
            iovs.append(("Infinity", 1, 2))
            table = []

            previous_hash1 = None
            previous_hash2 = None
            first_since_in_equality_range = None
            previous_equal = False

            for since, hash1, hash2 in iovs:

                if first_since_in_equality_range == None:
                    # the first since in the merged list only sets up the "previous" state
                    # for the second row - it never produces a printed row itself
                    first_since_in_equality_range = since
                    previous_hash1 = hash1
                    previous_hash2 = hash2
                    previous_equal = hash1 == hash2
                    continue

                # if either hash is undefined at this since, carry the last defined hash through
                if hash1 == None:
                    hash1 = previous_hash1
                if hash2 == None:
                    hash2 = previous_hash2

                if previous_equal:
                    # the previous hashes were equal, so [first_since_in_equality_range, since)
                    # is a region of equality - close it if the hashes have now diverged
                    if hash1 != hash2:
                        table.append({"since" : "[%s, %s)" % (first_since_in_equality_range, since), self.name : "=", tag.name : "="})
                        first_since_in_equality_range = since
                else:
                    # the previous hashes were not equal, but only print a row if the hashes have changed
                    if not(hash1 == previous_hash1 and hash2 == previous_hash2):
                        table.append({"since" : "[%s, %s)" % (first_since_in_equality_range, since), self.name : previous_hash1, tag.name : previous_hash2})
                        first_since_in_equality_range = since

                previous_hash1 = hash1
                previous_hash2 = hash2
                previous_equal = hash1 == hash2

            return data_sources.json_data_node.make(table)
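
        # Example of the diff algorithm described above (tag1 and tag2 are assumed to be
        # session-bound Tag objects):
        #
        #   rows = (tag1 - tag2).data()
        #   # each row is {"since" : "[x, y)", tag1.name : hash or "=", tag2.name : hash or "="},
        #   # where "=" marks a range of sinces over which both Tags hold the same payload hash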

        def merge_into(self, tag, range_object):
            """
            Given another connection, apply the 'merge' algorithm to merge the IOVs from this Tag
            into the IOVs of the other Tag.

            tag : CondDBFW Tag object that the IOVs from this Tag should be merged into.

            range_object : CondDBFW.data_sources.Range object to describe the subset of IOVs that should be copied
            from the database this Tag belongs to.

            Script originally written by Joshua Dawes,
            and adapted by Giacomo Govi, Gianluca Cerminara and Giovanni Franzoni.
            """

            oracle_tag = self
            merged_tag_name = oracle_tag.name + "_merged"

            # the range of sinces to import from this (oracle) Tag
            since_range = range_object

            # the Tag given is the Tag from the sqlite file whose IOVs are merged in
            sqlite_tag = tag
            if sqlite_tag == None:
                raise TypeError("Tag to be merged cannot be None.")

            sqlite_iovs = sqlite_tag.iovs().data()
            sqlite_tag.iovs().as_table()

            # create a new Tag object to hold the merged IOVs
            new_tag = self.connection.models["tag"](sqlite_tag.as_dicts(convert_timestamps=False), convert_timestamps=False)
            new_tag.name = merged_tag_name

            imported_iovs = oracle_tag.iovs(since=since_range).data()

            for i in range(0, len(imported_iovs)):
                imported_iovs[i].source = "oracle"

            sqlite_iovs_sinces = []
            for i in range(0, len(sqlite_iovs)):
                sqlite_iovs[i].source = "sqlite"
                sqlite_iovs_sinces.append(sqlite_iovs[i].since)

            print(sqlite_iovs_sinces)

            new_iovs_list = imported_iovs + sqlite_iovs
            new_iovs_list = sorted(new_iovs_list, key=lambda iov : iov.since)

            for (n, iov) in enumerate(new_iovs_list):
                # if an oracle iov shares its since with an sqlite iov, the sqlite one wins;
                # otherwise the oracle iov takes the payload hash of the nearest sqlite iov below it
                if iov.source == "oracle":
                    if new_iovs_list[n].since in sqlite_iovs_sinces:
                        # since is already defined in the sqlite Tag - mark the oracle iov for deletion
                        iov.source = "tobedeleted"
                    else:
                        for i in reversed(range(0, n)):
                            if new_iovs_list[i].source == "sqlite":
                                print("change %s to %s at since %d" % (iov.payload_hash, new_iovs_list[i].payload_hash, iov.since))
                                iov.payload_hash = new_iovs_list[i].payload_hash
                                break

            new_iov_list_copied = []

            for iov in new_iovs_list:
                # only keep iovs that have not been marked for deletion
                if iov.source != "tobedeleted":
                    new_iov_list_copied.append(iov)

            new_iov_list_copied = sorted(new_iov_list_copied, key=lambda iov : iov.since)

            now = datetime.datetime.now()

            new_iovs = []
            for iov in new_iov_list_copied:
                new_iovs.append( self.connection.models["iov"](iov.as_dicts(convert_timestamps=False), convert_timestamps=False) )
            for iov in new_iovs:
                iov.insertion_time = now
                iov.tag_name = merged_tag_name

            new_tag.iovs_list = new_iovs

            return new_tag
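
        # Sketch of a merge (the objects here are assumptions): `oracle_tag` would be a
        # session-bound Tag from an Oracle/Frontier connection and `sqlite_tag` a Tag read
        # from a local sqlite file, both obtained through the querying module.
        #
        #   merged = oracle_tag.merge_into(sqlite_tag, Range(270000, 300000))
        #   # merged.name ends in "_merged" and merged.iovs_list holds the combined IOVs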

    classes = {"globaltag" : GlobalTag, "iov" : IOV, "globaltagmap" : GlobalTagMap,
               "payload" : Payload, "tag" : Tag, "Base" : Base}

    if class_name == None:
        return classes
    else:
        return classes[class_name]
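
# Minimal sketch of how generate() can be used outside the querying framework: build the
# ORM classes (optionally with blob columns mapped) and pick one by its lower-case name.
# The schema name below is only an example value.
#
#   models_dict = generate(map_blobs=False)
#   Tag = generate(class_name="tag")
#   Payload = generate(map_blobs=True, class_name="payload")
#   Payload.__table__.schema = "CMS_CONDITIONS"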