"""
Using Audrius' models from the flask browser.

This file contains the models that are used with SQLAlchemy.

Note: some of the methods defined on these classes rely on the querying module
adding extra information to the classes, so they will not work in a normal
context outside the framework.
"""
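# Illustrative sketch (not part of the original file): the note above means the
# querying framework is expected to inject attributes such as "session" (a
# SQLAlchemy session) and "authentication" into these classes before their
# query helpers are used. Under that assumption, standalone use would look
# roughly like this:
#
#   from sqlalchemy import create_engine
#   from sqlalchemy.orm import sessionmaker
#
#   engine = create_engine("oracle://user:password@host/db")   # hypothetical connection string
#   GlobalTag.session = sessionmaker(bind=engine)()             # attribute normally injected by the framework
#   recent = GlobalTag.session.query(GlobalTag).order_by(GlobalTag.insertion_time.desc()).limit(10).all()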
from sqlalchemy.orm import relationship, backref
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy import Column, String, Integer, DateTime, ForeignKey, and_
import data_sources, data_formats
import urllib, urllib2, base64
from copy import deepcopy
# datetime and json are used below (datetime.datetime, datetime.timedelta, json.dumps)
import datetime
import json
def to_timestamp(obj):
    # render datetime objects as formatted strings; leave everything else unchanged
    return obj.strftime('%Y-%m-%d %H:%M:%S,%f') if isinstance(obj, datetime.datetime) else obj
    # body of a status-lookup helper: full_status (defined in elided lines)
    # maps one-letter status codes to their full names
    return full_status[status]
    # convert a days/weeks/months/years "radius" into a number of days
    days = radius.get("days") if radius.get("days") is not None else 0
    days += radius.get("weeks")*7 if radius.get("weeks") is not None else 0
    days += radius.get("months")*28 if radius.get("months") is not None else 0
    days += radius.get("years")*365 if radius.get("years") is not None else 0
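# Usage note (illustrative): the *_interval query methods defined on the models
# below take the same days/weeks/months/years keyword arguments, so a call such
# as insertion_time_interval(some_time, weeks=2) searches roughly two weeks
# either side of some_time (assuming **radius is routed through this computation).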
Base = declarative_base()

class GlobalTag(Base):
    __tablename__ = 'GLOBAL_TAG'

    headers = ["name", "validity", "description", "release", "insertion_time",
               "snapshot_time", "scenario", "workflow", "type"]

    name = Column(String(100), unique=True, nullable=False, primary_key=True)
    validity = Column(Integer, nullable=False)
    description = Column(String(4000), nullable=False)
    release = Column(String(100), nullable=False)
    insertion_time = Column(DateTime, nullable=False)
    snapshot_time = Column(DateTime, nullable=False)
    scenario = Column(String(100))
    workflow = Column(String(100))
    type = Column(String(1))
    tag_map = relationship('GlobalTagMap', backref='global_tag')
    def __init__(self, dictionary={}, convert_timestamps=True):
        # copy each entry onto the instance, converting timestamps when requested
        for key in dictionary:
            try:
                if convert_timestamps:
                    self.__dict__[key] = to_timestamp(dictionary[key])
                else:
                    self.__dict__[key] = dictionary[key]
            except KeyError as k:
                continue
    def __repr__(self):
        return '<GlobalTag %r>' % self.name

            # (entries of the dict-serialisation method; its "def" and "return {" lines are elided)
            'validity': self.validity,
            'description': self.description,
            'release': self.release,
            'insertion_time': self.insertion_time,
            'snapshot_time': self.snapshot_time,
            'scenario': self.scenario,
            'workflow': self.workflow,

    def to_array(self):
        return [self.name, self.release, to_timestamp(self.insertion_time),
                to_timestamp(self.snapshot_time), self.description]
            'headers': ['Global Tag', 'Release', 'Insertion Time', 'Snapshot Time', 'Description'],
            'data': [g.to_array() for g in global_tags],

    def all(self, amount=10):
    def tags(self, amount=10):
        """gt_maps = self.session.query(GlobalTagMap).filter(GlobalTagMap.global_tag_name == self.name).limit(amount).subquery()
        all_tags = self.session.query(gt_maps.c.record, gt_maps.c.label,\
                                      Tag.name, Tag.time_type, Tag.object_type,\
                                      Tag.synchronization, Tag.end_of_validity, Tag.description,\
                                      Tag.last_validated_time, Tag.insertion_time,\
                                      Tag.modification_time)\
                    .join(gt_maps, Tag.name == gt_maps.c.tag_name).order_by(Tag.name.asc()).limit(amount).all()"""
        all_tags = self.session.query(GlobalTagMap.global_tag_name, GlobalTagMap.record,
                                      GlobalTagMap.label, GlobalTagMap.tag_name)\
                       .filter(GlobalTagMap.global_tag_name == self.name)\
                       .order_by(GlobalTagMap.tag_name).limit(amount).all()
        column_names = ["global_tag_name", "record", "label", "tag_name"]
        all_tags = map(lambda row: dict(zip(column_names, map(to_timestamp, row))), all_tags)
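        # Usage note (illustrative): tags() assembles one dict per mapping in this
        # global tag, keyed by the column_names above, e.g.
        #   {"global_tag_name": ..., "record": ..., "label": ..., "tag_name": ...}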
    def tags_full(self, amount=10):
        tags = self.session.query(Tag).order_by(Tag.name).subquery()
        all_tags = self.session.query(GlobalTagMap.global_tag_name,
                                      GlobalTagMap.record,
                                      GlobalTagMap.label,  # (elided in this fragment, but present in column_names below)
                                      tags.c.name, tags.c.time_type, tags.c.object_type,
                                      tags.c.synchronization, tags.c.end_of_validity, tags.c.description,
                                      tags.c.last_validated_time, tags.c.insertion_time,
                                      tags.c.modification_time)\
                       .join(tags, GlobalTagMap.tag_name == tags.c.name)\
                       .filter(GlobalTagMap.global_tag_name == self.name)
        all_tags = all_tags.limit(amount)
        all_tags = all_tags.all()
        column_names = ["global_tag_name", "record", "label", "name", "time_type", "object_type",
                        "synchronization", "end_of_validity", "description", "last_validated_time",
                        "insertion_time", "modification_time"]
        all_tags = map(lambda row: dict(zip(column_names, map(to_timestamp, row))), all_tags)
    def insertion_time_interval(self, insertion_time, **radius):
        # "days" is derived from the **radius kwargs in elided lines
        # (see the days/weeks/months/years computation near the top of the file)
        minus = insertion_time - datetime.timedelta(days=days)
        plus = insertion_time + datetime.timedelta(days=days)
        gts = self.session.query(GlobalTag).filter(and_(GlobalTag.insertion_time >= minus,
                                                        GlobalTag.insertion_time <= plus)).order_by(GlobalTag.name).all()

    def snapshot_time_interval(self, snapshot_time, **radius):
        minus = snapshot_time - datetime.timedelta(days=days)
        plus = snapshot_time + datetime.timedelta(days=days)
        gts = self.session.query(GlobalTag).filter(and_(GlobalTag.snapshot_time >= minus,
                                                        GlobalTag.snapshot_time <= plus)).order_by(GlobalTag.name).all()
    def iovs(self, amount=10, valid=False):
        valid_iovs_all_tags = self.session.query(IOV)
        # when valid=True, keep only IOVs inserted before this global tag's snapshot
        # time (the "if valid:" guard is reconstructed; it sits in an elided line)
        if valid:
            valid_iovs_all_tags = valid_iovs_all_tags.filter(IOV.insertion_time < self.snapshot_time)
        valid_iovs_all_tags = valid_iovs_all_tags.subquery()
        valid_iovs_gt_tags = self.session.query(GlobalTagMap.tag_name, valid_iovs_all_tags.c.since,
                                                valid_iovs_all_tags.c.payload_hash, valid_iovs_all_tags.c.insertion_time)\
                                 .join(valid_iovs_all_tags, GlobalTagMap.tag_name == valid_iovs_all_tags.c.tag_name)\
                                 .filter(GlobalTagMap.global_tag_name == self.name)\
                                 .order_by(valid_iovs_all_tags.c.insertion_time).limit(amount).all()
        column_names = ["tag_name", "since", "payload_hash", "insertion_time"]
        all_iovs = map(lambda row: dict(zip(column_names, map(to_timestamp, row))), valid_iovs_gt_tags)
    def pending_tag_requests(self):
        gt_map_requests = self.session.query(GlobalTagMapRequest.queue, GlobalTagMapRequest.record, GlobalTagMapRequest.label,
                                             GlobalTagMapRequest.tag, GlobalTagMapRequest.status)\
                              .filter(and_(GlobalTagMapRequest.queue == self.name,
                                           GlobalTagMapRequest.status.in_(["P", "R"]))).all()
        column_names = ["queue", "record", "label", "tag", "status"]
        gt_map_requests = map(lambda row: dict(zip(column_names, map(to_timestamp, row))), gt_map_requests)

    def candidate(self, gt_map_requests):
        new_candidate = Candidate(self, gt_map_requests)
class Candidate(object):
    # A Candidate is built from a queue-type global tag plus a set of
    # GlobalTagMapRequests, and is prepared for upload over HTTP (see the
    # urlencode/authentication code further down).
    global_tag_object = None
    authentication = None
    def __init__(self, queue, gt_map_requests):
        self.session = queue.session
        self.authentication = queue.authentication
        # candidates can only be made from queue-type global tags
        if queue.type != "Q":
            pass  # (elided: the original aborts candidate creation here)
        self.global_tag_object = queue
        # check for duplicate (record, label) pairs among the requested tags
        found_record_label_pairs = []
        for gt_map in gt_map_requests:
            if (gt_map.record, gt_map.label) in found_record_label_pairs:
                # duplicate found: rewind the json_list wrapper so it can be
                # iterated again, then bail out (elided lines)
                if gt_map_requests.__class__.__name__ == "json_list":
                    gt_map_requests.reset()
            found_record_label_pairs.append((gt_map.record, gt_map.label))
        # rewind the json_list wrapper before storing it
        if gt_map_requests.__class__.__name__ == "json_list":
            gt_map_requests.reset()
        self.tags_to_use = gt_map_requests
    # (the lines below belong to the method that assembles the candidate's
    # metadata; its "def" line is elided in this fragment)

        CANDIDATE_TIME_FORMAT = "%Y_%m_%d_%H_%M_%S"
        TIME_FORMAT = "%Y-%m-%d %H:%M:%S"

        # derive the candidate's name, release and description from the queue it came from
        candidate_name = self.global_tag_object.name.replace("Queue", "Candidate")
        candidate_name += "_%s" % datetime.datetime.now().strftime(CANDIDATE_TIME_FORMAT)
        time_now = datetime.datetime.now().strftime(TIME_FORMAT)
        candidate_release = self.global_tag_object.release
        candidate_description = "Candidate created from the queue: '%s' at: '%s'" % (self.global_tag_object.name, time_now)
        # serialise the selected tag requests, stripping per-request bookkeeping fields
        extra_records = data_formats._objects_as_dicts(self.tags_to_use)
        for record in extra_records:
            for key in ["submitter_id", "description", "time_submitted", "last_edited"]:
                pass  # (elided: the original processes/strips these keys here)

        params = {
            "c_name": candidate_name,
            "snapshot_time": time_now,
            "from_gt": self.global_tag_object.name,
            "release": candidate_release,
            "desc": candidate_description,
            "validity": 18446744073709551615,
            "extra_records": json.dumps(extra_records.data())
        }
        credentials = self.authentication.authenticators("dbAccess")
        # HTTP basic-auth token built from the username/password pair
        auth = base64.encodestring("%s:%s" % (credentials[0], credentials[1])).replace('\n', '')
        params = urllib.urlencode(params)
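        # Illustrative sketch (assumption - the actual upload call sits in elided
        # lines): the encoded params and the basic-auth token above would typically
        # be sent to the candidate-upload service roughly like this:
        #
        #   request = urllib2.Request(upload_url, params)              # upload_url is hypothetical
        #   request.add_header("Authorization", "Basic %s" % auth)
        #   response = urllib2.urlopen(request).read()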
class GlobalTagMap(Base):
    __tablename__ = 'GLOBAL_TAG_MAP'

    headers = ["global_tag_name", "record", "label", "tag_name"]

    global_tag_name = Column(String(100), ForeignKey('GLOBAL_TAG.name'), primary_key=True, nullable=False)
    record = Column(String(100), ForeignKey('RECORDS.record'), primary_key=True, nullable=False)
    label = Column(String(100), primary_key=True, nullable=False)
    tag_name = Column(String(100), ForeignKey('TAG.name'), nullable=False)
    def __init__(self, dictionary={}, convert_timestamps=True):
        for key in dictionary:
            try:
                if convert_timestamps:
                    self.__dict__[key] = to_timestamp(dictionary[key])
                else:
                    self.__dict__[key] = dictionary[key]
            except KeyError as k:
                continue

    def __repr__(self):
        return '<GlobalTagMap %r>' % self.global_tag_name

            "global_tag_name": str(self.global_tag_name),
            "record": str(self.record),
            "label": str(self.label),
            "tag_name": str(self.tag_name)
class GlobalTagMapRequest(Base):
    __tablename__ = 'GLOBAL_TAG_MAP_REQUEST'

    queue = Column(String(100), primary_key=True, nullable=False)
    tag = Column(String(100), ForeignKey('TAG.name'), primary_key=True, nullable=False)
    record = Column(String(100), ForeignKey('RECORDS.record'), primary_key=True, nullable=False)
    label = Column(String(100), primary_key=True, nullable=False)
    status = Column(String(1), nullable=False)
    description = Column(String(4000), nullable=False)
    submitter_id = Column(Integer, nullable=False)
    time_submitted = Column(DateTime, nullable=False)
    last_edited = Column(DateTime, nullable=False)
    def __init__(self, dictionary={}, convert_timestamps=True):
        for key in dictionary:
            try:
                if convert_timestamps:
                    self.__dict__[key] = to_timestamp(dictionary[key])
                else:
                    self.__dict__[key] = dictionary[key]
            except KeyError as k:
                continue
    headers = ["queue", "tag", "record", "label", "status", "description",
               "submitter_id", "time_submitted", "last_edited"]

            "queue": self.queue,
            "record": self.record,
            "label": self.label,
            "status": self.status,
            "description": self.description,
            "submitter_id": self.submitter_id,
            "time_submitted": self.time_submitted,
            "last_edited": self.last_edited
    def __repr__(self):
        return '<GlobalTagMapRequest %r>' % self.queue
            'headers': ['Queue', 'Tag', 'Record', 'Label', 'Status', 'Submitted', 'Modified'],
            'data': [r.to_array() for r in requests],
class IOV(Base):
    __tablename__ = 'IOV'

    headers = ["tag_name", "since", "payload_hash", "insertion_time"]

    tag_name = Column(String(4000), ForeignKey('TAG.name'), primary_key=True, nullable=False)
    since = Column(Integer, primary_key=True, nullable=False)
    payload_hash = Column(String(40), ForeignKey('PAYLOAD.hash'), primary_key=True, nullable=False)
    insertion_time = Column(DateTime, nullable=False)
    def __init__(self, dictionary={}, convert_timestamps=True):
        for key in dictionary:
            try:
                if convert_timestamps:
                    self.__dict__[key] = to_timestamp(dictionary[key])
                else:
                    self.__dict__[key] = dictionary[key]
            except KeyError as k:
                continue
379 "tag_name" : self.tag_name,
380 "since" : self.since,
381 "payload_hash" : self.payload_hash,
382 "insertion_time" : self.insertion_time
386 return '<IOV %r>' % self.tag_name
389 return [self.since,
to_timestamp(self.insertion_time), self.payload_hash]
            'headers': ['Since', 'Insertion Time', 'Payload'],
            'data': [i.to_array() for i in iovs],

    def all(self, amount=10):
class Payload(Base):
    __tablename__ = 'PAYLOAD'

    headers = ["hash", "object_type", "version", "insertion_time"]

    hash = Column(String(40), primary_key=True, nullable=False)
    object_type = Column(String(4000), nullable=False)
    version = Column(String(4000), nullable=False)
    insertion_time = Column(DateTime, nullable=False)
    def __init__(self, dictionary={}, convert_timestamps=True):
        for key in dictionary:
            try:
                if convert_timestamps:
                    self.__dict__[key] = to_timestamp(dictionary[key])
                else:
                    self.__dict__[key] = dictionary[key]
            except KeyError as k:
                continue
427 "object_type" : self.object_type,
428 "version" : self.version,
429 "insertion_time" : self.insertion_time
433 return '<Payload %r>' % self.hash
436 return [self.hash, self.object_type, self.version,
to_timestamp(self.insertion_time)]
            'headers': ["Payload", "Object Type", "Version", "Insertion Time"],
            'data': [p.to_array() for p in payloads],
    def parent_tags(self):
        tag_names = map(lambda entry: entry[0],
                        self.session.query(IOV.tag_name).filter(IOV.payload_hash == self.hash).all())
        tags = self.session.query(Tag).filter(Tag.name.in_(tag_names)).order_by(Tag.name).all()

    def all(self, amount=10):
class Record(Base):
    __tablename__ = 'RECORDS'

    headers = ["record", "object", "type"]

    record = Column(String(100), primary_key=True, nullable=False)
    object = Column(String(200), nullable=False)
    type = Column(String(20), nullable=False)
471 "record" : self.record,
472 "object" : self.object,
477 return '<Record %r>' % self.record
480 return [self.record, self.object]
            'headers': ["Record", "Object"],
            'data': [r.to_array() for r in records],

    def all(self, amount=10):
class RecordReleases(Base):
    __tablename__ = 'RECORD_RELEASES'

    record = Column(String(100), ForeignKey('RECORDS.record'), nullable=False)
    release_cycle = Column(String(100), primary_key=True, nullable=False)
    release = Column(String(100), nullable=False)
    release_int = Column(String(100), nullable=False)
504 "release_cycle" : self.release_cycle,
505 "release" : self.release,
506 "release_int" : self.release_int
510 return '<RecordReleases %r>' % self.record
513 return [self.release_cycle, self.release, self.release_int]
        record_releases_data = {
            'headers': ["Release Cycle", "Starting Release", "Starting Release Number"],
            'data': [r.to_array() for r in recordReleases],
        }
        return record_releases_data
class ParsedReleases(Base):
    __tablename__ = 'PARSED_RELEASES'

    release_cycle = Column(String(100), primary_key=True, nullable=False)
    release = Column(String(100), nullable=False)
    release_int = Column(String(100), nullable=False)
533 "release_cycle" : self.release_cycle,
534 "release" : self.release,
535 "release_int" : self.release_int
539 return '<ParsedReleases %r>' % self.release_cycle
542 return [self.release_cycle, self.release, self.release_int]
        parsed_releases_data = {
            'headers': ["Release Cycle", "Starting Release", "Starting Release Number"],
            'data': [p.to_array() for p in parsedReleases],
        }
        return parsed_releases_data
class Tag(Base):
    __tablename__ = 'TAG'

    headers = ["name", "time_type", "object_type", "synchronization", "end_of_validity",
               "description", "last_validated_time", "insertion_time", "modification_time"]

    name = Column(String(4000), primary_key=True, nullable=False)
    time_type = Column(String(4000), nullable=False)
    object_type = Column(String(4000), nullable=False)
    synchronization = Column(String(4000), nullable=False)
    end_of_validity = Column(Integer, nullable=False)
    description = Column(String(4000), nullable=False)
    last_validated_time = Column(Integer, nullable=False)
    insertion_time = Column(DateTime, nullable=False)
    modification_time = Column(DateTime, nullable=False)
    def __init__(self, dictionary={}, convert_timestamps=True):
        for key in dictionary:
            try:
                if convert_timestamps:
                    self.__dict__[key] = to_timestamp(dictionary[key])
                else:
                    self.__dict__[key] = dictionary[key]
            except KeyError as k:
                continue
586 "time_type" : self.time_type,
587 "object_type" : self.object_type,
588 "synchronization" : self.synchronization,
589 "end_of_validity" : self.end_of_validity,
590 "description" : self.description,
591 "last_validated_time" : self.last_validated_time,
592 "insertion_time" : self.insertion_time,
593 "modification_time" : self.modification_time,
594 "record" : self.record,
599 return '<Tag %r>' % self.name
602 return [self.name, self.time_type, self.object_type, self.synchronization,
to_timestamp(self.insertion_time), self.description]
            'headers': ["Tag", "Time Type", "Object Type", "Synchronization", "Insertion Time", "Description"],
            'data': [t.to_array() for t in tags],
    def parent_global_tags(self):
        global_tag_names = map(lambda entry: entry[0],
                               self.session.query(GlobalTagMap.global_tag_name).filter(GlobalTagMap.tag_name == self.name).all())
        if len(global_tag_names) != 0:
            global_tags = self.session.query(GlobalTag).filter(GlobalTag.name.in_(global_tag_names)).order_by(GlobalTag.name).all()

    def all(self, amount=10):
    def insertion_time_interval(self, insertion_time, **radius):
        minus = insertion_time - datetime.timedelta(days=days)
        plus = insertion_time + datetime.timedelta(days=days)
        tags = self.session.query(Tag).filter(and_(Tag.insertion_time >= minus,
                                                   Tag.insertion_time <= plus)).order_by(Tag.name).all()

    def modification_time_interval(self, modification_time, **radius):
        minus = modification_time - datetime.timedelta(days=days)
        plus = modification_time + datetime.timedelta(days=days)
        tags = self.session.query(Tag).filter(and_(Tag.modification_time >= minus,
                                                   Tag.modification_time <= plus)).order_by(Tag.name).all()
    def iovs(self, pretty=False):
        iovs = self.session.query(IOV).filter(IOV.tag_name == self.name).all()
        # (elided lines: the rows are turned into per-column dicts, and the pretty
        # formatting below is applied only when pretty=True)
        for n in range(0, len(iovs)):
            # show each "since" as "start - end", where end is one less than the next
            # IOV's since; the last IOV is left open-ended
            iovs[n]["since"] = "{:>6}".format(str(iovs[n]["since"])) + " - " + \
                               ("{:<6}".format(str(iovs[n+1]["since"]-1)) if n != len(iovs)-1 else "")
    return {"globaltag": GlobalTag, "candidate": Candidate, "globaltagmap": GlobalTagMap,
            "globaltagmaprequest": GlobalTagMapRequest, "iov": IOV, "payload": Payload,
            "record": Record, "recordreleases": RecordReleases, "parsedreleases": ParsedReleases,
            "tag": Tag, "Base": Base}