forked from cms-sw/cmssw
-
Notifications
You must be signed in to change notification settings - Fork 1
/
conddb
executable file
·1521 lines (1170 loc) · 63.8 KB
/
conddb
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
812
813
814
815
816
817
818
819
820
821
822
823
824
825
826
827
828
829
830
831
832
833
834
835
836
837
838
839
840
841
842
843
844
845
846
847
848
849
850
851
852
853
854
855
856
857
858
859
860
861
862
863
864
865
866
867
868
869
870
871
872
873
874
875
876
877
878
879
880
881
882
883
884
885
886
887
888
889
890
891
892
893
894
895
896
897
898
899
900
901
902
903
904
905
906
907
908
909
910
911
912
913
914
915
916
917
918
919
920
921
922
923
924
925
926
927
928
929
930
931
932
933
934
935
936
937
938
939
940
941
942
943
944
945
946
947
948
949
950
951
952
953
954
955
956
957
958
959
960
961
962
963
964
965
966
967
968
969
970
971
972
973
974
975
976
977
978
979
980
981
982
983
984
985
986
987
988
989
990
991
992
993
994
995
996
997
998
999
1000
#!/usr/bin/env python
'''CMS Conditions DB command-line tool.
'''
import argparse
import datetime
import getpass
import logging
import netrc
import os
import re
import sys
import stat
import subprocess
import tempfile
import textwrap
import time
import sqlalchemy
import CondCore.Utilities.conddblib as conddb
import CondCore.Utilities.cond2xml as cond2xml
# -------------------------------------------------------------------------------------------------------
# TODO: Diffs may look better in the -/+ mode, instead the 2 columns mode.
# TODO: Colored diff! (green +, red -)
# TODO: Support the old connection string syntax, e.g. sqlite_file://...
# Utility functions
def _rawdict(obj):
return dict([(str(column), getattr(obj, column)) for column in obj.__table__.columns.keys()])
def _get_payload_full_hash(session, payload, check=True):
    '''Resolve a (possibly abbreviated) payload hash prefix to the full hash.

    Returns the full hash when exactly one payload matches, None otherwise.
    When check is True, raises for zero or ambiguous matches instead.
    '''
    # Fetch at most two rows: enough to tell "unique match" from
    # "ambiguous prefix" with a single query.
    matches = (session.query(conddb.Payload.hash)
               .filter(conddb.Payload.hash.like('%s%%' % payload.lower()))
               .limit(2)
               .all())
    if check:
        if not matches:
            raise Exception('There is no payload matching %s in the database.' % payload)
        if len(matches) > 1:
            raise Exception('There is more than one payload matching %s in the database. Please provide a longer prefix.' % payload)
    if len(matches) == 1:
        return matches[0].hash
    return None
def _dump_payload(session, payload, loadonly):
    # Fetch the payload BLOB for the given (full) hash and, unless loadonly
    # is set, dump its raw repr() to stdout.
    data = session.query(conddb.Payload.data).\
        filter(conddb.Payload.hash == payload).\
        one()[0]
    logging.info('Loading %spayload %s of length %s ...', '' if loadonly else 'and dumping ', payload, len(data))
    # NOTE: the actual deserialization is meant to be delegated to a
    # compiled C++ tool (see the TODO in the printed message).
    print 'Data (TODO: Replace with the call to the actual compiled C++ tool):', repr(data)
def _identify_object(session, objtype, name):
    '''Figure out whether name refers to a tag, a global tag or a payload.

    Returns an (objtype, name) pair; for payloads, name is expanded to the
    full hash. When objtype is given, only its existence is validated.
    Raises if the name is ambiguous or matches nothing.
    '''
    # We can't just use get() here since frontier fetches the entire
    # BLOBs by default when requesting them in a column
    if objtype is not None:
        # Check the type is correct (i.e. if the object exists)
        if objtype == 'tag':
            if not _exists(session, conddb.Tag.name, name):
                raise Exception('There is no tag named %s in the database.' % name)
        elif objtype == 'gt':
            if not _exists(session, conddb.GlobalTag.name, name):
                # raise Exception('There is no global tag named %s in the database.' % name)
                logging.info('There is no global tag table in the database.')
        elif objtype == 'payload':
            # In the case of a payload, check and also return the full hash
            return objtype, _get_payload_full_hash(session, name)
        return objtype, name

    # Search for the object
    tag = _exists(session, conddb.Tag.name, name)
    global_tag = _exists(session, conddb.GlobalTag.name, name)
    payload_hash = _get_payload_full_hash(session, name, check = False)

    # Count the truthy matches. A list comprehension replaces the original
    # len(filter(None, ...)), which only works in Python 2 (filter() returns
    # an iterator without len() in Python 3); behavior is identical here.
    count = len([x for x in [tag, global_tag, payload_hash] if x])

    if count > 1:
        raise Exception('There is more than one object named %s in the database.' % name)
    if count == 0:
        raise Exception('There is no tag, global tag or (unique) payload named %s in the database.' % name)

    if tag:
        return 'tag', name
    elif global_tag:
        return 'gt', name
    elif payload_hash is not None:
        return 'payload', payload_hash

    raise Exception('Should not have arrived here.')
def _get_editor(args):
if args.editor is not None:
return args.editor
editor = os.environ.get('EDITOR')
if editor is None:
raise Exception('An editor was not provided and the EDITOR environment variable does not exist either.')
return editor
def _run_editor(editor, tempfd):
    # Flush pending writes so the editor sees the full content, run the
    # editor on the temporary file, then rewind so the caller can re-read it.
    tempfd.flush()
    # NOTE(review): shell=True with an interpolated command string. The
    # editor comes from --editor/$EDITOR, so shell constructs (e.g.
    # "emacs -nw") presumably work on purpose, but values containing shell
    # metacharacters are unsafe -- confirm before hardening.
    subprocess.check_call('%s %s' % (editor, tempfd.name), shell=True)
    tempfd.seek(0)
def _parse_timestamp(timestamp):
try:
return datetime.datetime.strptime(timestamp, '%Y-%m-%d %H:%M:%S.%f')
except ValueError:
pass
try:
return datetime.datetime.strptime(timestamp, '%Y-%m-%d %H:%M:%S')
except ValueError:
pass
try:
return datetime.datetime.strptime(timestamp, '%Y-%m-%d')
except ValueError:
pass
raise Exception("Could not parse timestamp '%s'" % timestamp)
def _confirm_changes(args):
    # Ask the user to confirm a destructive operation. The prompt text is
    # printed unconditionally; --yes only skips reading the answer.
    # Any answer other than y/yes aborts.
    output(args, 'Confirm changes? [n]', newline=False)
    if not args.yes and raw_input().lower() not in ['y', 'yes']:
        raise Exception('Aborted by the user.')
def _exists(session, primary_key, value):
    '''Return whether a row with the given primary-key value exists.

    Returns True/False, or None when the underlying table is missing
    (the query raises OperationalError).
    '''
    try:
        count = session.query(primary_key).\
            filter(primary_key == value).\
            count()
    except sqlalchemy.exc.OperationalError:
        # e.g. the table does not exist in this database
        return None
    return count != 0
def _regexp(connection, field, regexp):
    '''To be used inside filter(): backend-appropriate regexp match.
    '''
    if connection.is_oracle or connection.is_frontier:
        # Oracle/Frontier provide REGEXP_LIKE natively.
        return sqlalchemy.func.regexp_like(field, regexp)
    if connection.is_sqlite:
        # SQLite has no regexp() by default: register a Python one on the
        # pooled connection. Relies on being a SingletonThreadPool.
        connection.engine.pool.connect().create_function(
            'regexp', 2,
            lambda data, pattern: re.search(pattern, data) is not None)
        return sqlalchemy.func.regexp(field, regexp)
    raise Exception('Unimplemented.')
def _ilike_or_regexp(args, connection, field, term):
'''To be used inside filter().
'''
if args.regexp:
return _regexp(connection, field, term)
return field.ilike('%%%s%%' % term)
def _ilike_or_regexp_highlight(args, string, term):
    '''Client-side counterpart of _ilike_or_regexp(): performs the same
    search on string and wraps every match of term in highlight colors.
    '''
    replacement = colors.bold_red + '\\1' + colors.end
    if args.regexp:
        pattern, flags = term, 0
    else:
        # Plain terms are matched literally and case-insensitively,
        # mirroring ilike() in the database.
        pattern, flags = re.escape(term), re.IGNORECASE
    return re.sub('(%s)' % pattern, replacement, string, flags=flags)
def _list_object(obj):
table = []
for column in obj.__table__.columns.keys():
table.append([column, getattr(obj, column)])
return table
def _output_list_object(args, obj):
    '''Print a Property/Value table with every column of obj.'''
    headers = ['Property', 'Value']
    output_table(args, _list_object(obj), headers)
def _diff_objects(object1, object2):
table = []
columns = object1.__table__.columns.keys()
columns.remove('name')
for column in columns:
value1 = getattr(object1, column)
value2 = getattr(object2, column)
if value1 != value2:
table.append([column, value1, value2])
return table
def _output_diff_objects(args, object1, object2):
    '''Print the differing properties of two objects side by side.'''
    headers = ['Property',
               '%s Value' % str_db_object(args.db, args.first),
               '%s Value' % str_db_object(args.destdb, args.second)]
    output_table(args, _diff_objects(object1, object2), headers)
def _default(value, default_value='-'):
return default_value if value is None else value
def _truefalse(value):
return 'Present' if value else '-'
def _check_same_object(args):
if (args.destdb is None or args.db == args.destdb) and (args.second is None or args.first == args.second):
raise Exception('The source object and the destination object are the same (i.e. same database and same name): %s' % str_db_object(args.db, args.first))
def _connect(db, init, verbose, read_only, force):
    '''Open a connection to db, guarding writable access.

    Editing is rejected for read-only databases, and for official ones
    unless force is set (in which case only a warning is emitted).
    '''
    logging.debug('Connecting to %s ...', db)
    connection = conddb.connect(db, init=init, verbose=0 if verbose is None else verbose - 1)
    if read_only:
        return connection
    if connection.is_read_only:
        raise Exception('Impossible to edit a read-only database.')
    if connection.is_official:
        if not force:
            raise Exception('Editing official databases is forbidden. Use the official DropBox to upload conditions. If you need a special intervention on the database, see the contact help: %s' % conddb.contact_help)
        logging.warning('You are going to edit an official database. If you are not one of the Offline DB experts but have access to the password for other reasons, please stop now.')
    return connection
def connect(args, init=False, read_only=True):
    # Open the connection(s) described by the parsed command-line arguments.
    # Returns one connection for single-database commands, or a
    # (source, destination) pair for commands that define args.destdb.
    args.force = args.force if 'force' in dir(args) else False
    if 'destdb' in args:
        if args.destdb is None or args.db == args.destdb:
            # Same database on both sides: share a single connection object.
            args.destdb = args.db
            connection = _connect(args.db, init, args.verbose, read_only, args.force)
            return connection, connection
        # read_only refers to the destination database only
        # (the source is always read_only) -- see copy() command
        if os.path.exists(args.destdb):
            return _connect(args.db, init, args.verbose, True, args.force), _connect(args.destdb, init, args.verbose, read_only, args.force)
        else: # dest DB not yet there, needs init ...
            conn1 = _connect(args.db, init, args.verbose, True, args.force)
            conn2 = _connect(args.destdb, True, args.verbose, False, args.force)
            conn2.init()
            return conn1, conn2
    return _connect(args.db, init, args.verbose, read_only, args.force)
def str_db_object(db, name):
    '''Render a database-qualified object name as db::name.'''
    return '{0}::{1}'.format(db, name)
def str_iov(since, insertion_time):
    '''Render an IOV as a (since, insertion_time) pair.'''
    return '({0}, {1})'.format(since, insertion_time)
def str_record(record, label):
    '''Render a global tag map entry as a (record, label) pair.'''
    return '({0}, {1})'.format(record, label)
class Colors(object):
    '''Holder of the ANSI escape sequences used to colorize terminal output.

    When stdout is a pipe or --nocolors was given, every sequence is
    blanked out so the output stays plain text.
    '''
    normal_template = '\033[9%sm'
    bold_template = '\033[9%s;1m'
    bold = '\033[1m'
    black = normal_template % 0
    red = normal_template % 1
    green = normal_template % 2
    yellow = normal_template % 3
    blue = normal_template % 4
    magenta = normal_template % 5
    cyan = normal_template % 6
    white = normal_template % 7
    bold_black = bold_template % 0
    bold_red = bold_template % 1
    bold_green = bold_template % 2
    bold_yellow = bold_template % 3
    bold_blue = bold_template % 4
    bold_magenta = bold_template % 5
    bold_cyan = bold_template % 6
    bold_white = bold_template % 7
    end = '\033[0m'
    def __init__(self, args):
        if ( stat.S_ISFIFO(os.fstat(sys.stdout.fileno()).st_mode) or # we are running in a pipe
            args.nocolors ):
            self.noColors()
    def noColors(self):
        # Blank every public attribute on the instance. Note that dir(self)
        # also includes the templates and this very method, which get
        # harmlessly shadowed with '' on the instance.
        for member in dir(self):
            if not member.startswith('_'):
                setattr(self, member, '')
colors = None  # global Colors instance used by the output helpers; None until initialized at startup (presumably in main) -- verify before using at import time
def output(args, string, *parameters, **kwargs):
    # Print string % parameters to kwargs['output_file'] (default stdout),
    # always appending the color-reset sequence; suppressed entirely under
    # --quiet. Pass newline=False to keep the cursor on the same line
    # (Python 2 trailing-comma print).
    if args.quiet:
        return
    output_file = kwargs.get('output_file', sys.stdout)
    print >>output_file, string % parameters + colors.end,
    if kwargs.get('newline', True):
        print >>output_file
def _strip_colors(args, string):
'''Strips colors (i.e. ANSI sequences).
'''
if args.nocolors:
return string
return re.sub('\x1b\[[;\d]*[A-Za-z]', '', string)
def _ljust_colors(args, string, width, fillchar=' '):
'''Same as string.ljust(width, fillchar) but supporting colors.
'''
if args.nocolors:
return string.ljust(width, fillchar)
return string + fillchar * (width - len(_strip_colors(args, string)))
def output_table(args, table, headers, filters=None, output_file=None, no_first_header=False):
    # Pretty-print `table` (a list of rows) with the given column headers.
    # Each cell is stringified and truncated to conddb.name_length, then
    # optionally transformed by the per-column callables in `filters`.
    # Color escape sequences are ignored when computing column widths.
    if args.quiet:
        return
    if output_file is None:
        output_file = sys.stdout
    if filters is None:
        filters = [None] * len(headers)
    def max_length_filter(s):
        # Flatten newlines and cap very long cells (e.g. payload BLOBs).
        s = str(s).replace('\n', '\\n')
        return '%s...' % s[:conddb.name_length] if len(s) > conddb.name_length else s
    new_table = [[] for i in range(len(table))]
    for column_index in range(len(headers)):
        for row_index, row in enumerate(table):
            cell = max_length_filter(row[column_index])
            if filters[column_index] is not None:
                # NOTE: filters run on the already truncated/stringified cell.
                cell = filters[column_index](cell)
            new_table[row_index].append(cell)
    # Calculate the width of each column
    widths = []
    for column_index in range(len(headers)):
        width = len(headers[column_index])
        for row in new_table:
            width = max(width, len(_strip_colors(args, row[column_index])))
        widths.append(width)
    # Print the table
    header_separator = '-'
    column_separator = ''
    for column_index, header in enumerate(headers):
        output(args, colors.bold + _ljust_colors(args, header, widths[column_index]) + ' ' + column_separator, newline=False, output_file=output_file)
    output(args, '', output_file=output_file)
    # Underline row (blank under the first column when no_first_header is set)
    for column_index in range(len(headers)):
        output(args, (' ' if column_index == 0 and no_first_header else header_separator) * widths[column_index] + ' ' + column_separator, newline=False, output_file=output_file)
    output(args, '', output_file=output_file)
    for row in new_table:
        for column_index, cell in enumerate(row):
            output(args, _ljust_colors(args, cell, widths[column_index]) + ' ' + column_separator, newline=False, output_file=output_file)
        output(args, '', output_file=output_file)
    output(args, '', output_file=output_file)
# Commands
def help(args):
    # 'help' command: print the extended usage/overview text.
    # (Shadows the builtin help(), acceptable in this script context.)
    output(args, colors.bold + 'CMS Condition DB command-line tool.')
    output(args, '')
    output(args, colors.bold + 'Usage')
    output(args, colors.bold + '-----')
    output(args, '')
    output(args, ' This tool provides several subcommands, each of those')
    output(args, ' serves a well-defined purpose.')
    output(args, '')
    output(args, ' To see the list of available subcommands and the global options, run:')
    output(args, '')
    output(args, ' conddb -h')
    output(args, '')
    output(args, ' To see the help of a subcommand and its options, run:')
    output(args, '')
    output(args, ' conddb <command> -h.')
    output(args, ' e.g. conddb list -h')
    output(args, '')
    output(args, '')
    output(args, colors.bold + 'Exit status')
    output(args, colors.bold + '-----------')
    output(args, '')
    output(args, ' 0 = OK.')
    output(args, ' 1 = Runtime error (i.e. any kind of error not related to syntax).')
    output(args, ' 2 = Usage/syntax error.')
    output(args, '')
    output(args, '')
    output(args, colors.bold + 'Database parameter (--db)')
    output(args, colors.bold + '-------------------------')
    output(args, ' ' + '\n '.join(textwrap.dedent(conddb.database_help).splitlines()))
    output(args, '')
    output(args, '')
    output(args, colors.bold + 'Contact help')
    output(args, colors.bold + '------------')
    output(args, '')
    output(args, ' ' + '\n '.join(textwrap.wrap(conddb.contact_help)))
    output(args, '')
def init(args):
    # 'init' command: create the conditions schema in the target database
    # (dropping the existing one first when --drop is given).
    connection = connect(args, init=True, read_only=False)
    logging.info('Initializing database...')
    connection.init(drop=args.drop)
def status(args):
    # 'status' command: report whether the schema is valid, how many tags,
    # payloads and global tags exist, and show the most recent args.limit
    # entries of each kind.
    connection = connect(args)
    valid = connection.is_valid()
    output(args, 'Database Status:')
    output(args, '')
    output(args, ' Schema: %s', 'OK (required tables are present)' if valid else 'Wrong (missing required tables)')
    if not valid:
        # No point in counting rows of tables that are not there.
        return
    session = connection.session()
    tag_count = session.query(conddb.Tag.name).count()
    payload_count = session.query(conddb.Payload.hash).count()
    global_tag_count = session.query(conddb.GlobalTag.name).count()
    output(args, ' # tags: %s %s', tag_count, '(the last %s inserted are shown below)' % args.limit if tag_count > 0 else '')
    output(args, ' # payloads: %s %s', payload_count, '(the last %s inserted are shown below)' % args.limit if payload_count > 0 else '')
    output(args, ' # global tags: %s %s', global_tag_count, '(the last %s inserted are shown below)' % args.limit if global_tag_count > 0 else '')
    output(args, '')
    if tag_count > 0:
        output_table(args,
            session.query(conddb.Tag.name, conddb.Tag.time_type, conddb.Tag.object_type, conddb.Tag.synchronization, conddb.Tag.insertion_time, conddb.Tag.description).\
            order_by(conddb.Tag.insertion_time.desc()).\
            limit(args.limit).\
            all(),
            ['Name', 'Time Type', 'Object Type', 'Synchronization', 'Insertion Time', 'Description'],
        )
    if payload_count > 0:
        output_table(args,
            session.query(conddb.Payload.hash, conddb.Payload.object_type, conddb.Payload.version, conddb.Payload.insertion_time).\
            order_by(conddb.Payload.insertion_time.desc()).\
            limit(args.limit).\
            all(),
            ['Payload', 'Object Type', 'Version', 'Insertion Time'],
        )
    if global_tag_count > 0:
        output_table(args,
            session.query(conddb.GlobalTag.name, conddb.GlobalTag.release, conddb.GlobalTag.insertion_time, conddb.GlobalTag.description).\
            order_by(conddb.GlobalTag.insertion_time.desc()).\
            limit(args.limit).\
            all(),
            ['Global Tag', 'Release', 'Insertion Time', 'Description'],
        )
def search(args):
    # 'search' command: look for args.string in tag names/types/descriptions,
    # payload hashes/types and global tag names/releases/descriptions,
    # highlighting the matches unless --nocolors is given.
    connection = connect(args)
    session = connection.session()
    max_limit = 100
    if args.limit > max_limit:
        raise Exception('The limit on the number of returned results is capped at %s. Please use a reasonable limit.' % max_limit)
    if connection.is_frontier and ':' in args.string:
        raise Exception('Sorry, the colon : character is not allowed in queries to Frontier (yet). Please use another search term or connect to Oracle directly.')
    logging.info('Searching with a limit of %s results per type of object, starting from the latest inserted ones. If you do not find your object, please try to be more specific or increase the limit of returned results.', args.limit)
    if args.nocolors:
        _ilike_or_regexp_highlight_filter = None
    else:
        def _ilike_or_regexp_highlight_filter(cell):
            return _ilike_or_regexp_highlight(args, cell, args.string)
    def size(cell):
        # NOTE(review): this runs on the cell AFTER output_table's truncation
        # filter, so the reported size may understate big payloads -- confirm.
        return str( sys.getsizeof( bytearray(cell) ) )
    output_table(args,
        session.query(conddb.Tag.name, conddb.Tag.time_type, conddb.Tag.object_type, conddb.Tag.synchronization, conddb.Tag.insertion_time, conddb.Tag.description).\
        filter(
            _ilike_or_regexp(args, connection, conddb.Tag.name, args.string)
            | _ilike_or_regexp(args, connection, conddb.Tag.object_type, args.string)
            | _ilike_or_regexp(args, connection, conddb.Tag.description, args.string)
        ).\
        order_by(conddb.Tag.insertion_time.desc()).\
        limit(args.limit).\
        all(),
        ['Tag', 'Time Type', 'Object Type', 'Synchronization', 'Insertion Time', 'Description'],
        filters = [_ilike_or_regexp_highlight_filter, None, _ilike_or_regexp_highlight_filter, None, None, _ilike_or_regexp_highlight_filter],
    )
    output_table(args,
        session.query(conddb.Payload.hash, conddb.Payload.object_type, conddb.Payload.version, conddb.Payload.insertion_time, conddb.Payload.data).\
        filter(
            _ilike_or_regexp(args, connection, conddb.Payload.hash, args.string)
            | _ilike_or_regexp(args, connection, conddb.Payload.object_type, args.string)
        ).\
        order_by(conddb.Payload.insertion_time.desc()).\
        limit(args.limit).\
        all(),
        ['Payload', 'Object Type', 'Version', 'Insertion Time', 'Size'],
        filters = [_ilike_or_regexp_highlight_filter, _ilike_or_regexp_highlight_filter, None, None, size],
    )
    # The global tag table may be missing (e.g. a freshly copied sqlite file).
    try:
        output_table(args,
            session.query(conddb.GlobalTag.name, conddb.GlobalTag.release, conddb.GlobalTag.insertion_time, conddb.GlobalTag.description).\
            filter(
                _ilike_or_regexp(args, connection, conddb.GlobalTag.name, args.string)
                | _ilike_or_regexp(args, connection, conddb.GlobalTag.release, args.string)
                | _ilike_or_regexp(args, connection, conddb.GlobalTag.description, args.string)
            ).\
            order_by(conddb.GlobalTag.insertion_time.desc()).\
            limit(args.limit).\
            all(),
            ['Global Tag', 'Release', 'Insertion Time', 'Description'],
            filters = [_ilike_or_regexp_highlight_filter, _ilike_or_regexp_highlight_filter, None, _ilike_or_regexp_highlight_filter],
        )
    except sqlalchemy.exc.OperationalError:
        sys.stderr.write("No table for GlobalTags found in DB.\n\n")
def _inserted_before(timestamp):
    '''To be used inside filter(): restrict IOVs to those inserted up to
    the given timestamp; no restriction when timestamp is None.
    '''
    if timestamp is not None:
        return conddb.IOV.insertion_time <= _parse_timestamp(timestamp)
    # XXX: Returning None does not get optimized (skipped) by SQLAlchemy,
    # and returning True does not work in Oracle (generates "and 1"
    # which breaks Oracle but not SQLite). For the moment just use
    # this dummy condition.
    return sqlalchemy.literal(True) == sqlalchemy.literal(True)
def _high(n):
return int(n) >> 32
def _low(n):
return int(n) & 0xffffffff
def _convertTimeType(since):
try:
return str(datetime.datetime.utcfromtimestamp(_high(since)).replace(microsecond = _low(since)))
except ValueError:
return str(datetime.datetime.utcfromtimestamp(_high(since)).replace(microsecond = _low(since)/1000))
def _since_filter(time_type):
    '''Returns a filter function for the given time type that renders a
    since as a human-readable string.

    Run, hash and user sinces are returned unchanged. Time sinces (packed
    UNIX time / microseconds pair, 32 bits each) become a readable
    timestamp including microseconds; lumi sinces (packed run / lumi pair,
    32 bits each) become a "run : lumi" string. The raw value is appended
    in parentheses in both cases.
    '''
    def _render_time(since):
        return '%s (%s)' % (_convertTimeType(since), since)
    def _render_lumi(since):
        return '%s : %5s (%s)' % (_high(since), _low(since), since)
    if time_type == conddb.TimeType.time:
        return _render_time
    if time_type == conddb.TimeType.lumi:
        return _render_lumi
    return lambda since: since
def listTags_(args):
    '''Print a table of every tag in the database, ordered by insertion
    time and then name.
    '''
    connection = connect(args)
    session = connection.session()
    rows = (session.query(conddb.Tag.name, conddb.Tag.time_type,
                          conddb.Tag.object_type, conddb.Tag.synchronization,
                          conddb.Tag.end_of_validity, conddb.Tag.insertion_time,
                          conddb.Tag.description)
            .order_by(conddb.Tag.insertion_time, conddb.Tag.name)
            .all())
    headers = ['Name', 'TimeType', 'ObjectType', 'Synchronisation', 'EndOfValidity', 'Insertion_time', 'Description']
    output_table(args, rows, headers)
def listGTsForTag_(args):
    '''Print every global tag that contains the tag args.name.'''
    connection = connect(args)
    session = connection.session()
    rows = (session.query(conddb.GlobalTagMap.global_tag_name,
                          conddb.GlobalTagMap.tag_name,
                          conddb.GlobalTagMap.record,
                          conddb.GlobalTagMap.label)
            .filter(conddb.GlobalTagMap.tag_name == args.name)
            .order_by(conddb.GlobalTagMap.global_tag_name)
            .all())
    headers = ['GT_name', 'Tag_name', 'record', 'label']
    output_table(args, rows, headers)
def listGTs_(args):
    '''Print a table of every global tag, ordered by insertion time and
    then name.
    '''
    connection = connect(args)
    session = connection.session()
    rows = (session.query(conddb.GlobalTag.name, conddb.GlobalTag.description,
                          conddb.GlobalTag.release, conddb.GlobalTag.insertion_time)
            .order_by(conddb.GlobalTag.insertion_time, conddb.GlobalTag.name)
            .all())
    headers = ['GT_name', 'Description', 'Release', 'Insertion_time']
    output_table(args, rows, headers)
def list_(args):
    # 'list' command: for each given name, print the IOVs if it is a tag
    # and/or the record->tag mapping if it is a global tag; --long also
    # dumps the object's metadata. Raises when the name matches neither.
    connection = connect(args)
    session = connection.session()
    for name in args.name:
        is_tag = _exists(session, conddb.Tag.name, name)
        if is_tag:
            if args.long:
                _output_list_object(args, session.query(conddb.Tag).get(name))
            logging.info('Listing with a limit of %s IOVs, starting from the highest since. If you need to see more, please increase the limit of returned results.', args.limit)
            time_type = session.query(conddb.Tag.time_type).\
                filter(conddb.Tag.name == name).\
                scalar()
            # Pick a since-column label appropriate for the tag's time type.
            sinceLabel = 'Since: Run '
            if time_type == conddb.TimeType.time:
                sinceLabel = 'Since: UTC (timestamp)'
            if time_type == conddb.TimeType.lumi:
                sinceLabel = ' Run : Lumi (rawSince)'
            # Take the args.limit highest sinces (descending), then re-sort
            # ascending via from_self() so they print in natural order.
            output_table(args,
                session.query(conddb.IOV.since, conddb.IOV.insertion_time, conddb.IOV.payload_hash, conddb.Payload.object_type).\
                join(conddb.IOV.payload).\
                filter(
                    conddb.IOV.tag_name == name,
                    _inserted_before(args.snapshot),
                ).\
                order_by(conddb.IOV.since.desc(), conddb.IOV.insertion_time.desc()).\
                limit(args.limit).\
                from_self().\
                order_by(conddb.IOV.since, conddb.IOV.insertion_time).\
                all(),
                [sinceLabel, 'Insertion Time', 'Payload', 'Object Type'],
                filters = [_since_filter(time_type), None, None, None],
            )
        # The global tag table may be missing from the database entirely.
        try:
            is_global_tag = _exists(session, conddb.GlobalTag.name, name)
            if is_global_tag:
                if args.long:
                    _output_list_object(args, session.query(conddb.GlobalTag).get(name))
                output_table(args,
                    session.query(conddb.GlobalTagMap.record, conddb.GlobalTagMap.label, conddb.GlobalTagMap.tag_name).\
                    filter(conddb.GlobalTagMap.global_tag_name == name).\
                    order_by(conddb.GlobalTagMap.record, conddb.GlobalTagMap.label).\
                    all(),
                    ['Record', 'Label', 'Tag'],
                )
        except sqlalchemy.exc.OperationalError:
            sys.stderr.write("No table for GlobalTags found in DB.\n\n")
        if not is_tag and not is_global_tag:
            raise Exception('There is no tag or global tag named %s in the database.' % name)
def _diff_tags(args, session1, session2, first, second):
    # Diff two tags: optionally their metadata (--long), then their IOV
    # sequences, printing since-ranges where the payloads differ (and,
    # unless --short, compressed '=' rows for the equal ranges).
    tag1 = session1.query(conddb.Tag).get(first)
    tag2 = session2.query(conddb.Tag).get(second)
    if args.long:
        _output_diff_objects(args, tag1, tag2)
    if tag1.time_type != tag2.time_type:
        output(args, 'Skipping diff of IOVs, since the time_type is different.')
    else:
        # since -> payload hash maps for both tags (snapshot-filtered).
        iovs1 = dict(session1.query(conddb.IOV.since, conddb.IOV.payload_hash).\
            filter(
                conddb.IOV.tag_name == first,
                _inserted_before(args.snapshot),
            ).\
            all()
        )
        iovs2 = dict(session2.query(conddb.IOV.since, conddb.IOV.payload_hash).\
            filter(
                conddb.IOV.tag_name == second,
                _inserted_before(args.snapshot),
            ).\
            all()
        )
        table = []
        # Merge the sinces of both tags; a None payload below means "the
        # previous payload of that tag is still valid at this since".
        iovs = [(x, iovs1.get(x), iovs2.get(x)) for x in sorted(set(iovs1) | set(iovs2))]
        # Since 1 != 2 and both are != than any payload,
        # this will trigger printing the last line [last_since, Infinity)
        iovs.append(('Infinity', 1, 2))
        prev_since, prev_payload1, prev_payload2, prev_equal = None, None, None, None
        for since, payload1, payload2 in iovs:
            if prev_since is None:
                # First time
                prev_equal = payload1 == payload2
                prev_since = since
                prev_payload1, prev_payload2 = payload1, payload2
                continue
            # If None, the payloads are the previous one
            if payload1 is None:
                payload1 = prev_payload1
            if payload2 is None:
                payload2 = prev_payload2
            if prev_equal:
                # If the previous payloads were equal and these ones
                # were too, we do not print anything (and we do not update
                # the prev_since). If the previous were equal but these
                # are different, the equal-range has finished: we print it.
                if payload1 != payload2:
                    if not args.short:
                        table.append(('[%s, %s)' % (prev_since, since), '=', '='))
                    prev_since = since
            else:
                # If the previous payloads were not equal, we print them,
                # since we print all the different ranges (even if they are
                # contiguous). However, we skip in the case these payloads
                # and both equal to the previous ones (and we do not
                # update the prev_since). Should not be common, since
                # there is no point on having contiguous IOVs with the same
                # payloads in a tag.
                if payload1 != prev_payload1 or payload2 != prev_payload2:
                    table.append(('[%s, %s)' % (prev_since, since), _default(prev_payload1), _default(prev_payload2)))
                    prev_since = since
            prev_equal = payload1 == payload2
            prev_payload1, prev_payload2 = payload1, payload2
        output_table(args,
            table,
            ['Range', '%s Payload' % str_db_object(args.db, first), '%s Payload' % str_db_object(args.destdb, second)],
        )
def diff(args):
    # 'diff' command: compare two tags and/or two global tags, possibly
    # living in two different databases. With --deep, also diff every pair
    # of tags that differ between the two global tags.
    _check_same_object(args)
    connection1, connection2 = connect(args)
    session1, session2 = connection1.session(), connection2.session()
    if args.second is None:
        # Same name on both sides (diffing across databases).
        args.second = args.first
    is_tag1 = _exists(session1, conddb.Tag.name, args.first)
    is_tag2 = _exists(session2, conddb.Tag.name, args.second)
    if is_tag1 and is_tag2:
        _diff_tags(args, session1, session2, args.first, args.second)
    is_global_tag1 = _exists(session1, conddb.GlobalTag.name, args.first)
    is_global_tag2 = _exists(session2, conddb.GlobalTag.name, args.second)
    if is_global_tag1 and is_global_tag2:
        global_tag1 = session1.query(conddb.GlobalTag).get(args.first)
        global_tag2 = session2.query(conddb.GlobalTag).get(args.second)
        if args.long:
            _output_diff_objects(args, global_tag1, global_tag2)
        # (record, label) -> tag name maps for both global tags.
        map1 = dict([(tuple(x[:2]), x[2]) for x in session1.query(conddb.GlobalTagMap.record, conddb.GlobalTagMap.label, conddb.GlobalTagMap.tag_name).\
            filter(conddb.GlobalTagMap.global_tag_name == args.first)
        ])
        map2 = dict([(tuple(x[:2]), x[2]) for x in session2.query(conddb.GlobalTagMap.record, conddb.GlobalTagMap.label, conddb.GlobalTagMap.tag_name).\
            filter(conddb.GlobalTagMap.global_tag_name == args.second)
        ])
        records = sorted(set(map1) | set(map2))
        table = []
        diff_tags = set([])
        for record in records:
            value1 = map1.get(record)
            value2 = map2.get(record)
            # Report entries missing on either side or mapped to different tags.
            if value1 is None or value2 is None or value1 != value2:
                table.append((record[0], record[1], _default(value1), _default(value2)))
                diff_tags.add((value1, value2))
        output_table(args,
            table,
            ['Record', 'Label', '%s Tag' % str_db_object(args.db, args.first), '%s Tag' % str_db_object(args.destdb, args.second)],
        )
        if args.deep:
            for tag1, tag2 in diff_tags:
                _diff_tags(args, session1, session2, tag1, tag2)
    if not (is_tag1 and is_tag2) and not (is_global_tag1 and is_global_tag2):
        raise Exception('There are no tag or global tag pairs named %s and %s in the database(s).' % (args.first, args.second))
def convertRunToTimes(startRun, stopRun=None):
    '''Return a {'start': {...}, 'stop': {...}} map with the boundaries of
    the run range expressed in every supported time type (run, time, lumi;
    hash is unknown). A falsy stopRun defaults to startRun + 1.
    '''
    if not stopRun:
        stopRun = startRun + 1
    # Only the start time of the first run and the stop time of the last
    # run are needed; the other tuple halves are discarded.
    startTime1, stopTime1 = runToTime(startRun)
    startTime2, stopTime2 = runToTime(stopRun)
    timeMap = {
        'start': {
            'hash': None,
            'run': startRun,
            # the time we get may be a bit delayed (7-10 sec according to
            # Salvatore): pad by 15 seconds
            'time': startTime1 - 15.,
            'lumi': startRun << 32 | 0x1,
        },
        'stop': {
            'hash': None,
            'run': stopRun,
            'time': stopTime2 + 15.,
            'lumi': stopRun << 32 | 0x1,
        },
    }
    logging.debug("convertRunToTimes> start: %s stop %s \n timeMap: %s " % (startRun, stopRun, str(timeMap)))
    return timeMap
def runToTime(runNr):
    '''Look up the start and stop wall-clock times of a run.

    Queries the runinfo tags in the production CMS_CONDITIONS database and
    returns a (startTime, stopTime) pair of UNIX timestamps.
    Raises an explicit Exception when the run is unknown.
    '''
    connStr = conddb._getCMSFrontierSQLAlchemyConnectionString('PromptProd', 'CMS_CONDITIONS')
    connection = conddb.connect(connStr)
    session = connection.session()
    startIOV = session.query(conddb.IOV.insertion_time).filter(conddb.IOV.tag_name == 'runinfo_start_31X_hlt', conddb.IOV.since == runNr).all()
    stopIOV = session.query(conddb.IOV.insertion_time).filter(conddb.IOV.tag_name == 'runinfo_31X_hlt', conddb.IOV.since == runNr).all()
    # An unknown run used to die with a bare IndexError on the [0][0]
    # indexing below; fail with an informative message instead.
    if not startIOV or not stopIOV:
        raise Exception('Could not find start/stop time for run %s in the runinfo tags.' % runNr)
    startTime = time.mktime( startIOV[0][0].timetuple() )
    stopTime = time.mktime( stopIOV [0][0].timetuple() )
    return startTime, stopTime
def _copy_tag(args, session1, session2, first, second, fromIOV=None, toIOV=None, timeMap=None):
    """Copy tag `first` (session1) into session2 under the name `second`.

    The copied range is bounded by fromIOV/toIOV; alternatively a timeMap
    (as produced by convertRunToTimes) may be given, from which the bounds
    are derived according to the tag's time type.  Payloads referenced by
    the selected IOVs are copied too, unless they already exist in the
    destination.  Raises if none of timeMap/fromIOV/toIOV is provided.
    """
    logging.info('Copying tag %s to %s ...', str_db_object(args.db, first), str_db_object(args.destdb, second))
    # Copy the tag
    tag = _rawdict(session1.query(conddb.Tag).get(first))
    tag['name'] = second
    tag['end_of_validity'] = 0  # XXX: SQLite does not work with long ints...
    if not timeMap and not fromIOV and not toIOV:
        raise Exception("_copy_tag> One of timeMap, fromIOV, toIOV needs to be given ...")
    if timeMap and not fromIOV and not toIOV:
        # Pick the boundary matching this tag's time type ('run', 'time', 'lumi', ...)
        time_type = tag['time_type'].lower().strip()
        fromIOV = timeMap['start'][time_type]
        toIOV = timeMap['stop'][time_type]
    session2.add(conddb.Tag(**tag))
    # Get the closest smaller IOV than the given starting point (args.from),
    # since it may lie between two sinces. For the ending point (args.to)
    # is not needed, since the last IOV of a tag always goes up to infinity.
    # In the case where the starting point is before any IOV, we do not need
    # to cut the query anyway.
    prev_iov = None
    if fromIOV is not None:
        logging.debug("checking FROM %s of type %s for tag: %s " % (fromIOV, tag['time_type'], str(tag['name'])))
        prev_iov = session1.query(conddb.IOV.since).\
            filter(
                conddb.IOV.tag_name == first,
                conddb.IOV.since <= fromIOV,
            ).\
            order_by(conddb.IOV.since.desc()).\
            limit(1).\
            scalar()
        logging.debug('The closest smaller IOV than the given starting one (--from %s) is %s...', fromIOV, prev_iov)
    # Copy the distinct payloads referenced in the IOVs of the tag
    # FIXME: Put the DISTINCT query as a subquery (we can't directly use distinct on BLOBs)
    query = session1.query(conddb.IOV.payload_hash).filter(conddb.IOV.tag_name == first)
    if prev_iov is not None:
        query = query.filter(conddb.IOV.since >= prev_iov)
    if toIOV is not None:
        logging.debug("filtering with TO %s of type %s for tag: %s to " % (toIOV, tag['time_type'], str(tag['name'])))
        query = query.filter(conddb.IOV.since <= toIOV)
    query = query.distinct()
    for (payload_hash, ) in query:
        if _exists(session2, conddb.Payload.hash, payload_hash):
            logging.info('Skipping copy of payload %s to %s since it already exists...', str_db_object(args.db, payload_hash), str_db_object(args.destdb, payload_hash))
        else:
            logging.info('Copying payload %s to %s ...', str_db_object(args.db, payload_hash), str_db_object(args.destdb, payload_hash))
            session2.add(conddb.Payload(**_rawdict(session1.query(conddb.Payload).filter(conddb.Payload.hash == payload_hash).one())))
    # Copy the IOVs of the tag
    query = session1.query(conddb.IOV).filter(conddb.IOV.tag_name == first)
    if prev_iov is not None:
        query = query.filter(conddb.IOV.since >= prev_iov)
    if toIOV is not None:
        query = query.filter(conddb.IOV.since <= toIOV)
    for iov in query:
        logging.debug('Copying IOV %s -> %s...', str_iov(iov.since, iov.insertion_time), iov.payload_hash)
        iov = _rawdict(iov)
        iov['tag_name'] = second
        # In the first IOV of the tag we need to use the starting point given
        # by the user, instead of the one coming from the source tag; unless
        # the starting point was before any IOV: in such case, up to the first
        # IOV there is no payload, so we use the one from the source tag.
        # Note that we need to replace it for every insertion time (since
        # the primary key is (since, insertion_time).
        if prev_iov is not None and iov['since'] == prev_iov:
            # Use the fromIOV parameter directly (equal to args.from when the
            # bound came from the command line).  The previous code read
            # getattr(args, 'from'), which raises AttributeError on the
            # timeMap code path where no 'from' argument exists.
            iov['since'] = fromIOV
        session2.add(conddb.IOV(**iov))
def copy(args):
_check_same_object(args)
connection1, connection2 = connect(args, read_only=False)
session1, session2 = connection1.session(), connection2.session()
args.type, args.first = _identify_object(session1, args.type, args.first)
if args.type == 'payload':
if args.second is None:
args.second = args.first
elif args.first != args.second:
raise Exception('Cannot modify the name (hash) of a payload while copying, since the hash has to match the data.')
logging.info('Copying payload %s to %s ...', str_db_object(args.db, args.first), str_db_object(args.destdb, args.second))
# Copy the payload
session2.add(conddb.Payload(**_rawdict(session1.query(conddb.Payload).get(args.first))))
_confirm_changes(args)
session2.commit()
elif args.type == 'tag':
if args.second is None: