#!/usr/bin/env python3
'''CMS Conditions DB command-line tool.
'''

import argparse
import datetime
import getpass
import logging
import os
import re
import sys
import stat
import subprocess
import tempfile
import textwrap
import time
import pwd
import socket

import calendar
import sqlalchemy
import json

from prettytable import PrettyTable

import CondCore.Utilities.conddblib as conddb
import CondCore.Utilities.cond2xml as cond2xml
import CondCore.Utilities.conddb_serialization_metadata as serialization_metadata
import CondCore.Utilities.conddb_time as conddb_time

from CondCore.Utilities.tier0 import Tier0Handler, Tier0Error, tier0Url
# -------------------------------------------------------------------------------------------------------

# TODO: Diffs may look better in the -/+ mode, instead of the 2-column mode.
# TODO: Colored diff! (green +, red -)
# TODO: Support the old connection string syntax, e.g. sqlite_file://...

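# 2**64 - 1, the largest unsigned 64-bit value: used as the open-ended upper
# bound for IOV sinces. Numeric strings with at least sizeOfTimestamp digits
# are interpreted as packed 64-bit timestamps rather than plain run numbers
# (see listRuns_ below).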
maxSince = 18446744073709551615
sizeOfTimestamp = 19

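# Tag-protection access codes and credential types, mirroring the values
# stored in the database; the access codes are rendered via
# db_access_code_map below.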
tag_db_lock_access_code   = 8
tag_db_write_access_code  = 2
tag_db_read_access_code   = 1
tag_db_no_protection_code = 0

db_key_credential_type_code     = 1
db_cmsuser_credential_type_code = 2
db_session_credential_type_code = 4

db_access_code_map = { tag_db_write_access_code: "W", tag_db_lock_access_code: "L" }

db_credential_type_map = { db_key_credential_type_code : ("DB KEY",'K'),
                           db_cmsuser_credential_type_code : ("CMS USER",'U'),
                           db_session_credential_type_code : ("DB SESSION",'S'), }

# Utility functions

def _rawdict(obj):
    return {str(column): getattr(obj, column) for column in obj.__table__.columns.keys()}

def _rawdict_selection(obj):
    return {str(column): getattr(obj, column) for column in obj._fields}

def _get_payload_full_hash(session, payload, check=True):
    # Limited to 2 to know whether there is more than one in a single query
    Payload = session.get_dbtype(conddb.Payload)
    payloads = session.query(Payload.hash).\
            filter(Payload.hash.like('%s%%' % payload.lower())).\
            limit(2).\
            all()

    if check:
        if len(payloads) == 0:
            raise Exception('There is no payload matching %s in the database.' % payload)
        if len(payloads) > 1:
            raise Exception('There is more than one payload matching %s in the database. Please provide a longer prefix.' % payload)

    return payloads[0].hash if len(payloads) == 1 else None

def _dump_payload(session, payload, loadonly):

    Payload = session.get_dbtype(conddb.Payload)
    data = session.query(Payload.data).\
        filter(Payload.hash == payload).\
        one()[0]
    logging.info('Loading %spayload %s of length %s ...', '' if loadonly else 'and dumping ', payload, len(data))
    print('Data (TODO: Replace with the call to the actual compiled C++ tool):', repr(data))


def _identify_object(session, objtype, name):
    # We can't just use get() here since frontier fetches the entire
    # BLOBs by default when requesting them in a column

    Tag = session.get_dbtype(conddb.Tag)
    GlobalTag = session.get_dbtype(conddb.GlobalTag)
    if objtype is not None:
        # Check the type is correct (i.e. if the object exists)
        if objtype == 'tag':
            if not _exists(session, Tag.name, name):
                raise Exception('There is no tag named %s in the database.' % name)
        elif objtype == 'gt':
            if not _exists(session, GlobalTag.name, name):
                # raise Exception('There is no global tag named %s in the database.' % name)
                logging.info('There is no global tag table in the database.')
        elif objtype == 'payload':
            # In the case of a payload, check and also return the full hash
            return objtype, _get_payload_full_hash(session, name)

        return objtype, name

    # Search for the object
    tag = _exists(session, Tag.name, name)
    global_tag = _exists(session, GlobalTag.name, name)
    payload_hash = _get_payload_full_hash(session, name, check = False)

    count = sum(1 for x in (tag, global_tag, payload_hash) if x)
    if count > 1:
        raise Exception('There is more than one object named %s in the database.' % name)
    if count == 0:
        raise Exception('There is no tag, global tag or (unique) payload named %s in the database.' % name)

    if tag:
        return 'tag', name
    elif global_tag:
        return 'gt', name
    elif payload_hash is not None:
        return 'payload', payload_hash

    raise Exception('Should not have arrived here.')


def _get_editor(args):
    if args.editor is not None:
        return args.editor

    editor = os.environ.get('EDITOR')
    if editor is None:
        raise Exception('An editor was not provided and the EDITOR environment variable does not exist either.')

    return editor


def _run_editor(editor, tempfd):
    tempfd.flush()
    subprocess.check_call('%s %s' % (editor, tempfd.name), shell=True)
    tempfd.seek(0)


def _parse_timestamp(timestamp):
    try:
        return datetime.datetime.strptime(timestamp, '%Y-%m-%d %H:%M:%S.%f')
    except ValueError:
        pass

    try:
        return datetime.datetime.strptime(timestamp, '%Y-%m-%d %H:%M:%S')
    except ValueError:
        pass

    try:
        return datetime.datetime.strptime(timestamp, '%Y-%m-%d')
    except ValueError:
        pass

    raise Exception("Could not parse timestamp '%s'" % timestamp)


def _confirm_changes(args):
    if not args.yes:
        output(args, 'Confirm changes? [n]', newline=False)
        if input().lower() not in ['y', 'yes']:
            raise Exception('Aborted by the user.')

def _get_user_note(args, message):
    output(args, message, newline=False)
    note = input()
    if not note.strip():
        output(args, 'Provided note is invalid, please provide a non-empty string: ', newline=False)
        note = input()
        if not note.strip():
            raise Exception('Sorry, bailing out...')
    return note

def _exists(session, primary_key, value):
    ret = None
    try:
        ret = session.query(primary_key).\
            filter(primary_key == value).\
            count() != 0
    except sqlalchemy.exc.OperationalError:
        pass

    return ret

def _regexp(connection, field, regexp):
    '''To be used inside filter().
    '''

    if connection.is_oracle or connection.is_frontier:
        return sqlalchemy.func.regexp_like(field, regexp)
    elif connection.is_sqlite:
        # Relies on being a SingletonThreadPool
        connection.engine.pool.connect().create_function('regexp', 2, lambda data, regexp: re.search(regexp, data) is not None)
        return sqlalchemy.func.regexp(field, regexp)
    else:
        raise Exception('Unimplemented.')


def _ilike_or_regexp(args, connection, field, term):
    '''To be used inside filter().
    '''

    if args.regexp:
        return _regexp(connection, field, term)

    return field.ilike('%%%s%%' % term)


def _ilike_or_regexp_highlight(args, string, term):
    '''Highlights the strings that would have matched _ilike_or_regexp()
    in the database, i.e. performs the same search client-side and adds
    colors around the matches
    '''

    highlight = colors.bold_red + '\\1' + colors.end

    if args.regexp:
        return re.sub('(%s)' % term, highlight, string)

    return re.sub('(%s)' % re.escape(term), highlight, string, flags=re.IGNORECASE)


def _list_object(obj):
    table = []

    for column in obj.__table__.columns.keys():
        table.append([column, getattr(obj, column)])

    return table


def _output_list_object(args, obj):
    output_table(args,
        _list_object(obj),
        ['Property', 'Value'],
    )


def _diff_objects(object1, object2):
    table = []

    columns = list(object1.__table__.columns.keys())
    columns.remove('name')
    for column in columns:
        value1 = getattr(object1, column)
        value2 = getattr(object2, column)
        if value1 != value2:
            table.append([column, value1, value2])

    return table


def _output_diff_objects(args, object1, object2):
    output_table(args,
        _diff_objects(object1, object2),
        ['Property', '%s Value' % str_db_object(args.db, args.first), '%s Value' % str_db_object(args.destdb, args.second)],
    )


def _default(value, default_value='-'):
    return default_value if value is None else value


def _truefalse(value):
    return 'Present' if value else '-'


def _check_same_object(args):
    if (args.destdb is None or args.db == args.destdb) and (args.second is None or args.first == args.second):
        raise Exception('The source object and the destination object are the same (i.e. same database and same name): %s' % str_db_object(args.db, args.first))

def _connect(db, init, read_only, args, as_admin=False):

    logging.debug('Preparing connection to %s ...', db)

    url = conddb.make_url( db, read_only)
    pretty_url = url
    if url.drivername == 'oracle+frontier':
        # rsplit always returns a non-empty list, so the last element is valid
        ws = url.host.rsplit('%2F')
        pretty_url = 'frontier://%s/%s' % (ws[-1], url.database)
    connTo = '%s [%s]' % (db, pretty_url)
    logging.info('Connecting to %s', connTo)
    logging.debug('DB url: %s', url)
    verbose = 0
    if args.verbose is not None:
        verbose = args.verbose - 1
    connection = conddb.connect(url, args.authPath, verbose, as_admin)


    if not read_only:
        if connection.is_read_only:
            raise Exception('Impossible to edit a read-only database.')

        if connection.is_official:
            if args.force:
                if not args.yes:
                    logging.warning('You are going to edit an official database. If you are not one of the Offline DB experts but have access to the password for other reasons, please stop now.')
            else:
                raise Exception('Editing official databases is forbidden. Use the official DropBox to upload conditions. If you need a special intervention on the database, see the contact help: %s' % conddb.contact_help)
        # for sqlite we trigger the implicit schema creation
        if url.drivername == 'sqlite':
            if init:
                connection.init()
    if not connection._is_valid:
        raise Exception('No valid schema found in the database.')

    return connection


def connect(args, init=False, read_only=True, as_admin=False):
    args.force = getattr(args, 'force', False)

    if 'destdb' in args:
        if args.destdb is None:
            args.destdb = args.db
        if args.db == args.destdb:
            conn1 = _connect(args.destdb, init, read_only, args)
            return conn1, conn1
        conn1 = _connect( args.db, init, True, args)
        conn2url = conddb.make_url(args.destdb, False)
        if conn2url.drivername == 'sqlite' and not os.path.exists(args.destdb):
            init = True
        conn2 = _connect(args.destdb, init, False, args)
        return conn1, conn2

    return _connect( args.db, init, read_only, args, as_admin)

def str_db_object(db, name):
    return '%s::%s' % (db, name)


def str_iov(since, insertion_time):
    return '(%s, %s)' % (since, insertion_time)


def str_record(record, label):
    return '(%s, %s)' % (record, label)


class Colors(object):
    normal_template = '\033[9%sm'
    bold_template = '\033[9%s;1m'

    bold = '\033[1m'

    black   = normal_template % 0
    red     = normal_template % 1
    green   = normal_template % 2
    yellow  = normal_template % 3
    blue    = normal_template % 4
    magenta = normal_template % 5
    cyan    = normal_template % 6
    white   = normal_template % 7

    bold_black   = bold_template % 0
    bold_red     = bold_template % 1
    bold_green   = bold_template % 2
    bold_yellow  = bold_template % 3
    bold_blue    = bold_template % 4
    bold_magenta = bold_template % 5
    bold_cyan    = bold_template % 6
    bold_white   = bold_template % 7

    end = '\033[0m'

    def __init__(self, args):

        if ( stat.S_ISFIFO(os.fstat(sys.stdout.fileno()).st_mode)  or  # we are running in a pipe
             args.nocolors ):
            self.noColors()

    def noColors(self):
        # Blank out every escape sequence; skip methods (such as this one)
        for member in dir(self):
            if not member.startswith('_') and isinstance(getattr(self, member), str):
                setattr(self, member, '')

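# Global Colors instance; created at startup once the command-line arguments
# are known (Colors(args) disables colors when writing to a pipe or when
# args.nocolors is set).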
colors = None


def output(args, string, *parameters, **kwargs):
    if args.quiet:
        return

    output_file = kwargs.get('output_file', sys.stdout)

    to_print = string + colors.end
    if parameters:
        to_print = string % parameters + colors.end
    print(to_print, end=' ', file=output_file)

    if kwargs.get('newline', True):
        print(file=output_file)


def _strip_colors(args, string):
    '''Strips colors (i.e. ANSI sequences).
    '''

    if args.nocolors:
        return string

    return re.sub('\x1b\\[[;\\d]*[A-Za-z]', '', string)


def _ljust_colors(args, string, width, fillchar=' '):
    '''Same as string.ljust(width, fillchar) but supporting colors.
    '''

    if args.nocolors:
        return string.ljust(width, fillchar)

    return string + fillchar * (width - len(_strip_colors(args, string)))


def output_table(args, table, headers, filters=None, output_file=None, no_first_header=False, no_max_length=False):
    if args.quiet:
        return

    if output_file is None:
        output_file = sys.stdout

    if filters is None:
        filters = [None] * len(headers)

    def max_length_filter(s):
        #s = str(s).replace('\n', '\\n')
        s = str(s).replace('\n', '  ')
        s = s.replace('\r', '  ')
        return '%s...' % s[:conddb.name_length] if ( len(s) > conddb.name_length and not no_max_length ) else s

    new_table = [[] for _ in range(len(table))]
    for column_index in range(len(headers)):
        for row_index, row in enumerate(table):
            cell = max_length_filter(row[column_index])
            if filters[column_index] is not None:
                cell = filters[column_index](cell)
            new_table[row_index].append(cell)

    # Calculate the width of each column
    widths = []
    for column_index in range(len(headers)):
        width = len(headers[column_index])
        for row in new_table:
            width = max(width, len(_strip_colors(args, row[column_index])))
        widths.append(width)

    # Print the table
    header_separator = '-'
    column_separator = ''

    for column_index, header in enumerate(headers):
        output(args, colors.bold + _ljust_colors(args, header, widths[column_index]) + ' ' + column_separator, newline=False, output_file=output_file)
    output(args, '', output_file=output_file)

    for column_index in range(len(headers)):
        output(args, (' ' if column_index == 0 and no_first_header else header_separator) * widths[column_index] + ' ' + column_separator, newline=False, output_file=output_file)
    output(args, '', output_file=output_file)

    for row in new_table:
        for column_index, cell in enumerate(row):
            output(args, _ljust_colors(args, cell, widths[column_index]) + ' ' + column_separator, newline=False, output_file=output_file)
        output(args, '', output_file=output_file)
    output(args, '', output_file=output_file)


# Commands
def help(args):
    output(args, colors.bold + 'CMS Condition DB command-line tool.')
    output(args, '')
    output(args, colors.bold + 'Usage')
    output(args, colors.bold + '-----')
    output(args, '')
    output(args, '  This tool provides several subcommands, each of which')
    output(args, '  serves a well-defined purpose.')
    output(args, '')
    output(args, '  To see the list of available subcommands and the global options, run:')
    output(args, '')
    output(args, '    conddb -h')
    output(args, '')
    output(args, '  To see the help of a subcommand and its options, run:')
    output(args, '')
    output(args, '    conddb <command> -h')
    output(args, '    e.g. conddb list -h')
    output(args, '')
    output(args, '')
    output(args, colors.bold + 'Exit status')
    output(args, colors.bold + '-----------')
    output(args, '')
    output(args, '  0  =  OK.')
    output(args, '  1  =  Runtime error (i.e. any kind of error not related to syntax).')
    output(args, '  2  =  Usage/syntax error.')
    output(args, '')
    output(args, '')
    output(args, colors.bold + 'Database parameter (--db)')
    output(args, colors.bold + '-------------------------')
    output(args, '  ' + '\n  '.join(textwrap.dedent(conddb.database_help).splitlines()))
    output(args, '')
    output(args, '')
    output(args, colors.bold + 'Contact help')
    output(args, colors.bold + '------------')
    output(args, '')
    output(args, '  ' + '\n  '.join(textwrap.wrap(conddb.contact_help)))
    output(args, '')


def init(args):
    connection = connect(args, init=True, read_only=False)

def status(args):
    connection = connect(args)

    valid = connection.is_valid()

    output(args, 'Database Status:')
    output(args, '')
    output(args, '         Schema:  %s', 'OK (required tables are present)' if valid else 'Wrong (missing required tables)')
    if not valid:
        return

    session = connection.session()
    Tag = session.get_dbtype(conddb.Tag)
    Payload = session.get_dbtype(conddb.Payload)
    GlobalTag = session.get_dbtype(conddb.GlobalTag)

    tag_count = session.query(Tag.name).count()
    payload_count = session.query(Payload.hash).count()
    global_tag_count = session.query(GlobalTag.name).count()

    output(args, '         # tags:  %s  %s', tag_count, '(the last %s inserted are shown below)' % args.limit if tag_count > 0 else '')
    output(args, '     # payloads:  %s  %s', payload_count, '(the last %s inserted are shown below)' % args.limit if payload_count > 0 else '')
    output(args, '  # global tags:  %s  %s', global_tag_count, '(the last %s inserted are shown below)' % args.limit if global_tag_count > 0 else '')
    output(args, '')

    if tag_count > 0:
        output_table(args,
            session.query(Tag.name, Tag.time_type, Tag.object_type, Tag.synchronization, Tag.insertion_time, Tag.description).\
                order_by(Tag.insertion_time.desc()).\
                limit(args.limit).\
                all(),
            ['Name', 'Time Type', 'Object Type', 'Synchronization', 'Insertion Time', 'Description'],
        )

    if payload_count > 0:
        output_table(args,
            session.query(Payload.hash, Payload.object_type, Payload.version, Payload.insertion_time).\
                order_by(Payload.insertion_time.desc()).\
                limit(args.limit).\
                all(),
            ['Payload', 'Object Type', 'Version', 'Insertion Time'],
        )

    if global_tag_count > 0:
        output_table(args,
            session.query(GlobalTag.name, GlobalTag.release, GlobalTag.insertion_time, GlobalTag.description).\
                order_by(GlobalTag.insertion_time.desc()).\
                limit(args.limit).\
                all(),
            ['Global Tag', 'Release', 'Insertion Time', 'Description'],
        )


def search(args):
    connection = connect(args)
    session = connection.session()

    max_limit = 100
    if args.limit is not None and int(args.limit) > max_limit:
        raise Exception('The limit on the number of returned results is capped at %s. Please use a reasonable limit.' % max_limit)

    if connection.is_frontier and ':' in args.string:
        raise Exception('Sorry, the colon : character is not allowed in queries to Frontier (yet). Please use another search term or connect to Oracle directly.')

    logging.info('Searching with a limit of %s results per type of object, starting from the latest inserted ones. If you do not find your object, please try to be more specific or increase the limit of returned results.', args.limit)

    if args.nocolors:
        _ilike_or_regexp_highlight_filter = None
    else:
        def _ilike_or_regexp_highlight_filter(cell):
            return _ilike_or_regexp_highlight(args, cell, args.string)

    def size(cell):
        return str( sys.getsizeof( bytearray(cell,encoding='utf8') ) )

    Tag = session.get_dbtype(conddb.Tag)
    output_table(args,
        session.query(Tag.name, Tag.time_type, Tag.object_type, Tag.synchronization, Tag.insertion_time, Tag.description).\
            filter(
                _ilike_or_regexp(args, connection, Tag.name, args.string)
                | _ilike_or_regexp(args, connection, Tag.object_type, args.string)
                | _ilike_or_regexp(args, connection, Tag.description, args.string)
            ).\
            order_by(Tag.insertion_time.desc()).\
            limit(args.limit).\
            all(),
        ['Tag', 'Time Type', 'Object Type', 'Synchronization', 'Insertion Time', 'Description'],
        filters = [_ilike_or_regexp_highlight_filter, None, _ilike_or_regexp_highlight_filter, None, None, _ilike_or_regexp_highlight_filter],
    )

    Payload = session.get_dbtype(conddb.Payload)
    output_table(args,
        session.query(Payload.hash, Payload.object_type, Payload.version, Payload.insertion_time, Payload.data).\
            filter(
                _ilike_or_regexp(args, connection, Payload.hash, args.string)
                | _ilike_or_regexp(args, connection, Payload.object_type, args.string)
            ).\
            order_by(Payload.insertion_time.desc()).\
            limit(args.limit).\
            all(),
        ['Payload', 'Object Type', 'Version', 'Insertion Time', 'Size'],
        filters = [_ilike_or_regexp_highlight_filter, _ilike_or_regexp_highlight_filter, None, None, size],
    )

    try:
        GlobalTag = session.get_dbtype(conddb.GlobalTag)
        output_table(args,
            session.query(GlobalTag.name, GlobalTag.release, GlobalTag.insertion_time, GlobalTag.description).\
                filter(
                    _ilike_or_regexp(args, connection, GlobalTag.name, args.string)
                    | _ilike_or_regexp(args, connection, GlobalTag.release, args.string)
                    | _ilike_or_regexp(args, connection, GlobalTag.description, args.string)
                ).\
                order_by(GlobalTag.insertion_time.desc()).\
                limit(args.limit).\
                all(),
            ['Global Tag', 'Release', 'Insertion Time', 'Description'],
            filters = [_ilike_or_regexp_highlight_filter, _ilike_or_regexp_highlight_filter, None, _ilike_or_regexp_highlight_filter],
         )
    except sqlalchemy.exc.OperationalError:
        sys.stderr.write("No table for GlobalTags found in DB.\n\n")


def _inserted_before(_IOV,timestamp):
    '''To be used inside filter().
    '''

    if timestamp is None:
        # XXX: Returning None does not get optimized (skipped) by SQLAlchemy,
        #      and returning True does not work in Oracle (generates "and 1"
        #      which breaks Oracle but not SQLite). For the moment just use
        #      this dummy condition.
        return sqlalchemy.literal(True) == sqlalchemy.literal(True)

    return _IOV.insertion_time <= _parse_timestamp(timestamp)

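# Time-based sinces pack (UNIX seconds, microseconds) into one 64-bit value,
# 32 bits each; lumi-based sinces pack (run, lumisection) the same way.
# For example, (1577836800 << 32) decodes to 2020-01-01 00:00:00, and run
# 300000 at lumisection 1 is encoded as (300000 << 32) | 1. _high()/_low()
# extract the two 32-bit halves.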
def _high(n):
    return int(n) >> 32

def _low(n):
    return int(n) & 0xffffffff

def _convertTimeType(since):
    #try:
    #    return str(datetime.datetime.utcfromtimestamp(_high(since)).replace(microsecond = _low(since)))
    #except ValueError:
    #    return str(datetime.datetime.utcfromtimestamp(_high(since)).replace(microsecond = _low(since)/1000))
    # looks like the format of the lower part of the encoding is broken. Better ignore it...
    return str(datetime.datetime.utcfromtimestamp(_high(since)).replace(microsecond = 0))

def _since_filter(time_type):
    '''Returns a filter function for the given time type that returns
    a human-readable string of the given since.

    For run (sinces are 32-bit unsigned integers), hash (sinces are strings)
    and user (sinces are strings) the filter returns the sinces unchanged.

    The time sinces are 64-bit integers built from a pair (UNIX time,
    microseconds), each 32-bit wide. The filter returns a readable timestamp,
    including the microseconds.

    The lumi sinces are 64-bit integers built from a pair (run, lumi),
    each 32-bit wide. The filter returns a string with both numbers, split.
    '''

    if time_type == conddb.TimeType.Time.value:
        return lambda since: '%s (%s)' % (_convertTimeType(since), since)

    if time_type == conddb.TimeType.Lumi.value:
        return lambda since: '%s : %5s (%s)' % (_high(since), _low(since), since)

    return lambda since: since

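# FCSR = first condition-safe run, i.e. (roughly) the earliest run for which
# newly uploaded conditions can still take effect: for hlt/express it is
# derived from the RunInfo table (last known run + 1), for prompt/pcl it is
# queried from the Tier-0 data service.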
def _get_hlt_fcsr( session, timeType ):
    RunInfo = session.get_dbtype(conddb.RunInfo)
    lastRun = session.query(sqlalchemy.func.max(RunInfo.run_number)).scalar()
    fcsr = lastRun + 1
    if timeType == 'Time':
        raise Exception('Cannot find time for non-existing runs.')
    ret = _convert_time( session, timeType, fcsr )
    if timeType == 'Run':
        logging.info('Found fcsr for hlt: %s' %fcsr)
    else:
        logging.info('Found fcsr for hlt: %s ( %s as %s type )' %(fcsr,ret,timeType))
    return ret

def _get_prompt_fcsr( session, timeType ):
    tier0timeout = 5
    tier0maxtime = 60
    tier0retries = 3
    tier0retryPeriod = 5
    tier0proxy = None
    try:
        t0DataSvc = Tier0Handler( tier0Url,
                                  tier0timeout, tier0maxtime, tier0retries, tier0retryPeriod,
                                  tier0proxy, False )
        try:
            fcsr = t0DataSvc.getFirstSafeRun()
        except ValueError as e:
            logging.error('ValueError for firstConditionSafeRun from Tier-0 %s ' % (str(e),) )
            # We got an answer but it is invalid. So far this usually means
            # "None" which is not JSON, when the Tier0 is stopped.
            raise Exception('Invalid firstConditionSafeRun from Tier-0')
        except Tier0Error:
            # Impossible to get anything from the server after retries,
            # i.e. unreachable, so no data.
            raise Exception('Tier-0 is unreachable, i.e. no firstConditionSafeRun')
    except Exception as e:
        raise Exception('Error setting up Tier-0 data service: %s' %str(e))
    ret = _convert_time( session, timeType, fcsr )
    if timeType == 'Run':
        logging.info('Found Tier0 fcsr for prompt: %s' %fcsr)
    else:
        logging.info('Found Tier0 fcsr for prompt: %s ( %s as %s type )' %(fcsr,ret,timeType))
    return ret

def _get_last_frozen_since( session, tagName, fcsr=None ):
    IOV = session.get_dbtype(conddb.IOV)
    query = session.query(sqlalchemy.func.max(IOV.since)).filter(IOV.tag_name == tagName )
    if fcsr is not None:
        query = query.filter(IOV.since<fcsr)
    res = query.scalar()
    logging.debug('Last frozen since in destination tag is %s' %res)
    return res

def _get_maxtime_for_boost_version( session, timeType, boost_version):
    BoostRunMap = session.get_dbtype(conddb.BoostRunMap)
    q = session.query(BoostRunMap).order_by(BoostRunMap.boost_version.asc())
    time = (maxSince,maxSince,maxSince)
    for r in q:
        r = _rawdict(r)
        if boost_version < r['boost_version']:
            tlumi = r['run_number']<<32|0x1
            time = (r['run_number'],tlumi,r['run_start_time'])
            break
    if timeType=='Run':
        return time[0]
    elif timeType=='Lumi':
        return time[1]
    elif timeType=='Time':
        return time[2]
    return None

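# Converters between run-based and time-based IOVs, using the run start/end
# times stored in the RunInfo table. Both operate on dicts keyed by since.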
class run_to_timestamp( object ):
    def __init__( self, session ):
        self.session = session

    def convertOne( self, runNumber ):
        logging.debug('Converting run %s to timestamp...' %runNumber)
        RunInfo = self.session.get_dbtype(conddb.RunInfo)
        bestRun = self.session.query(RunInfo.run_number,RunInfo.start_time, RunInfo.end_time).filter(RunInfo.run_number >= runNumber).first()
        if bestRun is None:
            raise Exception("Run %s can't be matched with an existing run in the database." %runNumber)
        return bestRun[1],bestRun[2]

    def convertIovs( self, iovs ):
        ks = sorted(iovs.keys())
        logging.info('Converting %s run-based iovs to time-based' %len(iovs) )
        RunInfo = self.session.get_dbtype(conddb.RunInfo)
        maxRun = ks[-1]
        upperRun = self.session.query(RunInfo.run_number).filter(RunInfo.run_number >= maxRun ).first()
        if upperRun is None:
            raise Exception("Upper limit run %s cannot be matched with an existing run in the database." %maxRun)
        upperRun = upperRun[0]
        runs = self.session.query(RunInfo.run_number,RunInfo.start_time).filter(RunInfo.run_number >= ks[0],RunInfo.run_number <=upperRun).\
            order_by(RunInfo.run_number).all()
        newiovs = {}
        for since in ks:
            # take the smallest greater or equal to the target
            bestRun = min([run for run in runs if run[0] >= since],key=lambda x: x[0])
            bestRunTime = calendar.timegm( bestRun[1].utctimetuple() ) << 32
            newiovs[bestRunTime] = iovs[since]
        return newiovs

class timestamp_to_run( object ):
    def __init__( self, session ):
        self.session = session

    def convertIovs( self, iovs ):
        ks = sorted(iovs.keys())
        logging.info('Converting %s time-based iovs to run-based' %len(iovs) )
        RunInfo = self.session.get_dbtype(conddb.RunInfo)
        minTs = datetime.datetime.utcfromtimestamp( int(ks[0]) >> 32 )
        maxTs = datetime.datetime.utcfromtimestamp( int(ks[-1]) >> 32 )
        if self.session.query(RunInfo.end_time).filter(RunInfo.end_time > minTs).count() == 0:
            raise Exception("Lower IOV since timestamp %s cannot be matched with an existing run in the database." %minTs)
        firstRun = self.session.query(sqlalchemy.func.min(RunInfo.run_number)).filter(RunInfo.end_time > minTs).one()[0]
        if self.session.query(RunInfo.start_time).filter(RunInfo.start_time < maxTs).count() == 0:
            raise Exception("Upper IOV since timestamp %s cannot be matched with an existing run in the database." %maxTs)
        lastRun = self.session.query(sqlalchemy.func.max(RunInfo.run_number)).filter(RunInfo.start_time < maxTs).one()[0]
        runs = self.session.query(RunInfo.run_number,RunInfo.start_time,RunInfo.end_time).filter(RunInfo.run_number >= firstRun).filter(RunInfo.run_number <= lastRun ).order_by(RunInfo.run_number).all()
        newiovs = {}
        prevRun = None
        for since in ks:
            ts = datetime.datetime.utcfromtimestamp( since >> 32 )
            run = None
            for r in runs:
                if r[1] <= ts <= r[2]:
                    run = r[0]
                    break
            if run is not None:
                if run == prevRun:
                    logging.info('Skipping iov with since %s, because it corresponds to an already mapped run %s' %(since,run))
                else:
                    prevRun = run
                    newiovs[run] = iovs[since]
            else:
                logging.info('Skipping iov with since %s, because no run is matching that time.'%since )
        return newiovs

def _convert_time( session, toTimeType, runNumber ):
    if toTimeType == 'Run':
        return runNumber
    elif toTimeType == 'Lumi':
        lumiId = runNumber<<32|0x1
        logging.debug('Run %s encoded in lumi id %s' %(runNumber,lumiId))
        return lumiId
    elif toTimeType == 'Time':
        converter = run_to_timestamp( session )
        start, stop = converter.convertOne( runNumber )
        logging.debug('Run %s converted in time (start) %s' %(runNumber,start))
        timest = calendar.timegm( start.utctimetuple() ) << 32
        return timest
    else:
        raise Exception('Cannot convert runs to time type %s' %toTimeType)

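# Synchronization policies: each class below validates a dict of candidate
# IOVs (keyed by since) against the constraints of the destination tag and
# returns a (valid, filtered_iovs) pair from validate(). The policy is
# selected from the tag synchronization via _synchro_map and
# _get_synchro_policy below.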
class hlt_synchro_policy( object ):
    def __init__(self, session1, session2, timeType, destTag):
        if timeType in ('Time','Lumi'):
            raise Exception("Can't synchronize a tag with time type '%s' to hlt" %timeType)
        session = conddb.getSessionOnMasterDB( session1, session2 )
        self.fcsr = _get_hlt_fcsr( session, timeType )
        self.lastFrozenSince = _get_last_frozen_since( session2, destTag, self.fcsr )

    def validate( self, iovs ):
        new_iovs = {}
        late_iovs = []
        for since in sorted(iovs.keys(),reverse=True):
            if since >= self.fcsr:
                new_iovs[since] = iovs[since]
            else:
                if self.lastFrozenSince is None or since > self.lastFrozenSince:
                    if self.fcsr not in new_iovs:
                        new_iovs[self.fcsr] = iovs[since]
                    else:
                        late_iovs.append(since)
                else:
                    late_iovs.append(since)
        nlate = len(late_iovs)
        if nlate>0:
            logging.warning('%s IOV(s) with since less than the hlt FCSR (%s) have been discarded.' %(nlate,self.fcsr) )
        return True, new_iovs

class prompt_synchro_policy( object ):
    def __init__(self, session1, session2, timeType, destTag):
        session = conddb.getSessionOnMasterDB( session1, session2 )
        self.fcsr = _get_prompt_fcsr( session, timeType )
        self.lastFrozenSince = _get_last_frozen_since( session2, destTag, self.fcsr )

    def validate( self, iovs ):
        new_iovs = {}
        late_iovs = []
        for since in sorted(iovs.keys(),reverse=True):
            if since >= self.fcsr:
                new_iovs[since] = iovs[since]
            else:
                if self.lastFrozenSince is None or since > self.lastFrozenSince:
                    if self.fcsr not in new_iovs:
                        new_iovs[self.fcsr] = iovs[since]
                    else:
                        late_iovs.append(since)
                else:
                    late_iovs.append(since)
        nlate = len(late_iovs)
        if nlate>0:
            logging.warning('%s IOV(s) with since less than the tier0 FCSR (%s) have been discarded.' %(nlate,self.fcsr) )
        return True, new_iovs

class pcl_synchro_policy( object ):
    def __init__(self, session1, session2, timeType, destTag):
        session = conddb.getSessionOnMasterDB( session1, session2 )
        self.fcsr = _get_prompt_fcsr(session, timeType)

    def validate( self, iovs ):
        new_iovs = {}
        late_iovs = []
        ret = True
        for since in sorted(iovs.keys(),reverse=True):
            if since >= self.fcsr:
                new_iovs[since] = iovs[since]
            else:
                late_iovs.append(since)
        nlate = len(late_iovs)
        if nlate>0:
            logging.error('%s IOV(s) with since less than the tier0 FCSR (%s) have been discarded.' %(nlate,self.fcsr) )
            ret = False
        return ret, new_iovs

class mc_synchro_policy( object ):
    def __init__(self, session1, session2, timeType, destTag):
        self.lastFrozenSince = _get_last_frozen_since( session2, destTag )

    def validate( self, iovs ):
        new_iovs = {}
        niovs = len(iovs)
        ret = True
        if self.lastFrozenSince is None:
            if 1 not in iovs:
                raise Exception( 'Could not find an iov with since 1 in the source tag.')
            new_iovs[1] = iovs[1]
            if niovs>1:
                logging.warning('%s IOV(s) with since greater than the expected mc since=1 will be discarded.' %(niovs-1))
        else:
            if niovs>0:
                ret = False
                logging.warning('The destination tag is frozen - no iov can be added.')
        return ret, new_iovs

class runmc_synchro_policy( object ):
    def __init__(self, session1, session2, timeType, destTag):
        self.lastFrozenSince = _get_last_frozen_since( session2, destTag )

    def validate( self, iovs ):
        new_iovs = {}
        niovs = len(iovs)
        ret = True
        if self.lastFrozenSince is not None:
            if niovs>0:
                ret = False
                logging.warning('The destination tag is frozen - no iov can be added.')
        return ret, new_iovs

class offline_synchro_policy( object ):
    def __init__(self, session1, session2, timeType, destTag):
        self.lastFrozenSince = _get_last_frozen_since( session2, destTag )

    def validate( self, iovs ):
        new_iovs = {}
        late_iovs = []
        for since in sorted(iovs.keys(),reverse=True):
            if self.lastFrozenSince is None or since > self.lastFrozenSince:
                new_iovs[since] = iovs[since]
            else:
                late_iovs.append(since)
        nlate = len(late_iovs)
        ret = True
        if nlate>0:
            ret = False
            logging.warning('%s IOV(s) with since less than the last since in the destination tag (%s) have been discarded.' %(nlate,self.lastFrozenSince) )
        return ret, new_iovs

class no_synchro_policy( object ):
    def __init__(self, session1=None, session2=None, timeType=None, destTag=None):
        pass

    def validate( self, iovs ):
        return True, iovs

_synchro_map = { 'hlt': hlt_synchro_policy, 'express': hlt_synchro_policy, 'prompt': prompt_synchro_policy, 'pcl': pcl_synchro_policy,
                 'mc': mc_synchro_policy, 'offline': offline_synchro_policy, 'any': no_synchro_policy, 'validation': no_synchro_policy,
                 'runmc': runmc_synchro_policy }

def _get_synchro_policy( synchronization ):
    if synchronization not in _synchro_map:
        raise Exception('Cannot handle synchronization %s' %synchronization)
    return _synchro_map[synchronization]

def mc_validate( session, tag ):
    IOV = session.get_dbtype(conddb.IOV)
    niovs = session.query(IOV).filter(IOV.tag_name==tag).count()
    if niovs>1:
        logging.error('Validation of tag content for synchronization "mc" failed: more than one IOV found.')
        return False
    if niovs>0:
        r = int(session.query(IOV.since).filter(IOV.tag_name==tag).one()[0])
        if r!=1:
            logging.error('Validation of tag content for synchronization "mc" failed: IOV since=%s (expected=1)' %r)
            return False
    return True

def listTags_(args):
    connection = connect(args)
    session = connection.session()
    Tag = session.get_dbtype(conddb.Tag)
    output_table(args,
        session.query(Tag.name, Tag.time_type, Tag.object_type, Tag.synchronization, Tag.end_of_validity, Tag.insertion_time, Tag.description ).\
            order_by(Tag.insertion_time, Tag.name).\
            all(),
        ['Name', 'TimeType', 'ObjectType', 'Synchronisation', 'EndOfValidity', 'Insertion_time', 'Description'],
    )
    return 0


def listParentTags_(args):
    connection = connect(args)
    session = connection.session()

    IOV = session.get_dbtype(conddb.IOV)
    Tag = session.get_dbtype(conddb.Tag)

    query_result = session.query(IOV.tag_name).filter(IOV.payload_hash == args.hash_name).all()
    tag_names = [entry[0] for entry in query_result]

    listOfOccur = []

    for tag in tag_names:
        synchro = session.query(Tag.synchronization).filter(Tag.name == tag).all()
        iovs = session.query(IOV.since).filter(IOV.tag_name == tag).filter(IOV.payload_hash == args.hash_name).all()
        times = session.query(IOV.insertion_time).filter(IOV.tag_name == tag).filter(IOV.payload_hash == args.hash_name).all()

        synchronization = [item[0] for item in synchro]
        listOfIOVs  = [item[0] for item in iovs]
        listOfTimes = [str(item[0]) for item in times]

        for iEntry in range(0,len(listOfIOVs)):
            listOfOccur.append({"tag": tag,
                                "synchronization" : synchronization[0],
                                "since" : listOfIOVs[iEntry] ,
                                "insertion_time" : listOfTimes[iEntry] })

    t = PrettyTable(['hash', 'since','tag','synch','insertion time'])
    for element in listOfOccur:
        t.add_row([args.hash_name,element['since'],element['tag'],element['synchronization'],element['insertion_time']])

    print(t)


def diffGlobalTagsAtRun_(args):
    connection = connect(args)
    session = connection.session()

    IOV     = session.get_dbtype(conddb.IOV)
    TAG     = session.get_dbtype(conddb.Tag)
    GT      = session.get_dbtype(conddb.GlobalTag)
    GTMAP   = session.get_dbtype(conddb.GlobalTagMap)
    RUNINFO = session.get_dbtype(conddb.RunInfo)

    ####################################
    # Get the time info for the test run
    ####################################

    if(not args.lastIOV):

        if(int(args.testRunNumber)<0):
            raise Exception("Run %s (default) can't be matched with an existing run in the database. \n\t\t Please specify a run with the option --run." % args.testRunNumber)

        if(int(args.testRunNumber)!=1):
            bestRun = session.query(RUNINFO.run_number, RUNINFO.start_time, RUNINFO.end_time).filter(RUNINFO.run_number == int(args.testRunNumber)).first()
            if bestRun is None:
                raise Exception("Run %s can't be matched with an existing run in the database." % args.testRunNumber)

            print("Run",args.testRunNumber," |Start time",bestRun[1]," |End time",bestRun[2],".")

    ####################################
    # Get the Global Tag snapshots
    ####################################

    refSnap = session.query(GT.snapshot_time).\
        filter(GT.name == args.refGT).all()[0][0]

    tarSnap = session.query(GT.snapshot_time).\
        filter(GT.name == args.tarGT).all()[0][0]

    print("reference GT (",args.refGT ,") snapshot: ",refSnap," | target GT (",args.tarGT,") snapshot",tarSnap)

    ####################################
    # Get the Global Tag maps
    ####################################

    GTMap_ref = session.query(GTMAP.record, GTMAP.label, GTMAP.tag_name).\
        filter(GTMAP.global_tag_name == args.refGT).\
        order_by(GTMAP.record, GTMAP.label).\
        all()

    GTMap_tar = session.query(GTMAP.record, GTMAP.label, GTMAP.tag_name).\
        filter(GTMAP.global_tag_name == args.tarGT).\
        order_by(GTMAP.record, GTMAP.label).\
        all()

    text_file = open("diff_%s_vs_%s.twiki" % (args.refGT, args.tarGT), "w")

    differentTags = {}

    for element in GTMap_ref:
        RefRecord = element[0]
        RefLabel  = element[1]
        RefTag    = element[2]

        for element2 in GTMap_tar:
            if (RefRecord == element2[0] and RefLabel==element2[1]):
                if RefTag != element2[2]:
                    differentTags[(RefRecord,RefLabel)]=(RefTag,element2[2])

    ####################################
    ## Search for (record, label) pairs not found in the other list
    ####################################

    tarKeys = set((record, label) for record, label, tag in GTMap_tar)
    for elem in GTMap_ref:
        if (elem[0], elem[1]) not in tarKeys:
            differentTags[(elem[0], elem[1])] = (elem[2], "")

    refKeys = set((record, label) for record, label, tag in GTMap_ref)
    for elem in GTMap_tar:
        if (elem[0], elem[1]) not in refKeys:
            differentTags[(elem[0], elem[1])] = ("", elem[2])

    text_file.write("| *Record* | *"+args.refGT+"* | *"+args.tarGT+"* | Remarks | \n")

    t = PrettyTable()

    if(args.isVerbose):
        t.field_names = ['/','',args.refGT,args.tarGT,refSnap,tarSnap]
    else:
        t.field_names = ['/','',args.refGT,args.tarGT]

    t.hrules = 1

    if(args.isVerbose):
        t.add_row(['Record','label','Reference Tag','Target Tag','hash1:time1:since1','hash2:time2:since2'])
    else:
        t.add_row(['Record','label','Reference Tag','Target Tag'])

    isDifferent = False

    ####################################
    # Loop on the differences
    ####################################

    for Rcd in sorted(differentTags):

        # empty lists at the beginning
        refTagIOVs = []
        tarTagIOVs = []

        if( differentTags[Rcd][0]!=""):
            refTagIOVs = session.query(IOV.since,IOV.payload_hash,IOV.insertion_time).filter(IOV.tag_name == differentTags[Rcd][0]).all()
            refTagInfo = session.query(TAG.synchronization,TAG.time_type).filter(TAG.name == differentTags[Rcd][0]).all()[0]
        if( differentTags[Rcd][1]!=""):
            tarTagIOVs = session.query(IOV.since,IOV.payload_hash,IOV.insertion_time).filter(IOV.tag_name == differentTags[Rcd][1]).all()
            tarTagInfo = session.query(TAG.synchronization,TAG.time_type).filter(TAG.name == differentTags[Rcd][1]).all()[0]

        if(differentTags[Rcd][0]!="" and differentTags[Rcd][1]!=""):
            if(tarTagInfo[1] != refTagInfo[1]):
                print(colors.bold_red+" *** Warning *** found mismatched time type for",Rcd,"entry. \n"+differentTags[Rcd][0],"has time type",refTagInfo[1],"while",differentTags[Rcd][1],"has time type",tarTagInfo[1]+". These need to be checked by hand. \n\n"+ colors.end)

        if(args.lastIOV):

            if(sorted(differentTags).index(Rcd)==0):
                print("\n")
                print(33 * "=")
                print("|| COMPARING ONLY THE LAST IOV ||")
                print(33 * "=")
                print("\n")

            lastSinceRef = -1
            lastSinceTar = -1

            hash_lastRefTagIOV = ""
            time_lastRefTagIOV = ""

            hash_lastTarTagIOV = ""
            time_lastTarTagIOV = ""

            for i in refTagIOVs:
                if (i[0]>lastSinceRef):
                    lastSinceRef = i[0]
                    hash_lastRefTagIOV = i[1]
                    time_lastRefTagIOV = str(i[2])

            for j in tarTagIOVs:
                if (j[0]>lastSinceTar):
                    lastSinceTar = j[0]
                    hash_lastTarTagIOV = j[1]
                    time_lastTarTagIOV = str(j[2])

            if(hash_lastRefTagIOV!=hash_lastTarTagIOV):
                isDifferent = True
                text_file.write("| ="+Rcd[0]+"= ("+Rcd[1]+") | =="+differentTags[Rcd][0]+"==  | =="+differentTags[Rcd][1]+"== | | \n")
                text_file.write("|^|"+hash_lastRefTagIOV+" <br> ("+time_lastRefTagIOV+") "+ str(lastSinceRef) +" | "+hash_lastTarTagIOV+" <br> ("+time_lastTarTagIOV+") " + str(lastSinceTar)+" | ^| \n")

                if(args.isVerbose):
                    t.add_row([Rcd[0],Rcd[1],differentTags[Rcd][0],differentTags[Rcd][1],str(hash_lastRefTagIOV)+"\n"+str(time_lastRefTagIOV)+"\n"+str(lastSinceRef),str(hash_lastTarTagIOV)+"\n"+str(time_lastTarTagIOV)+"\n"+str(lastSinceTar)])
                else:
                    t.add_row([Rcd[0],Rcd[1],differentTags[Rcd][0]+"\n"+str(hash_lastRefTagIOV),differentTags[Rcd][1]+"\n"+str(hash_lastTarTagIOV)])

        else:

            ### reset all defaults

            theGoodRefIOV = -1
            theGoodTarIOV = -1
            sinceRefTagIOV = 0
            sinceTarTagIOV = 0

            RefIOVtime = datetime.datetime(1970, 1, 1, 0, 0, 0)
            TarIOVtime = datetime.datetime(1970, 1, 1, 0, 0, 0)

            theRefPayload = ""
            theTarPayload = ""
            theRefTime = ""
            theTarTime = ""

            ### loop on the reference IOV list
            for refIOV in refTagIOVs:

                ## logic for retrieving the last payload active on a given IOV
                ## - the new IOV since is less than (or equal to) the run under consideration
                ## - the payload insertion time is lower than the GT snapshot
                ## - finally, either:
                ##   - the new IOV since is larger than the last saved
                ##   - or the new IOV since is equal to the last saved, but has a more recent insertion time

                if ( (refIOV[0] <= int(args.testRunNumber)) and (refIOV[2] <= refSnap) and
                     ( (refIOV[0] > sinceRefTagIOV) or ((refIOV[0] == sinceRefTagIOV) and (refIOV[2] > RefIOVtime)) ) ):
                    sinceRefTagIOV = refIOV[0]
                    RefIOVtime = refIOV[2]
                    theGoodRefIOV = sinceRefTagIOV
                    theRefPayload = refIOV[1]
                    theRefTime = str(refIOV[2])

            ### loop on the target IOV list
            for tarIOV in tarTagIOVs:
                if ( (tarIOV[0] <= int(args.testRunNumber)) and (tarIOV[2] <= tarSnap) and
                     ( (tarIOV[0] > sinceTarTagIOV) or ((tarIOV[0] == sinceTarTagIOV) and (tarIOV[2] >= TarIOVtime)) ) ):
                    sinceTarTagIOV = tarIOV[0]
                    TarIOVtime = tarIOV[2]
                    theGoodTarIOV = sinceTarTagIOV
                    theTarPayload = tarIOV[1]
                    theTarTime = str(tarIOV[2])

            #print Rcd[0],theRefPayload,theTarPayload

            if(theRefPayload!=theTarPayload):
                isDifferent = True
                text_file.write("| ="+Rcd[0]+"= ("+Rcd[1]+") | =="+differentTags[Rcd][0]+"==  | =="+differentTags[Rcd][1]+"== |\n")
                text_file.write("|^|"+theRefPayload+" ("+theRefTime+") | "+theTarPayload+" ("+theTarTime+") |\n")

                ### determine if it is to be shown

                tokens = args.stringToMatch.split(",")
                isMatched = any(Rcd[0].find(x) != -1 for x in tokens)

                if(args.isVerbose):
                    if (args.stringToMatch=="" or isMatched):
                        t.add_row([Rcd[0],Rcd[1],differentTags[Rcd][0],differentTags[Rcd][1],str(theRefPayload)+"\n"+str(theRefTime)+"\n"+str(theGoodRefIOV),str(theTarPayload)+"\n"+str(theTarTime)+"\n"+str(theGoodTarIOV)])
                else:
                    if (args.stringToMatch=="" or isMatched):
                        t.add_row([Rcd[0],Rcd[1],differentTags[Rcd][0]+"\n"+str(theRefPayload),differentTags[Rcd][1]+"\n"+str(theTarPayload)])

    text_file.close()

    if(not isDifferent):
        if(args.isVerbose):
            t.add_row(["None","None","None","None","None","None"])
        else:
            t.add_row(["None","None","None","None"])
    print(t)


def listGTsForTag_(args):
    connection = connect(args)
    session = connection.session()
    GlobalTagMap = session.get_dbtype(conddb.GlobalTagMap)
    output_table(args,
        session.query(GlobalTagMap.global_tag_name, GlobalTagMap.tag_name, GlobalTagMap.record, GlobalTagMap.label).\
            filter(GlobalTagMap.tag_name == args.name).\
            order_by(GlobalTagMap.global_tag_name).\
            all(),
        ['GT_name', 'Tag_name', 'record', 'label'],
    )


def listGTs_(args):
    connection = connect(args)
    session = connection.session()
    GlobalTag = session.get_dbtype(conddb.GlobalTag)
    output_table(args,
        session.query(GlobalTag.name, GlobalTag.description, GlobalTag.release, GlobalTag.snapshot_time, GlobalTag.insertion_time).\
            order_by(GlobalTag.insertion_time, GlobalTag.name).\
            all(),
        ['GT_name', 'Description', 'Release', 'Snapshot_time', 'Insertion_time'],
    )


def listRuns_(args):
    connection = connect(args)
    session = connection.session()
    RunInfo = session.get_dbtype(conddb.RunInfo)
    fromRun = None
    toRun = None
    match = args.match
    limit = None
    if args.last:
        match = session.query(sqlalchemy.func.max(RunInfo.run_number)).one()[0]
    if match is None:
        fromTime = getattr(args, 'from')
        if fromTime is not None:
            fromTime = str(fromTime)
            fromRun = fromTime
            start = None
            if fromTime.isnumeric():
                if len(fromTime) >= sizeOfTimestamp:
                    start = datetime.datetime.utcfromtimestamp( int(fromTime) >> 32 )
            else:
                start = datetime.datetime.strptime(fromTime,'%Y-%m-%d %H:%M:%S')
            if start is not None:
                fromRun = session.query(sqlalchemy.func.min(RunInfo.run_number)).filter(RunInfo.end_time > start).one()[0]
                logging.debug('run lower boundary: %s (%s)'%(fromRun, start.strftime('%Y-%m-%d %H:%M:%S')))
        toTime = getattr(args, 'to')
        if toTime is not None:
            toTime = str(toTime)
            toRun = toTime
            end = None
            if toTime.isnumeric():
                if len(toTime) >= sizeOfTimestamp:
                    end = datetime.datetime.utcfromtimestamp( int(toTime) >> 32 )
            else:
                end = datetime.datetime.strptime(toTime,'%Y-%m-%d %H:%M:%S')
            if end is not None:
                toRun = session.query(sqlalchemy.func.max(RunInfo.run_number)).filter(RunInfo.start_time < end).one()[0]
                logging.debug('run upper boundary: %s (%s)' %(toRun,end.strftime('%Y-%m-%d %H:%M:%S')))
    q = session.query(RunInfo.run_number,RunInfo.start_time,RunInfo.end_time)
    sel = False
    if match is not None:
        q = q.filter(RunInfo.run_number == match)
    else:
        if fromRun is not None:
            q = q.filter(RunInfo.run_number >= fromRun)
            sel = True
        if toRun is not None:
            q = q.filter(RunInfo.run_number <= toRun)
            sel = True
        if not sel and args.limit is not None:
            limit = args.limit
            q = q.order_by(RunInfo.run_number.desc())
            q = q.limit(limit)
            q = q.from_self()
    table = q.order_by(RunInfo.run_number).all()
    if len(table)==0:
        sel = ''
        msg = 'No Run found'
        if args.match is not None:
            sel = 'matching Run=%s' %args.match
        else:
1365             fromTime = getattr(args, 'from')
1366             if fromTime is not None:
1367                 sel = "with from='%s'"%fromTime
1368             if args.to is not None:
1369                 if len(sel):
1370                     sel = sel +' and '
1371                 else:
1372                     sel = 'with '
1373                 sel = sel + "to='%s'"%args.to
1374         msg = msg + ' %s'%sel
1375         print(msg)
1376         return 1
1377     else:
1378         if limit is not None:
1379             logging.info('Run entries limited to %s'%limit)
1380         else:
1381             logging.info('Found %s Run entries.'%len(table))
1382     for i in range(len(table)):
1383         table[i] = table[i] + ( (calendar.timegm( table[i][1].utctimetuple() ) << 32), (calendar.timegm( table[i][2].utctimetuple() ) << 32) )
1384     # check whether the last run is ongoing (start_time == end_time)
1385     last_start = table[-1][1]
1386     last_end = table[-1][2]
1387     if last_start == last_end:
1388         table[-1] = (table[-1][0], table[-1][1], 'ongoing...', table[-1][3], '-')
1389     output_table(args, table, ['Run_number','Start_time','End_time','Start_IOV','End_IOV'],
1390     )
1391     return 0
1392 
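# Illustrative sketch: as the `>> 32` / `<< 32` shifts above show, Time-type
# IOV boundaries pack a UNIX timestamp (UTC seconds) into the upper 32 bits of
# the 64-bit since value. A minimal round trip (hypothetical helpers, not
# called anywhere in this tool):
def _sketch_pack_time_iov(dt):
    '''Pack a UTC datetime into a Time-type IOV since value.'''
    import calendar  # also imported at the top of this script
    return calendar.timegm(dt.utctimetuple()) << 32

def _sketch_unpack_time_iov(since):
    '''Recover the (second-truncated) UTC datetime from a Time-type since.'''
    import datetime  # also imported at the top of this script
    return datetime.datetime.utcfromtimestamp(since >> 32)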
1393 def showFcsr_(args):
1394     connection = connect(args)
1395     session = connection.session()
1396     session = conddb.getSessionOnMasterDB( session, session )
1397     run_hlt_fcsr = _get_hlt_fcsr( session, 'Run' ) 
1398     lumi_hlt_fcsr = _convert_time( session, 'Lumi', run_hlt_fcsr )
1399     run_pcl_fcsr = _get_prompt_fcsr( session, 'Run' )
1400     lumi_pcl_fcsr = _convert_time( session, 'Lumi', run_pcl_fcsr )
1401     time_converter = run_to_timestamp( session )
1402     start, stop = time_converter.convertOne( run_pcl_fcsr )
1403     time_pcl_fcsr = calendar.timegm( start.utctimetuple() ) << 32
1404     output(args,'FCSR for HLT (last Run started +1): %s ( Lumi: %s, Time: undefined )' %(run_hlt_fcsr,lumi_hlt_fcsr))
1405     output(args,'FCSR for PCL (from Tier0 service) : %s ( Lumi: %s, Time: %s [%s])' %(run_pcl_fcsr,lumi_pcl_fcsr,time_pcl_fcsr,start), newLine=False)
1406 
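# Illustrative sketch: as the output above states, the HLT FCSR is "last Run
# started + 1". Assuming _get_hlt_fcsr (defined elsewhere in this script)
# follows that definition, it can be approximated on the RunInfo table as
# (hypothetical helper, not called anywhere in this tool):
def _sketch_hlt_fcsr(session, RunInfo):
    '''Return the first condition-safe run for HLT: last started run + 1.'''
    import sqlalchemy  # also imported at the top of this script
    last_run = session.query(sqlalchemy.func.max(RunInfo.run_number)).scalar()
    return None if last_run is None else last_run + 1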
1407 def list_(args):
1408     connection = connect(args)
1409     session = connection.session()
1410     Tag = session.get_dbtype(conddb.Tag)
1411     IOV = session.get_dbtype(conddb.IOV)
1412     Payload = session.get_dbtype(conddb.Payload)
1413     GlobalTag = session.get_dbtype(conddb.GlobalTag)
1414     GlobalTagMap = session.get_dbtype(conddb.GlobalTagMap)
1415     for name in args.name:
1416         is_tag = _exists(session, Tag.name, name)
1417         if is_tag:
1418             if args.long:
1419                 _output_list_object(args, session.query(Tag).get(name))
1420 
1421             logging.info('Listing with a limit of %s IOVs, starting from the highest since. If you need to see more, please increase the limit of returned results.', args.limit)
1422 
1423             time_type = session.query(Tag.time_type).\
1424                 filter(Tag.name == name).\
1425                 scalar()
1426 
1427             sinceLabel = 'Since: Run '
1428             if time_type == conddb.TimeType.Time.value:
1429                 sinceLabel = 'Since: UTC          (timestamp)'
1430             if time_type == conddb.TimeType.Lumi.value:
1431                 sinceLabel = '  Run  : Lumi  (rawSince)'
1432 
1433             output_table(args,
1434                 session.query(IOV.since, IOV.insertion_time, IOV.payload_hash, Payload.object_type).\
1435                     join(IOV.payload).\
1436                     filter(
1437                         IOV.tag_name == name,
1438                         _inserted_before(IOV,args.snapshot),
1439                     ).\
1440                     order_by(IOV.since.desc(), IOV.insertion_time.desc()).\
1441                     limit(args.limit).\
1442                     from_self().\
1443                     order_by(IOV.since, IOV.insertion_time).\
1444                     all(),
1445                 [sinceLabel, 'Insertion Time', 'Payload', 'Object Type'],
1446                 filters = [_since_filter(time_type), None, None, None],
1447             )
1448 
1449         try:
1450             is_global_tag = _exists(session, GlobalTag.name, name)
1451             if is_global_tag:
1452                 if args.long:
1453                     _output_list_object(args, session.query(GlobalTag).get(name))
1454 
1455                 output_table(args,
1456                     session.query(GlobalTagMap.record, GlobalTagMap.label, GlobalTagMap.tag_name).\
1457                         filter(GlobalTagMap.global_tag_name == name).\
1458                         order_by(GlobalTagMap.record, GlobalTagMap.label).\
1459                         all(),
1460                     ['Record', 'Label', 'Tag'],
1461                 )
1462         except sqlalchemy.exc.OperationalError:
1463             sys.stderr.write("No table for GlobalTags found in DB.\n\n")
1464 
1465 
1466         if not is_tag and not is_global_tag:
1467             raise Exception('There is no tag or global tag named %s in the database.' % name)
1468 
1469 
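# Illustrative sketch: the listing above cuts IOVs with
# _inserted_before(IOV, args.snapshot); assuming that helper builds an
# insertion-time filter, the snapshot semantics on plain
# (since, payload_hash, insertion_time) tuples are simply (hypothetical
# helper, not called anywhere in this tool):
def _sketch_snapshot_filter(iovs, snapshot):
    '''Keep only IOVs already inserted at `snapshot`; None keeps everything.'''
    if snapshot is None:
        return list(iovs)
    return [iov for iov in iovs if iov[2] <= snapshot]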
1470 def _diff_tags(args, session1, session2, first, second):
1471     Tag1 = session1.get_dbtype(conddb.Tag)
1472     Tag2 = session2.get_dbtype(conddb.Tag)
1473     IOV1 = session1.get_dbtype(conddb.IOV)
1474     IOV2 = session2.get_dbtype(conddb.IOV)
1475     tag1 = session1.query(Tag1).get(first)
1476     tag2 = session2.query(Tag2).get(second)
1477 
1478     if args.long:
1479         _output_diff_objects(args, tag1, tag2)
1480 
1481     if tag1.time_type != tag2.time_type:
1482         output(args, 'Skipping diff of IOVs, since the time_type is different.')
1483     else:
1484         iovs1 = dict(session1.query(IOV1.since, IOV1.payload_hash).\
1485             filter(
1486                 IOV1.tag_name == first,
1487                 _inserted_before(IOV1,args.snapshot),
1488             ).\
1489             all()
1490         )
1491         iovs2 = dict(session2.query(IOV2.since, IOV2.payload_hash).\
1492             filter(
1493                 IOV2.tag_name == second,
1494                 _inserted_before(IOV2,args.snapshot),
1495             ).\
1496             all()
1497         )
1498 
1499         table = []
1500         iovs = [(x, iovs1.get(x), iovs2.get(x)) for x in sorted(set(iovs1) | set(iovs2))]
1501 
1502         # Since 1 != 2 and neither equals any payload hash,
1503         # this will trigger printing the last range [last_since, Infinity)
1504         iovs.append(('Infinity', 1, 2))
1505 
1506         prev_since, prev_payload1, prev_payload2, prev_equal = None, None, None, None
1507         for since, payload1, payload2 in iovs:
1508             if prev_since is None:
1509                 # First time
1510                 prev_equal = payload1 == payload2
1511                 prev_since = since
1512                 prev_payload1, prev_payload2 = payload1, payload2
1513                 continue
1514 
1515             # If None, the payloads are the previous one
1516             if payload1 is None:
1517                 payload1 = prev_payload1
1518             if payload2 is None:
1519                 payload2 = prev_payload2
1520 
1521             if prev_equal:
1522                 # If the previous payloads were equal and these ones
1523                 # were too, we do not print anything (and we do not update
1524                 # the prev_since). If the previous were equal but these
1525                 # are different, the equal-range has finished: we print it.
1526                 if payload1 != payload2:
1527                     if not args.short:
1528                         table.append(('[%s, %s)' % (prev_since, since), '=', '='))
1529                     prev_since = since
1530             else:
1531                 # If the previous payloads were not equal, we print them,
1532                 # since we print all the differing ranges (even if they are
1533                 # contiguous). However, we skip the case where both payloads
1534                 # are equal to the previous ones (and we do not update
1535                 # prev_since). This should not be common, since there is no
1536                 # point in having contiguous IOVs with the same payloads in
1537                 # a tag.
1538                 if payload1 != prev_payload1 or payload2 != prev_payload2:
1539                     table.append(('[%s, %s)' % (prev_since, since), _default(prev_payload1), _default(prev_payload2)))
1540                     prev_since = since
1541 
1542             prev_equal = payload1 == payload2
1543             prev_payload1, prev_payload2 = payload1, payload2
1544 
1545         output_table(args,
1546             table,
1547             ['Range', '%s Payload' % str_db_object(args.db, first), '%s Payload' % str_db_object(args.destdb, second)],
1548         )
1549 
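# Illustrative sketch: the loop above compacts the two since->payload maps
# into ranges, appending a ('Infinity', 1, 2) sentinel (unequal to each other
# and to any hash) so the final range is flushed. The same compaction on
# plain dicts, keeping only the differing ranges (hypothetical helper, not
# called anywhere in this tool):
def _sketch_diff_ranges(iovs1, iovs2):
    '''Return [(range, payload1, payload2)] where the two tags disagree;
    a since missing in one tag inherits that tag's previous payload.'''
    ranges = []
    p1 = p2 = start = d1 = d2 = None
    for since in sorted(set(iovs1) | set(iovs2)):
        p1 = iovs1.get(since, p1)
        p2 = iovs2.get(since, p2)
        if start is None:
            # open a new differing range when the payloads first disagree
            if p1 != p2:
                start, d1, d2 = since, p1, p2
        elif (p1, p2) != (d1, d2):
            # the payload pair changed: flush the open range
            ranges.append(('[%s, %s)' % (start, since), d1, d2))
            start = since if p1 != p2 else None
            d1, d2 = p1, p2
    if start is not None:
        ranges.append(('[%s, Infinity)' % start, d1, d2))
    return ranges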
1550 
1551 def diff(args):
1552     _check_same_object(args)
1553 
1554     connection1, connection2 = connect(args)
1555     session1, session2 = connection1.session(), connection2.session()
1556 
1557     if args.second is None:
1558         args.second = args.first
1559 
1560     Tag1 = session1.get_dbtype(conddb.Tag)
1561     Tag2 = session2.get_dbtype(conddb.Tag)
1562     is_tag1 = _exists(session1, Tag1.name, args.first)
1563     is_tag2 = _exists(session2, Tag2.name, args.second)
1564     if is_tag1 and is_tag2:
1565         _diff_tags(args, session1, session2, args.first, args.second)
1566 
1567     GlobalTag1 = session1.get_dbtype(conddb.GlobalTag)
1568     GlobalTag2 = session2.get_dbtype(conddb.GlobalTag)
1569     is_global_tag1 = _exists(session1, GlobalTag1.name, args.first)
1570     is_global_tag2 = _exists(session2, GlobalTag2.name, args.second)
1571     if is_global_tag1 and is_global_tag2:
1572         global_tag1 = session1.query(GlobalTag1).get(args.first)
1573         global_tag2 = session2.query(GlobalTag2).get(args.second)
1574 
1575         if args.long:
1576             _output_diff_objects(args, global_tag1, global_tag2)
1577 
1578         GlobalTagMap1 = session1.get_dbtype(conddb.GlobalTagMap)
1579         GlobalTagMap2 = session2.get_dbtype(conddb.GlobalTagMap)
1580         map1 = dict([(tuple(x[:2]), x[2]) for x in session1.query(GlobalTagMap1.record, GlobalTagMap1.label, GlobalTagMap1.tag_name).\
1581             filter(GlobalTagMap1.global_tag_name == args.first)
1582         ])
1583         map2 = dict([(tuple(x[:2]), x[2]) for x in session2.query(GlobalTagMap2.record, GlobalTagMap2.label, GlobalTagMap2.tag_name).\
1584             filter(GlobalTagMap2.global_tag_name == args.second)
1585         ])
1586 
1587         records = sorted(set(map1) | set(map2))
1588 
1589         table = []
1590         diff_tags = set([])
1591         for record in records:
1592             value1 = map1.get(record)
1593             value2 = map2.get(record)
1594 
1595             if value1 is None or value2 is None or value1 != value2:
1596                 table.append((record[0], record[1], _default(value1), _default(value2)))
1597                 diff_tags.add((value1, value2))
1598 
1599         output_table(args,
1600             table,
1601             ['Record', 'Label', '%s Tag' % str_db_object(args.db, args.first), '%s Tag' % str_db_object(args.destdb, args.second)],
1602         )
1603 
1604         if args.deep:
1605             for tag1, tag2 in diff_tags:
1606                 _diff_tags(args, session1, session2, tag1, tag2)
1607 
1608     if not (is_tag1 and is_tag2) and not (is_global_tag1 and is_global_tag2):
1609         raise Exception('There are no tag or global tag pairs named %s and %s in the database(s).' % (args.first, args.second))
1610 
1611 def convertRunToTimes( session, fromRun, toRun ):
1612 
1613     fromTime = None
1614     fromLumi = None
1615     toTime = None
1616     toLumi = None
1617     # the time we get may be a bit delayed (7-10 sec according to Salvatore) 
1618     if fromRun is not None:
1619         if fromRun == 1:
1620             fromTime = 1
1621         else:
1622             conv = run_to_timestamp( session )
1623             startTime1, stopTime1 = conv.convertOne( fromRun )
1624             fromTime =  time.mktime( startTime1.timetuple() )-15.
1625         fromLumi = fromRun<<32|0x1
1626     if toRun is not None:
1627         if toRun == 1:
1628             toTime = 1
1629         else:
1630             conv = run_to_timestamp( session )
1631             startTime2, stopTime2 = conv.convertOne( toRun )
1632             toTime = time.mktime( stopTime2.timetuple() )+15.
1633         toLumi = toRun<<32|0x1
1634 
1635     timeMap = { 'from' : {
1636                             'hash' : None,
1637                             'run'  : fromRun,
1638                             'time' : fromTime, # the time we get may be a bit delayed (7-10 sec according to Salvatore)
1639                             'lumi' : fromLumi,
1640                           },
1641                 'to'  : {
1642                             'hash' : None,
1643                             'run'  : toRun,
1644                             'time' : toTime, # the time we get may be a bit delayed (7-10 sec according to Salvatore)
1645                             'lumi' : toLumi,
1646                           } 
1647               }
1648 
1649     logging.debug("convertRunToTimes> start: %s stop %s \n    timeMap: %s " % (fromRun, toRun, str(timeMap)))
1650 
1651     return timeMap
1652 
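# Illustrative sketch: as the `fromRun<<32|0x1` expressions above show,
# Lumi-type IOV boundaries pack the run number into the upper 32 bits and the
# lumisection into the lower 32. A minimal round trip (hypothetical helpers,
# not called anywhere in this tool):
def _sketch_pack_lumi_iov(run, lumi=1):
    '''Pack (run, lumisection) into a Lumi-type IOV since value.'''
    return (run << 32) | lumi

def _sketch_unpack_lumi_iov(since):
    '''Recover (run, lumisection) from a Lumi-type IOV since value.'''
    return since >> 32, since & 0xFFFFFFFF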
1653 def _update_tag_log(session,the_tag,the_timestamp,the_action,note):
1654     # run parameters
1655     userName = pwd.getpwuid(os.getuid()).pw_name
1656     hostName = socket.getfqdn()
1657     userCommand = " ".join(sys.argv[0:])
1658     TagLog = session.get_dbtype(conddb.TagLog)
1659     session.add(TagLog(tag_name=the_tag, event_time=the_timestamp, action=the_action, user_name=userName, host_name=hostName, command=userCommand, user_text=note ))
1660 
1661 def _copy_payload( args, copyTime, session1, session2, payloadHash, payloadSerializationVersionMap=None ):
1662     Payload1 = session1.get_dbtype(conddb.Payload)
1663     Payload2 = session2.get_dbtype(conddb.Payload)
1664     ret = False
1665     streamerInfo = None
1666     if _exists(session2, Payload2.hash, payloadHash):
1667         logging.debug('Skipping copy of payload %s to %s since it already exists...', str_db_object(args.db, payloadHash), str_db_object(args.destdb, payloadHash))
1668         if payloadSerializationVersionMap is not None:
1669             q = session1.query(Payload1.streamer_info).filter(Payload1.hash == payloadHash).one()
1670             streamerInfo = q[0]
1671     else:
1672         logging.info('Copying payload %s to %s ...', str_db_object(args.db, payloadHash), str_db_object(args.destdb, payloadHash))
1673         q = session1.query(Payload1).filter(Payload1.hash == payloadHash).one()
1674         payload = _rawdict(q)
1675         payload['insertion_time'] = copyTime
1676         streamerInfo = payload['streamer_info']
1677         session2.add(Payload2(** payload))
1678         ret = True
1679     if payloadSerializationVersionMap is not None:
1680         serialization_version = serialization_metadata.get_boost_version_from_streamer_info(streamerInfo)
1681         payloadSerializationVersionMap[payloadHash] = serialization_version
1682     return ret
1683 
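# Illustrative sketch: payloads are content-addressed by their hash, which is
# what makes the copy above idempotent -- a payload is only written if its
# hash is not yet present in the destination. The pattern on plain dicts
# (hypothetical helper, not called anywhere in this tool):
def _sketch_copy_by_hash(source, destination, payload_hash):
    '''Copy source[payload_hash] into destination unless already present.
    Return True if a copy actually happened.'''
    if payload_hash in destination:
        return False
    destination[payload_hash] = source[payload_hash]
    return True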
1684 def _copy_tag(args, copyTime, session1, session2, first, second, fromIOV=None, toIOV=None, timeMap=None):
1685     ret = True
1686     Tag1 = session1.get_dbtype(conddb.Tag)
1687     Tag2 = session2.get_dbtype(conddb.Tag)
1688     # Copy the tag
1689     obj = session1.query(Tag1.name, Tag1.time_type, Tag1.object_type, Tag1.synchronization, Tag1.description, Tag1.last_validated_time, Tag1.end_of_validity, Tag1.insertion_time, Tag1.modification_time).filter(Tag1.name == first).first()
1690     tag = _rawdict_selection(obj)
1691     tag['name'] = second
1692 
1693     if session2._is_sqlite:
1694         if tag['end_of_validity'] >= maxSince:
1695             tag['end_of_validity'] = -1
1696     else:
1697         if tag['end_of_validity'] == -1 or tag['end_of_validity'] > maxSince :
1698             tag['end_of_validity'] = maxSince
1699     tag['insertion_time'] = copyTime
1700     tag['modification_time'] = copyTime
1701 
1702     if timeMap:
1703         fromIOV = timeMap['from'][tag['time_type'].lower().strip()]
1704         toIOV   = timeMap['to'][tag['time_type'].lower().strip()]
1705 
1706     if fromIOV is None:
1707         fromIOV = 1
1708 
1709     selectStr = 'from since=%s' %fromIOV
1710     if toIOV is not None:
1711         selectStr += ' to since=%s' %toIOV
1712     if args.snapshot is not None:
1713         selectStr += ' selecting insertion time < %s' %args.snapshot
1714 
1715     logging.info('Copying tag %s to %s %s', str_db_object(args.db, first), str_db_object(args.destdb, second), selectStr)
1716     query = session2.query(Tag2.time_type, Tag2.object_type, Tag2.synchronization).filter(Tag2.name == second )
1717     destExists = False
1718     destPayloadType = None
1719     destTimeType = None
1720     destSynchro = None
1721     for t in query:
1722         destExists = True
1723         t = _rawdict_selection(t)
1724         destPayloadType = t['object_type']
1725         destTimeType = t['time_type']
1726         destSynchro = t['synchronization']
1727     if args.toTimestamp:
1728         if tag['time_type'] == 'Time':
1729             logging.info('Source Tag timeType=Time. Ignoring request to convert to Timestamp')
1730             args.toTimestamp = False
1731         else:
1732             if tag['time_type'] != 'Run':
1733                 logging.error('Conversion from %s to Timestamp is not supported.' %tag['time_type'])
1734                 raise Exception("Cannot execute the copy.")
1735     if args.toRun:
1736         if tag['time_type'] == 'Run':
1737             logging.info('Source Tag timeType=Run. Ignoring request to convert to Run')
1738             args.toRun = False
1739         else:
1740             if tag['time_type'] != 'Time':
1741                 logging.error('Conversion from %s to Run is not supported.' %tag['time_type'])
1742                 raise Exception("Cannot execute the copy.")
1743     if destExists:
1744         logging.warning('Destination tag "%s" already exists.' %second )
1745         if destPayloadType != tag['object_type']:
1746             logging.error('Cannot copy iovs from tag %s (object type: %s) to tag %s (object type: %s), since the object types are different.' %(first,tag['object_type'],second,destPayloadType))
1747             raise Exception('Object type mismatch, bailing out.')
1748         destTimeTypeOk = (destTimeType == tag['time_type'])
1749         if args.toTimestamp:
1750             if destTimeType != 'Time':
1751                 logging.error('TimeType of target tag %s does not allow conversion to Time.' %destTimeType)
1752                 raise Exception("Cannot execute the copy.")
1753             else:
1754                 destTimeTypeOk = True
1755         if args.toRun:
1756             if destTimeType != 'Run':
1757                 logging.error('TimeType of target tag %s does not allow conversion to Run.' %destTimeType)
1758                 raise Exception("Cannot execute the copy.")
1759             else:
1760                 destTimeTypeOk = True
1761         if not destTimeTypeOk:
1762             logging.error('Cannot copy iovs from tag %s (time type: %s) to tag %s (time type: %s), since the time types are different.' %(first,tag['time_type'],second,destTimeType))
1763             raise Exception('Time type mismatch, bailing out.')
1764         if not args.yes:
1765             output(args, 'Confirm the update of the existing tag "%s" in %s [n]?' %(second,args.destdb), newline=False)
1766             if input().lower() not in ['y', 'yes']:
1767                 raise Exception('Aborted by the user.')
1768     else:
1769         destSynchro = 'any'
1770         if args.toTimestamp:
1771             tag['time_type'] = 'Time'
1772         if args.toRun:
1773             tag['time_type'] = 'Run'
1774         destTimeType = tag['time_type']
1775         dest = Tag2(**tag)
1776         dest.synchronization = destSynchro
1777         session2.add(dest)
1778         note = args.note 
1779         if note is None or note=='' or note==' ':
1780             note = '-'
1781         _update_tag_log(session2,second,copyTime,'New tag created.',note)
1782 
1783     IOV1 = session1.get_dbtype(conddb.IOV)
1784     IOV2 = session2.get_dbtype(conddb.IOV)
1785     # Get the closest IOV smaller than the given starting point (args.from),
1786     # since the starting point may lie between two sinces. This is not needed
1787     # for the ending point (args.to), since the last IOV of a tag always goes
1788     # up to infinity. If the starting point is before any IOV, there is no
1789     # need to cut the query anyway.
1790     prev_iov = None
1791     if fromIOV is not None:
1792         fromVal = fromIOV
1793         logging.debug("checking FROM %s of type %s for tag: %s "  % (fromIOV, tag['time_type'], str(tag['name'])) )
1794         prev_iov = session1.query(IOV1.since).\
1795             filter(
1796                 IOV1.tag_name == first,
1797                 IOV1.since <= fromVal,
1798                 _inserted_before(IOV1,args.snapshot)
1799             ).\
1800             order_by(IOV1.since.desc()).\
1801             limit(1).\
1802             scalar()
1803         logging.debug('The closest smaller IOV than the given starting one (--from %s) is %s...', fromVal, prev_iov)
1804 
1805     # Select the input IOVs
1806     query = session1.query(IOV1).filter(IOV1.tag_name == first)
1807     if prev_iov is not None:
1808         query = query.filter(IOV1.since >= prev_iov)
1809     if toIOV is not None:
1810         query = query.filter(IOV1.since <= toIOV)
1811     query = query.filter(_inserted_before(IOV1,args.snapshot))
1812     iovs = {}
1813     hashes = set()
1814     payloadSerializationVersionMap = None
1815     if session2.is_oracle:
1816         payloadSerializationVersionMap = {}
1817     if not args.o2oTest:
1818         query = query.order_by(IOV1.since, IOV1.insertion_time.desc())
1819         for iov in query:
1820             iov = _rawdict(iov)
1821 
1822             # For the first IOV of the tag we need to use the starting point given
1823             # by the user instead of the one coming from the source tag; unless
1824             # the starting point was before any IOV: in that case there is no
1825             # payload up to the first IOV, so we use the one from the source tag.
1826             # Note that we need to replace it for every insertion time, since
1827             # the primary key is (since, insertion_time).
1828             if prev_iov is not None and iov['since'] == prev_iov:
1829                 iov['since'] = fromIOV
1830             since = iov['since']
1831 
1832             if since not in iovs:
1833                 # for a given since, only the most recent insertion is added
1834                 iovs[since] = iov['payload_hash']
1835             else:
1836                 logging.warning('Skipping older iov with since %s...', since)
1837         sourceIovSize = len(iovs)
1838         logging.info('Selected %s source iov(s)' %sourceIovSize)
1839         if not args.nosynchro:
1840             # synchronize lower boundary when required
1841             logging.info('Destination tag synchronization is %s' %destSynchro)
1842             policy_type = _get_synchro_policy( destSynchro )
1843             synchro_policy = policy_type( session1, session2, destTimeType, second )
1844             ret, iovs = synchro_policy.validate( iovs )
1845         if args.toTimestamp:
1846             converter = run_to_timestamp( conddb.getSessionOnMasterDB( session1, session2 ) )
1847             iovs = converter.convertIovs( iovs )
1848         if args.toRun:
1849             converter = timestamp_to_run( conddb.getSessionOnMasterDB( session1, session2 ) )
1850             iovs = converter.convertIovs( iovs )            
1851         # making the list of the payloads to export...         
1852         for since in iovs.keys():
1853             hashes.add( iovs[since] )
1854 
1855         logging.debug('%s iov(s) to copy with %s payload(s)' %(len(iovs),len(hashes)))
1856     else:
1857         maxTime = _get_maxtime_for_boost_version( session1, tag['time_type'], cond2xml.boost_version_for_this_release())
1858         logging.info('Max time for boost version %s is %s'%(cond2xml.boost_version_for_this_release(),maxTime))
1859         query = query.order_by(IOV1.since.desc(), IOV1.insertion_time.desc())
1860         lastIov = None
1861         prevIovSince = None
1862         targetIovSince = None
1863         targetIovPayload = None
1864         for iov in query:
1865             iov = _rawdict(iov)
1866             since = iov['since'] 
1867             if lastIov is None:
1868                 lastIov = since
1869             else:
1870                 if lastIov != since:
1871                     if targetIovSince is None:
1872                         targetIovSince = since
1873                     if since < maxTime: 
1874                         targetIovPayload = iov['payload_hash']
1875                         prevIovSince = since
1876                         break
1877         iovs[prevIovSince]=targetIovPayload
1878         iovs[targetIovSince]=targetIovPayload
1879         hashes.add(targetIovPayload)
1880     logfun = logging.info 
1881     if len(iovs)==0:
1882         logfun = logging.warning
1883     logfun('Found %s iovs and %s referenced payloads to copy.',len(iovs), len(hashes))
1884     # Copy the payloads referenced in the selected iovs
1885     np = 0
1886     for h in hashes:
1887         if _copy_payload( args, copyTime, session1, session2, h, payloadSerializationVersionMap ):
1888             np += 1
1889     if np != 0:
1890         logging.info('%s payload(s) copied.',np)
1891     # Calculate whether extra iovs are needed for the override mode (their payloads have already been copied)
1892     extraiovs = {}
1893     if args.override:
1894         # the interval to be overridden is defined by the new iov set boundaries,
1895         # or by the user-provided boundaries, when available
1896         l_b = min(iovs)
1897         h_b = max(iovs)
1898         if fromIOV is not None:
1899             l_b = fromIOV
1900         if toIOV is not None:
1901             h_b = toIOV
1902         query = session2.query(IOV2).filter(IOV2.tag_name == second)
1903         query = query.filter(IOV2.since >= l_b).filter(IOV2.since <= h_b)
1904         query = query.order_by(IOV2.since, IOV2.insertion_time.desc())
1905         for iov in query:
1906             iov = _rawdict(iov)
1907             since = iov['since']
1908             if since not in extraiovs.keys() and since not in iovs.keys():
1909                 for newSince in sorted(iovs.keys(),reverse=True):
1910                     if newSince < since:
1911                         extraiovs[since] = iovs[newSince]
1912                         break
1913 
1914     # re-assemble the two iov sets
1915     if len(extraiovs):
1916         logging.info('Adding %s extra iovs for overriding the existing ones with the payloads from the new iovs...' %len(extraiovs))
1917     if args.override and len(extraiovs)==0:
1918         logging.info('No extra iovs required for overriding the existing ones with the new ones.')
1919     iovs.update(extraiovs)
1921 
1922     # Copy the set of IOVs collected        
1923     session2.merge(Tag2(name=second,modification_time=copyTime))
1924     minIov = None
1925     if payloadSerializationVersionMap is not None:
1926         BoostRunMap = session2.get_dbtype(conddb.BoostRunMap)
1927         q = session2.query(BoostRunMap).order_by(BoostRunMap.run_number)
1928         boost_run_map = []
1929         for r in q:
1930             r = _rawdict(r)
1931             boost_run_map.append( (r['run_number'],r['run_start_time'],r['boost_version']) )
1932         TagMetadata = session2.get_dbtype(conddb.TagMetadata)
1933         q = session2.query(TagMetadata.min_serialization_v,TagMetadata.min_since).filter(TagMetadata.tag_name == second )
1934         tagBoostVersion = None
1935         for r in q:
1936             tagBoostVersion = r[0]
1937             minIov = r[1]
1938             break
1939         currentTagBoostVersion = tagBoostVersion
1940         if len(iovs)>0 and destExists and currentTagBoostVersion is None:
1941             raise Exception('No information found about the destination tag boost version. Cannot proceed with the update.')
1942         logging.info('Destination tag boost version is %s' %currentTagBoostVersion )
1943     niovs = 0
1944     for k,v in iovs.items():
1945         logging.debug('Copying IOV %s -> %s...', k, v)
1946         session2.add(IOV2(tag_name=second,since=k,insertion_time=copyTime,payload_hash=v))
1947         niovs += 1
1948         if payloadSerializationVersionMap is not None:
1949             if v in payloadSerializationVersionMap.keys():
1950                 tagBoostVersion = serialization_metadata.do_update_tag_boost_version(tagBoostVersion,minIov,payloadSerializationVersionMap[v], k, destTimeType, boost_run_map )
1951         if minIov is None or k<minIov:
1952             minIov = k
1953     if niovs != 0:
1954         logging.info('%s iov(s) copied.',niovs)
1955         merge = False
1956         if payloadSerializationVersionMap is not None and tagBoostVersion is not None:
1957             if currentTagBoostVersion is not None: 
1958                 if currentTagBoostVersion != tagBoostVersion:
1959                     if serialization_metadata.cmp_boost_version( currentTagBoostVersion, tagBoostVersion )<0:
1960                         if destSynchro not in ['any','validation']:
1961                             raise Exception('Cannot update existing tag %s, since the minimum serialization version %s is not compatible with the combined boost version of the payloads to add (%s)' %(second,currentTagBoostVersion,tagBoostVersion))
1962             merge = True
1963         if merge:
1964             session2.merge(TagMetadata(tag_name=second,min_serialization_v=tagBoostVersion,min_since=minIov,modification_time=copyTime))
1965             logging.info('Destination Tag boost Version set to %s ( was %s )' %(tagBoostVersion,currentTagBoostVersion) )
1966     return ret, niovs
1967 
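# Illustrative sketch: in override mode, _copy_tag above adds one extra IOV
# for every existing destination since inside the copied boundaries that is
# not among the new sinces, pointing it at the payload of the closest smaller
# new since. The same computation on plain data (hypothetical helper, not
# called anywhere in this tool):
def _sketch_override_extras(new_iovs, existing_sinces, low, high):
    '''Return {since: payload_hash} for destination sinces in [low, high]
    that must be overridden with the closest smaller new payload.'''
    extras = {}
    for since in existing_sinces:
        if since < low or since > high or since in new_iovs:
            continue
        smaller = [s for s in new_iovs if s < since]
        if smaller:
            extras[since] = new_iovs[max(smaller)]
    return extras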
1968 def copy(args):
1969     _check_same_object(args)
1970 
1971     connection1, connection2 = connect(args, read_only=False)
1972     session1, session2 = connection1.session(), connection2.session()
1973 
1974     args.type, args.first = _identify_object(session1, args.type, args.first)
1975 
1976     copyTime = datetime.datetime.utcnow()
1977 
1978     if args.type == 'payload':
1979         if args.second is None:
1980             args.second = args.first
1981         elif args.first != args.second:
1982             raise Exception('Cannot modify the name (hash) of a payload while copying, since the hash has to match the data.')
1983 
1984         if _copy_payload( args, copyTime, session1, session2, args.first ):
1985             _confirm_changes(args)
1986             session2.commit()
1987 
1988     elif args.type == 'tag':
1989         if args.second is None:
1990             args.second = args.first
1991         if args.force and args.yes:
1992             if args.note is None or args.note=='' or args.note==' ':
1993                 raise Exception('Cannot run in force edit mode without providing a non-empty editing note.')
1994         if args.o2oTest:
1995             if args.to is not None or getattr(args, 'from') is not None or args.snapshot is not None or args.override or args.nosynchro or args.toTimestamp:
1996                 raise Exception('Cannot execute the copy for the o2o test with the options from, to, override, nosynchro, snapshot or toTimestamp.')
1997 
1998         try:
1999             ret, niovs = _copy_tag(args, copyTime, session1, session2, args.first, args.second, getattr(args, 'from'), args.to)
2000             if niovs!=0:
2001                 _confirm_changes(args)
2002                 note = args.note
2003                 if args.force and args.note is None:
2004                     note = _get_user_note(args,'Force edit mode requires an editing note to be provided: ')
2005                 if note is None or note=='' or note==' ':
2006                     note = '-'
2007                 _update_tag_log(session2,args.second,copyTime,'%s iov(s) inserted.' %niovs,note)
2008                 session2.commit()
2009                 logging.info('Changes committed.')
2010         except Exception as e:
2011             session2.rollback()
2012             logging.error('Changes rolled back.')
2013             raise e
2014 
2015         return 1*( not ret )
2016 
2017 
2018     elif args.type == 'gt':
2019         if args.second is None:
2020             args.second = args.first
2021 
2022         # 'from' is a keyword!
2023         session = conddb.getSessionOnMasterDB( session1, session2 )
2024         timeMap = convertRunToTimes(session, getattr(args, 'from'), args.to)
2025 
2026         logging.info('Copying global tag %s to %s ...', str_db_object(args.db, args.first), str_db_object(args.destdb, args.second))
2027 
2028         GlobalTag1 = session1.get_dbtype(conddb.GlobalTag)
2029         GlobalTag2 = session2.get_dbtype(conddb.GlobalTag)
2030         GlobalTagMap1 = session1.get_dbtype(conddb.GlobalTagMap)
2031         GlobalTagMap2 = session2.get_dbtype(conddb.GlobalTagMap)
2032         # Prepare the copy
2033         global_tag = _rawdict(session1.query(GlobalTag1).get(args.first))
2034         global_tag['name'] = args.second
2035         global_tag['validity'] = 0 # XXX: SQLite does not work with long ints...
2036         if args.snapshot is None:
2037             args.snapshot = str(global_tag['snapshot_time'].strftime("%Y-%m-%d %H:%M:%S"))
2038         else:
2039             global_tag['snapshot_time'] = _parse_timestamp(args.snapshot)
2040         if _exists(session2, GlobalTag2.name, args.second):
2041             raise Exception('A GlobalTag named "%s" already exists in %s' %(args.second, args.destdb))   
2042 
2043         # Copy the tags of the global tag
2044         logging.debug('Creating query for tag %s filter %s ...', GlobalTagMap1.tag_name, args.first)
2045         query = session1.query(GlobalTagMap1.tag_name).filter(GlobalTagMap1.global_tag_name == args.first).distinct()
2046         copyTime = datetime.datetime.utcnow()
2047         for (tag, ) in query:
2048             logging.debug('Copying tag %s to %s for GT %s ...', str_db_object(args.db, tag), str_db_object(args.destdb, tag), str_db_object(args.destdb, args.second))
2049             Tag2 = session2.get_dbtype(conddb.Tag)
2050             if _exists(session2, Tag2.name, tag ):
2051                 logging.warning('Skipping copy of tag %s to %s since it already exists... *The tags may differ in content*', str_db_object(args.db, tag), str_db_object(args.destdb, tag))
2052             else:
2053                 logging.debug('going to copy tag %s to %s ... ', str_db_object(args.db, tag), str_db_object(args.destdb, tag))
2054                 _copy_tag(args, copyTime, session1, session2, tag, tag, timeMap=timeMap)
2055 
2056         # Copy the global tag
2057         global_tag2 = GlobalTag2(**global_tag)
2058         copyTime = datetime.datetime.utcnow()
2059         global_tag2.snapshot_time = copyTime
2060         global_tag2.insertion_time = copyTime
2061         session2.add(global_tag2)
2062         # Copy the map of the global tag
2063         query = session1.query(GlobalTagMap1).filter(GlobalTagMap1.global_tag_name == args.first)
2064         for map_ in query:
2065             logging.debug('Copying global tag map %s -> %s ...', str_record(map_.record, map_.label), map_.tag_name)
2066             map_ = _rawdict(map_)
2067             map_['global_tag_name'] = args.second
2068             session2.add(GlobalTagMap2(**map_))
2069 
2070         _confirm_changes(args)
2071         session2.commit()
2072         return 0
2073 
2074 
2075 def edit(args):
2076 
2077     global colors
2078     colors.noColors()
2079 
2080     connection = connect(args, read_only=False)
2081     session = connection.session()
2082 
2083     args.type, name = _identify_object(session, args.type, args.name)
2084 
2085     if args.editor is None:
2086         editor = _get_editor(args)
2087 
2088     with tempfile.NamedTemporaryFile(mode='r+') as tempfd:
2089 
2090         if args.type == 'payload':
2091             raise Exception('TODO')
2092 
2093             Payload = session.get_dbtype(conddb.Payload)
2094 
2095             properties = session.query(Payload.object_type, Payload.version, Payload.insertion_time).\
2096                 filter(Payload.hash == name).\
2097                 one()
2098             columns = properties.keys()
2099 
2100             tempfd.write('''# Editing payload %s
2101 #
2102 # You can modify rows/lines after the headers. Then, save the file and
2103 # quit the editor. The changes will be recognized and you will be asked
2104 # for confirmation before the changes are written into the database.
2105 #
2106 # The order of the rows does not matter. Whitespace is not important.
2107 # Lines starting with # are ignored.
2108 #
2109 # You can edit the insertion time -- however, note that if these conditions
2110 # are to be uploaded to an official database, the times will be anyway
2111 # replaced with the actual insertion times.
2112 
2113 ''' % name)
2114 
2115             table = zip(columns, properties)
2116             output_table(args,
2117                 table,
2118                 ['Property', 'Value'],
2119                 output_file = tempfd,
2120             )
2121 
2122             _run_editor(editor, tempfd)
2123 
2124             new_table = []
2125             in_table = False
2126             for line in tempfd.readlines():
2127                 line = line.strip()
2128                 if len(line) == 0 or line.startswith('#'):
2129                     continue
2130 
2131                 if not in_table:
2132                     if all([x == '-' for x in line.replace(' ','')]):
2133                         in_table = True
2134                     continue
2135 
2136                 key, value = line.split(None, 1)
2137 
2138                 if key == 'insertion_time':
2139                     value = _parse_timestamp(value)
2140 
2141                 new_table.append((key, value))
2142 
2143             table = set(table)
2144             new_table = set(new_table)
2145 
2146             added = new_table - table
2147             deleted = table - new_table
2148 
2149             if len(added) == 0 and len(deleted) == 0:
2150                 raise Exception('No changes found.')
2151 
2152             values = dict(new_table)
2153             if set(values.keys()) != set(columns):
2154                 raise Exception('It is not possible to modify the name of the properties or add/remove them. Please only modify the values.')
2155 
2156             changes = [('+' if x in added else '-', x[0], x[1]) for x in added | deleted]
2157             output_table(args,
2158                 sorted(changes, key=lambda x: (x[1], 0 if x[0] == '-' else 1)),
2159                 ['', 'Property', 'Value'],
2160                 no_first_header = True,
2161             )
2162 
2163             _confirm_changes(args)
2164 
2165             payload = session.query(Payload).\
2166                 filter(Payload.hash == name).\
2167                 update(dict(added | deleted))
2168             session.commit()
2169 
2170 
2171         elif args.type == 'tag':
2172             if args.header:
2173                 Tag = session.get_dbtype(conddb.Tag)
2174                 table = session.query(Tag.description,Tag.synchronization,Tag.end_of_validity).\
2175                     filter(Tag.name == name).\
2176                     all()
2177                 table = [ (str(x[0].strip()),str(x[1]),str(x[2])) for x in table ] 
2178 
2179                 output_table( args,
2180                     table,
2181                     ['Description','Synchronization','End of Validity'],
2182                     output_file = tempfd, 
2183                     no_max_length = True
2184                 )
2185 
2186                 tempfd.write('''
2187 # Editing tag %s
2188 #
2189 # You can add, remove or modify rows/lines after the headers.
2190 # Then, save the file and quit the editor.
2191 # The changes will be recognized and you will be asked for confirmation
2192 # before the changes are written into the database.
2193 #
2194 # Whitespace is not important.
2195 # Lines starting with # are ignored.
2196 ''' % name)
2197                 _run_editor(editor, tempfd)
2198                 new_table = []
2199                 editRe = re.compile(r'^(.*)\s+([a-z]+)\s+([-]?\d+)\s*$')
2200                 for index, line in enumerate(tempfd.readlines()):
2201                     if index in {0, 1}:
2202                         continue
2203 
2204                     line = line.strip()
2205 
2206                     if len(line) == 0 or line.startswith('#'):
2207                         continue
2208 
2209                     editMatch = editRe.match(line)
2210                     if editMatch:
2211                         description,synchronization,endOfValidity = editMatch.groups()
2212                         if synchronization not in conddb.synch_list:
2213                             raise Exception('Invalid Synchronization value set: "%s"' %synchronization )
2214                         if int(endOfValidity)< -1:
2215                             raise Exception('Invalid End Of Validity set: "%s"' %endOfValidity ) 
2216                     else:
2217                         raise Exception('Each line must contain the Description, Synchronization and End Of Validity fields in the required format.')
2218 
2219                     new_table.append((description.strip(),synchronization,endOfValidity))
2220 
2221                 header = table[0]
2222                 new_header = new_table[0]
2223 
2224                 if new_table == table:
2225                     logging.info('No changes found.')
2226                     session.rollback()
2227                     return
2228 
2229                 changes = []
2230                 changes.append(('+',new_header[0],new_header[1],new_header[2]))
2231                 changes.append(('-',header[0],header[1],header[2]))
2232                 output_table(args,
2233                              sorted(changes, key=lambda x: (0 if x[0] == '-' else 1)),
2234                              ['', 'Description', 'Synchronization', 'End Of Validity'],
2235                              no_first_header = True,
2236                              )
2237 
2238                 _confirm_changes(args)
2239                 note = '-'
2240                 if args.force:
2241                     note = _get_user_note(args,'Please provide an editing note: ')
2242                 action = ''
2243                 if header[0] != new_header[0]:
2244                     action += 'Description updated'
2245                 if header[1] != new_header[1]:
2246                     # validate the synchro requested( based on the tag IOV content )
2247                     if new_header[1] ==  'mc':
2248                         if not mc_validate( session, name ):
2249                             return
2250                     if len(action): action += ', '
2251                     action += 'Synchronization changed'
2252                 if header[2] != new_header[2]:
2253                     if len(action): action += ', '
2254                     action += 'End Of Validity changed'
2255                 if len(action): action += '.'
2256 
2257                 updatedHeader = Tag(name=name,description=new_header[0],synchronization=new_header[1],end_of_validity=new_header[2],modification_time=datetime.datetime.utcnow()) 
2258                 session.merge(updatedHeader)
2259                 _update_tag_log(session,name,datetime.datetime.utcnow(),action,note)
2260                 session.commit()
2261                 logging.info('Tag header updated. Action(s): %s' %action)
2262                 return
2263 
2264             IOV = session.get_dbtype(conddb.IOV)
2265             Payload = session.get_dbtype(conddb.Payload)
2266             table = session.query(IOV.since, IOV.insertion_time, IOV.payload_hash).\
2267                 filter(IOV.tag_name == name).\
2268                 order_by(IOV.since, IOV.insertion_time).\
2269                 all()
2270 
2271             output_table(args,
2272                 table,
2273                 ['Since', 'Insertion Time', 'Payload'],
2274                 output_file = tempfd,
2275             )
2276 
2277             tempfd.write('''
2278 # Editing tag %s
2279 #
2280 # You can add, remove or modify rows/lines after the headers.
2281 # Then, save the file and quit the editor.
2282 # The changes will be recognized and you will be asked for confirmation
2283 # before the changes are written into the database.
2284 #
2285 # The order of the rows does not matter. Whitespace is not important.
2286 # Lines starting with # are ignored.
2287 #
2288 # Payload hashes do not need to be full -- a prefix is enough if unique.
2289 # The program will then find the full hash.
2290 #
2291 # You can edit insertion times -- however, note that if these conditions
2292 # are to be uploaded to an official database, the times will be anyway
2293 # replaced with the actual insertion times. The format must be
2294 # one of the following: '2013-01-20', '2013-01-20 10:11:12' or
2295 # '2013-01-20 10:11:12.123123'.
2296 # If the desired insertion time is the time of the command execution,
2297 # you can simply write a '-' in the corresponding column.
2298 #
2299 # Suggestion: open another terminal to copy the payloads you need.
2300 ''' % name)
2301 
2302             _run_editor(editor, tempfd)
2303 
2304             new_table = []
2305             for index, line in enumerate(tempfd.readlines()):
2306                 if index in {0, 1}:
2307                     continue
2308 
2309                 line = line.strip()
2310 
2311                 if len(line) == 0 or line.startswith('#'):
2312                     continue
2313 
2314                 splitted = line.split()
2315                 if len(splitted) == 3:
2316                     since, insertion_timestamp, payload = splitted
2317                 elif len(splitted) == 4:
2318                     since, insertion_date, insertion_time, payload = splitted
2319                     insertion_timestamp = '%s %s' % (insertion_date, insertion_time)
2320                 else:
2321                     raise Exception('Each line must contain the since, timestamp and payload fields in the required format.')
2322 
2323                 # check payload hash format and existence...
2324                 if len(payload) > conddb.hash_length:
2325                     raise Exception('Payload hash "%s" too long.' %payload )
2326                 elif len(payload) < conddb.hash_length:
2327                     raise Exception('Payload hash "%s" too short.' %payload )
2328                 if not _exists(session, Payload.hash, payload):
2329                     raise Exception('Payload hash "%s" not found in the database' %payload )
2330 
2331                 if insertion_timestamp == '-':
2332                     insertion_time = datetime.datetime.utcnow()
2333                 else:
2334                     insertion_time = _parse_timestamp(insertion_timestamp)
2335                 new_table.append((int(since), insertion_time, payload))
2336 
2337             table = set(table)
2338             new_table = set(new_table)
2339 
2340             added = new_table - table
2341             deleted = table - new_table
2342 
2343             sizeNew = len(new_table)
2344             sizeUnique = len( set([(x[0],x[1]) for x in new_table]) )
2345             if connection.is_official:
2346                 added = set([(x[0],'-',x[2]) for x in added])       
2347                 sizeNew = len(added) + len(table)
2348                 sizeUnique = len( set([x[0] for x in added]) ) + len(table)
2349                 if len(deleted):
2350                     logging.info("The %s deleted entries won't be removed." %len(deleted))
2351                     deleted = set()
2352 
2353             if len(added) == 0 and len(deleted) == 0:
2354                 logging.info('No changes found.')
2355                 session.rollback()
2356                 return
2357 
2358             changes = [('+' if x in added else '-', x[0], x[1], x[2]) for x in added | deleted]
2359             output_table(args,
2360                 sorted(changes, key=lambda x: (x[1], 0 if x[0] == '-' else 1)),
2361                 ['', 'Since', 'Insertion Time', 'Payload'],
2362                 no_first_header = True,
2363             )
2364 
2365             logging.debug('size of modified table: %s - unique (since+timestamp) entries: %s' %(sizeNew,sizeUnique))
2366             if sizeNew != sizeUnique:
2367                 raise Exception('Duplicated since.')
2368 
2369             _confirm_changes(args)
2370             note = '-'
2371             if args.force:
2372                 note = _get_user_note(args,'Please provide an editing note: ')
2373             action = ''
2374             if len(added):
2375                 action += '%s iov(s) inserted' %len(added)
2376             if len(deleted):
2377                 if len(action): action += ', '
2378                 action += '%s iov(s) deleted' %len(deleted)
2379 
2380             # Use session.delete() instead of bulk delete to let SQLAlchemy use UPDATE
2381             # (since we may disable DELETE in Oracle for the tables)
2382             for since, insertion_time, _ in deleted:
2383                 session.query(IOV).filter(IOV.tag_name==name, IOV.since==since, IOV.insertion_time==insertion_time).delete()
2384                 #session.delete(session.query(IOV).filter(IOV.tag_name==name, IOV.since==since, IOV.insertion_time==insertion_time))
2385             for since, insertion_time, payload in added:
2386                 if connection.is_official:
2387                     insertion_time = datetime.datetime.utcnow()
2388                 session.add(IOV(tag_name=name, since=since, insertion_time=insertion_time, payload_hash=payload))
2389             _update_tag_log(session,name,datetime.datetime.utcnow(),action,note)
2390             session.commit()
2391 
2392         elif args.type == 'gt':
2393             GlobalTagMap = session.get_dbtype(conddb.GlobalTagMap)
2394             table = session.query(GlobalTagMap.record, GlobalTagMap.label, GlobalTagMap.tag_name).\
2395                 filter(GlobalTagMap.global_tag_name == name).\
2396                 order_by(GlobalTagMap.record, GlobalTagMap.label).\
2397                 all()
2398 
2399             output_table(args,
2400                 table,
2401                 ['Record', 'Label', 'Tag'],
2402                 output_file = tempfd,
2403             )
2404 
2405             tempfd.write('''
2406 # Editing global tag %s
2407 #
2408 # You can add, remove or modify rows/lines after the headers.
2409 # Then, save the file and quit the editor.
2410 # The changes will be recognized and you will be asked for confirmation
2411 # before the changes are written into the database.
2412 #
2413 # To mark records without label, use a single '%s' character.
2414 #
2415 # The order of the rows does not matter. Whitespace is not important.
2416 # Lines starting with # are ignored.
2417 ''' % (name, conddb.empty_label))

            _run_editor(editor, tempfd)

            new_table = []
            for index, line in enumerate(tempfd.readlines()):
                # Skip the first two lines (the table header written by output_table above)
                if index in {0, 1}:
                    continue

                line = line.strip()

                if len(line) == 0 or line.startswith('#'):
                    continue

                record, label, tag = line.split()

                new_table.append((record, label, tag))

            if len(new_table) != len(set([(x[0], x[1]) for x in new_table])):
                raise Exception('Duplicate (record, label) pair.')

            table = set(table)
            new_table = set(new_table)

            added = new_table - table
            deleted = table - new_table

            if len(added) == 0 and len(deleted) == 0:
                raise Exception('No changes found.')

            changes = [('+' if x in added else '-', x[0], x[1], x[2]) for x in added | deleted]
            output_table(args,
                sorted(changes, key=lambda x: (x[1], 0 if x[0] == '-' else 1)),
                ['', 'Record', 'Label', 'Tag'],
                no_first_header = True,
            )

            _confirm_changes(args)

            # Use session.delete() instead of bulk delete to let SQLAlchemy use UPDATE
            # (since we may disable DELETE in Oracle for the tables)
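            # GlobalTagMap rows are addressed by their composite primary key
            # (global_tag_name, record, label), which is what the Query.get()
            # call below relies on (as read from the ORM mapping in conddblib).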
            for record, label, _ in deleted:
                session.delete(session.query(GlobalTagMap).get((name, record, label)))
            for record, label, tag in added:
                session.add(GlobalTagMap(global_tag_name=name, record=record, label=label, tag_name=tag))
            session.commit()


def delete(args):
    connection = connect(args, read_only=False)
    session = connection.session()

    args.type, name = _identify_object(session, args.type, args.name)

    if args.type == 'payload':
        output_table(args,
            [('-', name, )],
            ['', 'Payload'],
            no_first_header = True,
        )

        _confirm_changes(args)

        Payload = session.get_dbtype(conddb.Payload)
        session.query(Payload).\
            filter(Payload.hash == name).\
            delete()
        session.commit()

    elif args.type == 'tag':
        output_table(args,
            [('-', name, )],
            ['', 'Tag'],
            no_first_header = True,
        )

        _confirm_changes(args)

        Tag = session.get_dbtype(conddb.Tag)
        IOV = session.get_dbtype(conddb.IOV)
        TagLog = session.get_dbtype(conddb.TagLog)
        TagMetadata = session.get_dbtype(conddb.TagMetadata)
        session.query(IOV).\
            filter(IOV.tag_name == name).\
            delete()
        session.query(TagLog).\
            filter(TagLog.tag_name == name).\
            delete()
        session.query(TagMetadata).\
            filter(TagMetadata.tag_name == name).\
            delete()
        session.query(Tag).\
            filter(Tag.name == name).\
            delete()
        session.commit()

    elif args.type == 'gt':
        output_table(args,
            [('-', name, )],
            ['', 'Global Tag'],
            no_first_header = True,
        )

        _confirm_changes(args)

        GlobalTag = session.get_dbtype(conddb.GlobalTag)
        GlobalTagMap = session.get_dbtype(conddb.GlobalTagMap)
        session.query(GlobalTagMap).\
            filter(GlobalTagMap.global_tag_name == name).\
            delete()
        session.query(GlobalTag).\
            filter(GlobalTag.name == name).\
            delete()
        session.commit()

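# Illustrative invocations of delete() above via the CLI (the SQLite-file form
# of --db is an assumption; the tag name and hash prefix are made up):
#   conddb --db conditions.db delete MyTag_v1 --type tag
#   conddb --db conditions.db delete 0123abcd --type payload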

def dump(args):
    connection = connect(args)
    session = connection.session()
    IOV = session.get_dbtype(conddb.IOV)
    GlobalTag = session.get_dbtype(conddb.GlobalTag)
    GlobalTagMap = session.get_dbtype(conddb.GlobalTagMap)

    args.type, name = _identify_object(session, args.type, args.name)

    xmlProcessor = None
    if args.format == 'xml':
        xmlProcessor = cond2xml.CondXmlProcessor(conddb)

    if args.destfile is not None: # the destination file for the XML dump is truncated here and closed automatically
        with open(args.destfile, 'w'):
            pass

    if args.type == 'payload':
        if args.format == 'xml':
            xmlProcessor.payload2xml(session, name, args.destfile)
        else:
            _dump_payload(session, name, args.loadonly)

    elif args.type == 'tag':
        for payload, in session.query(IOV.payload_hash).\
            filter(IOV.tag_name == name).\
            distinct():
            if args.format == 'xml':
                xmlProcessor.payload2xml(session, payload, args.destfile)
            else:
                _dump_payload(session, payload, args.loadonly)

    elif args.type == 'gt' and _exists(session, GlobalTag.name, name) is not None:
        for payload, in session.query(IOV.payload_hash).\
            filter(GlobalTagMap.global_tag_name == name, IOV.tag_name == GlobalTagMap.tag_name).\
            distinct():
            if args.format == 'xml':
                xmlProcessor.payload2xml(session, payload, args.destfile)
            else:
                _dump_payload(session, payload, args.loadonly)

    if xmlProcessor: del xmlProcessor

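# Example (illustrative, with a made-up tag name): dump all payloads referenced
# by a tag as XML into a single file:
#   conddb dump MyTag_v1 --type tag --format xml --destfile payloads.xml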
def toLumi_(args):
    lumiTime = conddb_time.to_lumi_time( int(args.run), int(args.lumi_id) )
    if args.quiet:
        print('%s' % lumiTime)
    else:
        output(args, 'Lumi timetype for run=%s, lumisection id=%s: %s' % (args.run, args.lumi_id, lumiTime))

def fromLumi_(args):
    run, lumisection_id = conddb_time.from_lumi_time( int(args.lumiTime) )
    if args.quiet:
        print('%s,%s' % (run, lumisection_id))
    else:
        output(args, 'For Lumi timetype %s: run=%s, lumisection id=%s' % (args.lumiTime, run, lumisection_id))


def toTimestamp_( args ):
    ts = conddb_time.string_to_timestamp( args.datetime )
    if args.quiet:
        print('%s' % ts)
    else:
        output(args, "Time timetype for string '%s': %s" % (args.datetime, ts))

def fromTimestamp_( args ):
    sdt = conddb_time.string_from_timestamp( int(args.timestamp) )
    if args.quiet:
        print('%s' % sdt)
    else:
        output(args, "String date time for timestamp %s: '%s'" % (args.timestamp, sdt))

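# Worked example for the conversion helpers above, assuming the usual CMS
# encoding in conddb_time (run number in the upper 32 bits of the 64-bit
# value, lumisection id in the lower 32 bits):
#   to_lumi_time(316569, 25)  == (316569 << 32) + 25
#   from_lumi_time((316569 << 32) + 25) == (316569, 25)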
def showProtections( args ):
    connection = connect(args, False, True, True)
    session = connection.session()
    Tag = session.get_dbtype(conddb.Tag)
    result = session.query(Tag.name, Tag.protection_code).filter(Tag.name == args.tag).all()
    get_authorizations = False
    table = []
    for res in result:
        protection_fl = '-'
        protection_str = 'not protected'
        if res[1] != tag_db_no_protection_code:
            get_authorizations = True
            write_flag = res[1] & tag_db_write_access_code
            lock_flag = res[1] & tag_db_lock_access_code
            protection_fl = ''
            protection_str = ''
            if write_flag != 0:
                protection_fl += db_access_code_map[tag_db_write_access_code]
                protection_str += 'write protected'
            if lock_flag != 0:
                protection_fl += db_access_code_map[tag_db_lock_access_code]
                if protection_str != '':
                    protection_str += ','
                protection_str += 'locked'
        table.append((args.tag, protection_fl, protection_str))
        # only the first matching row is needed (tag names are unique)
        break
    if len(table) == 0:
        logging.error('Tag %s does not exist.' % args.tag)
        return 1
    output_table(args, table, ['Tag', 'Flags', 'Protection'])
    if get_authorizations:
        TagAuthorization = session.get_dbtype(conddb.TagAuthorization)
        results = session.query(TagAuthorization.access_type, TagAuthorization.credential, TagAuthorization.credential_type).filter(TagAuthorization.tag_name == args.tag).all()
        table = []
        for r in results:
            table.append((db_access_code_map[r[0]], db_credential_type_map[r[2]][0], r[1]))
        output_table(args, table, ['Access Type', 'Credential Type', 'Credential'])
    return 0

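# The protection_code read above is a bitmask built from the access codes
# defined at the top of this file: for example, a code of 10 combines the
# write (2) and lock (8) bits, shown as flags 'WL' and described as
# 'write protected,locked'.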
def setProtection( args ):
    if not args.remove and args.accesstype == db_access_code_map[tag_db_lock_access_code]:
        logging.error("Lock can't be set by command line tool action.")
        return 2
    connection = connect(args, False, True, True)
    session = connection.session()
    Tag = session.get_dbtype(conddb.Tag)
    result = session.query(Tag.name, Tag.protection_code).filter(Tag.name == args.tag).all()
    full_protection_code = None
    for res in result:
        full_protection_code = res[1]
    if full_protection_code is None:
        logging.error('Tag %s does not exist.' % args.tag)
        return 1
    input_access_code = None
    for k in db_access_code_map.keys():
        if db_access_code_map[k] == args.accesstype:
            input_access_code = k
    new_protection_code = 0
    action = 'Access restriction altered.'
    note = ''
    if args.remove:
        TagAuthorization = session.get_dbtype(conddb.TagAuthorization)
        query = session.query(TagAuthorization).filter(TagAuthorization.tag_name == args.tag)
        query = query.filter(TagAuthorization.access_type == input_access_code)
        # keep the other protection bits; only the removed access type is cleared
        for code in db_access_code_map.keys():
            if code != input_access_code:
                new_protection_code |= (full_protection_code & code)
        note = '%s restrictions removed' % args.accesstype
        query.delete()
    else:
        new_protection_code = full_protection_code | input_access_code
        note = '%s restriction set' % args.accesstype
    session.merge(Tag(name=args.tag, protection_code=new_protection_code))
    _update_tag_log(session, args.tag, datetime.datetime.utcnow(), action, note)
    session.commit()
    logging.info(note)
    logging.info('Tag header updated. Action(s): %s' % action)
    return 0

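# Example (illustrative, with a made-up tag name): set and later remove a
# write restriction:
#   conddb setProtection MyTag_v1 --accesstype W
#   conddb setProtection MyTag_v1 --accesstype W --remove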
def setPermission( args ):
    if args.credential is not None and args.credentialtype is None:
        logging.error('Specified option "credential" requires option "credentialtype".')
        return 1
    if args.accesstype == db_access_code_map[tag_db_lock_access_code]:
        logging.error("Lock ownership can't be altered.")
        return 1
    connection = connect(args, False, True, True)
    session = connection.session()
    Tag = session.get_dbtype(conddb.Tag)
    result = session.query(Tag.name, Tag.protection_code).filter(Tag.name == args.tag).all()
    full_protection_code = None
    for res in result:
        full_protection_code = res[1]
    if full_protection_code is None:
        logging.error('Tag %s does not exist.' % args.tag)
        return 2
    if full_protection_code == 0:
        logging.error('Tag %s is not protected.' % args.tag)
        return 3
    TagAuthorization = session.get_dbtype(conddb.TagAuthorization)
    input_access_code = None
    for k in db_access_code_map.keys():
        if db_access_code_map[k] == args.accesstype:
            input_access_code = k
    if full_protection_code & input_access_code == 0:
        logging.error('Tag %s is not protected for access %s.' % (args.tag, args.accesstype))
        return 3
    input_credential = None
    input_credential_code = None
    input_credential_type = None
    if args.credential is not None:
        input_credential = args.credential
        for k in db_credential_type_map.keys():
            if db_credential_type_map[k][1] == args.credentialtype:
                input_credential_code = k
                input_credential_type = db_credential_type_map[k][0]
    action = 'Access permission altered.'
    note = ''
    if args.remove:
        query = session.query(TagAuthorization).filter(TagAuthorization.tag_name == args.tag)
        query = query.filter(TagAuthorization.access_type == input_access_code)
        if input_credential is not None:
            query = query.filter(TagAuthorization.credential == input_credential)
            query = query.filter(TagAuthorization.credential_type == input_credential_code)
            note = '%s permission for %s "%s" removed' % (args.accesstype, input_credential_type, args.credential)
        else:
            note = '%s restrictions removed' % args.accesstype
        query.delete()
    else:
        if input_credential is not None:
            session.add(TagAuthorization(tag_name=args.tag, access_type=input_access_code,
                                         credential=input_credential, credential_type=input_credential_code))
            note = '%s permission for %s "%s" added' % (args.accesstype, input_credential_type, args.credential)
        else:
            note = '%s restriction set' % args.accesstype
    _update_tag_log(session, args.tag, datetime.datetime.utcnow(), action, note)
    session.commit()
    logging.info(note)
    logging.info('Tag access permissions updated. Action(s): %s' % action)
    return 0

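# Example (illustrative, with made-up tag and user names): grant write
# permission to a CMS user credential ('U' is the cms-user flag from
# db_credential_type_map):
#   conddb setPermission MyTag_v1 --accesstype W --credential jdoe --credentialtype U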
def query_object(args):
    connection = connect(args)
    session = connection.session()
    if not args.tag and not args.global_tag and not args.payload:
        logging.error('The object type for the query has not been specified.')
        return -1
    if (args.tag and args.global_tag) or (args.tag and args.payload) or (args.global_tag and args.payload):
        logging.error('Only one object type for the query can be specified.')
        return -1
    found = 1
    if args.tag:
        Tag = session.get_dbtype(conddb.Tag)
        query = session.query(Tag.description, Tag.time_type, Tag.object_type, Tag.synchronization, Tag.insertion_time, Tag.modification_time).filter(Tag.name == args.unique_key).all()
        table = []
        for res in query:
            found = 0
            table.append((args.unique_key, res[0], res[1], res[2], res[3], res[4], res[5]))
        if found == 0:
            output_table(args, table, ['Tag name', 'Description', 'Time Type', 'Object Type', 'Synchronization', 'Insertion Time', 'Modification Time'])
        else:
            logging.info('Tag %s has not been found.' % args.unique_key)
    if args.global_tag:
        GlobalTag = session.get_dbtype(conddb.GlobalTag)
        query = session.query(GlobalTag.description, GlobalTag.release, GlobalTag.insertion_time, GlobalTag.snapshot_time).filter(GlobalTag.name == args.unique_key).all()
        table = []
        for res in query:
            found = 0
            table.append((args.unique_key, res[0], res[1], res[2], res[3]))
        if found == 0:
            output_table(args, table, ['Global Tag name', 'Description', 'Release', 'Insertion Time', 'Snapshot Time'])
        else:
            logging.info('Global Tag %s has not been found.' % args.unique_key)
    if args.payload:
        Payload = session.get_dbtype(conddb.Payload)
        query = session.query(Payload.object_type, Payload.streamer_info, Payload.insertion_time).filter(Payload.hash == args.unique_key).all()
        table = []
        header = None
        for res in query:
            found = 0
            streamer_info = res[1]
            row = (args.unique_key, res[0],)
            header = ['Payload hash', 'Object type']
            if streamer_info != b'0':
                # the streamer info may carry serialization metadata as JSON
                payload_md = json.loads(streamer_info)
                for k in sorted(payload_md.keys()):
                    header.append(k)
                    row += (payload_md[k],)
            else:
                row += (' ',)
                header.append('Streamer Info')
            row += (res[2],)
            table.append(row)
            header.append('Insertion Time')
        if found == 0:
            output_table(args, table, header)
        else:
            logging.info('Payload %s has not been found.' % args.unique_key)

    return found

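# Example (illustrative): query the metadata of a single payload by its full
# SHA1 hash (the hash below is made up):
#   conddb query 0123456789abcdef0123456789abcdef01234567 --payload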

def main():
    '''Entry point.
    '''

    global colors

    if len(sys.argv) == 1:
        class Args(object):
            quiet = False
            nocolors = False
        colors = Colors(Args())
        help(Args())
        sys.exit(2)

    parser = argparse.ArgumentParser(description='CMS Condition DB command-line tool. For general help (manual page), use the help subcommand.', epilog='Contact help: %s' % conddb.contact_help)
    parser.add_argument('--db', '-d', default='pro', help='Database to run the command on. Run the help subcommand for more information: conddb help')
    parser.add_argument('--verbose', '-v', action='count', help='Verbosity level. -v prints debugging information of this tool, like tracebacks in case of errors. -vv prints, in addition, all SQL statements issued. -vvv prints, in addition, all results returned by queries.')
    parser.add_argument('--quiet', '-q', action='store_true', help='Quiet mode. Disables all standard output.')
    parser.add_argument('--yes', '-y', action='store_true', help='Acknowledged mode. Disables confirmation prompts before writes to the database.')
    parser.add_argument('--nocolors', action='store_true', help='Disable colors. This is automatically done when the output is connected to a pipe (e.g. "conddb ... | less").')
    parser.add_argument('--editor', '-e', default=None, help='Editor to use. Default: the content of the EDITOR environment variable.')
    parser.add_argument('--force', action='store_true', help='Force edit in official databases. Only meant for experts.')
    parser.add_argument('--noLimit', action='store_true', help='Ignore the limit setting for the subcommand. This may generate a _lot_ of output and put some load on the DB, so please use with care.')
    parser.add_argument('--authPath', '-a', default=None, help='Path of the authentication .netrc file. Default: the content of the COND_AUTH_PATH environment variable, when specified.')
    parser_subparsers = parser.add_subparsers(title='Available subcommands')

    parser_help = parser_subparsers.add_parser('help', description='General help (manual page).')
    parser_help.set_defaults(func=help)

    parser_init = parser_subparsers.add_parser('init', description='Initializes a CMS Condition DB, i.e. creates tables, sequences, indexes, etc. if they do not exist.')
    parser_init.set_defaults(func=init)

    parser_status = parser_subparsers.add_parser('status', description='Shows a summary of the status of a database.')
    parser_status.add_argument('--limit', '-L', type=int, default=5, help='Limit on the number of results per type of object. The returned results are the latest N inserted into the database.')
    parser_status.set_defaults(func=status)

    parser_list = parser_subparsers.add_parser('list', description='Lists the contents of objects. For a tag, a list of IOVs. For a global tag, a mapping tag <-> record. If there is ambiguity, all are listed.')
    parser_list.add_argument('name', nargs='+', help="Name of the object. This can be a tag's name or a global tag's name. It must exactly match -- if needed, use the search command first to look for it.")
    parser_list.add_argument('--long', '-l', action='store_true', help='Long output. Lists the properties (e.g. description) of the objects as well (not only their content).')
    parser_list.add_argument('--snapshot', '-T', default=None, help="Snapshot time. If provided, the output will represent the state of the IOVs inserted into database up to the given time. The format of the string must be one of the following: '2013-01-20', '2013-01-20 10:11:12' or '2013-01-20 10:11:12.123123'.")
    parser_list.add_argument('--limit', '-L', type=int, default=500, help='Limit on the number of IOVs returned. The returned results are the latest N IOVs. Only applies when listing tags.')
    parser_list.set_defaults(func=list_)

    parser_listTags = parser_subparsers.add_parser('listTags', description='Lists all the Tags available in the DB.')
    parser_listTags.set_defaults(func=listTags_)

    parser_listParentTags = parser_subparsers.add_parser('listParentTags', description='Lists all the Tags available in the DB containing the matched payload hash.')
    parser_listParentTags.add_argument('hash_name', help="Payload hash to match.")
    parser_listParentTags.set_defaults(func=listParentTags_)

    parser_diffGlobalTagsAtRun = parser_subparsers.add_parser('diffGlobalTagsAtRun', description='Diffs two global tags, showing only the differences relevant for a given run number.')
    parser_diffGlobalTagsAtRun.add_argument('--last', '-L', dest='lastIOV', action='store_true', default=False, help='Diff the Global Tags at the last open IOV.')
    parser_diffGlobalTagsAtRun.add_argument('--reference', '-R', dest='refGT', help="Reference Global Tag.")
    parser_diffGlobalTagsAtRun.add_argument('--target', '-T', dest='tarGT', help="Target Global Tag.")
    parser_diffGlobalTagsAtRun.add_argument('--run', '-r', dest='testRunNumber', default=-1, help="Target run to compare.")
    parser_diffGlobalTagsAtRun.add_argument('--verbose', '-v', dest='isVerbose', action='store_true', default=False, help='Returns more info.')
    parser_diffGlobalTagsAtRun.add_argument('--match', '-m', dest='stringToMatch', action='store', default='', help='Print only matching records.')
    parser_diffGlobalTagsAtRun.set_defaults(func=diffGlobalTagsAtRun_)

    parser_listGTsForTag = parser_subparsers.add_parser('listGTsForTag', description='Lists the GTs which contain a given tag.')
    parser_listGTsForTag.add_argument('name', help="Name of the tag.")
    parser_listGTsForTag.set_defaults(func=listGTsForTag_)

    parser_listGTs = parser_subparsers.add_parser('listGTs', description='Lists the GTs available in the DB.')
    parser_listGTs.set_defaults(func=listGTs_)

    parser_listRuns = parser_subparsers.add_parser('listRuns', description='Lists all the Runs available in the DB, possibly applying the optional search criteria.')
    parser_listRuns.add_argument('--limit', '-L', type=int, default=50, help='Limit on the number of Run entries returned. The returned results are the latest N Runs. Only applies when no selection is specified.')
    parser_listRuns.add_argument('--from', '-f', type=str, help='Select items from this "Time" onwards. Supported "Time" formats: Run, encoded TimeStamp, string TimeStamp (format: YYYY-mm-dd hh24:MM:SS).')
    parser_listRuns.add_argument('--to', '-t', type=str, help='Ignore items from this "Time" onwards. Supported "Time" formats: Run, encoded TimeStamp, string TimeStamp (format: YYYY-mm-dd hh24:MM:SS).')
    parser_listRuns.add_argument('--match', '-m', type=int, help='Search for an exact match with this Run.')
    parser_listRuns.add_argument('--last', '-l', action='store_true', help='Select the last available Run entry.')
    parser_listRuns.set_defaults(func=listRuns_)

    parser_diff = parser_subparsers.add_parser('diff', description='Compares the contents of two objects. For tags, their IOVs are compared to determine which ranges have different payloads. For global tags, their tag names are compared. Both objects must be of the same type. If there is more than one valid pair (ambiguity), all diffs are listed.')
    parser_diff.add_argument('first', help="Name of the first object (i.e. source, old). This can be a tag's name or a global tag's name. It must exactly match -- if needed, use the search command first to look for it.")
    parser_diff.add_argument('second', nargs='?', default=None, help='Name of the second object (i.e. destination, new). Ditto. Default: same as the first object (i.e. useful to compare the same object in different databases).')
    parser_diff.add_argument('--destdb', '-d', default=None, help='Database of the second object (destination database). Same values allowed as for --db. Default: same as the first database.')
    parser_diff.add_argument('--short', '-s', action='store_true', help='Short diff. In tag diffs, do not include the ranges where IOVs are equal (they do not add information, but they make the output more readable).')
    parser_diff.add_argument('--long', '-l', action='store_true', help='Long output. Compares the properties (e.g. description) of the objects as well (not only their content).')
    parser_diff.add_argument('--deep', '-D', action='store_true', help='Deep diff. In global tag diffs, if two tag names are different for the same record, it compares the tags themselves with a tag diff (different tags are probably similar in a global tag, e.g. two versions of a tag).')
    parser_diff.add_argument('--payload', '-p', action='store_true', help='TODO: Payload diff. In a tag diff or a --deep global tag diff, for each range where a payload is different, the payloads are compared via a diff on the dump of both payloads.')
    parser_diff.add_argument('--snapshot', '-T', default=None, help="Snapshot time. If provided, the output will represent the state of the IOVs inserted into database up to the given time. The format of the string must be one of the following: '2013-01-20', '2013-01-20 10:11:12' or '2013-01-20 10:11:12.123123'.")
    parser_diff.set_defaults(func=diff)

    parser_search = parser_subparsers.add_parser('search', description='Searches various types of objects matching a case-insensitive string: tags (by name, object type and description), payloads (by SHA1 hash), global tags (by name, release and description) and records (by name, label and object type). The returned list is limited, by default, to 10 per type of object.')
    parser_search.add_argument('string', help='Search string. Case-insensitive.')
    parser_search.add_argument('--regexp', '-r', action='store_true', help='Regexp mode. The search string is a regular expression.')
    parser_search.add_argument('--limit', '-L', type=int, default=100, help='Limit on the number of results per type of object. The returned results are the latest N inserted into the database.')
    parser_search.set_defaults(func=search)

    parser_copy = parser_subparsers.add_parser('copy', description='Copies objects between databases. For tags, their dependent payloads are copied automatically if they do not exist in the destination database yet (or skipped if they already do). For global tags, their dependent tags are copied automatically if they do not exist in the destination database yet. However, if they exist, a warning is printed (TODO: do not print the warning if they do not differ).')
    parser_copy.add_argument('first', help="Name of the first object (i.e. source, old). This can be a tag's name, a global tag's name or a payload's SHA1 hexadecimal hash (or a prefix if unique). It must exactly match -- if needed, use the search command first to look for it.")
    parser_copy.add_argument('second', nargs='?', default=None, help='Name of the second object (i.e. destination, new). Ditto. Default: same as the first object (i.e. useful to keep the name when copying an object between databases). Note that for payloads the names must be equal (since it is the SHA1 hash of the data) -- therefore, when copying payloads you should omit this parameter to take the default (same name).')
    parser_copy.add_argument('--destdb', '-d', default=None, help='Database of the second object (destination database). Same values allowed as for --db. Default: same as the first database.')
    parser_copy.add_argument('--from', '-f', type=int, help='From IOV: copy only from this IOV onwards. Only valid when copying tags.')
    parser_copy.add_argument('--to', '-t', type=int, help='To IOV: copy only up to this IOV. Only valid when copying tags.')
    parser_copy.add_argument('--type', default=None, choices=['tag', 'gt', 'payload'], help='Type of the objects. Use it if there is ambiguity (should be really rare).')
    parser_copy.add_argument('--note', '-n', help='Editing note.')
    parser_copy.add_argument('--override', '-o', action='store_true', help='Override the existing iovs for the interval covered by the new iovs.')
    parser_copy.add_argument('--snapshot', '-s', help="Timestamp of the snapshot to consider for the source iovs. The format of the string must be one of the following: '2013-01-20', '2013-01-20 10:11:12' or '2013-01-20 10:11:12.123123'.")
    parser_copy.add_argument('--o2oTest', action='store_true', help='Special copy for o2o tests. Copies the second-to-last iov of the source tag, to allow running the o2o procedure to add the last iov. It cannot be combined with the from, to, override and snapshot options.')
    parser_copy.add_argument('--synchronize', action='store_true', help='No effect, since the synchronization is applied by default for tags. The option is kept for backward compatibility.')
    parser_copy.add_argument('--nosynchro', action='store_true', help='For tags, disable the synchronization of the destination iovs. No effect for other object types.')
    parser_copy.add_argument('--toTimestamp', action='store_true', help='For tags, triggers the conversion from run-based iovs to timestamp-based iovs. It returns an error if the input tag is not run-based, or if an existing destination tag is not timestamp-based. Not supported with synchronization.')
    parser_copy.add_argument('--toRun', action='store_true', help='For tags, triggers the conversion from timestamp-based iovs to run-based iovs. When multiple timestamped IOVs are found matching the same run, only the first is considered. IOVs with a timestamp not matching any run are skipped. It returns an error if the input tag is not timestamp-based, or if an existing destination tag is not run-based. Not supported with synchronization.')
    parser_copy.set_defaults(func=copy)

    parser_edit = parser_subparsers.add_parser('edit', description='Edits an object. Opens up your $EDITOR with prefilled text about the object. There you can modify the data. Save the file and quit the editor. The modified data will be written into the database. E.g. for a tag, its attributes and the list of IOVs/payloads appear and are modifiable.')
    parser_edit.add_argument('name', help="Name of the object. This can be a tag's name (edits its attributes and its IOVs/payloads), a global tag's name (edits its attributes and its mapping records <-> tags) or a payload's SHA1 hexadecimal hash (or a prefix if unique; TODO: edits its attributes). It must exactly match -- if needed, use the search command first to look for it.")
    parser_edit.add_argument('--header', default=False, action='store_true', help='Edit the header attributes of the object.')
    parser_edit.add_argument('--type', default=None, choices=['tag', 'gt', 'payload'], help='Type of the object. Use it if there is ambiguity (should be really rare).')
    parser_edit.set_defaults(func=edit)

    parser_delete = parser_subparsers.add_parser('delete', description='Deletes an object. Fails if the object is referenced somewhere else in the database.')
    parser_delete.add_argument('name', help="Name of the object. This can be a tag's name, a global tag's name or a payload's SHA1 hexadecimal hash (or a prefix if unique). It must exactly match -- if needed, use the search command first to look for it.")
    parser_delete.add_argument('--deep', '-D', action='store_true', help='TODO: Deep delete. In tag deletes, deletes its payloads (fails if they are used in other tags). In global tag deletes, deletes its tags (fails if they are used by another global tag).')
    parser_delete.add_argument('--type', default=None, choices=['tag', 'gt', 'payload'], help='Type of the object. Use it if there is ambiguity (should be really rare).')
    parser_delete.set_defaults(func=delete)

    parser_dump = parser_subparsers.add_parser('dump', description='Dumps deserialized payloads, using the current CMSSW release.')
    parser_dump.add_argument('name', help="Name of the object. This can be a payload's SHA1 hexadecimal hash (or a prefix if unique), a tag's name (all payloads referenced in the tag will be dumped) or a global tag's name (all payloads referenced in the global tag will be dumped).")
    parser_dump.add_argument('--loadonly', action='store_true', help='Load only: do not dump, only load the (deserialized) payload in memory -- useful for testing the load of an entire global tag with the current CMSSW release.')
    parser_dump.add_argument('--type', default=None, choices=['payload', 'tag', 'gt'], help='Type of the object. Use it if there is ambiguity (should be really rare).')
    parser_dump.add_argument('--format', default="xml", choices=['xml', 'raw'], help='Output format. Choice between XML and raw hexdump.')
    parser_dump.add_argument('--destfile', '-d', default=None, help="Destination file for the dump.")
    parser_dump.set_defaults(func=dump)

    parser_showFcsr = parser_subparsers.add_parser('showFCSR', description='Dumps the FCSR values for hlt and pcl.')
    parser_showFcsr.set_defaults(func=showFcsr_)

    parser_toLumi = parser_subparsers.add_parser('toLumi', description='Generates the Lumi timetype from the run number and lumisection id.')
    parser_toLumi.add_argument('run', help="Run id")
    parser_toLumi.add_argument('lumi_id', help="Lumisection id")
    parser_toLumi.set_defaults(func=toLumi_)

    parser_fromLumi = parser_subparsers.add_parser('fromLumi', description='Decodes the Lumi timetype, extracting the run number and lumisection id.')
    parser_fromLumi.add_argument('lumiTime', help="The Lumi timetype value")
    parser_fromLumi.set_defaults(func=fromLumi_)

    parser_toTimestamp = parser_subparsers.add_parser('toTimestamp', description='Generates the Time timetype from the date string.')
    parser_toTimestamp.add_argument('datetime', help="The string representing the date time, in the format 'Y-m-d H:M:S.f'")
    parser_toTimestamp.set_defaults(func=toTimestamp_)

    parser_fromTimestamp = parser_subparsers.add_parser('fromTimestamp', description='Decodes the Time timetype, extracting the date time string.')
    parser_fromTimestamp.add_argument('timestamp', help="The Time timetype value")
    parser_fromTimestamp.set_defaults(func=fromTimestamp_)

    parser_showProtections = parser_subparsers.add_parser('showProtections', description='Displays the access restrictions and permissions for the specified tag.')
    parser_showProtections.add_argument('tag', help="The tag name")
    parser_showProtections.set_defaults(func=showProtections)

    parser_setProtection = parser_subparsers.add_parser('setProtection', description='Sets an access restriction for the specified tag.')
    parser_setProtection.add_argument('tag', help="The tag name")
    parser_setProtection.add_argument('--accesstype', '-a', choices=[db_access_code_map[tag_db_write_access_code], db_access_code_map[tag_db_lock_access_code]], required=True, help='The access type of the protection (flag).')
    parser_setProtection.add_argument('--remove', '-r', action='store_true', help='Remove the specified protection.')
    parser_setProtection.set_defaults(func=setProtection)

    parser_setPermission = parser_subparsers.add_parser('setPermission', description='Sets an access permission for the specified tag.')
    parser_setPermission.add_argument('tag', help="The tag name")
    parser_setPermission.add_argument('--accesstype', '-a', choices=[db_access_code_map[tag_db_write_access_code], db_access_code_map[tag_db_lock_access_code]], required=True, help='The access type of the permission (flag).')
    parser_setPermission.add_argument('--credential', '-c', help='The credential entitled with the permission.')
    parser_setPermission.add_argument('--credentialtype', '-t', choices=[db_credential_type_map[db_key_credential_type_code][1], db_credential_type_map[db_cmsuser_credential_type_code][1]], help='The type of the credential provided. Required for the "credential" option.')
    parser_setPermission.add_argument('--remove', '-r', action='store_true', help='Remove the specified permission.')
    parser_setPermission.set_defaults(func=setPermission)

    parser_query = parser_subparsers.add_parser('query', description='Queries the database for information about condition metadata.')
    parser_query.add_argument('unique_key', help="The unique key for the query")
    parser_query.add_argument('--tag', '-t', action='store_true', help="Query a tag object")
    parser_query.add_argument('--global_tag', '-gt', action='store_true', help="Query a global tag object")
    parser_query.add_argument('--payload', '-p', action='store_true', help="Query a payload object")
    parser_query.set_defaults(func=query_object)

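    # Every subparser above registers its handler via set_defaults(func=...),
    # so the dispatch at the end of main() reduces to a single args.func(args)
    # call.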
    args = parser.parse_args()
    if not hasattr(args, 'func'):
        # In Python 3, subparsers are optional by default: without this guard,
        # running with options but no subcommand would raise an AttributeError.
        parser.error('no subcommand given')
    logging.basicConfig(
        format = '[%(asctime)s] %(levelname)s: %(message)s',
        level = logging.DEBUG if args.verbose is not None and args.verbose >= 1 else logging.INFO,
    )

    colors = Colors(args)

    if args.noLimit:
        args.limit = None
        logging.info("noLimit specified, removing the limit on returned results")

    if args.verbose is not None and args.verbose >= 1:
        # Include the traceback
        return args.func(args)
    else:
        # Only one error line
        try:
            sys.exit(args.func(args))
        except Exception as e:
            logging.error(e)
            sys.exit(1)


if __name__ == '__main__':
    main()