#!/usr/bin/env python3
import argparse
import bisect
import datetime
import os   # needed by connect() below when checking for an existing sqlite file
import sys
import logging
import sqlalchemy
import copy
import h5py
import numpy as np
import multiprocessing as mp
from collections import OrderedDict

from CondCore.CondHDF5ESSource.hdf5Writer import writeH5File
import CondCore.Utilities.conddblib as conddb

# Global tags hold a list of Tags
# Tags give the
#      record name,
#      list of data products,
#      list of IOVs,
#      list of payloads per IOV
# Payloads give
#      a payload name and
#      the serialized data for a data product
#      the type of data for the data product

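# A minimal sketch of how the classes below expose that hierarchy (the
# global-tag name is hypothetical; everything else is defined in this file):
#
#     >>> gt = DBGlobalTag(args, session, 'MyGlobalTag')
#     >>> for tag in gt.tags():                      # one DBTag per record
#     ...     for product in tag.dataProducts():     # DBDataProduct
#     ...         for payload in product.payloads(): # DBPayload
#     ...             payload.name(), payload.actualType()
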
#from conddb
def _inserted_before(_IOV, timestamp):
    '''To be used inside filter().
    '''

    if timestamp is None:
        # XXX: Returning None does not get optimized (skipped) by SQLAlchemy,
        #      and returning True does not work in Oracle (generates "and 1"
        #      which breaks Oracle but not SQLite). For the moment just use
        #      this dummy condition.
        return sqlalchemy.literal(True) == sqlalchemy.literal(True)

    return _IOV.insertion_time <= _parse_timestamp(timestamp)

def _parse_timestamp(timestamp):
    try:
        return datetime.datetime.strptime(timestamp, '%Y-%m-%d %H:%M:%S.%f')
    except ValueError:
        pass

    try:
        return datetime.datetime.strptime(timestamp, '%Y-%m-%d %H:%M:%S')
    except ValueError:
        pass

    try:
        return datetime.datetime.strptime(timestamp, '%Y-%m-%d')
    except ValueError:
        pass

    raise Exception("Could not parse timestamp '%s'" % timestamp)

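# For example, the three accepted formats (matching the --snapshot help
# text in main() below):
#
#     >>> _parse_timestamp('2013-01-20 10:11:12.123123')
#     datetime.datetime(2013, 1, 20, 10, 11, 12, 123123)
#     >>> _parse_timestamp('2013-01-20 10:11:12')
#     datetime.datetime(2013, 1, 20, 10, 11, 12)
#     >>> _parse_timestamp('2013-01-20')
#     datetime.datetime(2013, 1, 20, 0, 0)
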
def _exists(session, primary_key, value):
    ret = None
    try:
        ret = session.query(primary_key).\
            filter(primary_key == value).\
            count() != 0
    except sqlalchemy.exc.OperationalError:
        pass

    return ret

def _connect(db, init, read_only, args, as_admin=False):

    logging.debug('Preparing connection to %s ...', db)

    url = conddb.make_url(db, read_only)
    pretty_url = url
    if url.drivername == 'oracle+frontier':
        ws = url.host.rsplit('%2F')
        if ws is not None:
            pretty_url = 'frontier://%s/%s' % (ws[-1], url.database)
    connTo = '%s [%s]' % (db, pretty_url)
    logging.info('Connecting to %s', connTo)
    logging.debug('DB url: %s', url)
    verbose = 0
    if args.verbose is not None:
        verbose = args.verbose - 1
    connection = conddb.connect(url, args.authPath, verbose, as_admin)

    if not read_only:
        if connection.is_read_only:
            raise Exception('Impossible to edit a read-only database.')

        if connection.is_official:
            if args.force:
                if not args.yes:
                    logging.warning('You are going to edit an official database. If you are not one of the Offline DB experts but have access to the password for other reasons, please stop now.')
            else:
                raise Exception('Editing official databases is forbidden. Use the official DropBox to upload conditions. If you need a special intervention on the database, see the contact help: %s' % conddb.contact_help)
        # for sqlite we trigger the implicit schema creation
        if url.drivername == 'sqlite':
            if init:
                connection.init()
    if not connection._is_valid:
        raise Exception('No valid schema found in the database.')

    return connection


def connect(args, init=False, read_only=True, as_admin=False):
    args.force = args.force if 'force' in dir(args) else False

    if 'destdb' in args:
        if args.destdb is None:
            args.destdb = args.db
        if args.db == args.destdb:
            conn1 = _connect(args.destdb, init, read_only, args)
            return conn1, conn1
        conn1 = _connect(args.db, init, True, args)
        conn2url = conddb.make_url(args.destdb, False)
        if conn2url.drivername == 'sqlite' and not os.path.exists(args.destdb):
            init = True
        conn2 = _connect(args.destdb, init, False, args)
        return conn1, conn2

    return _connect(args.db, init, read_only, args, as_admin)


def _high(n):
    return int(n) >> 32

def _low(n):
    return int(n) & 0xffffffff
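
# Both helpers unpack the packed 64-bit "since" used for run/lumi and
# time based IOVs; for example (hypothetical run/lumi values):
#
#     >>> since = (316766 << 32) | 123   # run 316766, lumi section 123
#     >>> _high(since)
#     316766
#     >>> _low(since)
#     123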

#end from conddb

#based on conddb._dump_payload
def get_payloads_objtype_data(session, payloads):

    Payload = session.get_dbtype(conddb.Payload)
    table = session.query(Payload.hash, Payload.object_type, Payload.data).\
        filter(Payload.hash.in_(payloads)).order_by(Payload.hash).all()
    return table

def external_process_get_payloads_objtype_data(queue, args, payloads):
    connection = connect(args)
    session = connection.session()
    queue.put(get_payloads_objtype_data(session, payloads))
#local

class IOVSyncValue(object):
    def __init__(self, high, low):
        self.high = high
        self.low = low

class DBPayloadIterator(object):
    def __init__(self, args, payloads):
        self._args = args
        self._payloadHashs = payloads
        self._payloadCache = {}
        self._payloadHashsIndex = 0
        self._cacheChunking = 1
        self._safeChunkingSize = 1
        self._nextIndex = 0
    def __iter__(self):
        return self
    def __next__(self):
        if self._nextIndex >= len(self._payloadHashs):
            raise StopIteration()
        payloadHash = self._payloadHashs[self._nextIndex]
        if not self._payloadCache:
            # Refill the cache with the next chunk of payloads. The query is
            # run in a separate process so that the memory used while reading
            # (possibly very large) payloads is returned to the OS when the
            # process exits.
            self._cacheChunking = self._safeChunkingSize
            queue = mp.Queue()
            p = mp.Process(target=external_process_get_payloads_objtype_data, args=(queue, self._args, self._payloadHashs[self._payloadHashsIndex:self._payloadHashsIndex+self._cacheChunking]))
            p.start()
            table = queue.get()
            p.join()
            self._payloadHashsIndex += self._cacheChunking
            for r in table:
                self._payloadCache[r[0]] = (r[1], r[2])
        objtype, data = self._payloadCache[payloadHash]
        if len(data) < 1000000:
            # payloads turned out to be small (< 1 MB), so it is safe to
            # fetch them in bigger chunks from now on
            self._safeChunkingSize = 10
        del self._payloadCache[payloadHash]
        self._nextIndex += 1
        return DBPayload(payloadHash, canonicalProductName(objtype.encode("ascii")), data)

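# A DBPayloadIterator is normally obtained from DBDataProduct.payloads()
# below; a sketch of its use (the product object is assumed to exist):
#
#     >>> for payload in product.payloads():
#     ...     payload.name(), payload.actualType(), len(payload.data())
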
class DBPayload(object):
    def __init__(self, hash_, type_, data):
        self._hash = hash_
        self._type = type_
        self._data = data
    def name(self):
        return self._hash
    def actualType(self):
        return self._type
    def data(self):
        return self._data

class DBDataProduct(object):
    def __init__(self, ctype, label, payloadHashes, args):
        self._type = ctype
        self._label = label
        self._payloadHashs = payloadHashes
        self._args = args

    def name(self):
        return self._type + "@" + self._label
    def objtype(self):
        return self._type
    def payloads(self):
        return DBPayloadIterator(self._args, self._payloadHashs)

class DBTag(object):
    def __init__(self, session, args, record, productNtags):
        self._session = session
        self._args = args
        self._snapshot = args.snapshot
        self._record = record
        self._productLabels = [x[0] for x in productNtags]
        self._dbtags = [x[1] for x in productNtags]
        self._type = None
        self._iovsNPayloads = None
        self._time_type = None
    def record(self):
        return self._record
    def name(self):
        if len(self._dbtags) == 1:
            return self._dbtags[0]
        return self._dbtags[0] + "@joined"
    def __type(self):
        if self._type is None:
            self._type = recordToType(self._record)
        return self._type
    def time_type(self):
        if self._time_type is None:
            self.iovsNPayloadNames()
        return timeTypeName(self._time_type)
    def originalTagNames(self):
        return self._dbtags
    def iovsNPayloadNames(self):
        if self._iovsNPayloads is None:
            finalIOV = []
            for tag in self._dbtags:
                time_type, iovAndPayload = tagInfo(self._session, tag, self._snapshot)
                self._time_type = time_type
                if not finalIOV:
                    finalIOV = [[i[0], [i[1]]] for i in iovAndPayload]
                else:
                    finalIOV = mergeIOVs(finalIOV, iovAndPayload)

            firstValues, lastValues = sinceToIOV((x[0] for x in finalIOV), time_type)
            if self._args.run is None:
                # include all IOVs
                self._iovsNPayloads = list(zip((IOVSyncValue(x[0], x[1]) for x in firstValues), (IOVSyncValue(x[0], x[1]) for x in lastValues), (x[1] for x in finalIOV)))
            else:
                # include only the IOVs that contain the given run
                if time_type == conddb.TimeType.Time.value:
                    # time-based IOVs
                    # TODO map run number to time with "conddb --noLimit listRuns", including a few minutes of tolerance
                    self._iovsNPayloads = list(zip((IOVSyncValue(x[0], x[1]) for x in firstValues), (IOVSyncValue(x[0], x[1]) for x in lastValues), (x[1] for x in finalIOV)))
                else:
                    # run and lumi-based IOVs
                    firstRunLumi = (self._args.run, 1)
                    firstIndex = bisect.bisect(firstValues, firstRunLumi) - 1
                    lastRunLumi = (self._args.run, 0xffffffff)
                    lastIndex = bisect.bisect(lastValues, lastRunLumi) + 1
                    self._iovsNPayloads = list(zip((IOVSyncValue(x[0], x[1]) for x in firstValues[firstIndex:lastIndex]), (IOVSyncValue(x[0], x[1]) for x in lastValues[firstIndex:lastIndex]), (x[1] for x in finalIOV[firstIndex:lastIndex])))
            self._session.flush()
            self._session.commit()
        return self._iovsNPayloads

    def dataProducts(self):
        t = self.__type()
        iovs = self.iovsNPayloadNames()
        payloadForProducts = []
        for p in self._productLabels:
            payloadForProducts.append(OrderedDict())
        for first, last, payloads in iovs:
            for i, p in enumerate(payloads):
                if p is not None:
                    payloadForProducts[i][p] = None
        return [DBDataProduct(t, v, list(payloadForProducts[i]), self._args) for i, v in enumerate(self._productLabels)]

class DBGlobalTag(object):
    def __init__(self, args, session, name):
        self._session = session
        self._args = args
        self._snapshot = args.snapshot
        self._name = name
        self._tags = []
        gt = globalTagInfo(session, name)
        lastRcd = None
        tags = []
        for rcd, label, tag in gt:
            if rcd != lastRcd:
                if lastRcd is not None:
                    self._tags.append(DBTag(session, args, lastRcd, tags))
                lastRcd = rcd
                tags = []
            tags.append((label, tag))
        if lastRcd is not None:
            self._tags.append(DBTag(session, args, lastRcd, tags))
    def tags(self):
        return self._tags

def timeTypeName(time_type):
    if time_type == conddb.TimeType.Time.value:
        return 'time'
    if time_type == conddb.TimeType.Run.value or time_type == conddb.TimeType.Lumi.value:
        return 'run_lumi'
    raise RuntimeError("unknown time type %s" % str(time_type))

def parseSince(time_type, since):
    if time_type == conddb.TimeType.Time.value:
        return (_high(since), _low(since))
    if time_type == conddb.TimeType.Run.value:
        return (_high(since), 0)
    if time_type == conddb.TimeType.Lumi.value:
        return (_high(since), _low(since))

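# For example, a lumi-based since packs (run, lumi) into one 64-bit value
# (hypothetical numbers):
#
#     >>> parseSince(conddb.TimeType.Lumi.value, (316766 << 32) | 123)
#     (316766, 123)
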
def previousSyncValue(syncValue):
    if syncValue[1] == 0:
        return (syncValue[0]-1, 0xffffffff)
    return (syncValue[0], syncValue[1]-1)

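# For example:
#
#     >>> previousSyncValue((316766, 123))
#     (316766, 122)
#     >>> previousSyncValue((316766, 0))   # steps back across a run boundary
#     (316765, 4294967295)
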
def sinceToIOV(sinceList, time_type):
    firstValues = []
    lastValues = []
    for since in sinceList:
        syncValue = parseSince(time_type, since)
        firstValues.append(syncValue)
        if len(firstValues) != 1:
            lastValues.append(previousSyncValue(syncValue))
    lastValues.append((0xFFFFFFFF, 0xFFFFFFFF))
    return [firstValues, lastValues]

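# For example, two lumi-based sinces yield one [first, last] pair per IOV,
# with the last IOV left open-ended:
#
#     >>> sinceToIOV([(1 << 32), (100 << 32)], conddb.TimeType.Lumi.value)
#     [[(1, 0), (100, 0)], [(99, 4294967295), (4294967295, 4294967295)]]
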
def globalTagInfo(session, name):
    GlobalTag = session.get_dbtype(conddb.GlobalTag)
    GlobalTagMap = session.get_dbtype(conddb.GlobalTagMap)
    try:
        is_global_tag = _exists(session, GlobalTag.name, name)
        if is_global_tag:
            return session.query(GlobalTagMap.record, GlobalTagMap.label, GlobalTagMap.tag_name).\
                filter(GlobalTagMap.global_tag_name == name).\
                order_by(GlobalTagMap.record, GlobalTagMap.label).\
                all()
    except sqlalchemy.exc.OperationalError:
        sys.stderr.write("No table for GlobalTags found in DB.\n\n")
    return None

def tagInfo(session, name, snapshot):
    Tag = session.get_dbtype(conddb.Tag)
    IOV = session.get_dbtype(conddb.IOV)
    is_tag = _exists(session, Tag.name, name)
    if is_tag:
        time_type = session.query(Tag.time_type).\
            filter(Tag.name == name).\
            scalar()

        rawTagInfo = session.query(IOV.since, IOV.insertion_time, IOV.payload_hash).\
                     filter(
                         IOV.tag_name == name,
                         _inserted_before(IOV, snapshot),
                     ).\
                     order_by(IOV.since.desc(), IOV.insertion_time.desc()).\
                     from_self().\
                     order_by(IOV.since, IOV.insertion_time).\
                     all()
        filteredTagInfo = []
        lastSince = -1
        for since, insertion, payload in rawTagInfo:
            if lastSince == since:
                continue
            lastSince = since
            if time_type == conddb.TimeType.Run.value:
                # need to make Run and RunLumi directly comparable since some records
                # use a mix of the two for their IOVs
                since = int(since) << 32
            filteredTagInfo.append((since, payload))

        if time_type == conddb.TimeType.Run.value:
            time_type = conddb.TimeType.Lumi.value

        return time_type, filteredTagInfo
def _checkMerge(previousIOV, newIOV, debugCopy, nExistingDataProducts):
    # sanity check
    # check proper number of entries
    previousSince = -1
    for i, e in enumerate(previousIOV):
        if len(e[1]) != nExistingDataProducts+1:
            raise RuntimeError("entry %i has wrong number of elements %i instead of %i" % (i, len(e[1]), nExistingDataProducts+1))
        if previousSince >= e[0]:
            raise RuntimeError("IOV not in order for index %i" % i)
        previousSince = e[0]

    previousIndex = 0
    debugIndex = 0
    while debugIndex < len(debugCopy) and previousIndex < len(previousIOV):
        previousSince = previousIOV[previousIndex][0]
        debugSince = debugCopy[debugIndex][0]
        if debugSince != previousSince:
            previousIndex += 1
            continue
        if debugCopy[debugIndex][1] != previousIOV[previousIndex][1][:nExistingDataProducts]:
            raise RuntimeError("payloads were not properly copied for index %i original:%s new:%s" % (debugIndex, ",".join(debugCopy[debugIndex][1]), ",".join(previousIOV[previousIndex][1][:nExistingDataProducts])))
        debugIndex += 1
        previousIndex += 1
    if debugIndex != len(debugCopy):
        raise RuntimeError("failed to copy forward index %i" % debugIndex)
    newIndex = 0
    previousIndex = 0
    while newIndex < len(newIOV) and previousIndex < len(previousIOV):
        previousSince = previousIOV[previousIndex][0]
        newSince = newIOV[newIndex][0]
        if newSince != previousSince:
            previousIndex += 1
            continue
        if previousIOV[previousIndex][1][-1] != newIOV[newIndex][1]:
            raise RuntimeError("failed to append payload at index %i" % newIndex)
        previousIndex += 1
        newIndex += 1
    if newIndex != len(newIOV):
        raise RuntimeError("failed to merge IOV entry %i" % newIndex)

def mergeIOVs(previousIOV, newIOV):
    '''Merge the (since, payload) list of an additional data product into the
    accumulated IOV list, where each entry is [since, [payload per product]].
    Payloads missing at a given since are carried forward from the previous IOV.'''
    debugCopy = copy.deepcopy(previousIOV)
    previousSize = len(previousIOV)
    newSize = len(newIOV)
    previousIndex = 0
    newIndex = 0
    nExistingDataProducts = len(previousIOV[0][1])
    while newIndex < newSize and previousIndex < previousSize:

        previousSince = previousIOV[previousIndex][0]
        newSince = newIOV[newIndex][0]
        if previousSince == newSince:
            previousIOV[previousIndex][1].append(newIOV[newIndex][1])
            newIndex += 1
            previousIndex += 1
            continue
        elif newSince < previousSince:
            if previousIndex == 0:
                payloads = [None]*nExistingDataProducts
                payloads.append(newIOV[newIndex][1])
                previousIOV.insert(0, [newSince, payloads])
            else:
                payloads = previousIOV[previousIndex-1][1][:nExistingDataProducts]
                payloads.append(newIOV[newIndex][1])
                previousIOV.insert(previousIndex, [newSince, payloads])
            newIndex += 1
            previousIndex += 1
            previousSize += 1
        elif newSince > previousSince:
            if newIndex == 0:
                previousIOV[previousIndex][1].append(None)
            else:
                if len(previousIOV[previousIndex][1]) == nExistingDataProducts:
                    previousIOV[previousIndex][1].append(newIOV[newIndex-1][1])
            previousIndex += 1
    if newIndex != newSize:
        # need to append to end
        previousPayloads = previousIOV[-1][1]
        while newIndex != newSize:
            newPayloads = previousPayloads[:]
            newPayloads[nExistingDataProducts] = newIOV[newIndex][1]
            previousIOV.append([newIOV[newIndex][0], newPayloads])
            newIndex += 1
    if previousIndex != previousSize:
        # need to add new item to all remaining entries
        while previousIndex < previousSize:
            previousIOV[previousIndex][1].append(newIOV[-1][1])
            previousIndex += 1
    _checkMerge(previousIOV, newIOV, debugCopy, nExistingDataProducts)
    return previousIOV

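# For example, merging a second product into a one-product IOV list
# (payload names are hypothetical):
#
#     >>> mergeIOVs([[1, ['A']]], [(1, 'X'), (5, 'Y')])
#     [[1, ['A', 'X']], [5, ['A', 'Y']]]
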
def writeTagImpl(tagsGroup, name, recName, time_type, IOV_payloads, payloadToRefs, originalTagNames):
    tagGroup = tagsGroup.create_group(name)
    tagGroup.attrs["time_type"] = time_type.encode("ascii")
    tagGroup.attrs["db_tags"] = [x.encode("ascii") for x in originalTagNames]
    tagGroup.attrs["record"] = recName.encode("ascii")
    firstValues = [x[0] for x in IOV_payloads]
    lastValues = [x[1] for x in IOV_payloads]
    syncValueType = np.dtype([("high", np.uint32), ("low", np.uint32)])
    first_np = np.empty(shape=(len(IOV_payloads),), dtype=syncValueType)
    first_np['high'] = [x.high for x in firstValues]
    first_np['low'] = [x.low for x in firstValues]
    last_np = np.empty(shape=(len(lastValues),), dtype=syncValueType)
    last_np['high'] = [x.high for x in lastValues]
    last_np['low'] = [x.low for x in lastValues]
    payloads = [[payloadToRefs[y] for y in x[2]] for x in IOV_payloads]
    compressor = None
    if len(first_np) > 100:
        compressor = 'gzip'
    tagGroup.create_dataset("first", data=first_np, compression=compressor)
    tagGroup.create_dataset("last", data=last_np, compression=compressor)
    tagGroup.create_dataset("payload", data=payloads, dtype=h5py.ref_dtype, compression=compressor)
    return tagGroup.ref

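# The resulting HDF5 layout for each tag group is then (names exactly as
# created above):
#
#     <tagsGroup>/<name>/
#         attrs:    time_type, db_tags, record
#         datasets: first, last  -- (high, low) uint32 pairs, one per IOV
#                   payload      -- h5py object references, one row per IOV
#                                   and one column per data product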

def writeTag(tagsGroup, time_type, IOV_payloads, payloadToRefs, originalTagNames, recName):
    name = originalTagNames[0]
    if len(originalTagNames) != 1:
        name = name + "@joined"
    return writeTagImpl(tagsGroup, name, recName, time_type, IOV_payloads, payloadToRefs, originalTagNames)

def recordToType(record):
    import subprocess
    return subprocess.run(["condRecordToDataProduct", record], capture_output=True, check=True, text=True).stdout

__typedefs = {b"ESCondObjectContainer<ESPedestal>": "ESPedestals",
              b"ESCondObjectContainer<float>": "ESFloatCondObjectContainer",
              b"ESCondObjectContainer<ESChannelStatusCode>": "ESChannelStatus",
              b"EcalCondObjectContainer<EcalPedestal>": "EcalPedestals",
              b"EcalCondObjectContainer<EcalXtalGroupId>": "EcalWeightXtalGroups",
              b"EcalCondObjectContainer<EcalMGPAGainRatio>": "EcalGainRatios",
              b"EcalCondObjectContainer<float>": "EcalFloatCondObjectContainer",
              b"EcalCondObjectContainer<EcalChannelStatusCode>": "EcalChannelStatus",
              b"EcalCondObjectContainer<EcalMappingElement>": "EcalMappingElectronics",
              b"EcalCondObjectContainer<EcalTPGPedestal>": "EcalTPGPedestals",
              b"EcalCondObjectContainer<EcalTPGLinearizationConstant>": "EcalTPGLinearizationConst",
              b"EcalCondObjectContainer<EcalTPGCrystalStatusCode>": "EcalTPGCrystalStatus",
              b"EcalCondTowerObjectContainer<EcalChannelStatusCode>": "EcalDCSTowerStatus",
              b"EcalCondTowerObjectContainer<EcalDAQStatusCode>": "EcalDAQTowerStatus",
              b"EcalCondObjectContainer<EcalDQMStatusCode>": "EcalDQMChannelStatus",
              b"EcalCondTowerObjectContainer<EcalDQMStatusCode>": "EcalDQMTowerStatus",
              b"EcalCondObjectContainer<EcalPulseShape>": "EcalPulseShapes",
              b"EcalCondObjectContainer<EcalPulseCovariance>": "EcalPulseCovariances",
              b"EcalCondObjectContainer<EcalPulseSymmCovariance>": "EcalPulseSymmCovariances",
              b"HcalItemCollById<HFPhase1PMTData>": "HFPhase1PMTParams",
              b"l1t::CaloParams": "CaloParams",
              b"StorableDoubleMap<AbsOOTPileupCorrection>": "OOTPileupCorrectionMapColl",
              b"PhysicsTools::Calibration::Histogram3D<double,double,double,double>": "PhysicsTools::Calibration::HistogramD3D",
              b"PhysicsTools::Calibration::MVAComputerContainer": "MVAComputerContainer"
}

def canonicalProductName(product):
    return __typedefs.get(product, product)

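# For example:
#
#     >>> canonicalProductName(b"l1t::CaloParams")
#     'CaloParams'
#     >>> canonicalProductName(b"NotInTheMap")   # unknown types pass through
#     b'NotInTheMap'
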
def main():
    parser = argparse.ArgumentParser(description='Read from CMS Condition DB and write to HDF5 file')
    parser.add_argument('--db', '-d', default='pro', help='Database to run the command on. Run the help subcommand for more information: conddb help')
    parser.add_argument('name', nargs='+', help="Name of the global tag.")
    parser.add_argument('--verbose', '-v', action='count', help='Verbosity level. -v prints debugging information of this tool, like tracebacks in case of errors. -vv prints, in addition, all SQL statements issued. -vvv prints, in addition, all results returned by queries.')
    parser.add_argument('--authPath', '-a', default=None, help='Path of the authentication .netrc file. Default: the content of the COND_AUTH_PATH environment variable, when specified.')
    parser.add_argument('--run', '-r', default=None, type=int, help='Include only the payloads with an IOV that is part of the given run.')
    parser.add_argument('--snapshot', '-T', default=None, help="Snapshot time. If provided, the output will represent the state of the IOVs inserted into the database up to the given time. The format of the string must be one of the following: '2013-01-20', '2013-01-20 10:11:12' or '2013-01-20 10:11:12.123123'.")
    parser.add_argument('--exclude', '-e', nargs='*', help='List of records to exclude from the file (cannot be used with --include).')
    parser.add_argument('--include', '-i', nargs='*', help='List of the only records that should be included in the file (cannot be used with --exclude).')
    parser.add_argument('--output', '-o', default='test.h5cond', help='Name of the HDF5 output file to write.')
    parser.add_argument('--compressor', '-c', default='zlib', choices=['zlib', 'lzma', 'none'], help="Compress data using 'zlib', 'lzma' or 'none'.")
    args = parser.parse_args()

    if args.exclude and args.include:
        print("Cannot use --exclude and --include at the same time")
        exit(-1)

    connection = connect(args)
    session = connection.session()

    excludeRecords = set()
    if args.exclude:
        excludeRecords = set(args.exclude)
    includeRecords = set()
    if args.include:
        includeRecords = set(args.include)

    writeH5File(args.output, args.name, excludeRecords, includeRecords, lambda x: DBGlobalTag(args, session, x), args.compressor)

if __name__ == '__main__':
    main()
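
# Example invocation (hypothetical script and tag names):
#
#     python3 condToHDF5.py --db pro --snapshot '2013-01-20' \
#         --output mygt.h5cond MyGlobalTag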