#!/usr/bin/env python3
import os
import csv
import json
import argparse
import subprocess

import FWCore.ParameterSet.Config as cms
import HLTrigger.Configuration.Tools.options as options
from HLTrigger.Configuration.extend_argparse import *

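# Example invocation (hypothetical menu name; requires a CMSSW environment with
# access to the HLT configuration database):
#   hltMenuContentToCSVs run3:/dev/CMSSW_X_Y_Z/GRun --prescale-column 2p0E34 -o out_dir
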
def MKDIRP(dirpath, verbose=False, dry_run=False):
    # create the directory (including missing parents), ignoring the error if it already exists
    if verbose:
        print('\033[1m'+'>'+'\033[0m'+' os.makedirs("'+dirpath+'")')
    if dry_run:
        return
    try:
        os.makedirs(dirpath)
    except OSError:
        if not os.path.isdir(dirpath):
            raise

def colored_text(txt, keys=[]):
    # wrap txt in the ANSI escape codes listed in keys (e.g. '1' for bold)
    _tmp_out = ''
    for _i_tmp in keys:
        _tmp_out += '\033['+_i_tmp+'m'
    _tmp_out += txt
    if len(keys) > 0:
        _tmp_out += '\033[0m'
    return _tmp_out

def getHLTProcess(config):
    if config.menu.run:
        configline = f'--runNumber {config.menu.run}'
    else:
        configline = f'--{config.menu.database} --{config.menu.version} --configName {config.menu.name}'

    # cmd to download HLT configuration
    cmdline = f'hltConfigFromDB {configline} --noedsources --noes --nooutput'
    if config.proxy:
        cmdline += f' --dbproxy --dbproxyhost {config.proxy_host} --dbproxyport {config.proxy_port}'

    # download HLT configuration
    proc = subprocess.Popen(cmdline, shell = True, stdin = None, stdout = subprocess.PIPE, stderr = None)
    (out, err) = proc.communicate()

    # load HLT configuration: the dump is a python configuration defining a cms.Process named "process"
    try:
        foo = {'process': None}
        exec(out, foo)
        process = foo['process']
    except Exception:
        raise Exception(f'query did not return a valid python file:\n query="{cmdline}"')

    if not isinstance(process, cms.Process):
        raise Exception(f'query did not return a valid HLT menu:\n query="{cmdline}"')

    return process

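# The prescale helpers below read the menu's PrescaleService, which in an HLT
# menu typically looks like the following (illustrative sketch, not taken from
# a real menu):
#   process.PrescaleService = cms.Service( "PrescaleService",
#       lvl1Labels = cms.vstring( '2p0E34', '1p5E34' ),
#       prescaleTable = cms.VPSet(
#           cms.PSet( pathName = cms.string( 'HLT_Foo_v1' ), prescales = cms.vuint32( 1, 2 ) )
#       )
#   )
# Paths absent from prescaleTable are implicitly unprescaled (prescale 1 in every column).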
def getPrescaleTableLines(process, pathNames):
    ret = []
    if hasattr(process, 'PrescaleService'):
        # header row: "Path", followed by the names of the prescale columns
        ret += [['Path']+process.PrescaleService.lvl1Labels]
        ncols = len(process.PrescaleService.lvl1Labels)
        psDict = {pset_i.pathName.value():pset_i.prescales for pset_i in process.PrescaleService.prescaleTable}
        for pathName in pathNames:
            if pathName not in process.paths_():
                raise SystemExit(f'getPrescaleTableLines: path not found in the HLT menu: {pathName}')
            psvals = psDict[pathName] if pathName in psDict else [1]*ncols
            ret += [[pathName]+[str(psval_i) for psval_i in psvals]]
    return ret

def getPrescale(process, pathName, psColumnName):
    ret = ''
    if not hasattr(process, 'PrescaleService'):
        return ret
    # find the index of the requested prescale column
    psColIndex = -1
    for psColIdx_i, psColName_i in enumerate(process.PrescaleService.lvl1Labels):
        if psColName_i == psColumnName:
            psColIndex = psColIdx_i
    if psColIndex < 0:
        return ret
    # default prescale is 1; override it if the path appears in the prescale table
    ret = '1'
    for pset_i in process.PrescaleService.prescaleTable:
        if pathName == pset_i.pathName:
            ret = f'{pset_i.prescales[psColIndex]}'
            break
    return ret

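# Smart prescales of a Primary Dataset are read from the TriggerResultsFilter
# module of the corresponding "Dataset_*" Path: each entry of its
# "triggerConditions" parameter may carry a prescale after a " / " separator,
# e.g. (hypothetical path name)
#   'HLT_Foo_v1 / 5'
# which getDatasets reports as "PD(5)" next to the Primary-Dataset name.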
def getDatasets(process, pathName):
    # format: "PD1(smartPSinPD1), PD2(smartPSinPD2), .."
    ret = []
    datasets = [dataset_i for dataset_i in process.datasets.parameterNames_() \
        if pathName in process.datasets.getParameter(dataset_i)]
    for dataset_i in datasets:
        datasetLabel = dataset_i
        # if the DatasetPath exists, add the value of the smart prescale
        if hasattr(process, 'Dataset_'+dataset_i):
            datasetPath_i = getattr(process, 'Dataset_'+dataset_i)
            if isinstance(datasetPath_i, cms.Path):
                for modName in datasetPath_i.moduleNames():
                    module = getattr(process, modName)
                    if module.type_() == 'TriggerResultsFilter':
                        if hasattr(module, 'triggerConditions'):
                            for trigCond_j in module.triggerConditions:
                                trigCond_j_split = trigCond_j.split(' / ')
                                if trigCond_j_split[0] == pathName and len(trigCond_j_split) > 1:
                                    datasetLabel += f'({trigCond_j_split[1]})'
        ret += [datasetLabel]
    return ', '.join(ret)

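# In the menu dump, "process.streams" is a PSet mapping each Stream name to the
# vstring of Primary Datasets it contains; a Path belongs to a Stream if one of
# the Path's Datasets appears in that Stream. Illustrative sketch (hypothetical
# stream and dataset names):
#   process.streams = cms.PSet( PhysicsMuon = cms.vstring( 'Muon0', 'Muon1' ) )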
def getStreams(process, pathName):
    # format: "Stream1, Stream2, .."
    datasets = [dataset_i for dataset_i in process.datasets.parameterNames_() \
        if pathName in process.datasets.getParameter(dataset_i)]
    streams = [stream_i for stream_i in process.streams.parameterNames_() \
        for dataset_i in datasets if dataset_i in process.streams.getParameter(stream_i)]
    return ', '.join(streams)

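# The L1T seed of a Path is read from the first (lowest module index) "HLTL1TSeed"
# module of the Path, via its "L1SeedsLogicalExpression" parameter, which holds an
# expression such as (hypothetical) "L1_SingleMu22 OR L1_SingleMu25".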
def getL1TSeed(process, pathName):
    ret = ''
    path = process.paths_()[pathName]
    minIdx = None
    for modName in path.moduleNames():
        module = getattr(process, modName)
        try:
            modIdx = path.index(module)
        except Exception:
            continue
        if module.type_() == 'HLTL1TSeed':
            if hasattr(module, 'L1SeedsLogicalExpression'):
                # keep the seed of the earliest HLTL1TSeed module in the Path
                if minIdx is None or modIdx < minIdx:
                    ret = module.L1SeedsLogicalExpression.value()
                    minIdx = modIdx
    return ret

def getDatasetStreamDict(process):
    # key: "Dataset", value: list of "Streams"
    ret = {}
    for dataset_i in process.datasets.parameterNames_():
        ret[dataset_i] = []
        for stream_i in process.streams.parameterNames_():
            if dataset_i in process.streams.getParameter(stream_i):
                ret[dataset_i].append(stream_i)
        ret[dataset_i] = sorted(list(set(ret[dataset_i])))
    return ret

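# Note: the default delimiter is '|' rather than ',' (see --csv-delimiter), since
# fields such as "Datasets (SmartPS)" and "Streams" contain commas themselves.
# A row like ['HLT_Foo_v1', '1'] is then written out as (hypothetical content)
#   HLT_Foo_v1|1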
def create_csv(outputFilePath, delimiter, lines):
    # create output directory
    MKDIRP(os.path.dirname(outputFilePath))
    # write .csv file
    with open(outputFilePath, 'w') as csvfile:
        outf = csv.writer(csvfile, delimiter=delimiter)
        for line_i in lines:
            outf.writerow(line_i)
    print(colored_text(outputFilePath, ['1']))

def main():
    # define an argparse parser to parse our options
    textwidth = int( 80 )
    try:
        textwidth = int( os.popen("stty size", "r").read().split()[1] )
    except Exception:
        pass
    formatter = FixedWidthFormatter( HelpFormatterRespectNewlines, width = textwidth )

    # read defaults
    defaults = options.HLTProcessOptions()

    parser = argparse.ArgumentParser(
        description = 'Create outputs to announce the release of a new HLT menu.',
        argument_default = argparse.SUPPRESS,
        formatter_class = formatter,
        add_help = False
    )

    # required argument
    parser.add_argument('menu',
                        action = 'store',
                        type = options.ConnectionHLTMenu,
                        metavar = 'MENU',
                        help = 'HLT menu to dump from the database. Supported formats are:\n - /path/to/configuration[/Vn]\n - [[{v1|v2|v3}/]{run3|run2|online|adg}:]/path/to/configuration[/Vn]\n - run:runnumber\nThe possible converters are "v1", "v2", and "v3" (default).\nThe possible databases are "run3" (default, used for offline development), "run2" (used for accessing run2 offline development menus), "online" (used to extract online menus within Point 5) and "adg" (used to extract the online menus outside Point 5).\nIf no menu version is specified, the latest one is automatically used.\nIf "run:" is used instead, the HLT menu used for the given run number is looked up and used.\nNote other converters and databases exist as options but they are only for expert/special use.' )

    # options
    parser.add_argument('--dbproxy',
                        dest = 'proxy',
                        action = 'store_true',
                        default = defaults.proxy,
                        help = 'Use a socks proxy to connect outside CERN network (default: False)' )
    parser.add_argument('--dbproxyport',
                        dest = 'proxy_port',
                        action = 'store',
                        metavar = 'PROXYPORT',
                        default = defaults.proxy_port,
                        help = 'Port of the socks proxy (default: 8080)' )
    parser.add_argument('--dbproxyhost',
                        dest = 'proxy_host',
                        action = 'store',
                        metavar = 'PROXYHOST',
                        default = defaults.proxy_host,
                        help = 'Host of the socks proxy (default: "localhost")' )

    parser.add_argument('--prescale-column',
                        dest = 'prescale_column',
                        action = 'store',
                        default = '2p0E34',
                        help = 'Name of main prescale column (default: "2p0E34")' )

    parser.add_argument('--csv-delimiter',
                        dest = 'csv_delimiter',
                        action = 'store',
                        default = '|',
                        help = 'Delimiter used in the .csv output files (default: "|")' )

    parser.add_argument('--metadata-json',
                        dest = 'metadata_json',
                        action = 'store',
                        default = 'owners.json',
                        help = 'Path to .json file with metadata on HLT Paths (online?, group-owners)' )

    parser.add_argument('-o', '--output-dir',
                        dest = 'output_dir',
                        action = 'store',
                        default = '.',
                        help = 'Path to output directory' )

    # redefine "--help" to be the last option, and use a customized message
    parser.add_argument('-h', '--help',
                        action = 'help',
                        help = 'Show this help message and exit' )

    # parse command line arguments and options
    config = parser.parse_args()

    process = getHLTProcess(config)

    pathNames = list(process.paths_())

    ## Tab: HLT Prescales
    create_csv(
        outputFilePath = os.path.join(config.output_dir, 'tabHLTPrescales.csv'),
        delimiter = config.csv_delimiter,
        lines = getPrescaleTableLines(process, pathNames),
    )

    ## Tab: HLT Menu
    metadataDict = {}
    if config.metadata_json and os.path.isfile(config.metadata_json):
        with open(config.metadata_json) as metadataFile:
            metadataDict = json.load(metadataFile)

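    # metadataDict is keyed by unversioned Path names, i.e. up to and including the
    # trailing "_v", e.g. (illustrative sketch of the expected .json content;
    # "TSG" is a hypothetical group name)
    #   { "HLT_Foo_v": { "online?": true, "owners": ["TSG"] } }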
    pathAttributes = {}
    for pathName in pathNames:
        # strip the version suffix, keeping the trailing "_v" (e.g. HLT_Foo_v1 -> HLT_Foo_v)
        pathNameUnv = pathName[:pathName.rfind('_v')+2] if '_v' in pathName else pathName
        pathOwners = ', '.join(metadataDict[pathNameUnv]['owners']) if pathNameUnv in metadataDict else ''
        pathIsOnline = 'Yes' if pathNameUnv in metadataDict and metadataDict[pathNameUnv]['online?'] else 'No'
        pathAttributes[pathName] = {
            'Owners': pathOwners,
            'Online?': pathIsOnline,
            'PS ('+config.prescale_column+')': getPrescale(process, pathName, config.prescale_column),
            'Datasets (SmartPS)': getDatasets(process, pathName),
            'Streams': getStreams(process, pathName),
            'L1T Seed': getL1TSeed(process, pathName),
        }

    linesHLTMenu = [[
        'Path',
        'Owners',
        'Online?',
        'PS ('+config.prescale_column+')',
        'Datasets (SmartPS)',
        'Streams',
        'L1T Seed',
    ]]

    for pathName in pathNames:
        # skip the DatasetPaths, which only implement smart prescales
        if pathName.startswith('Dataset_'):
            continue
        pathDict = pathAttributes[pathName]
        # fill the row in the same order as the header columns
        linesHLTMenu += [[pathName] + [pathDict[column] for column in linesHLTMenu[0][1:]]]

    create_csv(
        outputFilePath = os.path.join(config.output_dir, 'tabHLTMenu.csv'),
        delimiter = config.csv_delimiter,
        lines = linesHLTMenu,
    )

    ## Tab: HLT Datasets and Streams
    dsetDict = getDatasetStreamDict(process)
    linesHLTDatasetsAndStreams = [['Primary Dataset', 'Stream']]
    linesHLTDatasetsAndStreams += [[dset, ', '.join(dsetDict[dset])] for dset in sorted(dsetDict.keys())]
    create_csv(
        outputFilePath = os.path.join(config.output_dir, 'tabHLTDatasetsAndStreams.csv'),
        delimiter = config.csv_delimiter,
        lines = linesHLTDatasetsAndStreams,
    )

###
### main
###
if __name__ == '__main__':
    main()