0003 __version__ = "$Revision: 1.19 $"
0004 __source__ = "$Source: /local/reps/CMSSW/CMSSW/Configuration/Applications/python/ConfigBuilder.py,v $"
0005
0006 import FWCore.ParameterSet.Config as cms
0007 from FWCore.ParameterSet.Modules import _Module
0008
0009
0010 from FWCore.ParameterSet.MassReplace import massReplaceInputTag as MassReplaceInputTag
0011
0012 import hashlib
0013 import sys
0014 import re
0015 import collections
0016 from subprocess import Popen,PIPE
0017 import FWCore.ParameterSet.DictTypes as DictTypes
0018 from FWCore.ParameterSet.OrderedSet import OrderedSet
0019 class Options:
0020 pass
0021
0022
0023 defaultOptions = Options()
0024 defaultOptions.datamix = 'DataOnSim'
0025 defaultOptions.isMC=False
0026 defaultOptions.isData=True
0027 defaultOptions.step=''
0028 defaultOptions.pileup='NoPileUp'
0029 defaultOptions.pileup_input = None
0030 defaultOptions.pileup_dasoption = ''
0031 defaultOptions.geometry = 'SimDB'
0032 defaultOptions.geometryExtendedOptions = ['ExtendedGFlash','Extended','NoCastor']
0033 defaultOptions.magField = ''
0034 defaultOptions.conditions = None
0035 defaultOptions.scenarioOptions=['pp','cosmics','nocoll','HeavyIons']
0036 defaultOptions.harvesting= 'AtRunEnd'
0037 defaultOptions.gflash = False
0038 defaultOptions.number = -1
0039 defaultOptions.number_out = None
0040 defaultOptions.arguments = ""
0041 defaultOptions.name = "NO NAME GIVEN"
0042 defaultOptions.evt_type = ""
0043 defaultOptions.filein = ""
0044 defaultOptions.dasquery=""
0045 defaultOptions.dasoption=""
0046 defaultOptions.secondfilein = ""
0047 defaultOptions.customisation_file = []
0048 defaultOptions.customisation_file_unsch = []
0049 defaultOptions.customise_commands = ""
0050 defaultOptions.inline_custom=False
0051 defaultOptions.particleTable = 'pythiapdt'
0052 defaultOptions.particleTableList = ['pythiapdt','pdt']
0053 defaultOptions.dirin = ''
0054 defaultOptions.dirout = ''
0055 defaultOptions.filetype = 'EDM'
0056 defaultOptions.fileout = 'output.root'
0057 defaultOptions.rntuple_out = False
0058 defaultOptions.filtername = ''
0059 defaultOptions.lazy_download = False
0060 defaultOptions.custom_conditions = ''
0061 defaultOptions.hltProcess = ''
0062 defaultOptions.eventcontent = None
0063 defaultOptions.datatier = None
0064 defaultOptions.inlineEventContent = True
0065 defaultOptions.inlineObjects =''
0066 defaultOptions.hideGen=False
0067 from Configuration.StandardSequences.VtxSmeared import VtxSmearedDefaultKey
0068 defaultOptions.beamspot=None
0069 defaultOptions.outputDefinition =''
0070 defaultOptions.inputCommands = None
0071 defaultOptions.outputCommands = None
0072 defaultOptions.inputEventContent = ''
0073 defaultOptions.dropDescendant = False
0074 defaultOptions.relval = None
0075 defaultOptions.prefix = None
0076 defaultOptions.profile = None
0077 defaultOptions.heap_profile = None
0078 defaultOptions.maxmem_profile = None
0079 defaultOptions.isRepacked = False
0080 defaultOptions.restoreRNDSeeds = False
0081 defaultOptions.donotDropOnInput = ''
0082 defaultOptions.python_filename =''
0083 defaultOptions.io=None
0084 defaultOptions.lumiToProcess=None
0085 defaultOptions.fast=False
0086 defaultOptions.runsAndWeightsForMC = None
0087 defaultOptions.runsScenarioForMC = None
0088 defaultOptions.runsAndWeightsForMCIntegerWeights = None
0089 defaultOptions.runsScenarioForMCIntegerWeights = None
0090 defaultOptions.runUnscheduled = False
0091 defaultOptions.timeoutOutput = False
0092 defaultOptions.nThreads = 1
0093 defaultOptions.nStreams = 0
0094 defaultOptions.nConcurrentLumis = 0
0095 defaultOptions.nConcurrentIOVs = 0
0096 defaultOptions.accelerators = None
0097
0098
0099 def dumpPython(process,name):
0100 theObject = getattr(process,name)
0101 if isinstance(theObject,cms.Path) or isinstance(theObject,cms.EndPath) or isinstance(theObject,cms.Sequence):
0102 return "process."+name+" = " + theObject.dumpPython()
0103 elif isinstance(theObject,_Module) or isinstance(theObject,cms.ESProducer):
0104 return "process."+name+" = " + theObject.dumpPython()+"\n"
0105 else:
0106 return "process."+name+" = " + theObject.dumpPython()+"\n"
0107 def filesFromList(fileName,s=None):
0108 import os
0109 import FWCore.ParameterSet.Config as cms
0110 prim=[]
0111 sec=[]
0112 for line in open(fileName,'r'):
0113 if line.count(".root")>=2:
0114
0115 entries=line.replace("\n","").split()
0116 prim.append(entries[0])
0117 sec.append(entries[1])
0118 elif (line.find(".root")!=-1):
0119 entry=line.replace("\n","")
0120 prim.append(entry)
0121
0122 file_seen = set()
0123 prim = [f for f in prim if not (f in file_seen or file_seen.add(f))]
0124 file_seen = set()
0125 sec = [f for f in sec if not (f in file_seen or file_seen.add(f))]
0126 if s:
0127 if not hasattr(s,"fileNames"):
0128 s.fileNames=cms.untracked.vstring(prim)
0129 else:
0130 s.fileNames.extend(prim)
0131 if len(sec)!=0:
0132 if not hasattr(s,"secondaryFileNames"):
0133 s.secondaryFileNames=cms.untracked.vstring(sec)
0134 else:
0135 s.secondaryFileNames.extend(sec)
0136 print("found files: ",prim)
0137 if len(prim)==0:
raise Exception("There are no files in input from the file list")
0139 if len(sec)!=0:
0140 print("found parent files:",sec)
0141 return (prim,sec)
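# Illustrative file-list format read above (paths are placeholders): one file per
# line, with an optional second column interpreted as the parent/secondary file;
# duplicates are removed while preserving order.
#   /store/data/primary_1.root /store/data/parent_1.root
#   /store/data/primary_2.root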
0142
0143 def filesFromDASQuery(query,option="",s=None,max_files=None):
0144 import os,time
0145 import FWCore.ParameterSet.Config as cms
0146 prim=[]
0147 sec=[]
0148 print("the query is",query)
0149 eC=5
0150 count=0
0151 while eC!=0 and count<3:
0152 if count!=0:
0153 print('Sleeping, then retrying DAS')
0154 time.sleep(100)
0155 p = Popen('dasgoclient %s --query "%s"'%(option,query), stdout=PIPE,shell=True, universal_newlines=True)
0156 pipe=p.stdout.read()
0157 tupleP = os.waitpid(p.pid, 0)
0158 eC=tupleP[1]
0159 count=count+1
0160 if eC==0:
0161 print("DAS succeeded after",count,"attempts",eC)
0162 else:
print("DAS failed 3 times - I give up")
0164 for line in pipe.split('\n'):
0165 if line.count(".root")>=2:
0166
0167 entries=line.replace("\n","").split()
0168 prim.append(entries[0])
0169 sec.append(entries[1])
0170 elif (line.find(".root")!=-1):
0171 entry=line.replace("\n","")
0172 prim.append(entry)
0173
0174 prim = sorted(list(set(prim)))
0175 sec = sorted(list(set(sec)))
0176 if max_files:
0177 max_files=int(max_files)
0178 prim = prim[:max_files]
0179 sec = sec[:max_files]
0180 if s:
0181 if not hasattr(s,"fileNames"):
0182 s.fileNames=cms.untracked.vstring(prim)
0183 else:
0184 s.fileNames.extend(prim)
0185 if len(sec)!=0:
0186 if not hasattr(s,"secondaryFileNames"):
0187 s.secondaryFileNames=cms.untracked.vstring(sec)
0188 else:
0189 s.secondaryFileNames.extend(sec)
0190 print("found files: ",prim)
0191 if len(sec)!=0:
0192 print("found parent files:",sec)
0193 return (prim,sec)
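# Illustrative usage (the dataset name is a placeholder):
#   filesFromDASQuery('file dataset = /MyPrimary/MyEra-v1/AODSIM', max_files=10)
# runs dasgoclient (retrying up to 3 times), de-duplicates and sorts the returned
# file names, and keeps at most 10 of them.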
0194
0195 def anyOf(listOfKeys,dict,opt=None):
0196 for k in listOfKeys:
0197 if k in dict:
0198 toReturn=dict[k]
0199 dict.pop(k)
0200 return toReturn
0201 if opt!=None:
0202 return opt
0203 else:
raise Exception("one of "+','.join(listOfKeys)+" is a mandatory entry of the --output option")
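# Illustrative usage of anyOf (a sketch, not part of the original source; the dict
# below is a placeholder --output entry): it pops the first matching key and
# returns its value, e.g.
#   d = {'t': 'RECO', 'e': 'AOD'}
#   anyOf(['t', 'tier', 'dataTier'], d)   # -> 'RECO', and 't' is removed from d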
0205
0206 class ConfigBuilder(object):
0207 """The main building routines """
0208
0209 def __init__(self, options, process = None, with_output = False, with_input = False ):
0210 """options taken from old cmsDriver and optparse """
0211
0212 options.outfile_name = options.dirout+options.fileout
0213
0214 self._options = options
0215
0216 self._customise_coms = []
0217 if self._options.customise_commands:
0218 self._customise_coms = self._options.customise_commands.split('\\n')
0219 self._options.customise_commands = ""
0220
0221 if self._options.isData and options.isMC:
0222 raise Exception("ERROR: You may specify only --data or --mc, not both")
0223
0224
0225
0226
0227 if 'ENDJOB' in self._options.step:
0228 if (hasattr(self._options,"outputDefinition") and \
0229 self._options.outputDefinition != '' and \
0230 any(anyOf(['t','tier','dataTier'],outdic) == 'DQMIO' for outdic in eval(self._options.outputDefinition))) or \
0231 (hasattr(self._options,"datatier") and \
0232 self._options.datatier and \
0233 'DQMIO' in self._options.datatier):
0234 print("removing ENDJOB from steps since not compatible with DQMIO dataTier")
0235 self._options.step=self._options.step.replace(',ENDJOB','')
0236
0237
0238
0239
0240 stepList = [re.sub(r'^prepare_', '', methodName) for methodName in ConfigBuilder.__dict__ if methodName.startswith('prepare_')]
0241 self.stepMap={}
0242 self.stepKeys=[]
0243 for step in self._options.step.split(","):
0244 if step=='': continue
0245 stepParts = step.split(":")
0246 stepName = stepParts[0]
0247 if stepName not in stepList and not stepName.startswith('re'):
0248 raise ValueError("Step {} unknown. Available are {}".format( stepName , sorted(stepList)))
0249 if len(stepParts)==1:
0250 self.stepMap[stepName]=""
0251 elif len(stepParts)==2:
0252 self.stepMap[stepName]=stepParts[1].split('+')
0253 elif len(stepParts)==3:
0254 self.stepMap[stepName]=(stepParts[2].split('+'),stepParts[1])
0255 else:
0256 raise ValueError(f"Step definition {step} invalid")
0257 self.stepKeys.append(stepName)
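# Illustrative --step specifications and the resulting stepMap entries (step and
# sequence names are examples only):
#   "RAW2DIGI,RECO"                  -> {'RAW2DIGI': '', 'RECO': ''}
#   "RECO:reconstruction"            -> {'RECO': ['reconstruction']}
#   "ALCA:MyPkg/my_cff:seqA+seqB"    -> {'ALCA': (['seqA', 'seqB'], 'MyPkg/my_cff')}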
0258
0259
0260
0261 self.with_output = with_output
0262 self.process=process
0263
0264 if hasattr(self._options,"no_output_flag") and self._options.no_output_flag:
0265 self.with_output = False
0266 self.with_input = with_input
0267 self.imports = []
0268 self.create_process()
0269 self.define_Configs()
0270 self.schedule = list()
0271 self.scheduleIndexOfFirstHLTPath = None
0272
0273
0274
0275
0276
0277 self.additionalCommands = []
0278
0279 self.blacklist_paths = []
0280 self.addedObjects = []
0281 self.additionalOutputs = {}
0282
0283 self.productionFilterSequence = None
0284 self.labelsToAssociate=[]
0285 self.nextScheduleIsConditional=False
0286 self.conditionalPaths=[]
0287 self.excludedPaths=[]
0288
0289 def profileOptions(self):
"""
Parse the ':'-separated --profile option and return (first event, event interval,
per-event file format, end-of-job file format), used by addCommon() to configure
the IgProfService so that igprof profiles can be dumped in the middle of the run.
"""
0295 profileOpts = self._options.profile.split(':')
0296 profilerStart = 1
0297 profilerInterval = 100
0298 profilerFormat = None
0299 profilerJobFormat = None
0300
0301 if len(profileOpts):
0302
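# the first ':'-separated element of --profile (the profile type label) is not used here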
0303 profileOpts.pop(0)
0304 if len(profileOpts):
0305 startEvent = profileOpts.pop(0)
0306 if not startEvent.isdigit():
0307 raise Exception("%s is not a number" % startEvent)
0308 profilerStart = int(startEvent)
0309 if len(profileOpts):
0310 eventInterval = profileOpts.pop(0)
0311 if not eventInterval.isdigit():
0312 raise Exception("%s is not a number" % eventInterval)
0313 profilerInterval = int(eventInterval)
0314 if len(profileOpts):
0315 profilerFormat = profileOpts.pop(0)
0316
0317
0318 if not profilerFormat:
0319 profilerFormat = "%s___%s___%%I.gz" % (
0320 self._options.evt_type.replace("_cfi", ""),
0321 hashlib.md5(
0322 (str(self._options.step) + str(self._options.pileup) + str(self._options.conditions) +
0323 str(self._options.datatier) + str(self._options.profileTypeLabel)).encode('utf-8')
0324 ).hexdigest()
0325 )
0326 if not profilerJobFormat and profilerFormat.endswith(".gz"):
0327 profilerJobFormat = profilerFormat.replace(".gz", "_EndOfJob.gz")
0328 elif not profilerJobFormat:
0329 profilerJobFormat = profilerFormat + "_EndOfJob.gz"
0330
0331 return (profilerStart,profilerInterval,profilerFormat,profilerJobFormat)
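# Illustrative example (the type label 'pp' and the file name are placeholders):
#   --profile pp:50:200:myprof.gz  -> start at event 50, dump every 200 events,
#   per-event dumps go to myprof.gz and the end-of-job dump to myprof_EndOfJob.gz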
0332
0333 def heapProfileOptions(self):
"""
Return (first event, event interval, per-event file format, end-of-job file format),
used by addCommon() to configure the JeProfService so that jemalloc heap profiles
can be dumped in the middle of the run.
"""
0339 profileOpts = []
0340 profilerStart = 1
0341 profilerInterval = 100
0342 profilerFormat = "jeprof_%s.heap"
0343 profilerJobFormat = None
0344
0345
0346 if not profilerJobFormat and profilerFormat.endswith(".heap"):
0347 profilerJobFormat = profilerFormat.replace(".heap", "_EndOfJob.heap")
0348 elif not profilerJobFormat:
0349 profilerJobFormat = profilerFormat + "_EndOfJob.heap"
0350
0351 return (profilerStart,profilerInterval,profilerFormat,profilerJobFormat)
0352
0353 def load(self,includeFile):
0354 includeFile = includeFile.replace('/','.')
0355 self.process.load(includeFile)
0356 return sys.modules[includeFile]
0357
0358 def loadAndRemember(self, includeFile):
"""helper routine to load and memorize imports"""
0360
0361
0362 includeFile = includeFile.replace('/','.')
0363 self.imports.append(includeFile)
0364 self.process.load(includeFile)
0365 return sys.modules[includeFile]
0366
0367 def executeAndRemember(self, command):
"""helper routine to execute a command on the process and remember it for the dumped configuration"""
0369 self.additionalCommands.append(command)
0370 if not command.strip().startswith("#"):
0371
0372 import re
0373 exec(re.sub(r"([^a-zA-Z_0-9]|^)(process)([^a-zA-Z_0-9])",r"\1self.process\3",command))
0374
0375
0376 def addCommon(self):
0377 if 'HARVESTING' in self.stepMap.keys() or 'ALCAHARVEST' in self.stepMap.keys():
0378 self.process.options.Rethrow = ['ProductNotFound']
0379 self.process.options.fileMode = 'FULLMERGE'
0380
0381 self.addedObjects.append(("","options"))
0382
0383 if self._options.lazy_download:
0384 self.process.AdaptorConfig = cms.Service("AdaptorConfig",
0385 stats = cms.untracked.bool(True),
0386 enable = cms.untracked.bool(True),
0387 cacheHint = cms.untracked.string("lazy-download"),
0388 readHint = cms.untracked.string("read-ahead-buffered")
0389 )
0390 self.addedObjects.append(("Setup lazy download","AdaptorConfig"))
0391
0392
0393
0394
0395 if self._options.profile:
0396 (start, interval, eventFormat, jobFormat)=self.profileOptions()
0397 self.process.IgProfService = cms.Service("IgProfService",
0398 reportFirstEvent = cms.untracked.int32(start),
0399 reportEventInterval = cms.untracked.int32(interval),
0400 reportToFileAtPostEvent = cms.untracked.string("| gzip -c > %s"%(eventFormat)),
0401 reportToFileAtPostEndJob = cms.untracked.string("| gzip -c > %s"%(jobFormat)))
0402 self.addedObjects.append(("Setup IGProf Service for profiling","IgProfService"))
0403
0404 if self._options.heap_profile:
0405 (start, interval, eventFormat, jobFormat)=self.heapProfileOptions()
0406 self.process.JeProfService = cms.Service("JeProfService",
0407 reportFirstEvent = cms.untracked.int32(start),
0408 reportEventInterval = cms.untracked.int32(interval),
0409 reportToFileAtPostEvent = cms.untracked.string("%s"%(eventFormat)),
0410 reportToFileAtPostEndJob = cms.untracked.string("%s"%(jobFormat)))
0411 self.addedObjects.append(("Setup JeProf Service for heap profiling","JeProfService"))
0412
0413 def addMaxEvents(self):
"""Here we decide how many events will be processed"""
0415 self.process.maxEvents.input = self._options.number
0416 if self._options.number_out:
0417 self.process.maxEvents.output = self._options.number_out
0418 self.addedObjects.append(("","maxEvents"))
0419
0420 def addSource(self):
0421 """Here the source is built. Priority: file, generator"""
0422 self.addedObjects.append(("Input source","source"))
0423
0424 def filesFromOption(self):
0425 def _datasetname_and_maxfiles(entry):
0426 if ":" in entry:
0427 return entry.split(":")
0428 else:
0429 return entry,None
0430
0431 for entry in self._options.filein.split(','):
0432 print("entry",entry)
0433 if entry.startswith("filelist:"):
0434 filesFromList(entry.split(":",1)[1],self.process.source)
0435 elif entry.startswith("dbs:") or entry.startswith("das:"):
0436 dataset_name,max_files = _datasetname_and_maxfiles(entry.split(":",1)[1])
0437 filesFromDASQuery('file dataset = %s'%(dataset_name),self._options.dasoption,self.process.source,max_files)
0438 else:
0439 self.process.source.fileNames.append(self._options.dirin+entry)
0440 if self._options.secondfilein:
0441 if not hasattr(self.process.source,"secondaryFileNames"):
raise Exception("--secondfilein not compatible with "+self._options.filetype+" input type")
0443 for entry in self._options.secondfilein.split(','):
0444 print("entry",entry)
0445 if entry.startswith("filelist:"):
0446 self.process.source.secondaryFileNames.extend((filesFromList(entry.split(":",1)[1]))[0])
0447 elif entry.startswith("dbs:") or entry.startswith("das:"):
0448 dataset_name,max_files = _datasetname_and_maxfiles(entry.split(":",1)[1])
0449 self.process.source.secondaryFileNames.extend((filesFromDASQuery('file dataset = %s'%(dataset_name),self._options.dasoption))[0])
0450 else:
0451 self.process.source.secondaryFileNames.append(self._options.dirin+entry)
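# Accepted --filein entries handled above (dataset and file names are placeholders):
#   filelist:myfiles.txt          read file names from a local text file
#   das:/MyPrimary/MyEra/AOD      query DAS for the dataset ("dbs:" behaves the same)
#   das:/MyPrimary/MyEra/AOD:50   as above, but keep at most 50 files
#   file1.root,file2.root         plain file names, prefixed with --dirin if given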
0452
0453 if self._options.filein or self._options.dasquery:
0454 if self._options.filetype == "EDM":
0455 self.process.source=cms.Source("PoolSource",
0456 fileNames = cms.untracked.vstring(),
0457 secondaryFileNames= cms.untracked.vstring())
0458 filesFromOption(self)
0459 if self._options.filetype == "EDM_RNTUPLE":
0460 self.process.source=cms.Source("RNTupleSource",
0461 fileNames = cms.untracked.vstring())
0462
0463 filesFromOption(self)
0464 elif self._options.filetype == "DAT":
0465 self.process.source=cms.Source("NewEventStreamFileReader",fileNames = cms.untracked.vstring())
0466 filesFromOption(self)
0467 elif self._options.filetype == "LHE":
0468 self.process.source=cms.Source("LHESource", fileNames = cms.untracked.vstring())
0469 if self._options.filein.startswith("lhe:"):
0470
0471 args=self._options.filein.split(':')
0472 article=args[1]
0473 print('LHE input from article ',article)
0474 location='/store/lhe/'
0475 import os
0476 textOfFiles=os.popen('cmsLHEtoEOSManager.py -l '+article)
0477 for line in textOfFiles:
0478 for fileName in [x for x in line.split() if '.lhe' in x]:
0479 self.process.source.fileNames.append(location+article+'/'+fileName)
0480
0481 if len(line)<2:
print('Failed to load the LHE files, please check and try again.')
0483 sys.exit(-1)
0484
0485 if len(self.process.source.fileNames)==0:
print('No LHE file names were found even though the article listing was not empty.')
0487 sys.exit(-1)
0488 if len(args)>2:
0489 self.process.source.skipEvents = cms.untracked.uint32(int(args[2]))
0490 else:
0491 filesFromOption(self)
0492
0493 elif self._options.filetype == "DQM":
0494 self.process.source=cms.Source("DQMRootSource",
0495 fileNames = cms.untracked.vstring())
0496 filesFromOption(self)
0497
0498 elif self._options.filetype == "DQMDAQ":
0499
0500 self.process.source=cms.Source("DQMStreamerReader")
0501
0502
0503 if ('HARVESTING' in self.stepMap.keys() or 'ALCAHARVEST' in self.stepMap.keys()) and (not self._options.filetype == "DQM"):
0504 self.process.source.processingMode = cms.untracked.string("RunsAndLumis")
0505
0506 if self._options.dasquery!='':
0507 self.process.source=cms.Source("PoolSource", fileNames = cms.untracked.vstring(),secondaryFileNames = cms.untracked.vstring())
0508 filesFromDASQuery(self._options.dasquery,self._options.dasoption,self.process.source)
0509
0510 if ('HARVESTING' in self.stepMap.keys() or 'ALCAHARVEST' in self.stepMap.keys()) and (not self._options.filetype == "DQM"):
0511 self.process.source.processingMode = cms.untracked.string("RunsAndLumis")
0512
0513
0514 if 'GEN' in self.stepMap.keys() and not self._options.filetype == "LHE":
0515 if self._options.inputCommands:
0516 self._options.inputCommands+=',drop LHEXMLStringProduct_*_*_*,'
0517 else:
0518 self._options.inputCommands='keep *, drop LHEXMLStringProduct_*_*_*,'
0519
0520 if self.process.source and self._options.inputCommands and not self._options.filetype == "LHE":
0521 if not hasattr(self.process.source,'inputCommands'): self.process.source.inputCommands=cms.untracked.vstring()
0522 for command in self._options.inputCommands.split(','):
0523
0524 command = command.strip()
0525 if command=='': continue
0526 self.process.source.inputCommands.append(command)
0527 if not self._options.dropDescendant:
0528 self.process.source.dropDescendantsOfDroppedBranches = cms.untracked.bool(False)
0529
0530 if self._options.lumiToProcess:
0531 import FWCore.PythonUtilities.LumiList as LumiList
0532 self.process.source.lumisToProcess = cms.untracked.VLuminosityBlockRange( LumiList.LumiList(self._options.lumiToProcess).getCMSSWString().split(',') )
0533
0534 if 'GEN' in self.stepMap.keys() or 'LHE' in self.stepMap or (not self._options.filein and hasattr(self._options, "evt_type")):
0535 if self.process.source is None:
0536 self.process.source=cms.Source("EmptySource")
0537
0538
0539 self.runsAndWeights=None
0540 if self._options.runsAndWeightsForMC or self._options.runsScenarioForMC :
0541 if not self._options.isMC :
0542 raise Exception("options --runsAndWeightsForMC and --runsScenarioForMC are only valid for MC")
0543 if self._options.runsAndWeightsForMC:
0544 self.runsAndWeights = eval(self._options.runsAndWeightsForMC)
0545 else:
0546 from Configuration.StandardSequences.RunsAndWeights import RunsAndWeights
0547 if isinstance(RunsAndWeights[self._options.runsScenarioForMC], str):
0548 __import__(RunsAndWeights[self._options.runsScenarioForMC])
0549 self.runsAndWeights = sys.modules[RunsAndWeights[self._options.runsScenarioForMC]].runProbabilityDistribution
0550 else:
0551 self.runsAndWeights = RunsAndWeights[self._options.runsScenarioForMC]
0552
0553 if self.runsAndWeights:
0554 import SimGeneral.Configuration.ThrowAndSetRandomRun as ThrowAndSetRandomRun
0555 ThrowAndSetRandomRun.throwAndSetRandomRun(self.process.source,self.runsAndWeights)
0556 self.additionalCommands.append('import SimGeneral.Configuration.ThrowAndSetRandomRun as ThrowAndSetRandomRun')
0557 self.additionalCommands.append('ThrowAndSetRandomRun.throwAndSetRandomRun(process.source,%s)'%(self.runsAndWeights))
0558
0559
0560 self.runsAndWeightsInt=None
0561 if self._options.runsAndWeightsForMCIntegerWeights or self._options.runsScenarioForMCIntegerWeights:
0562 if not self._options.isMC :
0563 raise Exception("options --runsAndWeightsForMCIntegerWeights and --runsScenarioForMCIntegerWeights are only valid for MC")
0564 if self._options.runsAndWeightsForMCIntegerWeights:
0565 self.runsAndWeightsInt = eval(self._options.runsAndWeightsForMCIntegerWeights)
0566 else:
0567 from Configuration.StandardSequences.RunsAndWeights import RunsAndWeights
0568 if isinstance(RunsAndWeights[self._options.runsScenarioForMCIntegerWeights], str):
0569 __import__(RunsAndWeights[self._options.runsScenarioForMCIntegerWeights])
0570 self.runsAndWeightsInt = sys.modules[RunsAndWeights[self._options.runsScenarioForMCIntegerWeights]].runProbabilityDistribution
0571 else:
0572 self.runsAndWeightsInt = RunsAndWeights[self._options.runsScenarioForMCIntegerWeights]
0573
0574 if self.runsAndWeightsInt:
0575 if not self._options.relval:
0576 raise Exception("--relval option required when using --runsAndWeightsInt")
0577 if 'DATAMIX' in self._options.step:
0578 from SimGeneral.Configuration.LumiToRun import lumi_to_run
0579 total_events, events_per_job = self._options.relval.split(',')
0580 lumi_to_run_mapping = lumi_to_run(self.runsAndWeightsInt, int(total_events), int(events_per_job))
0581 self.additionalCommands.append("process.source.firstLuminosityBlockForEachRun = cms.untracked.VLuminosityBlockID(*[cms.LuminosityBlockID(x,y) for x,y in " + str(lumi_to_run_mapping) + "])")
0582
0583 return
0584
0585 def addOutput(self):
0586 """ Add output module to the process """
0587 if self._options.outputDefinition:
0588 return self._addOutputUsingOutputDefinition()
0589 else:
0590 return self._addOutputUsingTier()
0591 def _addOutputUsingOutputDefinition(self):
0592 result=""
0593 if self._options.datatier:
0594 print("--datatier & --eventcontent options ignored")
0595
0596
0597 outList = eval(self._options.outputDefinition)
0598 for (id,outDefDict) in enumerate(outList):
outDefDictStr=str(outDefDict)
0600 if not isinstance(outDefDict,dict):
raise Exception("--output needs to be passed a list of dicts: "+self._options.outputDefinition+" is invalid")
0602
0603 theTier=anyOf(['t','tier','dataTier'],outDefDict)
0604
0605
0606 theStreamType=anyOf(['e','ec','eventContent','streamType'],outDefDict,theTier)
0607 theFilterName=anyOf(['f','ftN','filterName'],outDefDict,'')
0608 theSelectEvent=anyOf(['s','sE','selectEvents'],outDefDict,'')
0609 theModuleLabel=anyOf(['l','mL','moduleLabel'],outDefDict,'')
0610 theExtraOutputCommands=anyOf(['o','oC','outputCommands'],outDefDict,'')
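# Illustrative --output entry using the short keys recognised above (values are
# placeholders):
#   [{'t':'RECO', 'e':'RECO', 'f':'myFilter', 's':'mypath', 'fn':'myfile.root',
#     'o':['keep *_myProduct_*_*']}]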
0611
0612 if not theModuleLabel:
0613 tryNames=[theStreamType.replace(theTier.replace('-',''),'')+theTier.replace('-','')+'output',
0614 theStreamType.replace(theTier.replace('-',''),'')+theTier.replace('-','')+theFilterName+'output',
0615 theStreamType.replace(theTier.replace('-',''),'')+theTier.replace('-','')+theFilterName+theSelectEvent.split(',')[0].replace(':','for').replace(' ','')+'output'
0616 ]
0617 for name in tryNames:
0618 if not hasattr(self.process,name):
0619 theModuleLabel=name
0620 break
0621 if not theModuleLabel:
0622 raise Exception("cannot find a module label for specification: "+outDefDictStr)
0623 if id==0:
0624 defaultFileName=self._options.outfile_name
0625 else:
0626 defaultFileName=self._options.outfile_name.replace('.root','_in'+theTier+'.root')
0627 defaultFileName=defaultFileName.replace('.rntpl','_in'+theTier+'.rntpl')
0628
0629 theFileName=self._options.dirout+anyOf(['fn','fileName'],outDefDict,defaultFileName)
0630 if not theFileName.endswith('.root') and not theFileName.endswith('.rntpl'):
0631 theFileName+='.root'
0632
0633 if len(outDefDict):
0634 raise Exception("unused keys from --output options: "+','.join(outDefDict.keys()))
0635 if theStreamType=='DQMIO': theStreamType='DQM'
0636 if theStreamType=='ALL':
0637 theEventContent = cms.PSet(outputCommands = cms.untracked.vstring('keep *'))
0638 else:
0639 theEventContent = getattr(self.process, theStreamType+"EventContent")
0640
0641
0642 addAlCaSelects=False
0643 if theStreamType=='ALCARECO' and not theFilterName:
0644 theFilterName='StreamALCACombined'
0645 addAlCaSelects=True
0646
0647 output=self._createOutputModuleInAddOutput(tier=theTier, streamType = theStreamType, eventContent = theEventContent, fileName = theFileName, filterName = theFilterName, ignoreNano = True)
0648 if theSelectEvent:
0649 output.SelectEvents =cms.untracked.PSet(SelectEvents = cms.vstring(theSelectEvent))
0650 else:
0651 self._updateOutputSelectEvents(output, theStreamType)
0652
0653 if addAlCaSelects:
0654 if not hasattr(output,'SelectEvents'):
0655 output.SelectEvents=cms.untracked.PSet(SelectEvents=cms.vstring())
0656 for alca in self.AlCaPaths:
0657 output.SelectEvents.SelectEvents.extend(getattr(self.process,'OutALCARECO'+alca).SelectEvents.SelectEvents)
0658
0659
0660 if hasattr(self.process,theModuleLabel):
0661 raise Exception("the current process already has a module "+theModuleLabel+" defined")
0662
0663 outputModule = self._addOutputModuleAndPathToProcess(output, theModuleLabel)
0664
0665 self._inlineOutputEventContent(outputModule, theStreamType)
0666 if theExtraOutputCommands:
0667 if not isinstance(theExtraOutputCommands,list):
raise Exception("extra output commands in --output must be given as a list of strings")
0669 if hasattr(self.process,theStreamType+"EventContent"):
0670 self.executeAndRemember('process.%s.outputCommands.extend(%s)'%(theModuleLabel,theExtraOutputCommands))
0671 else:
0672 outputModule.outputCommands.extend(theExtraOutputCommands)
0673
0674 result+="\nprocess."+theModuleLabel+" = "+outputModule.dumpPython()
0675
0676
0677 return result
0678 def _addOutputUsingTier(self):
0679 result=""
0680 streamTypes=self._options.eventcontent.split(',')
0681 tiers=self._options.datatier.split(',')
0682 if not self._options.outputDefinition and len(streamTypes)!=len(tiers):
0683 raise Exception("number of event content arguments does not match number of datatier arguments")
0684
0685
0686 if self._options.step.split(',')[0].split(':')[0] == 'ALCA':
0687 return "\n"
0688
0689 for i,(streamType,tier) in enumerate(zip(streamTypes,tiers)):
0690 if streamType=='': continue
0691 if streamType == 'ALCARECO' and not 'ALCAPRODUCER' in self._options.step: continue
0692 if streamType=='DQMIO': streamType='DQM'
0693 streamQualifier=''
0694 if streamType[-1].isdigit():
0695
0696 streamQualifier = str(streamType[-1])
0697 streamType = streamType[:-1]
0698 eventContent=streamType
0699
0700 if streamType.startswith("NANOEDMAOD"):
0701 eventContent = eventContent.replace("NANOEDM","NANO")
0702 theEventContent = getattr(self.process, eventContent+"EventContent")
0703 if i==0:
0704 theFileName=self._options.outfile_name
0705 else:
0706 theFileName=self._options.outfile_name.replace('.root','_in'+streamType+'.root')
0707 theFileName=theFileName.replace('.rntpl','_in'+streamType+'.rntpl')
0708 theFilterName=self._options.filtername
0709 if streamType=='ALCARECO':
0710 theFilterName = 'StreamALCACombined'
0711 output = self._createOutputModuleInAddOutput(tier=tier, streamType=streamType, eventContent=theEventContent, fileName = theFileName, filterName = theFilterName, ignoreNano = False)
0712 self._updateOutputSelectEvents(output, streamType)
0713
0714 if "MINIAOD" in streamType:
0715
0716 from PhysicsTools.PatAlgos.slimming.miniAOD_tools import miniAOD_customizeOutput
0717 miniAOD_customizeOutput(output)
0718
0719 outputModuleName=streamType+streamQualifier+'output'
0720 outputModule = self._addOutputModuleAndPathToProcess(output, outputModuleName)
0721
0722 if self._options.outputCommands and streamType!='DQM':
0723 for evct in self._options.outputCommands.split(','):
0724 if not evct: continue
0725 self.executeAndRemember("process.%s.outputCommands.append('%s')"%(outputModuleName,evct.strip()))
0726
0727 self._inlineOutputEventContent(outputModule, streamType)
0728 result+="\nprocess."+outputModuleName+" = "+outputModule.dumpPython()
0729
0730 return result
0731 def _createOutputModuleInAddOutput(self, tier, streamType, eventContent, fileName, filterName, ignoreNano):
0732 CppType='PoolOutputModule'
0733 if self._options.timeoutOutput:
0734 CppType='TimeoutPoolOutputModule'
0735 if streamType=='DQM' and tier=='DQMIO':
0736 CppType='DQMRootOutputModule'
0737 fileName = fileName.replace('.rntpl', '.root')
0738 if not ignoreNano and "NANOAOD" in streamType : CppType='NanoAODOutputModule'
0739 if self._options.rntuple_out and CppType == 'PoolOutputModule':
0740 CppType='RNTupleOutputModule'
0741 if len(fileName) > 5 and fileName[-5:] == '.root':
0742 fileName = fileName.replace('.root', '.rntpl')
0743 output = cms.OutputModule(CppType,
0744 eventContent.clone(),
0745 fileName = cms.untracked.string(fileName),
0746 dataset = cms.untracked.PSet(
0747 dataTier = cms.untracked.string(tier),
0748 filterName = cms.untracked.string(filterName))
0749 )
0750 return output
0751 def _updateOutputSelectEvents(self, output, streamType):
0752 if hasattr(self.process,"generation_step") and streamType!='LHE':
0753 output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring('generation_step'))
0754 if hasattr(self.process,"filtering_step"):
0755 output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring('filtering_step'))
0756 def _inlineOutputEventContent(self, outputModule, streamType):
0757 if not self._options.inlineEventContent:
0758 tmpstreamType=streamType
0759 if "NANOEDM" in tmpstreamType :
0760 tmpstreamType=tmpstreamType.replace("NANOEDM","NANO")
0761 if hasattr(self.process,tmpstreamType+"EventContent"):
0762 def doNotInlineEventContent(instance,label = "process."+tmpstreamType+"EventContent.outputCommands"):
0763 return label
0764 outputModule.outputCommands.__dict__["dumpPython"] = doNotInlineEventContent
0765 def _addOutputModuleAndPathToProcess(self, output, name):
0766 setattr(self.process,name,output)
0767 outputModule=getattr(self.process,name)
0768 setattr(self.process,name+'_step',cms.EndPath(outputModule))
0769 path=getattr(self.process,name+'_step')
0770 self.schedule.append(path)
0771 return outputModule
0772
0773 def addStandardSequences(self):
0774 """
0775 Add selected standard sequences to the process
0776 """
0777
0778 if self._options.pileup:
0779 pileupSpec=self._options.pileup.split(',')[0]
0780
0781
0782 pileups_without_input=[defaultOptions.pileup,"Cosmics","default","HiMixNoPU",None]
0783 if self._options.pileup not in pileups_without_input and self._options.pileup_input==None:
message = "Pileup scenario requires input files. Please add an appropriate --pileup_input option"
0785 raise Exception(message)
0786
0787
0788 from Configuration.StandardSequences.Mixing import Mixing,defineMixing
0789 if not pileupSpec in Mixing and '.' not in pileupSpec and 'file:' not in pileupSpec:
message = pileupSpec+' is not a known mixing scenario:\n available are: '+'\n'.join(Mixing.keys())
0791 raise Exception(message)
0792
0793
0794 if '.' in pileupSpec:
0795 mixingDict={'file':pileupSpec}
0796 elif pileupSpec.startswith('file:'):
0797 mixingDict={'file':pileupSpec[5:]}
0798 else:
0799 import copy
0800 mixingDict=copy.copy(Mixing[pileupSpec])
0801 if len(self._options.pileup.split(','))>1:
0802 mixingDict.update(eval(self._options.pileup[self._options.pileup.find(',')+1:]))
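# Illustrative --pileup forms handled above (scenario and module names are placeholders):
#   --pileup NoPileUp
#   --pileup MyScenario,{...}           an optional dict after the comma updates the scenario settings
#   --pileup MyPkg.MySub.my_mix_cff     a spec containing '.' (or prefixed with 'file:') is loaded
#                                       directly as the mixing configuration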
0803
0804
0805 if 'file:' in pileupSpec:
0806
0807 self.process.load(mixingDict['file'])
0808 print("inlining mixing module configuration")
0809 self._options.inlineObjects+=',mix'
0810 else:
0811 self.loadAndRemember(mixingDict['file'])
0812
0813 mixingDict.pop('file')
0814 if not "DATAMIX" in self.stepMap.keys():
0815 if self._options.pileup_input:
0816 if self._options.pileup_input.startswith('dbs:') or self._options.pileup_input.startswith('das:'):
0817 mixingDict['F']=filesFromDASQuery('file dataset = %s'%(self._options.pileup_input[4:],),self._options.pileup_dasoption)[0]
0818 elif self._options.pileup_input.startswith("filelist:"):
0819 mixingDict['F']=(filesFromList(self._options.pileup_input[9:]))[0]
0820 else:
0821 mixingDict['F']=self._options.pileup_input.split(',')
0822 specialization=defineMixing(mixingDict)
0823 for command in specialization:
0824 self.executeAndRemember(command)
0825 if len(mixingDict)!=0:
raise Exception('unused mixing specification: '+str(list(mixingDict.keys())))
0827
0828
0829
0830 try:
0831 if len(self.stepMap):
0832 self.loadAndRemember(self.GeometryCFF)
0833 if (self.GeometryCFF == 'Configuration/StandardSequences/GeometryRecoDB_cff' and not self.geometryDBLabel):
0834 print("Warning: The default GeometryRecoDB_cff is being used; however, the DB geometry is not applied. You may need to verify your cmsDriver.")
0835 if ('SIM' in self.stepMap or 'reSIM' in self.stepMap) and not self._options.fast:
0836 self.loadAndRemember(self.SimGeometryCFF)
0837 if self.geometryDBLabel:
0838 self.executeAndRemember('if hasattr(process, "XMLFromDBSource"): process.XMLFromDBSource.label="%s"'%(self.geometryDBLabel))
0839 self.executeAndRemember('if hasattr(process, "DDDetectorESProducerFromDB"): process.DDDetectorESProducerFromDB.label="%s"'%(self.geometryDBLabel))
0840
0841 except ImportError:
0842 print("Geometry option",self._options.geometry,"unknown.")
0843 raise
0844
0845 if len(self.stepMap):
0846 self.loadAndRemember(self.magFieldCFF)
0847
0848 for stepName in self.stepKeys:
0849 stepSpec = self.stepMap[stepName]
0850 print("Step:", stepName,"Spec:",stepSpec)
0851 if stepName.startswith('re'):
0852
0853 if stepName[2:] not in self._options.donotDropOnInput:
0854 self._options.inputEventContent='%s,%s'%(stepName.upper(),self._options.inputEventContent)
0855 stepName=stepName[2:]
0856 if stepSpec=="":
0857 getattr(self,"prepare_"+stepName)(stepSpec = getattr(self,stepName+"DefaultSeq"))
0858 elif isinstance(stepSpec, list):
0859 getattr(self,"prepare_"+stepName)(stepSpec = '+'.join(stepSpec))
0860 elif isinstance(stepSpec, tuple):
0861 getattr(self,"prepare_"+stepName)(stepSpec = ','.join([stepSpec[1],'+'.join(stepSpec[0])]))
0862 else:
0863 raise ValueError("Invalid step definition")
0864
0865 if self._options.restoreRNDSeeds!=False:
0866
0867 if self._options.restoreRNDSeeds==True:
0868 self.executeAndRemember('process.RandomNumberGeneratorService.restoreStateLabel=cms.untracked.string("randomEngineStateProducer")')
0869 else:
0870 self.executeAndRemember('process.RandomNumberGeneratorService.restoreStateTag=cms.untracked.InputTag("randomEngineStateProducer","","%s")'%(self._options.restoreRNDSeeds))
0871 if self._options.inputEventContent or self._options.inputCommands:
0872 if self._options.inputCommands:
0873 self._options.inputCommands+='keep *_randomEngineStateProducer_*_*,'
0874 else:
0875 self._options.inputCommands='keep *_randomEngineStateProducer_*_*,'
0876
0877
0878 def completeInputCommand(self):
0879 if self._options.inputEventContent:
0880 import copy
0881 def dropSecondDropStar(iec):
0882
0883 count=0
0884 for item in iec:
0885 if item=='drop *':
0886 if count!=0:
0887 iec.remove(item)
0888 count+=1
0889
0890
0891 if not hasattr(self.process.source,'inputCommands'): self.process.source.inputCommands=cms.untracked.vstring()
0892 for evct in self._options.inputEventContent.split(','):
0893 if evct=='': continue
0894 theEventContent = getattr(self.process, evct+"EventContent")
0895 if hasattr(theEventContent,'outputCommands'):
0896 self.process.source.inputCommands.extend(copy.copy(theEventContent.outputCommands))
0897 if hasattr(theEventContent,'inputCommands'):
0898 self.process.source.inputCommands.extend(copy.copy(theEventContent.inputCommands))
0899
0900 dropSecondDropStar(self.process.source.inputCommands)
0901
0902 if not self._options.dropDescendant:
0903 self.process.source.dropDescendantsOfDroppedBranches = cms.untracked.bool(False)
0904
0905
0906 return
0907
0908 def addConditions(self):
0909 """Add conditions to the process"""
0910 if not self._options.conditions: return
0911
0912 if 'FrontierConditions_GlobalTag' in self._options.conditions:
print('Using FrontierConditions_GlobalTag in --conditions is no longer necessary and will be deprecated soon. Please update your command line.')
0914 self._options.conditions = self._options.conditions.replace("FrontierConditions_GlobalTag,",'')
0915
0916 self.loadAndRemember(self.ConditionsDefaultCFF)
0917 from Configuration.AlCa.GlobalTag import GlobalTag
0918 self.process.GlobalTag = GlobalTag(self.process.GlobalTag, self._options.conditions, self._options.custom_conditions)
0919 self.additionalCommands.append('from Configuration.AlCa.GlobalTag import GlobalTag')
0920 self.additionalCommands.append('process.GlobalTag = GlobalTag(process.GlobalTag, %s, %s)' % (repr(self._options.conditions), repr(self._options.custom_conditions)))
0921
0922
0923 def addCustomise(self,unsch=0):
0924 """Include the customise code """
0925
0926 custOpt=[]
0927 if unsch==0:
0928 for c in self._options.customisation_file:
0929 custOpt.extend(c.split(","))
0930 else:
0931 for c in self._options.customisation_file_unsch:
0932 custOpt.extend(c.split(","))
0933
0934 custMap=DictTypes.SortedKeysDict()
0935 for opt in custOpt:
0936 if opt=='': continue
0937 if opt.count('.')>1:
raise Exception("more than one '.' in the customisation specification: "+opt)
0939 fileName=opt.split('.')[0]
0940 if opt.count('.')==0: rest='customise'
0941 else:
0942 rest=opt.split('.')[1]
0943 if rest=='py': rest='customise'
0944
0945 if fileName in custMap:
0946 custMap[fileName].extend(rest.split('+'))
0947 else:
0948 custMap[fileName]=rest.split('+')
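# Illustrative --customise forms parsed above (package paths and function names are
# placeholders):
#   --customise MyPkg/MySub/myCustoms.customiseA+customiseB
#   --customise MyPkg/MySub/myCustoms      (defaults to a function called 'customise')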
0949
0950 if len(custMap)==0:
0951 final_snippet='\n'
0952 else:
0953 final_snippet='\n# customisation of the process.\n'
0954
0955 allFcn=[]
0956 for opt in custMap:
0957 allFcn.extend(custMap[opt])
0958 for fcn in allFcn:
0959 if allFcn.count(fcn)!=1:
raise Exception("cannot specify "+fcn+" twice as a customisation method")
0961
0962 for f in custMap:
0963
0964 packageName = f.replace(".py","").replace("/",".")
0965 __import__(packageName)
0966 package = sys.modules[packageName]
0967
0968
0969 customiseFile = re.sub(r'\.pyc$', '.py', package.__file__)
0970
0971 final_snippet+='\n# Automatic addition of the customisation function from '+packageName+'\n'
0972 if self._options.inline_custom:
for line in open(customiseFile,'r'):
0974 if "import FWCore.ParameterSet.Config" in line:
0975 continue
0976 final_snippet += line
0977 else:
0978 final_snippet += 'from %s import %s \n'%(packageName,','.join(custMap[f]))
0979 for fcn in custMap[f]:
0980 print("customising the process with",fcn,"from",f)
0981 if not hasattr(package,fcn):
0982
0983 raise Exception("config "+f+" has no function "+fcn)
0984
0985 self.process=getattr(package,fcn)(self.process)
0986
0987 final_snippet += "\n#call to customisation function "+fcn+" imported from "+packageName
0988 final_snippet += "\nprocess = %s(process)\n"%(fcn,)
0989
0990 if len(custMap)!=0:
0991 final_snippet += '\n# End of customisation functions\n'
0992
0993
0994 return final_snippet
0995
0996 def addCustomiseCmdLine(self):
0997 final_snippet='\n# Customisation from command line\n'
0998 included_already = set()
0999 if self._customise_coms:
1000 for com in self._customise_coms:
1001 com=com.lstrip()
1002 if com in included_already: continue
1003 self.executeAndRemember(com)
1004 final_snippet +='\n'+com
1005 included_already.add(com)
1006
1007 if self._options.customise_commands:
1008 import string
1009 for com in self._options.customise_commands.split('\\n'):
1010 com=com.lstrip()
1011 if com in included_already: continue
1012 self.executeAndRemember(com)
1013 final_snippet +='\n'+com
1014 included_already.add(com)
1015
1016 return final_snippet
1017
1018
1019
1020
1021
1022 def define_Configs(self):
1023 if len(self.stepMap):
1024 self.loadAndRemember('Configuration/StandardSequences/Services_cff')
1025 if self._options.particleTable not in defaultOptions.particleTableList:
print('Invalid particle table provided. Options are:')
print(defaultOptions.particleTableList)
1028 sys.exit(-1)
1029 else:
1030 if len(self.stepMap):
1031 self.loadAndRemember('SimGeneral.HepPDTESSource.'+self._options.particleTable+'_cfi')
1032
1033 self.loadAndRemember('FWCore/MessageService/MessageLogger_cfi')
1034
1035 self.ALCADefaultCFF="Configuration/StandardSequences/AlCaRecoStreams_cff"
1036 self.GENDefaultCFF="Configuration/StandardSequences/Generator_cff"
1037 self.SIMDefaultCFF="Configuration/StandardSequences/Sim_cff"
1038 self.DIGIDefaultCFF="Configuration/StandardSequences/Digi_cff"
1039 self.DIGI2RAWDefaultCFF="Configuration/StandardSequences/DigiToRaw_cff"
1040 self.L1EMDefaultCFF='Configuration/StandardSequences/SimL1Emulator_cff'
1041 self.L1P2GTDefaultCFF = 'Configuration/StandardSequences/SimPhase2L1GlobalTriggerEmulator_cff'
1042 self.L1MENUDefaultCFF="Configuration/StandardSequences/L1TriggerDefaultMenu_cff"
1043 self.HLTDefaultCFF="Configuration/StandardSequences/HLTtable_cff"
1044 self.RAW2DIGIDefaultCFF="Configuration/StandardSequences/RawToDigi_Data_cff"
1045 if self._options.isRepacked: self.RAW2DIGIDefaultCFF="Configuration/StandardSequences/RawToDigi_DataMapper_cff"
1046 self.L1RecoDefaultCFF="Configuration/StandardSequences/L1Reco_cff"
1047 self.L1TrackTriggerDefaultCFF="Configuration/StandardSequences/L1TrackTrigger_cff"
1048 self.RECODefaultCFF="Configuration/StandardSequences/Reconstruction_Data_cff"
1049 self.RECOSIMDefaultCFF="Configuration/StandardSequences/RecoSim_cff"
1050 self.PATDefaultCFF="Configuration/StandardSequences/PAT_cff"
1051 self.NANODefaultCFF="PhysicsTools/NanoAOD/nano_cff"
1052 self.SKIMDefaultCFF="Configuration/StandardSequences/Skims_cff"
1053 self.POSTRECODefaultCFF="Configuration/StandardSequences/PostRecoGenerator_cff"
1054 self.VALIDATIONDefaultCFF="Configuration/StandardSequences/Validation_cff"
1055 self.L1HwValDefaultCFF = "Configuration/StandardSequences/L1HwVal_cff"
1056 self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOffline_cff"
1057 self.HARVESTINGDefaultCFF="Configuration/StandardSequences/Harvesting_cff"
1058 self.ALCAHARVESTDefaultCFF="Configuration/StandardSequences/AlCaHarvesting_cff"
1059 self.ENDJOBDefaultCFF="Configuration/StandardSequences/EndOfProcess_cff"
1060 self.ConditionsDefaultCFF = "Configuration/StandardSequences/FrontierConditions_GlobalTag_cff"
1061 self.CFWRITERDefaultCFF = "Configuration/StandardSequences/CrossingFrameWriter_cff"
1062 self.REPACKDefaultCFF="Configuration/StandardSequences/DigiToRaw_Repack_cff"
1063
1064 if "DATAMIX" in self.stepMap.keys():
1065 self.DATAMIXDefaultCFF="Configuration/StandardSequences/DataMixer"+self._options.datamix+"_cff"
1066 self.DIGIDefaultCFF="Configuration/StandardSequences/DigiDM_cff"
1067 self.DIGI2RAWDefaultCFF="Configuration/StandardSequences/DigiToRawDM_cff"
1068 self.L1EMDefaultCFF='Configuration/StandardSequences/SimL1EmulatorDM_cff'
1069
1070 self.ALCADefaultSeq=None
1071 self.LHEDefaultSeq='externalLHEProducer'
1072 self.GENDefaultSeq='pgen'
1073 self.SIMDefaultSeq='psim'
1074 self.DIGIDefaultSeq='pdigi'
1075 self.DATAMIXDefaultSeq=None
1076 self.DIGI2RAWDefaultSeq='DigiToRaw'
1077 self.HLTDefaultSeq='GRun'
1078 self.L1DefaultSeq=None
1079 self.L1P2GTDefaultSeq=None
1080 self.L1REPACKDefaultSeq='GT'
1081 self.HARVESTINGDefaultSeq=None
1082 self.ALCAHARVESTDefaultSeq=None
1083 self.CFWRITERDefaultSeq=None
1084 self.RAW2DIGIDefaultSeq='RawToDigi'
1085 self.L1RecoDefaultSeq='L1Reco'
1086 self.L1TrackTriggerDefaultSeq='L1TrackTrigger'
1087 if self._options.fast or ('RAW2DIGI' in self.stepMap and 'RECO' in self.stepMap):
1088 self.RECODefaultSeq='reconstruction'
1089 else:
1090 self.RECODefaultSeq='reconstruction_fromRECO'
1091 self.RECOSIMDefaultSeq='recosim'
1092 self.POSTRECODefaultSeq=None
1093 self.L1HwValDefaultSeq='L1HwVal'
1094 self.DQMDefaultSeq='DQMOffline'
1095 self.VALIDATIONDefaultSeq=''
1096 self.ENDJOBDefaultSeq='endOfProcess'
1097 self.REPACKDefaultSeq='DigiToRawRepack'
1098 self.PATDefaultSeq='miniAOD'
1099 self.PATGENDefaultSeq='miniGEN'
1100
1101 self.NANODefaultSeq='nanoSequence'
1102 self.NANODefaultCustom='nanoAOD_customizeCommon'
1103
1104 self.EVTCONTDefaultCFF="Configuration/EventContent/EventContent_cff"
1105
1106 if not self._options.beamspot:
1107
1108
1109 if 'GEN' in self.stepMap and not 'pgen_genonly' in self.stepMap['GEN']:
1110 raise Exception("Missing \'--beamspot\' option in the GEN step of the cmsDriver command!")
1111 else:
1112 self._options.beamspot=VtxSmearedDefaultKey
1113
1114
1115 if self._options.isMC==True:
1116 self.RAW2DIGIDefaultCFF="Configuration/StandardSequences/RawToDigi_cff"
1117 self.RECODefaultCFF="Configuration/StandardSequences/Reconstruction_cff"
1118 self.PATDefaultCFF="Configuration/StandardSequences/PATMC_cff"
1119 self.PATGENDefaultCFF="Configuration/StandardSequences/PATGEN_cff"
1120 self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineMC_cff"
1121 self.ALCADefaultCFF="Configuration/StandardSequences/AlCaRecoStreamsMC_cff"
1122 self.NANODefaultSeq='nanoSequenceMC'
1123 else:
1124 self._options.beamspot = None
1125
1126
1127 if 'reGEN' in self.stepMap:
1128 self.GENDefaultSeq='fixGenInfo'
1129
1130 if self._options.scenario=='cosmics':
1131 self._options.pileup='Cosmics'
1132 self.DIGIDefaultCFF="Configuration/StandardSequences/DigiCosmics_cff"
1133 self.RECODefaultCFF="Configuration/StandardSequences/ReconstructionCosmics_cff"
1134 self.SKIMDefaultCFF="Configuration/StandardSequences/SkimsCosmics_cff"
1135 self.EVTCONTDefaultCFF="Configuration/EventContent/EventContentCosmics_cff"
1136 self.VALIDATIONDefaultCFF="Configuration/StandardSequences/ValidationCosmics_cff"
1137 self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineCosmics_cff"
1138 if self._options.isMC==True:
1139 self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineCosmicsMC_cff"
1140 self.HARVESTINGDefaultCFF="Configuration/StandardSequences/HarvestingCosmics_cff"
1141 self.RECODefaultSeq='reconstructionCosmics'
1142 self.DQMDefaultSeq='DQMOfflineCosmics'
1143
1144 if self._options.scenario=='HeavyIons':
1145 self.HLTDefaultSeq = 'HIon'
1146 self.VALIDATIONDefaultCFF="Configuration/StandardSequences/ValidationHeavyIons_cff"
1147 self.VALIDATIONDefaultSeq=''
1148 self.EVTCONTDefaultCFF="Configuration/EventContent/EventContentHeavyIons_cff"
1149 self.RECODefaultCFF="Configuration/StandardSequences/Reconstruction_cff"
1150 self.RECODefaultSeq='reconstruction'
1151 self.ALCADefaultCFF = "Configuration/StandardSequences/AlCaRecoStreamsHeavyIons_cff"
1152 self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineHeavyIons_cff"
1153 self.DQMDefaultSeq='DQMOfflineHeavyIons'
1154 self.SKIMDefaultCFF="Configuration/StandardSequences/SkimsHeavyIons_cff"
1155 self.HARVESTINGDefaultCFF="Configuration/StandardSequences/HarvestingHeavyIons_cff"
1156 if self._options.isMC==True:
1157 self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineHeavyIonsMC_cff"
1158
1159
1160 self.RAW2RECODefaultSeq=','.join([self.RAW2DIGIDefaultSeq,self.RECODefaultSeq])
1161
1162 self.USERDefaultSeq='user'
1163 self.USERDefaultCFF=None
1164
1165
1166 self.magFieldCFF = 'Configuration/StandardSequences/MagneticField_'+self._options.magField.replace('.','')+'_cff'
1167 self.magFieldCFF = self.magFieldCFF.replace("__",'_')
1168
1169
1170 self.GeometryCFF='Configuration/StandardSequences/GeometryRecoDB_cff'
1171 self.geometryDBLabel=None
1172 simGeometry=''
1173 if self._options.fast:
1174 if 'start' in self._options.conditions.lower():
1175 self.GeometryCFF='FastSimulation/Configuration/Geometries_START_cff'
1176 else:
1177 self.GeometryCFF='FastSimulation/Configuration/Geometries_MC_cff'
1178 else:
1179 def inGeometryKeys(opt):
1180 from Configuration.StandardSequences.GeometryConf import GeometryConf
1181 if opt in GeometryConf:
1182 return GeometryConf[opt]
1183 else:
1184 if (opt=='SimDB' or opt.startswith('DB:')):
1185 return opt
1186 else:
1187 raise Exception("Geometry "+opt+" does not exist!")
1188
1189 geoms=self._options.geometry.split(',')
1190 if len(geoms)==1: geoms=inGeometryKeys(geoms[0]).split(',')
1191 if len(geoms)==2:
1192
1193 if '/' in geoms[1] or '_cff' in geoms[1]:
1194 self.GeometryCFF=geoms[1]
1195 else:
1196 self.GeometryCFF='Configuration/Geometry/Geometry'+geoms[1]+'_cff'
1197
1198 if (geoms[0].startswith('DB:')):
1199 self.SimGeometryCFF='Configuration/StandardSequences/GeometrySimDB_cff'
1200 self.geometryDBLabel=geoms[0][3:]
1201 print("with DB:")
1202 else:
1203 if '/' in geoms[0] or '_cff' in geoms[0]:
1204 self.SimGeometryCFF=geoms[0]
1205 else:
1206 simGeometry=geoms[0]
1207 if self._options.gflash==True:
1208 self.SimGeometryCFF='Configuration/Geometry/Geometry'+geoms[0]+'GFlash_cff'
1209 else:
1210 self.SimGeometryCFF='Configuration/Geometry/Geometry'+geoms[0]+'_cff'
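# Illustrative --geometry forms handled above (names are placeholders):
#   --geometry MyGeomKey                a key of Configuration.StandardSequences.GeometryConf,
#                                       expanded into a "simGeometry,recoGeometry" pair
#   --geometry DB:MyLabel,MyRecoGeom    sim geometry from the DB with label 'MyLabel', reco
#                                       geometry from Configuration/Geometry/GeometryMyRecoGeom_cff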
1211
1212
1213 if simGeometry not in defaultOptions.geometryExtendedOptions:
1214 self.SIMDefaultCFF="Configuration/StandardSequences/SimIdeal_cff"
1215
1216 if self._options.scenario=='nocoll' or self._options.scenario=='cosmics':
1217 self.SIMDefaultCFF="Configuration/StandardSequences/SimNOBEAM_cff"
1218 self._options.beamspot='NoSmear'
1219
1220
1221 if self._options.fast:
1222 self.SIMDefaultCFF = 'FastSimulation.Configuration.SimIdeal_cff'
1223 self.RECODefaultCFF= 'FastSimulation.Configuration.Reconstruction_AftMix_cff'
1224 self.RECOBEFMIXDefaultCFF = 'FastSimulation.Configuration.Reconstruction_BefMix_cff'
1225 self.RECOBEFMIXDefaultSeq = 'reconstruction_befmix'
1226 self.NANODefaultSeq = 'nanoSequenceFS'
1227 self.DQMOFFLINEDefaultCFF="DQMOffline.Configuration.DQMOfflineFS_cff"
1228
1229
1230 if self._options.pileup=='default':
1231 from Configuration.StandardSequences.Mixing import MixingDefaultKey
1232 self._options.pileup=MixingDefaultKey
1233
1234
1235
1236 if self._options.isData:
1237 self._options.pileup=None
1238
1239
1240 self.REDIGIDefaultSeq=self.DIGIDefaultSeq
1241
1242
1243 def addExtraStream(self, name, stream, workflow='full'):
1244
1245 if self._options.rntuple_out:
1246 extension = '.rntpl'
1247 output = cms.OutputModule('RNTupleOutputModule')
1248 else:
1249 extension = '.root'
1250 output = cms.OutputModule("PoolOutputModule")
1251 if stream.selectEvents.parameters_().__len__()!=0:
1252 output.SelectEvents = stream.selectEvents
1253 else:
1254 output.SelectEvents = cms.untracked.PSet()
1255 output.SelectEvents.SelectEvents=cms.vstring()
1256 if isinstance(stream.paths,tuple):
1257 for path in stream.paths:
1258 output.SelectEvents.SelectEvents.append(path.label())
1259 else:
1260 output.SelectEvents.SelectEvents.append(stream.paths.label())
1261
1262
1263
1264 if isinstance(stream.content,str):
1265 evtPset=getattr(self.process,stream.content)
1266 for p in evtPset.parameters_():
1267 setattr(output,p,getattr(evtPset,p))
1268 if not self._options.inlineEventContent:
1269 def doNotInlineEventContent(instance,label = "process."+stream.content+".outputCommands"):
1270 return label
1271 output.outputCommands.__dict__["dumpPython"] = doNotInlineEventContent
1272 else:
1273 output.outputCommands = stream.content
1274
1275 output.fileName = cms.untracked.string(self._options.dirout+stream.name+extension)
1276
1277 output.dataset = cms.untracked.PSet( dataTier = stream.dataTier,
1278 filterName = cms.untracked.string(stream.name))
1279
1280 if self._options.filtername:
1281 output.dataset.filterName= cms.untracked.string(self._options.filtername+"_"+stream.name)
1282
1283
1284 output.eventAutoFlushCompressedSize=cms.untracked.int32(5*1024*1024)
1285
if workflow in ("producers","full"):
1287 if isinstance(stream.paths,tuple):
1288 for path in stream.paths:
1289 self.schedule.append(path)
1290 else:
1291 self.schedule.append(stream.paths)
1292
1293
1294
1295 if (not self._options.relval) and workflow in ("full","output"):
1296 self.additionalOutputs[name] = output
1297 setattr(self.process,name,output)
1298
1299 if workflow == 'output':
1300
1301 filterList = output.SelectEvents.SelectEvents
1302 for i, filter in enumerate(filterList):
1303 filterList[i] = filter+":"+self._options.triggerResultsProcess
1304
1305 return output
1306
1307
1308
1309
1310
1311
1312 def loadDefaultOrSpecifiedCFF(self, stepSpec, defaultCFF, defaultSEQ=''):
1313 _dotsplit = stepSpec.split('.')
1314 if ( len(_dotsplit)==1 ):
1315 if '/' in _dotsplit[0]:
1316 _sequence = defaultSEQ if defaultSEQ else stepSpec
1317 _cff = _dotsplit[0]
1318 else:
1319 _sequence = stepSpec
1320 _cff = defaultCFF
1321 elif ( len(_dotsplit)==2 ):
1322 _cff,_sequence = _dotsplit
1323 else:
print("sub-sequence configuration must be of the form dir/subdir/cff.a+b+c or cff.a")
print(stepSpec,"not recognized")
raise ValueError("invalid step specification: "+stepSpec)
1327 l=self.loadAndRemember(_cff)
1328 return l,_sequence,_cff
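# Illustrative stepSpec forms accepted above (names are placeholders):
#   "mySequence"                     -> (defaultCFF, "mySequence")
#   "MyPkg/MySub/my_cff"             -> (that cff, defaultSEQ if given, else the spec itself)
#   "MyPkg/MySub/my_cff.seqA+seqB"   -> (that cff, "seqA+seqB")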
1329
1330 def scheduleSequence(self,seq,prefix,what='Path'):
1331 if '*' in seq:
1332
1333 for i,s in enumerate(seq.split('*')):
1334 if i==0:
1335 setattr(self.process,prefix,getattr(cms,what)( getattr(self.process, s) ))
1336 else:
1337 p=getattr(self.process,prefix)
1338 tmp = getattr(self.process, s)
1339 if isinstance(tmp, cms.Task):
1340 p.associate(tmp)
1341 else:
1342 p+=tmp
1343 self.schedule.append(getattr(self.process,prefix))
1344 return
1345 else:
1346
1347 if not '+' in seq:
1348 if self.nextScheduleIsConditional:
1349 self.conditionalPaths.append(prefix)
1350 setattr(self.process,prefix,getattr(cms,what)( getattr(self.process, seq) ))
1351 self.schedule.append(getattr(self.process,prefix))
1352 else:
1353 for i,s in enumerate(seq.split('+')):
1354 sn=prefix+'%d'%(i)
1355 setattr(self.process,sn,getattr(cms,what)( getattr(self.process, s) ))
1356 self.schedule.append(getattr(self.process,sn))
1357 return
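# Note on the separators handled above: 'a*b' builds a single Path containing all the
# listed sequences (cms.Task objects are associated rather than added), while 'a+b'
# creates one separate Path per sequence, named prefix0, prefix1, ...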
1358
1359 def scheduleSequenceAtEnd(self,seq,prefix):
1360 self.scheduleSequence(seq,prefix,what='EndPath')
1361 return
1362
1363 def prepare_ALCAPRODUCER(self, stepSpec = None):
1364 self.prepare_ALCA(stepSpec, workflow = "producers")
1365
1366 def prepare_ALCAOUTPUT(self, stepSpec = None):
1367 self.prepare_ALCA(stepSpec, workflow = "output")
1368
1369 def prepare_ALCA(self, stepSpec = None, workflow = 'full'):
1370 """ Enrich the process with alca streams """
1371 alcaConfig,sequence,_=self.loadDefaultOrSpecifiedCFF(stepSpec,self.ALCADefaultCFF)
1372
1373 MAXLEN=31
1374
1375 alcaList = sequence.split("+")
1376 for alca in alcaList:
1377 if (len(alca)>MAXLEN):
raise Exception("The ALCARECO name "+str(alca)+" ("+str(len(alca))+" characters) cannot be accepted: it exceeds the DBS limit of "+str(MAXLEN)+" characters for the names of ALCARECO producers")
1379
1380 maxLevel=0
1381 from Configuration.AlCa.autoAlca import autoAlca, AlCaNoConcurrentLumis
1382
1383 self.expandMapping(alcaList,autoAlca)
1384 self.AlCaPaths=[]
1385 for name in alcaConfig.__dict__:
1386 alcastream = getattr(alcaConfig,name)
1387 shortName = name.replace('ALCARECOStream','')
1388 if shortName in alcaList and isinstance(alcastream,cms.FilteredStream):
1389 if shortName in AlCaNoConcurrentLumis:
1390 print("Setting numberOfConcurrentLuminosityBlocks=1 because of AlCa sequence {}".format(shortName))
1391 self._options.nConcurrentLumis = 1
1392 self._options.nConcurrentIOVs = 1
1393 output = self.addExtraStream(name,alcastream, workflow = workflow)
1394 self.executeAndRemember('process.ALCARECOEventContent.outputCommands.extend(process.OutALCARECO'+shortName+'_noDrop.outputCommands)')
1395 self.AlCaPaths.append(shortName)
1396 if 'DQM' in alcaList:
1397 if not self._options.inlineEventContent and hasattr(self.process,name):
1398 self.executeAndRemember('process.' + name + '.outputCommands.append("keep *_MEtoEDMConverter_*_*")')
1399 else:
1400 output.outputCommands.append("keep *_MEtoEDMConverter_*_*")
1401
1402
1403 if self._options.hltProcess or 'HLT' in self.stepMap:
1404 if isinstance(alcastream.paths,tuple):
1405 for path in alcastream.paths:
1406 self.renameHLTprocessInSequence(path.label())
1407 else:
1408 self.renameHLTprocessInSequence(alcastream.paths.label())
1409
1410 for i in range(alcaList.count(shortName)):
1411 alcaList.remove(shortName)
1412
1413
1414 elif name == 'pathALCARECODQM' and 'DQM' in alcaList:
1415 path = getattr(alcaConfig,name)
1416 self.schedule.append(path)
1417 alcaList.remove('DQM')
1418
1419 if isinstance(alcastream,cms.Path):
1420
1421 self.blacklist_paths.append(alcastream)
1422
1423
1424 if len(alcaList) != 0:
1425 available=[]
1426 for name in alcaConfig.__dict__:
1427 alcastream = getattr(alcaConfig,name)
1428 if isinstance(alcastream,cms.FilteredStream):
1429 available.append(name.replace('ALCARECOStream',''))
1430 print("The following alcas could not be found "+str(alcaList))
1431 print("available ",available)
1432
1433 raise Exception("The following alcas could not be found "+str(alcaList))
1434
1435 def prepare_LHE(self, stepSpec = None):
1436
1437
1438 loadFragment = self._options.evt_type.replace('.py','',).replace('.','_').replace('python/','').replace('/','.')
1439 print("Loading lhe fragment from",loadFragment)
1440 __import__(loadFragment)
1441 self.process.load(loadFragment)
1442
1443 self._options.inlineObjects+=','+stepSpec
1444
1445 getattr(self.process,stepSpec).nEvents = self._options.number
1446
1447
1448 self.process.lhe_step = cms.Path( getattr( self.process,stepSpec) )
1449 self.excludedPaths.append("lhe_step")
1450 self.schedule.append( self.process.lhe_step )
1451
1452 def prepare_GEN(self, stepSpec = None):
1453 """ load the fragment of generator configuration """
1454 loadFailure=False
1455
1456
1457
1458 loadFragment = self._options.evt_type.replace('.py','',).replace('.','_').replace('python/','')
1459
1460 if not '/' in loadFragment:
1461 loadFragment='Configuration.Generator.'+loadFragment
1462 else:
1463 loadFragment=loadFragment.replace('/','.')
1464 try:
1465 print("Loading generator fragment from",loadFragment)
1466 __import__(loadFragment)
1467 except:
1468 loadFailure=True
1469
1470 if not (self._options.filein or self._options.dasquery):
1471 raise Exception("Neither gen fragment of input files provided: this is an inconsistent GEN step configuration")
1472
1473 if not loadFailure:
1474 from Configuration.Generator.concurrentLumisDisable import noConcurrentLumiGenerators
1475
1476 generatorModule=sys.modules[loadFragment]
1477 genModules=generatorModule.__dict__
1478
1479
1480 if self.LHEDefaultSeq in genModules:
1481 del genModules[self.LHEDefaultSeq]
1482
1483 if self._options.hideGen:
1484 self.loadAndRemember(loadFragment)
1485 else:
1486 self.process.load(loadFragment)
1487
1488 import FWCore.ParameterSet.Modules as cmstypes
1489 for name in genModules:
1490 theObject = getattr(generatorModule,name)
1491 if isinstance(theObject, cmstypes._Module):
1492 self._options.inlineObjects=name+','+self._options.inlineObjects
1493 if theObject.type_() in noConcurrentLumiGenerators:
1494 print("Setting numberOfConcurrentLuminosityBlocks=1 because of generator {}".format(theObject.type_()))
1495 self._options.nConcurrentLumis = 1
1496 self._options.nConcurrentIOVs = 1
1497 elif isinstance(theObject, cms.Sequence) or isinstance(theObject, cmstypes.ESProducer):
1498 self._options.inlineObjects+=','+name
1499 if name == 'ProductionFilterSequence':
1500 self.productionFilterSequence = 'ProductionFilterSequence'
1501 if stepSpec == self.GENDefaultSeq or stepSpec == 'pgen_genonly':
1502 if 'ProductionFilterSequence' in genModules and ('generator' in genModules):
1503 self.productionFilterSequence = 'ProductionFilterSequence'
1504 elif 'generator' in genModules:
1505 self.productionFilterSequence = 'generator'
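# Note: the production filter sequence selected above is later inserted at the front of the
# (conditional) paths in prepare(), so that events rejected by the generator filter are not
# processed by the rest of the job.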
1506
1507 """ Enrich the schedule with the rest of the generation step """
1508 _,_genSeqName,_=self.loadDefaultOrSpecifiedCFF(stepSpec,self.GENDefaultCFF)
1509
1510 if True:
1511 try:
1512 from Configuration.StandardSequences.VtxSmeared import VtxSmeared
1513 cffToBeLoaded=VtxSmeared[self._options.beamspot]
1514 self.loadAndRemember(cffToBeLoaded)
1515 except ImportError:
1516 raise Exception("VertexSmearing type or beamspot "+self._options.beamspot+" unknown.")
1517
1518 if self._options.scenario == 'HeavyIons':
1519 if self._options.pileup=='HiMixGEN':
1520 self.loadAndRemember("Configuration/StandardSequences/GeneratorMix_cff")
1521 elif self._options.pileup=='HiMixEmbGEN':
1522 self.loadAndRemember("Configuration/StandardSequences/GeneratorEmbMix_cff")
1523 else:
1524 self.loadAndRemember("Configuration/StandardSequences/GeneratorHI_cff")
1525
1526 self.process.generation_step = cms.Path( getattr(self.process,_genSeqName) )
1527 self.schedule.append(self.process.generation_step)
1528
1529
1530 self.executeAndRemember('process.genstepfilter.triggerConditions=cms.vstring("generation_step")')
1531
1532 if 'reGEN' in self.stepMap or stepSpec == 'pgen_smear':
1533
1534 return
1535
1536 """ Enrich the schedule with the summary of the filter step """
1537
1538 self.loadAndRemember("GeneratorInterface/Core/genFilterSummary_cff")
1539 self.scheduleSequenceAtEnd('genFilterSummary','genfiltersummary_step')
1540 return
1541
1542 def prepare_SIM(self, stepSpec = None):
1543 """ Enrich the schedule with the simulation step"""
1544 _,_simSeq,_ = self.loadDefaultOrSpecifiedCFF(stepSpec,self.SIMDefaultCFF)
1545 if not self._options.fast:
1546 if self._options.gflash==True:
1547 self.loadAndRemember("Configuration/StandardSequences/GFlashSIM_cff")
1548
1549 if self._options.magField=='0T':
1550 self.executeAndRemember("process.g4SimHits.UseMagneticField = cms.bool(False)")
1551 else:
1552 if self._options.magField=='0T':
1553 self.executeAndRemember("process.fastSimProducer.detectorDefinition.magneticFieldZ = cms.untracked.double(0.)")
1554
1555 self.scheduleSequence(_simSeq,'simulation_step')
1556 return
1557
1558 def prepare_DIGI(self, stepSpec = None):
1559 """ Enrich the schedule with the digitisation step"""
1560 _,_digiSeq,_ = self.loadDefaultOrSpecifiedCFF(stepSpec,self.DIGIDefaultCFF)
1561
1562 if self._options.gflash==True:
1563 self.loadAndRemember("Configuration/StandardSequences/GFlashDIGI_cff")
1564
1565 if _digiSeq == 'pdigi_valid' or _digiSeq == 'pdigi_hi':
1566 self.executeAndRemember("process.mix.digitizers = cms.PSet(process.theDigitizersValid)")
1567
1568 if _digiSeq != 'pdigi_nogen' and _digiSeq != 'pdigi_valid_nogen' and _digiSeq != 'pdigi_hi_nogen' and not self.process.source.type_()=='EmptySource' and not self._options.filetype == "LHE":
1569 if self._options.inputEventContent=='':
1570 self._options.inputEventContent='REGEN'
1571 else:
1572 self._options.inputEventContent=self._options.inputEventContent+',REGEN'
1573
1574
1575 self.scheduleSequence(_digiSeq,'digitisation_step')
1576 return
1577
1578 def prepare_CFWRITER(self, stepSpec = None):
1579 """ Enrich the schedule with the crossing frame writer step"""
1580 self.loadAndRemember(self.CFWRITERDefaultCFF)
1581 self.scheduleSequence('pcfw','cfwriter_step')
1582 return
1583
1584 def prepare_DATAMIX(self, stepSpec = None):
1585 """ Enrich the schedule with the digitisation step"""
1586 self.loadAndRemember(self.DATAMIXDefaultCFF)
1587 self.scheduleSequence('pdatamix','datamixing_step')
1588
1589 if self._options.pileup_input:
1590 theFiles=''
1591 if self._options.pileup_input.startswith('dbs:') or self._options.pileup_input.startswith('das:'):
1592 theFiles=filesFromDASQuery('file dataset = %s'%(self._options.pileup_input[4:],),self._options.pileup_dasoption)[0]
1593 elif self._options.pileup_input.startswith("filelist:"):
1594 theFiles= (filesFromList(self._options.pileup_input[9:]))[0]
1595 else:
1596 theFiles=self._options.pileup_input.split(',')
1597
1598 self.executeAndRemember( "process.mixData.input.fileNames = cms.untracked.vstring(%s)"%( theFiles ) )
1599
1600 return
1601
1602 def prepare_DIGI2RAW(self, stepSpec = None):
1603 _,_digi2rawSeq,_ = self.loadDefaultOrSpecifiedCFF(stepSpec,self.DIGI2RAWDefaultCFF)
1604 self.scheduleSequence(_digi2rawSeq,'digi2raw_step')
1605 return
1606
1607 def prepare_REPACK(self, stepSpec = None):
1608 _,_repackSeq,_ = self.loadDefaultOrSpecifiedCFF(stepSpec,self.REPACKDefaultCFF)
1609 self.scheduleSequence(_repackSeq,'digi2repack_step')
1610 return
1611
1612 def loadPhase2GTMenu(self, menuFile: str):
1613 import importlib
1614 menuPath = f'L1Trigger.Configuration.Phase2GTMenus.{menuFile}'
1615 print(f"Loading P2GT menu from {menuPath}")
1616 menuModule = importlib.import_module(menuPath)
1617
1618 theMenu = menuModule.menu
1619 triggerPaths = []
1620
1621 for triggerPathFile in theMenu:
1622 self.loadAndRemember(triggerPathFile)
1623
1624 triggerPathModule = importlib.import_module(triggerPathFile)
1625 for objName in dir(triggerPathModule):
1626 obj = getattr(triggerPathModule, objName)
1627 objType = type(obj)
1628 if objType == cms.Path:
1629 triggerPaths.append(objName)
1630
1631 triggerScheduleList = [getattr(self.process, name) for name in sorted(triggerPaths)]
1632 self.schedule.extend(triggerScheduleList)
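# Note: the Phase-2 GT menu module loaded above is expected to expose a 'menu' list of
# trigger-path cff names; each listed cff is loaded and every cms.Path it defines is added
# to the schedule, sorted alphabetically by path label.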
1633
1634
1635
1636 def prepare_L1P2GT(self, stepSpec=None):
1637 """ Run the GT emulation sequence on top of the L1 emulation step """
1638 self.loadAndRemember(self.L1P2GTDefaultCFF)
1639 self.scheduleSequence('l1tGTProducerSequence', 'Phase2L1GTProducer')
1640 self.scheduleSequence('l1tGTAlgoBlockProducerSequence', 'Phase2L1GTAlgoBlockProducer')
1641 if stepSpec == None:
1642 defaultMenuFile = "step1_2024"
1643 self.loadPhase2GTMenu(menuFile = defaultMenuFile)
1644 else:
1645 self.loadPhase2GTMenu(menuFile = stepSpec)
1646
1647 def prepare_L1(self, stepSpec = None):
1648 """ Enrich the schedule with the L1 simulation step"""
1649 assert(stepSpec == None)
1650 self.loadAndRemember(self.L1EMDefaultCFF)
1651 self.scheduleSequence('SimL1Emulator','L1simulation_step')
1652 return
1653
1654 def prepare_L1REPACK(self, stepSpec = None):
1655 """ Enrich the schedule with the L1 simulation step, running the L1 emulator on data unpacked from the RAW collection, and repacking the result in a new RAW collection"""
1656 supported = ['GT','GT1','GT2','GCTGT','Full','FullSimTP','FullMC','Full2015Data','uGT','CalouGT']
1657 if stepSpec in supported:
1658 self.loadAndRemember('Configuration/StandardSequences/SimL1EmulatorRepack_%s_cff'% stepSpec)
1659 if self._options.scenario == 'HeavyIons':
1660 self.renameInputTagsInSequence("SimL1Emulator","rawDataCollector","rawDataRepacker")
1661 self.scheduleSequence('SimL1Emulator','L1RePack_step')
1662 else:
1663 print("L1REPACK with '",stepSpec,"' is not supported! Supported choices are: ",supported)
1664 raise Exception('unsupported feature')
1665
1666 def prepare_HLT(self, stepSpec = None):
1667 """ Enrich the schedule with the HLT simulation step"""
1668 if not stepSpec:
1669 print("no specification of the hlt menu has been given, should never happen")
1670 raise Exception('no HLT specifications provided')
1671
1672 if '@' in stepSpec:
1673
1674 from Configuration.HLT.autoHLT import autoHLT
1675 key = stepSpec[1:]
1676 if key in autoHLT:
1677 stepSpec = autoHLT[key]
1678 else:
1679 raise ValueError('no HLT mapping key "%s" found in autoHLT' % key)
1680
1681 if ',' in stepSpec:
1682
1683 self.executeAndRemember('import HLTrigger.Configuration.Utilities')
1684 optionsForHLT = {}
1685 if self._options.scenario == 'HeavyIons':
1686 optionsForHLT['type'] = 'HIon'
1687 else:
1688 optionsForHLT['type'] = 'GRun'
1689 optionsForHLTConfig = ', '.join('%s=%s' % (key, repr(val)) for (key, val) in optionsForHLT.items())
1690 if stepSpec == 'run,fromSource':
1691 if hasattr(self.process.source,'firstRun'):
1692 self.executeAndRemember('process.loadHltConfiguration("run:%%d"%%(process.source.firstRun.value()),%s)'%(optionsForHLTConfig))
1693 elif hasattr(self.process.source,'setRunNumber'):
1694 self.executeAndRemember('process.loadHltConfiguration("run:%%d"%%(process.source.setRunNumber.value()),%s)'%(optionsForHLTConfig))
1695 else:
1696 raise Exception(f'Cannot replace menu to load {stepSpec}')
1697 else:
1698 self.executeAndRemember('process.loadHltConfiguration("%s",%s)'%(stepSpec.replace(',',':'),optionsForHLTConfig))
1699 else:
1700 self.loadAndRemember('HLTrigger/Configuration/HLT_%s_cff' % stepSpec)
1701
1702 if self._options.isMC:
1703 self._options.customisation_file.append("HLTrigger/Configuration/customizeHLTforMC.customizeHLTforMC")
1704
1705 if self._options.name != 'HLT':
1706 self.additionalCommands.append('from HLTrigger.Configuration.CustomConfigs import ProcessName')
1707 self.additionalCommands.append('process = ProcessName(process)')
1708 self.additionalCommands.append('')
1709 from HLTrigger.Configuration.CustomConfigs import ProcessName
1710 self.process = ProcessName(self.process)
1711
1712 if self.process.schedule == None:
1713 raise Exception('the HLT step did not attach a valid schedule to the process')
1714
1715 self.scheduleIndexOfFirstHLTPath = len(self.schedule)
1716 [self.blacklist_paths.append(path) for path in self.process.schedule if isinstance(path,(cms.Path,cms.EndPath))]
1717
1718
1719 if self._options.fast:
1720 if not hasattr(self.process,'HLTEndSequence'):
1721 self.executeAndRemember("process.HLTEndSequence = cms.Sequence( process.dummyModule )")
1722
1723
1724 def prepare_RAW2RECO(self, stepSpec = None):
1725 if ',' in stepSpec:
1726 seqReco,seqDigi=stepSpec.split(',')
1727 else:
1728 raise Exception(f"RAW2RECO requires two specifications, got '{stepSpec}'")
1729
1730 self.prepare_RAW2DIGI(seqDigi)
1731 self.prepare_RECO(seqReco)
1732 return
1733
1734 def prepare_RAW2DIGI(self, stepSpec = "RawToDigi"):
1735 _,_raw2digiSeq,_ = self.loadDefaultOrSpecifiedCFF(stepSpec,self.RAW2DIGIDefaultCFF)
1736 self.scheduleSequence(_raw2digiSeq,'raw2digi_step')
1737 return
1738
1739 def prepare_PATFILTER(self, stepSpec = None):
1740 self.loadAndRemember("PhysicsTools/PatAlgos/slimming/metFilterPaths_cff")
1741 from PhysicsTools.PatAlgos.slimming.metFilterPaths_cff import allMetFilterPaths
1742 for filt in allMetFilterPaths:
1743 self.schedule.append(getattr(self.process,'Flag_'+filt))
1744
1745 def prepare_L1HwVal(self, stepSpec = 'L1HwVal'):
1746 ''' Enrich the schedule with L1 HW validation '''
1747 self.loadDefaultOrSpecifiedCFF(stepSpec,self.L1HwValDefaultCFF)
1748 print('\n\n\n DEPRECATED this has no action \n\n\n')
1749 return
1750
1751 def prepare_L1Reco(self, stepSpec = "L1Reco"):
1752 ''' Enrich the schedule with L1 reconstruction '''
1753 _,_l1recoSeq,_ = self.loadDefaultOrSpecifiedCFF(stepSpec,self.L1RecoDefaultCFF)
1754 self.scheduleSequence(_l1recoSeq,'L1Reco_step')
1755 return
1756
1757 def prepare_L1TrackTrigger(self, stepSpec = "L1TrackTrigger"):
1758 ''' Enrich the schedule with L1 reconstruction '''
1759 _,_l1tracktriggerSeq,_ = self.loadDefaultOrSpecifiedCFF(stepSpec,self.L1TrackTriggerDefaultCFF)
1760 self.scheduleSequence(_l1tracktriggerSeq,'L1TrackTrigger_step')
1761 return
1762
1763 def prepare_FILTER(self, stepSpec = None):
1764 ''' Enrich the schedule with a user defined filter sequence '''
1765
1766 filterConfig,filterSeq = stepSpec.split('.')
1767 filterConfig=self.load(filterConfig)
1768
1769 class PrintAllModules(object):
1770 def __init__(self):
1771 self.inliner=''
1772 pass
1773 def enter(self,visitee):
1774 try:
1775 label=visitee.label()
1776
1777 self.inliner=label+','+self.inliner
1778 except:
1779 pass
1780 def leave(self,v): pass
1781
1782 expander=PrintAllModules()
1783 getattr(self.process,filterSeq).visit( expander )
1784 self._options.inlineObjects+=','+expander.inliner
1785 self._options.inlineObjects+=','+filterSeq
1786
1787
1788 self.scheduleSequence(filterSeq,'filtering_step')
1789 self.nextScheduleIsConditional=True
1790
1791 self.productionFilterSequence = filterSeq
1792
1793 return
1794
1795 def prepare_RECO(self, stepSpec = "reconstruction"):
1796 ''' Enrich the schedule with reconstruction '''
1797 _,_recoSeq,_ = self.loadDefaultOrSpecifiedCFF(stepSpec,self.RECODefaultCFF)
1798 self.scheduleSequence(_recoSeq,'reconstruction_step')
1799 return
1800
1801 def prepare_RECOSIM(self, stepSpec = "recosim"):
1802 ''' Enrich the schedule with reconstruction '''
1803 _,_recosimSeq,_ = self.loadDefaultOrSpecifiedCFF(stepSpec,self.RECOSIMDefaultCFF)
1804 self.scheduleSequence(_recosimSeq,'recosim_step')
1805 return
1806
1807 def prepare_RECOBEFMIX(self, stepSpec = "reconstruction"):
1808 ''' Enrich the schedule with the part of reconstruction that is done before mixing in FastSim'''
1809 if not self._options.fast:
1810 print("ERROR: this step is only implemented for FastSim")
1811 sys.exit()
1812 _,_recobefmixSeq,_ = self.loadDefaultOrSpecifiedCFF(self.RECOBEFMIXDefaultSeq,self.RECOBEFMIXDefaultCFF)
1813 self.scheduleSequence(_recobefmixSeq,'reconstruction_befmix_step')
1814 return
1815
1816 def prepare_PAT(self, stepSpec = "miniAOD"):
1817 ''' Enrich the schedule with PAT '''
1818 self.prepare_PATFILTER(self)
1819 self.loadDefaultOrSpecifiedCFF(stepSpec,self.PATDefaultCFF)
1820 self.labelsToAssociate.append('patTask')
1821 if self._options.isData:
1822 self._options.customisation_file_unsch.insert(0,"PhysicsTools/PatAlgos/slimming/miniAOD_tools.miniAOD_customizeAllData")
1823 else:
1824 if self._options.fast:
1825 self._options.customisation_file_unsch.insert(0,"PhysicsTools/PatAlgos/slimming/miniAOD_tools.miniAOD_customizeAllMCFastSim")
1826 else:
1827 self._options.customisation_file_unsch.insert(0,"PhysicsTools/PatAlgos/slimming/miniAOD_tools.miniAOD_customizeAllMC")
1828
1829 if self._options.hltProcess:
1830 self._customise_coms.append( f'process.patTrigger.processName = "{self._options.hltProcess}"')
1831 self._customise_coms.append( f'process.slimmedPatTrigger.triggerResults= cms.InputTag( "TriggerResults::{self._options.hltProcess}" )')
1832 self._customise_coms.append( f'process.patMuons.triggerResults= cms.InputTag( "TriggerResults::{self._options.hltProcess}" )')
1833
1834
1835 if self.stepKeys[0] == 'PAT':
1836 self._customise_coms.append( 'process.source.delayReadingEventProducts = cms.untracked.bool(False)')
1837
1838 return
1839
1840 def prepare_PATGEN(self, stepSpec = "miniGEN"):
1841 ''' Enrich the schedule with PATGEN '''
1842 self.loadDefaultOrSpecifiedCFF(stepSpec,self.PATGENDefaultCFF)
1843 self.labelsToAssociate.append('patGENTask')
1844 if self._options.isData:
1845 raise Exception("PATGEN step can only run on MC")
1846 return
1847
1848 def prepare_NANO(self, stepSpec = '' ):
1849 print(f"in prepare_nano {stepSpec}")
1850 ''' Enrich the schedule with NANO '''
1851 if not '@' in stepSpec:
1852 _,_nanoSeq,_nanoCff = self.loadDefaultOrSpecifiedCFF(stepSpec,self.NANODefaultCFF,self.NANODefaultSeq)
1853 else:
1854 _nanoSeq = stepSpec
1855 _nanoCff = self.NANODefaultCFF
1856
1857 print(_nanoSeq)
1858
1859 from PhysicsTools.NanoAOD.autoNANO import autoNANO, expandNanoMapping
1860
1861 _nanoCustoms = _nanoSeq.split('+') if '@' in stepSpec else ['']
1862 _nanoSeq = _nanoSeq.split('+')
1863 expandNanoMapping(_nanoSeq, autoNANO, 'sequence')
1864 expandNanoMapping(_nanoCustoms, autoNANO, 'customize')
1865
1866 _nanoSeq = list(sorted(set(_nanoSeq), key=_nanoSeq.index))
1867 _nanoCustoms = list(sorted(set(_nanoCustoms), key=_nanoCustoms.index))
1868
1869 _nanoSeq = [seq if seq!='' else f"{self.NANODefaultCFF}.{self.NANODefaultSeq}" for seq in _nanoSeq]
1870 _nanoCustoms = [cust if cust!='' else self.NANODefaultCustom for cust in _nanoCustoms]
1871
1872 if len(_nanoSeq) < 1 and '@' in stepSpec:
1873 raise Exception(f'The specified mapping: {stepSpec} generates an empty NANO sequence. Please provide a valid mapping')
1874 _seqToSchedule = []
1875 for _subSeq in _nanoSeq:
1876 if '.' in _subSeq:
1877 _cff,_seq = _subSeq.split('.')
1878 print("NANO: scheduling:",_seq,"from",_cff)
1879 self.loadAndRemember(_cff)
1880 _seqToSchedule.append(_seq)
1881 elif '/' in _subSeq:
1882 self.loadAndRemember(_subSeq)
1883 _seqToSchedule.append(self.NANODefaultSeq)
1884 else:
1885 print("NANO: scheduling:",_subSeq)
1886 _seqToSchedule.append(_subSeq)
1887 self.scheduleSequence('+'.join(_seqToSchedule), 'nanoAOD_step')
1888
1889
1890 for custom in _nanoCustoms:
1891 custom_path = custom if '.' in custom else '.'.join([_nanoCff,custom])
1892
1893 self._options.customisation_file.append(custom_path)
1894 if self._options.hltProcess:
1895 self._customise_coms.append( f'process.unpackedPatTrigger.triggerResults= cms.InputTag( "TriggerResults::{self._options.hltProcess}" )')
1896
1897
1898 if self.stepKeys[0] == 'NANO':
1899 self._customise_coms.append( 'process.source.delayReadingEventProducts = cms.untracked.bool(False)')
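# Illustrative note (alias keys below are examples, not guaranteed to exist in autoNANO):
# a spec such as "NANO:@PHYS+@Muon" is expanded through autoNANO into concrete
# "<cff>.<sequence>" entries plus a matching list of customisation functions; duplicates are
# dropped while preserving order, and empty entries fall back to self.NANODefaultCFF /
# self.NANODefaultSeq / self.NANODefaultCustom.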
1900
1901 def prepare_SKIM(self, stepSpec = "all"):
1902 ''' Enrich the schedule with skimming fragments'''
1903 skimConfig,sequence,_ = self.loadDefaultOrSpecifiedCFF(stepSpec,self.SKIMDefaultCFF)
1904
1905 stdHLTProcName = 'HLT'
1906 newHLTProcName = self._options.hltProcess
1907 customiseForReHLT = (newHLTProcName or (stdHLTProcName in self.stepMap)) and (newHLTProcName != stdHLTProcName)
1908 if customiseForReHLT:
1909 print("replacing %s process name - step SKIM:%s will use '%s'" % (stdHLTProcName, sequence, newHLTProcName))
1910
1911
1912 from Configuration.Skimming.autoSkim import autoSkim
1913 skimlist = sequence.split('+')
1914 self.expandMapping(skimlist,autoSkim)
1915
1916
1917 for skim in skimConfig.__dict__:
1918 skimstream = getattr(skimConfig, skim)
1919
1920
1921 if isinstance(skimstream, cms.Path):
1922 self.blacklist_paths.append(skimstream)
1923
1924 elif isinstance(skimstream, cms.Sequence):
1925 if customiseForReHLT:
1926 self.renameHLTprocessInSequence(skim, proc = newHLTProcName, HLTprocess = stdHLTProcName, verbosityLevel = 0)
1927
1928 if not isinstance(skimstream, cms.FilteredStream):
1929 continue
1930
1931 shortname = skim.replace('SKIMStream','')
1932 if (sequence=="all"):
1933 self.addExtraStream(skim,skimstream)
1934 elif (shortname in skimlist):
1935 self.addExtraStream(skim,skimstream)
1936
1937 if self._options.datatier=='DQM':
1938 self.process.load(self.EVTCONTDefaultCFF)
1939 skimstreamDQM = cms.FilteredStream(
1940 responsible = skimstream.responsible,
1941 name = skimstream.name+'DQM',
1942 paths = skimstream.paths,
1943 selectEvents = skimstream.selectEvents,
1944 content = self._options.datatier+'EventContent',
1945 dataTier = cms.untracked.string(self._options.datatier)
1946 )
1947 self.addExtraStream(skim+'DQM',skimstreamDQM)
1948 for i in range(skimlist.count(shortname)):
1949 skimlist.remove(shortname)
1950
1951 if (len(skimlist)!=0 and sequence!="all"):
1952 print('WARNING, possible typo with SKIM:'+'+'.join(skimlist))
1953 raise Exception('WARNING, possible typo with SKIM:'+'+'.join(skimlist))
1954
1955
1956 def prepare_USER(self, stepSpec = None):
1957 ''' Enrich the schedule with a user defined sequence '''
1958 _,_userSeq,_ = self.loadDefaultOrSpecifiedCFF(stepSpec,self.USERDefaultCFF)
1959 self.scheduleSequence(_userSeq,'user_step')
1960 return
1961
1962 def prepare_POSTRECO(self, stepSpec = None):
1963 """ Enrich the schedule with the postreco step """
1964 self.loadAndRemember(self.POSTRECODefaultCFF)
1965 self.scheduleSequence('postreco_generator','postreco_step')
1966 return
1967
1968
1969 def prepare_VALIDATION(self, stepSpec = 'validation'):
1970 print(f"{stepSpec} in preparing validation")
1971 _,sequence,_ = self.loadDefaultOrSpecifiedCFF(stepSpec,self.VALIDATIONDefaultCFF)
1972 from Validation.Configuration.autoValidation import autoValidation
1973
1974 if sequence.find(',')!=-1:
1975 prevalSeqName=sequence.split(',')[0].split('+')
1976 valSeqName=sequence.split(',')[1].split('+')
1977 self.expandMapping(prevalSeqName,autoValidation,index=0)
1978 self.expandMapping(valSeqName,autoValidation,index=1)
1979 else:
1980 if '@' in sequence:
1981 prevalSeqName=sequence.split('+')
1982 valSeqName=sequence.split('+')
1983 self.expandMapping(prevalSeqName,autoValidation,index=0)
1984 self.expandMapping(valSeqName,autoValidation,index=1)
1985 else:
1986 postfix=''
1987 if sequence:
1988 postfix='_'+sequence
1989 prevalSeqName=['prevalidation'+postfix]
1990 valSeqName=['validation'+postfix]
1991 if not hasattr(self.process,valSeqName[0]):
1992 prevalSeqName=['']
1993 valSeqName=[sequence]
1994
1995 def NFI(index):
1996
1997 if index==0:
1998 return ''
1999 else:
2000 return '%s'%index
2001
2002
2003
2004 if ('HLT' in self.stepMap and not self._options.fast) or self._options.hltProcess:
2005 for s in valSeqName+prevalSeqName:
2006 if s:
2007 self.renameHLTprocessInSequence(s)
2008 for (i,s) in enumerate(prevalSeqName):
2009 if s:
2010 setattr(self.process,'prevalidation_step%s'%NFI(i), cms.Path( getattr(self.process, s)) )
2011 self.schedule.append(getattr(self.process,'prevalidation_step%s'%NFI(i)))
2012
2013 for (i,s) in enumerate(valSeqName):
2014 setattr(self.process,'validation_step%s'%NFI(i), cms.EndPath( getattr(self.process, s)))
2015 self.schedule.append(getattr(self.process,'validation_step%s'%NFI(i)))
2016
2017
2018 if 'PAT' in self.stepMap and not 'RECO' in self.stepMap:
2019 return
2020
2021 if not 'DIGI' in self.stepMap and not self._options.fast and not any(map( lambda s : s.startswith('genvalid'), valSeqName)):
2022 if self._options.restoreRNDSeeds==False:
2023 self._options.restoreRNDSeeds=True
2024
2025 if not 'DIGI' in self.stepMap and not self._options.isData and not self._options.fast:
2026 self.executeAndRemember("process.mix.playback = True")
2027 self.executeAndRemember("process.mix.digitizers = cms.PSet()")
2028 self.executeAndRemember("for a in process.aliases: delattr(process, a)")
2029 self._options.customisation_file.append("SimGeneral/MixingModule/fullMixCustomize_cff.setCrossingFrameOn")
2030
2031 if hasattr(self.process,"genstepfilter") and len(self.process.genstepfilter.triggerConditions):
2032
2033 for (i,s) in enumerate(valSeqName):
2034 getattr(self.process,'validation_step%s'%NFI(i)).insert(0, self.process.genstepfilter)
2035
2036 return
2037
2038
2039 class MassSearchReplaceProcessNameVisitor(object):
2040 """Visitor that travels within a cms.Sequence, looks for a parameter and replace its value
2041 It will climb down within PSets, VPSets and VInputTags to find its target"""
2042 def __init__(self, paramSearch, paramReplace, verbose=False, whitelist=()):
2043 self._paramReplace = paramReplace
2044 self._paramSearch = paramSearch
2045 self._verbose = verbose
2046 self._whitelist = whitelist
2047
2048 def doIt(self, pset, base):
2049 if isinstance(pset, cms._Parameterizable):
2050 for name in pset.parameters_().keys():
2051
2052 if name in self._whitelist:
2053 continue
2054
2055
2056 value = getattr(pset, name)
2057 valueType = type(value)
2058 if valueType in [cms.PSet, cms.untracked.PSet, cms.EDProducer]:
2059 self.doIt(value,base+"."+name)
2060 elif valueType in [cms.VPSet, cms.untracked.VPSet]:
2061 for (i,ps) in enumerate(value): self.doIt(ps, "%s.%s[%d]"%(base,name,i) )
2062 elif valueType in [cms.string, cms.untracked.string]:
2063 if value.value() == self._paramSearch:
2064 if self._verbose: print("set string process name %s.%s %s ==> %s"% (base, name, value, self._paramReplace))
2065 setattr(pset, name,self._paramReplace)
2066 elif valueType in [cms.VInputTag, cms.untracked.VInputTag]:
2067 for (i,n) in enumerate(value):
2068 if not isinstance(n, cms.InputTag):
2069 n=cms.InputTag(n)
2070 if n.processName == self._paramSearch:
2071
2072 if self._verbose:print("set process name %s.%s[%d] %s ==> %s " % (base, name, i, n, self._paramReplace))
2073 setattr(n,"processName",self._paramReplace)
2074 value[i]=n
2075 elif valueType in [cms.vstring, cms.untracked.vstring]:
2076 for (i,n) in enumerate(value):
2077 if n==self._paramSearch:
2078 getattr(pset,name)[i]=self._paramReplace
2079 elif valueType in [cms.InputTag, cms.untracked.InputTag]:
2080 if value.processName == self._paramSearch:
2081 if self._verbose: print("set process name %s.%s %s ==> %s " % (base, name, value, self._paramReplace))
2082 setattr(getattr(pset, name),"processName",self._paramReplace)
2083
2084 def enter(self,visitee):
2085 label = ''
2086 try:
2087 label = visitee.label()
2088 except AttributeError:
2089 label = '<Module not in a Process>'
2090 except:
2091 label = 'other exception'
2092 self.doIt(visitee, label)
2093
2094 def leave(self,visitee):
2095 pass
2096
2097
2098 def renameInputTagsInSequence(self,sequence,oldT="rawDataCollector",newT="rawDataRepacker"):
2099 print("Replacing all InputTag %s => %s"%(oldT,newT))
2100 from PhysicsTools.PatAlgos.tools.helpers import massSearchReplaceAnyInputTag
2101 massSearchReplaceAnyInputTag(getattr(self.process,sequence),oldT,newT)
2102 loadMe='from PhysicsTools.PatAlgos.tools.helpers import massSearchReplaceAnyInputTag'
2103 if not loadMe in self.additionalCommands:
2104 self.additionalCommands.append(loadMe)
2105 self.additionalCommands.append('massSearchReplaceAnyInputTag(process.%s,"%s","%s",False,True)'%(sequence,oldT,newT))
2106
2107
2108 def renameHLTprocessInSequence(self, sequence, proc=None, HLTprocess='HLT', verbosityLevel=1):
2109 if proc == None:
2110 proc = self._options.hltProcess if self._options.hltProcess else self.process.name_()
2111 if proc == HLTprocess:
2112 return
2113
2114 if verbosityLevel > 0:
2115 print("replacing %s process name - sequence %s will use '%s'" % (HLTprocess, sequence, proc))
2116 verboseVisit = (verbosityLevel > 1)
2117 getattr(self.process,sequence).visit(
2118 ConfigBuilder.MassSearchReplaceProcessNameVisitor(HLTprocess, proc, whitelist = ("subSystemFolder",), verbose = verboseVisit))
2119 if 'from Configuration.Applications.ConfigBuilder import ConfigBuilder' not in self.additionalCommands:
2120 self.additionalCommands.append('from Configuration.Applications.ConfigBuilder import ConfigBuilder')
2121 self.additionalCommands.append(
2122 'process.%s.visit(ConfigBuilder.MassSearchReplaceProcessNameVisitor("%s", "%s", whitelist = ("subSystemFolder",), verbose = %s))'
2123 % (sequence, HLTprocess, proc, verboseVisit))
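# Illustrative note: the command registered above is equivalent to running by hand
# (sequence and process names are placeholders):
#   process.mySequence.visit(
#       ConfigBuilder.MassSearchReplaceProcessNameVisitor(
#           "HLT", "reHLT", whitelist = ("subSystemFolder",), verbose = False))
# i.e. every InputTag and string parameter inside that sequence referring to process "HLT"
# is rewritten to point at "reHLT" instead.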
2124
2125 def expandMapping(self,seqList,mapping,index=None):
2126 maxLevel=30
2127 level=0
2128 while '@' in repr(seqList) and level<maxLevel:
2129 level+=1
2130 for specifiedCommand in seqList:
2131 if specifiedCommand.startswith('@'):
2132 location=specifiedCommand[1:]
2133 if not location in mapping:
2134 raise Exception("Impossible to map "+location+" from "+repr(mapping))
2135 mappedTo=mapping[location]
2136 if index!=None:
2137 mappedTo=mappedTo[index]
2138 seqList.remove(specifiedCommand)
2139 seqList.extend(mappedTo.split('+'))
2140 break
2141 if level==maxLevel:
2142 raise Exception("Could not fully expand "+repr(seqList)+" from "+repr(mapping))
2143
2144 def prepare_DQM(self, stepSpec = 'DQMOffline'):
2145
2146
2147
2148 self.loadAndRemember("DQMServices/Core/DQMStoreNonLegacy_cff")
2149 _,_dqmSeq,_ = self.loadDefaultOrSpecifiedCFF(stepSpec,self.DQMOFFLINEDefaultCFF)
2150 sequenceList=_dqmSeq.split('+')
2151 postSequenceList=_dqmSeq.split('+')
2152 from DQMOffline.Configuration.autoDQM import autoDQM
2153 self.expandMapping(sequenceList,autoDQM,index=0)
2154 self.expandMapping(postSequenceList,autoDQM,index=1)
2155
2156 if len(set(sequenceList))!=len(sequenceList):
2157 sequenceList=list(OrderedSet(sequenceList))
2158 print("Duplicate entries for DQM:, using",sequenceList)
2159
2160 pathName='dqmoffline_step'
2161 for (i,_sequence) in enumerate(sequenceList):
2162 if (i!=0):
2163 pathName='dqmoffline_%d_step'%(i)
2164
2165 if 'HLT' in self.stepMap.keys() or self._options.hltProcess:
2166 self.renameHLTprocessInSequence(_sequence)
2167
2168 setattr(self.process,pathName, cms.EndPath( getattr(self.process,_sequence ) ) )
2169 self.schedule.append(getattr(self.process,pathName))
2170
2171 if hasattr(self.process,"genstepfilter") and len(self.process.genstepfilter.triggerConditions):
2172
2173 getattr(self.process,pathName).insert(0,self.process.genstepfilter)
2174
2175
2176 pathName='dqmofflineOnPAT_step'
2177 for (i,_sequence) in enumerate(postSequenceList):
2178
2179 if (sequenceList[i]==postSequenceList[i]):
2180 continue
2181 if (i!=0):
2182 pathName='dqmofflineOnPAT_%d_step'%(i)
2183
2184 setattr(self.process,pathName, cms.EndPath( getattr(self.process, _sequence ) ) )
2185 self.schedule.append(getattr(self.process,pathName))
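# Illustrative note: a spec like "DQM:@common+@muon" (alias names are examples) is expanded
# twice through autoDQM: index 0 yields the dqmoffline_step, dqmoffline_1_step, ... EndPaths
# scheduled above, while index 1 yields the corresponding dqmofflineOnPAT_*_step EndPaths,
# which are only created when they differ from their index-0 counterpart.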
2186
2187 def prepare_HARVESTING(self, stepSpec = None):
2188 """ Enrich the process with harvesting step """
2189 self.DQMSaverCFF='Configuration/StandardSequences/DQMSaver'+self._options.harvesting+'_cff'
2190 self.loadAndRemember(self.DQMSaverCFF)
2191
2192 harvestingConfig,sequence,_ = self.loadDefaultOrSpecifiedCFF(stepSpec,self.HARVESTINGDefaultCFF)
2193
2194
2195 harvestingList = sequence.split("+")
2196 from DQMOffline.Configuration.autoDQM import autoDQM
2197 from Validation.Configuration.autoValidation import autoValidation
2198 import copy
2199 combined_mapping = copy.deepcopy( autoDQM )
2200 combined_mapping.update( autoValidation )
2201 self.expandMapping(harvestingList,combined_mapping,index=-1)
2202
2203 if len(set(harvestingList))!=len(harvestingList):
2204 harvestingList=list(OrderedSet(harvestingList))
2205 print("Duplicate entries for HARVESTING, using",harvestingList)
2206
2207 for name in harvestingList:
2208 if not name in harvestingConfig.__dict__:
2209 print(name,"is not a possible harvesting type. Available are",harvestingConfig.__dict__.keys())
2210 # trigger a hard error for an unknown harvesting type, as for the other sequence types
2211 getattr(self.process, name)
2212 continue
2213 harvestingstream = getattr(harvestingConfig,name)
2214 if isinstance(harvestingstream,cms.Path):
2215 self.schedule.append(harvestingstream)
2216 self.blacklist_paths.append(harvestingstream)
2217 if isinstance(harvestingstream,cms.Sequence):
2218 setattr(self.process,name+"_step",cms.Path(harvestingstream))
2219 self.schedule.append(getattr(self.process,name+"_step"))
2220
2221
2222
2223
2224
2225 self.scheduleSequence('DQMSaver','dqmsave_step')
2226 return
2227
2228 def prepare_ALCAHARVEST(self, stepSpec = None):
2229 """ Enrich the process with AlCaHarvesting step """
2230 harvestingConfig = self.loadAndRemember(self.ALCAHARVESTDefaultCFF)
2231 sequence=stepSpec.split(".")[-1]
2232
2233
2234 harvestingList = sequence.split("+")
2235
2236
2237
2238 from Configuration.AlCa.autoPCL import autoPCL
2239 self.expandMapping(harvestingList,autoPCL)
2240
2241 for name in harvestingConfig.__dict__:
2242 harvestingstream = getattr(harvestingConfig,name)
2243 if name in harvestingList and isinstance(harvestingstream,cms.Path):
2244 self.schedule.append(harvestingstream)
2245 if isinstance(getattr(harvestingConfig,"ALCAHARVEST" + name + "_dbOutput"), cms.VPSet) and \
2246 isinstance(getattr(harvestingConfig,"ALCAHARVEST" + name + "_metadata"), cms.VPSet):
2247 self.executeAndRemember("process.PoolDBOutputService.toPut.extend(process.ALCAHARVEST" + name + "_dbOutput)")
2248 self.executeAndRemember("process.pclMetadataWriter.recordsToMap.extend(process.ALCAHARVEST" + name + "_metadata)")
2249 else:
2250 self.executeAndRemember("process.PoolDBOutputService.toPut.append(process.ALCAHARVEST" + name + "_dbOutput)")
2251 self.executeAndRemember("process.pclMetadataWriter.recordsToMap.append(process.ALCAHARVEST" + name + "_metadata)")
2252 harvestingList.remove(name)
2253
2254 lastStep = getattr(harvestingConfig,"ALCAHARVESTDQMSaveAndMetadataWriter")
2255 self.schedule.append(lastStep)
2256
2257 if len(harvestingList) != 0 and 'dummyHarvesting' not in harvestingList :
2258 print("The following harvesting could not be found : ", harvestingList)
2259 raise Exception("The following harvesting could not be found : "+str(harvestingList))
2260
2261
2262
2263 def prepare_ENDJOB(self, stepSpec = 'endOfProcess'):
2264 _,_endjobSeq,_=self.loadDefaultOrSpecifiedCFF(stepSpec,self.ENDJOBDefaultCFF)
2265 self.scheduleSequenceAtEnd(_endjobSeq,'endjob_step')
2266 return
2267
2268 def finalizeFastSimHLT(self):
2269 self.process.reconstruction = cms.Path(self.process.reconstructionWithFamos)
2270 self.schedule.append(self.process.reconstruction)
2271
2272
2273 def build_production_info(self, evt_type, evtnumber):
2274 """ Add useful info for the production. """
2275 self.process.configurationMetadata=cms.untracked.PSet\
2276 (version=cms.untracked.string("$Revision: 1.19 $"),
2277 name=cms.untracked.string("Applications"),
2278 annotation=cms.untracked.string(evt_type+ " nevts:"+str(evtnumber))
2279 )
2280
2281 self.addedObjects.append(("Production Info","configurationMetadata"))
2282
2283
2284 def create_process(self):
2285 self.pythonCfgCode = "# Auto generated configuration file\n"
2286 self.pythonCfgCode += "# using: \n# "+__version__[1:-1]+"\n# "+__source__[1:-1]+'\n'
2287 self.pythonCfgCode += "# with command line options: "+self._options.arguments+'\n'
2288 self.pythonCfgCode += "import FWCore.ParameterSet.Config as cms\n\n"
2289
2290
2291 modifiers=[]
2292 modifierStrings=[]
2293 modifierImports=[]
2294
2295 if hasattr(self._options,"era") and self._options.era :
2296
2297 from Configuration.StandardSequences.Eras import eras
2298 for requestedEra in self._options.era.split(",") :
2299 modifierStrings.append(requestedEra)
2300 modifierImports.append(eras.pythonCfgLines[requestedEra])
2301 modifiers.append(getattr(eras,requestedEra))
2302
2303
2304 if hasattr(self._options,"procModifiers") and self._options.procModifiers:
2305 import importlib
2306 thingsImported=[]
2307 for c in self._options.procModifiers:
2308 thingsImported.extend(c.split(","))
2309 for pm in thingsImported:
2310 modifierStrings.append(pm)
2311 modifierImports.append('from Configuration.ProcessModifiers.'+pm+'_cff import '+pm)
2312 modifiers.append(getattr(importlib.import_module('Configuration.ProcessModifiers.'+pm+'_cff'),pm))
2313
2314 self.pythonCfgCode += '\n'.join(modifierImports)+'\n\n'
2315 self.pythonCfgCode += "process = cms.Process('"+self._options.name+"'"
2316
2317
2318 if len(modifierStrings)>0:
2319 self.pythonCfgCode+= ','+','.join(modifierStrings)
2320 self.pythonCfgCode+=')\n\n'
2321
2322
2323
2324 if self.process == None:
2325 if len(modifiers)>0:
2326 self.process = cms.Process(self._options.name,*modifiers)
2327 else:
2328 self.process = cms.Process(self._options.name)
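# Illustrative note (option values are examples): with --era Run3 and
# --procModifiers premix_stage2 the generated configuration would, schematically, begin with
#   from Configuration.Eras.Era_Run3_cff import Run3
#   from Configuration.ProcessModifiers.premix_stage2_cff import premix_stage2
#   process = cms.Process('NAME', Run3, premix_stage2)
# i.e. every requested era / process modifier is both imported in the dumped python and
# passed to the cms.Process constructor here.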
2329
2330
2331
2332
2333 def prepare(self, doChecking = False):
2334 """ Prepare the configuration string and add missing pieces."""
2335
2336 self.loadAndRemember(self.EVTCONTDefaultCFF)
2337 self.addMaxEvents()
2338 if self.with_input:
2339 self.addSource()
2340 self.addStandardSequences()
2341
2342 self.completeInputCommand()
2343 self.addConditions()
2344
2345
2346 outputModuleCfgCode=""
2347 if not 'HARVESTING' in self.stepMap.keys() and not 'ALCAHARVEST' in self.stepMap.keys() and not 'ALCAOUTPUT' in self.stepMap.keys() and self.with_output:
2348 outputModuleCfgCode=self.addOutput()
2349
2350 self.addCommon()
2351
2352 self.pythonCfgCode += "# import of standard configurations\n"
2353 for module in self.imports:
2354 self.pythonCfgCode += ("process.load('"+module+"')\n")
2355
2356
2357 if not hasattr(self.process,"configurationMetadata"):
2358 self.build_production_info(self._options.evt_type, self._options.number)
2359 else:
2360
2361 self.addedObjects.append(("Production Info","configurationMetadata"))
2362
2363 self.pythonCfgCode +="\n"
2364 for comment,object in self.addedObjects:
2365 if comment!="":
2366 self.pythonCfgCode += "\n# "+comment+"\n"
2367 self.pythonCfgCode += dumpPython(self.process,object)
2368
2369
2370 self.pythonCfgCode += "\n# Output definition\n"
2371 self.pythonCfgCode += outputModuleCfgCode
2372
2373
2374 self.pythonCfgCode += "\n# Additional output definition\n"
2375
2376 nl=sorted(self.additionalOutputs.keys())
2377 for name in nl:
2378 output = self.additionalOutputs[name]
2379 self.pythonCfgCode += "process.%s = %s" %(name, output.dumpPython())
2380 tmpOut = cms.EndPath(output)
2381 setattr(self.process,name+'OutPath',tmpOut)
2382 self.schedule.append(tmpOut)
2383
2384
2385 self.pythonCfgCode += "\n# Other statements\n"
2386 for command in self.additionalCommands:
2387 self.pythonCfgCode += command + "\n"
2388
2389
2390 for object in self._options.inlineObjects.split(','):
2391 if not object:
2392 continue
2393 if not hasattr(self.process,object):
2394 print('cannot inline -'+object+'- : not known')
2395 else:
2396 self.pythonCfgCode +='\n'
2397 self.pythonCfgCode +=dumpPython(self.process,object)
2398
2399 if self._options.pileup=='HiMixEmbGEN':
2400 self.pythonCfgCode += "\nprocess.generator.embeddingMode=cms.int32(1)\n"
2401
2402
2403 self.pythonCfgCode += "\n# Path and EndPath definitions\n"
2404 for path in self.process.paths:
2405 if getattr(self.process,path) not in self.blacklist_paths:
2406 self.pythonCfgCode += dumpPython(self.process,path)
2407
2408 for endpath in self.process.endpaths:
2409 if getattr(self.process,endpath) not in self.blacklist_paths:
2410 self.pythonCfgCode += dumpPython(self.process,endpath)
2411
2412
2413 self.pythonCfgCode += "\n# Schedule definition\n"
2414
2415
2416 pathNames = ['process.'+p.label_() for p in self.schedule]
2417 if self.process.schedule == None:
2418 self.process.schedule = cms.Schedule()
2419 for item in self.schedule:
2420 self.process.schedule.append(item)
2421 result = 'process.schedule = cms.Schedule('+','.join(pathNames)+')\n'
2422 else:
2423 if not isinstance(self.scheduleIndexOfFirstHLTPath, int):
2424 raise Exception('the schedule was imported from a cff in HLTrigger.Configuration, but the final index of the first HLT path is undefined')
2425
2426 for index, item in enumerate(self.schedule):
2427 if index < self.scheduleIndexOfFirstHLTPath:
2428 self.process.schedule.insert(index, item)
2429 else:
2430 self.process.schedule.append(item)
2431
2432 result = "# process.schedule imported from cff in HLTrigger.Configuration\n"
2433 for index, item in enumerate(pathNames[:self.scheduleIndexOfFirstHLTPath]):
2434 result += 'process.schedule.insert('+str(index)+', '+item+')\n'
2435 if self.scheduleIndexOfFirstHLTPath < len(pathNames):
2436 result += 'process.schedule.extend(['+','.join(pathNames[self.scheduleIndexOfFirstHLTPath:])+'])\n'
2437
2438 self.pythonCfgCode += result
2439
2440 for labelToAssociate in self.labelsToAssociate:
2441 self.process.schedule.associate(getattr(self.process, labelToAssociate))
2442 self.pythonCfgCode += 'process.schedule.associate(process.' + labelToAssociate + ')\n'
2443
2444 from PhysicsTools.PatAlgos.tools.helpers import associatePatAlgosToolsTask
2445 associatePatAlgosToolsTask(self.process)
2446 self.pythonCfgCode+="from PhysicsTools.PatAlgos.tools.helpers import associatePatAlgosToolsTask\n"
2447 self.pythonCfgCode+="associatePatAlgosToolsTask(process)\n"
2448
2449 overrideThreads = (self._options.nThreads != 1)
2450 overrideConcurrentLumis = (self._options.nConcurrentLumis != defaultOptions.nConcurrentLumis)
2451 overrideConcurrentIOVs = (self._options.nConcurrentIOVs != defaultOptions.nConcurrentIOVs)
2452
2453 if overrideThreads or overrideConcurrentLumis or overrideConcurrentIOVs:
2454 self.pythonCfgCode +="\n"
2455 self.pythonCfgCode +="#Setup FWK for multithreaded\n"
2456 if overrideThreads:
2457 self.pythonCfgCode +="process.options.numberOfThreads = {}\n".format(self._options.nThreads)
2458 self.pythonCfgCode +="process.options.numberOfStreams = {}\n".format(self._options.nStreams)
2459 self.process.options.numberOfThreads = self._options.nThreads
2460 self.process.options.numberOfStreams = self._options.nStreams
2461 if overrideConcurrentLumis:
2462 self.pythonCfgCode +="process.options.numberOfConcurrentLuminosityBlocks = {}\n".format(self._options.nConcurrentLumis)
2463 self.process.options.numberOfConcurrentLuminosityBlocks = self._options.nConcurrentLumis
2464 if overrideConcurrentIOVs:
2465 self.pythonCfgCode +="process.options.eventSetup.numberOfConcurrentIOVs = {}\n".format(self._options.nConcurrentIOVs)
2466 self.process.options.eventSetup.numberOfConcurrentIOVs = self._options.nConcurrentIOVs
2467
2468 if self._options.accelerators is not None:
2469 accelerators = self._options.accelerators.split(',')
2470 self.pythonCfgCode += "\n"
2471 self.pythonCfgCode += "# Enable only these accelerator backends\n"
2472 self.pythonCfgCode += "process.load('Configuration.StandardSequences.Accelerators_cff')\n"
2473 self.pythonCfgCode += "process.options.accelerators = ['" + "', '".join(accelerators) + "']\n"
2474 self.process.load('Configuration.StandardSequences.Accelerators_cff')
2475 self.process.options.accelerators = accelerators
2476
2477
2478 if self._options.isRepacked:
2479 self.pythonCfgCode +="\n"
2480 self.pythonCfgCode +="from Configuration.Applications.ConfigBuilder import MassReplaceInputTag\n"
2481 self.pythonCfgCode +="MassReplaceInputTag(process, new=\"rawDataMapperByLabel\", old=\"rawDataCollector\")\n"
2482 MassReplaceInputTag(self.process, new="rawDataMapperByLabel", old="rawDataCollector")
2483
2484
2485 if self.productionFilterSequence and not (self._options.pileup=='HiMixEmbGEN'):
2486 self.pythonCfgCode +='# filter all path with the production filter sequence\n'
2487 self.pythonCfgCode +='for path in process.paths:\n'
2488 if len(self.conditionalPaths):
2489 self.pythonCfgCode +='\tif not path in %s: continue\n'%str(self.conditionalPaths)
2490 if len(self.excludedPaths):
2491 self.pythonCfgCode +='\tif path in %s: continue\n'%str(self.excludedPaths)
2492 self.pythonCfgCode +='\tgetattr(process,path).insert(0, process.%s)\n'%(self.productionFilterSequence,)
2493 pfs = getattr(self.process,self.productionFilterSequence)
2494 for path in self.process.paths:
2495 if not path in self.conditionalPaths: continue
2496 if path in self.excludedPaths: continue
2497 getattr(self.process,path).insert(0, pfs)
2498
2499
2500
2501 self.pythonCfgCode += self.addCustomise()
2502
2503 if self._options.runUnscheduled:
2504 print("--runUnscheduled is deprecated and not necessary anymore, and will be removed soon. Please update your command line.")
2505
2506
2507
2508
2509 self.pythonCfgCode += self.addCustomise(1)
2510
2511 self.pythonCfgCode += self.addCustomiseCmdLine()
2512
2513 if hasattr(self.process,"logErrorHarvester"):
2514
2515 self.pythonCfgCode +="\n#Have logErrorHarvester wait for the same EDProducers to finish as those providing data for the OutputModule\n"
2516 self.pythonCfgCode +="from FWCore.Modules.logErrorHarvester_cff import customiseLogErrorHarvesterUsingOutputCommands\n"
2517 self.pythonCfgCode +="process = customiseLogErrorHarvesterUsingOutputCommands(process)\n"
2518 from FWCore.Modules.logErrorHarvester_cff import customiseLogErrorHarvesterUsingOutputCommands
2519 self.process = customiseLogErrorHarvesterUsingOutputCommands(self.process)
2520
2521
2522
2523
2524
2525 self.pythonCfgCode += "\n# Add early deletion of temporary data products to reduce peak memory need\n"
2526 self.pythonCfgCode += "from Configuration.StandardSequences.earlyDeleteSettings_cff import customiseEarlyDelete\n"
2527 self.pythonCfgCode += "process = customiseEarlyDelete(process)\n"
2528 self.pythonCfgCode += "# End adding early deletion\n"
2529 from Configuration.StandardSequences.earlyDeleteSettings_cff import customiseEarlyDelete
2530 self.process = customiseEarlyDelete(self.process)
2531
2532 imports = cms.specialImportRegistry.getSpecialImports()
2533 if len(imports) > 0:
2534
2535 index = self.pythonCfgCode.find("import FWCore.ParameterSet.Config")
2536
2537 index = self.pythonCfgCode.find("\n",index)
2538 self.pythonCfgCode = self.pythonCfgCode[:index]+ "\n" + "\n".join(imports)+"\n" +self.pythonCfgCode[index:]
2539
2540
2541
2542
2543 if self._options.io:
2544
2545 if not self._options.io.endswith('.io'): self._options.io+='.io'
2546 io=open(self._options.io,'w')
2547 ioJson={}
2548 if hasattr(self.process.source,"fileNames"):
2549 if len(self.process.source.fileNames.value()):
2550 ioJson['primary']=self.process.source.fileNames.value()
2551 if hasattr(self.process.source,"secondaryFileNames"):
2552 if len(self.process.source.secondaryFileNames.value()):
2553 ioJson['secondary']=self.process.source.secondaryFileNames.value()
2554 if self._options.pileup_input and (self._options.pileup_input.startswith('dbs:') or self._options.pileup_input.startswith('das:')):
2555 ioJson['pileup']=self._options.pileup_input[4:]
2556 for (o,om) in self.process.outputModules_().items():
2557 ioJson[o]=om.fileName.value()
2558 ioJson['GT']=self.process.GlobalTag.globaltag.value()
2559 if self.productionFilterSequence:
2560 ioJson['filter']=self.productionFilterSequence
2561 import json
2562 io.write(json.dumps(ioJson))
2563 return
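# Illustrative note: the --io bookkeeping above writes a small JSON summary of the job,
# schematically (all values below are hypothetical):
#   {"primary": ["/store/data/.../file.root"], "GT": "auto:run3_data",
#    "RECOSIMoutput": "output.root", "filter": "ProductionFilterSequence"}
# i.e. the primary/secondary input files, the pileup dataset, one key per output module,
# the global tag and, when present, the production filter sequence.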
2564