0001 #! /usr/bin/env python3
0002 
0003 __version__ = "$Revision: 1.19 $"
0004 __source__ = "$Source: /local/reps/CMSSW/CMSSW/Configuration/Applications/python/ConfigBuilder.py,v $"
0005 
0006 import FWCore.ParameterSet.Config as cms
0007 from FWCore.ParameterSet.Modules import _Module
0008 # The following import is provided for backward compatibility reasons.
0009 # The function used to be defined in this file.
0010 from FWCore.ParameterSet.MassReplace import massReplaceInputTag as MassReplaceInputTag
0011 
0012 import hashlib
0013 import sys
0014 import re
0015 import collections
0016 from subprocess import Popen,PIPE
0017 import FWCore.ParameterSet.DictTypes as DictTypes
0018 from FWCore.ParameterSet.OrderedSet import OrderedSet
0019 class Options:
0020     pass
0021 
0022 # the canonical defaults
0023 defaultOptions = Options()
0024 defaultOptions.datamix = 'DataOnSim'
0025 defaultOptions.isMC=False
0026 defaultOptions.isData=True
0027 defaultOptions.step=''
0028 defaultOptions.pileup='NoPileUp'
0029 defaultOptions.pileup_input = None
0030 defaultOptions.pileup_dasoption = ''
0031 defaultOptions.geometry = 'SimDB'
0032 defaultOptions.geometryExtendedOptions = ['ExtendedGFlash','Extended','NoCastor']
0033 defaultOptions.magField = ''
0034 defaultOptions.conditions = None
0035 defaultOptions.scenarioOptions=['pp','cosmics','nocoll','HeavyIons']
0036 defaultOptions.harvesting= 'AtRunEnd'
0037 defaultOptions.gflash = False
0038 defaultOptions.number = -1
0039 defaultOptions.number_out = None
0040 defaultOptions.arguments = ""
0041 defaultOptions.name = "NO NAME GIVEN"
0042 defaultOptions.evt_type = ""
0043 defaultOptions.filein = ""
0044 defaultOptions.dasquery=""
0045 defaultOptions.dasoption=""
0046 defaultOptions.secondfilein = ""
0047 defaultOptions.customisation_file = []
0048 defaultOptions.customisation_file_unsch = []
0049 defaultOptions.customise_commands = ""
0050 defaultOptions.inline_custom=False
0051 defaultOptions.particleTable = 'pythiapdt'
0052 defaultOptions.particleTableList = ['pythiapdt','pdt']
0053 defaultOptions.dirin = ''
0054 defaultOptions.dirout = ''
0055 defaultOptions.filetype = 'EDM'
0056 defaultOptions.fileout = 'output.root'
0057 defaultOptions.filtername = ''
0058 defaultOptions.lazy_download = False
0059 defaultOptions.custom_conditions = ''
0060 defaultOptions.hltProcess = ''
0061 defaultOptions.eventcontent = None
0062 defaultOptions.datatier = None
0063 defaultOptions.inlineEventContent = True
0064 defaultOptions.inlineObjects =''
0065 defaultOptions.hideGen=False
0066 from Configuration.StandardSequences.VtxSmeared import VtxSmearedDefaultKey,VtxSmearedHIDefaultKey
0067 defaultOptions.beamspot=None
0068 defaultOptions.outputDefinition =''
0069 defaultOptions.inputCommands = None
0070 defaultOptions.outputCommands = None
0071 defaultOptions.inputEventContent = ''
0072 defaultOptions.dropDescendant = False
0073 defaultOptions.relval = None
0074 defaultOptions.prefix = None
0075 defaultOptions.profile = None
0076 defaultOptions.heap_profile = None
0077 defaultOptions.maxmem_profile = None
0078 defaultOptions.isRepacked = False
0079 defaultOptions.restoreRNDSeeds = False
0080 defaultOptions.donotDropOnInput = ''
0081 defaultOptions.python_filename =''
0082 defaultOptions.io=None
0083 defaultOptions.lumiToProcess=None
0084 defaultOptions.fast=False
0085 defaultOptions.runsAndWeightsForMC = None
0086 defaultOptions.runsScenarioForMC = None
0087 defaultOptions.runsAndWeightsForMCIntegerWeights = None
0088 defaultOptions.runsScenarioForMCIntegerWeights = None
0089 defaultOptions.runUnscheduled = False
0090 defaultOptions.timeoutOutput = False
0091 defaultOptions.nThreads = 1
0092 defaultOptions.nStreams = 0
0093 defaultOptions.nConcurrentLumis = 0
0094 defaultOptions.nConcurrentIOVs = 0
0095 defaultOptions.accelerators = None
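# Illustrative sketch (not part of the original file): callers such as cmsDriver.py
# typically copy these defaults and override individual fields before handing the
# object to ConfigBuilder, e.g.
#   import copy
#   opts = copy.deepcopy(defaultOptions)
#   opts.isMC, opts.isData = True, False
#   opts.step = 'GEN,SIM'
#   opts.conditions = 'auto:run2_mc'   # assumed GlobalTag key, for illustration only
#   builder = ConfigBuilder(opts, with_output=True, with_input=True)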
0096 
0097 # some helper routines
0098 def dumpPython(process,name):
0099     theObject = getattr(process,name)
0100     if isinstance(theObject,cms.Path) or isinstance(theObject,cms.EndPath) or isinstance(theObject,cms.Sequence):
0101         return "process."+name+" = " + theObject.dumpPython()
0102     elif isinstance(theObject,_Module) or isinstance(theObject,cms.ESProducer):
0103         return "process."+name+" = " + theObject.dumpPython()+"\n"
0104     else:
0105         return "process."+name+" = " + theObject.dumpPython()+"\n"
0106 def filesFromList(fileName,s=None):
0107     import os
0108     import FWCore.ParameterSet.Config as cms
0109     prim=[]
0110     sec=[]
0111     for line in open(fileName,'r'):
0112         if line.count(".root")>=2:
0113             #two files solution...
0114             entries=line.replace("\n","").split()
0115             prim.append(entries[0])
0116             sec.append(entries[1])
0117         elif (line.find(".root")!=-1):
0118             entry=line.replace("\n","")
0119             prim.append(entry)
0120     # remove any duplicates but keep the order
0121     file_seen = set()
0122     prim = [f for f in prim if not (f in file_seen or file_seen.add(f))]
0123     file_seen = set()
0124     sec = [f for f in sec if not (f in file_seen or file_seen.add(f))]
0125     if s:
0126         if not hasattr(s,"fileNames"):
0127             s.fileNames=cms.untracked.vstring(prim)
0128         else:
0129             s.fileNames.extend(prim)
0130         if len(sec)!=0:
0131             if not hasattr(s,"secondaryFileNames"):
0132                 s.secondaryFileNames=cms.untracked.vstring(sec)
0133             else:
0134                 s.secondaryFileNames.extend(sec)
0135     print("found files: ",prim)
0136     if len(prim)==0:
0137         raise Exception("There are no files in input from the file list")
0138     if len(sec)!=0:
0139         print("found parent files:",sec)
0140     return (prim,sec)
0141 
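# Illustrative input accepted by filesFromList (paths are hypothetical). Each line
# holds one primary file, optionally followed by its parent/secondary file:
#   /store/data/Run1/Dataset/RAW/v1/primary_1.root /store/data/Run1/Dataset/RAW/v1/parent_1.root
#   /store/data/Run1/Dataset/RAW/v1/primary_2.root
# filesFromList('files.txt', process.source) then fills fileNames (and
# secondaryFileNames whenever a second column is present).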
0142 def filesFromDASQuery(query,option="",s=None):
0143     import os,time
0144     import FWCore.ParameterSet.Config as cms
0145     prim=[]
0146     sec=[]
0147     print("the query is",query)
0148     eC=5
0149     count=0
0150     while eC!=0 and count<3:
0151         if count!=0:
0152             print('Sleeping, then retrying DAS')
0153             time.sleep(100)
0154         p = Popen('dasgoclient %s --query "%s"'%(option,query), stdout=PIPE,shell=True, universal_newlines=True)
0155         pipe=p.stdout.read()
0156         tupleP = os.waitpid(p.pid, 0)
0157         eC=tupleP[1]
0158         count=count+1
0159     if eC==0:
0160         print("DAS succeeded after",count,"attempts",eC)
0161     else:
0162         print("DAS failed 3 times - I give up")
0163     for line in pipe.split('\n'):
0164         if line.count(".root")>=2:
0165             #two files solution...
0166             entries=line.replace("\n","").split()
0167             prim.append(entries[0])
0168             sec.append(entries[1])
0169         elif (line.find(".root")!=-1):
0170             entry=line.replace("\n","")
0171             prim.append(entry)
0172     # remove any duplicates
0173     prim = sorted(list(set(prim)))
0174     sec = sorted(list(set(sec)))
0175     if s:
0176         if not hasattr(s,"fileNames"):
0177             s.fileNames=cms.untracked.vstring(prim)
0178         else:
0179             s.fileNames.extend(prim)
0180         if len(sec)!=0:
0181             if not hasattr(s,"secondaryFileNames"):
0182                 s.secondaryFileNames=cms.untracked.vstring(sec)
0183             else:
0184                 s.secondaryFileNames.extend(sec)
0185     print("found files: ",prim)
0186     if len(sec)!=0:
0187         print("found parent files:",sec)
0188     return (prim,sec)
0189 
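# Illustrative call (dataset name and option are hypothetical); the query string
# follows the dasgoclient syntax built in addSource below:
#   filesFromDASQuery('file dataset = /MyPrimary/MyEra-v1/GEN-SIM', '--limit 0', process.source)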
0190 def anyOf(listOfKeys,dict,opt=None):
0191     for k in listOfKeys:
0192         if k in dict:
0193             toReturn=dict[k]
0194             dict.pop(k)
0195             return toReturn
0196     if opt!=None:
0197         return opt
0198     else:
0199         raise Exception("one of "+','.join(listOfKeys)+" is a mandatory entry of the --output options")
0200 
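# Illustrative behaviour of anyOf (a sketch, not part of the original file): the
# --output dictionaries may use short or long key aliases interchangeably,
#   anyOf(['t','tier','dataTier'], {'t':'RECO'})   # -> 'RECO', key removed from the dict
#   anyOf(['f','ftN','filterName'], {}, '')        # -> '' (the supplied default)
# and a missing mandatory key without a default raises the exception above.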
0201 class ConfigBuilder(object):
0202     """The main building routines """
0203 
0204     def __init__(self, options, process = None, with_output = False, with_input = False ):
0205         """options taken from old cmsDriver and optparse """
0206 
0207         options.outfile_name = options.dirout+options.fileout
0208 
0209         self._options = options
0210 
0211         if self._options.isData and options.isMC:
0212             raise Exception("ERROR: You may specify only --data or --mc, not both")
0213         #if not self._options.conditions:
0214         #        raise Exception("ERROR: No conditions given!\nPlease specify conditions. E.g. via --conditions=IDEAL_30X::All")
0215 
0216         # check that MEtoEDMConverter (running in ENDJOB) and DQMIO don't run in the same job
0217         if 'ENDJOB' in self._options.step:
0218             if  (hasattr(self._options,"outputDefinition") and \
0219                 self._options.outputDefinition != '' and \
0220                 any(anyOf(['t','tier','dataTier'],outdic) == 'DQMIO' for outdic in eval(self._options.outputDefinition))) or \
0221                 (hasattr(self._options,"datatier") and \
0222                 self._options.datatier and \
0223                 'DQMIO' in self._options.datatier):
0224                 print("removing ENDJOB from steps since not compatible with DQMIO dataTier")
0225                 self._options.step=self._options.step.replace(',ENDJOB','')
0226 
0227 
0228 
0229         # what steps are provided by this class?
0230         stepList = [re.sub(r'^prepare_', '', methodName) for methodName in ConfigBuilder.__dict__ if methodName.startswith('prepare_')]
0231         self.stepMap={}
0232         self.stepKeys=[]
0233         for step in self._options.step.split(","):
0234             if step=='': continue
0235             stepParts = step.split(":")
0236             stepName = stepParts[0]
0237             if stepName not in stepList and not stepName.startswith('re'):
0238                 raise ValueError("Step {} unknown. Available are {}".format( stepName , sorted(stepList)))
0239             if len(stepParts)==1:
0240                 self.stepMap[stepName]=""
0241             elif len(stepParts)==2:
0242                 self.stepMap[stepName]=stepParts[1].split('+')
0243             elif len(stepParts)==3:
0244                 self.stepMap[stepName]=(stepParts[2].split('+'),stepParts[1])
0245             else:
0246                 raise ValueError(f"Step definition {step} invalid")
0247             self.stepKeys.append(stepName)
0248 
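        # Illustrative step specifications parsed above (sequence and cff names are
        # hypothetical):
        #   'GEN,SIM'                          -> {'GEN': '', 'SIM': ''}
        #   'RECO:reconstruction_trackingOnly' -> {'RECO': ['reconstruction_trackingOnly']}
        #   'USER:MyPkg/MyCfg_cff:seqA+seqB'   -> {'USER': (['seqA','seqB'], 'MyPkg/MyCfg_cff')}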
0249         #print(f"map of steps is: {self.stepMap}")
0250 
0251         self.with_output = with_output
0252         self.process=process
0253 
0254         if hasattr(self._options,"no_output_flag") and self._options.no_output_flag:
0255             self.with_output = False
0256         self.with_input = with_input
0257         self.imports = []
0258         self.create_process()
0259         self.define_Configs()
0260         self.schedule = list()
0261         self.scheduleIndexOfFirstHLTPath = None
0262 
0263         # we are doing three things here:
0264         # creating a process to catch errors
0265         # building the code to re-create the process
0266 
0267         self.additionalCommands = []
0268         # TODO: maybe a list of to be dumped objects would help as well
0269         self.blacklist_paths = []
0270         self.addedObjects = []
0271         self.additionalOutputs = {}
0272 
0273         self.productionFilterSequence = None
0274         self.labelsToAssociate=[]
0275         self.nextScheduleIsConditional=False
0276         self.conditionalPaths=[]
0277         self.excludedPaths=[]
0278 
0279     def profileOptions(self):
0280         """
0281         addIgProfService
0282         Function to add the igprof profile service so that you can dump in the middle
0283         of the run.
0284         """
0285         profileOpts = self._options.profile.split(':')
0286         profilerStart = 1
0287         profilerInterval = 100
0288         profilerFormat = None
0289         profilerJobFormat = None
0290 
0291         if len(profileOpts):
0292             #type, given as first argument is unused here
0293             profileOpts.pop(0)
0294         if len(profileOpts):
0295             startEvent = profileOpts.pop(0)
0296             if not startEvent.isdigit():
0297                 raise Exception("%s is not a number" % startEvent)
0298             profilerStart = int(startEvent)
0299         if len(profileOpts):
0300             eventInterval = profileOpts.pop(0)
0301             if not eventInterval.isdigit():
0302                 raise Exception("%s is not a number" % eventInterval)
0303             profilerInterval = int(eventInterval)
0304         if len(profileOpts):
0305             profilerFormat = profileOpts.pop(0)
0306 
0307 
0308         if not profilerFormat:
0309             profilerFormat = "%s___%s___%%I.gz" % (
0310                 self._options.evt_type.replace("_cfi", ""),
0311                 hashlib.md5(
0312                     (str(self._options.step) + str(self._options.pileup) + str(self._options.conditions) +
0313                     str(self._options.datatier) + str(self._options.profileTypeLabel)).encode('utf-8')
0314                 ).hexdigest()
0315             )
0316         if not profilerJobFormat and profilerFormat.endswith(".gz"):
0317             profilerJobFormat = profilerFormat.replace(".gz", "_EndOfJob.gz")
0318         elif not profilerJobFormat:
0319             profilerJobFormat = profilerFormat + "_EndOfJob.gz"
0320 
0321         return (profilerStart,profilerInterval,profilerFormat,profilerJobFormat)
0322 
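    # Illustrative --profile specification consumed by profileOptions() above
    # (values are hypothetical):
    #   --profile pp:100:50:myprofile_%I.gz
    # i.e. <type>:<firstEvent>:<eventInterval>:<format>; the leading type token is
    # ignored here and the format falls back to an md5-based name when omitted.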
0323     def heapProfileOptions(self):
0324         """
0325         addJeProfService
0326         Function to add the jemalloc heap profile service so that you can dump in the middle
0327         of the run.
0328         """
0329         profileOpts = []
0330         profilerStart = 1
0331         profilerInterval = 100
0332         profilerFormat = "jeprof_%s.heap"
0333         profilerJobFormat = None
0334 
0335 
0336         if not profilerJobFormat and profilerFormat.endswith(".heap"):
0337             profilerJobFormat = profilerFormat.replace(".heap", "_EndOfJob.heap")
0338         elif not profilerJobFormat:
0339             profilerJobFormat = profilerFormat + "_EndOfJob.heap"
0340 
0341         return (profilerStart,profilerInterval,profilerFormat,profilerJobFormat)
0342 
0343     def load(self,includeFile):
0344         includeFile = includeFile.replace('/','.')
0345         self.process.load(includeFile)
0346         return sys.modules[includeFile]
0347 
0348     def loadAndRemember(self, includeFile):
0349         """helper routine to load and memorize imports"""
0350         # we could make the imports an on-the-fly data method of the process instance itself
0351         # not sure if the latter is a good idea
0352         includeFile = includeFile.replace('/','.')
0353         self.imports.append(includeFile)
0354         self.process.load(includeFile)
0355         return sys.modules[includeFile]
0356 
0357     def executeAndRemember(self, command):
0358         """helper routine to remember replace statements"""
0359         self.additionalCommands.append(command)
0360         if not command.strip().startswith("#"):
0361         # substitute: process.foo = process.bar -> self.process.foo = self.process.bar
0362             import re
0363             exec(re.sub(r"([^a-zA-Z_0-9]|^)(process)([^a-zA-Z_0-9])",r"\1self.process\3",command))
0364             #exec(command.replace("process.","self.process."))
0365 
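    # Sketch of how executeAndRemember is used elsewhere in this class: the command
    # is executed on the live process *and* replayed verbatim in the dumped python,
    # e.g. (parameter value is illustrative)
    #   self.executeAndRemember('process.mix.input.nbPileupEvents.averageNumber = cms.double(20.)')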
0366     def addCommon(self):
0367         if 'HARVESTING' in self.stepMap.keys() or 'ALCAHARVEST' in self.stepMap.keys():
0368             self.process.options.Rethrow = ['ProductNotFound']
0369             self.process.options.fileMode = 'FULLMERGE'
0370 
0371         self.addedObjects.append(("","options"))
0372 
0373         if self._options.lazy_download:
0374             self.process.AdaptorConfig = cms.Service("AdaptorConfig",
0375                                                      stats = cms.untracked.bool(True),
0376                                                      enable = cms.untracked.bool(True),
0377                                                      cacheHint = cms.untracked.string("lazy-download"),
0378                                                      readHint = cms.untracked.string("read-ahead-buffered")
0379                                                      )
0380             self.addedObjects.append(("Setup lazy download","AdaptorConfig"))
0381 
0382         #self.process.cmsDriverCommand = cms.untracked.PSet( command=cms.untracked.string('cmsDriver.py '+self._options.arguments) )
0383         #self.addedObjects.append(("what cmsDriver command was used","cmsDriverCommand"))
0384 
0385         if self._options.profile:
0386             (start, interval, eventFormat, jobFormat)=self.profileOptions()
0387             self.process.IgProfService = cms.Service("IgProfService",
0388                                                      reportFirstEvent            = cms.untracked.int32(start),
0389                                                      reportEventInterval         = cms.untracked.int32(interval),
0390                                                      reportToFileAtPostEvent     = cms.untracked.string("| gzip -c > %s"%(eventFormat)),
0391                                                      reportToFileAtPostEndJob    = cms.untracked.string("| gzip -c > %s"%(jobFormat)))
0392             self.addedObjects.append(("Setup IGProf Service for profiling","IgProfService"))
0393 
0394         if self._options.heap_profile:
0395             (start, interval, eventFormat, jobFormat)=self.heapProfileOptions()
0396             self.process.JeProfService = cms.Service("JeProfService",
0397                                                      reportFirstEvent            = cms.untracked.int32(start),
0398                                                      reportEventInterval         = cms.untracked.int32(interval),
0399                                                      reportToFileAtPostEvent     = cms.untracked.string("%s"%(eventFormat)),
0400                                                      reportToFileAtPostEndJob    = cms.untracked.string("%s"%(jobFormat)))
0401             self.addedObjects.append(("Setup JeProf Service for heap profiling","JeProfService"))
0402 
0403     def addMaxEvents(self):
0404         """Here we decide how many events will be processed"""
0405         self.process.maxEvents.input = self._options.number
0406         if self._options.number_out:
0407             self.process.maxEvents.output = self._options.number_out
0408         self.addedObjects.append(("","maxEvents"))
0409 
0410     def addSource(self):
0411         """Here the source is built. Priority: file, generator"""
0412         self.addedObjects.append(("Input source","source"))
0413 
0414         def filesFromOption(self):
0415             for entry in self._options.filein.split(','):
0416                 print("entry",entry)
0417                 if entry.startswith("filelist:"):
0418                     filesFromList(entry[9:],self.process.source)
0419                 elif entry.startswith("dbs:") or entry.startswith("das:"):
0420                     filesFromDASQuery('file dataset = %s'%(entry[4:]),self._options.dasoption,self.process.source)
0421                 else:
0422                     self.process.source.fileNames.append(self._options.dirin+entry)
0423             if self._options.secondfilein:
0424                 if not hasattr(self.process.source,"secondaryFileNames"):
0425                     raise Exception("--secondfilein not compatible with "+self._options.filetype+" input type")
0426                 for entry in self._options.secondfilein.split(','):
0427                     print("entry",entry)
0428                     if entry.startswith("filelist:"):
0429                         self.process.source.secondaryFileNames.extend((filesFromList(entry[9:]))[0])
0430                     elif entry.startswith("dbs:") or entry.startswith("das:"):
0431                         self.process.source.secondaryFileNames.extend((filesFromDASQuery('file dataset = %s'%(entry[4:]),self._options.dasoption))[0])
0432                     else:
0433                         self.process.source.secondaryFileNames.append(self._options.dirin+entry)
0434 
0435         if self._options.filein or self._options.dasquery:
0436             if self._options.filetype == "EDM":
0437                 self.process.source=cms.Source("PoolSource",
0438                                                fileNames = cms.untracked.vstring(),
0439                                                secondaryFileNames= cms.untracked.vstring())
0440                 filesFromOption(self)
0441             elif self._options.filetype == "DAT":
0442                 self.process.source=cms.Source("NewEventStreamFileReader",fileNames = cms.untracked.vstring())
0443                 filesFromOption(self)
0444             elif self._options.filetype == "LHE":
0445                 self.process.source=cms.Source("LHESource", fileNames = cms.untracked.vstring())
0446                 if self._options.filein.startswith("lhe:"):
0447                     #list the article directory automatically
0448                     args=self._options.filein.split(':')
0449                     article=args[1]
0450                     print('LHE input from article ',article)
0451                     location='/store/lhe/'
0452                     import os
0453                     textOfFiles=os.popen('cmsLHEtoEOSManager.py -l '+article)
0454                     for line in textOfFiles:
0455                         for fileName in [x for x in line.split() if '.lhe' in x]:
0456                             self.process.source.fileNames.append(location+article+'/'+fileName)
0457                     #check first if list of LHE files is loaded (not empty)
0458                     if len(line)<2:
0459                         print('Failed to load LHE files, please check and try again.')
0460                         sys.exit(-1)
0461                     #Additional check to protect empty fileNames in process.source
0462                     if len(self.process.source.fileNames)==0:
0463                         print('process.source.fileNames is empty even though the line check passed')
0464                         sys.exit(-1)
0465                     if len(args)>2:
0466                         self.process.source.skipEvents = cms.untracked.uint32(int(args[2]))
0467                 else:
0468                     filesFromOption(self)
0469 
0470             elif self._options.filetype == "DQM":
0471                 self.process.source=cms.Source("DQMRootSource",
0472                                                fileNames = cms.untracked.vstring())
0473                 filesFromOption(self)
0474 
0475             elif self._options.filetype == "DQMDAQ":
0476                 # FIXME: how to configure it if there are no input files specified?
0477                 self.process.source=cms.Source("DQMStreamerReader")
0478 
0479 
0480             if ('HARVESTING' in self.stepMap.keys() or 'ALCAHARVEST' in self.stepMap.keys()) and (not self._options.filetype == "DQM"):
0481                 self.process.source.processingMode = cms.untracked.string("RunsAndLumis")
0482 
0483         if self._options.dasquery!='':
0484             self.process.source=cms.Source("PoolSource", fileNames = cms.untracked.vstring(),secondaryFileNames = cms.untracked.vstring())
0485             filesFromDASQuery(self._options.dasquery,self._options.dasoption,self.process.source)
0486 
0487             if ('HARVESTING' in self.stepMap.keys() or 'ALCAHARVEST' in self.stepMap.keys()) and (not self._options.filetype == "DQM"):
0488                 self.process.source.processingMode = cms.untracked.string("RunsAndLumis")
0489 
0490         ##drop LHEXMLStringProduct on input to save memory if appropriate
0491         if 'GEN' in self.stepMap.keys() and not self._options.filetype == "LHE":
0492             if self._options.inputCommands:
0493                 self._options.inputCommands+=',drop LHEXMLStringProduct_*_*_*,'
0494             else:
0495                 self._options.inputCommands='keep *, drop LHEXMLStringProduct_*_*_*,'
0496 
0497         if self.process.source and self._options.inputCommands and not self._options.filetype == "LHE":
0498             if not hasattr(self.process.source,'inputCommands'): self.process.source.inputCommands=cms.untracked.vstring()
0499             for command in self._options.inputCommands.split(','):
0500                 # remove whitespace around the keep/drop statements
0501                 command = command.strip()
0502                 if command=='': continue
0503                 self.process.source.inputCommands.append(command)
0504             if not self._options.dropDescendant:
0505                 self.process.source.dropDescendantsOfDroppedBranches = cms.untracked.bool(False)
0506 
0507         if self._options.lumiToProcess:
0508             import FWCore.PythonUtilities.LumiList as LumiList
0509             self.process.source.lumisToProcess = cms.untracked.VLuminosityBlockRange( LumiList.LumiList(self._options.lumiToProcess).getCMSSWString().split(',') )
0510 
0511         if 'GEN' in self.stepMap.keys() or 'LHE' in self.stepMap or (not self._options.filein and hasattr(self._options, "evt_type")):
0512             if self.process.source is None:
0513                 self.process.source=cms.Source("EmptySource")
0514 
0515         # modify source in case of run-dependent MC
0516         self.runsAndWeights=None
0517         if self._options.runsAndWeightsForMC or self._options.runsScenarioForMC :
0518             if not self._options.isMC :
0519                 raise Exception("options --runsAndWeightsForMC and --runsScenarioForMC are only valid for MC")
0520             if self._options.runsAndWeightsForMC:
0521                 self.runsAndWeights = eval(self._options.runsAndWeightsForMC)
0522             else:
0523                 from Configuration.StandardSequences.RunsAndWeights import RunsAndWeights
0524                 if isinstance(RunsAndWeights[self._options.runsScenarioForMC], str):
0525                     __import__(RunsAndWeights[self._options.runsScenarioForMC])
0526                     self.runsAndWeights = sys.modules[RunsAndWeights[self._options.runsScenarioForMC]].runProbabilityDistribution
0527                 else:
0528                     self.runsAndWeights = RunsAndWeights[self._options.runsScenarioForMC]
0529 
0530         if self.runsAndWeights:
0531             import SimGeneral.Configuration.ThrowAndSetRandomRun as ThrowAndSetRandomRun
0532             ThrowAndSetRandomRun.throwAndSetRandomRun(self.process.source,self.runsAndWeights)
0533             self.additionalCommands.append('import SimGeneral.Configuration.ThrowAndSetRandomRun as ThrowAndSetRandomRun')
0534             self.additionalCommands.append('ThrowAndSetRandomRun.throwAndSetRandomRun(process.source,%s)'%(self.runsAndWeights))
0535 
0536         # modify source in case of run-dependent MC (Run-3 method)
0537         self.runsAndWeightsInt=None
0538         if self._options.runsAndWeightsForMCIntegerWeights or self._options.runsScenarioForMCIntegerWeights:
0539             if not self._options.isMC :
0540                 raise Exception("options --runsAndWeightsForMCIntegerWeights and --runsScenarioForMCIntegerWeights are only valid for MC")
0541             if self._options.runsAndWeightsForMCIntegerWeights:
0542                 self.runsAndWeightsInt = eval(self._options.runsAndWeightsForMCIntegerWeights)
0543             else:
0544                 from Configuration.StandardSequences.RunsAndWeights import RunsAndWeights
0545                 if isinstance(RunsAndWeights[self._options.runsScenarioForMCIntegerWeights], str):
0546                     __import__(RunsAndWeights[self._options.runsScenarioForMCIntegerWeights])
0547                     self.runsAndWeightsInt = sys.modules[RunsAndWeights[self._options.runsScenarioForMCIntegerWeights]].runProbabilityDistribution
0548                 else:
0549                     self.runsAndWeightsInt = RunsAndWeights[self._options.runsScenarioForMCIntegerWeights]
0550 
0551         if self.runsAndWeightsInt:
0552             if not self._options.relval:
0553                 raise Exception("--relval option required when using --runsAndWeightsForMCIntegerWeights/--runsScenarioForMCIntegerWeights")
0554             if 'DATAMIX' in self._options.step:
0555                 from SimGeneral.Configuration.LumiToRun import lumi_to_run
0556                 total_events, events_per_job  = self._options.relval.split(',')
0557                 lumi_to_run_mapping = lumi_to_run(self.runsAndWeightsInt, int(total_events), int(events_per_job))
0558                 self.additionalCommands.append("process.source.firstLuminosityBlockForEachRun = cms.untracked.VLuminosityBlockID(*[cms.LuminosityBlockID(x,y) for x,y in " + str(lumi_to_run_mapping) + "])")
0559 
0560         return
0561 
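    # Illustrative --filein forms handled by addSource() above (values are hypothetical):
    #   --filein file:events.root              # plain path, appended to fileNames
    #   --filein filelist:files.txt            # resolved via filesFromList
    #   --filein das:/MyPrimary/MyEra-v1/RAW   # resolved via filesFromDASQuery
    #   --filein lhe:12345                     # LHE article listed from /store/lhe/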
0562     def addOutput(self):
0563         """ Add output module to the process """
0564         result=""
0565         if self._options.outputDefinition:
0566             if self._options.datatier:
0567                 print("--datatier & --eventcontent options ignored")
0568 
0569             #new output convention with a list of dict
0570             outList = eval(self._options.outputDefinition)
0571             for (id,outDefDict) in enumerate(outList):
0572                 outDefDictStr=outDefDict.__str__()
0573                 if not isinstance(outDefDict,dict):
0574                     raise Exception("--output needs to be passed a list of dicts: "+self._options.outputDefinition+" is invalid")
0575                 #requires option: tier
0576                 theTier=anyOf(['t','tier','dataTier'],outDefDict)
0577                 #optional option: eventcontent, filtername, selectEvents, moduleLabel, filename
0578                 ## event content
0579                 theStreamType=anyOf(['e','ec','eventContent','streamType'],outDefDict,theTier)
0580                 theFilterName=anyOf(['f','ftN','filterName'],outDefDict,'')
0581                 theSelectEvent=anyOf(['s','sE','selectEvents'],outDefDict,'')
0582                 theModuleLabel=anyOf(['l','mL','moduleLabel'],outDefDict,'')
0583                 theExtraOutputCommands=anyOf(['o','oC','outputCommands'],outDefDict,'')
0584                 # module label has a particular role
0585                 if not theModuleLabel:
0586                     tryNames=[theStreamType.replace(theTier.replace('-',''),'')+theTier.replace('-','')+'output',
0587                               theStreamType.replace(theTier.replace('-',''),'')+theTier.replace('-','')+theFilterName+'output',
0588                               theStreamType.replace(theTier.replace('-',''),'')+theTier.replace('-','')+theFilterName+theSelectEvent.split(',')[0].replace(':','for').replace(' ','')+'output'
0589                               ]
0590                     for name in tryNames:
0591                         if not hasattr(self.process,name):
0592                             theModuleLabel=name
0593                             break
0594                 if not theModuleLabel:
0595                     raise Exception("cannot find a module label for specification: "+outDefDictStr)
0596                 if id==0:
0597                     defaultFileName=self._options.outfile_name
0598                 else:
0599                     defaultFileName=self._options.outfile_name.replace('.root','_in'+theTier+'.root')
0600 
0601                 theFileName=self._options.dirout+anyOf(['fn','fileName'],outDefDict,defaultFileName)
0602                 if not theFileName.endswith('.root'):
0603                     theFileName+='.root'
0604 
0605                 if len(outDefDict):
0606                     raise Exception("unused keys from --output options: "+','.join(outDefDict.keys()))
0607                 if theStreamType=='DQMIO': theStreamType='DQM'
0608                 if theStreamType=='ALL':
0609                     theEventContent = cms.PSet(outputCommands = cms.untracked.vstring('keep *'))
0610                 else:
0611                     theEventContent = getattr(self.process, theStreamType+"EventContent")
0612 
0613 
0614                 addAlCaSelects=False
0615                 if theStreamType=='ALCARECO' and not theFilterName:
0616                     theFilterName='StreamALCACombined'
0617                     addAlCaSelects=True
0618 
0619                 CppType='PoolOutputModule'
0620                 if self._options.timeoutOutput:
0621                     CppType='TimeoutPoolOutputModule'
0622                 if theStreamType=='DQM' and theTier=='DQMIO': CppType='DQMRootOutputModule'
0623                 output = cms.OutputModule(CppType,
0624                                           theEventContent.clone(),
0625                                           fileName = cms.untracked.string(theFileName),
0626                                           dataset = cms.untracked.PSet(
0627                                              dataTier = cms.untracked.string(theTier),
0628                                              filterName = cms.untracked.string(theFilterName))
0629                                           )
0630                 if not theSelectEvent and hasattr(self.process,'generation_step') and theStreamType!='LHE':
0631                     output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring('generation_step'))
0632                 if not theSelectEvent and hasattr(self.process,'filtering_step'):
0633                     output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring('filtering_step'))
0634                 if theSelectEvent:
0635                     output.SelectEvents =cms.untracked.PSet(SelectEvents = cms.vstring(theSelectEvent))
0636 
0637                 if addAlCaSelects:
0638                     if not hasattr(output,'SelectEvents'):
0639                         output.SelectEvents=cms.untracked.PSet(SelectEvents=cms.vstring())
0640                     for alca in self.AlCaPaths:
0641                         output.SelectEvents.SelectEvents.extend(getattr(self.process,'OutALCARECO'+alca).SelectEvents.SelectEvents)
0642 
0643 
0644                 if hasattr(self.process,theModuleLabel):
0645                     raise Exception("the current process already has a module "+theModuleLabel+" defined")
0646                 #print "creating output module ",theModuleLabel
0647                 setattr(self.process,theModuleLabel,output)
0648                 outputModule=getattr(self.process,theModuleLabel)
0649                 setattr(self.process,theModuleLabel+'_step',cms.EndPath(outputModule))
0650                 path=getattr(self.process,theModuleLabel+'_step')
0651                 self.schedule.append(path)
0652 
0653                 if not self._options.inlineEventContent and hasattr(self.process,theStreamType+"EventContent"):
0654                     def doNotInlineEventContent(instance,label = "cms.untracked.vstring(process."+theStreamType+"EventContent.outputCommands)"):
0655                         return label
0656                     outputModule.outputCommands.__dict__["dumpPython"] = doNotInlineEventContent
0657                 if theExtraOutputCommands:
0658                     if not isinstance(theExtraOutputCommands,list):
0659                         raise Exception("extra output commands in --output must be a list of strings")
0660                     if hasattr(self.process,theStreamType+"EventContent"):
0661                         self.executeAndRemember('process.%s.outputCommands.extend(%s)'%(theModuleLabel,theExtraOutputCommands))
0662                     else:
0663                         outputModule.outputCommands.extend(theExtraOutputCommands)
0664 
0665                 result+="\nprocess."+theModuleLabel+" = "+outputModule.dumpPython()
0666 
0667             ##ends the --output options model
0668             return result
0669 
0670         streamTypes=self._options.eventcontent.split(',')
0671         tiers=self._options.datatier.split(',')
0672         if not self._options.outputDefinition and len(streamTypes)!=len(tiers):
0673             raise Exception("number of event content arguments does not match number of datatier arguments")
0674 
0675         # if the only step is alca we don't need to put in an output
0676         if self._options.step.split(',')[0].split(':')[0] == 'ALCA':
0677             return "\n"
0678 
0679         for i,(streamType,tier) in enumerate(zip(streamTypes,tiers)):
0680             if streamType=='': continue
0681             if streamType == 'ALCARECO' and not 'ALCAPRODUCER' in self._options.step: continue
0682             if streamType=='DQMIO': streamType='DQM'
0683             eventContent=streamType
0684             ## override streamType to eventContent in case NANOEDM
0685             if streamType == "NANOEDMAOD" :
0686                 eventContent = "NANOAOD"
0687             elif streamType == "NANOEDMAODSIM" :
0688                 eventContent = "NANOAODSIM"
0689             theEventContent = getattr(self.process, eventContent+"EventContent")
0690             if i==0:
0691                 theFileName=self._options.outfile_name
0692                 theFilterName=self._options.filtername
0693             else:
0694                 theFileName=self._options.outfile_name.replace('.root','_in'+streamType+'.root')
0695                 theFilterName=self._options.filtername
0696             CppType='PoolOutputModule'
0697             if self._options.timeoutOutput:
0698                 CppType='TimeoutPoolOutputModule'
0699             if streamType=='DQM' and tier=='DQMIO': CppType='DQMRootOutputModule'
0700             if "NANOAOD" in streamType : CppType='NanoAODOutputModule'
0701             output = cms.OutputModule(CppType,
0702                                       theEventContent,
0703                                       fileName = cms.untracked.string(theFileName),
0704                                       dataset = cms.untracked.PSet(dataTier = cms.untracked.string(tier),
0705                                                                    filterName = cms.untracked.string(theFilterName)
0706                                                                    )
0707                                       )
0708             if hasattr(self.process,"generation_step") and streamType!='LHE':
0709                 output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring('generation_step'))
0710             if hasattr(self.process,"filtering_step"):
0711                 output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring('filtering_step'))
0712 
0713             if streamType=='ALCARECO':
0714                 output.dataset.filterName = cms.untracked.string('StreamALCACombined')
0715 
0716             if "MINIAOD" in streamType:
0717                 from PhysicsTools.PatAlgos.slimming.miniAOD_tools import miniAOD_customizeOutput
0718                 miniAOD_customizeOutput(output)
0719 
0720             outputModuleName=streamType+'output'
0721             setattr(self.process,outputModuleName,output)
0722             outputModule=getattr(self.process,outputModuleName)
0723             setattr(self.process,outputModuleName+'_step',cms.EndPath(outputModule))
0724             path=getattr(self.process,outputModuleName+'_step')
0725             self.schedule.append(path)
0726 
0727             if self._options.outputCommands and streamType!='DQM':
0728                 for evct in self._options.outputCommands.split(','):
0729                     if not evct: continue
0730                     self.executeAndRemember("process.%s.outputCommands.append('%s')"%(outputModuleName,evct.strip()))
0731 
0732             if not self._options.inlineEventContent:
0733                 tmpstreamType=streamType
0734                 if "NANOEDM" in tmpstreamType :
0735                     tmpstreamType=tmpstreamType.replace("NANOEDM","NANO")
0736                 def doNotInlineEventContent(instance,label = "process."+tmpstreamType+"EventContent.outputCommands"):
0737                     return label
0738                 outputModule.outputCommands.__dict__["dumpPython"] = doNotInlineEventContent
0739 
0740             result+="\nprocess."+outputModuleName+" = "+outputModule.dumpPython()
0741 
0742         return result
0743 
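    # Illustrative --output definition consumed by the first branch of addOutput()
    # (tiers and filter name are hypothetical):
    #   --output '[{"e":"RECO","t":"RECO"},{"e":"DQM","t":"DQMIO","f":"myFilter"}]'
    # Each dict yields one output module plus an EndPath appended to the schedule;
    # short keys are resolved through anyOf(). Without --output, --eventcontent and
    # --datatier provide parallel, comma-separated lists zipped in the second branch.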
0744     def addStandardSequences(self):
0745         """
0746         Add selected standard sequences to the process
0747         """
0748         # load the pile up file
0749         if self._options.pileup:
0750             pileupSpec=self._options.pileup.split(',')[0]
0751 
0752             #make sure there is a set of pileup files specified when needed
0753             pileups_without_input=[defaultOptions.pileup,"Cosmics","default","HiMixNoPU",None]
0754             if self._options.pileup not in pileups_without_input and self._options.pileup_input==None:
0755                 message = "Pileup scenario requires input files. Please add an appropriate --pileup_input option"
0756                 raise Exception(message)
0757 
0758             # Does the requested pile-up scenario exist?
0759             from Configuration.StandardSequences.Mixing import Mixing,defineMixing
0760             if not pileupSpec in Mixing and '.' not in pileupSpec and 'file:' not in pileupSpec:
0761                 message = pileupSpec+' is not a known mixing scenario:\n available are: '+'\n'.join(Mixing.keys())
0762                 raise Exception(message)
0763 
0764             # Put mixing parameters in a dictionary
0765             if '.' in pileupSpec:
0766                 mixingDict={'file':pileupSpec}
0767             elif pileupSpec.startswith('file:'):
0768                 mixingDict={'file':pileupSpec[5:]}
0769             else:
0770                 import copy
0771                 mixingDict=copy.copy(Mixing[pileupSpec])
0772             if len(self._options.pileup.split(','))>1:
0773                 mixingDict.update(eval(self._options.pileup[self._options.pileup.find(',')+1:]))
0774 
0775             # Load the pu cfg file corresponding to the requested pu scenario
0776             if 'file:' in pileupSpec:
0777                 #the file is local
0778                 self.process.load(mixingDict['file'])
0779                 print("inlining mixing module configuration")
0780                 self._options.inlineObjects+=',mix'
0781             else:
0782                 self.loadAndRemember(mixingDict['file'])
0783 
0784             mixingDict.pop('file')
0785             if not "DATAMIX" in self.stepMap.keys(): # when DATAMIX is present, pileup_input refers to pre-mixed GEN-RAW
0786                 if self._options.pileup_input:
0787                     if self._options.pileup_input.startswith('dbs:') or self._options.pileup_input.startswith('das:'):
0788                         mixingDict['F']=filesFromDASQuery('file dataset = %s'%(self._options.pileup_input[4:],),self._options.pileup_dasoption)[0]
0789                     elif self._options.pileup_input.startswith("filelist:"):
0790                         mixingDict['F']=(filesFromList(self._options.pileup_input[9:]))[0]
0791                     else:
0792                         mixingDict['F']=self._options.pileup_input.split(',')
0793                 specialization=defineMixing(mixingDict)
0794                 for command in specialization:
0795                     self.executeAndRemember(command)
0796                 if len(mixingDict)!=0:
0797                     raise Exception('unused mixing specification: '+mixingDict.keys().__str__())
0798 
0799 
0800         # load the geometry file
0801         try:
0802             if len(self.stepMap):
0803                 self.loadAndRemember(self.GeometryCFF)
0804                 if ('SIM' in self.stepMap or 'reSIM' in self.stepMap) and not self._options.fast:
0805                     self.loadAndRemember(self.SimGeometryCFF)
0806                     if self.geometryDBLabel:
0807                         self.executeAndRemember('if hasattr(process, "XMLFromDBSource"): process.XMLFromDBSource.label="%s"'%(self.geometryDBLabel))
0808                         self.executeAndRemember('if hasattr(process, "DDDetectorESProducerFromDB"): process.DDDetectorESProducerFromDB.label="%s"'%(self.geometryDBLabel))
0809 
0810         except ImportError:
0811             print("Geometry option",self._options.geometry,"unknown.")
0812             raise
0813 
0814         if len(self.stepMap):
0815             self.loadAndRemember(self.magFieldCFF)
0816 
0817         for stepName in self.stepKeys:
0818             stepSpec = self.stepMap[stepName]
0819             print("Step:", stepName,"Spec:",stepSpec)
0820             if stepName.startswith('re'):
0821                 ##add the corresponding input content
0822                 if stepName[2:] not in self._options.donotDropOnInput:
0823                     self._options.inputEventContent='%s,%s'%(stepName.upper(),self._options.inputEventContent)
0824                 stepName=stepName[2:]
0825             if stepSpec=="":
0826                 getattr(self,"prepare_"+stepName)(stepSpec = getattr(self,stepName+"DefaultSeq"))
0827             elif isinstance(stepSpec, list):
0828                 getattr(self,"prepare_"+stepName)(stepSpec = '+'.join(stepSpec))
0829             elif isinstance(stepSpec, tuple):
0830                 getattr(self,"prepare_"+stepName)(stepSpec = ','.join([stepSpec[1],'+'.join(stepSpec[0])]))
0831             else:
0832                 raise ValueError("Invalid step definition")
0833 
0834         if self._options.restoreRNDSeeds!=False:
0835             #it is either True, or a process name
0836             if self._options.restoreRNDSeeds==True:
0837                 self.executeAndRemember('process.RandomNumberGeneratorService.restoreStateLabel=cms.untracked.string("randomEngineStateProducer")')
0838             else:
0839                 self.executeAndRemember('process.RandomNumberGeneratorService.restoreStateTag=cms.untracked.InputTag("randomEngineStateProducer","","%s")'%(self._options.restoreRNDSeeds))
0840             if self._options.inputEventContent or self._options.inputCommands:
0841                 if self._options.inputCommands:
0842                     self._options.inputCommands+='keep *_randomEngineStateProducer_*_*,'
0843                 else:
0844                     self._options.inputCommands='keep *_randomEngineStateProducer_*_*,'
0845 
0846 
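    # Illustrative pileup specifications handled by addStandardSequences() above
    # (scenario and dataset names are hypothetical):
    #   --pileup MyMixingScenario --pileup_input das:/MyMinBias/MyEra-v1/GEN-SIM
    #   --pileup 'MyMixingScenario,{...}'   # trailing dict (keys understood by
    #                                       # defineMixing) is merged into mixingDict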
0847     def completeInputCommand(self):
0848         if self._options.inputEventContent:
0849             import copy
0850             def dropSecondDropStar(iec):
0851                 # drop duplicate occurrences of 'drop *' in the list, keeping only the first
0852                 count=0
0853                 for item in iec:
0854                     if item=='drop *':
0855                         if count!=0:
0856                             iec.remove(item)
0857                         count+=1
0858 
0859             ## allow comma separated input eventcontent
0860             if not hasattr(self.process.source,'inputCommands'): self.process.source.inputCommands=cms.untracked.vstring()
0861             for evct in self._options.inputEventContent.split(','):
0862                 if evct=='': continue
0863                 theEventContent = getattr(self.process, evct+"EventContent")
0864                 if hasattr(theEventContent,'outputCommands'):
0865                     self.process.source.inputCommands.extend(copy.copy(theEventContent.outputCommands))
0866                 if hasattr(theEventContent,'inputCommands'):
0867                     self.process.source.inputCommands.extend(copy.copy(theEventContent.inputCommands))
0868 
0869             dropSecondDropStar(self.process.source.inputCommands)
0870 
0871             if not self._options.dropDescendant:
0872                 self.process.source.dropDescendantsOfDroppedBranches = cms.untracked.bool(False)
0873 
0874 
0875         return
0876 
0877     def addConditions(self):
0878         """Add conditions to the process"""
0879         if not self._options.conditions: return
0880 
0881         if 'FrontierConditions_GlobalTag' in self._options.conditions:
0882             print('using FrontierConditions_GlobalTag in --conditions is no longer necessary and will be deprecated soon. Please update your command line')
0883             self._options.conditions = self._options.conditions.replace("FrontierConditions_GlobalTag,",'')
0884 
0885         self.loadAndRemember(self.ConditionsDefaultCFF)
0886         from Configuration.AlCa.GlobalTag import GlobalTag
0887         self.process.GlobalTag = GlobalTag(self.process.GlobalTag, self._options.conditions, self._options.custom_conditions)
0888         self.additionalCommands.append('from Configuration.AlCa.GlobalTag import GlobalTag')
0889         self.additionalCommands.append('process.GlobalTag = GlobalTag(process.GlobalTag, %s, %s)' % (repr(self._options.conditions), repr(self._options.custom_conditions)))
0890 
0891 
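    # Illustrative --conditions usage recorded by addConditions() above (the key is
    # an assumption for the example): with --conditions auto:run2_mc the dumped
    # configuration ends with
    #   from Configuration.AlCa.GlobalTag import GlobalTag
    #   process.GlobalTag = GlobalTag(process.GlobalTag, 'auto:run2_mc', '')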
0892     def addCustomise(self,unsch=0):
0893         """Include the customise code """
0894 
0895         custOpt=[]
0896         if unsch==0:
0897             for c in self._options.customisation_file:
0898                 custOpt.extend(c.split(","))
0899         else:
0900             for c in self._options.customisation_file_unsch:
0901                 custOpt.extend(c.split(","))
0902 
0903         custMap=DictTypes.SortedKeysDict()
0904         for opt in custOpt:
0905             if opt=='': continue
0906             if opt.count('.')>1:
0907                 raise Exception("more than one '.' in the specification: "+opt)
0908             fileName=opt.split('.')[0]
0909             if opt.count('.')==0: rest='customise'
0910             else:
0911                 rest=opt.split('.')[1]
0912                 if rest=='py': rest='customise' #catch the case of --customise file.py
0913 
0914             if fileName in custMap:
0915                 custMap[fileName].extend(rest.split('+'))
0916             else:
0917                 custMap[fileName]=rest.split('+')
0918 
0919         if len(custMap)==0:
0920             final_snippet='\n'
0921         else:
0922             final_snippet='\n# customisation of the process.\n'
0923 
0924         allFcn=[]
0925         for opt in custMap:
0926             allFcn.extend(custMap[opt])
0927         for fcn in allFcn:
0928             if allFcn.count(fcn)!=1:
0929                 raise Exception("cannot specify "+fcn+" twice as a customisation method")
0930 
0931         for f in custMap:
0932             # let python search for that package and do syntax checking at the same time
0933             packageName = f.replace(".py","").replace("/",".")
0934             __import__(packageName)
0935             package = sys.modules[packageName]
0936 
0937             # now ask the package for its definition and pick .py instead of .pyc
0938             customiseFile = re.sub(r'\.pyc$', '.py', package.__file__)
0939 
0940             final_snippet+='\n# Automatic addition of the customisation function from '+packageName+'\n'
0941             if self._options.inline_custom:
0942                 for line in open(customiseFile,'r'):
0943                     if "import FWCore.ParameterSet.Config" in line:
0944                         continue
0945                     final_snippet += line
0946             else:
0947                 final_snippet += 'from %s import %s \n'%(packageName,','.join(custMap[f]))
0948             for fcn in custMap[f]:
0949                 print("customising the process with",fcn,"from",f)
0950                 if not hasattr(package,fcn):
0951                     #bound to fail at run time
0952                     raise Exception("config "+f+" has no function "+fcn)
0953                 #execute the command
0954                 self.process=getattr(package,fcn)(self.process)
0955                 #and print it in the configuration
0956                 final_snippet += "\n#call to customisation function "+fcn+" imported from "+packageName
0957                 final_snippet += "\nprocess = %s(process)\n"%(fcn,)
0958 
0959         if len(custMap)!=0:
0960             final_snippet += '\n# End of customisation functions\n'
0961 
0962         ### now for a useful command
0963         return final_snippet
0964 
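    # Illustrative --customise specifications resolved by addCustomise() above
    # (module and function names are hypothetical):
    #   --customise MyPkg/MyTools/customise_cff.tweakGeometry
    #   --customise MyPkg/MyTools/customise_cff.tweakA+tweakB   # several functions from one file
    # Each function is imported, applied to self.process, and echoed as
    # 'process = <fcn>(process)' in the dumped configuration.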
0965     def addCustomiseCmdLine(self):
0966         final_snippet='\n# Customisation from command line\n'
0967         if self._options.customise_commands:
0968             import string
0969             for com in self._options.customise_commands.split('\\n'):
0970                 com=com.lstrip()
0971                 self.executeAndRemember(com)
0972                 final_snippet +='\n'+com
0973 
0974         return final_snippet
0975 
0976     #----------------------------------------------------------------------------
0977     # here the methods to define the python includes for each step or
0978     # conditions
0979     #----------------------------------------------------------------------------
0980     def define_Configs(self):
0981         if len(self.stepMap):
0982             self.loadAndRemember('Configuration/StandardSequences/Services_cff')
0983         if self._options.particleTable not in defaultOptions.particleTableList:
0984             print('Invalid particle table provided. Options are:')
0985             print(defaultOptions.particleTableList)
0986             sys.exit(-1)
0987         else:
0988             if len(self.stepMap):
0989                 self.loadAndRemember('SimGeneral.HepPDTESSource.'+self._options.particleTable+'_cfi')
0990 
0991         self.loadAndRemember('FWCore/MessageService/MessageLogger_cfi')
0992 
0993         self.ALCADefaultCFF="Configuration/StandardSequences/AlCaRecoStreams_cff"
0994         self.GENDefaultCFF="Configuration/StandardSequences/Generator_cff"
0995         self.SIMDefaultCFF="Configuration/StandardSequences/Sim_cff"
0996         self.DIGIDefaultCFF="Configuration/StandardSequences/Digi_cff"
0997         self.DIGI2RAWDefaultCFF="Configuration/StandardSequences/DigiToRaw_cff"
0998         self.L1EMDefaultCFF='Configuration/StandardSequences/SimL1Emulator_cff'
0999         self.L1P2GTDefaultCFF = 'Configuration/StandardSequences/SimPhase2L1GlobalTriggerEmulator_cff'
1000         self.L1MENUDefaultCFF="Configuration/StandardSequences/L1TriggerDefaultMenu_cff"
1001         self.HLTDefaultCFF="Configuration/StandardSequences/HLTtable_cff"
1002         self.RAW2DIGIDefaultCFF="Configuration/StandardSequences/RawToDigi_Data_cff"
1003         if self._options.isRepacked: self.RAW2DIGIDefaultCFF="Configuration/StandardSequences/RawToDigi_DataMapper_cff"
1004         self.L1RecoDefaultCFF="Configuration/StandardSequences/L1Reco_cff"
1005         self.L1TrackTriggerDefaultCFF="Configuration/StandardSequences/L1TrackTrigger_cff"
1006         self.RECODefaultCFF="Configuration/StandardSequences/Reconstruction_Data_cff"
1007         self.RECOSIMDefaultCFF="Configuration/StandardSequences/RecoSim_cff"
1008         self.PATDefaultCFF="Configuration/StandardSequences/PAT_cff"
1009         self.NANODefaultCFF="PhysicsTools/NanoAOD/nano_cff"
1010         self.SKIMDefaultCFF="Configuration/StandardSequences/Skims_cff"
1011         self.POSTRECODefaultCFF="Configuration/StandardSequences/PostRecoGenerator_cff"
1012         self.VALIDATIONDefaultCFF="Configuration/StandardSequences/Validation_cff"
1013         self.L1HwValDefaultCFF = "Configuration/StandardSequences/L1HwVal_cff"
1014         self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOffline_cff"
1015         self.HARVESTINGDefaultCFF="Configuration/StandardSequences/Harvesting_cff"
1016         self.ALCAHARVESTDefaultCFF="Configuration/StandardSequences/AlCaHarvesting_cff"
1017         self.ENDJOBDefaultCFF="Configuration/StandardSequences/EndOfProcess_cff"
1018         self.ConditionsDefaultCFF = "Configuration/StandardSequences/FrontierConditions_GlobalTag_cff"
1019         self.CFWRITERDefaultCFF = "Configuration/StandardSequences/CrossingFrameWriter_cff"
1020         self.REPACKDefaultCFF="Configuration/StandardSequences/DigiToRaw_Repack_cff"
1021 
1022         if "DATAMIX" in self.stepMap.keys():
1023             self.DATAMIXDefaultCFF="Configuration/StandardSequences/DataMixer"+self._options.datamix+"_cff"
1024             self.DIGIDefaultCFF="Configuration/StandardSequences/DigiDM_cff"
1025             self.DIGI2RAWDefaultCFF="Configuration/StandardSequences/DigiToRawDM_cff"
1026             self.L1EMDefaultCFF='Configuration/StandardSequences/SimL1EmulatorDM_cff'
1027 
1028         self.ALCADefaultSeq=None
1029         self.LHEDefaultSeq='externalLHEProducer'
1030         self.GENDefaultSeq='pgen'
1031         self.SIMDefaultSeq='psim'
1032         self.DIGIDefaultSeq='pdigi'
1033         self.DATAMIXDefaultSeq=None
1034         self.DIGI2RAWDefaultSeq='DigiToRaw'
1035         self.HLTDefaultSeq='GRun'
1036         self.L1DefaultSeq=None
1037         self.L1P2GTDefaultSeq=None
1038         self.L1REPACKDefaultSeq='GT'
1039         self.HARVESTINGDefaultSeq=None
1040         self.ALCAHARVESTDefaultSeq=None
1041         self.CFWRITERDefaultSeq=None
1042         self.RAW2DIGIDefaultSeq='RawToDigi'
1043         self.L1RecoDefaultSeq='L1Reco'
1044         self.L1TrackTriggerDefaultSeq='L1TrackTrigger'
1045         if self._options.fast or ('RAW2DIGI' in self.stepMap and 'RECO' in self.stepMap):
1046             self.RECODefaultSeq='reconstruction'
1047         else:
1048             self.RECODefaultSeq='reconstruction_fromRECO'
1049         self.RECOSIMDefaultSeq='recosim'
1050         self.POSTRECODefaultSeq=None
1051         self.L1HwValDefaultSeq='L1HwVal'
1052         self.DQMDefaultSeq='DQMOffline'
1053         self.VALIDATIONDefaultSeq=''
1054         self.ENDJOBDefaultSeq='endOfProcess'
1055         self.REPACKDefaultSeq='DigiToRawRepack'
1056         self.PATDefaultSeq='miniAOD'
1057         self.PATGENDefaultSeq='miniGEN'
1058         #TODO: check this based on the file input
1059         self.NANODefaultSeq='nanoSequence'
1060         self.NANODefaultCustom='nanoAOD_customizeCommon'
1061 
1062         self.EVTCONTDefaultCFF="Configuration/EventContent/EventContent_cff"
1063 
1064         if not self._options.beamspot:
1065             self._options.beamspot=VtxSmearedDefaultKey
1066 
1067         # if it's MC then change the raw2digi
1068         if self._options.isMC==True:
1069             self.RAW2DIGIDefaultCFF="Configuration/StandardSequences/RawToDigi_cff"
1070             self.RECODefaultCFF="Configuration/StandardSequences/Reconstruction_cff"
1071             self.PATDefaultCFF="Configuration/StandardSequences/PATMC_cff"
1072             self.PATGENDefaultCFF="Configuration/StandardSequences/PATGEN_cff"
1073             self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineMC_cff"
1074             self.ALCADefaultCFF="Configuration/StandardSequences/AlCaRecoStreamsMC_cff"
1075             self.NANODefaultSeq='nanoSequenceMC'
1076         else:
1077             self._options.beamspot = None
1078 
1079         #patch for gen, due to backward incompatibility
1080         if 'reGEN' in self.stepMap:
1081             self.GENDefaultSeq='fixGenInfo'
1082 
1083         if self._options.scenario=='cosmics':
1084             self._options.pileup='Cosmics'
1085             self.DIGIDefaultCFF="Configuration/StandardSequences/DigiCosmics_cff"
1086             self.RECODefaultCFF="Configuration/StandardSequences/ReconstructionCosmics_cff"
1087             self.SKIMDefaultCFF="Configuration/StandardSequences/SkimsCosmics_cff"
1088             self.EVTCONTDefaultCFF="Configuration/EventContent/EventContentCosmics_cff"
1089             self.VALIDATIONDefaultCFF="Configuration/StandardSequences/ValidationCosmics_cff"
1090             self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineCosmics_cff"
1091             if self._options.isMC==True:
1092                 self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineCosmicsMC_cff"
1093             self.HARVESTINGDefaultCFF="Configuration/StandardSequences/HarvestingCosmics_cff"
1094             self.RECODefaultSeq='reconstructionCosmics'
1095             self.DQMDefaultSeq='DQMOfflineCosmics'
1096 
1097         if self._options.scenario=='HeavyIons':
1098             if not self._options.beamspot:
1099                 self._options.beamspot=VtxSmearedHIDefaultKey
1100             self.HLTDefaultSeq = 'HIon'
1101             self.VALIDATIONDefaultCFF="Configuration/StandardSequences/ValidationHeavyIons_cff"
1102             self.VALIDATIONDefaultSeq=''
1103             self.EVTCONTDefaultCFF="Configuration/EventContent/EventContentHeavyIons_cff"
1104             self.RECODefaultCFF="Configuration/StandardSequences/Reconstruction_cff"
1105             self.RECODefaultSeq='reconstruction'
1106             self.ALCADefaultCFF = "Configuration/StandardSequences/AlCaRecoStreamsHeavyIons_cff"
1107             self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineHeavyIons_cff"
1108             self.DQMDefaultSeq='DQMOfflineHeavyIons'
1109             self.SKIMDefaultCFF="Configuration/StandardSequences/SkimsHeavyIons_cff"
1110             self.HARVESTINGDefaultCFF="Configuration/StandardSequences/HarvestingHeavyIons_cff"
1111             if self._options.isMC==True:
1112                 self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineHeavyIonsMC_cff"
1113 
1114 
1115         self.RAW2RECODefaultSeq=','.join([self.RAW2DIGIDefaultSeq,self.RECODefaultSeq])
1116 
1117         self.USERDefaultSeq='user'
1118         self.USERDefaultCFF=None
1119 
1120         # the magnetic field
1121         self.magFieldCFF = 'Configuration/StandardSequences/MagneticField_'+self._options.magField.replace('.','')+'_cff'
1122         self.magFieldCFF = self.magFieldCFF.replace("__",'_')
1123 
1124         # the geometry
1125         self.GeometryCFF='Configuration/StandardSequences/GeometryRecoDB_cff'
1126         self.geometryDBLabel=None
1127         simGeometry=''
1128         if self._options.fast:
1129             if 'start' in self._options.conditions.lower():
1130                 self.GeometryCFF='FastSimulation/Configuration/Geometries_START_cff'
1131             else:
1132                 self.GeometryCFF='FastSimulation/Configuration/Geometries_MC_cff'
1133         else:
1134             def inGeometryKeys(opt):
1135                 from Configuration.StandardSequences.GeometryConf import GeometryConf
1136                 if opt in GeometryConf:
1137                     return GeometryConf[opt]
1138                 else:
1139                     return opt
1140 
1141             geoms=self._options.geometry.split(',')
1142             if len(geoms)==1: geoms=inGeometryKeys(geoms[0]).split(',')
1143             if len(geoms)==2:
1144                 #may specify the reco geometry
1145                 if '/' in geoms[1] or '_cff' in geoms[1]:
1146                     self.GeometryCFF=geoms[1]
1147                 else:
1148                     self.GeometryCFF='Configuration/Geometry/Geometry'+geoms[1]+'_cff'
1149 
1150             if (geoms[0].startswith('DB:')):
1151                 self.SimGeometryCFF='Configuration/StandardSequences/GeometrySimDB_cff'
1152                 self.geometryDBLabel=geoms[0][3:]
1153                 print("Reading the simulation geometry from the DB")
1154             else:
1155                 if '/' in geoms[0] or '_cff' in geoms[0]:
1156                     self.SimGeometryCFF=geoms[0]
1157                 else:
1158                     simGeometry=geoms[0]
1159                     if self._options.gflash==True:
1160                         self.SimGeometryCFF='Configuration/Geometry/Geometry'+geoms[0]+'GFlash_cff'
1161                     else:
1162                         self.SimGeometryCFF='Configuration/Geometry/Geometry'+geoms[0]+'_cff'
1163 
1164         # synchronize the geometry configuration and the FullSimulation sequence to be used
1165         if simGeometry not in defaultOptions.geometryExtendedOptions:
1166             self.SIMDefaultCFF="Configuration/StandardSequences/SimIdeal_cff"
1167 
1168         if self._options.scenario=='nocoll' or self._options.scenario=='cosmics':
1169             self.SIMDefaultCFF="Configuration/StandardSequences/SimNOBEAM_cff"
1170             self._options.beamspot='NoSmear'
1171 
1172         # fastsim requires some changes to the default cff files and sequences
1173         if self._options.fast:
1174             self.SIMDefaultCFF = 'FastSimulation.Configuration.SimIdeal_cff'
1175             self.RECODefaultCFF= 'FastSimulation.Configuration.Reconstruction_AftMix_cff'
1176             self.RECOBEFMIXDefaultCFF = 'FastSimulation.Configuration.Reconstruction_BefMix_cff'
1177             self.RECOBEFMIXDefaultSeq = 'reconstruction_befmix'
1178             self.NANODefaultSeq = 'nanoSequenceFS'
1179             self.DQMOFFLINEDefaultCFF="DQMOffline.Configuration.DQMOfflineFS_cff"
1180 
1181         # Mixing
1182         if self._options.pileup=='default':
1183             from Configuration.StandardSequences.Mixing import MixingDefaultKey
1184             self._options.pileup=MixingDefaultKey
1185 
1186 
1187         #not driven by a default cff anymore
1188         if self._options.isData:
1189             self._options.pileup=None
1190 
1191 
1192         self.REDIGIDefaultSeq=self.DIGIDefaultSeq
1193 
1194     # for alca, skims, etc
1195     def addExtraStream(self, name, stream, workflow='full'):
1196             # define output module and go from there
1197         output = cms.OutputModule("PoolOutputModule")
1198         if len(stream.selectEvents.parameters_())!=0:
1199             output.SelectEvents = stream.selectEvents
1200         else:
1201             output.SelectEvents = cms.untracked.PSet()
1202             output.SelectEvents.SelectEvents=cms.vstring()
1203             if isinstance(stream.paths,tuple):
1204                 for path in stream.paths:
1205                     output.SelectEvents.SelectEvents.append(path.label())
1206             else:
1207                 output.SelectEvents.SelectEvents.append(stream.paths.label())
1208 
1209 
1210 
1211         if isinstance(stream.content,str):
1212             evtPset=getattr(self.process,stream.content)
1213             for p in evtPset.parameters_():
1214                 setattr(output,p,getattr(evtPset,p))
1215             if not self._options.inlineEventContent:
1216                 def doNotInlineEventContent(instance,label = "process."+stream.content+".outputCommands"):
1217                     return label
1218                 output.outputCommands.__dict__["dumpPython"] = doNotInlineEventContent
1219         else:
1220             output.outputCommands = stream.content
1221 
1222 
1223         output.fileName = cms.untracked.string(self._options.dirout+stream.name+'.root')
1224 
1225         output.dataset  = cms.untracked.PSet( dataTier = stream.dataTier,
1226                                               filterName = cms.untracked.string(stream.name))
1227 
1228         if self._options.filtername:
1229             output.dataset.filterName= cms.untracked.string(self._options.filtername+"_"+stream.name)
1230 
1231         #add an automatic flushing to limit memory consumption
1232         output.eventAutoFlushCompressedSize=cms.untracked.int32(5*1024*1024)
1233 
1234         if workflow in ("producers","full"):
1235             if isinstance(stream.paths,tuple):
1236                 for path in stream.paths:
1237                     self.schedule.append(path)
1238             else:
1239                 self.schedule.append(stream.paths)
1240 
1241 
1242         # in case of relvals we don't want to have additional outputs
1243         if (not self._options.relval) and workflow in ("full","output"):
1244             self.additionalOutputs[name] = output
1245             setattr(self.process,name,output)
1246 
1247         if workflow == 'output':
1248             # adjust the select events to the proper trigger results from previous process
1249             filterList = output.SelectEvents.SelectEvents
1250             for i, filter in enumerate(filterList):
1251                 filterList[i] = filter+":"+self._options.triggerResultsProcess
1252 
1253         return output
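         # Usage sketch (hypothetical labels, for illustration only): addExtraStream expects a
         # cms.FilteredStream-like object exposing .name, .content, .dataTier, .selectEvents and .paths,
         # e.g.
         #   myStream = cms.FilteredStream(responsible = 'nobody',
         #                                 name = 'MyStream',                 # -> <dirout>MyStream.root
         #                                 paths = (process.myPath,),         # scheduled for 'full'/'producers'
         #                                 content = 'MyEventContent',        # PSet label or outputCommands
         #                                 selectEvents = cms.untracked.PSet(),
         #                                 dataTier = cms.untracked.string('USER'))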
1254 
1255     #----------------------------------------------------------------------------
1256     # here the methods to create the steps. Of course we are doing magic here ;)
1257     # prepare_STEPNAME modifies self.process and whatever else is needed.
1258     #----------------------------------------------------------------------------
1259 
1260     def loadDefaultOrSpecifiedCFF(self, stepSpec, defaultCFF, defaultSEQ=''):
1261         _dotsplit = stepSpec.split('.')
1262         if ( len(_dotsplit)==1 ):
1263             if '/' in _dotsplit[0]:
1264                 _sequence = defaultSEQ if defaultSEQ else stepSpec 
1265                 _cff = _dotsplit[0]
1266             else:
1267                 _sequence = stepSpec
1268                 _cff = defaultCFF
1269         elif ( len(_dotsplit)==2 ):
1270             _cff,_sequence  = _dotsplit
1271         else:
1272             print("sub sequence configuration must be of the form dir/subdir/cff.a+b+c or cff.a")
1273             print(stepSpec,"not recognized")
1274             raise Exception("invalid sub sequence specification: "+stepSpec)
1275         l=self.loadAndRemember(_cff)
1276         return l,_sequence,_cff
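         # Illustration of the accepted stepSpec spellings (hypothetical names):
         #   'mySeq'                       -> load defaultCFF and schedule 'mySeq'
         #   'MyPkg/MySub/MyStep_cff'      -> load that cff and schedule defaultSEQ (or the spec itself)
         #   'MyPkg/MySub/MyStep_cff.a+b'  -> load that cff and schedule 'a+b'
         # anything containing more than one '.' is rejected.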
1277 
1278     def scheduleSequence(self,seq,prefix,what='Path'):
1279         if '*' in seq:
1280             #create only one path with all sequences in it
1281             for i,s in enumerate(seq.split('*')):
1282                 if i==0:
1283                     setattr(self.process,prefix,getattr(cms,what)( getattr(self.process, s) ))
1284                 else:
1285                     p=getattr(self.process,prefix)
1286                     tmp = getattr(self.process, s)
1287                     if isinstance(tmp, cms.Task):
1288                         p.associate(tmp)
1289                     else:
1290                         p+=tmp
1291             self.schedule.append(getattr(self.process,prefix))
1292             return
1293         else:
1294             #create as many paths as there are sequences
1295             if not '+' in seq:
1296                 if self.nextScheduleIsConditional:
1297                     self.conditionalPaths.append(prefix)
1298                 setattr(self.process,prefix,getattr(cms,what)( getattr(self.process, seq) ))
1299                 self.schedule.append(getattr(self.process,prefix))
1300             else:
1301                 for i,s in enumerate(seq.split('+')):
1302                     sn=prefix+'%d'%(i)
1303                     setattr(self.process,sn,getattr(cms,what)( getattr(self.process, s) ))
1304                     self.schedule.append(getattr(self.process,sn))
1305             return
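         # Illustration: a spec of 'a+b' yields two paths named <prefix>0 and <prefix>1, while 'a*b'
         # yields a single path <prefix> containing both; cms.Task members are associated to the path
         # rather than added in-line.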
1306 
1307     def scheduleSequenceAtEnd(self,seq,prefix):
1308         self.scheduleSequence(seq,prefix,what='EndPath')
1309         return
1310 
1311     def prepare_ALCAPRODUCER(self, stepSpec = None):
1312         self.prepare_ALCA(stepSpec, workflow = "producers")
1313 
1314     def prepare_ALCAOUTPUT(self, stepSpec = None):
1315         self.prepare_ALCA(stepSpec, workflow = "output")
1316 
1317     def prepare_ALCA(self, stepSpec = None, workflow = 'full'):
1318         """ Enrich the process with alca streams """
1319         alcaConfig,sequence,_=self.loadDefaultOrSpecifiedCFF(stepSpec,self.ALCADefaultCFF)
1320 
1321         MAXLEN=31 #the alca producer name should be shorter than 31 chars as per https://cms-talk.web.cern.ch/t/alcaprompt-datasets-not-loaded-in-dbs/11146/2
1322         # decide which ALCA paths to use
1323         alcaList = sequence.split("+")
1324         for alca in alcaList:
1325             if (len(alca)>MAXLEN):
1326                 raise Exception("The following alca "+str(alca)+" name (with length "+str(len(alca))+" chars) cannot be accepted because it exceeds the DBS constraints on the length of the name of the ALCARECOs producers ("+str(MAXLEN)+")!")
1327 
1328         maxLevel=0
1329         from Configuration.AlCa.autoAlca import autoAlca, AlCaNoConcurrentLumis
1330         # support @X from autoAlca.py, with recursive expansion, e.g. T0:@Mu+@EG+...
1331         self.expandMapping(alcaList,autoAlca)
1332         self.AlCaPaths=[]
1333         for name in alcaConfig.__dict__:
1334             alcastream = getattr(alcaConfig,name)
1335             shortName = name.replace('ALCARECOStream','')
1336             if shortName in alcaList and isinstance(alcastream,cms.FilteredStream):
1337                 if shortName in AlCaNoConcurrentLumis:
1338                     print("Setting numberOfConcurrentLuminosityBlocks=1 because of AlCa sequence {}".format(shortName))
1339                     self._options.nConcurrentLumis = 1
1340                     self._options.nConcurrentIOVs = 1
1341                 output = self.addExtraStream(name,alcastream, workflow = workflow)
1342                 self.executeAndRemember('process.ALCARECOEventContent.outputCommands.extend(process.OutALCARECO'+shortName+'_noDrop.outputCommands)')
1343                 self.AlCaPaths.append(shortName)
1344                 if 'DQM' in alcaList:
1345                     if not self._options.inlineEventContent and hasattr(self.process,name):
1346                         self.executeAndRemember('process.' + name + '.outputCommands.append("keep *_MEtoEDMConverter_*_*")')
1347                     else:
1348                         output.outputCommands.append("keep *_MEtoEDMConverter_*_*")
1349 
1350                 #rename the HLT process name in the alca modules
1351                 if self._options.hltProcess or 'HLT' in self.stepMap:
1352                     if isinstance(alcastream.paths,tuple):
1353                         for path in alcastream.paths:
1354                             self.renameHLTprocessInSequence(path.label())
1355                     else:
1356                         self.renameHLTprocessInSequence(alcastream.paths.label())
1357 
1358                 for i in range(alcaList.count(shortName)):
1359                     alcaList.remove(shortName)
1360 
1361             # DQM needs special handling
1362             elif name == 'pathALCARECODQM' and 'DQM' in alcaList:
1363                 path = getattr(alcaConfig,name)
1364                 self.schedule.append(path)
1365                 alcaList.remove('DQM')
1366 
1367             if isinstance(alcastream,cms.Path):
1368                 #blacklist the alca paths so that they do not appear in the cfg
1369                 self.blacklist_paths.append(alcastream)
1370 
1371 
1372         if len(alcaList) != 0:
1373             available=[]
1374             for name in alcaConfig.__dict__:
1375                 alcastream = getattr(alcaConfig,name)
1376                 if isinstance(alcastream,cms.FilteredStream):
1377                     available.append(name.replace('ALCARECOStream',''))
1378             print("The following alcas could not be found "+str(alcaList))
1379             print("available ",available)
1380             #print "verify your configuration, ignoring for now"
1381             raise Exception("The following alcas could not be found "+str(alcaList))
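         # Illustration (hypothetical spec): the ALCA step takes a '+'-separated list of ALCARECO
         # producer names, optionally via '@' keys expanded through Configuration.AlCa.autoAlca,
         # e.g. ALCA:TkAlMinBias+SiStripCalZeroBias; names longer than MAXLEN characters are rejected.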
1382 
1383     def prepare_LHE(self, stepSpec = None):
1384             #load the fragment
1385             ##make it loadable
1386         loadFragment = self._options.evt_type.replace('.py','',).replace('.','_').replace('python/','').replace('/','.')
1387         print("Loading lhe fragment from",loadFragment)
1388         __import__(loadFragment)
1389         self.process.load(loadFragment)
1390         ##inline the modules
1391         self._options.inlineObjects+=','+stepSpec
1392 
1393         getattr(self.process,stepSpec).nEvents = self._options.number
1394 
1395         #schedule it
1396         self.process.lhe_step = cms.Path( getattr( self.process,stepSpec)  )
1397         self.excludedPaths.append("lhe_step")
1398         self.schedule.append( self.process.lhe_step )
1399 
1400     def prepare_GEN(self, stepSpec = None):
1401         """ load the fragment of generator configuration """
1402         loadFailure=False
1403         #remove trailing .py
1404         #support old style .cfi by changing something.cfi into something_cfi
1405         #remove python/ from the name
1406         loadFragment = self._options.evt_type.replace('.py','',).replace('.','_').replace('python/','')
1407         #standard location of fragments
1408         if not '/' in loadFragment:
1409             loadFragment='Configuration.Generator.'+loadFragment
1410         else:
1411             loadFragment=loadFragment.replace('/','.')
1412         try:
1413             print("Loading generator fragment from",loadFragment)
1414             __import__(loadFragment)
1415         except:
1416             loadFailure=True
1417             #if self.process.source and self.process.source.type_()=='EmptySource':
1418             if not (self._options.filein or self._options.dasquery):
1419                 raise Exception("Neither gen fragment nor input files provided: this is an inconsistent GEN step configuration")
1420 
1421         if not loadFailure:
1422             from Configuration.Generator.concurrentLumisDisable import noConcurrentLumiGenerators
1423 
1424             generatorModule=sys.modules[loadFragment]
1425             genModules=generatorModule.__dict__
1426             #remove lhe producer module since this should have been
1427             #imported instead in the LHE step
1428             if self.LHEDefaultSeq in genModules:
1429                 del genModules[self.LHEDefaultSeq]
1430 
1431             if self._options.hideGen:
1432                 self.loadAndRemember(loadFragment)
1433             else:
1434                 self.process.load(loadFragment)
1435                 # expose the objects from that fragment to the configuration
1436                 import FWCore.ParameterSet.Modules as cmstypes
1437                 for name in genModules:
1438                     theObject = getattr(generatorModule,name)
1439                     if isinstance(theObject, cmstypes._Module):
1440                         self._options.inlineObjects=name+','+self._options.inlineObjects
1441                         if theObject.type_() in noConcurrentLumiGenerators:
1442                             print("Setting numberOfConcurrentLuminosityBlocks=1 because of generator {}".format(theObject.type_()))
1443                             self._options.nConcurrentLumis = 1
1444                             self._options.nConcurrentIOVs = 1
1445                     elif isinstance(theObject, cms.Sequence) or isinstance(theObject, cmstypes.ESProducer):
1446                         self._options.inlineObjects+=','+name
1447 
1448             if stepSpec == self.GENDefaultSeq or stepSpec == 'pgen_genonly':
1449                 if 'ProductionFilterSequence' in genModules and ('generator' in genModules):
1450                     self.productionFilterSequence = 'ProductionFilterSequence'
1451                 elif 'generator' in genModules:
1452                     self.productionFilterSequence = 'generator'
1453 
1454         # Enrich the schedule with the rest of the generation step
1455         _,_genSeqName,_=self.loadDefaultOrSpecifiedCFF(stepSpec,self.GENDefaultCFF)
1456 
1457         if True:
1458             try:
1459                 from Configuration.StandardSequences.VtxSmeared import VtxSmeared
1460                 cffToBeLoaded=VtxSmeared[self._options.beamspot]
1461                 self.loadAndRemember(cffToBeLoaded)
1462             except ImportError:
1463                 raise Exception("VertexSmearing type or beamspot "+self._options.beamspot+" unknown.")
1464 
1465             if self._options.scenario == 'HeavyIons':
1466                 if self._options.pileup=='HiMixGEN':
1467                     self.loadAndRemember("Configuration/StandardSequences/GeneratorMix_cff")
1468                 elif self._options.pileup=='HiMixEmbGEN':
1469                     self.loadAndRemember("Configuration/StandardSequences/GeneratorEmbMix_cff")
1470                 else:
1471                     self.loadAndRemember("Configuration/StandardSequences/GeneratorHI_cff")
1472 
1473         self.process.generation_step = cms.Path( getattr(self.process,_genSeqName) )
1474         self.schedule.append(self.process.generation_step)
1475 
1476         #register to the genstepfilter the name of the path (static right now, but might evolve)
1477         self.executeAndRemember('process.genstepfilter.triggerConditions=cms.vstring("generation_step")')
1478 
1479         if 'reGEN' in self.stepMap or stepSpec == 'pgen_smear':
1480             #stop here
1481             return
1482 
1483         # Enrich the schedule with the summary of the filter step
1484         #the gen filter in the endpath
1485         self.loadAndRemember("GeneratorInterface/Core/genFilterSummary_cff")
1486         self.scheduleSequenceAtEnd('genFilterSummary','genfiltersummary_step')
1487         return
1488 
1489     def prepare_SIM(self, stepSpec = None):
1490         """ Enrich the schedule with the simulation step"""
1491         _,_simSeq,_ = self.loadDefaultOrSpecifiedCFF(stepSpec,self.SIMDefaultCFF)
1492         if not self._options.fast:
1493             if self._options.gflash==True:
1494                 self.loadAndRemember("Configuration/StandardSequences/GFlashSIM_cff")
1495 
1496             if self._options.magField=='0T':
1497                 self.executeAndRemember("process.g4SimHits.UseMagneticField = cms.bool(False)")
1498         else:
1499             if self._options.magField=='0T':
1500                 self.executeAndRemember("process.fastSimProducer.detectorDefinition.magneticFieldZ = cms.untracked.double(0.)")
1501 
1502         self.scheduleSequence(_simSeq,'simulation_step')
1503         return
1504 
1505     def prepare_DIGI(self, stepSpec = None):
1506         """ Enrich the schedule with the digitisation step"""
1507         _,_digiSeq,_ = self.loadDefaultOrSpecifiedCFF(stepSpec,self.DIGIDefaultCFF)
1508 
1509         if self._options.gflash==True:
1510             self.loadAndRemember("Configuration/StandardSequences/GFlashDIGI_cff")
1511 
1512         if _digiSeq == 'pdigi_valid' or _digiSeq == 'pdigi_hi':
1513             self.executeAndRemember("process.mix.digitizers = cms.PSet(process.theDigitizersValid)")
1514 
1515         if _digiSeq != 'pdigi_nogen' and _digiSeq != 'pdigi_valid_nogen' and _digiSeq != 'pdigi_hi_nogen' and not self.process.source.type_()=='EmptySource' and not self._options.filetype == "LHE":
1516             if self._options.inputEventContent=='':
1517                 self._options.inputEventContent='REGEN'
1518             else:
1519                 self._options.inputEventContent=self._options.inputEventContent+',REGEN'
1520 
1521 
1522         self.scheduleSequence(_digiSeq,'digitisation_step')
1523         return
1524 
1525     def prepare_CFWRITER(self, stepSpec = None):
1526         """ Enrich the schedule with the crossing frame writer step"""
1527         self.loadAndRemember(self.CFWRITERDefaultCFF)
1528         self.scheduleSequence('pcfw','cfwriter_step')
1529         return
1530 
1531     def prepare_DATAMIX(self, stepSpec = None):
1532         """ Enrich the schedule with the data mixing step"""
1533         self.loadAndRemember(self.DATAMIXDefaultCFF)
1534         self.scheduleSequence('pdatamix','datamixing_step')
1535 
1536         if self._options.pileup_input:
1537             theFiles=''
1538             if self._options.pileup_input.startswith('dbs:') or self._options.pileup_input.startswith('das:'):
1539                 theFiles=filesFromDASQuery('file dataset = %s'%(self._options.pileup_input[4:],),self._options.pileup_dasoption)[0]
1540             elif self._options.pileup_input.startswith("filelist:"):
1541                 theFiles= (filesFromList(self._options.pileup_input[9:]))[0]
1542             else:
1543                 theFiles=self._options.pileup_input.split(',')
1544             #print theFiles
1545             self.executeAndRemember( "process.mixData.input.fileNames = cms.untracked.vstring(%s)"%(  theFiles ) )
1546 
1547         return
1548 
1549     def prepare_DIGI2RAW(self, stepSpec = None):
1550         _,_digi2rawSeq,_ = self.loadDefaultOrSpecifiedCFF(stepSpec,self.DIGI2RAWDefaultCFF)
1551         self.scheduleSequence(_digi2rawSeq,'digi2raw_step')
1552         return
1553 
1554     def prepare_REPACK(self, stepSpec = None):
1555         _,_repackSeq,_ = self.loadDefaultOrSpecifiedCFF(stepSpec,self.REPACKDefaultCFF)
1556         self.scheduleSequence(_repackSeq,'digi2repack_step')
1557         return
1558 
1559     def loadPhase2GTMenu(self, menuFile: str):
1560         import importlib
1561         menuPath = f'L1Trigger.Configuration.Phase2GTMenus.{menuFile}'
1562         menuModule = importlib.import_module(menuPath)
1563         
1564         theMenu = menuModule.menu
1565         triggerPaths = [] #we get a list of paths in each of these files to schedule
1566 
1567         for triggerPathFile in theMenu:
1568             self.loadAndRemember(triggerPathFile) #this load and remember will set the algo variable of the algoblock later
1569 
1570             triggerPathModule = importlib.import_module(triggerPathFile)
1571             for objName in dir(triggerPathModule):
1572                 obj = getattr(triggerPathModule, objName)
1573                 objType = type(obj)
1574                 if objType == cms.Path:
1575                     triggerPaths.append(objName)
1576         
1577         triggerScheduleList = [getattr(self.process, name) for name in triggerPaths] #get the actual paths to put in the schedule
1578         self.schedule.extend(triggerScheduleList) #put them in the schedule for later
1579     
1580     # create the L1 GT step
1581     # We abuse the stepSpec a bit as a way to specify a menu
1582     def prepare_L1P2GT(self, stepSpec=None):
1583         """ Run the GT emulation sequence on top of the L1 emulation step """
1584         self.loadAndRemember(self.L1P2GTDefaultCFF)
1585         self.scheduleSequence('l1tGTProducerSequence', 'Phase2L1GTProducer')
1586         self.scheduleSequence('l1tGTAlgoBlockProducerSequence', 'Phase2L1GTAlgoBlockProducer')
1587         if stepSpec == None:
1588             defaultMenuFile = "prototype_2023_v1_0_0"
1589             self.loadPhase2GTMenu(menuFile = defaultMenuFile)
1590         else:
1591             self.loadPhase2GTMenu(menuFile = stepSpec)
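         # Illustration: the step spec doubles as the Phase-2 GT menu name, i.e. L1P2GT:<menuFile>
         # imports L1Trigger.Configuration.Phase2GTMenus.<menuFile>; without a spec the
         # "prototype_2023_v1_0_0" menu is loaded.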
1592 
1593     def prepare_L1(self, stepSpec = None):
1594         """ Enrich the schedule with the L1 simulation step"""
1595         assert(stepSpec == None)
1596         self.loadAndRemember(self.L1EMDefaultCFF)
1597         self.scheduleSequence('SimL1Emulator','L1simulation_step')
1598         return
1599 
1600     def prepare_L1REPACK(self, stepSpec = None):
1601         """ Enrich the schedule with the L1 simulation step, running the L1 emulator on data unpacked from the RAW collection, and repacking the result in a new RAW collection"""
1602         supported = ['GT','GT1','GT2','GCTGT','Full','FullSimTP','FullMC','Full2015Data','uGT','CalouGT']
1603         if stepSpec in supported:
1604             self.loadAndRemember('Configuration/StandardSequences/SimL1EmulatorRepack_%s_cff'% stepSpec)
1605             if self._options.scenario == 'HeavyIons':
1606                 self.renameInputTagsInSequence("SimL1Emulator","rawDataCollector","rawDataRepacker")
1607             self.scheduleSequence('SimL1Emulator','L1RePack_step')
1608         else:
1609             print("L1REPACK with '",stepSpec,"' is not supported! Supported choices are: ",supported)
1610             raise Exception('unsupported feature')
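         # Illustration: a spec of L1REPACK:Full loads
         # Configuration/StandardSequences/SimL1EmulatorRepack_Full_cff and schedules its
         # 'SimL1Emulator' sequence as 'L1RePack_step'; unsupported specs raise above.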
1611 
1612     def prepare_HLT(self, stepSpec = None):
1613         """ Enrich the schedule with the HLT simulation step"""
1614         if not stepSpec:
1615             print("no specification of the HLT menu has been given; this should never happen")
1616             raise Exception('no HLT specifications provided')
1617 
1618         if '@' in stepSpec:
1619             # case where HLT:@something was provided
1620             from Configuration.HLT.autoHLT import autoHLT
1621             key = stepSpec[1:]
1622             if key in autoHLT:
1623                 stepSpec = autoHLT[key]
1624             else:
1625                 raise ValueError('no HLT mapping key "%s" found in autoHLT' % key)
1626 
1627         if ',' in stepSpec:
1628             #case where HLT:something:something was provided
1629             self.executeAndRemember('import HLTrigger.Configuration.Utilities')
1630             optionsForHLT = {}
1631             if self._options.scenario == 'HeavyIons':
1632                 optionsForHLT['type'] = 'HIon'
1633             else:
1634                 optionsForHLT['type'] = 'GRun'
1635             optionsForHLTConfig = ', '.join('%s=%s' % (key, repr(val)) for (key, val) in optionsForHLT.items())
1636             if stepSpec == 'run,fromSource':
1637                 if hasattr(self.process.source,'firstRun'):
1638                     self.executeAndRemember('process.loadHltConfiguration("run:%%d"%%(process.source.firstRun.value()),%s)'%(optionsForHLTConfig))
1639                 elif hasattr(self.process.source,'setRunNumber'):
1640                     self.executeAndRemember('process.loadHltConfiguration("run:%%d"%%(process.source.setRunNumber.value()),%s)'%(optionsForHLTConfig))
1641                 else:
1642                     raise Exception(f'Cannot replace menu to load {stepSpec}')
1643             else:
1644                 self.executeAndRemember('process.loadHltConfiguration("%s",%s)'%(stepSpec.replace(',',':'),optionsForHLTConfig))
1645         else:
1646             self.loadAndRemember('HLTrigger/Configuration/HLT_%s_cff' % stepSpec)
1647 
1648         if self._options.isMC:
1649             self._options.customisation_file.append("HLTrigger/Configuration/customizeHLTforMC.customizeHLTforMC")
1650 
1651         if self._options.name != 'HLT':
1652             self.additionalCommands.append('from HLTrigger.Configuration.CustomConfigs import ProcessName')
1653             self.additionalCommands.append('process = ProcessName(process)')
1654             self.additionalCommands.append('')
1655             from HLTrigger.Configuration.CustomConfigs import ProcessName
1656             self.process = ProcessName(self.process)
1657 
1658         if self.process.schedule == None:
1659             raise Exception('the HLT step did not attach a valid schedule to the process')
1660 
1661         self.scheduleIndexOfFirstHLTPath = len(self.schedule)
1662         self.blacklist_paths.extend(path for path in self.process.schedule if isinstance(path,(cms.Path,cms.EndPath)))
1663 
1664         # this is a fake, to be removed with the fastsim migration and the HLT menu dump
1665         if self._options.fast:
1666             if not hasattr(self.process,'HLTEndSequence'):
1667                 self.executeAndRemember("process.HLTEndSequence = cms.Sequence( process.dummyModule )")
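         # Illustration: accepted HLT specs are a plain menu name (e.g. HLT:GRun, resolved to
         # HLTrigger/Configuration/HLT_GRun_cff), an '@' key looked up in Configuration.HLT.autoHLT,
         # or a comma form such as HLT:run,fromSource handled through process.loadHltConfiguration.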
1668 
1669 
1670     def prepare_RAW2RECO(self, stepSpec = None):
1671         if ',' in stepSpec:
1672             seqReco,seqDigi=stepSpec.split(',')
1673         else:
1674             raise Exception(f"RAW2RECO requires two comma-separated specifications; '{stepSpec}' is insufficient")
1675 
1676         self.prepare_RAW2DIGI(seqDigi)
1677         self.prepare_RECO(seqReco)
1678         return
1679 
1680     def prepare_RAW2DIGI(self, stepSpec = "RawToDigi"):
1681         _,_raw2digiSeq,_ = self.loadDefaultOrSpecifiedCFF(stepSpec,self.RAW2DIGIDefaultCFF)
1682         self.scheduleSequence(_raw2digiSeq,'raw2digi_step')
1683         return
1684 
1685     def prepare_PATFILTER(self, stepSpec = None):
1686         self.loadAndRemember("PhysicsTools/PatAlgos/slimming/metFilterPaths_cff")
1687         from PhysicsTools.PatAlgos.slimming.metFilterPaths_cff import allMetFilterPaths
1688         for filt in allMetFilterPaths:
1689             self.schedule.append(getattr(self.process,'Flag_'+filt))
1690 
1691     def prepare_L1HwVal(self, stepSpec = 'L1HwVal'):
1692         ''' Enrich the schedule with L1 HW validation '''
1693         self.loadDefaultOrSpecifiedCFF(stepSpec,self.L1HwValDefaultCFF)
1694         print('\n\n\n DEPRECATED this has no action \n\n\n')
1695         return
1696 
1697     def prepare_L1Reco(self, stepSpec = "L1Reco"):
1698         ''' Enrich the schedule with L1 reconstruction '''
1699         _,_l1recoSeq,_ = self.loadDefaultOrSpecifiedCFF(stepSpec,self.L1RecoDefaultCFF)
1700         self.scheduleSequence(_l1recoSeq,'L1Reco_step')
1701         return
1702 
1703     def prepare_L1TrackTrigger(self, stepSpec = "L1TrackTrigger"):
1704         ''' Enrich the schedule with the L1 track trigger '''
1705         _,_l1tracktriggerSeq,_ = self.loadDefaultOrSpecifiedCFF(stepSpec,self.L1TrackTriggerDefaultCFF)
1706         self.scheduleSequence(_l1tracktriggerSeq,'L1TrackTrigger_step')
1707         return
1708 
1709     def prepare_FILTER(self, stepSpec = None):
1710         ''' Enrich the schedule with a user defined filter sequence '''
1711         ## load the relevant part
1712         filterConfig,filterSeq = stepSpec.split('.')
1713         filterConfig=self.load(filterConfig)
1714         ## print it in the configuration
1715         class PrintAllModules(object):
1716             def __init__(self):
1717                 self.inliner=''
1718                 pass
1719             def enter(self,visitee):
1720                 try:
1721                     label=visitee.label()
1722                     ##needs to be in reverse order
1723                     self.inliner=label+','+self.inliner
1724                 except:
1725                     pass
1726             def leave(self,v): pass
1727 
1728         expander=PrintAllModules()
1729         getattr(self.process,filterSeq).visit( expander )
1730         self._options.inlineObjects+=','+expander.inliner
1731         self._options.inlineObjects+=','+filterSeq
1732 
1733         ## put the filtering path in the schedule
1734         self.scheduleSequence(filterSeq,'filtering_step')
1735         self.nextScheduleIsConditional=True
1736         ## put it before all the other paths
1737         self.productionFilterSequence = filterSeq
1738 
1739         return
1740 
1741     def prepare_RECO(self, stepSpec = "reconstruction"):
1742         ''' Enrich the schedule with reconstruction '''
1743         _,_recoSeq,_ = self.loadDefaultOrSpecifiedCFF(stepSpec,self.RECODefaultCFF)
1744         self.scheduleSequence(_recoSeq,'reconstruction_step')
1745         return
1746 
1747     def prepare_RECOSIM(self, stepSpec = "recosim"):
1748         ''' Enrich the schedule with the RecoSim step '''
1749         _,_recosimSeq,_ = self.loadDefaultOrSpecifiedCFF(stepSpec,self.RECOSIMDefaultCFF)
1750         self.scheduleSequence(_recosimSeq,'recosim_step')
1751         return
1752 
1753     def prepare_RECOBEFMIX(self, stepSpec = "reconstruction"):
1754         ''' Enrich the schedule with the part of reconstruction that is done before mixing in FastSim'''
1755         if not self._options.fast:
1756             print("ERROR: this step is only implemented for FastSim")
1757             sys.exit(1)
1758         _,_recobefmixSeq,_ = self.loadDefaultOrSpecifiedCFF(self.RECOBEFMIXDefaultSeq,self.RECOBEFMIXDefaultCFF)
1759         self.scheduleSequence(_recobefmixSeq,'reconstruction_befmix_step')
1760         return
1761 
1762     def prepare_PAT(self, stepSpec = "miniAOD"):
1763         ''' Enrich the schedule with PAT '''
1764         self.prepare_PATFILTER()
1765         self.loadDefaultOrSpecifiedCFF(stepSpec,self.PATDefaultCFF)
1766         self.labelsToAssociate.append('patTask')
1767         if self._options.isData:
1768             self._options.customisation_file_unsch.insert(0,"PhysicsTools/PatAlgos/slimming/miniAOD_tools.miniAOD_customizeAllData")
1769         else:
1770             if self._options.fast:
1771                 self._options.customisation_file_unsch.insert(0,"PhysicsTools/PatAlgos/slimming/miniAOD_tools.miniAOD_customizeAllMCFastSim")
1772             else:
1773                 self._options.customisation_file_unsch.insert(0,"PhysicsTools/PatAlgos/slimming/miniAOD_tools.miniAOD_customizeAllMC")
1774 
1775         if self._options.hltProcess:
1776             if len(self._options.customise_commands) > 1:
1777                 self._options.customise_commands = self._options.customise_commands + " \n"
1778             self._options.customise_commands = self._options.customise_commands + "process.patTrigger.processName = \""+self._options.hltProcess+"\"\n"
1779             self._options.customise_commands = self._options.customise_commands + "process.slimmedPatTrigger.triggerResults= cms.InputTag( 'TriggerResults::"+self._options.hltProcess+"' )\n"
1780             self._options.customise_commands = self._options.customise_commands + "process.patMuons.triggerResults= cms.InputTag( 'TriggerResults::"+self._options.hltProcess+"' )\n"
1781 
1782 #            self.renameHLTprocessInSequence(sequence)
1783 
1784         return
1785 
1786     def prepare_PATGEN(self, stepSpec = "miniGEN"):
1787         ''' Enrich the schedule with PATGEN '''
1788         self.loadDefaultOrSpecifiedCFF(stepSpec,self.PATGENDefaultCFF) #this is unscheduled
1789         self.labelsToAssociate.append('patGENTask')
1790         if self._options.isData:
1791             raise Exception("PATGEN step can only run on MC")
1792         return
1793 
1794     def prepare_NANO(self, stepSpec = '' ):
1795         ''' Enrich the schedule with NANO '''
1796         print(f"in prepare_nano {stepSpec}")
1797         if not '@' in stepSpec:
1798             _,_nanoSeq,_nanoCff = self.loadDefaultOrSpecifiedCFF(stepSpec,self.NANODefaultCFF,self.NANODefaultSeq)
1799         else:
1800             _nanoSeq = stepSpec
1801             _nanoCff = self.NANODefaultCFF
1802 
1803         print(_nanoSeq)
1804         # create full specified sequence using autoNANO
1805         from PhysicsTools.NanoAOD.autoNANO import autoNANO, expandNanoMapping
1806         # if not an autoNANO mapping, load an empty customization, which will later be converted into the default.
1807         _nanoCustoms = _nanoSeq.split('+') if '@' in stepSpec else ['']
1808         _nanoSeq = _nanoSeq.split('+')
1809         expandNanoMapping(_nanoSeq, autoNANO, 'sequence')
1810         expandNanoMapping(_nanoCustoms, autoNANO, 'customize')
1811         # make sure there are no duplicates while preserving the ordering
1812         _nanoSeq = list(sorted(set(_nanoSeq), key=_nanoSeq.index))
1813         _nanoCustoms = list(sorted(set(_nanoCustoms), key=_nanoCustoms.index))
1814         # replace empty sequence with default
1815         _nanoSeq = [seq if seq!='' else f"{self.NANODefaultCFF}.{self.NANODefaultSeq}" for seq in _nanoSeq]
1816         _nanoCustoms = [cust if cust!='' else self.NANODefaultCustom for cust in _nanoCustoms]
1817         # build and inject the sequence
1818         if len(_nanoSeq) < 1 and '@' in stepSpec:
1819             raise Exception(f'The specified mapping: {stepSpec} generates an empty NANO sequence. Please provide a valid mapping')
1820         _seqToSchedule = []
1821         for _subSeq in _nanoSeq:
1822             if '.' in _subSeq:
1823                 _cff,_seq = _subSeq.split('.')
1824                 print("NANO: scheduling:",_seq,"from",_cff)
1825                 self.loadAndRemember(_cff)
1826                 _seqToSchedule.append(_seq)
1827             elif '/' in _subSeq:
1828                 self.loadAndRemember(_subSeq)
1829                 _seqToSchedule.append(self.NANODefaultSeq)
1830             else:
1831                 print("NANO: scheduling:",_subSeq)
1832                 _seqToSchedule.append(_subSeq)
1833         self.scheduleSequence('+'.join(_seqToSchedule), 'nanoAOD_step')
1834 
1835         # add the customisations
1836         for custom in _nanoCustoms:
1837             custom_path = custom if '.' in custom else '.'.join([_nanoCff,custom])
1838             # customisation order can matter for NANO: customisations specified later take precedence
1839             self._options.customisation_file.append(custom_path)
1840         if self._options.hltProcess:
1841             if len(self._options.customise_commands) > 1:
1842                 self._options.customise_commands = self._options.customise_commands + " \n"
1843             self._options.customise_commands = self._options.customise_commands + "process.unpackedPatTrigger.triggerResults= cms.InputTag( 'TriggerResults::"+self._options.hltProcess+"' )\n"
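         # Illustration (hypothetical spec): NANO accepts '@' keys from PhysicsTools.NanoAOD.autoNANO
         # and explicit cff.sequence entries joined by '+', e.g. NANO:@PHYS+MyPkg/MyNano_cff.mySeq;
         # matching 'customize' entries from the mapping are appended to the customisation files.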
1844 
1845     def prepare_SKIM(self, stepSpec = "all"):
1846         ''' Enrich the schedule with skimming fragments'''
1847         skimConfig,sequence,_ = self.loadDefaultOrSpecifiedCFF(stepSpec,self.SKIMDefaultCFF)
1848 
1849         stdHLTProcName = 'HLT'
1850         newHLTProcName = self._options.hltProcess
1851         customiseForReHLT = (newHLTProcName or (stdHLTProcName in self.stepMap)) and (newHLTProcName != stdHLTProcName)
1852         if customiseForReHLT:
1853             print("replacing %s process name - step SKIM:%s will use '%s'" % (stdHLTProcName, sequence, newHLTProcName))
1854 
1855         ## support @Mu+DiJet+@Electron configuration via autoSkim.py
1856         from Configuration.Skimming.autoSkim import autoSkim
1857         skimlist = sequence.split('+')
1858         self.expandMapping(skimlist,autoSkim)
1859 
1860         #print("dictionary for skims:", skimConfig.__dict__)
1861         for skim in skimConfig.__dict__:
1862             skimstream = getattr(skimConfig, skim)
1863 
1864             # blacklist skim paths so that they do not appear in the cfg
1865             if isinstance(skimstream, cms.Path):
1866                 self.blacklist_paths.append(skimstream)
1867             # if enabled, apply "hltProcess" renaming to Sequences
1868             elif isinstance(skimstream, cms.Sequence):
1869                 if customiseForReHLT:
1870                     self.renameHLTprocessInSequence(skim, proc = newHLTProcName, HLTprocess = stdHLTProcName, verbosityLevel = 0)
1871 
1872             if not isinstance(skimstream, cms.FilteredStream):
1873                 continue
1874 
1875             shortname = skim.replace('SKIMStream','')
1876             if (sequence=="all"):
1877                 self.addExtraStream(skim,skimstream)
1878             elif (shortname in skimlist):
1879                 self.addExtraStream(skim,skimstream)
1880                 #add a DQM eventcontent for this guy
1881                 if self._options.datatier=='DQM':
1882                     self.process.load(self.EVTCONTDefaultCFF)
1883                     skimstreamDQM = cms.FilteredStream(
1884                             responsible = skimstream.responsible,
1885                             name = skimstream.name+'DQM',
1886                             paths = skimstream.paths,
1887                             selectEvents = skimstream.selectEvents,
1888                             content = self._options.datatier+'EventContent',
1889                             dataTier = cms.untracked.string(self._options.datatier)
1890                             )
1891                     self.addExtraStream(skim+'DQM',skimstreamDQM)
1892                 for i in range(skimlist.count(shortname)):
1893                     skimlist.remove(shortname)
1894 
1895         if (len(skimlist)!=0 and sequence!="all"):
1896             print('WARNING, possible typo with SKIM:'+'+'.join(skimlist))
1897             raise Exception('WARNING, possible typo with SKIM:'+'+'.join(skimlist))
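         # Illustration: SKIM:all schedules every SKIMStream defined in the skim cff, while SKIM:A+B
         # (optionally with '@' keys from Configuration.Skimming.autoSkim) selects individual streams;
         # leftover, unrecognized names trigger the exception above.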
1898 
1899 
1900     def prepare_USER(self, stepSpec = None):
1901         ''' Enrich the schedule with a user defined sequence '''
1902         _,_userSeq,_ = self.loadDefaultOrSpecifiedCFF(stepSpec,self.USERDefaultCFF)
1903         self.scheduleSequence(_userSeq,'user_step')
1904         return
1905 
1906     def prepare_POSTRECO(self, stepSpec = None):
1907         """ Enrich the schedule with the postreco step """
1908         self.loadAndRemember(self.POSTRECODefaultCFF)
1909         self.scheduleSequence('postreco_generator','postreco_step')
1910         return
1911 
1912 
1913     def prepare_VALIDATION(self, stepSpec = 'validation'):
1914         print(f"{stepSpec} in preparing validation")
1915         _,sequence,_ = self.loadDefaultOrSpecifiedCFF(stepSpec,self.VALIDATIONDefaultCFF)
1916         from Validation.Configuration.autoValidation import autoValidation
1917         #in case VALIDATION:something:somethingelse -> something,somethingelse
1918         if sequence.find(',')!=-1:
1919             prevalSeqName=sequence.split(',')[0].split('+')
1920             valSeqName=sequence.split(',')[1].split('+')
1921             self.expandMapping(prevalSeqName,autoValidation,index=0)
1922             self.expandMapping(valSeqName,autoValidation,index=1)
1923         else:
1924             if '@' in sequence:
1925                 prevalSeqName=sequence.split('+')
1926                 valSeqName=sequence.split('+')
1927                 self.expandMapping(prevalSeqName,autoValidation,index=0)
1928                 self.expandMapping(valSeqName,autoValidation,index=1)
1929             else:
1930                 postfix=''
1931                 if sequence:
1932                     postfix='_'+sequence
1933                 prevalSeqName=['prevalidation'+postfix]
1934                 valSeqName=['validation'+postfix]
1935                 if not hasattr(self.process,valSeqName[0]):
1936                     prevalSeqName=['']
1937                     valSeqName=[sequence]
1938 
1939         def NFI(index):
1940             ##name from index, required to keep backward compatibility
1941             if index==0:
1942                 return ''
1943             else:
1944                 return '%s'%index
1945 
1946 
1947         #rename the HLT process in validation steps
1948         if ('HLT' in self.stepMap and not self._options.fast) or self._options.hltProcess:
1949             for s in valSeqName+prevalSeqName:
1950                 if s:
1951                     self.renameHLTprocessInSequence(s)
1952         for (i,s) in enumerate(prevalSeqName):
1953             if s:
1954                 setattr(self.process,'prevalidation_step%s'%NFI(i),  cms.Path( getattr(self.process, s)) )
1955                 self.schedule.append(getattr(self.process,'prevalidation_step%s'%NFI(i)))
1956 
1957         for (i,s) in enumerate(valSeqName):
1958             setattr(self.process,'validation_step%s'%NFI(i), cms.EndPath( getattr(self.process, s)))
1959             self.schedule.append(getattr(self.process,'validation_step%s'%NFI(i)))
1960 
1961         #needed in case the miniAODValidation sequence is run starting from AODSIM
1962         if 'PAT' in self.stepMap and not 'RECO' in self.stepMap:
1963             return
1964 
1965         if not 'DIGI' in self.stepMap and not self._options.fast and not any(map( lambda s : s.startswith('genvalid'), valSeqName)):
1966             if self._options.restoreRNDSeeds==False:
1967                 self._options.restoreRNDSeeds=True
1968 
1969         if not 'DIGI' in self.stepMap and not self._options.isData and not self._options.fast:
1970             self.executeAndRemember("process.mix.playback = True")
1971             self.executeAndRemember("process.mix.digitizers = cms.PSet()")
1972             self.executeAndRemember("for a in process.aliases: delattr(process, a)")
1973             self._options.customisation_file.append("SimGeneral/MixingModule/fullMixCustomize_cff.setCrossingFrameOn")
1974 
1975         if hasattr(self.process,"genstepfilter") and len(self.process.genstepfilter.triggerConditions):
1976             #will get in the schedule, smoothly
1977             for (i,s) in enumerate(valSeqName):
1978                 getattr(self.process,'validation_step%s'%NFI(i)).insert(0, self.process.genstepfilter)
1979 
1980         return
1981 
1982 
1983     class MassSearchReplaceProcessNameVisitor(object):
1984         """Visitor that travels within a cms.Sequence, looks for a parameter and replaces its value.
1985         It will climb down within PSets, VPSets and VInputTags to find its target"""
1986         def __init__(self, paramSearch, paramReplace, verbose=False, whitelist=()):
1987             self._paramReplace = paramReplace
1988             self._paramSearch = paramSearch
1989             self._verbose = verbose
1990             self._whitelist = whitelist
1991 
1992         def doIt(self, pset, base):
1993             if isinstance(pset, cms._Parameterizable):
1994                 for name in pset.parameters_().keys():
1995                     # skip whitelisted parameters
1996                     if name in self._whitelist:
1997                         continue
1998                     # if I use pset.parameters_().items() I get copies of the parameter values
1999                     # so I can't modify the nested pset
2000                     value = getattr(pset, name)
2001                     valueType = type(value)
2002                     if valueType in [cms.PSet, cms.untracked.PSet, cms.EDProducer]:
2003                         self.doIt(value,base+"."+name)
2004                     elif valueType in [cms.VPSet, cms.untracked.VPSet]:
2005                         for (i,ps) in enumerate(value): self.doIt(ps, "%s.%s[%d]"%(base,name,i) )
2006                     elif valueType in [cms.string, cms.untracked.string]:
2007                         if value.value() == self._paramSearch:
2008                             if self._verbose: print("set string process name %s.%s %s ==> %s"% (base, name, value, self._paramReplace))
2009                             setattr(pset, name,self._paramReplace)
2010                     elif valueType in [cms.VInputTag, cms.untracked.VInputTag]:
2011                         for (i,n) in enumerate(value):
2012                             if not isinstance(n, cms.InputTag):
2013                                 n=cms.InputTag(n)
2014                             if n.processName == self._paramSearch:
2015                                 # VInputTag can be declared as a list of strings, so ensure that n is formatted correctly
2016                                 if self._verbose:print("set process name %s.%s[%d] %s ==> %s " % (base, name, i, n, self._paramReplace))
2017                                 setattr(n,"processName",self._paramReplace)
2018                                 value[i]=n
2019                     elif valueType in [cms.vstring, cms.untracked.vstring]:
2020                         for (i,n) in enumerate(value):
2021                             if n==self._paramSearch:
2022                                 getattr(pset,name)[i]=self._paramReplace
2023                     elif valueType in [cms.InputTag, cms.untracked.InputTag]:
2024                         if value.processName == self._paramSearch:
2025                             if self._verbose: print("set process name %s.%s %s ==> %s " % (base, name, value, self._paramReplace))
2026                             setattr(getattr(pset, name),"processName",self._paramReplace)
2027 
2028         def enter(self,visitee):
2029             label = ''
2030             try:
2031                 label = visitee.label()
2032             except AttributeError:
2033                 label = '<Module not in a Process>'
2034             except:
2035                 label = 'other exception'
2036             self.doIt(visitee, label)
2037 
2038         def leave(self,visitee):
2039             pass
2040 
2041     #visit a sequence to replace all input tags
2042     def renameInputTagsInSequence(self,sequence,oldT="rawDataCollector",newT="rawDataRepacker"):
2043         print("Replacing all InputTag %s => %s"%(oldT,newT))
2044         from PhysicsTools.PatAlgos.tools.helpers import massSearchReplaceAnyInputTag
2045         massSearchReplaceAnyInputTag(getattr(self.process,sequence),oldT,newT)
2046         loadMe='from PhysicsTools.PatAlgos.tools.helpers import massSearchReplaceAnyInputTag'
2047         if not loadMe in self.additionalCommands:
2048             self.additionalCommands.append(loadMe)
2049         self.additionalCommands.append('massSearchReplaceAnyInputTag(process.%s,"%s","%s",False,True)'%(sequence,oldT,newT))
2050 
2051     #change the process name used to address HLT results in any sequence
2052     def renameHLTprocessInSequence(self, sequence, proc=None, HLTprocess='HLT', verbosityLevel=1):
2053         if proc == None:
2054             proc = self._options.hltProcess if self._options.hltProcess else self.process.name_()
2055         if proc == HLTprocess:
2056             return
2057         # look up all modules in the sequence
2058         if verbosityLevel > 0:
2059             print("replacing %s process name - sequence %s will use '%s'" % (HLTprocess, sequence, proc))
2060         verboseVisit = (verbosityLevel > 1)
2061         getattr(self.process,sequence).visit(
2062             ConfigBuilder.MassSearchReplaceProcessNameVisitor(HLTprocess, proc, whitelist = ("subSystemFolder",), verbose = verboseVisit))
2063         if 'from Configuration.Applications.ConfigBuilder import ConfigBuilder' not in self.additionalCommands:
2064             self.additionalCommands.append('from Configuration.Applications.ConfigBuilder import ConfigBuilder')
2065         self.additionalCommands.append(
2066             'process.%s.visit(ConfigBuilder.MassSearchReplaceProcessNameVisitor("%s", "%s", whitelist = ("subSystemFolder",), verbose = %s))'
2067             % (sequence, HLTprocess, proc, verboseVisit))
2068 
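    # Illustrative sketch (not part of the original source): when the job was
    # configured with, e.g., --hltProcess reHLT, a call such as
    #
    #   builder.renameHLTprocessInSequence('validation')
    #
    # replaces the default 'HLT' process label with 'reHLT' in every module of
    # the (hypothetical) 'validation' sequence and appends the matching visit()
    # command so the change is reproduced in the dumped configuration.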
2069     def expandMapping(self,seqList,mapping,index=None):
2070         maxLevel=30
2071         level=0
2072         while '@' in repr(seqList) and level<maxLevel:
2073             level+=1
2074             for specifiedCommand in seqList:
2075                 if specifiedCommand.startswith('@'):
2076                     location=specifiedCommand[1:]
2077                     if not location in mapping:
2078                         raise Exception("Impossible to map "+location+" from "+repr(mapping))
2079                     mappedTo=mapping[location]
2080                     if index!=None:
2081                         mappedTo=mappedTo[index]
2082                     seqList.remove(specifiedCommand)
2083                     seqList.extend(mappedTo.split('+'))
2084                     break
2085         if level==maxLevel:
2086             raise Exception("Could not fully expand "+repr(seqList)+" from "+repr(mapping))
2087 
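    # Illustrative sketch (not part of the original source): expandMapping()
    # resolves '@'-prefixed aliases in place, e.g. with a hypothetical mapping
    #
    #   mapping = {'common': ['DQMOfflineCommon', 'PostDQMOfflineCommon']}
    #   seqList = ['@common', 'HLTMonitoring']
    #   self.expandMapping(seqList, mapping, index=0)
    #   # seqList is now ['HLTMonitoring', 'DQMOfflineCommon']
    #
    # each alias is looked up, the requested column is split on '+' and spliced
    # into the list; nested aliases are followed for at most 30 iterations
    # before an exception is raised.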
2088     def prepare_DQM(self, stepSpec = 'DQMOffline'):
2089         # this one needs replacement
2090 
2091         # any 'DQM' job should use DQMStore in non-legacy mode (but not HARVESTING)
2092         self.loadAndRemember("DQMServices/Core/DQMStoreNonLegacy_cff")
2093         _,_dqmSeq,_ = self.loadDefaultOrSpecifiedCFF(stepSpec,self.DQMOFFLINEDefaultCFF)
2094         sequenceList=_dqmSeq.split('+')
2095         postSequenceList=_dqmSeq.split('+')
2096         from DQMOffline.Configuration.autoDQM import autoDQM
2097         self.expandMapping(sequenceList,autoDQM,index=0)
2098         self.expandMapping(postSequenceList,autoDQM,index=1)
2099 
2100         if len(set(sequenceList))!=len(sequenceList):
2101             sequenceList=list(OrderedSet(sequenceList))
2102             print("Duplicate entries for DQM, using",sequenceList)
2103 
2104         pathName='dqmoffline_step'
2105         for (i,_sequence) in enumerate(sequenceList):
2106             if (i!=0):
2107                 pathName='dqmoffline_%d_step'%(i)
2108 
2109             if 'HLT' in self.stepMap.keys() or self._options.hltProcess:
2110                 self.renameHLTprocessInSequence(_sequence)
2111 
2112             setattr(self.process,pathName, cms.EndPath( getattr(self.process,_sequence ) ) )
2113             self.schedule.append(getattr(self.process,pathName))
2114 
2115             if hasattr(self.process,"genstepfilter") and len(self.process.genstepfilter.triggerConditions):
2116                 #will get into the schedule smoothly
2117                 getattr(self.process,pathName).insert(0,self.process.genstepfilter)
2118 
2119 
2120         pathName='dqmofflineOnPAT_step'
2121         for (i,_sequence) in enumerate(postSequenceList):
2122             #Fix needed to avoid duplication of sequences not defined in autoDQM or without a PostDQM
2123             if (sequenceList[i]==postSequenceList[i]):
2124                 continue
2125             if (i!=0):
2126                 pathName='dqmofflineOnPAT_%d_step'%(i)
2127 
2128             setattr(self.process,pathName, cms.EndPath( getattr(self.process, _sequence ) ) )
2129             self.schedule.append(getattr(self.process,pathName))
2130 
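    # Illustrative sketch (not part of the original source): for a hypothetical
    # step specification such as 'DQM:@common+@muon', the expanded entries end
    # up as one EndPath per sequence, roughly
    #
    #   process.dqmoffline_step   = cms.EndPath(<first expanded sequence>)
    #   process.dqmoffline_1_step = cms.EndPath(<second expanded sequence>)
    #
    # while the second (PostDQM) column of autoDQM populates the
    # dqmofflineOnPAT_*_step EndPaths only for entries that differ from the
    # first column.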
2131     def prepare_HARVESTING(self, stepSpec = None):
2132         """ Enrich the process with harvesting step """
2133         self.DQMSaverCFF='Configuration/StandardSequences/DQMSaver'+self._options.harvesting+'_cff'
2134         self.loadAndRemember(self.DQMSaverCFF)
2135 
2136         harvestingConfig,sequence,_ = self.loadDefaultOrSpecifiedCFF(stepSpec,self.HARVESTINGDefaultCFF)
2137 
2138         # decide which HARVESTING paths to use
2139         harvestingList = sequence.split("+")
2140         from DQMOffline.Configuration.autoDQM import autoDQM
2141         from Validation.Configuration.autoValidation import autoValidation
2142         import copy
2143         combined_mapping = copy.deepcopy( autoDQM )
2144         combined_mapping.update( autoValidation )
2145         self.expandMapping(harvestingList,combined_mapping,index=-1)
2146 
2147         if len(set(harvestingList))!=len(harvestingList):
2148             harvestingList=list(OrderedSet(harvestingList))
2149             print("Duplicate entries for HARVESTING, using",harvestingList)
2150 
2151         for name in harvestingList:
2152             if not name in harvestingConfig.__dict__:
2153                 print(name,"is not a possible harvesting type. Available are",harvestingConfig.__dict__.keys())
2154                 # trigger hard error, like for other sequence types
2155                 getattr(self.process, name)
2156                 continue
2157             harvestingstream = getattr(harvestingConfig,name)
2158             if isinstance(harvestingstream,cms.Path):
2159                 self.schedule.append(harvestingstream)
2160                 self.blacklist_paths.append(harvestingstream)
2161             if isinstance(harvestingstream,cms.Sequence):
2162                 setattr(self.process,name+"_step",cms.Path(harvestingstream))
2163                 self.schedule.append(getattr(self.process,name+"_step"))
2164 
2165         # # NOTE: the "hltProcess" option currently does nothing in the HARVEST step
2166         # if self._options.hltProcess or ('HLT' in self.stepMap):
2167         #     pass
2168 
2169         self.scheduleSequence('DQMSaver','dqmsave_step')
2170         return
2171 
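    # Illustrative sketch (not part of the original source): with a hypothetical
    # step 'HARVESTING:@common', the alias is expanded through the merged
    # autoDQM/autoValidation mapping (last column, index=-1) and each resulting
    # entry is either scheduled directly, when the harvesting cff already
    # provides it as a cms.Path, or wrapped as
    #
    #   setattr(process, name + '_step', cms.Path(getattr(harvestingConfig, name)))
    #
    # before DQMSaver is appended as the final dqmsave_step.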
2172     def prepare_ALCAHARVEST(self, stepSpec = None):
2173         """ Enrich the process with AlCaHarvesting step """
2174         harvestingConfig = self.loadAndRemember(self.ALCAHARVESTDefaultCFF)
2175         sequence=stepSpec.split(".")[-1]
2176 
2177         # decide which AlcaHARVESTING paths to use
2178         harvestingList = sequence.split("+")
2179 
2180 
2181 
2182         from Configuration.AlCa.autoPCL import autoPCL
2183         self.expandMapping(harvestingList,autoPCL)
2184 
2185         for name in harvestingConfig.__dict__:
2186             harvestingstream = getattr(harvestingConfig,name)
2187             if name in harvestingList and isinstance(harvestingstream,cms.Path):
2188                 self.schedule.append(harvestingstream)
2189                 if isinstance(getattr(harvestingConfig,"ALCAHARVEST" + name + "_dbOutput"), cms.VPSet) and \
2190                    isinstance(getattr(harvestingConfig,"ALCAHARVEST" + name + "_metadata"), cms.VPSet):
2191                     self.executeAndRemember("process.PoolDBOutputService.toPut.extend(process.ALCAHARVEST" + name + "_dbOutput)")
2192                     self.executeAndRemember("process.pclMetadataWriter.recordsToMap.extend(process.ALCAHARVEST" + name + "_metadata)")
2193                 else:
2194                     self.executeAndRemember("process.PoolDBOutputService.toPut.append(process.ALCAHARVEST" + name + "_dbOutput)")
2195                     self.executeAndRemember("process.pclMetadataWriter.recordsToMap.append(process.ALCAHARVEST" + name + "_metadata)")
2196                 harvestingList.remove(name)
2197         # append the common part at the end of the sequence
2198         lastStep = getattr(harvestingConfig,"ALCAHARVESTDQMSaveAndMetadataWriter")
2199         self.schedule.append(lastStep)
2200 
2201         if len(harvestingList) != 0 and 'dummyHarvesting' not in harvestingList :
2202             print("The following harvesting could not be found : ", harvestingList)
2203             raise Exception("The following harvesting could not be found : "+str(harvestingList))
2204 
2205 
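    # Illustrative sketch (not part of the original source): for a hypothetical
    # step 'ALCAHARVEST:BeamSpotByRun', the corresponding cms.Path from the
    # ALCAHARVEST cff is scheduled and its database payload is wired in with
    #
    #   process.PoolDBOutputService.toPut.append(process.ALCAHARVESTBeamSpotByRun_dbOutput)
    #   process.pclMetadataWriter.recordsToMap.append(process.ALCAHARVESTBeamSpotByRun_metadata)
    #
    # (extend() replaces append() when those payloads are VPSets), and
    # ALCAHARVESTDQMSaveAndMetadataWriter is always scheduled last.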
2206 
2207     def prepare_ENDJOB(self, stepSpec = 'endOfProcess'):
2208         _,_endjobSeq,_=self.loadDefaultOrSpecifiedCFF(stepSpec,self.ENDJOBDefaultCFF)
2209         self.scheduleSequenceAtEnd(_endjobSeq,'endjob_step')
2210         return
2211 
2212     def finalizeFastSimHLT(self):
2213         self.process.reconstruction = cms.Path(self.process.reconstructionWithFamos)
2214         self.schedule.append(self.process.reconstruction)
2215 
2216 
2217     def build_production_info(self, evt_type, evtnumber):
2218         """ Add useful info for the production. """
2219         self.process.configurationMetadata=cms.untracked.PSet\
2220                                             (version=cms.untracked.string("$Revision: 1.19 $"),
2221                                              name=cms.untracked.string("Applications"),
2222                                              annotation=cms.untracked.string(evt_type+ " nevts:"+str(evtnumber))
2223                                              )
2224 
2225         self.addedObjects.append(("Production Info","configurationMetadata"))
2226 
2227 
2228     def create_process(self):
2229         self.pythonCfgCode =  "# Auto generated configuration file\n"
2230         self.pythonCfgCode += "# using: \n# "+__version__[1:-1]+"\n# "+__source__[1:-1]+'\n'
2231         self.pythonCfgCode += "# with command line options: "+self._options.arguments+'\n'
2232         self.pythonCfgCode += "import FWCore.ParameterSet.Config as cms\n\n"
2233 
2234         # now set up the modifiers
2235         modifiers=[]
2236         modifierStrings=[]
2237         modifierImports=[]
2238 
2239         if hasattr(self._options,"era") and self._options.era :
2240             # Multiple eras can be specified in a comma-separated list
2241             from Configuration.StandardSequences.Eras import eras
2242             for requestedEra in self._options.era.split(",") :
2243                 modifierStrings.append(requestedEra)
2244                 modifierImports.append(eras.pythonCfgLines[requestedEra])
2245                 modifiers.append(getattr(eras,requestedEra))
2246 
2247 
2248         if hasattr(self._options,"procModifiers") and self._options.procModifiers:
2249             import importlib
2250             thingsImported=[]
2251             for c in self._options.procModifiers:
2252                 thingsImported.extend(c.split(","))
2253             for pm in thingsImported:
2254                 modifierStrings.append(pm)
2255                 modifierImports.append('from Configuration.ProcessModifiers.'+pm+'_cff import '+pm)
2256                 modifiers.append(getattr(importlib.import_module('Configuration.ProcessModifiers.'+pm+'_cff'),pm))
2257 
2258         self.pythonCfgCode += '\n'.join(modifierImports)+'\n\n'
2259         self.pythonCfgCode += "process = cms.Process('"+self._options.name+"'" # Start of the line, finished after the loop
2260 
2261 
2262         if len(modifierStrings)>0:
2263             self.pythonCfgCode+= ','+','.join(modifierStrings)
2264         self.pythonCfgCode+=')\n\n'
2265 
2266         #yes, the cfg code gets out of sync here if a process is passed in. That could be fixed in the future
2267         #assuming there is some way for the fwk to get the list of modifiers (and their stringified name)
2268         if self.process == None:
2269             if len(modifiers)>0:
2270                 self.process = cms.Process(self._options.name,*modifiers)
2271             else:
2272                 self.process = cms.Process(self._options.name)
2273 
2274 
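    # Illustrative sketch (not part of the original source): with hypothetical
    # options era='Run3' and procModifiers='trackingNoLoopers', the generated
    # preamble would read roughly
    #
    #   from Configuration.Eras.Era_Run3_cff import Run3
    #   from Configuration.ProcessModifiers.trackingNoLoopers_cff import trackingNoLoopers
    #   process = cms.Process('RECO',Run3,trackingNoLoopers)
    #
    # and, when no pre-existing process object was handed in, the in-memory
    # process is created with the same modifiers.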
2275 
2276 
2277     def prepare(self, doChecking = False):
2278         """ Prepare the configuration string and add missing pieces."""
2279 
2280         self.loadAndRemember(self.EVTCONTDefaultCFF)  #load the event contents regardless
2281         self.addMaxEvents()
2282         if self.with_input:
2283             self.addSource()
2284         self.addStandardSequences()
2285         ##adding standard sequences might change the inputEventContent option and therefore needs to be finalized after
2286         self.completeInputCommand()
2287         self.addConditions()
2288 
2289 
2290         outputModuleCfgCode=""
2291         if not 'HARVESTING' in self.stepMap.keys() and not 'ALCAHARVEST' in self.stepMap.keys() and not 'ALCAOUTPUT' in self.stepMap.keys() and self.with_output:
2292             outputModuleCfgCode=self.addOutput()
2293 
2294         self.addCommon()
2295 
2296         self.pythonCfgCode += "# import of standard configurations\n"
2297         for module in self.imports:
2298             self.pythonCfgCode += ("process.load('"+module+"')\n")
2299 
2300         # production info
2301         if not hasattr(self.process,"configurationMetadata"):
2302             self.build_production_info(self._options.evt_type, self._options.number)
2303         else:
2304             #the PSet was added via a load
2305             self.addedObjects.append(("Production Info","configurationMetadata"))
2306 
2307         self.pythonCfgCode +="\n"
2308         for comment,object in self.addedObjects:
2309             if comment!="":
2310                 self.pythonCfgCode += "\n# "+comment+"\n"
2311             self.pythonCfgCode += dumpPython(self.process,object)
2312 
2313         # dump the output definition
2314         self.pythonCfgCode += "\n# Output definition\n"
2315         self.pythonCfgCode += outputModuleCfgCode
2316 
2317         # dump all additional outputs (e.g. alca or skim streams)
2318         self.pythonCfgCode += "\n# Additional output definition\n"
2319         #I do not understand why the keys are not normally ordered.
2320         nl=sorted(self.additionalOutputs.keys())
2321         for name in nl:
2322             output = self.additionalOutputs[name]
2323             self.pythonCfgCode += "process.%s = %s" %(name, output.dumpPython())
2324             tmpOut = cms.EndPath(output)
2325             setattr(self.process,name+'OutPath',tmpOut)
2326             self.schedule.append(tmpOut)
2327 
2328         # dump all additional commands
2329         self.pythonCfgCode += "\n# Other statements\n"
2330         for command in self.additionalCommands:
2331             self.pythonCfgCode += command + "\n"
2332 
2333         #comma separated list of objects that deserve to be inlined in the configuration (typically from a modified config deep down)
2334         for object in self._options.inlineObjects.split(','):
2335             if not object:
2336                 continue
2337             if not hasattr(self.process,object):
2338                 print('cannot inline -'+object+'- : not known')
2339             else:
2340                 self.pythonCfgCode +='\n'
2341                 self.pythonCfgCode +=dumpPython(self.process,object)
2342 
2343         if self._options.pileup=='HiMixEmbGEN':
2344             self.pythonCfgCode += "\nprocess.generator.embeddingMode=cms.int32(1)\n"
2345 
2346         # dump all paths
2347         self.pythonCfgCode += "\n# Path and EndPath definitions\n"
2348         for path in self.process.paths:
2349             if getattr(self.process,path) not in self.blacklist_paths:
2350                 self.pythonCfgCode += dumpPython(self.process,path)
2351 
2352         for endpath in self.process.endpaths:
2353             if getattr(self.process,endpath) not in self.blacklist_paths:
2354                 self.pythonCfgCode += dumpPython(self.process,endpath)
2355 
2356         # dump the schedule
2357         self.pythonCfgCode += "\n# Schedule definition\n"
2358 
2359         # handling of the schedule
2360         pathNames = ['process.'+p.label_() for p in self.schedule]
2361         if self.process.schedule == None:
2362             self.process.schedule = cms.Schedule()
2363             for item in self.schedule:
2364                 self.process.schedule.append(item)
2365             result = 'process.schedule = cms.Schedule('+','.join(pathNames)+')\n'
2366         else:
2367             if not isinstance(self.scheduleIndexOfFirstHLTPath, int):
2368                 raise Exception('the schedule was imported from a cff in HLTrigger.Configuration, but the final index of the first HLT path is undefined')
2369 
2370             for index, item in enumerate(self.schedule):
2371                 if index < self.scheduleIndexOfFirstHLTPath:
2372                     self.process.schedule.insert(index, item)
2373                 else:
2374                     self.process.schedule.append(item)
2375 
2376             result = "# process.schedule imported from cff in HLTrigger.Configuration\n"
2377             for index, item in enumerate(pathNames[:self.scheduleIndexOfFirstHLTPath]):
2378                 result += 'process.schedule.insert('+str(index)+', '+item+')\n'
2379             if self.scheduleIndexOfFirstHLTPath < len(pathNames):
2380                 result += 'process.schedule.extend(['+','.join(pathNames[self.scheduleIndexOfFirstHLTPath:])+'])\n'
2381 
2382         self.pythonCfgCode += result
2383 
2384         for labelToAssociate in self.labelsToAssociate:
2385             self.process.schedule.associate(getattr(self.process, labelToAssociate))
2386             self.pythonCfgCode += 'process.schedule.associate(process.' + labelToAssociate + ')\n'
2387 
2388         from PhysicsTools.PatAlgos.tools.helpers import associatePatAlgosToolsTask
2389         associatePatAlgosToolsTask(self.process)
2390         self.pythonCfgCode+="from PhysicsTools.PatAlgos.tools.helpers import associatePatAlgosToolsTask\n"
2391         self.pythonCfgCode+="associatePatAlgosToolsTask(process)\n"
2392 
2393         overrideThreads = (self._options.nThreads != 1)
2394         overrideConcurrentLumis = (self._options.nConcurrentLumis != defaultOptions.nConcurrentLumis)
2395         overrideConcurrentIOVs = (self._options.nConcurrentIOVs != defaultOptions.nConcurrentIOVs)
2396 
2397         if overrideThreads or overrideConcurrentLumis or overrideConcurrentIOVs:
2398             self.pythonCfgCode +="\n"
2399             self.pythonCfgCode +="#Setup FWK for multithreaded\n"
2400             if overrideThreads:
2401                 self.pythonCfgCode +="process.options.numberOfThreads = {}\n".format(self._options.nThreads)
2402                 self.pythonCfgCode +="process.options.numberOfStreams = {}\n".format(self._options.nStreams)
2403                 self.process.options.numberOfThreads = self._options.nThreads
2404                 self.process.options.numberOfStreams = self._options.nStreams
2405             if overrideConcurrentLumis:
2406                 self.pythonCfgCode +="process.options.numberOfConcurrentLuminosityBlocks = {}\n".format(self._options.nConcurrentLumis)
2407                 self.process.options.numberOfConcurrentLuminosityBlocks = self._options.nConcurrentLumis
2408             if overrideConcurrentIOVs:
2409                 self.pythonCfgCode +="process.options.eventSetup.numberOfConcurrentIOVs = {}\n".format(self._options.nConcurrentIOVs)
2410                 self.process.options.eventSetup.numberOfConcurrentIOVs = self._options.nConcurrentIOVs
2411 
2412         if self._options.accelerators is not None:
2413             accelerators = self._options.accelerators.split(',')
2414             self.pythonCfgCode += "\n"
2415             self.pythonCfgCode += "# Enable only these accelerator backends\n"
2416             self.pythonCfgCode += "process.load('Configuration.StandardSequences.Accelerators_cff')\n"
2417             self.pythonCfgCode += "process.options.accelerators = ['" + "', '".join(accelerators) + "']\n"
2418             self.process.load('Configuration.StandardSequences.Accelerators_cff')
2419             self.process.options.accelerators = accelerators
2420 
2421         #repacked version
2422         if self._options.isRepacked:
2423             self.pythonCfgCode +="\n"
2424             self.pythonCfgCode +="from Configuration.Applications.ConfigBuilder import MassReplaceInputTag\n"
2425             self.pythonCfgCode +="MassReplaceInputTag(process, new=\"rawDataMapperByLabel\", old=\"rawDataCollector\")\n"
2426             MassReplaceInputTag(self.process, new="rawDataMapperByLabel", old="rawDataCollector")
2427 
2428         # special treatment in case of production filter sequence 2/2
2429         if self.productionFilterSequence and not (self._options.pileup=='HiMixEmbGEN'):
2430             self.pythonCfgCode +='# filter all paths with the production filter sequence\n'
2431             self.pythonCfgCode +='for path in process.paths:\n'
2432             if len(self.conditionalPaths):
2433                 self.pythonCfgCode +='\tif not path in %s: continue\n'%str(self.conditionalPaths)
2434             if len(self.excludedPaths):
2435                 self.pythonCfgCode +='\tif path in %s: continue\n'%str(self.excludedPaths)
2436             self.pythonCfgCode +='\tgetattr(process,path).insert(0, process.%s)\n'%(self.productionFilterSequence,)
2437             pfs = getattr(self.process,self.productionFilterSequence)
2438             for path in self.process.paths:
2439                 if not path in self.conditionalPaths: continue
2440                 if path in self.excludedPaths: continue
2441                 getattr(self.process,path).insert(0, pfs)
2442 
2443 
2444         # dump customise fragment
2445         self.pythonCfgCode += self.addCustomise()
2446 
2447         if self._options.runUnscheduled:
2448             print("--runUnscheduled is deprecated and not necessary anymore, and will be removed soon. Please update your command line.")
2449         # Keep the "unscheduled customise functions" separate for now,
2450         # there are customize functions given by users (in our unit
2451         # tests) that need to be run before the "unscheduled customise
2452         # functions"
2453         self.pythonCfgCode += self.addCustomise(1)
2454 
2455         self.pythonCfgCode += self.addCustomiseCmdLine()
2456 
2457         if hasattr(self.process,"logErrorHarvester"):
2458             #configure logErrorHarvester to wait for same EDProducers to finish as the OutputModules
2459             self.pythonCfgCode +="\n#Have logErrorHarvester wait for the same EDProducers to finish as those providing data for the OutputModule\n"
2460             self.pythonCfgCode +="from FWCore.Modules.logErrorHarvester_cff import customiseLogErrorHarvesterUsingOutputCommands\n"
2461             self.pythonCfgCode +="process = customiseLogErrorHarvesterUsingOutputCommands(process)\n"
2462             from FWCore.Modules.logErrorHarvester_cff import customiseLogErrorHarvesterUsingOutputCommands
2463             self.process = customiseLogErrorHarvesterUsingOutputCommands(self.process)
2464 
2465         # Temporary hack to put the early delete customization after
2466         # everything else
2467         #
2468         # FIXME: remove when no longer needed
2469         self.pythonCfgCode += "\n# Add early deletion of temporary data products to reduce peak memory need\n"
2470         self.pythonCfgCode += "from Configuration.StandardSequences.earlyDeleteSettings_cff import customiseEarlyDelete\n"
2471         self.pythonCfgCode += "process = customiseEarlyDelete(process)\n"
2472         self.pythonCfgCode += "# End adding early deletion\n"
2473         from Configuration.StandardSequences.earlyDeleteSettings_cff import customiseEarlyDelete
2474         self.process = customiseEarlyDelete(self.process)
2475 
2476         imports = cms.specialImportRegistry.getSpecialImports()
2477         if len(imports) > 0:
2478             #need to inject this at the top
2479             index = self.pythonCfgCode.find("import FWCore.ParameterSet.Config")
2480             #now find the end of line
2481             index = self.pythonCfgCode.find("\n",index)
2482             self.pythonCfgCode = self.pythonCfgCode[:index]+ "\n" + "\n".join(imports)+"\n" +self.pythonCfgCode[index:]
2483 
2484 
2485         # make the .io file
2486 
2487         if self._options.io:
2488             #io=open(self._options.python_filename.replace('.py','.io'),'w')
2489             if not self._options.io.endswith('.io'): self._options.io+='.io'
2490             io=open(self._options.io,'w')
2491             ioJson={}
2492             if hasattr(self.process.source,"fileNames"):
2493                 if len(self.process.source.fileNames.value()):
2494                     ioJson['primary']=self.process.source.fileNames.value()
2495             if hasattr(self.process.source,"secondaryFileNames"):
2496                 if len(self.process.source.secondaryFileNames.value()):
2497                     ioJson['secondary']=self.process.source.secondaryFileNames.value()
2498             if self._options.pileup_input and (self._options.pileup_input.startswith('dbs:') or self._options.pileup_input.startswith('das:')):
2499                 ioJson['pileup']=self._options.pileup_input[4:]
2500             for (o,om) in self.process.outputModules_().items():
2501                 ioJson[o]=om.fileName.value()
2502             ioJson['GT']=self.process.GlobalTag.globaltag.value()
2503             if self.productionFilterSequence:
2504                 ioJson['filter']=self.productionFilterSequence
2505             import json
2506             io.write(json.dumps(ioJson))
2507         return
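        # Illustrative sketch (not part of the original source): the resulting
        # .io file is a small JSON summary of the job I/O, for example
        # (hypothetical values)
        #
        #   {"primary": ["file:input.root"],
        #    "RECOoutput": "output.root",
        #    "GT": "auto:run3_data"}
        #
        # where each output module label maps to its fileName, and the optional
        # 'secondary', 'pileup' and 'filter' keys are filled only when defined.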
2508