0001 #! /usr/bin/env python3
0002 
0003 from __future__ import print_function
0004 __version__ = "$Revision: 1.19 $"
0005 __source__ = "$Source: /local/reps/CMSSW/CMSSW/Configuration/Applications/python/ConfigBuilder.py,v $"
0006 
0007 import FWCore.ParameterSet.Config as cms
0008 from FWCore.ParameterSet.Modules import _Module
0009 # The following import is provided for backward compatibility reasons.
0010 # The function used to be defined in this file.
0011 from FWCore.ParameterSet.MassReplace import massReplaceInputTag as MassReplaceInputTag
0012 
0013 import hashlib
0014 import sys
0015 import re
0016 import collections
0017 from subprocess import Popen,PIPE
0018 import FWCore.ParameterSet.DictTypes as DictTypes
0019 class Options:
0020     pass
0021 
0022 # the canonical defaults
0023 defaultOptions = Options()
0024 defaultOptions.datamix = 'DataOnSim'
0025 defaultOptions.isMC=False
0026 defaultOptions.isData=True
0027 defaultOptions.step=''
0028 defaultOptions.pileup='NoPileUp'
0029 defaultOptions.pileup_input = None
0030 defaultOptions.pileup_dasoption = ''
0031 defaultOptions.geometry = 'SimDB'
0032 defaultOptions.geometryExtendedOptions = ['ExtendedGFlash','Extended','NoCastor']
0033 defaultOptions.magField = ''
0034 defaultOptions.conditions = None
0035 defaultOptions.scenarioOptions=['pp','cosmics','nocoll','HeavyIons']
0036 defaultOptions.harvesting= 'AtRunEnd'
0037 defaultOptions.gflash = False
0038 defaultOptions.number = -1
0039 defaultOptions.number_out = None
0040 defaultOptions.arguments = ""
0041 defaultOptions.name = "NO NAME GIVEN"
0042 defaultOptions.evt_type = ""
0043 defaultOptions.filein = ""
0044 defaultOptions.dasquery=""
0045 defaultOptions.dasoption=""
0046 defaultOptions.secondfilein = ""
0047 defaultOptions.customisation_file = []
0048 defaultOptions.customisation_file_unsch = []
0049 defaultOptions.customise_commands = ""
0050 defaultOptions.inline_custom=False
0051 defaultOptions.particleTable = 'pythiapdt'
0052 defaultOptions.particleTableList = ['pythiapdt','pdt']
0053 defaultOptions.dirin = ''
0054 defaultOptions.dirout = ''
0055 defaultOptions.filetype = 'EDM'
0056 defaultOptions.fileout = 'output.root'
0057 defaultOptions.filtername = ''
0058 defaultOptions.lazy_download = False
0059 defaultOptions.custom_conditions = ''
0060 defaultOptions.hltProcess = ''
0061 defaultOptions.eventcontent = None
0062 defaultOptions.datatier = None
0063 defaultOptions.inlineEventContent = True
0064 defaultOptions.inlineObjets =''
0065 defaultOptions.hideGen=False
0066 from Configuration.StandardSequences.VtxSmeared import VtxSmearedDefaultKey,VtxSmearedHIDefaultKey
0067 defaultOptions.beamspot=None
0068 defaultOptions.outputDefinition =''
0069 defaultOptions.inputCommands = None
0070 defaultOptions.outputCommands = None
0071 defaultOptions.inputEventContent = ''
0072 defaultOptions.dropDescendant = False
0073 defaultOptions.relval = None
0074 defaultOptions.profile = None
0075 defaultOptions.isRepacked = False
0076 defaultOptions.restoreRNDSeeds = False
0077 defaultOptions.donotDropOnInput = ''
0078 defaultOptions.python_filename =''
0079 defaultOptions.io=None
0080 defaultOptions.lumiToProcess=None
0081 defaultOptions.fast=False
0082 defaultOptions.runsAndWeightsForMC = None
0083 defaultOptions.runsScenarioForMC = None
0084 defaultOptions.runsAndWeightsForMCIntegerWeights = None
0085 defaultOptions.runsScenarioForMCIntegerWeights = None
0086 defaultOptions.runUnscheduled = False
0087 defaultOptions.timeoutOutput = False
0088 defaultOptions.nThreads = '1'
0089 defaultOptions.nStreams = '0'
0090 defaultOptions.nConcurrentLumis = '0'
0091 defaultOptions.nConcurrentIOVs = '0'
0092 defaultOptions.accelerators = None
0093 
0094 # some helper routines
0095 def dumpPython(process,name):
0096     theObject = getattr(process,name)
0097     if isinstance(theObject,cms.Path) or isinstance(theObject,cms.EndPath) or isinstance(theObject,cms.Sequence):
0098         return "process."+name+" = " + theObject.dumpPython()
0099     elif isinstance(theObject,_Module) or isinstance(theObject,cms.ESProducer):
0100         return "process."+name+" = " + theObject.dumpPython()+"\n"
0101     else:
0102         return "process."+name+" = " + theObject.dumpPython()+"\n"
0103 def filesFromList(fileName,s=None):
0104     import os
0105     import FWCore.ParameterSet.Config as cms
0106     prim=[]
0107     sec=[]
0108     for line in open(fileName,'r'):
0109         if line.count(".root")>=2:
0110             #two files solution...
0111             entries=line.replace("\n","").split()
0112             prim.append(entries[0])
0113             sec.append(entries[1])
0114         elif (line.find(".root")!=-1):
0115             entry=line.replace("\n","")
0116             prim.append(entry)
0117     # remove any duplicates but keep the order
0118     file_seen = set()
0119     prim = [f for f in prim if not (f in file_seen or file_seen.add(f))]
0120     file_seen = set()
0121     sec = [f for f in sec if not (f in file_seen or file_seen.add(f))]
0122     if s:
0123         if not hasattr(s,"fileNames"):
0124             s.fileNames=cms.untracked.vstring(prim)
0125         else:
0126             s.fileNames.extend(prim)
0127         if len(sec)!=0:
0128             if not hasattr(s,"secondaryFileNames"):
0129                 s.secondaryFileNames=cms.untracked.vstring(sec)
0130             else:
0131                 s.secondaryFileNames.extend(sec)
0132     print("found files: ",prim)
0133     if len(prim)==0:
0134         raise Exception("There are no files in input from the file list")
0135     if len(sec)!=0:
0136         print("found parent files:",sec)
0137     return (prim,sec)
0138 
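# Illustrative usage of filesFromList (a sketch; "files.txt" is a hypothetical text file
# with one primary .root file per line, optionally followed by its parent file):
#   src = cms.Source("PoolSource", fileNames=cms.untracked.vstring())
#   prim, sec = filesFromList("files.txt", src)
# Duplicates are removed while the original order is preserved.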
0139 def filesFromDASQuery(query,option="",s=None):
0140     import os,time
0141     import FWCore.ParameterSet.Config as cms
0142     prim=[]
0143     sec=[]
0144     print("the query is",query)
0145     eC=5
0146     count=0
0147     while eC!=0 and count<3:
0148         if count!=0:
0149             print('Sleeping, then retrying DAS')
0150             time.sleep(100)
0151         p = Popen('dasgoclient %s --query "%s"'%(option,query), stdout=PIPE,shell=True, universal_newlines=True)
0152         pipe=p.stdout.read()
0153         tupleP = os.waitpid(p.pid, 0)
0154         eC=tupleP[1]
0155         count=count+1
0156     if eC==0:
0157         print("DAS succeeded after",count,"attempts",eC)
0158     else:
0159         print("DAS failed 3 times - I give up")
0160     for line in pipe.split('\n'):
0161         if line.count(".root")>=2:
0162             #two files solution...
0163             entries=line.replace("\n","").split()
0164             prim.append(entries[0])
0165             sec.append(entries[1])
0166         elif (line.find(".root")!=-1):
0167             entry=line.replace("\n","")
0168             prim.append(entry)
0169     # remove any duplicates
0170     prim = sorted(list(set(prim)))
0171     sec = sorted(list(set(sec)))
0172     if s:
0173         if not hasattr(s,"fileNames"):
0174             s.fileNames=cms.untracked.vstring(prim)
0175         else:
0176             s.fileNames.extend(prim)
0177         if len(sec)!=0:
0178             if not hasattr(s,"secondaryFileNames"):
0179                 s.secondaryFileNames=cms.untracked.vstring(sec)
0180             else:
0181                 s.secondaryFileNames.extend(sec)
0182     print("found files: ",prim)
0183     if len(sec)!=0:
0184         print("found parent files:",sec)
0185     return (prim,sec)
0186 
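# Illustrative usage of filesFromDASQuery (a sketch; the dataset is a placeholder and a
# working dasgoclient is assumed to be available in the environment):
#   src = cms.Source("PoolSource", fileNames=cms.untracked.vstring())
#   filesFromDASQuery('file dataset = /SomePrimary/SomeEra/GEN-SIM', '', src)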
0187 def anyOf(listOfKeys,dict,opt=None):
0188     for k in listOfKeys:
0189         if k in dict:
0190             toReturn=dict[k]
0191             dict.pop(k)
0192             return toReturn
0193     if opt!=None:
0194         return opt
0195     else:
0196         raise Exception("one of "+','.join(listOfKeys)+" is a mandatory entry of the --output options")
0197 
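# Worked example (illustrative): anyOf(['t','tier','dataTier'], {'tier':'AOD','e':'AOD'})
# returns 'AOD' and pops the 'tier' key from the dict; if none of the keys is present and
# no default is given, an exception is raised.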
0198 class ConfigBuilder(object):
0199     """The main building routines """
0200 
0201     def __init__(self, options, process = None, with_output = False, with_input = False ):
0202         """options taken from old cmsDriver and optparse """
0203 
0204         options.outfile_name = options.dirout+options.fileout
0205 
0206         self._options = options
0207 
0208         if self._options.isData and options.isMC:
0209             raise Exception("ERROR: You may specify only --data or --mc, not both")
0210         #if not self._options.conditions:
0211         #        raise Exception("ERROR: No conditions given!\nPlease specify conditions. E.g. via --conditions=IDEAL_30X::All")
0212 
0213         # check that MEtoEDMConverter (running in ENDJOB) and DQMIO don't run in the same job
0214         if 'ENDJOB' in self._options.step:
0215             if  (hasattr(self._options,"outputDefinition") and \
0216                 self._options.outputDefinition != '' and \
0217                 any(anyOf(['t','tier','dataTier'],outdic) == 'DQMIO' for outdic in eval(self._options.outputDefinition))) or \
0218                 (hasattr(self._options,"datatier") and \
0219                 self._options.datatier and \
0220                 'DQMIO' in self._options.datatier):
0221                 print("removing ENDJOB from steps since it is not compatible with the DQMIO dataTier")
0222                 self._options.step=self._options.step.replace(',ENDJOB','')
0223 
0224 
0225 
0226         # what steps are provided by this class?
0227         stepList = [re.sub(r'^prepare_', '', methodName) for methodName in ConfigBuilder.__dict__ if methodName.startswith('prepare_')]
0228         self.stepMap={}
0229         self.stepKeys=[]
0230         for step in self._options.step.split(","):
0231             if step=='': continue
0232             stepParts = step.split(":")
0233             stepName = stepParts[0]
0234             if stepName not in stepList and not stepName.startswith('re'):
0235                 raise ValueError("Step "+stepName+" unknown")
0236             if len(stepParts)==1:
0237                 self.stepMap[stepName]=""
0238             elif len(stepParts)==2:
0239                 self.stepMap[stepName]=stepParts[1].split('+')
0240             elif len(stepParts)==3:
0241                 self.stepMap[stepName]=(stepParts[2].split('+'),stepParts[1])
0242             else:
0243                 raise ValueError("Step definition "+step+" invalid")
0244             self.stepKeys.append(stepName)
0245 
0246         #print "map of steps is:",self.stepMap
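        # Illustrative step specifications (a sketch; sequence names are placeholders):
        #   "RECO"                 -> stepMap['RECO'] = ""
        #   "RECO:seqA+seqB"       -> stepMap['RECO'] = ['seqA','seqB']
        #   "ALCA:Mode:seqA+seqB"  -> stepMap['ALCA'] = (['seqA','seqB'], 'Mode')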
0247 
0248         self.with_output = with_output
0249         self.process=process
0250 
0251         if hasattr(self._options,"no_output_flag") and self._options.no_output_flag:
0252             self.with_output = False
0253         self.with_input = with_input
0254         self.imports = []
0255         self.create_process()
0256         self.define_Configs()
0257         self.schedule = list()
0258         self.scheduleIndexOfFirstHLTPath = None
0259 
0260         # we are doing two things here:
0261         # creating a process to catch errors
0262         # building the code to re-create the process
0263 
0264         self.additionalCommands = []
0265         # TODO: maybe a list of to be dumped objects would help as well
0266         self.blacklist_paths = []
0267         self.addedObjects = []
0268         self.additionalOutputs = {}
0269 
0270         self.productionFilterSequence = None
0271         self.labelsToAssociate=[]
0272         self.nextScheduleIsConditional=False
0273         self.conditionalPaths=[]
0274         self.excludedPaths=[]
0275 
0276     def profileOptions(self):
0277         """
0278         Parse the --profile option and return the parameters used to set up the
0279         IgProfService (added in addCommon), so that profiles can be dumped in the
0280         middle of the run.
0281         """
0282         profileOpts = self._options.profile.split(':')
0283         profilerStart = 1
0284         profilerInterval = 100
0285         profilerFormat = None
0286         profilerJobFormat = None
0287 
0288         if len(profileOpts):
0289             #type, given as the first argument, is unused here
0290             profileOpts.pop(0)
0291         if len(profileOpts):
0292             startEvent = profileOpts.pop(0)
0293             if not startEvent.isdigit():
0294                 raise Exception("%s is not a number" % startEvent)
0295             profilerStart = int(startEvent)
0296         if len(profileOpts):
0297             eventInterval = profileOpts.pop(0)
0298             if not eventInterval.isdigit():
0299                 raise Exception("%s is not a number" % eventInterval)
0300             profilerInterval = int(eventInterval)
0301         if len(profileOpts):
0302             profilerFormat = profileOpts.pop(0)
0303 
0304 
0305         if not profilerFormat:
0306             profilerFormat = "%s___%s___%%I.gz" % (
0307                 self._options.evt_type.replace("_cfi", ""),
0308                 hashlib.md5(
0309                     (str(self._options.step) + str(self._options.pileup) + str(self._options.conditions) +
0310                     str(self._options.datatier) + str(self._options.profileTypeLabel)).encode('utf-8')
0311                 ).hexdigest()
0312             )
0313         if not profilerJobFormat and profilerFormat.endswith(".gz"):
0314             profilerJobFormat = profilerFormat.replace(".gz", "_EndOfJob.gz")
0315         elif not profilerJobFormat:
0316             profilerJobFormat = profilerFormat + "_EndOfJob.gz"
0317 
0318         return (profilerStart,profilerInterval,profilerFormat,profilerJobFormat)
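        # Worked example (illustrative): "--profile pp:101:50" yields profilerStart=101,
        # profilerInterval=50, and a default profilerFormat built from the event type and an
        # md5 hash of step/pileup/conditions/datatier/profileTypeLabel; omitted fields keep
        # the defaults above (start at event 1, report every 100 events).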
0319 
0320     def load(self,includeFile):
0321         includeFile = includeFile.replace('/','.')
0322         self.process.load(includeFile)
0323         return sys.modules[includeFile]
0324 
0325     def loadAndRemember(self, includeFile):
0326         """helper routine to load am memorize imports"""
0327         # we could make the imports a on-the-fly data method of the process instance itself
0328         # not sure if the latter is a good idea
0329         includeFile = includeFile.replace('/','.')
0330         self.imports.append(includeFile)
0331         self.process.load(includeFile)
0332         return sys.modules[includeFile]
0333 
0334     def executeAndRemember(self, command):
0335         """helper routine to remember replace statements"""
0336         self.additionalCommands.append(command)
0337         if not command.strip().startswith("#"):
0338         # substitute: process.foo = process.bar -> self.process.foo = self.process.bar
0339             import re
0340             exec(re.sub(r"([^a-zA-Z_0-9]|^)(process)([^a-zA-Z_0-9])",r"\1self.process\3",command))
0341             #exec(command.replace("process.","self.process."))
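            # Worked example (illustrative): the substitution above rewrites
            #   process.mix.input.nbPileupEvents.averageNumber = cms.double(20)
            # as the same assignment on self.process before exec'ing it, while leaving
            # identifiers that merely contain "process" (e.g. myprocess) untouched.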
0342 
0343     def addCommon(self):
0344         if 'HARVESTING' in self.stepMap.keys() or 'ALCAHARVEST' in self.stepMap.keys():
0345             self.process.options.Rethrow = ['ProductNotFound']
0346             self.process.options.fileMode = 'FULLMERGE'
0347 
0348         self.addedObjects.append(("","options"))
0349 
0350         if self._options.lazy_download:
0351             self.process.AdaptorConfig = cms.Service("AdaptorConfig",
0352                                                      stats = cms.untracked.bool(True),
0353                                                      enable = cms.untracked.bool(True),
0354                                                      cacheHint = cms.untracked.string("lazy-download"),
0355                                                      readHint = cms.untracked.string("read-ahead-buffered")
0356                                                      )
0357             self.addedObjects.append(("Setup lazy download","AdaptorConfig"))
0358 
0359         #self.process.cmsDriverCommand = cms.untracked.PSet( command=cms.untracked.string('cmsDriver.py '+self._options.arguments) )
0360         #self.addedObjects.append(("what cmsDriver command was used","cmsDriverCommand"))
0361 
0362         if self._options.profile:
0363             (start, interval, eventFormat, jobFormat)=self.profileOptions()
0364             self.process.IgProfService = cms.Service("IgProfService",
0365                                                      reportFirstEvent            = cms.untracked.int32(start),
0366                                                      reportEventInterval         = cms.untracked.int32(interval),
0367                                                      reportToFileAtPostEvent     = cms.untracked.string("| gzip -c > %s"%(eventFormat)),
0368                                                      reportToFileAtPostEndJob    = cms.untracked.string("| gzip -c > %s"%(jobFormat)))
0369             self.addedObjects.append(("Setup IGProf Service for profiling","IgProfService"))
0370 
0371     def addMaxEvents(self):
0372         """Here we decide how many evts will be processed"""
0373         self.process.maxEvents.input = int(self._options.number)
0374         if self._options.number_out:
0375             self.process.maxEvents.output = int(self._options.number_out)
0376         self.addedObjects.append(("","maxEvents"))
0377 
0378     def addSource(self):
0379         """Here the source is built. Priority: file, generator"""
0380         self.addedObjects.append(("Input source","source"))
0381 
0382         def filesFromOption(self):
0383             for entry in self._options.filein.split(','):
0384                 print("entry",entry)
0385                 if entry.startswith("filelist:"):
0386                     filesFromList(entry[9:],self.process.source)
0387                 elif entry.startswith("dbs:") or entry.startswith("das:"):
0388                     filesFromDASQuery('file dataset = %s'%(entry[4:]),self._options.dasoption,self.process.source)
0389                 else:
0390                     self.process.source.fileNames.append(self._options.dirin+entry)
0391             if self._options.secondfilein:
0392                 if not hasattr(self.process.source,"secondaryFileNames"):
0393                     raise Exception("--secondfilein not compatible with "+self._options.filetype+" input type")
0394                 for entry in self._options.secondfilein.split(','):
0395                     print("entry",entry)
0396                     if entry.startswith("filelist:"):
0397                         self.process.source.secondaryFileNames.extend((filesFromList(entry[9:]))[0])
0398                     elif entry.startswith("dbs:") or entry.startswith("das:"):
0399                         self.process.source.secondaryFileNames.extend((filesFromDASQuery('file dataset = %s'%(entry[4:]),self._options.dasoption))[0])
0400                     else:
0401                         self.process.source.secondaryFileNames.append(self._options.dirin+entry)
0402 
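        # The nested helper above accepts three forms of --filein entry (the names below
        # are hypothetical examples):
        #   --filein myInput.root                  plain file name(s), prefixed with --dirin
        #   --filein filelist:my_files.txt         text file parsed by filesFromList
        #   --filein das:/SomeDataset/SomeEra/AOD  dataset resolved through filesFromDASQuery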
0403         if self._options.filein or self._options.dasquery:
0404             if self._options.filetype == "EDM":
0405                 self.process.source=cms.Source("PoolSource",
0406                                                fileNames = cms.untracked.vstring(),
0407                                                secondaryFileNames= cms.untracked.vstring())
0408                 filesFromOption(self)
0409             elif self._options.filetype == "DAT":
0410                 self.process.source=cms.Source("NewEventStreamFileReader",fileNames = cms.untracked.vstring())
0411                 filesFromOption(self)
0412             elif self._options.filetype == "LHE":
0413                 self.process.source=cms.Source("LHESource", fileNames = cms.untracked.vstring())
0414                 if self._options.filein.startswith("lhe:"):
0415                     #list the article directory automatically
0416                     args=self._options.filein.split(':')
0417                     article=args[1]
0418                     print('LHE input from article ',article)
0419                     location='/store/lhe/'
0420                     import os
0421                     textOfFiles=os.popen('cmsLHEtoEOSManager.py -l '+article)
0422                     for line in textOfFiles:
0423                         for fileName in [x for x in line.split() if '.lhe' in x]:
0424                             self.process.source.fileNames.append(location+article+'/'+fileName)
0425                     #check first if list of LHE files is loaded (not empty)
0426                     if len(line)<2:
0427                         print('Failed to load the LHE files, please check and try again.')
0428                         sys.exit(-1)
0429                     #Additional check to protect empty fileNames in process.source
0430                     if len(self.process.source.fileNames)==0:
0431                         print('process.source.fileNames is empty even though the line check passed')
0432                         sys.exit(-1)
0433                     if len(args)>2:
0434                         self.process.source.skipEvents = cms.untracked.uint32(int(args[2]))
0435                 else:
0436                     filesFromOption(self)
0437 
0438             elif self._options.filetype == "DQM":
0439                 self.process.source=cms.Source("DQMRootSource",
0440                                                fileNames = cms.untracked.vstring())
0441                 filesFromOption(self)
0442 
0443             elif self._options.filetype == "DQMDAQ":
0444                 # FIXME: how to configure it if there are no input files specified?
0445                 self.process.source=cms.Source("DQMStreamerReader")
0446 
0447 
0448             if ('HARVESTING' in self.stepMap.keys() or 'ALCAHARVEST' in self.stepMap.keys()) and (not self._options.filetype == "DQM"):
0449                 self.process.source.processingMode = cms.untracked.string("RunsAndLumis")
0450 
0451         if self._options.dasquery!='':
0452             self.process.source=cms.Source("PoolSource", fileNames = cms.untracked.vstring(),secondaryFileNames = cms.untracked.vstring())
0453             filesFromDASQuery(self._options.dasquery,self._options.dasoption,self.process.source)
0454 
0455             if ('HARVESTING' in self.stepMap.keys() or 'ALCAHARVEST' in self.stepMap.keys()) and (not self._options.filetype == "DQM"):
0456                 self.process.source.processingMode = cms.untracked.string("RunsAndLumis")
0457 
0458         ##drop LHEXMLStringProduct on input to save memory if appropriate
0459         if 'GEN' in self.stepMap.keys() and not self._options.filetype == "LHE":
0460             if self._options.inputCommands:
0461                 self._options.inputCommands+=',drop LHEXMLStringProduct_*_*_*,'
0462             else:
0463                 self._options.inputCommands='keep *, drop LHEXMLStringProduct_*_*_*,'
0464 
0465         if self.process.source and self._options.inputCommands and not self._options.filetype == "LHE":
0466             if not hasattr(self.process.source,'inputCommands'): self.process.source.inputCommands=cms.untracked.vstring()
0467             for command in self._options.inputCommands.split(','):
0468                 # remove whitespace around the keep/drop statements
0469                 command = command.strip()
0470                 if command=='': continue
0471                 self.process.source.inputCommands.append(command)
0472             if not self._options.dropDescendant:
0473                 self.process.source.dropDescendantsOfDroppedBranches = cms.untracked.bool(False)
0474 
0475         if self._options.lumiToProcess:
0476             import FWCore.PythonUtilities.LumiList as LumiList
0477             self.process.source.lumisToProcess = cms.untracked.VLuminosityBlockRange( LumiList.LumiList(self._options.lumiToProcess).getCMSSWString().split(',') )
0478 
0479         if 'GEN' in self.stepMap.keys() or 'LHE' in self.stepMap or (not self._options.filein and hasattr(self._options, "evt_type")):
0480             if self.process.source is None:
0481                 self.process.source=cms.Source("EmptySource")
0482 
0483         # modify source in case of run-dependent MC
0484         self.runsAndWeights=None
0485         if self._options.runsAndWeightsForMC or self._options.runsScenarioForMC :
0486             if not self._options.isMC :
0487                 raise Exception("options --runsAndWeightsForMC and --runsScenarioForMC are only valid for MC")
0488             if self._options.runsAndWeightsForMC:
0489                 self.runsAndWeights = eval(self._options.runsAndWeightsForMC)
0490             else:
0491                 from Configuration.StandardSequences.RunsAndWeights import RunsAndWeights
0492                 if isinstance(RunsAndWeights[self._options.runsScenarioForMC], str):
0493                     __import__(RunsAndWeights[self._options.runsScenarioForMC])
0494                     self.runsAndWeights = sys.modules[RunsAndWeights[self._options.runsScenarioForMC]].runProbabilityDistribution
0495                 else:
0496                     self.runsAndWeights = RunsAndWeights[self._options.runsScenarioForMC]
0497 
0498         if self.runsAndWeights:
0499             import SimGeneral.Configuration.ThrowAndSetRandomRun as ThrowAndSetRandomRun
0500             ThrowAndSetRandomRun.throwAndSetRandomRun(self.process.source,self.runsAndWeights)
0501             self.additionalCommands.append('import SimGeneral.Configuration.ThrowAndSetRandomRun as ThrowAndSetRandomRun')
0502             self.additionalCommands.append('ThrowAndSetRandomRun.throwAndSetRandomRun(process.source,%s)'%(self.runsAndWeights))
0503 
0504         # modify source in case of run-dependent MC (Run-3 method)
0505         self.runsAndWeightsInt=None
0506         if self._options.runsAndWeightsForMCIntegerWeights or self._options.runsScenarioForMCIntegerWeights:
0507             if not self._options.isMC :
0508                 raise Exception("options --runsAndWeightsForMCIntegerWeights and --runsScenarioForMCIntegerWeights are only valid for MC")
0509             if self._options.runsAndWeightsForMCIntegerWeights:
0510                 self.runsAndWeightsInt = eval(self._options.runsAndWeightsForMCIntegerWeights)
0511             else:
0512                 from Configuration.StandardSequences.RunsAndWeights import RunsAndWeights
0513                 if isinstance(RunsAndWeights[self._options.runsScenarioForMCIntegerWeights], str):
0514                     __import__(RunsAndWeights[self._options.runsScenarioForMCIntegerWeights])
0515                     self.runsAndWeightsInt = sys.modules[RunsAndWeights[self._options.runsScenarioForMCIntegerWeights]].runProbabilityDistribution
0516                 else:
0517                     self.runsAndWeightsInt = RunsAndWeights[self._options.runsScenarioForMCIntegerWeights]
0518 
0519         if self.runsAndWeightsInt:
0520             if not self._options.relval:
0521                 raise Exception("--relval option required when using --runsAndWeightsForMCIntegerWeights or --runsScenarioForMCIntegerWeights")
0522             if 'DATAMIX' in self._options.step:
0523                 from SimGeneral.Configuration.LumiToRun import lumi_to_run
0524                 total_events, events_per_job  = self._options.relval.split(',')
0525                 lumi_to_run_mapping = lumi_to_run(self.runsAndWeightsInt, int(total_events), int(events_per_job))
0526                 self.additionalCommands.append("process.source.firstLuminosityBlockForEachRun = cms.untracked.VLuminosityBlockID(*[cms.LuminosityBlockID(x,y) for x,y in " + str(lumi_to_run_mapping) + "])")
0527 
0528         return
0529 
0530     def addOutput(self):
0531         """ Add output module to the process """
0532         result=""
0533         if self._options.outputDefinition:
0534             if self._options.datatier:
0535                 print("--datatier & --eventcontent options ignored")
0536 
0537             #new output convention with a list of dict
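            # Illustrative example of the expected syntax (a sketch; tiers and filter
            # names are placeholders):
            #   --output '[{"e":"RECO","t":"RECO","f":"myFilter"},{"e":"AOD","t":"AOD"}]'
            # Each dict is consumed key by key through anyOf() below.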
0538             outList = eval(self._options.outputDefinition)
0539             for (id,outDefDict) in enumerate(outList):
0540                 outDefDictStr=outDefDict.__str__()
0541                 if not isinstance(outDefDict,dict):
0542                     raise Exception("--output needs to be passed a list of dicts; "+self._options.outputDefinition+" is invalid")
0543                 #requires option: tier
0544                 theTier=anyOf(['t','tier','dataTier'],outDefDict)
0545                 #optional option: eventcontent, filtername, selectEvents, moduleLabel, filename
0546                 ## event content
0547                 theStreamType=anyOf(['e','ec','eventContent','streamType'],outDefDict,theTier)
0548                 theFilterName=anyOf(['f','ftN','filterName'],outDefDict,'')
0549                 theSelectEvent=anyOf(['s','sE','selectEvents'],outDefDict,'')
0550                 theModuleLabel=anyOf(['l','mL','moduleLabel'],outDefDict,'')
0551                 theExtraOutputCommands=anyOf(['o','oC','outputCommands'],outDefDict,'')
0552                 # module label has a particular role
0553                 if not theModuleLabel:
0554                     tryNames=[theStreamType.replace(theTier.replace('-',''),'')+theTier.replace('-','')+'output',
0555                               theStreamType.replace(theTier.replace('-',''),'')+theTier.replace('-','')+theFilterName+'output',
0556                               theStreamType.replace(theTier.replace('-',''),'')+theTier.replace('-','')+theFilterName+theSelectEvent.split(',')[0].replace(':','for').replace(' ','')+'output'
0557                               ]
0558                     for name in tryNames:
0559                         if not hasattr(self.process,name):
0560                             theModuleLabel=name
0561                             break
0562                 if not theModuleLabel:
0563                     raise Exception("cannot find a module label for specification: "+outDefDictStr)
0564                 if id==0:
0565                     defaultFileName=self._options.outfile_name
0566                 else:
0567                     defaultFileName=self._options.outfile_name.replace('.root','_in'+theTier+'.root')
0568 
0569                 theFileName=self._options.dirout+anyOf(['fn','fileName'],outDefDict,defaultFileName)
0570                 if not theFileName.endswith('.root'):
0571                     theFileName+='.root'
0572 
0573                 if len(outDefDict):
0574                     raise Exception("unused keys from --output options: "+','.join(outDefDict.keys()))
0575                 if theStreamType=='DQMIO': theStreamType='DQM'
0576                 if theStreamType=='ALL':
0577                     theEventContent = cms.PSet(outputCommands = cms.untracked.vstring('keep *'))
0578                 else:
0579                     theEventContent = getattr(self.process, theStreamType+"EventContent")
0580 
0581 
0582                 addAlCaSelects=False
0583                 if theStreamType=='ALCARECO' and not theFilterName:
0584                     theFilterName='StreamALCACombined'
0585                     addAlCaSelects=True
0586 
0587                 CppType='PoolOutputModule'
0588                 if self._options.timeoutOutput:
0589                     CppType='TimeoutPoolOutputModule'
0590                 if theStreamType=='DQM' and theTier=='DQMIO': CppType='DQMRootOutputModule'
0591                 output = cms.OutputModule(CppType,
0592                                           theEventContent.clone(),
0593                                           fileName = cms.untracked.string(theFileName),
0594                                           dataset = cms.untracked.PSet(
0595                                              dataTier = cms.untracked.string(theTier),
0596                                              filterName = cms.untracked.string(theFilterName))
0597                                           )
0598                 if not theSelectEvent and hasattr(self.process,'generation_step') and theStreamType!='LHE':
0599                     output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring('generation_step'))
0600                 if not theSelectEvent and hasattr(self.process,'filtering_step'):
0601                     output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring('filtering_step'))
0602                 if theSelectEvent:
0603                     output.SelectEvents =cms.untracked.PSet(SelectEvents = cms.vstring(theSelectEvent))
0604 
0605                 if addAlCaSelects:
0606                     if not hasattr(output,'SelectEvents'):
0607                         output.SelectEvents=cms.untracked.PSet(SelectEvents=cms.vstring())
0608                     for alca in self.AlCaPaths:
0609                         output.SelectEvents.SelectEvents.extend(getattr(self.process,'OutALCARECO'+alca).SelectEvents.SelectEvents)
0610 
0611 
0612                 if hasattr(self.process,theModuleLabel):
0613                     raise Exception("the current process already has a module "+theModuleLabel+" defined")
0614                 #print "creating output module ",theModuleLabel
0615                 setattr(self.process,theModuleLabel,output)
0616                 outputModule=getattr(self.process,theModuleLabel)
0617                 setattr(self.process,theModuleLabel+'_step',cms.EndPath(outputModule))
0618                 path=getattr(self.process,theModuleLabel+'_step')
0619                 self.schedule.append(path)
0620 
0621                 if not self._options.inlineEventContent and hasattr(self.process,theStreamType+"EventContent"):
0622                     def doNotInlineEventContent(instance,label = "cms.untracked.vstring(process."+theStreamType+"EventContent.outputCommands)"):
0623                         return label
0624                     outputModule.outputCommands.__dict__["dumpPython"] = doNotInlineEventContent
0625                 if theExtraOutputCommands:
0626                     if not isinstance(theExtraOutputCommands,list):
0627                         raise Exception("extra output commands in --output must be a list of strings")
0628                     if hasattr(self.process,theStreamType+"EventContent"):
0629                         self.executeAndRemember('process.%s.outputCommands.extend(%s)'%(theModuleLabel,theExtraOutputCommands))
0630                     else:
0631                         outputModule.outputCommands.extend(theExtraOutputCommands)
0632 
0633                 result+="\nprocess."+theModuleLabel+" = "+outputModule.dumpPython()
0634 
0635             ##ends the --output options model
0636             return result
0637 
0638         streamTypes=self._options.eventcontent.split(',')
0639         tiers=self._options.datatier.split(',')
0640         if not self._options.outputDefinition and len(streamTypes)!=len(tiers):
0641             raise Exception("number of event content arguments does not match number of datatier arguments")
0642 
0643         # if the only step is alca we don't need to put in an output
0644         if self._options.step.split(',')[0].split(':')[0] == 'ALCA':
0645             return "\n"
0646 
0647         for i,(streamType,tier) in enumerate(zip(streamTypes,tiers)):
0648             if streamType=='': continue
0649             if streamType == 'ALCARECO' and not 'ALCAPRODUCER' in self._options.step: continue
0650             if streamType=='DQMIO': streamType='DQM'
0651             eventContent=streamType
0652             ## override streamType to eventContent in the NANOEDM cases
0653             if streamType == "NANOEDMAOD" :
0654                 eventContent = "NANOAOD"
0655             elif streamType == "NANOEDMAODSIM" :
0656                 eventContent = "NANOAODSIM"
0657             theEventContent = getattr(self.process, eventContent+"EventContent")
0658             if i==0:
0659                 theFileName=self._options.outfile_name
0660                 theFilterName=self._options.filtername
0661             else:
0662                 theFileName=self._options.outfile_name.replace('.root','_in'+streamType+'.root')
0663                 theFilterName=self._options.filtername
0664             CppType='PoolOutputModule'
0665             if self._options.timeoutOutput:
0666                 CppType='TimeoutPoolOutputModule'
0667             if streamType=='DQM' and tier=='DQMIO': CppType='DQMRootOutputModule'
0668             if "NANOAOD" in streamType : CppType='NanoAODOutputModule'
0669             output = cms.OutputModule(CppType,
0670                                       theEventContent,
0671                                       fileName = cms.untracked.string(theFileName),
0672                                       dataset = cms.untracked.PSet(dataTier = cms.untracked.string(tier),
0673                                                                    filterName = cms.untracked.string(theFilterName)
0674                                                                    )
0675                                       )
0676             if hasattr(self.process,"generation_step") and streamType!='LHE':
0677                 output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring('generation_step'))
0678             if hasattr(self.process,"filtering_step"):
0679                 output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring('filtering_step'))
0680 
0681             if streamType=='ALCARECO':
0682                 output.dataset.filterName = cms.untracked.string('StreamALCACombined')
0683 
0684             if "MINIAOD" in streamType:
0685                 from PhysicsTools.PatAlgos.slimming.miniAOD_tools import miniAOD_customizeOutput
0686                 miniAOD_customizeOutput(output)
0687 
0688             outputModuleName=streamType+'output'
0689             setattr(self.process,outputModuleName,output)
0690             outputModule=getattr(self.process,outputModuleName)
0691             setattr(self.process,outputModuleName+'_step',cms.EndPath(outputModule))
0692             path=getattr(self.process,outputModuleName+'_step')
0693             self.schedule.append(path)
0694 
0695             if self._options.outputCommands and streamType!='DQM':
0696                 for evct in self._options.outputCommands.split(','):
0697                     if not evct: continue
0698                     self.executeAndRemember("process.%s.outputCommands.append('%s')"%(outputModuleName,evct.strip()))
0699 
0700             if not self._options.inlineEventContent:
0701                 tmpstreamType=streamType
0702                 if "NANOEDM" in tmpstreamType :
0703                     tmpstreamType=tmpstreamType.replace("NANOEDM","NANO")
0704                 def doNotInlineEventContent(instance,label = "process."+tmpstreamType+"EventContent.outputCommands"):
0705                     return label
0706                 outputModule.outputCommands.__dict__["dumpPython"] = doNotInlineEventContent
0707 
0708             result+="\nprocess."+outputModuleName+" = "+outputModule.dumpPython()
0709 
0710         return result
0711 
0712     def addStandardSequences(self):
0713         """
0714         Add selected standard sequences to the process
0715         """
0716         # load the pile up file
0717         if self._options.pileup:
0718             pileupSpec=self._options.pileup.split(',')[0]
0719 
0720             # Does the requested pile-up scenario exist?
0721             from Configuration.StandardSequences.Mixing import Mixing,defineMixing
0722             if not pileupSpec in Mixing and '.' not in pileupSpec and 'file:' not in pileupSpec:
0723                 message = pileupSpec+' is not a known mixing scenario:\n available are: '+'\n'.join(Mixing.keys())
0724                 raise Exception(message)
0725 
0726             # Put mixing parameters in a dictionary
0727             if '.' in pileupSpec:
0728                 mixingDict={'file':pileupSpec}
0729             elif pileupSpec.startswith('file:'):
0730                 mixingDict={'file':pileupSpec[5:]}
0731             else:
0732                 import copy
0733                 mixingDict=copy.copy(Mixing[pileupSpec])
0734             if len(self._options.pileup.split(','))>1:
0735                 mixingDict.update(eval(self._options.pileup[self._options.pileup.find(',')+1:]))
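                # Illustrative syntax (scenario name and keys are placeholders):
                #   --pileup MyScenario,{"N":20}
                # keeps "MyScenario" as the mixing key and merges the trailing dict into mixingDict.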
0736 
0737             # Load the pu cfg file corresponding to the requested pu scenario
0738             if 'file:' in pileupSpec:
0739                 #the file is local
0740                 self.process.load(mixingDict['file'])
0741                 print("inlining mixing module configuration")
0742                 self._options.inlineObjets+=',mix'
0743             else:
0744                 self.loadAndRemember(mixingDict['file'])
0745 
0746             mixingDict.pop('file')
0747             if not "DATAMIX" in self.stepMap.keys(): # when DATAMIX is present, pileup_input refers to pre-mixed GEN-RAW
0748                 if self._options.pileup_input:
0749                     if self._options.pileup_input.startswith('dbs:') or self._options.pileup_input.startswith('das:'):
0750                         mixingDict['F']=filesFromDASQuery('file dataset = %s'%(self._options.pileup_input[4:],),self._options.pileup_dasoption)[0]
0751                     elif self._options.pileup_input.startswith("filelist:"):
0752                         mixingDict['F']=(filesFromList(self._options.pileup_input[9:]))[0]
0753                     else:
0754                         mixingDict['F']=self._options.pileup_input.split(',')
0755                 specialization=defineMixing(mixingDict)
0756                 for command in specialization:
0757                     self.executeAndRemember(command)
0758                 if len(mixingDict)!=0:
0759                     raise Exception('unused mixing specification: '+mixingDict.keys().__str__())
0760 
0761 
0762         # load the geometry file
0763         try:
0764             if len(self.stepMap):
0765                 self.loadAndRemember(self.GeometryCFF)
0766                 if ('SIM' in self.stepMap or 'reSIM' in self.stepMap) and not self._options.fast:
0767                     self.loadAndRemember(self.SimGeometryCFF)
0768                     if self.geometryDBLabel:
0769                         self.executeAndRemember('if hasattr(process, "XMLFromDBSource"): process.XMLFromDBSource.label="%s"'%(self.geometryDBLabel))
0770                         self.executeAndRemember('if hasattr(process, "DDDetectorESProducerFromDB"): process.DDDetectorESProducerFromDB.label="%s"'%(self.geometryDBLabel))
0771 
0772         except ImportError:
0773             print("Geometry option",self._options.geometry,"unknown.")
0774             raise
0775 
0776         if len(self.stepMap):
0777             self.loadAndRemember(self.magFieldCFF)
0778 
0779         for stepName in self.stepKeys:
0780             stepSpec = self.stepMap[stepName]
0781             print("Step:", stepName,"Spec:",stepSpec)
0782             if stepName.startswith('re'):
0783                 ##add the corresponding input content
0784                 if stepName[2:] not in self._options.donotDropOnInput:
0785                     self._options.inputEventContent='%s,%s'%(stepName.upper(),self._options.inputEventContent)
0786                 stepName=stepName[2:]
0787             if stepSpec=="":
0788                 getattr(self,"prepare_"+stepName)(sequence = getattr(self,stepName+"DefaultSeq"))
0789             elif isinstance(stepSpec, list):
0790                 getattr(self,"prepare_"+stepName)(sequence = '+'.join(stepSpec))
0791             elif isinstance(stepSpec, tuple):
0792                 getattr(self,"prepare_"+stepName)(sequence = ','.join([stepSpec[1],'+'.join(stepSpec[0])]))
0793             else:
0794                 raise ValueError("Invalid step definition")
0795 
0796         if self._options.restoreRNDSeeds!=False:
0797             #it is either True, or a process name
0798             if self._options.restoreRNDSeeds==True:
0799                 self.executeAndRemember('process.RandomNumberGeneratorService.restoreStateLabel=cms.untracked.string("randomEngineStateProducer")')
0800             else:
0801                 self.executeAndRemember('process.RandomNumberGeneratorService.restoreStateTag=cms.untracked.InputTag("randomEngineStateProducer","","%s")'%(self._options.restoreRNDSeeds))
0802             if self._options.inputEventContent or self._options.inputCommands:
0803                 if self._options.inputCommands:
0804                     self._options.inputCommands+='keep *_randomEngineStateProducer_*_*,'
0805                 else:
0806                     self._options.inputCommands='keep *_randomEngineStateProducer_*_*,'
0807 
0808 
0809     def completeInputCommand(self):
0810         if self._options.inputEventContent:
0811             import copy
0812             def dropSecondDropStar(iec):
0813                 #drop all but the first occurrence of 'drop *' in the list
0814                 count=0
0815                 for item in iec:
0816                     if item=='drop *':
0817                         if count!=0:
0818                             iec.remove(item)
0819                         count+=1
0820 
0821             ## allow comma separated input eventcontent
0822             if not hasattr(self.process.source,'inputCommands'): self.process.source.inputCommands=cms.untracked.vstring()
0823             for evct in self._options.inputEventContent.split(','):
0824                 if evct=='': continue
0825                 theEventContent = getattr(self.process, evct+"EventContent")
0826                 if hasattr(theEventContent,'outputCommands'):
0827                     self.process.source.inputCommands.extend(copy.copy(theEventContent.outputCommands))
0828                 if hasattr(theEventContent,'inputCommands'):
0829                     self.process.source.inputCommands.extend(copy.copy(theEventContent.inputCommands))
0830 
0831             dropSecondDropStar(self.process.source.inputCommands)
0832 
0833             if not self._options.dropDescendant:
0834                 self.process.source.dropDescendantsOfDroppedBranches = cms.untracked.bool(False)
0835 
0836 
0837         return
0838 
0839     def addConditions(self):
0840         """Add conditions to the process"""
0841         if not self._options.conditions: return
0842 
0843         if 'FrontierConditions_GlobalTag' in self._options.conditions:
0844             print('using FrontierConditions_GlobalTag in --conditions is not necessary anymore and will be deprecated soon. please update your command line')
0845             self._options.conditions = self._options.conditions.replace("FrontierConditions_GlobalTag,",'')
0846 
0847         self.loadAndRemember(self.ConditionsDefaultCFF)
0848         from Configuration.AlCa.GlobalTag import GlobalTag
0849         self.process.GlobalTag = GlobalTag(self.process.GlobalTag, self._options.conditions, self._options.custom_conditions)
0850         self.additionalCommands.append('from Configuration.AlCa.GlobalTag import GlobalTag')
0851         self.additionalCommands.append('process.GlobalTag = GlobalTag(process.GlobalTag, %s, %s)' % (repr(self._options.conditions), repr(self._options.custom_conditions)))
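        # Illustrative example (the tag is a placeholder): --conditions auto:run2_data
        # lets the GlobalTag helper resolve the "auto:" key to a concrete global tag;
        # a fully specified global tag name can be passed instead.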
0852 
0853 
0854     def addCustomise(self,unsch=0):
0855         """Include the customise code """
0856 
0857         custOpt=[]
0858         if unsch==0:
0859             for c in self._options.customisation_file:
0860                 custOpt.extend(c.split(","))
0861         else:
0862             for c in self._options.customisation_file_unsch:
0863                 custOpt.extend(c.split(","))
0864 
0865         custMap=DictTypes.SortedKeysDict()
0866         for opt in custOpt:
0867             if opt=='': continue
0868             if opt.count('.')>1:
0869                 raise Exception("more than one '.' in the specification: "+opt)
0870             fileName=opt.split('.')[0]
0871             if opt.count('.')==0:   rest='customise'
0872             else:
0873                 rest=opt.split('.')[1]
0874                 if rest=='py': rest='customise' #catch the case of --customise file.py
0875 
0876             if fileName in custMap:
0877                 custMap[fileName].extend(rest.split('+'))
0878             else:
0879                 custMap[fileName]=rest.split('+')
0880 
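        # Illustrative specification (package and function names are hypothetical):
        #   --customise SomePkg/SomeSub/myCustoms.customiseForStudy
        # imports customiseForStudy from SomePkg/SomeSub/myCustoms.py and applies it to the
        # process; several functions from the same file can be chained with '+'.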
0881         if len(custMap)==0:
0882             final_snippet='\n'
0883         else:
0884             final_snippet='\n# customisation of the process.\n'
0885 
0886         allFcn=[]
0887         for opt in custMap:
0888             allFcn.extend(custMap[opt])
0889         for fcn in allFcn:
0890             if allFcn.count(fcn)!=1:
0891                 raise Exception("cannot specify "+fcn+" twice as a customisation method")
0892 
0893         for f in custMap:
0894             # let python search for that package and do syntax checking at the same time
0895             packageName = f.replace(".py","").replace("/",".")
0896             __import__(packageName)
0897             package = sys.modules[packageName]
0898 
0899             # now ask the package for its definition and pick .py instead of .pyc
0900             customiseFile = re.sub(r'\.pyc$', '.py', package.__file__)
0901 
0902             final_snippet+='\n# Automatic addition of the customisation function from '+packageName+'\n'
0903             if self._options.inline_custom:
0904                 for line in open(customiseFile,'r'):  # the 'file' builtin does not exist in Python 3
0905                     if "import FWCore.ParameterSet.Config" in line:
0906                         continue
0907                     final_snippet += line
0908             else:
0909                 final_snippet += 'from %s import %s \n'%(packageName,','.join(custMap[f]))
0910             for fcn in custMap[f]:
0911                 print("customising the process with",fcn,"from",f)
0912                 if not hasattr(package,fcn):
0913                     #bound to fail at run time
0914                     raise Exception("config "+f+" has no function "+fcn)
0915                 #execute the command
0916                 self.process=getattr(package,fcn)(self.process)
0917                 #and print it in the configuration
0918                 final_snippet += "\n#call to customisation function "+fcn+" imported from "+packageName
0919                 final_snippet += "\nprocess = %s(process)\n"%(fcn,)
0920 
0921         if len(custMap)!=0:
0922             final_snippet += '\n# End of customisation functions\n'
0923 
0924         ### return the customisation snippet to be appended to the dumped configuration
0925         return final_snippet
0926 
0927     def addCustomiseCmdLine(self):
0928         final_snippet='\n# Customisation from command line\n'
0929         if self._options.customise_commands:
0930             import string
0931             for com in self._options.customise_commands.split('\\n'):
0932                 com=com.lstrip()
0933                 self.executeAndRemember(com)
0934                 final_snippet +='\n'+com
0935 
0936         return final_snippet
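        # Illustrative example (the statements are placeholders):
        #   --customise_commands 'process.maxEvents.input=10\nprocess.source.skipEvents=cms.untracked.uint32(5)'
        # executes each statement (separated by a literal "\n") on the process and echoes it in the dumped config.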
0937 
0938     #----------------------------------------------------------------------------
0939     # here the methods to define the python includes for each step or
0940     # conditions
0941     #----------------------------------------------------------------------------
0942     def define_Configs(self):
0943         if len(self.stepMap):
0944             self.loadAndRemember('Configuration/StandardSequences/Services_cff')
0945         if self._options.particleTable not in defaultOptions.particleTableList:
0946             print('Invalid particle table provided. Options are:')
0947             print(defaultOptions.particleTableList)
0948             sys.exit(-1)
0949         else:
0950             if len(self.stepMap):
0951                 self.loadAndRemember('SimGeneral.HepPDTESSource.'+self._options.particleTable+'_cfi')
0952 
0953         self.loadAndRemember('FWCore/MessageService/MessageLogger_cfi')
0954 
0955         self.ALCADefaultCFF="Configuration/StandardSequences/AlCaRecoStreams_cff"
0956         self.GENDefaultCFF="Configuration/StandardSequences/Generator_cff"
0957         self.SIMDefaultCFF="Configuration/StandardSequences/Sim_cff"
0958         self.DIGIDefaultCFF="Configuration/StandardSequences/Digi_cff"
0959         self.DIGI2RAWDefaultCFF="Configuration/StandardSequences/DigiToRaw_cff"
0960         self.L1EMDefaultCFF='Configuration/StandardSequences/SimL1Emulator_cff'
0961         self.L1MENUDefaultCFF="Configuration/StandardSequences/L1TriggerDefaultMenu_cff"
0962         self.HLTDefaultCFF="Configuration/StandardSequences/HLTtable_cff"
0963         self.RAW2DIGIDefaultCFF="Configuration/StandardSequences/RawToDigi_Data_cff"
0964         if self._options.isRepacked: self.RAW2DIGIDefaultCFF="Configuration/StandardSequences/RawToDigi_DataMapper_cff"
0965         self.L1RecoDefaultCFF="Configuration/StandardSequences/L1Reco_cff"
0966         self.L1TrackTriggerDefaultCFF="Configuration/StandardSequences/L1TrackTrigger_cff"
0967         self.RECODefaultCFF="Configuration/StandardSequences/Reconstruction_Data_cff"
0968         self.RECOSIMDefaultCFF="Configuration/StandardSequences/RecoSim_cff"
0969         self.PATDefaultCFF="Configuration/StandardSequences/PAT_cff"
0970         self.NANODefaultCFF="PhysicsTools/NanoAOD/nano_cff"
0971         self.NANOGENDefaultCFF="PhysicsTools/NanoAOD/nanogen_cff"
0972         self.SKIMDefaultCFF="Configuration/StandardSequences/Skims_cff"
0973         self.POSTRECODefaultCFF="Configuration/StandardSequences/PostRecoGenerator_cff"
0974         self.VALIDATIONDefaultCFF="Configuration/StandardSequences/Validation_cff"
0975         self.L1HwValDefaultCFF = "Configuration/StandardSequences/L1HwVal_cff"
0976         self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOffline_cff"
0977         self.HARVESTINGDefaultCFF="Configuration/StandardSequences/Harvesting_cff"
0978         self.ALCAHARVESTDefaultCFF="Configuration/StandardSequences/AlCaHarvesting_cff"
0979         self.ENDJOBDefaultCFF="Configuration/StandardSequences/EndOfProcess_cff"
0980         self.ConditionsDefaultCFF = "Configuration/StandardSequences/FrontierConditions_GlobalTag_cff"
0981         self.CFWRITERDefaultCFF = "Configuration/StandardSequences/CrossingFrameWriter_cff"
0982         self.REPACKDefaultCFF="Configuration/StandardSequences/DigiToRaw_Repack_cff"
0983 
0984         if "DATAMIX" in self.stepMap.keys():
0985             self.DATAMIXDefaultCFF="Configuration/StandardSequences/DataMixer"+self._options.datamix+"_cff"
0986             self.DIGIDefaultCFF="Configuration/StandardSequences/DigiDM_cff"
0987             self.DIGI2RAWDefaultCFF="Configuration/StandardSequences/DigiToRawDM_cff"
0988             self.L1EMDefaultCFF='Configuration/StandardSequences/SimL1EmulatorDM_cff'
0989 
0990         self.ALCADefaultSeq=None
0991         self.LHEDefaultSeq='externalLHEProducer'
0992         self.GENDefaultSeq='pgen'
0993         self.SIMDefaultSeq='psim'
0994         self.DIGIDefaultSeq='pdigi'
0995         self.DATAMIXDefaultSeq=None
0996         self.DIGI2RAWDefaultSeq='DigiToRaw'
0997         self.HLTDefaultSeq='GRun'
0998         self.L1DefaultSeq=None
0999         self.L1REPACKDefaultSeq='GT'
1000         self.HARVESTINGDefaultSeq=None
1001         self.ALCAHARVESTDefaultSeq=None
1002         self.CFWRITERDefaultSeq=None
1003         self.RAW2DIGIDefaultSeq='RawToDigi'
1004         self.L1RecoDefaultSeq='L1Reco'
1005         self.L1TrackTriggerDefaultSeq='L1TrackTrigger'
1006         if self._options.fast or ('RAW2DIGI' in self.stepMap and 'RECO' in self.stepMap):
1007             self.RECODefaultSeq='reconstruction'
1008         else:
1009             self.RECODefaultSeq='reconstruction_fromRECO'
1010         self.RECOSIMDefaultSeq='recosim'
1011         self.POSTRECODefaultSeq=None
1012         self.L1HwValDefaultSeq='L1HwVal'
1013         self.DQMDefaultSeq='DQMOffline'
1014         self.VALIDATIONDefaultSeq=''
1015         self.ENDJOBDefaultSeq='endOfProcess'
1016         self.REPACKDefaultSeq='DigiToRawRepack'
1017         self.PATDefaultSeq='miniAOD'
1018         self.PATGENDefaultSeq='miniGEN'
1019         #TODO: Check based on file input
1020         self.NANOGENDefaultSeq='nanogenSequence'
1021         self.NANODefaultSeq='nanoSequence'
1022 
1023         self.EVTCONTDefaultCFF="Configuration/EventContent/EventContent_cff"
1024 
1025         if not self._options.beamspot:
1026             self._options.beamspot=VtxSmearedDefaultKey
1027 
1028         # if it's MC then change the raw2digi
1029         if self._options.isMC==True:
1030             self.RAW2DIGIDefaultCFF="Configuration/StandardSequences/RawToDigi_cff"
1031             self.RECODefaultCFF="Configuration/StandardSequences/Reconstruction_cff"
1032             self.PATDefaultCFF="Configuration/StandardSequences/PATMC_cff"
1033             self.PATGENDefaultCFF="Configuration/StandardSequences/PATGEN_cff"
1034             self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineMC_cff"
1035             self.ALCADefaultCFF="Configuration/StandardSequences/AlCaRecoStreamsMC_cff"
1036             self.NANODefaultSeq='nanoSequenceMC'
1037         else:
1038             self._options.beamspot = None
1039 
1040         #patch for reGEN, kept for backward compatibility
1041         if 'reGEN' in self.stepMap:
1042             self.GENDefaultSeq='fixGenInfo'
1043 
1044         if self._options.scenario=='cosmics':
1045             self._options.pileup='Cosmics'
1046             self.DIGIDefaultCFF="Configuration/StandardSequences/DigiCosmics_cff"
1047             self.RECODefaultCFF="Configuration/StandardSequences/ReconstructionCosmics_cff"
1048             self.SKIMDefaultCFF="Configuration/StandardSequences/SkimsCosmics_cff"
1049             self.EVTCONTDefaultCFF="Configuration/EventContent/EventContentCosmics_cff"
1050             self.VALIDATIONDefaultCFF="Configuration/StandardSequences/ValidationCosmics_cff"
1051             self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineCosmics_cff"
1052             if self._options.isMC==True:
1053                 self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineCosmicsMC_cff"
1054             self.HARVESTINGDefaultCFF="Configuration/StandardSequences/HarvestingCosmics_cff"
1055             self.RECODefaultSeq='reconstructionCosmics'
1056             self.DQMDefaultSeq='DQMOfflineCosmics'
1057 
1058         if self._options.scenario=='HeavyIons':
1059             if not self._options.beamspot:
1060                 self._options.beamspot=VtxSmearedHIDefaultKey
1061             self.HLTDefaultSeq = 'HIon'
1062             self.VALIDATIONDefaultCFF="Configuration/StandardSequences/ValidationHeavyIons_cff"
1063             self.VALIDATIONDefaultSeq=''
1064             self.EVTCONTDefaultCFF="Configuration/EventContent/EventContentHeavyIons_cff"
1065             self.RECODefaultCFF="Configuration/StandardSequences/ReconstructionHeavyIons_cff"
1066             self.RECODefaultSeq='reconstructionHeavyIons'
1067             self.ALCADefaultCFF = "Configuration/StandardSequences/AlCaRecoStreamsHeavyIons_cff"
1068             self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineHeavyIons_cff"
1069             self.DQMDefaultSeq='DQMOfflineHeavyIons'
1070             self.SKIMDefaultCFF="Configuration/StandardSequences/SkimsHeavyIons_cff"
1071             self.HARVESTINGDefaultCFF="Configuration/StandardSequences/HarvestingHeavyIons_cff"
1072             if self._options.isMC==True:
1073                 self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineHeavyIonsMC_cff"
1074 
1075 
1076         self.RAW2RECODefaultSeq=','.join([self.RAW2DIGIDefaultSeq,self.RECODefaultSeq])
1077 
1078         self.USERDefaultSeq='user'
1079         self.USERDefaultCFF=None
1080 
1081         # the magnetic field
1082         self.magFieldCFF = 'Configuration/StandardSequences/MagneticField_'+self._options.magField.replace('.','')+'_cff'
1083         self.magFieldCFF = self.magFieldCFF.replace("__",'_')
1084 
1085         # the geometry
1086         self.GeometryCFF='Configuration/StandardSequences/GeometryRecoDB_cff'
1087         self.geometryDBLabel=None
1088         simGeometry=''
1089         if self._options.fast:
1090             if 'start' in self._options.conditions.lower():
1091                 self.GeometryCFF='FastSimulation/Configuration/Geometries_START_cff'
1092             else:
1093                 self.GeometryCFF='FastSimulation/Configuration/Geometries_MC_cff'
1094         else:
1095             def inGeometryKeys(opt):
1096                 from Configuration.StandardSequences.GeometryConf import GeometryConf
1097                 if opt in GeometryConf:
1098                     return GeometryConf[opt]
1099                 else:
1100                     return opt
1101 
1102             geoms=self._options.geometry.split(',')
1103             if len(geoms)==1: geoms=inGeometryKeys(geoms[0]).split(',')
1104             if len(geoms)==2:
1105                 #may specify the reco geometry
1106                 if '/' in geoms[1] or '_cff' in geoms[1]:
1107                     self.GeometryCFF=geoms[1]
1108                 else:
1109                     self.GeometryCFF='Configuration/Geometry/Geometry'+geoms[1]+'_cff'
1110 
1111             if (geoms[0].startswith('DB:')):
1112                 self.SimGeometryCFF='Configuration/StandardSequences/GeometrySimDB_cff'
1113                 self.geometryDBLabel=geoms[0][3:]
1114                 print("with DB:")
1115             else:
1116                 if '/' in geoms[0] or '_cff' in geoms[0]:
1117                     self.SimGeometryCFF=geoms[0]
1118                 else:
1119                     simGeometry=geoms[0]
1120                     if self._options.gflash==True:
1121                         self.SimGeometryCFF='Configuration/Geometry/Geometry'+geoms[0]+'GFlash_cff'
1122                     else:
1123                         self.SimGeometryCFF='Configuration/Geometry/Geometry'+geoms[0]+'_cff'
1124 
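             # Editor's note (illustrative summary, not part of the original file): in the
             # non-FastSim branch above, --geometry is either a single key expanded through
             # GeometryConf (possibly into "<simGeom>,<recoGeom>") or an explicit
             # comma-separated pair. Each token may be a literal cff (it contains '/' or
             # '_cff') or a short name mapped to Configuration/Geometry/Geometry<name>_cff;
             # a first token of the form "DB:<label>" (e.g. "DB:Extended") switches the
             # simulation geometry to GeometrySimDB_cff with geometryDBLabel set to <label>.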
1125         # synchronize the geometry configuration and the FullSimulation sequence to be used
1126         if simGeometry not in defaultOptions.geometryExtendedOptions:
1127             self.SIMDefaultCFF="Configuration/StandardSequences/SimIdeal_cff"
1128 
1129         if self._options.scenario=='nocoll' or self._options.scenario=='cosmics':
1130             self.SIMDefaultCFF="Configuration/StandardSequences/SimNOBEAM_cff"
1131             self._options.beamspot='NoSmear'
1132 
1133         # fastsim requires some changes to the default cff files and sequences
1134         if self._options.fast:
1135             self.SIMDefaultCFF = 'FastSimulation.Configuration.SimIdeal_cff'
1136             self.RECODefaultCFF= 'FastSimulation.Configuration.Reconstruction_AftMix_cff'
1137             self.RECOBEFMIXDefaultCFF = 'FastSimulation.Configuration.Reconstruction_BefMix_cff'
1138             self.RECOBEFMIXDefaultSeq = 'reconstruction_befmix'
1139             self.NANODefaultSeq = 'nanoSequenceFS'
1140             self.DQMOFFLINEDefaultCFF="DQMOffline.Configuration.DQMOfflineFS_cff"
1141 
1142         # Mixing
1143         if self._options.pileup=='default':
1144             from Configuration.StandardSequences.Mixing import MixingDefaultKey
1145             self._options.pileup=MixingDefaultKey
1146 
1147 
1148         #not driven by a default cff anymore
1149         if self._options.isData:
1150             self._options.pileup=None
1151 
1152 
1153         self.REDIGIDefaultSeq=self.DIGIDefaultSeq
1154 
1155     # for alca, skims, etc
1156     def addExtraStream(self, name, stream, workflow='full'):
1157         # define output module and go from there
1158         output = cms.OutputModule("PoolOutputModule")
1159         if stream.selectEvents.parameters_().__len__()!=0:
1160             output.SelectEvents = stream.selectEvents
1161         else:
1162             output.SelectEvents = cms.untracked.PSet()
1163             output.SelectEvents.SelectEvents=cms.vstring()
1164             if isinstance(stream.paths,tuple):
1165                 for path in stream.paths:
1166                     output.SelectEvents.SelectEvents.append(path.label())
1167             else:
1168                 output.SelectEvents.SelectEvents.append(stream.paths.label())
1169 
1170 
1171 
1172         if isinstance(stream.content,str):
1173             evtPset=getattr(self.process,stream.content)
1174             for p in evtPset.parameters_():
1175                 setattr(output,p,getattr(evtPset,p))
1176             if not self._options.inlineEventContent:
1177                 def doNotInlineEventContent(instance,label = "process."+stream.content+".outputCommands"):
1178                     return label
1179                 output.outputCommands.__dict__["dumpPython"] = doNotInlineEventContent
1180         else:
1181             output.outputCommands = stream.content
1182 
1183 
1184         output.fileName = cms.untracked.string(self._options.dirout+stream.name+'.root')
1185 
1186         output.dataset  = cms.untracked.PSet( dataTier = stream.dataTier,
1187                                               filterName = cms.untracked.string(stream.name))
1188 
1189         if self._options.filtername:
1190             output.dataset.filterName= cms.untracked.string(self._options.filtername+"_"+stream.name)
1191 
1192         #add an automatic flushing to limit memory consumption
1193         output.eventAutoFlushCompressedSize=cms.untracked.int32(5*1024*1024)
1194 
1195         if workflow in ("producers","full"):
1196             if isinstance(stream.paths,tuple):
1197                 for path in stream.paths:
1198                     self.schedule.append(path)
1199             else:
1200                 self.schedule.append(stream.paths)
1201 
1202 
1203         # in case of relvals we don't want to have additional outputs
1204         if (not self._options.relval) and workflow in ("full","output"):
1205             self.additionalOutputs[name] = output
1206             setattr(self.process,name,output)
1207 
1208         if workflow == 'output':
1209             # adjust the select events to the proper trigger results from previous process
1210             filterList = output.SelectEvents.SelectEvents
1211             for i, filter in enumerate(filterList):
1212                 filterList[i] = filter+":"+self._options.triggerResultsProcess
1213 
1214         return output
1215 
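         # Editor's note (illustrative sketch, not part of the original file): addExtraStream()
         # turns a cms.FilteredStream into a PoolOutputModule whose SelectEvents lists the
         # stream's paths, whose fileName is "<dirout><streamName>.root", and whose dataset
         # PSet carries the stream's dataTier and filter name. For workflow "full" or
         # "producers" the stream's paths are appended to self.schedule; for "full" or
         # "output" (outside relval jobs) the module is attached to the process and recorded
         # in self.additionalOutputs under the given name.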
1216     #----------------------------------------------------------------------------
1217     # here the methods to create the steps. Of course we are doing magic here ;)
1218     # prepare_STEPNAME modifies self.process and whatever else is needed.
1219     #----------------------------------------------------------------------------
1220 
1221     def loadDefaultOrSpecifiedCFF(self, sequence,defaultCFF):
1222         if ( len(sequence.split('.'))==1 ):
1223             l=self.loadAndRemember(defaultCFF)
1224         elif ( len(sequence.split('.'))==2 ):
1225             l=self.loadAndRemember(sequence.split('.')[0])
1226             sequence=sequence.split('.')[1]
1227         else:
1228             print("sub-sequence configuration must be of the form dir/subdir/cff.a+b+c or cff.a")
1229             print(sequence,"not recognized")
1230             raise Exception("sub-sequence specification "+str(sequence)+" not recognized")
1231         return l
1232 
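         # Editor's note (illustrative sketch, not part of the original file): a step
         # specification without a dot (e.g. "RawToDigi") loads the default cff passed in,
         # while a dotted one (e.g. a hypothetical "MyPackage/MySubPackage/myReco_cff.mySequence")
         # loads the cff named before the dot and leaves the part after the dot to be
         # scheduled by the caller. Anything with more than one dot is rejected.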
1233     def scheduleSequence(self,seq,prefix,what='Path'):
1234         if '*' in seq:
1235             #create only one path with all sequences in it
1236             for i,s in enumerate(seq.split('*')):
1237                 if i==0:
1238                     setattr(self.process,prefix,getattr(cms,what)( getattr(self.process, s) ))
1239                 else:
1240                     p=getattr(self.process,prefix)
1241                     tmp = getattr(self.process, s)
1242                     if isinstance(tmp, cms.Task):
1243                         p.associate(tmp)
1244                     else:
1245                         p+=tmp
1246             self.schedule.append(getattr(self.process,prefix))
1247             return
1248         else:
1249             #create as many paths as there are sequences
1250             if not '+' in seq:
1251                 if self.nextScheduleIsConditional:
1252                     self.conditionalPaths.append(prefix)
1253                 setattr(self.process,prefix,getattr(cms,what)( getattr(self.process, seq) ))
1254                 self.schedule.append(getattr(self.process,prefix))
1255             else:
1256                 for i,s in enumerate(seq.split('+')):
1257                     sn=prefix+'%d'%(i)
1258                     setattr(self.process,sn,getattr(cms,what)( getattr(self.process, s) ))
1259                     self.schedule.append(getattr(self.process,sn))
1260             return
1261 
1262     def scheduleSequenceAtEnd(self,seq,prefix):
1263         self.scheduleSequence(seq,prefix,what='EndPath')
1264         return
1265 
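         # Editor's note (illustrative sketch, not part of the original file): scheduleSequence()
         # supports two composition operators in the sequence specification. A '*'-joined spec
         # such as a hypothetical "seqA*seqB" produces a single Path named <prefix> containing
         # both (cms.Task entries are associated rather than added), whereas a '+'-joined spec
         # such as "seqA+seqB" produces one Path per entry, named <prefix>0, <prefix>1, ...
         # Every created Path is appended to self.schedule; scheduleSequenceAtEnd() does the
         # same but builds cms.EndPath objects.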
1266     def prepare_ALCAPRODUCER(self, sequence = None):
1267         self.prepare_ALCA(sequence, workflow = "producers")
1268 
1269     def prepare_ALCAOUTPUT(self, sequence = None):
1270         self.prepare_ALCA(sequence, workflow = "output")
1271 
1272     def prepare_ALCA(self, sequence = None, workflow = 'full'):
1273         """ Enrich the process with alca streams """
1274         alcaConfig=self.loadDefaultOrSpecifiedCFF(sequence,self.ALCADefaultCFF)
1275         sequence = sequence.split('.')[-1]
1276 
1277         MAXLEN=31 #the alca producer name should be shorter than 31 chars as per https://cms-talk.web.cern.ch/t/alcaprompt-datasets-not-loaded-in-dbs/11146/2
1278         # decide which ALCA paths to use
1279         alcaList = sequence.split("+")
1280         for alca in alcaList:
1281             if (len(alca)>MAXLEN):
1282                 raise Exception("The ALCA name "+str(alca)+" ("+str(len(alca))+" chars) cannot be accepted: it exceeds the DBS limit of "+str(MAXLEN)+" chars for the names of ALCARECO producers!")
1283 
1284         maxLevel=0
1285         from Configuration.AlCa.autoAlca import autoAlca, AlCaNoConcurrentLumis
1286         # support @X keys from autoAlca.py, with recursive expansion, e.g. T0:@Mu+@EG+...
1287         self.expandMapping(alcaList,autoAlca)
1288         self.AlCaPaths=[]
1289         for name in alcaConfig.__dict__:
1290             alcastream = getattr(alcaConfig,name)
1291             shortName = name.replace('ALCARECOStream','')
1292             if shortName in alcaList and isinstance(alcastream,cms.FilteredStream):
1293                 if shortName in AlCaNoConcurrentLumis:
1294                     print("Setting numberOfConcurrentLuminosityBlocks=1 because of AlCa sequence {}".format(shortName))
1295                     self._options.nConcurrentLumis = "1"
1296                     self._options.nConcurrentIOVs = "1"
1297                 output = self.addExtraStream(name,alcastream, workflow = workflow)
1298                 self.executeAndRemember('process.ALCARECOEventContent.outputCommands.extend(process.OutALCARECO'+shortName+'_noDrop.outputCommands)')
1299                 self.AlCaPaths.append(shortName)
1300                 if 'DQM' in alcaList:
1301                     if not self._options.inlineEventContent and hasattr(self.process,name):
1302                         self.executeAndRemember('process.' + name + '.outputCommands.append("keep *_MEtoEDMConverter_*_*")')
1303                     else:
1304                         output.outputCommands.append("keep *_MEtoEDMConverter_*_*")
1305 
1306                 #rename the HLT process name in the alca modules
1307                 if self._options.hltProcess or 'HLT' in self.stepMap:
1308                     if isinstance(alcastream.paths,tuple):
1309                         for path in alcastream.paths:
1310                             self.renameHLTprocessInSequence(path.label())
1311                     else:
1312                         self.renameHLTprocessInSequence(alcastream.paths.label())
1313 
1314                 for i in range(alcaList.count(shortName)):
1315                     alcaList.remove(shortName)
1316 
1317             # DQM needs a special handling
1318             elif name == 'pathALCARECODQM' and 'DQM' in alcaList:
1319                 path = getattr(alcaConfig,name)
1320                 self.schedule.append(path)
1321                 alcaList.remove('DQM')
1322 
1323             if isinstance(alcastream,cms.Path):
1324                 #blacklist the alca paths so that they do not appear in the cfg
1325                 self.blacklist_paths.append(alcastream)
1326 
1327 
1328         if len(alcaList) != 0:
1329             available=[]
1330             for name in alcaConfig.__dict__:
1331                 alcastream = getattr(alcaConfig,name)
1332                 if isinstance(alcastream,cms.FilteredStream):
1333                     available.append(name.replace('ALCARECOStream',''))
1334             print("The following alcas could not be found "+str(alcaList))
1335             print("available ",available)
1336             #print "verify your configuration, ignoring for now"
1337             raise Exception("The following alcas could not be found "+str(alcaList))
1338 
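         # Editor's note (illustrative sketch, not part of the original file): an ALCA step
         # specification is a '+'-separated list of ALCARECO producer names, optionally using
         # '@' keys expanded recursively through autoAlca (e.g. a hypothetical
         # "ALCA:SiStripCalMinBias+@SomeKey"). Names longer than 31 characters are rejected,
         # and each resolved name must match an ALCARECOStream<name> FilteredStream in the
         # AlCa cff; otherwise an exception listing the available streams is raised.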
1339     def prepare_LHE(self, sequence = None):
1340         #load the fragment
1341         ##make it loadable
1342         loadFragment = self._options.evt_type.replace('.py','',).replace('.','_').replace('python/','').replace('/','.')
1343         print("Loading lhe fragment from",loadFragment)
1344         __import__(loadFragment)
1345         self.process.load(loadFragment)
1346         ##inline the modules
1347         self._options.inlineObjets+=','+sequence
1348 
1349         getattr(self.process,sequence).nEvents = int(self._options.number)
1350 
1351         #schedule it
1352         self.process.lhe_step = cms.Path( getattr( self.process,sequence)  )
1353         self.excludedPaths.append("lhe_step")
1354         self.schedule.append( self.process.lhe_step )
1355 
1356     def prepare_GEN(self, sequence = None):
1357         """ load the fragment of generator configuration """
1358         loadFailure=False
1359         #remove trailing .py
1360         #support old-style .cfi by changing something.cfi into something_cfi
1361         #remove python/ from the name
1362         loadFragment = self._options.evt_type.replace('.py','',).replace('.','_').replace('python/','')
1363         #standard location of fragments
1364         if not '/' in loadFragment:
1365             loadFragment='Configuration.Generator.'+loadFragment
1366         else:
1367             loadFragment=loadFragment.replace('/','.')
1368         try:
1369             print("Loading generator fragment from",loadFragment)
1370             __import__(loadFragment)
1371         except:
1372             loadFailure=True
1373             #if self.process.source and self.process.source.type_()=='EmptySource':
1374             if not (self._options.filein or self._options.dasquery):
1375                 raise Exception("Neither gen fragment nor input files provided: this is an inconsistent GEN step configuration")
1376 
1377         if not loadFailure:
1378             from Configuration.Generator.concurrentLumisDisable import noConcurrentLumiGenerators
1379 
1380             generatorModule=sys.modules[loadFragment]
1381             genModules=generatorModule.__dict__
1382             #remove lhe producer module since this should have been
1383             #imported instead in the LHE step
1384             if self.LHEDefaultSeq in genModules:
1385                 del genModules[self.LHEDefaultSeq]
1386 
1387             if self._options.hideGen:
1388                 self.loadAndRemember(loadFragment)
1389             else:
1390                 self.process.load(loadFragment)
1391                 # expose the objects from that fragment to the configuration
1392                 import FWCore.ParameterSet.Modules as cmstypes
1393                 for name in genModules:
1394                     theObject = getattr(generatorModule,name)
1395                     if isinstance(theObject, cmstypes._Module):
1396                         self._options.inlineObjets=name+','+self._options.inlineObjets
1397                         if theObject.type_() in noConcurrentLumiGenerators:
1398                             print("Setting numberOfConcurrentLuminosityBlocks=1 because of generator {}".format(theObject.type_()))
1399                             self._options.nConcurrentLumis = "1"
1400                             self._options.nConcurrentIOVs = "1"
1401                     elif isinstance(theObject, cms.Sequence) or isinstance(theObject, cmstypes.ESProducer):
1402                         self._options.inlineObjets+=','+name
1403 
1404             if sequence == self.GENDefaultSeq or sequence == 'pgen_genonly':
1405                 if 'ProductionFilterSequence' in genModules and ('generator' in genModules):
1406                     self.productionFilterSequence = 'ProductionFilterSequence'
1407                 elif 'generator' in genModules:
1408                     self.productionFilterSequence = 'generator'
1409 
1410         """ Enrich the schedule with the rest of the generation step """
1411         self.loadDefaultOrSpecifiedCFF(sequence,self.GENDefaultCFF)
1412         genSeqName=sequence.split('.')[-1]
1413 
1414         if True:
1415             try:
1416                 from Configuration.StandardSequences.VtxSmeared import VtxSmeared
1417                 cffToBeLoaded=VtxSmeared[self._options.beamspot]
1418                 self.loadAndRemember(cffToBeLoaded)
1419             except ImportError:
1420                 raise Exception("VertexSmearing type or beamspot "+self._options.beamspot+" unknown.")
1421 
1422             if self._options.scenario == 'HeavyIons':
1423                 if self._options.pileup=='HiMixGEN':
1424                     self.loadAndRemember("Configuration/StandardSequences/GeneratorMix_cff")
1425                 elif self._options.pileup=='HiMixEmbGEN':
1426                     self.loadAndRemember("Configuration/StandardSequences/GeneratorEmbMix_cff")
1427                 else:
1428                     self.loadAndRemember("Configuration/StandardSequences/GeneratorHI_cff")
1429 
1430         self.process.generation_step = cms.Path( getattr(self.process,genSeqName) )
1431         self.schedule.append(self.process.generation_step)
1432 
1433         #register to the genstepfilter the name of the path (static right now, but might evolve)
1434         self.executeAndRemember('process.genstepfilter.triggerConditions=cms.vstring("generation_step")')
1435 
1436         if 'reGEN' in self.stepMap:
1437             #stop here
1438             return
1439 
1440         """ Enrich the schedule with the summary of the filter step """
1441         #the gen filter in the endpath
1442         self.loadAndRemember("GeneratorInterface/Core/genFilterSummary_cff")
1443         self.scheduleSequenceAtEnd('genFilterSummary','genfiltersummary_step')
1444         return
1445 
1446     def prepare_SIM(self, sequence = None):
1447         """ Enrich the schedule with the simulation step"""
1448         self.loadDefaultOrSpecifiedCFF(sequence,self.SIMDefaultCFF)
1449         if not self._options.fast:
1450             if self._options.gflash==True:
1451                 self.loadAndRemember("Configuration/StandardSequences/GFlashSIM_cff")
1452 
1453             if self._options.magField=='0T':
1454                 self.executeAndRemember("process.g4SimHits.UseMagneticField = cms.bool(False)")
1455         else:
1456             if self._options.magField=='0T':
1457                 self.executeAndRemember("process.fastSimProducer.detectorDefinition.magneticFieldZ = cms.untracked.double(0.)")
1458 
1459         self.scheduleSequence(sequence.split('.')[-1],'simulation_step')
1460         return
1461 
1462     def prepare_DIGI(self, sequence = None):
1463         """ Enrich the schedule with the digitisation step"""
1464         self.loadDefaultOrSpecifiedCFF(sequence,self.DIGIDefaultCFF)
1465 
1466         if self._options.gflash==True:
1467             self.loadAndRemember("Configuration/StandardSequences/GFlashDIGI_cff")
1468 
1469         if sequence == 'pdigi_valid' or sequence == 'pdigi_hi':
1470             self.executeAndRemember("process.mix.digitizers = cms.PSet(process.theDigitizersValid)")
1471 
1472         if sequence != 'pdigi_nogen' and sequence != 'pdigi_valid_nogen' and sequence != 'pdigi_hi_nogen' and not self.process.source.type_()=='EmptySource' and not self._options.filetype == "LHE":
1473             if self._options.inputEventContent=='':
1474                 self._options.inputEventContent='REGEN'
1475             else:
1476                 self._options.inputEventContent=self._options.inputEventContent+',REGEN'
1477 
1478 
1479         self.scheduleSequence(sequence.split('.')[-1],'digitisation_step')
1480         return
1481 
1482     def prepare_CFWRITER(self, sequence = None):
1483         """ Enrich the schedule with the crossing frame writer step"""
1484         self.loadAndRemember(self.CFWRITERDefaultCFF)
1485         self.scheduleSequence('pcfw','cfwriter_step')
1486         return
1487 
1488     def prepare_DATAMIX(self, sequence = None):
1489         """ Enrich the schedule with the data mixing step"""
1490         self.loadAndRemember(self.DATAMIXDefaultCFF)
1491         self.scheduleSequence('pdatamix','datamixing_step')
1492 
1493         if self._options.pileup_input:
1494             theFiles=''
1495             if self._options.pileup_input.startswith('dbs:') or self._options.pileup_input.startswith('das:'):
1496                 theFiles=filesFromDASQuery('file dataset = %s'%(self._options.pileup_input[4:],),self._options.pileup_dasoption)[0]
1497             elif self._options.pileup_input.startswith("filelist:"):
1498                 theFiles= (filesFromList(self._options.pileup_input[9:]))[0]
1499             else:
1500                 theFiles=self._options.pileup_input.split(',')
1501             #print theFiles
1502             self.executeAndRemember( "process.mixData.input.fileNames = cms.untracked.vstring(%s)"%(  theFiles ) )
1503 
1504         return
1505 
1506     def prepare_DIGI2RAW(self, sequence = None):
1507         self.loadDefaultOrSpecifiedCFF(sequence,self.DIGI2RAWDefaultCFF)
1508         self.scheduleSequence(sequence.split('.')[-1],'digi2raw_step')
1509         return
1510 
1511     def prepare_REPACK(self, sequence = None):
1512         self.loadDefaultOrSpecifiedCFF(sequence,self.REPACKDefaultCFF)
1513         self.scheduleSequence(sequence.split('.')[-1],'digi2repack_step')
1514         return
1515 
1516     def prepare_L1(self, sequence = None):
1517         """ Enrich the schedule with the L1 simulation step"""
1518         assert(sequence == None)
1519         self.loadAndRemember(self.L1EMDefaultCFF)
1520         self.scheduleSequence('SimL1Emulator','L1simulation_step')
1521         return
1522 
1523     def prepare_L1REPACK(self, sequence = None):
1524         """ Enrich the schedule with the L1 simulation step, running the L1 emulator on data unpacked from the RAW collection, and repacking the result in a new RAW collection"""
1525         supported = ['GT','GT1','GT2','GCTGT','Full','FullSimTP','FullMC','Full2015Data','uGT','CalouGT']
1526         if sequence in supported:
1527             self.loadAndRemember('Configuration/StandardSequences/SimL1EmulatorRepack_%s_cff'%sequence)
1528             if self._options.scenario == 'HeavyIons':
1529                 self.renameInputTagsInSequence("SimL1Emulator","rawDataCollector","rawDataRepacker")
1530             self.scheduleSequence('SimL1Emulator','L1RePack_step')
1531         else:
1532             print("L1REPACK with '",sequence,"' is not supported! Supported choices are: ",supported)
1533             raise Exception('unsupported feature')
1534 
1535     def prepare_HLT(self, sequence = None):
1536         """ Enrich the schedule with the HLT simulation step"""
1537         if not sequence:
1538             print("no specification of the hlt menu has been given, should never happen")
1539             raise  Exception('no HLT sequence provided')
1540 
1541         if '@' in sequence:
1542             # case where HLT:@something was provided
1543             from Configuration.HLT.autoHLT import autoHLT
1544             key = sequence[1:]
1545             if key in autoHLT:
1546                 sequence = autoHLT[key]
1547             else:
1548                 raise ValueError('no HLT mapping key "%s" found in autoHLT' % key)
1549 
1550         if ',' in sequence:
1551             #case where HLT:something:something was provided
1552             self.executeAndRemember('import HLTrigger.Configuration.Utilities')
1553             optionsForHLT = {}
1554             if self._options.scenario == 'HeavyIons':
1555                 optionsForHLT['type'] = 'HIon'
1556             else:
1557                 optionsForHLT['type'] = 'GRun'
1558             optionsForHLTConfig = ', '.join('%s=%s' % (key, repr(val)) for (key, val) in optionsForHLT.items())
1559             if sequence == 'run,fromSource':
1560                 if hasattr(self.process.source,'firstRun'):
1561                     self.executeAndRemember('process.loadHltConfiguration("run:%%d"%%(process.source.firstRun.value()),%s)'%(optionsForHLTConfig))
1562                 elif hasattr(self.process.source,'setRunNumber'):
1563                     self.executeAndRemember('process.loadHltConfiguration("run:%%d"%%(process.source.setRunNumber.value()),%s)'%(optionsForHLTConfig))
1564                 else:
1565                     raise Exception('Cannot replace menu to load %s'%(sequence))
1566             else:
1567                 self.executeAndRemember('process.loadHltConfiguration("%s",%s)'%(sequence.replace(',',':'),optionsForHLTConfig))
1568         else:
1569             self.loadAndRemember('HLTrigger/Configuration/HLT_%s_cff' % sequence)
1570 
1571         if self._options.isMC:
1572             self._options.customisation_file.append("HLTrigger/Configuration/customizeHLTforMC.customizeHLTforMC")
1573 
1574         if self._options.name != 'HLT':
1575             self.additionalCommands.append('from HLTrigger.Configuration.CustomConfigs import ProcessName')
1576             self.additionalCommands.append('process = ProcessName(process)')
1577             self.additionalCommands.append('')
1578             from HLTrigger.Configuration.CustomConfigs import ProcessName
1579             self.process = ProcessName(self.process)
1580 
1581         if self.process.schedule == None:
1582             raise Exception('the HLT step did not attach a valid schedule to the process')
1583 
1584         self.scheduleIndexOfFirstHLTPath = len(self.schedule)
1585         [self.blacklist_paths.append(path) for path in self.process.schedule if isinstance(path,(cms.Path,cms.EndPath))]
1586 
1587         # this is a fake, to be removed with the fastsim migration and HLT menu dump
1588         if self._options.fast:
1589             if not hasattr(self.process,'HLTEndSequence'):
1590                 self.executeAndRemember("process.HLTEndSequence = cms.Sequence( process.dummyModule )")
1591 
1592 
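         # Editor's note (illustrative summary, not part of the original file): the HLT step
         # specification accepted above can be a plain menu name (loading
         # HLTrigger/Configuration/HLT_<name>_cff), an '@' key resolved through autoHLT, or a
         # comma-separated form such as "run,fromSource" that loads a menu at runtime via
         # process.loadHltConfiguration. For MC jobs the customizeHLTforMC customisation is
         # added to the customisation list automatically.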
1593     def prepare_RAW2RECO(self, sequence = None):
1594         if ',' in sequence:
1595             seqReco=sequence.split(',')[1]
1596             seqDigi=sequence.split(',')[0]
1597         else:
1598             raise Exception("RAW2RECO requires two comma-separated specifications, got: "+str(sequence))
1599 
1600         self.prepare_RAW2DIGI(seqDigi)
1601         self.prepare_RECO(seqReco)
1602         return
1603 
1604     def prepare_RAW2DIGI(self, sequence = "RawToDigi"):
1605         self.loadDefaultOrSpecifiedCFF(sequence,self.RAW2DIGIDefaultCFF)
1606         self.scheduleSequence(sequence,'raw2digi_step')
1607         #if self._options.isRepacked:
1608         #    self.renameInputTagsInSequence(sequence)
1609         return
1610 
1611     def prepare_PATFILTER(self, sequence=None):
1612         self.loadAndRemember("PhysicsTools/PatAlgos/slimming/metFilterPaths_cff")
1613         from PhysicsTools.PatAlgos.slimming.metFilterPaths_cff import allMetFilterPaths
1614         for filt in allMetFilterPaths:
1615             self.schedule.append(getattr(self.process,'Flag_'+filt))
1616 
1617     def prepare_L1HwVal(self, sequence = 'L1HwVal'):
1618         ''' Enrich the schedule with L1 HW validation '''
1619         self.loadDefaultOrSpecifiedCFF(sequence,self.L1HwValDefaultCFF)
1620         #self.scheduleSequence(sequence.split('.')[-1],'l1hwval_step')
1621         print('\n\n\n DEPRECATED this has no action \n\n\n')
1622         return
1623 
1624     def prepare_L1Reco(self, sequence = "L1Reco"):
1625         ''' Enrich the schedule with L1 reconstruction '''
1626         self.loadDefaultOrSpecifiedCFF(sequence,self.L1RecoDefaultCFF)
1627         self.scheduleSequence(sequence.split('.')[-1],'L1Reco_step')
1628         return
1629 
1630     def prepare_L1TrackTrigger(self, sequence = "L1TrackTrigger"):
1631         ''' Enrich the schedule with the L1 track trigger '''
1632         self.loadDefaultOrSpecifiedCFF(sequence,self.L1TrackTriggerDefaultCFF)
1633         self.scheduleSequence(sequence.split('.')[-1],'L1TrackTrigger_step')
1634         return
1635 
1636     def prepare_FILTER(self, sequence = None):
1637         ''' Enrich the schedule with a user defined filter sequence '''
1638         ## load the relevant part
1639         filterConfig=self.load(sequence.split('.')[0])
1640         filterSeq=sequence.split('.')[-1]
1641         ## print it in the configuration
1642         class PrintAllModules(object):
1643             def __init__(self):
1644                 self.inliner=''
1645                 pass
1646             def enter(self,visitee):
1647                 try:
1648                     label=visitee.label()
1649                     ##needs to be in reverse order
1650                     self.inliner=label+','+self.inliner
1651                 except:
1652                     pass
1653             def leave(self,v): pass
1654 
1655         expander=PrintAllModules()
1656         getattr(self.process,filterSeq).visit( expander )
1657         self._options.inlineObjets+=','+expander.inliner
1658         self._options.inlineObjets+=','+filterSeq
1659 
1660         ## put the filtering path in the schedule
1661         self.scheduleSequence(filterSeq,'filtering_step')
1662         self.nextScheduleIsConditional=True
1663         ## put it before all the other paths
1664         self.productionFilterSequence = filterSeq
1665 
1666         return
1667 
1668     def prepare_RECO(self, sequence = "reconstruction"):
1669         ''' Enrich the schedule with reconstruction '''
1670         self.loadDefaultOrSpecifiedCFF(sequence,self.RECODefaultCFF)
1671         self.scheduleSequence(sequence.split('.')[-1],'reconstruction_step')
1672         return
1673 
1674     def prepare_RECOSIM(self, sequence = "recosim"):
1675         ''' Enrich the schedule with the RECOSIM step '''
1676         self.loadDefaultOrSpecifiedCFF(sequence,self.RECOSIMDefaultCFF)
1677         self.scheduleSequence(sequence.split('.')[-1],'recosim_step')
1678         return
1679 
1680     def prepare_RECOBEFMIX(self, sequence = "reconstruction"):
1681         ''' Enrich the schedule with the part of reconstruction that is done before mixing in FastSim'''
1682         if not self._options.fast:
1683             print("ERROR: this step is only implemented for FastSim")
1684             sys.exit()
1685         self.loadDefaultOrSpecifiedCFF(self.RECOBEFMIXDefaultSeq,self.RECOBEFMIXDefaultCFF)
1686         self.scheduleSequence(sequence.split('.')[-1],'reconstruction_befmix_step')
1687         return
1688 
1689     def prepare_PAT(self, sequence = "miniAOD"):
1690         ''' Enrich the schedule with PAT '''
1691         self.prepare_PATFILTER()
1692         self.loadDefaultOrSpecifiedCFF(sequence,self.PATDefaultCFF)
1693         self.labelsToAssociate.append('patTask')
1694         if self._options.isData:
1695             self._options.customisation_file_unsch.insert(0,"PhysicsTools/PatAlgos/slimming/miniAOD_tools.miniAOD_customizeAllData")
1696         else:
1697             if self._options.fast:
1698                 self._options.customisation_file_unsch.insert(0,"PhysicsTools/PatAlgos/slimming/miniAOD_tools.miniAOD_customizeAllMCFastSim")
1699             else:
1700                 self._options.customisation_file_unsch.insert(0,"PhysicsTools/PatAlgos/slimming/miniAOD_tools.miniAOD_customizeAllMC")
1701 
1702         if self._options.hltProcess:
1703             if len(self._options.customise_commands) > 1:
1704                 self._options.customise_commands = self._options.customise_commands + " \n"
1705             self._options.customise_commands = self._options.customise_commands + "process.patTrigger.processName = \""+self._options.hltProcess+"\"\n"
1706             self._options.customise_commands = self._options.customise_commands + "process.slimmedPatTrigger.triggerResults= cms.InputTag( 'TriggerResults::"+self._options.hltProcess+"' )\n"
1707             self._options.customise_commands = self._options.customise_commands + "process.patMuons.triggerResults= cms.InputTag( 'TriggerResults::"+self._options.hltProcess+"' )\n"
1708 
1709 #            self.renameHLTprocessInSequence(sequence)
1710 
1711         return
1712 
1713     def prepare_PATGEN(self, sequence = "miniGEN"):
1714         ''' Enrich the schedule with PATGEN '''
1715         self.loadDefaultOrSpecifiedCFF(sequence,self.PATGENDefaultCFF) #this is unscheduled
1716         self.labelsToAssociate.append('patGENTask')
1717         if self._options.isData:
1718             raise Exception("PATGEN step can only run on MC")
1719         return
1720 
1721     def prepare_NANO(self, sequence = "nanoAOD"):
1722         ''' Enrich the schedule with NANO '''
1723         self.loadDefaultOrSpecifiedCFF(sequence,self.NANODefaultCFF)
1724         self.scheduleSequence(sequence.split('.')[-1],'nanoAOD_step')
1725         custom = "nanoAOD_customizeData" if self._options.isData else "nanoAOD_customizeMC"
1726         self._options.customisation_file.insert(0,"PhysicsTools/NanoAOD/nano_cff."+custom)
1727         if self._options.hltProcess:
1728             if len(self._options.customise_commands) > 1:
1729                 self._options.customise_commands = self._options.customise_commands + " \n"
1730             self._options.customise_commands = self._options.customise_commands + "process.unpackedPatTrigger.triggerResults= cms.InputTag( 'TriggerResults::"+self._options.hltProcess+"' )\n"
1731 
1732     def prepare_NANOGEN(self, sequence = "nanoAOD"):
1733         ''' Enrich the schedule with NANOGEN '''
1734         # TODO: Need to modify this based on the input file type
1735         fromGen = any([x in self.stepMap for x in ['LHE', 'GEN', 'AOD']])
1736         self.loadDefaultOrSpecifiedCFF(sequence,self.NANOGENDefaultCFF)
1737         self.scheduleSequence(sequence.split('.')[-1],'nanoAOD_step')
1738         custom = "customizeNanoGEN" if fromGen else "customizeNanoGENFromMini"
1739         if self._options.runUnscheduled:
1740             self._options.customisation_file_unsch.insert(0, '.'.join([self.NANOGENDefaultCFF, custom]))
1741         else:
1742             self._options.customisation_file.insert(0, '.'.join([self.NANOGENDefaultCFF, custom]))
1743 
1744     def prepare_SKIM(self, sequence = "all"):
1745         ''' Enrich the schedule with skimming fragments'''
1746         skimConfig = self.loadDefaultOrSpecifiedCFF(sequence,self.SKIMDefaultCFF)
1747         sequence = sequence.split('.')[-1]
1748 
1749         skimlist=sequence.split('+')
1750         ## support @Mu+DiJet+@Electron configuration via autoSkim.py
1751         from Configuration.Skimming.autoSkim import autoSkim
1752         self.expandMapping(skimlist,autoSkim)
1753 
1754         #print("dictionary for skims:",skimConfig.__dict__)
1755         for skim in skimConfig.__dict__:
1756             skimstream = getattr(skimConfig,skim)
1757             if isinstance(skimstream,cms.Path):
1758                 #blacklist the skim paths so that they do not appear in the cfg
1759                 self.blacklist_paths.append(skimstream)
1760             if (not isinstance(skimstream,cms.FilteredStream)):
1761                 continue
1762             shortname = skim.replace('SKIMStream','')
1763             if (sequence=="all"):
1764                 self.addExtraStream(skim,skimstream)
1765             elif (shortname in skimlist):
1766                 self.addExtraStream(skim,skimstream)
1767                 #add a DQM event content for this skim
1768                 if self._options.datatier=='DQM':
1769                     self.process.load(self.EVTCONTDefaultCFF)
1770                     skimstreamDQM = cms.FilteredStream(
1771                             responsible = skimstream.responsible,
1772                             name = skimstream.name+'DQM',
1773                             paths = skimstream.paths,
1774                             selectEvents = skimstream.selectEvents,
1775                             content = self._options.datatier+'EventContent',
1776                             dataTier = cms.untracked.string(self._options.datatier)
1777                             )
1778                     self.addExtraStream(skim+'DQM',skimstreamDQM)
1779                 for i in range(skimlist.count(shortname)):
1780                     skimlist.remove(shortname)
1781 
1782 
1783 
1784         if (len(skimlist)!=0 and sequence!="all"):
1785             print('WARNING, possible typo with SKIM:'+'+'.join(skimlist))
1786             raise Exception('WARNING, possible typo with SKIM:'+'+'.join(skimlist))
1787 
1788     def prepare_USER(self, sequence = None):
1789         ''' Enrich the schedule with a user defined sequence '''
1790         self.loadDefaultOrSpecifiedCFF(sequence,self.USERDefaultCFF)
1791         self.scheduleSequence(sequence.split('.')[-1],'user_step')
1792         return
1793 
1794     def prepare_POSTRECO(self, sequence = None):
1795         """ Enrich the schedule with the postreco step """
1796         self.loadAndRemember(self.POSTRECODefaultCFF)
1797         self.scheduleSequence('postreco_generator','postreco_step')
1798         return
1799 
1800 
1801     def prepare_VALIDATION(self, sequence = 'validation'):
1802         print(sequence,"in preparing validation")
1803         self.loadDefaultOrSpecifiedCFF(sequence,self.VALIDATIONDefaultCFF)
1804         from Validation.Configuration.autoValidation import autoValidation
1805         #in case VALIDATION:something:somethingelse -> something,somethingelse
1806         sequence=sequence.split('.')[-1]
1807         if sequence.find(',')!=-1:
1808             prevalSeqName=sequence.split(',')[0].split('+')
1809             valSeqName=sequence.split(',')[1].split('+')
1810             self.expandMapping(prevalSeqName,autoValidation,index=0)
1811             self.expandMapping(valSeqName,autoValidation,index=1)
1812         else:
1813             if '@' in sequence:
1814                 prevalSeqName=sequence.split('+')
1815                 valSeqName=sequence.split('+')
1816                 self.expandMapping(prevalSeqName,autoValidation,index=0)
1817                 self.expandMapping(valSeqName,autoValidation,index=1)
1818             else:
1819                 postfix=''
1820                 if sequence:
1821                     postfix='_'+sequence
1822                 prevalSeqName=['prevalidation'+postfix]
1823                 valSeqName=['validation'+postfix]
1824                 if not hasattr(self.process,valSeqName[0]):
1825                     prevalSeqName=['']
1826                     valSeqName=[sequence]
1827 
1828         def NFI(index):
1829             ##name from index, required to keep backward compatibility
1830             if index==0:
1831                 return ''
1832             else:
1833                 return '%s'%index
1834 
1835 
1836         #rename the HLT process in validation steps
1837         if ('HLT' in self.stepMap and not self._options.fast) or self._options.hltProcess:
1838             for s in valSeqName+prevalSeqName:
1839                 if s:
1840                     self.renameHLTprocessInSequence(s)
1841         for (i,s) in enumerate(prevalSeqName):
1842             if s:
1843                 setattr(self.process,'prevalidation_step%s'%NFI(i),  cms.Path( getattr(self.process, s)) )
1844                 self.schedule.append(getattr(self.process,'prevalidation_step%s'%NFI(i)))
1845 
1846         for (i,s) in enumerate(valSeqName):
1847             setattr(self.process,'validation_step%s'%NFI(i), cms.EndPath( getattr(self.process, s)))
1848             self.schedule.append(getattr(self.process,'validation_step%s'%NFI(i)))
1849 
1850         #needed in case the miniAODValidation sequence is run starting from AODSIM
1851         if 'PAT' in self.stepMap and not 'RECO' in self.stepMap:
1852             return
1853 
1854         if not 'DIGI' in self.stepMap and not self._options.fast and not any(map( lambda s : s.startswith('genvalid'), valSeqName)):
1855             if self._options.restoreRNDSeeds==False and not self._options.restoreRNDSeeds==True:
1856                 self._options.restoreRNDSeeds=True
1857 
1858         if not 'DIGI' in self.stepMap and not self._options.isData and not self._options.fast:
1859             self.executeAndRemember("process.mix.playback = True")
1860             self.executeAndRemember("process.mix.digitizers = cms.PSet()")
1861             self.executeAndRemember("for a in process.aliases: delattr(process, a)")
1862             self._options.customisation_file.append("SimGeneral/MixingModule/fullMixCustomize_cff.setCrossingFrameOn")
1863 
1864         if hasattr(self.process,"genstepfilter") and len(self.process.genstepfilter.triggerConditions):
1865             #will get in the schedule, smoothly
1866             for (i,s) in enumerate(valSeqName):
1867                 getattr(self.process,'validation_step%s'%NFI(i)).insert(0, self.process.genstepfilter)
1868 
1869         return
1870 
1871 
1872     class MassSearchReplaceProcessNameVisitor(object):
1873         """Visitor that travels within a cms.Sequence, looks for a parameter and replaces its value.
1874         It will climb down within PSets, VPSets and VInputTags to find its target"""
1875         def __init__(self, paramSearch, paramReplace, verbose=False, whitelist=()):
1876             self._paramReplace = paramReplace
1877             self._paramSearch = paramSearch
1878             self._verbose = verbose
1879             self._whitelist = whitelist
1880 
1881         def doIt(self,pset,base):
1882             if isinstance(pset, cms._Parameterizable):
1883                 for name in pset.parameters_().keys():
1884                     # skip whitelisted parameters
1885                     if name in self._whitelist:
1886                         continue
1887                     # if I use pset.parameters_().items() I get copies of the parameter values
1888                     # so I can't modify the nested pset
1889                     value = getattr(pset,name)
1890                     type = value.pythonTypeName()
1891                     if type in ('cms.PSet', 'cms.untracked.PSet'):
1892                         self.doIt(value,base+"."+name)
1893                     elif type in ('cms.VPSet', 'cms.untracked.VPSet'):
1894                         for (i,ps) in enumerate(value): self.doIt(ps, "%s.%s[%d]"%(base,name,i) )
1895                     elif type in ('cms.string', 'cms.untracked.string'):
1896                         if value.value() == self._paramSearch:
1897                             if self._verbose: print("set string process name %s.%s %s ==> %s"% (base, name, value, self._paramReplace))
1898                             setattr(pset, name,self._paramReplace)
1899                     elif type in ('cms.VInputTag', 'cms.untracked.VInputTag'):
1900                         for (i,n) in enumerate(value):
1901                             if not isinstance(n, cms.InputTag):
1902                                 n=cms.InputTag(n)
1903                             if n.processName == self._paramSearch:
1904                                 # VInputTag can be declared as a list of strings, so ensure that n is formatted correctly
1905                                 if self._verbose:print("set process name %s.%s[%d] %s ==> %s " % (base, name, i, n, self._paramReplace))
1906                                 setattr(n,"processName",self._paramReplace)
1907                                 value[i]=n
1908                     elif type in ('cms.vstring', 'cms.untracked.vstring'):
1909                         for (i,n) in enumerate(value):
1910                             if n==self._paramSearch:
1911                                 getattr(pset,name)[i]=self._paramReplace
1912                     elif type in ('cms.InputTag', 'cms.untracked.InputTag'):
1913                         if value.processName == self._paramSearch:
1914                             if self._verbose: print("set process name %s.%s %s ==> %s " % (base, name, value, self._paramReplace))
1915                             setattr(getattr(pset, name),"processName",self._paramReplace)
1916 
1917         def enter(self,visitee):
1918             label = ''
1919             try:
1920                 label = visitee.label()
1921             except AttributeError:
1922                 label = '<Module not in a Process>'
1923             except:
1924                 label = 'other exception'
1925             self.doIt(visitee, label)
1926 
1927         def leave(self,visitee):
1928             pass
1929 
1930     #visit a sequence to replace all input tags
1931     def renameInputTagsInSequence(self,sequence,oldT="rawDataCollector",newT="rawDataRepacker"):
1932         print("Replacing all InputTag %s => %s"%(oldT,newT))
1933         from PhysicsTools.PatAlgos.tools.helpers import massSearchReplaceAnyInputTag
1934         massSearchReplaceAnyInputTag(getattr(self.process,sequence),oldT,newT)
1935         loadMe='from PhysicsTools.PatAlgos.tools.helpers import massSearchReplaceAnyInputTag'
1936         if not loadMe in self.additionalCommands:
1937             self.additionalCommands.append(loadMe)
1938         self.additionalCommands.append('massSearchReplaceAnyInputTag(process.%s,"%s","%s",False,True)'%(sequence,oldT,newT))
1939 
1940     #change the process name used to address HLT results in any sequence
1941     def renameHLTprocessInSequence(self,sequence,proc=None,HLTprocess='HLT'):
1942         if self._options.hltProcess:
1943             proc=self._options.hltProcess
1944         else:
1945             proc=self.process.name_()
1946         if proc==HLTprocess:    return
1947         # look up all modules in the given sequence
1948         print("replacing %s process name - sequence %s will use '%s'" % (HLTprocess,sequence, proc))
1949         getattr(self.process,sequence).visit(ConfigBuilder.MassSearchReplaceProcessNameVisitor(HLTprocess,proc,whitelist = ("subSystemFolder",)))
1950         if 'from Configuration.Applications.ConfigBuilder import ConfigBuilder' not in self.additionalCommands:
1951             self.additionalCommands.append('from Configuration.Applications.ConfigBuilder import ConfigBuilder')
1952         self.additionalCommands.append('process.%s.visit(ConfigBuilder.MassSearchReplaceProcessNameVisitor("%s", "%s", whitelist = ("subSystemFolder",)))'% (sequence,HLTprocess, proc))
1953 
1954 
1955     def expandMapping(self,seqList,mapping,index=None):
1956         maxLevel=30
1957         level=0
1958         while '@' in repr(seqList) and level<maxLevel:
1959             level+=1
1960             for specifiedCommand in seqList:
1961                 if specifiedCommand.startswith('@'):
1962                     location=specifiedCommand[1:]
1963                     if not location in mapping:
1964                         raise Exception("Impossible to map "+location+" from "+repr(mapping))
1965                     mappedTo=mapping[location]
1966                     if index!=None:
1967                         mappedTo=mappedTo[index]
1968                     seqList.remove(specifiedCommand)
1969                     seqList.extend(mappedTo.split('+'))
1970                     break
1971         if level==maxLevel:
1972             raise Exception("Could not fully expand "+repr(seqList)+" from "+repr(mapping))
1973 
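         # Editor's note (illustrative sketch, not part of the original file): expandMapping()
         # replaces '@' entries in place using the given mapping, recursively up to 30 levels.
         # With a hypothetical mapping {'Mu': 'pathA+pathB'}, the list ['@Mu', 'DiJet'] becomes
         # ['DiJet', 'pathA', 'pathB']; when index is given (as the callers do for autoDQM and
         # autoValidation, whose values are per-step lists), only that element of the mapped
         # value is expanded.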
1974     def prepare_DQM(self, sequence = 'DQMOffline'):
1975         # this one needs replacement
1976 
1977         # any 'DQM' job should use DQMStore in non-legacy mode (but not HARVESTING)
1978         self.loadAndRemember("DQMServices/Core/DQMStoreNonLegacy_cff")
1979         self.loadDefaultOrSpecifiedCFF(sequence,self.DQMOFFLINEDefaultCFF)
1980         sequenceList=sequence.split('.')[-1].split('+')
1981         postSequenceList=sequence.split('.')[-1].split('+')
1982         from DQMOffline.Configuration.autoDQM import autoDQM
1983         self.expandMapping(sequenceList,autoDQM,index=0)
1984         self.expandMapping(postSequenceList,autoDQM,index=1)
1985 
1986         if len(set(sequenceList))!=len(sequenceList):
1987             sequenceList=list(set(sequenceList))
1988             print("Duplicate entries for DQM, using",sequenceList)
1989 
1990         pathName='dqmoffline_step'
1991         for (i,sequence) in enumerate(sequenceList):
1992             if (i!=0):
1993                 pathName='dqmoffline_%d_step'%(i)
1994 
1995             if 'HLT' in self.stepMap.keys() or self._options.hltProcess:
1996                 self.renameHLTprocessInSequence(sequence)
1997 
1998             setattr(self.process,pathName, cms.EndPath( getattr(self.process,sequence ) ) )
1999             self.schedule.append(getattr(self.process,pathName))
2000 
2001             if hasattr(self.process,"genstepfilter") and len(self.process.genstepfilter.triggerConditions):
2002                 #will get in the schedule, smoothly
2003                 getattr(self.process,pathName).insert(0,self.process.genstepfilter)
2004 
2005 
2006         pathName='dqmofflineOnPAT_step'
2007         for (i,sequence) in enumerate(postSequenceList):
2008             #Fix needed to avoid duplication of sequences not defined in autoDQM or without a PostDQM sequence
2009             if (sequenceList[i]==postSequenceList[i]):
2010                 continue
2011             if (i!=0):
2012                 pathName='dqmofflineOnPAT_%d_step'%(i)
2013 
2014             setattr(self.process,pathName, cms.EndPath( getattr(self.process, sequence ) ) )
2015             self.schedule.append(getattr(self.process,pathName))
2016 
2017     def prepare_HARVESTING(self, sequence = None):
2018         """ Enrich the process with harvesting step """
2019         self.DQMSaverCFF='Configuration/StandardSequences/DQMSaver'+self._options.harvesting+'_cff'
2020         self.loadAndRemember(self.DQMSaverCFF)
2021 
2022         harvestingConfig = self.loadDefaultOrSpecifiedCFF(sequence,self.HARVESTINGDefaultCFF)
2023         sequence = sequence.split('.')[-1]
2024 
2025         # decide which HARVESTING paths to use
2026         harvestingList = sequence.split("+")
2027         from DQMOffline.Configuration.autoDQM import autoDQM
2028         from Validation.Configuration.autoValidation import autoValidation
2029         import copy
2030         combined_mapping = copy.deepcopy( autoDQM )
2031         combined_mapping.update( autoValidation )
2032         self.expandMapping(harvestingList,combined_mapping,index=-1)
2033 
2034         if len(set(harvestingList))!=len(harvestingList):
2035             harvestingList=list(set(harvestingList))
2036             print("Duplicate entries for HARVESTING, using",harvestingList)
2037 
2038         for name in harvestingList:
2039             if not name in harvestingConfig.__dict__:
2040                 print(name,"is not a possible harvesting type. Available are",harvestingConfig.__dict__.keys())
2041                 # trigger hard error, like for other sequence types
2042                 getattr(self.process, name)
2043                 continue
2044             harvestingstream = getattr(harvestingConfig,name)
2045             if isinstance(harvestingstream,cms.Path):
2046                 self.schedule.append(harvestingstream)
2047                 self.blacklist_paths.append(harvestingstream)
2048             if isinstance(harvestingstream,cms.Sequence):
2049                 setattr(self.process,name+"_step",cms.Path(harvestingstream))
2050                 self.schedule.append(getattr(self.process,name+"_step"))
2051 
2052         self.scheduleSequence('DQMSaver','dqmsave_step')
2053         return
2054 
2055     def prepare_ALCAHARVEST(self, sequence = None):
2056         """ Enrich the process with AlCaHarvesting step """
2057         harvestingConfig = self.loadAndRemember(self.ALCAHARVESTDefaultCFF)
2058         sequence=sequence.split(".")[-1]
2059 
2060         # decide which AlcaHARVESTING paths to use
2061         harvestingList = sequence.split("+")
2062 
2063 
2064 
2065         from Configuration.AlCa.autoPCL import autoPCL
2066         self.expandMapping(harvestingList,autoPCL)
2067 
2068         for name in harvestingConfig.__dict__:
2069             harvestingstream = getattr(harvestingConfig,name)
2070             if name in harvestingList and isinstance(harvestingstream,cms.Path):
2071                 self.schedule.append(harvestingstream)
2072                 if isinstance(getattr(harvestingConfig,"ALCAHARVEST" + name + "_dbOutput"), cms.VPSet) and \
2073                    isinstance(getattr(harvestingConfig,"ALCAHARVEST" + name + "_metadata"), cms.VPSet):
2074                     self.executeAndRemember("process.PoolDBOutputService.toPut.extend(process.ALCAHARVEST" + name + "_dbOutput)")
2075                     self.executeAndRemember("process.pclMetadataWriter.recordsToMap.extend(process.ALCAHARVEST" + name + "_metadata)")
2076                 else:
2077                     self.executeAndRemember("process.PoolDBOutputService.toPut.append(process.ALCAHARVEST" + name + "_dbOutput)")
2078                     self.executeAndRemember("process.pclMetadataWriter.recordsToMap.append(process.ALCAHARVEST" + name + "_metadata)")
2079                 harvestingList.remove(name)
2080         # append the common part at the end of the sequence
2081         lastStep = getattr(harvestingConfig,"ALCAHARVESTDQMSaveAndMetadataWriter")
2082         self.schedule.append(lastStep)
2083 
2084         if len(harvestingList) != 0 and 'dummyHarvesting' not in harvestingList:
2085             print("The following harvesting sequences could not be found: ", harvestingList)
2086             raise Exception("The following harvesting sequences could not be found: "+str(harvestingList))
2087 
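    # Illustrative sketch (hypothetical PCL workflow name): if "BeamSpotByRun" were
    # requested and defined as a cms.Path in the AlCa harvesting config, the dumped
    # configuration would gain lines equivalent to
    #   process.PoolDBOutputService.toPut.extend(process.ALCAHARVESTBeamSpotByRun_dbOutput)
    #   process.pclMetadataWriter.recordsToMap.extend(process.ALCAHARVESTBeamSpotByRun_metadata)
    # (or .append(...) when the payload definitions are single PSets rather than VPSets).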
2088 
2089 
2090     def prepare_ENDJOB(self, sequence = 'endOfProcess'):
2091         self.loadDefaultOrSpecifiedCFF(sequence,self.ENDJOBDefaultCFF)
2092         self.scheduleSequenceAtEnd(sequence.split('.')[-1],'endjob_step')
2093         return
2094 
2095     def finalizeFastSimHLT(self):
2096         self.process.reconstruction = cms.Path(self.process.reconstructionWithFamos)
2097         self.schedule.append(self.process.reconstruction)
2098 
2099 
2100     def build_production_info(self, evt_type, evtnumber):
2101         """ Add useful info for the production. """
2102         self.process.configurationMetadata=cms.untracked.PSet(
2103             version=cms.untracked.string("$Revision: 1.19 $"),
2104             name=cms.untracked.string("Applications"),
2105             annotation=cms.untracked.string(evt_type+" nevts:"+str(evtnumber))
2106         )
2107 
2108         self.addedObjects.append(("Production Info","configurationMetadata"))
2109 
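        # For illustration (hypothetical event type), the PSet above is later dumped
        # into the generated configuration roughly as
        #   process.configurationMetadata = cms.untracked.PSet(
        #       version = cms.untracked.string('$Revision: 1.19 $'),
        #       name = cms.untracked.string('Applications'),
        #       annotation = cms.untracked.string('TTbar_13TeV_TuneCP5_cfi nevts:10')
        #   )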
2110 
2111     def create_process(self):
2112         self.pythonCfgCode =  "# Auto generated configuration file\n"
2113         self.pythonCfgCode += "# using: \n# "+__version__[1:-1]+"\n# "+__source__[1:-1]+'\n'
2114         self.pythonCfgCode += "# with command line options: "+self._options.arguments+'\n'
2115         self.pythonCfgCode += "import FWCore.ParameterSet.Config as cms\n\n"
2116 
2117         # now set up the modifiers
2118         modifiers=[]
2119         modifierStrings=[]
2120         modifierImports=[]
2121 
2122         if hasattr(self._options,"era") and self._options.era:
2123             # Multiple eras can be specified in a comma-separated list
2124             from Configuration.StandardSequences.Eras import eras
2125             for requestedEra in self._options.era.split(",") :
2126                 modifierStrings.append(requestedEra)
2127                 modifierImports.append(eras.pythonCfgLines[requestedEra])
2128                 modifiers.append(getattr(eras,requestedEra))
2129 
2130 
2131         if hasattr(self._options,"procModifiers") and self._options.procModifiers:
2132             import importlib
2133             thingsImported=[]
2134             for c in self._options.procModifiers:
2135                 thingsImported.extend(c.split(","))
2136             for pm in thingsImported:
2137                 modifierStrings.append(pm)
2138                 modifierImports.append('from Configuration.ProcessModifiers.'+pm+'_cff import '+pm)
2139                 modifiers.append(getattr(importlib.import_module('Configuration.ProcessModifiers.'+pm+'_cff'),pm))
2140 
2141         self.pythonCfgCode += '\n'.join(modifierImports)+'\n\n'
2142         self.pythonCfgCode += "process = cms.Process('"+self._options.name+"'" # Start of the line, finished after the loop
2143 
2144 
2145         if len(modifierStrings)>0:
2146             self.pythonCfgCode+= ','+','.join(modifierStrings)
2147         self.pythonCfgCode+=')\n\n'
2148 
2149         #yes, the cfg code gets out of sync here if a process is passed in. That could be fixed in the future
2150         #assuming there is some way for the fwk to get the list of modifiers (and their stringified name)
2151         if self.process is None:
2152             if len(modifiers)>0:
2153                 self.process = cms.Process(self._options.name,*modifiers)
2154             else:
2155                 self.process = cms.Process(self._options.name)
2156 
2157 
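        # Illustrative result (hypothetical era and process modifier): with
        # --era Run3 and --procModifiers trackingMkFit the generated header would
        # contain lines equivalent to
        #   from Configuration.Eras.Era_Run3_cff import Run3
        #   from Configuration.ProcessModifiers.trackingMkFit_cff import trackingMkFit
        #   process = cms.Process('RECO',Run3,trackingMkFit)
        # while self.process is built in memory with the same cms.Modifier objects.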
2158 
2159 
2160     def prepare(self, doChecking = False):
2161         """ Prepare the configuration string and add missing pieces."""
2162 
2163         self.loadAndRemember(self.EVTCONTDefaultCFF)  #load the event contents regardless
2164         self.addMaxEvents()
2165         if self.with_input:
2166             self.addSource()
2167         self.addStandardSequences()
2168         # adding standard sequences might change the inputEventContent option, so the input commands are completed only afterwards
2169         self.completeInputCommand()
2170         self.addConditions()
2171 
2172 
2173         outputModuleCfgCode=""
2174         if 'HARVESTING' not in self.stepMap and 'ALCAHARVEST' not in self.stepMap and 'ALCAOUTPUT' not in self.stepMap and self.with_output:
2175             outputModuleCfgCode=self.addOutput()
2176 
2177         self.addCommon()
2178 
2179         self.pythonCfgCode += "# import of standard configurations\n"
2180         for module in self.imports:
2181             self.pythonCfgCode += ("process.load('"+module+"')\n")
2182 
2183         # production info
2184         if not hasattr(self.process,"configurationMetadata"):
2185             self.build_production_info(self._options.evt_type, self._options.number)
2186         else:
2187             #the PSet was added via a load
2188             self.addedObjects.append(("Production Info","configurationMetadata"))
2189 
2190         self.pythonCfgCode +="\n"
2191         for comment,object in self.addedObjects:
2192             if comment!="":
2193                 self.pythonCfgCode += "\n# "+comment+"\n"
2194             self.pythonCfgCode += dumpPython(self.process,object)
2195 
2196         # dump the output definition
2197         self.pythonCfgCode += "\n# Output definition\n"
2198         self.pythonCfgCode += outputModuleCfgCode
2199 
2200         # dump all additional outputs (e.g. alca or skim streams)
2201         self.pythonCfgCode += "\n# Additional output definition\n"
2202         # sort the keys to get a reproducible ordering of the additional outputs
2203         nl=sorted(self.additionalOutputs.keys())
2204         for name in nl:
2205             output = self.additionalOutputs[name]
2206             self.pythonCfgCode += "process.%s = %s" %(name, output.dumpPython())
2207             tmpOut = cms.EndPath(output)
2208             setattr(self.process,name+'OutPath',tmpOut)
2209             self.schedule.append(tmpOut)
2210 
2211         # dump all additional commands
2212         self.pythonCfgCode += "\n# Other statements\n"
2213         for command in self.additionalCommands:
2214             self.pythonCfgCode += command + "\n"
2215 
2216         #comma separated list of objects that deserve to be inlined in the configuration (typically from a modified config deep down)
2217         for object in self._options.inlineObjets.split(','):
2218             if not object:
2219                 continue
2220             if not hasattr(self.process,object):
2221                 print('cannot inline -'+object+'- : not known')
2222             else:
2223                 self.pythonCfgCode +='\n'
2224                 self.pythonCfgCode +=dumpPython(self.process,object)
2225 
2226         if self._options.pileup=='HiMixEmbGEN':
2227             self.pythonCfgCode += "\nprocess.generator.embeddingMode=cms.int32(1)\n"
2228 
2229         # dump all paths
2230         self.pythonCfgCode += "\n# Path and EndPath definitions\n"
2231         for path in self.process.paths:
2232             if getattr(self.process,path) not in self.blacklist_paths:
2233                 self.pythonCfgCode += dumpPython(self.process,path)
2234 
2235         for endpath in self.process.endpaths:
2236             if getattr(self.process,endpath) not in self.blacklist_paths:
2237                 self.pythonCfgCode += dumpPython(self.process,endpath)
2238 
2239         # dump the schedule
2240         self.pythonCfgCode += "\n# Schedule definition\n"
2241 
2242         # handling of the schedule
2243         pathNames = ['process.'+p.label_() for p in self.schedule]
2244         if self.process.schedule is None:
2245             self.process.schedule = cms.Schedule()
2246             for item in self.schedule:
2247                 self.process.schedule.append(item)
2248             result = 'process.schedule = cms.Schedule('+','.join(pathNames)+')\n'
2249         else:
2250             if not isinstance(self.scheduleIndexOfFirstHLTPath, int):
2251                 raise Exception('the schedule was imported from a cff in HLTrigger.Configuration, but the final index of the first HLT path is undefined')
2252 
2253             for index, item in enumerate(self.schedule):
2254                 if index < self.scheduleIndexOfFirstHLTPath:
2255                     self.process.schedule.insert(index, item)
2256                 else:
2257                     self.process.schedule.append(item)
2258 
2259             result = "# process.schedule imported from cff in HLTrigger.Configuration\n"
2260             for index, item in enumerate(pathNames[:self.scheduleIndexOfFirstHLTPath]):
2261                 result += 'process.schedule.insert('+str(index)+', '+item+')\n'
2262             if self.scheduleIndexOfFirstHLTPath < len(pathNames):
2263                 result += 'process.schedule.extend(['+','.join(pathNames[self.scheduleIndexOfFirstHLTPath:])+'])\n'
2264 
2265         self.pythonCfgCode += result
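        # Illustrative outcomes (hypothetical path names): without an imported HLT
        # schedule this emits e.g.
        #   process.schedule = cms.Schedule(process.raw2digi_step,process.reconstruction_step,process.endjob_step)
        # whereas with an HLT-provided schedule the non-HLT paths are emitted as
        #   process.schedule.insert(0, process.generation_step)
        #   process.schedule.extend([process.endjob_step,process.RECOSIMoutput_step])
        # i.e. inserted in front of the HLT paths or appended after them.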
2266 
2267         for labelToAssociate in self.labelsToAssociate:
2268             self.process.schedule.associate(getattr(self.process, labelToAssociate))
2269             self.pythonCfgCode += 'process.schedule.associate(process.' + labelToAssociate + ')\n'
2270 
2271         from PhysicsTools.PatAlgos.tools.helpers import associatePatAlgosToolsTask
2272         associatePatAlgosToolsTask(self.process)
2273         self.pythonCfgCode+="from PhysicsTools.PatAlgos.tools.helpers import associatePatAlgosToolsTask\n"
2274         self.pythonCfgCode+="associatePatAlgosToolsTask(process)\n"
2275 
2276         overrideThreads = (self._options.nThreads != "1")
2277         overrideConcurrentLumis = (self._options.nConcurrentLumis != defaultOptions.nConcurrentLumis)
2278         overrideConcurrentIOVs = (self._options.nConcurrentIOVs != defaultOptions.nConcurrentIOVs)
2279 
2280         if overrideThreads or overrideConcurrentLumis or overrideConcurrentIOVs:
2281             self.pythonCfgCode +="\n"
2282             self.pythonCfgCode +="#Setup FWK for multithreaded\n"
2283             if overrideThreads:
2284                 self.pythonCfgCode +="process.options.numberOfThreads = "+self._options.nThreads+"\n"
2285                 self.pythonCfgCode +="process.options.numberOfStreams = "+self._options.nStreams+"\n"
2286                 self.process.options.numberOfThreads = int(self._options.nThreads)
2287                 self.process.options.numberOfStreams = int(self._options.nStreams)
2288             if overrideConcurrentLumis:
2289                 self.pythonCfgCode +="process.options.numberOfConcurrentLuminosityBlocks = "+self._options.nConcurrentLumis+"\n"
2290                 self.process.options.numberOfConcurrentLuminosityBlocks = int(self._options.nConcurrentLumis)
2291             if overrideConcurrentIOVs:
2292                 self.pythonCfgCode +="process.options.eventSetup.numberOfConcurrentIOVs = "+self._options.nConcurrentIOVs+"\n"
2293                 self.process.options.eventSetup.numberOfConcurrentIOVs = int(self._options.nConcurrentIOVs)
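        # For example (illustrative values), running with nThreads=4 and the default
        # nStreams emits
        #   process.options.numberOfThreads = 4
        #   process.options.numberOfStreams = 0
        # into the dumped configuration and applies the same values to self.process.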
2294 
2295         if self._options.accelerators is not None:
2296             accelerators = self._options.accelerators.split(',')
2297             self.pythonCfgCode += "\n"
2298             self.pythonCfgCode += "# Enable only these accelerator backends\n"
2299             self.pythonCfgCode += "process.load('Configuration.StandardSequences.Accelerators_cff')\n"
2300             self.pythonCfgCode += "process.options.accelerators = ['" + "', '".join(accelerators) + "']\n"
2301             self.process.load('Configuration.StandardSequences.Accelerators_cff')
2302             self.process.options.accelerators = accelerators
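        # e.g. (illustrative) --accelerators gpu-nvidia would yield
        #   process.options.accelerators = ['gpu-nvidia']
        # in addition to loading Configuration.StandardSequences.Accelerators_cff.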
2303 
2304         #repacked version
2305         if self._options.isRepacked:
2306             self.pythonCfgCode +="\n"
2307             self.pythonCfgCode +="from Configuration.Applications.ConfigBuilder import MassReplaceInputTag\n"
2308             self.pythonCfgCode +="MassReplaceInputTag(process, new=\"rawDataMapperByLabel\", old=\"rawDataCollector\")\n"
2309             MassReplaceInputTag(self.process, new="rawDataMapperByLabel", old="rawDataCollector")
2310 
2311         # special treatment in case of production filter sequence 2/2
2312         if self.productionFilterSequence and not (self._options.pileup=='HiMixEmbGEN'):
2313             self.pythonCfgCode +='# filter all paths with the production filter sequence\n'
2314             self.pythonCfgCode +='for path in process.paths:\n'
2315             if len(self.conditionalPaths):
2316                 self.pythonCfgCode +='\tif not path in %s: continue\n'%str(self.conditionalPaths)
2317             if len(self.excludedPaths):
2318                 self.pythonCfgCode +='\tif path in %s: continue\n'%str(self.excludedPaths)
2319             self.pythonCfgCode +='\tgetattr(process,path).insert(0, process.%s)\n'%(self.productionFilterSequence,)
2320             pfs = getattr(self.process,self.productionFilterSequence)
2321             for path in self.process.paths:
2322                 if not path in self.conditionalPaths: continue
2323                 if path in self.excludedPaths: continue
2324                 getattr(self.process,path).insert(0, pfs)
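        # Net effect (sketch): each selected path becomes
        #   cms.Path(process.<productionFilterSequence> + <original path contents>)
        # so modules behind the generator filter only run for accepted events.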
2325 
2326 
2327         # dump customise fragment
2328         self.pythonCfgCode += self.addCustomise()
2329 
2330         if self._options.runUnscheduled:
2331             print("--runUnscheduled is deprecated and no longer necessary; it will be removed soon. Please update your command line.")
2332         # Keep the "unscheduled customise functions" separate for now;
2333         # there are customise functions given by users (in our unit
2334         # tests) that need to be run before the "unscheduled customise
2335         # functions".
2336         self.pythonCfgCode += self.addCustomise(1)
2337 
2338         self.pythonCfgCode += self.addCustomiseCmdLine()
2339 
2340         if hasattr(self.process,"logErrorHarvester"):
2341             #configure logErrorHarvester to wait for same EDProducers to finish as the OutputModules
2342             self.pythonCfgCode +="\n#Have logErrorHarvester wait for the same EDProducers to finish as those providing data for the OutputModule\n"
2343             self.pythonCfgCode +="from FWCore.Modules.logErrorHarvester_cff import customiseLogErrorHarvesterUsingOutputCommands\n"
2344             self.pythonCfgCode +="process = customiseLogErrorHarvesterUsingOutputCommands(process)\n"
2345             from FWCore.Modules.logErrorHarvester_cff import customiseLogErrorHarvesterUsingOutputCommands
2346             self.process = customiseLogErrorHarvesterUsingOutputCommands(self.process)
2347 
2348         # Temporary hack to put the early delete customization after
2349         # everything else
2350         #
2351         # FIXME: remove when no longer needed
2352         self.pythonCfgCode += "\n# Add early deletion of temporary data products to reduce peak memory need\n"
2353         self.pythonCfgCode += "from Configuration.StandardSequences.earlyDeleteSettings_cff import customiseEarlyDelete\n"
2354         self.pythonCfgCode += "process = customiseEarlyDelete(process)\n"
2355         self.pythonCfgCode += "# End adding early deletion\n"
2356         from Configuration.StandardSequences.earlyDeleteSettings_cff import customiseEarlyDelete
2357         self.process = customiseEarlyDelete(self.process)
2358 
2359         imports = cms.specialImportRegistry.getSpecialImports()
2360         if len(imports) > 0:
2361             #need to inject this at the top
2362             index = self.pythonCfgCode.find("import FWCore.ParameterSet.Config")
2363             #now find the end of line
2364             index = self.pythonCfgCode.find("\n",index)
2365             self.pythonCfgCode = self.pythonCfgCode[:index]+ "\n" + "\n".join(imports)+"\n" +self.pythonCfgCode[index:]
2366 
2367 
2368         # make the .io file
2369 
2370         if self._options.io:
2372             if not self._options.io.endswith('.io'): self._options.io+='.io'
2373             io=open(self._options.io,'w')
2374             ioJson={}
2375             if hasattr(self.process.source,"fileNames"):
2376                 if len(self.process.source.fileNames.value()):
2377                     ioJson['primary']=self.process.source.fileNames.value()
2378             if hasattr(self.process.source,"secondaryFileNames"):
2379                 if len(self.process.source.secondaryFileNames.value()):
2380                     ioJson['secondary']=self.process.source.secondaryFileNames.value()
2381             if self._options.pileup_input and (self._options.pileup_input.startswith('dbs:') or self._options.pileup_input.startswith('das:')):
2382                 ioJson['pileup']=self._options.pileup_input[4:]
2383             for (o,om) in self.process.outputModules_().items():
2384                 ioJson[o]=om.fileName.value()
2385             ioJson['GT']=self.process.GlobalTag.globaltag.value()
2386             if self.productionFilterSequence:
2387                 ioJson['filter']=self.productionFilterSequence
2388             import json
2389             io.write(json.dumps(ioJson))
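            # Illustrative .io content (hypothetical names and placeholder values):
            #   {"primary": ["/store/data/.../file.root"],
            #    "RECOSIMoutput": "output.root",
            #    "GT": "<resolved global tag>",
            #    "filter": "ProductionFilterSequence"}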
2390         return
2391