"""
_Test_

Test Scenario implementation for unittests/development purposes

Not for use with data taking

"""

from Configuration.DataProcessing.Scenario import Scenario
# ConfigBuilder and defaultOptions are needed by dqmHarvesting() below
from Configuration.Applications.ConfigBuilder import ConfigBuilder, defaultOptions
import FWCore.ParameterSet.Config as cms


class Test(Scenario):
    """
    _Test_

    Test Scenario

    """
    def __init__(self):
        Scenario.__init__(self)


    def promptReco(self, globalTag):
        """
        _promptReco_

        Returns skeleton process object

        """
        return cms.Process("RECO", self.eras)


    def expressProcessing(self, globalTag):
        """
        _expressProcessing_

        Returns skeleton process object

        """
        return cms.Process("Express", self.eras)


    def alcaSkim(self, skims):
        """
        _alcaSkim_

        Returns skeleton process object

        """
        return cms.Process("ALCARECO", self.eras)

    def dqmHarvesting(self, datasetName, runNumber, globalTag, **args):
        """
        _dqmHarvesting_

        Build a DQM harvesting configuration.

        This method can be used to test an extra scenario; all the
        ConfigBuilder options can be overridden through **args, which is
        useful for testing with real jobs.

        Arguments:

        datasetName - aka workflow name for DQMServer, this is the name of the
                      dataset containing the harvested run
        runNumber   - The run being harvested
        globalTag   - The global tag being used
        inputFiles  - The list of LFNs being harvested

        """
        # Start from the standard ConfigBuilder defaults and override the
        # settings needed for a cosmics-style harvesting step
        options = defaultOptions
        options.scenario = "cosmics"
        options.step = "HARVESTING:dqmHarvesting"
        options.isMC = False
        options.isData = True
        options.beamspot = None
        options.eventcontent = None
        options.name = "EDMtoMEConvert"
        options.conditions = "FrontierConditions_GlobalTag,%s" % globalTag
        options.arguments = ""
        options.evt_type = ""
        options.filein = []

        # Any of the ConfigBuilder options above can be overridden by the caller
        options.__dict__.update(args)

        process = cms.Process("HARVESTING", self.eras)
        process.source = cms.Source("PoolSource")
        configBuilder = ConfigBuilder(options, process=process)
        configBuilder.prepare()

        # Customise the source and the DQM saver for harvesting
        process.source.processingMode = cms.untracked.string('RunsAndLumis')
        process.source.fileNames = cms.untracked(cms.vstring())
        process.maxEvents.input = -1
        process.dqmSaver.workflow = datasetName
        if args.get('saveByLumiSection', ''):
            process.dqmSaver.saveByLumiSection = int(args['saveByLumiSection'])

        return process
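    # Usage sketch (illustrative only, not part of the production API): any of the
    # ConfigBuilder options set above can be overridden through keyword arguments,
    # e.g. a hypothetical call such as
    #
    #     scenario.dqmHarvesting("/Cosmics/Test-v1/DQMIO", 123456, "GLOBALTAG::All",
    #                            saveByLumiSection=1)
    #
    # would enable per-lumisection saving in dqmSaver. The dataset name, run number
    # and global tag shown here are placeholders, not real data-taking values.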
    def skimming(self, *skims):
        """
        _skimming_

        Returns skeleton process object

        """
        return cms.Process("Skimming", self.eras)
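# Minimal end-to-end sketch for development use (assumption: the standard
# Configuration.DataProcessing.GetScenario helper is available; the global tag
# below is a placeholder):
#
#     from Configuration.DataProcessing.GetScenario import getScenario
#     scenario = getScenario("Test")
#     process = scenario.promptReco("GLOBALTAG::All")
#     print(process.dumpPython())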