#!/usr/bin/env cmsRun
from __future__ import print_function

import os       # used below for path handling (os.path.join, os.makedirs, os.system, ...)
import shutil
import sys

from Validation.RecoTau.ValidationOptions_cff import *

process = cms.Process("TEST")

# command options defined in Validation/RecoTau/python/ValidationOptions_cfi
options.parseArguments()

checkOptionsForBadInput()

## if not calledBycmsRun() and not options.gridJob:
##    print "Run 'cmsRun RunTauValidation_cfg.py help' for options."
##    # quit here so we don't create a bunch of directories
##    #  if the user only wants the help
##    sys.exit()

# Make sure we don't clobber another directory! Skip in batch mode (runs from an LSF machine)
if not CMSSWEnvironmentIsCurrent() and options.batchNumber == -1 and not options.gridJob:
   print("CMSSW_BASE points to a different directory, please rerun cmsenv!")
   sys.exit()


# DQM store, PDT sources, etc.
process.load("Configuration.StandardSequences.Services_cff")

######################################
#                                    #
#       Output Info Store            #
#                                    #
######################################

0037 """
0038    Data is stored in
0039 
0040    TauID/[EventType]_[DataSource]_[Conditions][label]
0041 
0042 """

#outputDirName = "Validation_%s" % ReleaseVersion
outputDirName = "TauID"


outputDir = os.path.join(os.getcwd(), outputDirName)
# Base directory where information about the current configuration is stored
outputBaseDir = outputDir

subDirName = ""

subDirName += "%s_%s" % (options.eventType, options.dataSource)

if options.conditions != "whatever":
   subDirName += "_%s" % options.conditions.replace('::', '_')

if options.label != "none":
   subDirName += "_" + options.label

outputDir = os.path.join(outputDir, subDirName)

# Store configuration, showtags, etc. in a subdirectory
configDir = os.path.join(outputDir, "Config")

if os.path.exists(outputDir) and options.batchNumber < 0: # and not options.gridJob:
   print("Output directory %s already exists!  OK to overwrite?" % outputDir)
   # raw_input() only exists in Python 2; fall back to the builtin input() on Python 3
   try:
      ask = raw_input
   except NameError:
      ask = input
   while True:
      answer = ask("Please enter [y/n] ")
      if answer == 'y':
         break
      elif answer == 'n':
         print(" ...exiting.")
         sys.exit()

if not os.path.exists(outputDir):
   os.makedirs(outputDir)

if not os.path.exists(configDir):
   os.makedirs(configDir)

######################################
#                                    #
#       Data Source Setup            #
#                                    #
######################################

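# Copy the chosen data-source cff file into the Config directory for bookkeeping,
# then load it into the process.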
def LoadDataCffFile(theFile):
   if not os.path.isfile(theFile):
      print("Error - %s is not a file!" % theFile)
      sys.exit()
   outputFile = os.path.join(configDir, "DataSource_cff.py")
   shutil.copy(theFile, outputFile)
   process.load(theFile.replace(".py", ""))

myFile = options.sourceFile
if myFile == 'none':
   myFile = "EventSource_%s_RECO_cff.py" % options.eventType
   #myFile = os.path.join(ReleaseBase, "Validation/RecoTau/test", "EventSource_%s_RECO_cff.py" % options.eventType)
LoadDataCffFile(myFile)
# Re-run the PFTau reconstruction
process.load("Configuration.StandardSequences.GeometryRecoDB_cff")
process.load("Configuration.StandardSequences.FrontierConditions_GlobalTag_cff")
process.load("Configuration.StandardSequences.MagneticField_cff")
process.load("RecoTauTag.Configuration.RecoPFTauTag_cff")
process.hpsSequence = cms.Sequence(process.recoTauCommonSequence * process.recoTauClassicHPSSequence)

process.GlobalTag.globaltag = options.conditions

# The maximum number of events has to be set here, since it may already have been set
# by the dataSource cffs
process.maxEvents = cms.untracked.PSet(
    input = cms.untracked.int32(options.maxEvents)
)

# Skip events if we are running in batch mode on files
if options.batchNumber >= 0 and options.dataSource.find('Files') != -1:
   process.source.skipEvents = cms.untracked.uint32(options.batchNumber*options.maxEvents)
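# For example (hypothetical numbers): batchNumber=3 with maxEvents=1000 skips the first
# 3000 events, so each batch job processes its own disjoint slice of the input files.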

######################################
#                                    #
#       Validation Setup             #
#                                    #
######################################

# Store the CVS tags, the diffs with respect to those tags, and the current release;
#  only do this once in a batch job.  The additional tar file is a fail-safe:
#  the parameters shouldn't change in outputDir.
if options.batchNumber <= 0: # and not options.gridJob:
   os.system("cd $CMSSW_BASE/src; \
              showtags -t -r > showtags.txt; \
              cvs -q diff >& diffToTags.patch;\
              cvs -q diff -r %s >& diffToVanillaRelease.patch; \
              tar -cvzf TagsAndDiff.tar.gz showtags.txt *.patch; \
              mv showtags.txt *.patch %s; \
              mv TagsAndDiff.tar.gz %s" % (ReleaseVersion, configDir, configDir))

if options.batchNumber >= 0:
   # store the batch-produced ROOT files in a subdirectory
   outputDir = os.path.join(outputDir, "BatchJobs")
   if not os.path.exists(outputDir):
      os.mkdir(outputDir)

# Validation output file
outputFileNameBase = "TauVal_%s" % ReleaseVersion
if options.label != "none":
   outputFileNameBase += "_%s" % options.label
outputFileNameBase += "_"
outputFileNameBase += options.eventType

if options.batchNumber >= 0:
   outputFileNameBase += "_%i" % options.batchNumber
   options.writeEDMFile = options.writeEDMFile.replace(".root", "_%i.root" % options.batchNumber)
outputFileNameBase += "_DBScan.root"

if options.gridJob:
   outputFileName = 'TauVal_GridJob.root'
else:
   outputFileName = os.path.join(outputDir, outputFileNameBase)
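# Illustrative (hypothetical) result: release CMSSW_4_2_0, label myTest, eventType ZTT and
# batchNumber 3 give a file named TauVal_CMSSW_4_2_0_myTest_ZTT_3_DBScan.root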

print('The output file will be: ' + outputFileName)
if options.gridJob:
   # make sure the output file name configured in crab.cfg matches the one used here
   with open('./crab.cfg', 'r') as cfg:
      cfgContent = cfg.read()
   if cfgContent.find(outputFileName) == -1:
      print("ERROR: the CRAB output file does not match the grid one!\nexiting...")
      sys.exit()

process.saveTauEff = cms.EDAnalyzer("TauDQMSimpleFileSaver",
  outputFileName = cms.string(outputFileName)
)

process.load("Validation.RecoTau.ValidateTausOn%s_cff" % options.eventType)

# Set the cuts to what was defined in the 2/5/11 meeting
process.hpsPFTauDiscriminationByVLooseIsolation.qualityCuts.pvFindingAlgo = 'highestWeightForLeadTrack'
process.hpsPFTauDiscriminationByVLooseIsolation.ApplyDiscriminationByECALIsolation = False
process.hpsPFTauDiscriminationByVLooseIsolation.applyDeltaBetaCorrection = False
process.hpsPFTauDiscriminationByVLooseIsolation.qualityCuts.signalQualityCuts.minTrackVertexWeight = -1
process.hpsPFTauDiscriminationByVLooseIsolation.qualityCuts.isolationQualityCuts.minTrackVertexWeight = -1

process.hpsPFTauDiscriminationByMediumIsolation.qualityCuts.pvFindingAlgo = 'highestWeightForLeadTrack'
process.hpsPFTauDiscriminationByMediumIsolation.ApplyDiscriminationByECALIsolation = False
process.hpsPFTauDiscriminationByMediumIsolation.applyDeltaBetaCorrection = False
process.hpsPFTauDiscriminationByMediumIsolation.qualityCuts.signalQualityCuts.minTrackVertexWeight = -1
process.hpsPFTauDiscriminationByMediumIsolation.qualityCuts.isolationQualityCuts.minTrackVertexWeight = -1

process.hpsPFTauDiscriminationByLooseIsolation.qualityCuts.pvFindingAlgo = 'highestWeightForLeadTrack'
process.hpsPFTauDiscriminationByLooseIsolation.ApplyDiscriminationByECALIsolation = False
process.hpsPFTauDiscriminationByLooseIsolation.applyDeltaBetaCorrection = False
process.hpsPFTauDiscriminationByLooseIsolation.qualityCuts.signalQualityCuts.minTrackVertexWeight = -1
process.hpsPFTauDiscriminationByLooseIsolation.qualityCuts.isolationQualityCuts.minTrackVertexWeight = -1

process.hpsPFTauDiscriminationByTightIsolation.qualityCuts.pvFindingAlgo = 'highestWeightForLeadTrack'
process.hpsPFTauDiscriminationByTightIsolation.ApplyDiscriminationByECALIsolation = False
process.hpsPFTauDiscriminationByTightIsolation.applyDeltaBetaCorrection = False
process.hpsPFTauDiscriminationByTightIsolation.qualityCuts.signalQualityCuts.minTrackVertexWeight = -1
process.hpsPFTauDiscriminationByTightIsolation.qualityCuts.isolationQualityCuts.minTrackVertexWeight = -1


process.preValidation = cms.Sequence(process.recoTauCommonSequence)

process.validation = cms.Sequence(
   process.ak5PFJetsLegacyHPSPiZeros *
   process.combinatoricRecoTaus *
   process.produceAndDiscriminateHPSPFTaus *
   process.produceDenominator *
   process.runTauValidationBatchMode # in batch mode, the efficiencies are not computed - only the num/denom
   )

import PhysicsTools.PatAlgos.tools.helpers as configtools

process.vtxStudy = cms.Sequence()
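# vtxStudy starts out empty; one cloned validation sequence per dz cut is added to it in the loop below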

#---------------------------------------------------------------------------------
#               Cloning process to scan over several DzCuts
#---------------------------------------------------------------------------------
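# For each dz cut the validation sequence (and every module in it) is cloned with a
# 'DZCut<value*100>' suffix, i.e. DZCut5, DZCut10, DZCut15 and DZCut20.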
dzCuts = [0.05, 0.10, 0.15, 0.20]
for dzCut in dzCuts:
   # Apply the dz cut to all isolation working points before cloning
   #print 'creating '+addedLabel
   process.hpsPFTauDiscriminationByVLooseIsolation.qualityCuts.signalQualityCuts.maxDeltaZ = dzCut
   process.hpsPFTauDiscriminationByVLooseIsolation.qualityCuts.isolationQualityCuts.maxDeltaZ = dzCut
   process.hpsPFTauDiscriminationByMediumIsolation.qualityCuts.signalQualityCuts.maxDeltaZ = dzCut
   process.hpsPFTauDiscriminationByMediumIsolation.qualityCuts.isolationQualityCuts.maxDeltaZ = dzCut
   process.hpsPFTauDiscriminationByLooseIsolation.qualityCuts.signalQualityCuts.maxDeltaZ = dzCut
   process.hpsPFTauDiscriminationByLooseIsolation.qualityCuts.isolationQualityCuts.maxDeltaZ = dzCut
   process.hpsPFTauDiscriminationByTightIsolation.qualityCuts.signalQualityCuts.maxDeltaZ = dzCut
   process.hpsPFTauDiscriminationByTightIsolation.qualityCuts.isolationQualityCuts.maxDeltaZ = dzCut

   addedLabel = 'DZCut%i' % (int(dzCut*100))
   configtools.cloneProcessingSnippet(process, process.validation, addedLabel)
   # check that we did everything correctly
   assert( hasattr(process, 'validation%s' % (addedLabel)) )
   assert( getattr(process, 'hpsPFTauDiscriminationByVLooseIsolation%s' % (addedLabel)).qualityCuts.signalQualityCuts.maxDeltaZ == dzCut )
   assert( getattr(process, 'hpsPFTauDiscriminationByMediumIsolation%s' % (addedLabel)).qualityCuts.signalQualityCuts.maxDeltaZ == dzCut )
   assert( getattr(process, 'hpsPFTauDiscriminationByLooseIsolation%s' % (addedLabel)).qualityCuts.signalQualityCuts.maxDeltaZ == dzCut )
   assert( getattr(process, 'hpsPFTauDiscriminationByTightIsolation%s' % (addedLabel)).qualityCuts.signalQualityCuts.maxDeltaZ == dzCut )
   process.vtxStudy += getattr(process, 'validation%s' % (addedLabel))
   assert( hasattr(process, 'RunHPSValidation%s' % (addedLabel)) )
   # point the cloned validation modules at the cloned discriminators
   for entry in getattr(process, 'RunHPSValidation%s' % (addedLabel)).discriminators:
      entry.discriminator = entry.discriminator.value() + addedLabel
      #print addedLabel+' created'


#process.validation *= process.saveTauEff #save the output

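# Schedule: run the common pre-validation step first, then all cloned dz-cut validation
# sequences, and finally save the resulting histograms.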
process.preValPath = cms.Path(process.preValidation)
process.vtxPath = cms.Path(process.vtxStudy)
process.savePath = cms.Path(process.saveTauEff)

process.schedule = cms.Schedule()
process.schedule.append(process.preValPath)
process.schedule.append(process.vtxPath)
process.schedule.append(process.savePath)

process.load("RecoTauTag.Configuration.RecoTauTag_EventContent_cff")

TauTagValOutputCommands = cms.PSet(
   outputCommands = cms.untracked.vstring(
      'drop *',
      'keep recoPFCandidates_*_*_*',
      'keep *_genParticles*_*_*',
      'keep *_iterativeCone5GenJets_*_*',
      'keep *_tauGenJets*_*_*',
      'keep *_selectedGenTauDecays*_*_*'
   )
)

TauTagValOutputCommands.outputCommands.extend(process.RecoTauTagRECO.outputCommands)

######################################
#                                    #
#       CFG dump                     #
#                                    #
######################################
#process.Timing = cms.Service("Timing",
#         useJobReport = cms.untracked.bool(True)
#    )
#process.SimpleMemoryCheck = cms.Service("SimpleMemoryCheck",
#         useJobReport = cms.untracked.bool(True)
#    )

# Dump the full configuration to a file and close it properly
with open('VtxTest.py', 'w') as processDumpFile:
   print(process.dumpPython(), file=processDumpFile)
# if this is a grid job, end here
## if not options.gridJob:

##    dumpFileName = "cfgDump"
##    if options.batchNumber >= 0:
##       dumpFileName += "_"
##       dumpFileName += str(options.batchNumber)

##    dumpFileName += ".py"

##    processDumpFile = open('%s/%s' % (configDir, dumpFileName), 'w')

##    print >> processDumpFile, process.dumpPython()