#!/usr/bin/env cmsRun

import os
import shutil
import sys

from Validation.RecoTau.ValidationOptions_cff import *

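# The wildcard import above is expected to provide the FWCore "cms" namespace,
# the command-line "options" object, and the helpers used below
# (checkOptionsForBadInput, CMSSWEnvironmentIsCurrent, ReleaseVersion).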
process = cms.Process("TEST")

# command-line options defined in Validation/RecoTau/python/ValidationOptions_cfi
options.parseArguments()

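# Typical invocation (file name and option values are illustrative only):
#   cmsRun RunTauValidation_cfg.py eventType=ZTT conditions=START311_V2::All maxEvents=200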
checkOptionsForBadInput()

## if not calledBycmsRun() and not options.gridJob:
##    print "Run 'cmsRun RunTauValidation_cfg.py help' for options."
##    # quit here so we don't create a bunch of directories
##    #  if the user only wants the help
##    sys.exit()

# Make sure we don't clobber another directory! Skip in batch mode (runs from an LSF machine).
if not CMSSWEnvironmentIsCurrent() and options.batchNumber == -1 and not options.gridJob:
   print("CMSSW_BASE points to a different directory, please rerun cmsenv!")
   sys.exit()


# DQM store, PDT sources, etc.
process.load("Configuration.StandardSequences.Services_cff")

######################################
#                                    #
#       Output Info Store            #
#                                    #
######################################

"""
   Data is stored in

   TauID/[EventType]_[DataSource]_[Conditions][label]

"""

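# For example (event type, data source, conditions, and label are illustrative):
#   TauID/ZTT_recoFiles_START311_V2_All_myLabel/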
#outputDirName = "Validation_%s" % ReleaseVersion
outputDirName = "TauID"


outputDir = os.path.join(os.getcwd(), outputDirName)
# Base directory where information about the current configuration is stored
outputBaseDir = outputDir

subDirName = ""

subDirName += "%s_%s" % (options.eventType, options.dataSource)

if options.conditions != "whatever":
   subDirName += "_%s" % options.conditions.replace('::', '_')

if (options.label != "none"):
   subDirName += "_" + options.label

outputDir = os.path.join(outputDir, subDirName)

# Store configuration, showtags, etc. in a subdirectory
configDir = os.path.join(outputDir, "Config")

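# configDir collects bookkeeping for this run: the copied data-source cff
# and the showtags/CVS-diff dumps produced below.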
if os.path.exists(outputDir) and options.batchNumber < 0:# and not options.gridJob:
   print("Output directory %s already exists!  OK to overwrite?" % outputDir)
   while True:
      answer = input("Please enter [y/n] ")
      if (answer == 'y'):
         break
      elif (answer == 'n'):
         print(" ...exiting.")
         sys.exit()

if not os.path.exists(outputDir):
   os.makedirs(outputDir)

if not os.path.exists(configDir):
   os.makedirs(configDir)

######################################
#                                    #
#       Data Source Setup            #
#                                    #
######################################

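# LoadDataCffFile copies the given data-source cff into configDir (as
# DataSource_cff.py, for bookkeeping) and then loads it into the process.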
def LoadDataCffFile(theFile):
   if not os.path.isfile(theFile):
      print("Error - %s is not a file!" % theFile)
      sys.exit()
   outputFile = os.path.join(configDir, "DataSource_cff.py")
   shutil.copy(theFile, outputFile)
   process.load(theFile.replace(".py", ""))

myFile = options.sourceFile
if myFile == 'none':
   myFile = "EventSource_%s_RECO_cff.py" % options.eventType
   #myFile = os.path.join(ReleaseBase, "Validation/RecoTau/test", "EventSource_%s_RECO_cff.py" % options.eventType)
LoadDataCffFile(myFile)

# Rerun the PFTau reconstruction on the input events
process.load("Configuration.StandardSequences.GeometryRecoDB_cff")
process.load("Configuration.StandardSequences.FrontierConditions_GlobalTag_cff")
process.load("Configuration.StandardSequences.MagneticField_cff")
process.load("RecoTauTag.Configuration.RecoPFTauTag_cff")
process.hpsSequence = cms.Sequence( process.recoTauCommonSequence*process.recoTauClassicHPSSequence )

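# The detector conditions (GlobalTag) come from the command-line 'conditions' option.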
process.GlobalTag.globaltag = options.conditions

# Set maxEvents here (after loading the data source), since the
# dataSource cffs may set it themselves
process.maxEvents = cms.untracked.PSet(
    input = cms.untracked.int32(options.maxEvents)
)

# Skip events if we are running in batch mode on files
if options.batchNumber >= 0 and options.dataSource.find('Files') != -1:
   process.source.skipEvents = cms.untracked.uint32(options.batchNumber*options.maxEvents)
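   # e.g. with maxEvents=100, batch job 3 skips the first 300 input events
   # and processes the next 100 (numbers are illustrative)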

######################################
#                                    #
#       Validation Setup             #
#                                    #
######################################

# Store the tags, the CVS diffs relative to the tags and to the vanilla release,
# and the current release. Only do this once in a batch job. The additional tar
# file is a fail-safe; the parameters shouldn't change in outputDir.
if (options.batchNumber <= 0 ):#and not options.gridJob):
   os.system("cd $CMSSW_BASE/src; \
              showtags -t -r > showtags.txt; \
              cvs -q diff >& diffToTags.patch;\
              cvs -q diff -r %s >& diffToVanillaRelease.patch; \
              tar -cvzf TagsAndDiff.tar.gz showtags.txt *.patch; \
              mv showtags.txt *.patch %s; \
              mv TagsAndDiff.tar.gz %s" % (ReleaseVersion, configDir, configDir))

if options.batchNumber >= 0:
   # store the batch-produced ROOT files in a subdirectory
   outputDir = os.path.join(outputDir, "BatchJobs")
   if not os.path.exists(outputDir):
      os.mkdir(outputDir)

# Validation output file
outputFileNameBase = "TauVal_%s" % ReleaseVersion
if options.label != "none":
   outputFileNameBase += "_%s" % options.label
outputFileNameBase += "_"
outputFileNameBase += options.eventType

if options.batchNumber >= 0:
   outputFileNameBase += "_%i" % options.batchNumber
   options.writeEDMFile = options.writeEDMFile.replace(".root", "_%i.root" % options.batchNumber)
outputFileNameBase += "_DBScan.root"

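# The base name ends up looking like, e.g.,
#   TauVal_CMSSW_4_2_3_myLabel_ZTT_3_DBScan.root
# (release, label, event type, and batch number are illustrative)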
if options.gridJob:
   outputFileName = 'TauVal_GridJob.root'
else:
   outputFileName = os.path.join(outputDir, outputFileNameBase)

print('The output file will be: ' + outputFileName)
if options.gridJob:
   with open('./crab.cfg', 'r') as cfg:
      cfgContent = cfg.read()
   if cfgContent.find(outputFileName) == -1:
      print("ERROR: the CRAB output file does not match the grid job output file!\nexiting...")
      sys.exit()

process.saveTauEff = cms.EDAnalyzer("TauDQMSimpleFileSaver",
  outputFileName = cms.string(outputFileName)
)

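# saveTauEff writes the DQM histograms produced by the validation to the
# output ROOT file at the end of the job.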
process.load("Validation.RecoTau.ValidateTausOn%s_cff" % options.eventType)

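# The validation sequence is chosen from the event type, e.g.
# Validation.RecoTau.ValidateTausOnZTT_cff for eventType=ZTT (illustrative).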
# Set the cuts to what was defined in the 2/5/11 meeting
process.hpsPFTauDiscriminationByVLooseIsolation.qualityCuts.pvFindingAlgo = 'highestWeightForLeadTrack'
process.hpsPFTauDiscriminationByVLooseIsolation.ApplyDiscriminationByECALIsolation = False
process.hpsPFTauDiscriminationByVLooseIsolation.applyDeltaBetaCorrection = False
process.hpsPFTauDiscriminationByVLooseIsolation.qualityCuts.signalQualityCuts.minTrackVertexWeight = -1
process.hpsPFTauDiscriminationByVLooseIsolation.qualityCuts.isolationQualityCuts.minTrackVertexWeight = -1

process.hpsPFTauDiscriminationByMediumIsolation.qualityCuts.pvFindingAlgo = 'highestWeightForLeadTrack'
process.hpsPFTauDiscriminationByMediumIsolation.ApplyDiscriminationByECALIsolation = False
process.hpsPFTauDiscriminationByMediumIsolation.applyDeltaBetaCorrection = False
process.hpsPFTauDiscriminationByMediumIsolation.qualityCuts.signalQualityCuts.minTrackVertexWeight = -1
process.hpsPFTauDiscriminationByMediumIsolation.qualityCuts.isolationQualityCuts.minTrackVertexWeight = -1

process.hpsPFTauDiscriminationByLooseIsolation.qualityCuts.pvFindingAlgo = 'highestWeightForLeadTrack'
process.hpsPFTauDiscriminationByLooseIsolation.ApplyDiscriminationByECALIsolation = False
process.hpsPFTauDiscriminationByLooseIsolation.applyDeltaBetaCorrection = False
process.hpsPFTauDiscriminationByLooseIsolation.qualityCuts.signalQualityCuts.minTrackVertexWeight = -1
process.hpsPFTauDiscriminationByLooseIsolation.qualityCuts.isolationQualityCuts.minTrackVertexWeight = -1

process.hpsPFTauDiscriminationByTightIsolation.qualityCuts.pvFindingAlgo = 'highestWeightForLeadTrack'
process.hpsPFTauDiscriminationByTightIsolation.ApplyDiscriminationByECALIsolation = False
process.hpsPFTauDiscriminationByTightIsolation.applyDeltaBetaCorrection = False
process.hpsPFTauDiscriminationByTightIsolation.qualityCuts.signalQualityCuts.minTrackVertexWeight = -1
process.hpsPFTauDiscriminationByTightIsolation.qualityCuts.isolationQualityCuts.minTrackVertexWeight = -1
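# All four HPS isolation working points get the same treatment: lead-track PV
# finding, ECAL isolation and delta-beta corrections disabled, and no minimum
# track-vertex weight.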


process.preValidation = cms.Sequence(process.recoTauCommonSequence)

process.validation = cms.Sequence(
   process.ak5PFJetsLegacyHPSPiZeros *
   process.combinatoricRecoTaus *
   process.produceAndDiscriminateHPSPFTaus *
   process.produceDenominator *
   process.runTauValidationBatchMode # in batch mode the efficiencies are not computed, only the numerator/denominator histograms
  )

import PhysicsTools.PatAlgos.tools.helpers as configtools

process.vtxStudy = cms.Sequence()
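# vtxStudy collects one cloned validation sequence per dz cut (filled in the loop below)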

#---------------------------------------------------------------------------------
#               Cloning the process to scan over several dz cuts
#---------------------------------------------------------------------------------
dzCuts = [0.05, 0.10, 0.15, 0.20]
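# For each dz cut: change maxDeltaZ on all HPS isolation discriminators in
# place, clone the whole validation sequence with a 'DZCut<N>' suffix via
# cloneProcessingSnippet, and point the cloned histogram producers at the
# relabelled discriminators.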
for dzCut in dzCuts:
   # Change the dz cut on all HPS isolation discriminators in place
   process.hpsPFTauDiscriminationByVLooseIsolation.qualityCuts.signalQualityCuts.maxDeltaZ = dzCut
   process.hpsPFTauDiscriminationByVLooseIsolation.qualityCuts.isolationQualityCuts.maxDeltaZ = dzCut
   process.hpsPFTauDiscriminationByMediumIsolation.qualityCuts.signalQualityCuts.maxDeltaZ = dzCut
   process.hpsPFTauDiscriminationByMediumIsolation.qualityCuts.isolationQualityCuts.maxDeltaZ = dzCut
   process.hpsPFTauDiscriminationByLooseIsolation.qualityCuts.signalQualityCuts.maxDeltaZ = dzCut
   process.hpsPFTauDiscriminationByLooseIsolation.qualityCuts.isolationQualityCuts.maxDeltaZ = dzCut
   process.hpsPFTauDiscriminationByTightIsolation.qualityCuts.signalQualityCuts.maxDeltaZ = dzCut
   process.hpsPFTauDiscriminationByTightIsolation.qualityCuts.isolationQualityCuts.maxDeltaZ = dzCut

   addedLabel = 'DZCut%i' % (int(dzCut*100))
   configtools.cloneProcessingSnippet( process, process.validation, addedLabel)
   # sanity checks: the cloned sequence and modules exist and carry the new dz cut
   assert( hasattr(process, 'validation%s' % (addedLabel) ) )
   assert( getattr(process, 'hpsPFTauDiscriminationByVLooseIsolation%s' % (addedLabel) ).qualityCuts.signalQualityCuts.maxDeltaZ == dzCut )
   assert( getattr(process, 'hpsPFTauDiscriminationByMediumIsolation%s' % (addedLabel) ).qualityCuts.signalQualityCuts.maxDeltaZ == dzCut )
   assert( getattr(process, 'hpsPFTauDiscriminationByLooseIsolation%s' % (addedLabel) ).qualityCuts.signalQualityCuts.maxDeltaZ == dzCut )
   assert( getattr(process, 'hpsPFTauDiscriminationByTightIsolation%s' % (addedLabel) ).qualityCuts.signalQualityCuts.maxDeltaZ == dzCut )
   process.vtxStudy += getattr(process, 'validation%s' % (addedLabel) )
   assert( hasattr(process, 'RunHPSValidation%s' % (addedLabel)) )
   # point the cloned validation modules at the relabelled discriminators
   for entry in getattr(process, 'RunHPSValidation%s' % (addedLabel)).discriminators:
      entry.discriminator = entry.discriminator.value() + addedLabel



#process.validation *= process.saveTauEff #save the output

process.preValPath = cms.Path(process.preValidation)
process.vtxPath = cms.Path(process.vtxStudy)
process.savePath = cms.Path(process.saveTauEff)

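# Schedule: run the common pre-validation first, then the dz-cut scan, and
# finally the file saver so the DQM file is written after the histograms are filled.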
process.schedule = cms.Schedule()
process.schedule.append(process.preValPath)
process.schedule.append(process.vtxPath)
process.schedule.append(process.savePath)

process.load("RecoTauTag.Configuration.RecoTauTag_EventContent_cff")

TauTagValOutputCommands = cms.PSet(
      outputCommands = cms.untracked.vstring('drop *',
         'keep recoPFCandidates_*_*_*',
         'keep *_genParticles*_*_*',
         'keep *_iterativeCone5GenJets_*_*',
         'keep *_tauGenJets*_*_*',
         'keep *_selectedGenTauDecays*_*_*'
         )
      )

TauTagValOutputCommands.outputCommands.extend(process.RecoTauTagRECO.outputCommands)
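# Event-content list for an optional EDM output file; no output module is
# actually added to this configuration.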

######################################
#                                    #
#       CFG dump                     #
#                                    #
######################################
#process.Timing = cms.Service("Timing",
#         useJobReport = cms.untracked.bool(True)
#    )
#process.SimpleMemoryCheck = cms.Service("SimpleMemoryCheck",
#         useJobReport = cms.untracked.bool(True)
#    )

# Dump the fully expanded configuration for reference
processDumpFile = open('VtxTest.py', 'w')
print(process.dumpPython(), file=processDumpFile)
processDumpFile.close()
# if this is a grid job, end here (the per-batch cfg dump below is disabled)
## if not options.gridJob:

##    dumpFileName = "cfgDump"
##    if options.batchNumber >= 0:
##       dumpFileName += "_"
##       dumpFileName += str(options.batchNumber)

##    dumpFileName += ".py"

##    processDumpFile = open('%s/%s' % (configDir, dumpFileName), 'w')

##    print >> processDumpFile, process.dumpPython()