#!/usr/bin/env python3
from builtins import range
VERSION='1.00'
import os, sys, time
import argparse
from RecoLuminosity.LumiDB import pileupParser
from RecoLuminosity.LumiDB import selectionParser
import numpy as np
from scipy.special import loggamma

def parseInputFile(inputfilename):
    '''
    output ({run: {ls: [inlumi, RMS bunch lumi, mean bunch lumi]}})
    '''
    with open(inputfilename, 'r') as selectf:
        inputfilecontent = selectf.read()
    p = pileupParser.pileupParser(inputfilecontent)

#    p=inputFilesetParser.inputFilesetParser(inputfilename)
    runlsbyfile = p.runsandls()
    return runlsbyfile

def MyErf(xInput):
    # Abramowitz and Stegun approximation for Erf (equation 7.1.25)
    X = np.abs(xInput)

    p = 0.47047
    b1 = 0.3480242
    b2 = -0.0958798
    b3 = 0.7478556

    T = 1.0/(1.0+p*X)
    cErf = 1.0 - (b1*T + b2*T*T + b3*T*T*T)*np.exp(-1.0*X*X)

    # Alternate Erf approximation (equation 7.1.27):
    #A1 = 0.278393
    #A2 = 0.230389
    #A3 = 0.000972
    #A4 = 0.078108

    #term = 1.0 + A1*X + A2*X*X + A3*X*X*X + A4*X*X*X*X
    #denom = term*term*term*term

    #dErf = 1.0 - 1.0/denom

    # erf is odd, so restore the sign of the input
    return np.where(xInput < 0, -cErf, cErf)

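def _check_MyErf_accuracy():
    # Illustrative sketch, not part of the original script: the Abramowitz &
    # Stegun formula used in MyErf above has an absolute error of about 2.5e-5,
    # which can be verified against scipy.special.erf (scipy is already a
    # dependency of this script via loggamma).
    from scipy.special import erf
    x = np.linspace(-4., 4., 81)
    return np.max(np.abs(MyErf(x) - erf(x)))  # expected to stay below ~2.5e-5
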
def poisson(x, par):
    ## equivalent to TMath::Poisson (without x<0 and par<0 checks)
    return np.where(x == 0., np.exp(-par),
                    np.exp(x*np.log(par)-loggamma(x+1)-par))

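def _check_poisson_pmf():
    # Illustrative sketch, not part of the original script: for integer x the
    # expression in poisson() reproduces the standard Poisson pmf (checked here
    # against scipy.stats); loggamma is used so that non-integer bin edges can
    # also be evaluated in the convolution further below.
    from scipy.stats import poisson as scipy_poisson
    k = np.arange(0, 20)
    return np.max(np.abs(poisson(k.astype(float), 5.0) - scipy_poisson.pmf(k, 5.0)))
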
class EquidistantBinning(object):
    def __init__(self, num, xMin, xMax):
        self.num = num
        self.xMin = xMin
        self.xMax = xMax
        self.edges = np.linspace(xMin, xMax, num=num+1)
        self.centers = .5*(self.edges[:-1] + self.edges[1:])
    @property
    def width(self):
        return (self.xMax-self.xMin)/self.num
    def find(self, x):
        return np.floor((x-self.xMin)*self.num/(self.xMax-self.xMin)).astype(int)

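def _example_binning_usage():
    # Illustrative sketch, not part of the original script: a 10-bin axis from
    # 0 to 5 has 11 edges and 10 centers, and find() maps values to bin indices.
    b = EquidistantBinning(10, 0., 5.)
    # b.edges   -> [0.0, 0.5, 1.0, ..., 5.0]
    # b.centers -> [0.25, 0.75, ..., 4.75]
    # b.width   -> 0.5
    # b.find(np.array([0.1, 2.6])) -> array([0, 5])
    return b
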
Sqrt2 = np.sqrt(2)

def fillPileupHistogram(lumiInfo, calcOption, binning, hContents, minbXsec, run, ls):
    '''
    lumiInfo: [intlumi per LS, RMS bunch lumi, mean bunch lumi]

    intlumi is the deadtime-corrected average integrated lumi per lumisection;
    the RMS and mean bunch luminosities are scaled by minbXsec to get the
    spread and mean number of interactions.
    '''

    LSintLumi = lumiInfo[0]
    RMSInt = lumiInfo[1]*minbXsec
    AveNumInt = lumiInfo[2]*minbXsec

    # First, re-constitute the lumi distribution for this LS from the RMS:
    if RMSInt > 0:
        areaAbove = MyErf((AveNumInt-binning.edges)/Sqrt2/RMSInt)
        ## area above each edge, so areaAbove[i]-areaAbove[i+1] = area in bin i
        ProbFromRMS = .5*(areaAbove[:-1]-areaAbove[1:])
    else:
        ProbFromRMS = np.zeros(hContents.shape)
        obs = binning.find(AveNumInt)
        if ( obs < binning.num ) and ( AveNumInt >= 1.0E-5 ): # just ignore zero values
            ProbFromRMS[obs] = 1.0

    if calcOption == 'true':  # Just put the distribution into the histogram
        if RMSInt > 0:
            hContents += ProbFromRMS*LSintLumi
            totalProb = np.sum(ProbFromRMS)

            if 1.0-totalProb > 0.01:
                print("Run %d, LS %d: Significant probability density outside of your histogram (mean %.2f," % (run, ls, AveNumInt))
                print("rms %.2f, integrated probability %.3f). Consider using a higher value of --maxPileupBin." % (RMSInt, totalProb))
        else:
            hContents[obs] += LSintLumi ## obs corresponds to ROOT's FindBin(AveNumInt)-1, since hContents has no under/overflow bins
    else: # have to convolve with a Poisson distribution to get the observed Nint
        if not hasattr(binning, "poissConv"): ## only depends on the binning, so cache it
            ## poissConv[i,j] = TMath.Poisson(e[i], c[j])
            binning.poissConv = poisson(
                binning.edges[:-1,np.newaxis], ## e'[i,] = e[i]
                binning.centers[np.newaxis,:]) ## c'[,j] = c[j]
        # prob[i] = sum_j ProbFromRMS[j]*TMath.Poisson(e[i], c[j])
        prob = np.sum(binning.poissConv * ProbFromRMS[np.newaxis,:], axis=1)
        hContents += prob*LSintLumi
        #if ( not np.all(ProbFromRMS == 0) ) and 1.0-np.sum(prob) > 0.01:
        #    print("Run %d, LS %d: significant probability density outside of your histogram, %f" % (run, ls, np.sum(prob)))
        #    print("Consider using a higher value of --maxPileupBin")

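def _example_fill_single_ls():
    # Illustrative sketch, not part of the original script: fill a small
    # 'true' pileup histogram for one fake lumisection. The lumiInfo numbers
    # below are invented for illustration; in the real workflow they come from
    # the pileup JSON parsed by parseInputFile().
    demo_binning = EquidistantBinning(100, 0., 100.)
    demo_contents = np.zeros(demo_binning.centers.shape)
    demo_lumiInfo = [0.5, 5.0e-5, 4.0e-4]  # [intlumi, RMS bunch lumi, mean bunch lumi]
    fillPileupHistogram(demo_lumiInfo, 'true', demo_binning, demo_contents,
                        69200.0, run=1, ls=1)
    # demo_contents now holds intlumi-weighted probabilities peaking near
    # 4.0e-4 * 69200 ~ 28 interactions
    return demo_contents
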
##############################
## ######################## ##
## ## ################## ## ##
## ## ## Main Program ## ## ##
## ## ################## ## ##
## ######################## ##
##############################

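## Example invocation (illustrative sketch; the script and file names here are
## hypothetical, the options are the ones defined below):
##   ./thisScript.py -i MyAnalysisJSON.txt -j pileup_latest.txt \
##       --calcMode true --minBiasXsec 69200 --maxPileupBin 100 \
##       --numPileupBins 100 MyDataPileupHistogram.root
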
if __name__ == '__main__':

    parser = argparse.ArgumentParser(description = "Script to estimate pileup distribution using bunch instantaneous luminosity information and minimum bias cross section. Outputs a TH1D of the pileup distribution stored in a ROOT file.")

    # required
    req_group = parser.add_argument_group('required arguments')
    req_group.add_argument('outputfile', action='store', help='output ROOT file')
    req_group.add_argument('-i', '--input', dest='inputfile', action='store', required=True,
                           help='input Run/LS file for your analysis in JSON format')
    req_group.add_argument('-j', '--inputLumiJSON', dest='inputLumiJSON', action='store', required=True,
                           help='input pileup file in JSON format')
    req_group.add_argument('-c', '--calcMode', dest='calcMode', action='store',
                           help='calculate either "true" or "observed" distributions',
                           choices=['true', 'observed'], required=True)

    # optional
    parser.add_argument('-x', '--minBiasXsec', dest='minBiasXsec', action='store',
                        type=float, default=69200.0,
                        help='minimum bias cross section to use (in microbarn) (default: %(default).0f)')
    parser.add_argument('-m', '--maxPileupBin', dest='maxPileupBin', action='store',
                        type=int, default=100, help='maximum value of pileup histogram (default: %(default)d)')
    parser.add_argument('-n', '--numPileupBins', dest='numPileupBins', action='store',
                        type=int, default=1000, help='number of bins in pileup histogram (default: %(default)d)')
    parser.add_argument('--pileupHistName', dest='pileupHistName', action='store',
                        default='pileup', help='name of pileup histogram (default: %(default)s)')
    parser.add_argument('-v', '--verbose', dest='verbose', action='store_true',
                        help='verbose mode for printing')

    options = parser.parse_args()
    output = options.outputfile

    if options.verbose:
        print('General configuration')
        print('\toutputfile:', options.outputfile)
        print('\tAction:', options.calcMode, 'luminosity distribution will be calculated')
        print('\tinput selection file:', options.inputfile)
        print('\tinput lumi JSON:', options.inputLumiJSON)
        print('\tMinBiasXsec:', options.minBiasXsec)
        print('\tmaxPileupBin:', options.maxPileupBin)
        print('\tnumPileupBins:', options.numPileupBins)

    binning = EquidistantBinning(options.numPileupBins, 0., options.maxPileupBin)
    hContents = np.zeros(binning.centers.shape)

    with open(options.inputfile, 'r') as inpf:
        inputfilecontent = inpf.read()
    inputRange = selectionParser.selectionParser(inputfilecontent).runsandls()

    #inputRange=inputFilesetParser.inputFilesetParser(options.inputfile)

    inputPileupRange = parseInputFile(options.inputLumiJSON)

    # Now we have to find the information for the input runs and lumi sections
    # in the Lumi/Pileup list. First, loop over the inputs.

    for (run, lslist) in sorted(inputRange.items()):
        # now, look for the matching run, then match lumi sections
        # print "searching for run %d" % (run)
        if run in inputPileupRange:
            #print run
            LSPUlist = inputPileupRange[run]
            # print "LSPUlist", LSPUlist
            for LSnumber in lslist:
                if LSnumber in LSPUlist:
                    #print "found LS %d" % (LSnumber)
                    lumiInfo = LSPUlist[LSnumber]
                    # print lumiInfo
                    fillPileupHistogram(lumiInfo, options.calcMode,
                                        binning, hContents,
                                        options.minBiasXsec, run, LSnumber)
                else: # trouble
                    print("Run %d, LumiSection %d not found in Lumi/Pileup input file. Check your files!" \
                            % (run, LSnumber))

        else:  # trouble
            print("Run %d not found in Lumi/Pileup input file. Check your files!" % (run))

        # print run
        # print lslist

    ## convert hContents to a TH1D
    import ROOT
    pileupHist = ROOT.TH1D(options.pileupHistName, options.pileupHistName,
                           options.numPileupBins, 0., options.maxPileupBin)
    for i, ct in enumerate(hContents):
        pileupHist.SetBinContent(i+1, ct)

    histFile = ROOT.TFile.Open(output, 'recreate')
    if not histFile:
        raise RuntimeError("Could not open '%s' as an output root file" % output)
    pileupHist.Write()
    histFile.Close()
    print("Wrote output histogram to", output)
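
    ## Example read-back (illustrative sketch, not executed here): the stored
    ## distribution can later be retrieved with, e.g.,
    ##   f = ROOT.TFile.Open(output)
    ##   pu = f.Get(options.pileupHistName)  # the TH1D written above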