#!/usr/bin/env python3
VERSION='1.00'
import argparse
from RecoLuminosity.LumiDB import pileupParser
from RecoLuminosity.LumiDB import selectionParser
import numpy as np
from scipy.special import loggamma

def parseInputFile(inputfilename):
    '''
    Parse the pileup JSON file and return its content as
    {run: {ls: [intlumi, RMS bunch lumi, mean bunch lumi]}}
    '''
    with open(inputfilename, 'r') as selectf:
        inputfilecontent = selectf.read()
    p = pileupParser.pileupParser(inputfilecontent)
    runlsbyfile = p.runsandls()
    return runlsbyfile

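# Illustrative sketch of the structure this script relies on (not executed;
# the run/LS numbers and values are made up). Each run maps to lumisections,
# and each lumisection carries [integrated lumi, RMS bunch lumi, mean bunch lumi]:
#
#   inputPileupRange = parseInputFile('pileup_latest.txt')   # placeholder file name
#   # e.g. inputPileupRange[273158][42] -> [23.4, 2.0e-05, 5.0e-04]
#   # the last two entries are later scaled by the minimum bias cross section
#   # to obtain the RMS and mean number of interactions for that lumisection.
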
def MyErf(xInput):
    # Abramowitz and Stegun approximations for Erf (equations 7.1.25-28)
    X = np.abs(xInput)

    p = 0.47047
    b1 = 0.3480242
    b2 = -0.0958798
    b3 = 0.7478556

    T = 1.0/(1.0+p*X)
    cErf = 1.0 - (b1*T + b2*T*T + b3*T*T*T)*np.exp(-1.0*X*X)

    # Alternate Erf approximation:
    #A1 = 0.278393
    #A2 = 0.230389
    #A3 = 0.000972
    #A4 = 0.078108

    #term = 1.0+ A1*X+ A2*X*X+ A3*X*X*X+ A4*X*X*X*X
    #denom = term*term*term*term

    #dErf = 1.0 - 1.0/denom

    return np.where(xInput < 0, -cErf, cErf)

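# Quick sanity check (illustrative only, not run by the script): the three-term
# approximation 7.1.25 agrees with scipy.special.erf to a few times 1e-5, which
# is ample for reconstituting the per-LS luminosity distribution below.
#
#   from scipy.special import erf
#   x = np.linspace(-5, 5, 101)
#   assert np.max(np.abs(MyErf(x) - erf(x))) < 1e-4
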
def poisson(x, par):
    ## equivalent to TMath::Poisson (without x<0 and par<0 checks)
    return np.where(x == 0., np.exp(-par),
           np.exp(x*np.log(par)-loggamma(x+1)-par))

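# Illustrative example (not executed): for integer x this reduces to the usual
# Poisson probability, e.g. poisson(2., 3.) = 3**2 * exp(-3) / 2! ~ 0.224.
# Broadcasting over arrays of x and par is what fillPileupHistogram relies on
# when it builds its Poisson convolution matrix.
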
class EquidistantBinning(object):
    def __init__(self, num, xMin, xMax):
        self.num = num
        self.xMin = xMin
        self.xMax = xMax
        self.edges = np.linspace(xMin, xMax, num=num+1)
        self.centers = .5*(self.edges[:-1] + self.edges[1:])
    @property
    def width(self):
        return (self.xMax-self.xMin)/self.num
    def find(self, x):
        # np.int was removed in recent NumPy versions; use the builtin int dtype
        return np.floor((x-self.xMin)*self.num/(self.xMax-self.xMin)).astype(int)

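# Small illustration (not executed), with made-up numbers:
#   b = EquidistantBinning(5, 0., 10.)
#   b.edges   -> [ 0.,  2.,  4.,  6.,  8., 10.]
#   b.centers -> [ 1.,  3.,  5.,  7.,  9.]
#   b.width   -> 2.0
#   b.find(4.5) -> 2   (index of the bin containing 4.5)
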
Sqrt2 = np.sqrt(2)

def fillPileupHistogram(lumiInfo, calcOption, binning, hContents, minbXsec, run, ls):
    '''
    lumiInfo: [intlumi per LS, RMS bunch luminosity, mean bunch luminosity]

    intlumi is the deadtime-corrected integrated lumi for this lumisection;
    the RMS and mean bunch luminosities are scaled by the minimum bias cross
    section to give the RMS and mean number of interactions.
    '''

    LSintLumi = lumiInfo[0]
    RMSInt = lumiInfo[1]*minbXsec
    AveNumInt = lumiInfo[2]*minbXsec

    # First, re-constitute the lumi distribution for this LS from the RMS:
    if RMSInt > 0:
        areaAbove = MyErf((AveNumInt-binning.edges)/Sqrt2/RMSInt)
        ## area above edge, so areaAbove[i]-areaAbove[i+1] = area in bin
        ProbFromRMS = .5*(areaAbove[:-1]-areaAbove[1:])
    else:
        ProbFromRMS = np.zeros(hContents.shape)
        obs = binning.find(AveNumInt)
        if ( obs < binning.num ) and ( AveNumInt >= 1.0E-5 ): # just ignore zero values
            ProbFromRMS[obs] = 1.0

    if calcOption == 'true':  # Just put distribution into histogram
        if RMSInt > 0:
            hContents += ProbFromRMS*LSintLumi
            totalProb = np.sum(ProbFromRMS)

            if 1.0-totalProb > 0.01:
                print("Run %d, LS %d: Significant probability density outside of your histogram (mean %.2f," % (run, ls, AveNumInt))
                print("rms %.2f, integrated probability %.3f). Consider using a higher value of --maxPileupBin." % (RMSInt, totalProb))
        else:
            if obs < binning.num: # hContents has no overflow bin, so only fill in-range values
                hContents[obs] += LSintLumi
    else: # have to convolute with a Poisson distribution to get the observed Nint
        if not hasattr(binning, "poissConv"): ## only depends on binning, cache
            ## poissConv[i,j] = TMath.Poisson(e[i], c[j])
            binning.poissConv = poisson(
                binning.edges[:-1,np.newaxis], ## e'[i,] = e[i]
                binning.centers[np.newaxis,:]) ## c'[,j] = c[j]
        # prob[i] = sum_j ProbFromRMS[j]*TMath.Poisson(e[i], c[j])
        prob = np.sum(binning.poissConv * ProbFromRMS[np.newaxis,:], axis=1)
        hContents += prob*LSintLumi
        #if ( not np.all(ProbFromRMS == 0) ) and 1.0-np.sum(prob) > 0.01:
        #    print("Run %d, LS %d: significant probability density outside of your histogram, %f" % (run, ls, np.sum(prob)))
        #    print("Consider using a higher value of --maxPileupBin")

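# Illustrative sketch of how the function is driven below (not executed; the
# lumiInfo values and run/LS numbers are made up):
#
#   binning = EquidistantBinning(100, 0., 100.)
#   hContents = np.zeros(binning.centers.shape)
#   lumiInfo = [23.4, 2.0e-05, 5.0e-04]   # [intlumi, RMS bunch lumi, mean bunch lumi]
#   fillPileupHistogram(lumiInfo, 'true', binning, hContents, 69200.0, 273158, 42)
#   # with a 69200 microbarn cross section this LS has mean ~34.6 and RMS ~1.4
#   # interactions, spread over the corresponding bins with weight 23.4.
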
##############################
## ######################## ##
## ## ################## ## ##
## ## ## Main Program ## ## ##
## ## ################## ## ##
## ######################## ##
##############################

if __name__ == '__main__':

    parser = argparse.ArgumentParser(description="Script to estimate pileup distribution using bunch instantaneous luminosity information and minimum bias cross section. Outputs a TH1D of the pileup distribution stored in a ROOT file.")

    # required
    req_group = parser.add_argument_group('required arguments')
    req_group.add_argument('outputfile', action='store', help='output ROOT file')
    req_group.add_argument('-i', '--input', dest='inputfile', action='store', required=True,
                           help='input Run/LS file for your analysis in JSON format')
    req_group.add_argument('-j', '--inputLumiJSON', dest='inputLumiJSON', action='store', required=True,
                           help='input pileup file in JSON format')
    req_group.add_argument('-c', '--calcMode', dest='calcMode', action='store',
                           help='calculate either "true" or "observed" distributions',
                           choices=['true', 'observed'], required=True)

    # optional
    parser.add_argument('-x', '--minBiasXsec', dest='minBiasXsec', action='store',
                        type=float, default=69200.0,
                        help='minimum bias cross section to use (in microbarn) (default: %(default).0f)')
    parser.add_argument('-m', '--maxPileupBin', dest='maxPileupBin', action='store',
                        type=int, default=100, help='maximum value of pileup histogram (default: %(default)d)')
    parser.add_argument('-n', '--numPileupBins', dest='numPileupBins', action='store',
                        type=int, default=1000, help='number of bins in pileup histogram (default: %(default)d)')
    parser.add_argument('--pileupHistName', dest='pileupHistName', action='store',
                        default='pileup', help='name of pileup histogram (default: %(default)s)')
    parser.add_argument('-v', '--verbose', dest='verbose', action='store_true',
                        help='verbose mode for printing')

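    # Example invocation (script and file names are placeholders; only the
    # options are real):
    #
    #   python3 <this script> -i Cert_GoldenJSON.txt -j pileup_latest.txt \
    #       -c true -x 69200 -m 100 -n 100 MyDataPileupHistogram.root
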
    options = parser.parse_args()
    output = options.outputfile

    if options.verbose:
        print('General configuration')
        print('\toutputfile:', options.outputfile)
        print('\tAction:', options.calcMode, 'luminosity distribution will be calculated')
        print('\tinput selection file:', options.inputfile)
        print('\tinput lumi JSON:', options.inputLumiJSON)
        print('\tMinBiasXsec:', options.minBiasXsec)
        print('\tmaxPileupBin:', options.maxPileupBin)
        print('\tnumPileupBins:', options.numPileupBins)

    binning = EquidistantBinning(options.numPileupBins, 0., options.maxPileupBin)
    hContents = np.zeros(binning.centers.shape)

    with open(options.inputfile, 'r') as inpf:
        inputfilecontent = inpf.read()
    inputRange = selectionParser.selectionParser(inputfilecontent).runsandls()

    inputPileupRange = parseInputFile(options.inputLumiJSON)

    # now, we have to find the information for the input runs and lumi sections
    # in the Lumi/Pileup list. First, loop over inputs

    for (run, lslist) in sorted(inputRange.items()):
        # now, look for matching run, then match lumi sections
        if run in inputPileupRange:
            LSPUlist = inputPileupRange[run]
            for LSnumber in lslist:
                if LSnumber in LSPUlist:
                    lumiInfo = LSPUlist[LSnumber]
                    fillPileupHistogram(lumiInfo, options.calcMode,
                                        binning, hContents,
                                        options.minBiasXsec, run, LSnumber)
                else:  # trouble
                    print("Run %d, LumiSection %d not found in Lumi/Pileup input file. Check your files!"
                          % (run, LSnumber))
        else:  # trouble
            print("Run %d not found in Lumi/Pileup input file. Check your files!" % (run))

    ## convert hContents to a TH1D
    import ROOT
    pileupHist = ROOT.TH1D(options.pileupHistName, options.pileupHistName,
                           options.numPileupBins, 0., options.maxPileupBin)
    for i, ct in enumerate(hContents):
        pileupHist.SetBinContent(i+1, ct)

    histFile = ROOT.TFile.Open(output, 'recreate')
    if not histFile:
        raise RuntimeError("Could not open '%s' as an output root file" % output)
    pileupHist.Write()
    histFile.Close()
    print("Wrote output histogram to", output)
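
    # Typical downstream use (illustrative, not executed here): the histogram is
    # usually normalized to unit area before being fed to pileup reweighting, e.g.
    #
    #   f = ROOT.TFile.Open(output)
    #   h = f.Get(options.pileupHistName)
    #   h.Scale(1.0/h.Integral())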