Back to home page

Project CMSSW displayed by LXR

 
 

    


File indexing completed on 2021-10-28 04:15:54

0001 from __future__ import print_function
0002 import os
class Matrix(dict):
    """Mapping of workflow number (float) -> WF; refuses to overwrite entries.

    Keys are normalised to float, and the assigned value is wrapped in a WF
    carrying that number.
    """
    def __setitem__(self, key, value):
        if key in self:
            # duplicate workflow numbers are a configuration error; warn, keep old
            print("ERROR in Matrix")
            print("overwriting", key, "not allowed")
            return
        wf_num = float(key)
        # go through dict.update to bypass this guard once the key is validated
        self.update({wf_num: WF(wf_num, value)})

    def addOverride(self, key, override):
        """Attach command-line overrides to the workflow stored under key."""
        self[key].addOverride(override)
0013             
# the class to collect all possible steps
class Steps(dict):
    """Dictionary of step name -> step options; overwriting a key is fatal."""
    def __setitem__(self, key, value):
        if key not in self:
            # bypass this guard via dict.update for the validated insert
            self.update({key: value})
            return
        print("ERROR in Step")
        print("overwriting", key, "not allowed")
        import sys
        sys.exit(-9)

    def overwrite(self, keypair):
        """Alias keypair[0] to the step already stored under keypair[1]."""
        new_key, existing_key = keypair[0], keypair[1]
        # dict.update intentionally skips the no-overwrite check above
        self.update({new_key: self[existing_key]})
0030         
class WF(list):
    """A single workflow: a list of step names plus workflow metadata.

    n -- the workflow number (float)
    l -- the list of step names making up this workflow
    """
    def __init__(self, n, l):
        self.extend(l)
        self.num = n
        # the actual step option dicts of this WF, filled by interpret()
        self.steps = []
        self.overrides = {}

    def addOverride(self, overrides):
        """Store command-line overrides for this workflow."""
        self.overrides = overrides

    def interpret(self, stepsDict):
        """Resolve each step name in this workflow through stepsDict."""
        for s in self:
            print('steps', s, stepsDict[s])
            # bug fix: was 'steps.append(...)', which referenced an undefined
            # (or unrelated global) name instead of this workflow's own list
            self.steps.append(stepsDict[s])
0045     
0046 
0047 
def expandLsInterval(lumis):
    """Expand an inclusive [first, last] lumi-section pair into a range."""
    first, last = lumis[0], lumis[1]
    return range(first, last + 1)
0050 
# Golden-JSON bookkeeping: locate the certified-luminosity JSON files shipped
# with DPGAnalysis/Skims and parse them once at import time.
# findFileInPath comes from the wildcard import below — TODO confirm against
# DPGAnalysis.Skims.golden_json_2015.
from DPGAnalysis.Skims.golden_json_2015 import * 
jsonFile2015 = findFileInPath("DPGAnalysis/Skims/data/Cert_13TeV_16Dec2015ReReco_Collisions15_25ns_50ns_JSON.txt")
jsonFile2016 = findFileInPath("DPGAnalysis/Skims/data/Cert_271036-274240_13TeV_PromptReco_Collisions16_JSON.txt")

import json
# data_json2015/data_json2016: dicts keyed by str(run) -> list of LS segments
with open(jsonFile2015) as data_file:
    data_json2015 = json.load(data_file)

with open(jsonFile2016) as data_file:
    data_json2016 = json.load(data_file)
0061 
# return a portion of the 2015 golden json
# LS for a full run by default; otherwise a subset of which you determined the size
def selectedLS(list_runs=None, maxNum=-1, l_json=None):
    """Return {run: [LS segments]} for the requested runs from a golden json.

    list_runs -- list of integer run numbers (default: empty list)
    maxNum    -- stop collecting a run's segments once the cumulative LS count
                 exceeds this value; -1 means no limit
    l_json    -- golden-json dict keyed by str(run) (default: data_json2015)

    Returns None on bad input or when nothing passes the selection.
    """
    # avoid the mutable-default-argument pitfall, and resolve the json
    # default lazily instead of freezing it at import time
    if list_runs is None:
        list_runs = []
    if l_json is None:
        l_json = data_json2015
    # guard the empty list too (the old code crashed with IndexError on it)
    if not list_runs or not isinstance(list_runs[0], int):
        print("ERROR: list_runs must be a list of integers")
        return None
    local_dict = {}
    ls_count = 0

    for run in list_runs:
        if str(run) in l_json:
            for LSsegment in l_json[str(run)]:
                # segments are inclusive [first, last] ranges
                ls_count += (LSsegment[-1] - LSsegment[0] + 1)
                # 'and' instead of bitwise '&'; stop this run once over budget
                if maxNum != -1 and ls_count > maxNum:
                    break
                local_dict.setdefault(run, []).append(LSsegment)
        else:
            print("run %s is NOT present in json %s\n\n"%(run, l_json))

    if local_dict:
        return local_dict
    print("No luminosity section interval passed the json and your selection; returning None")
    return None
0098 
0099 # print "\n\n\n THIS IS WHAT I RETURN: %s \n\n"%( selectedLS([251244,251251]) )
0100 
0101 
0102 
0103 
# default number of events requested when none is specified
InputInfoNDefault=2000000
class InputInfo(object):
    """Describes an input dataset for a relval workflow and builds the
    dasgoclient shell commands used to resolve it to a file list.

    run -- list of run numbers to select (empty = no run selection)
    ls  -- dict of run -> list of lumi-section segments (ints or [lo, hi] pairs)
    ib_block / ib_blacklist -- optional DAS block to query / file patterns to drop
    """
    def __init__(self, dataSet, dataSetParent='', label='', run=None, ls=None,
                 files=1000, events=InputInfoNDefault, split=10, location='CAF',
                 ib_blacklist=None, ib_block=None):
        # run=[] / ls={} were shared mutable defaults; use None sentinels instead
        self.run = run if run is not None else []
        self.ls = ls if ls is not None else {}
        self.files = files
        self.events = events
        self.location = location
        self.label = label
        self.dataSet = dataSet
        self.split = split
        self.ib_blacklist = ib_blacklist
        self.ib_block = ib_block
        self.dataSetParent = dataSetParent

    def das(self, das_options, dataset):
        """Return the full shell pipeline resolving this input via dasgoclient."""
        if len(self.run) != 0 or self.ls:
            # only the first 3 queries are used
            queries = self.queries(dataset)[:3]
            if len(self.run) != 0:
                command = ";".join(["dasgoclient %s --query '%s'" % (das_options, query) for query in queries])
            else:
                # pair each per-run query with its lumi selection string and
                # filter the JSON output through das-selected-lumis.py
                lumis = self.lumis()
                commands = []
                while queries:
                    commands.append("dasgoclient %s --query 'lumi,%s' --format json | das-selected-lumis.py %s " % (das_options, queries.pop(), lumis.pop()))
                command = ";".join(commands)
            command = "({0})".format(command)
        else:
            command = "dasgoclient %s --query '%s'" % (das_options, self.queries(dataset)[0])

        # Run filter on DAS output: drop blacklisted patterns, then sort
        if self.ib_blacklist:
            command += " | grep -E -v "
            command += " ".join(["-e '{0}'".format(pattern) for pattern in self.ib_blacklist])
        from os import getenv
        if getenv("CMSSW_USE_IBEOS","false")=="true": return command + " | ibeos-lfn-sort"
        return command + " | sort -u"

    def lumiRanges(self):
        """Return a shell 'echo' emitting the lumi-range JSON, or None."""
        if len(self.run) != 0:
            # whole runs: select every possible lumi section per run
            return "echo '{\n"+",".join(('"%d":[[1,268435455]]\n'%(x,) for x in self.run))+"}'"
        if self.ls :
            return "echo '{\n"+",".join(('"%d" : %s\n'%( int(x),self.ls[x]) for x in self.ls.keys()))+"}'"
        return None

    def lumis(self):
        """Return one 'a,b:c,d:...' selection string per run in self.ls."""
        query_lumis = []
        if self.ls:
            for run in self.ls.keys():
                run_lumis = []
                for rng in self.ls[run]:
                    # a segment is either a single LS (int) or a [lo, hi] pair
                    if isinstance(rng, int):
                        run_lumis.append(str(rng))
                    else:
                        run_lumis.append(str(rng[0])+","+str(rng[1]))
                query_lumis.append(":".join(run_lumis))
        return query_lumis

    def queries(self, dataset):
        """Return the list of DAS 'file ...' query strings for this input."""
        query_by = "block" if self.ib_block else "dataset"
        query_source = "{0}#{1}".format(dataset, self.ib_block) if self.ib_block else dataset

        if self.ls :
            # if you have a LS list specified, still query das for the full run
            # (multiple per-LS queries take forever) and rely on
            # step1_lumiRanges.log to run only on the selected LS.
            # Dead/unreachable code that built 'the_queries' was removed here.
            return ["file {0}={1} run={2}".format(query_by, query_source, query_run) for query_run in self.ls.keys()]

        # site selection: T2_CH_CERN by default, overridable (or blanked) via env
        site = " site=T2_CH_CERN"
        if "CMSSW_DAS_QUERY_SITES" in os.environ:
            if os.environ["CMSSW_DAS_QUERY_SITES"]:
                site = " site=%s" % os.environ["CMSSW_DAS_QUERY_SITES"]
            else:
                site = ""
        if len(self.run) != 0:
            return ["file {0}={1} run={2}{3}".format(query_by, query_source, query_run, site) for query_run in self.run]
        else:
            return ["file {0}={1}{2}".format(query_by, query_source, site)]

    def __str__(self):
        if self.ib_block:
            return "input from: {0} with run {1}#{2}".format(self.dataSet, self.ib_block, self.run)
        return "input from: {0} with run {1}".format(self.dataSet, self.run)
0202 
0203     
# merge dictionaries, with priority on the [0] index
def merge(dictlist, TELL=False):
    """Merge a list of dicts; entries at earlier list positions win.

    Recursively combines the last two dicts (the earlier one overriding)
    until a single dict remains, which is returned as a shallow copy.
    """
    import copy
    last = len(dictlist) - 1
    if TELL: print(last, dictlist)
    if last == 0:
        # only one dict left: hand back a shallow copy of it
        return copy.copy(dictlist[0])
    remaining = dictlist[0:max(0, last - 1)]
    if TELL: print(remaining)
    # copy the last dict, then let the one before it override
    combined = copy.copy(dictlist[last])
    combined.update(dictlist[last - 1])
    # recurse on the shortened list with the combined dict appended
    remaining.append(combined)
    return merge(remaining, TELL)
0222 
def remove(d, key, TELL=False):
    """Return a deep copy of dict d with key deleted; d itself is untouched."""
    import copy
    pruned = copy.deepcopy(d)
    if TELL: print("original dict, BEF: %s"%d)
    del pruned[key]
    if TELL: print("copy-removed dict, AFT: %s"%pruned)
    return pruned
0230 
0231 
0232 #### Standard release validation samples ####
0233 
0234 stCond={'--conditions':'auto:run1_mc'}
def Kby(N, s):
    """Build a --relval option for N*1000 total events, s events per job."""
    return {'--relval': '{0}000,{1}'.format(N, s)}
def Mby(N, s):
    """Build a --relval option for N*1000000 total events, s events per job."""
    return {'--relval': '{0}000000,{1}'.format(N, s)}
0239 
def changeRefRelease(steps, listOfPairs):
    """For every step, rewrite (old, new) release tags in the INPUT dataset
    name and in the --pileup_input option, in place."""
    for name in steps:
        step = steps[name]
        if 'INPUT' in step:
            # capture the dataset name once; each pair replaces against it,
            # so with several matching pairs the last one wins
            currentDataSet = step['INPUT'].dataSet
            for (oldRef, newRef) in listOfPairs:
                if oldRef in currentDataSet:
                    step['INPUT'].dataSet = currentDataSet.replace(oldRef, newRef)
        if '--pileup_input' in step:
            for (oldRef, newRef) in listOfPairs:
                if oldRef in step['--pileup_input']:
                    step['--pileup_input'] = step['--pileup_input'].replace(oldRef, newRef)
0251         
def addForAll(steps, d):
    """Merge the options in d into every step dict, in place."""
    for name in steps:
        steps[name].update(d)
0255 
0256 
def genvalid(fragment, d, suffix='all', fi='', dataSet=''):
    """Derive a generator-validation step dict from template d.

    Returns a shallow copy of d with 'genvalid' in the -s option suffixed,
    --filein pointed at an LHE article (fi) or a DAS dataset (dataSet,
    which wins if both are given), and 'cfg' set to the fragment name.
    The template d itself is never modified.
    """
    import copy
    cfg = copy.copy(d)  # shallow copy preserves the template's dict subclass
    if suffix:
        cfg['-s'] = cfg['-s'].replace('genvalid', 'genvalid_' + suffix)
    if fi:
        cfg['--filein'] = 'lhe:%d' % (fi,)
    if dataSet:
        cfg['--filein'] = 'das:%s' % (dataSet,)
    cfg['cfg'] = fragment
    return cfg
0268 
0269