#!/usr/bin/env python3
#____________________________________________________________
#
#  createPayload
#
# A very simple way to create condition DB payloads
#
# Francisco Yumiceva
# yumiceva@fnal.gov
#
# Fermilab, 2009
#
#____________________________________________________________

"""
   createPayload.py

   A very simple script to handle payloads for beam spot results

   usage: %prog -d <data file/directory> -t <tag name>
   -c, --copy   : Only copy files from input directory to test/workflow/files/
   -d, --data   = DATA: Data file, or directory with data files.
   -I, --IOVbase = IOVBASE: options: runbase(default), lumibase, timebase
   -o, --overwrite : Overwrite results files when copying.
   -O, --Output = OUTPUT: Output directory for data files (workflow directory)
   -m, --merged : Use when data file contains combined results.
   -n, --newarchive : Create a new archive directory when copying.
   -t, --tag    = TAG: Database tag name.
   -T, --Test   : Upload files to Test dropbox for data validation.
   -u, --upload : Upload files to offline drop box via scp.
   -z, --zlarge : Enlarge sigmaZ to 10 +/- 0.005 cm.

   Francisco Yumiceva (yumiceva@fnal.gov)
   Fermilab 2010

"""


from builtins import range
import sys,os
import subprocess, re, time
import datetime
from CommonMethods import *
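# Helper functions used below (parse, pack, writeSqliteFile, readSqliteFile,
# appendSqliteFile, uploadSqliteFile) are expected to come from this wildcard
# import of CommonMethods.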

workflowdir             = 'test/workflow/'
workflowdirLastPayloads = workflowdir + 'lastPayloads/'
workflowdirTmp          = workflowdir + 'tmp/'
workflowdirArchive      = workflowdir + 'archive/'
optionstring            = ''
tagType                 = ''

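# Copy the beam spot .txt result files found in 'path' (local disk or castor)
# into a per-dataset archive directory under workflowdir, and return the list
# of archived file paths.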
def copyToWorkflowdir(path):
    global workflowdirArchive
    lsCommand      = ''
    cpCommand      = ''
    listoffiles    = []
    tmplistoffiles = []
    if path.find('castor') != -1:
        print("Getting files from castor ...")
        lsCommand = 'ns'
        cpCommand = 'rf'
    elif not os.path.exists(path):
        exit("ERROR: File or directory " + path + " doesn't exist")

    if path[len(path)-4:len(path)] != '.txt':
        if path[len(path)-1] != '/':
            path = path + '/'

        aCommand  = lsCommand  + 'ls '+ path + " | grep .txt"

        tmpstatus = subprocess.getstatusoutput( aCommand )
        tmplistoffiles = tmpstatus[1].split('\n')
        if len(tmplistoffiles) == 1:
            if tmplistoffiles[0] == '':
                exit('ERROR: No files found in directory ' + path)
            if tmplistoffiles[0].find('No such file or directory') != -1:
                exit("ERROR: File or directory " + path + " doesn't exist")

    else:
        tmplistoffiles.append(path[path.rfind('/')+1:len(path)])
        path = path[0:path.rfind('/')+1]


    archiveName = path
    if path == './':
        archiveName = os.getcwd() + '/'
        archiveName = archiveName[archiveName[:len(archiveName)-1].rfind('/')+1:len(archiveName)]
    if path[:len(path)-1].rfind('/') != -1:
        archiveName = path[path[:len(path)-1].rfind('/')+1:len(path)]

    workflowdirArchive = workflowdirArchive + archiveName
    if tagType != '' :
        workflowdirArchive = workflowdirArchive[:len(workflowdirArchive)-1] + '_' + tagType + '/'
    if not os.path.isdir(workflowdirArchive):
        os.mkdir(workflowdirArchive)
    elif(option.newarchive):
#        tmpTime = str(datetime.datetime.now())
#        tmpTime = tmpTime.replace(' ','-')
#        tmpTime = tmpTime.replace('.','-')
#        workflowdirArchive = workflowdirArchive[:len(workflowdirArchive)-1] + '_' + tmpTime + '/'
#        os.mkdir(workflowdirArchive)
        for n in range(1,100000):
            tryDir = workflowdirArchive[:len(workflowdirArchive)-1] + '_' + str(n) + '/'
            if not os.path.isdir(tryDir):
                workflowdirArchive = tryDir
                os.mkdir(workflowdirArchive)
                break
            elif n == 100000-1:
                exit('ERROR: Too many archive directories in ' + workflowdir + '. Please remove some old ones.')

    for ifile in tmplistoffiles:
        if ifile.find('.txt') != -1:
            if os.path.isfile(workflowdirArchive+"/"+ifile):
                if option.overwrite:
                    print("File " + ifile + " already exists in destination. We will overwrite it.")
                else:
                    print("File " + ifile + " already exists in destination. Keep original file.")
                    listoffiles.append( workflowdirArchive + ifile )
                    continue
            listoffiles.append( workflowdirArchive + ifile )
            # copy to local disk
            aCommand = cpCommand + 'cp '+ path + ifile + " " + workflowdirArchive
            print(" >> " + aCommand)
            tmpstatus = subprocess.getstatusoutput( aCommand )
    return listoffiles

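# Create the workflow directory layout (lastPayloads/, tmp/, archive/),
# emptying lastPayloads/ and tmp/ if they already exist.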
def mkWorkflowdir():
    global workflowdir
    global workflowdirLastPayloads
    global workflowdirTmp
    global workflowdirArchive
    if not os.path.isdir(workflowdir):
        print("Making " + workflowdir + " directory...")
        os.mkdir(workflowdir)

    if not os.path.isdir(workflowdirLastPayloads):
        os.mkdir(workflowdirLastPayloads)
    else:
        os.system("rm -f "+ workflowdirLastPayloads + "*")

    if not os.path.isdir(workflowdirTmp):
        os.mkdir(workflowdirTmp)
    else:
        os.system("rm -f "+ workflowdirTmp + "*")

    if not os.path.isdir(workflowdirArchive):
        os.mkdir(workflowdirArchive)

###############################################################################################
if __name__ == '__main__':
    #if len(sys.argv) < 2:
#   print "\n [usage] createPayload <beamspot file> <tag name> <IOV since> <IOV till=-1=inf> <IOV comment> <destDB=oracle://cms_orcon_prod/CMS_COND_31X_BEAMSPOT>"
        #print " e.g. createPayload BeamFitResults_template.txt BeamSpotObjects_2009_v1_express 122745 \"\" \"beam spot for early collisions\" \"oracle://cms_orcon_prod/CMS_COND_31X_BEAMSPOT\"\n"
        #sys.exit()


    # COMMAND LINE OPTIONS
    #################################
    option,args = parse(__doc__)
    if not args and not option: exit()

    workflowdir             = os.getenv("CMSSW_BASE") + "/src/RecoVertex/BeamSpotProducer/test/workflow/"
    if option.Output:
        workflowdir = option.Output
        if workflowdir[len(workflowdir)-1] != '/':
            workflowdir = workflowdir + '/'
    workflowdirLastPayloads = workflowdir + "lastPayloads/"
    workflowdirTmp          = workflowdir + "tmp/"
    workflowdirArchive      = workflowdir + "archive/"

    if ( (option.data and option.tag) or (option.data and option.copy)):
        mkWorkflowdir()

    if not option.data:
        print("ERROR: You must provide a data file or a directory with data files")
        exit()

    if option.copy:
        copyToWorkflowdir(option.data)
        exit("Files copied to " + workflowdirArchive)

    tagname = ''
    if option.tag:
        tagname = option.tag
        if tagname.find("offline") != -1:
            tagType = "offline"
        elif tagname.find("prompt") != -1:
            tagType = "prompt"
        elif tagname.find("express") != -1 :
            tagType = "express"
        elif tagname.find("hlt") != -1:
            tagType = "hlt"
        else:
            print("I am assuming your tag is for the offline database...")
            tagType = "offline"

    else:
        print("ERROR: You must provide the database tag name")
        exit()

    IOVbase = 'runbase'
    timetype = 'runnumber'
    if option.IOVbase:
        if option.IOVbase != "runbase" and option.IOVbase != "lumibase" and option.IOVbase != "timebase":
            print("ERROR: unknown IOV base option: " + option.IOVbase)
            exit()
        IOVbase = option.IOVbase

    listoffiles = copyToWorkflowdir(option.data)
    # sort list of data files in chronological order
    sortedlist = {}

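    # Each entry is keyed by a packed (run, lumi) integer so that sorting the
    # keys gives chronological order. pack() comes from CommonMethods; it
    # presumably combines the run number and lumi section into a single 64-bit
    # value (e.g. run in the upper 32 bits, lumi in the lower 32).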
    for beam_file in listoffiles:

        if len(listoffiles)==1 and option.merged:
            mergedfile = open(beam_file)
            alllines = mergedfile.readlines()
            npayloads = int(len(alllines)/23)
            for i in range(0,npayloads):
                block = alllines[i * 23: (i+1)*23]
                arun  = ''
                atime = ''
                alumi = ''
                skipblock = False
                for line in block:
                    if line.find('Runnumber') != -1:
                        arun = line.split()[1]
                    if line.find("EndTimeOfFit") != -1:
                        atime = time.strptime(line.split()[1] +  " " + line.split()[2] + " " + line.split()[3],"%Y.%m.%d %H:%M:%S %Z")
                    if line.find("LumiRange") != -1:
                        alumi = line.split()[3]
                    if line.find('Type') != -1 and line.split()[1] != '2':
                        skipblock = True
                # skip payload blocks whose fit Type is not 2
                if skipblock:
                    continue
                sortedlist[int(pack(int(arun), int(alumi)))] = block
            break

        tmpfile = open(beam_file)
        atime = ''
        arun = ''
        alumi = ''
        skip = False
        for line in tmpfile:
            if line.find('Runnumber') != -1:
                arun = line.split()[1]
            if line.find("EndTimeOfFit") != -1:
                atime = time.strptime(line.split()[1] +  " " + line.split()[2] + " " + line.split()[3],"%Y.%m.%d %H:%M:%S %Z")
            if line.find("LumiRange") != -1:
                alumi = line.split()[3]
            if line.find('Type') != -1 and line.split()[1] == '0':
                skip = True
        if skip:
            print(" zero fit result, skip file " + beam_file + " with time stamp:")
            print(" run " + arun + " lumis " + alumi)
        else:
            sortedlist[int(pack(int(arun), int(alumi)))] = beam_file

        tmpfile.close()

    keys = sorted(sortedlist.keys())

    # write combined data file
    if not os.path.isdir(workflowdirArchive + "AllIOVs"):
        os.mkdir(workflowdirArchive + "AllIOVs")
    allbeam_file = workflowdirArchive + "AllIOVs/" + tagname + "_all_IOVs.txt"
#    if os.path.isfile(allbeam_file):

    allfile = open( allbeam_file, 'a')
    print(" merging all results into file: " + allbeam_file)

    # check if merged sqlite file exists
    if os.path.exists(workflowdirArchive+"payloads/Combined.db"):
        os.system("rm "+workflowdirArchive+"payloads/Combined.db")


    nfile = 0
    iov_since_first = '1'
    total_files = len(keys)

    destDB = 'oracle://cms_orcon_prod/CMS_COND_31X_BEAMSPOT'
    if option.Test:
        destDB = 'oracle://cms_orcoff_prep/CMS_COND_BEAMSPOT'

    iov_comment = 'Beam spot position'
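    # For every IOV, in chronological order: write a temporary cleaned-up copy
    # of the fit result, turn it into a standalone sqlite payload with the
    # write2DB template, read it back as a check, and append it to the combined
    # sqlite file Combined.db.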
    for key in keys:

        iov_since = '1'
        iov_till = ''

        suffix = "_" + str(nfile)
        writedb_template = os.getenv("CMSSW_BASE") + "/src/RecoVertex/BeamSpotProducer/test/write2DB_template.py"
        readdb_template  = os.getenv("CMSSW_BASE") + "/src/RecoVertex/BeamSpotProducer/test/readDB_template.py"
        sqlite_file_name = tagname + suffix
        sqlite_file   = workflowdirTmp + sqlite_file_name + '.db'
        metadata_file = workflowdirTmp + sqlite_file_name + '.txt'
        nfile = nfile + 1

        #### WRITE sqlite file

        beam_file = sortedlist[key]
        tmp_datafilename = workflowdirTmp+"tmp_datafile.txt"
        if option.merged:
            tmpfile = open(tmp_datafilename,'w')
            tmpfile.writelines(sortedlist[key])
            tmpfile.close()
            beam_file = tmp_datafilename

        print("read input beamspot file: " + beam_file)
        tmpfile = open(beam_file)
        beam_file_tmp = workflowdirTmp + beam_file[beam_file.rfind('/')+1:] + ".tmp"
        newtmpfile = open(beam_file_tmp,"w")
        tmp_run = ""
        tmp_lumi_since = ""
        tmp_lumi_till = ""
        for line in tmpfile:
            if line.find("Runnumber") != -1:
                iov_since = line.split()[1]
                iov_till = iov_since
                tmp_run = line.split()[1]
            elif line.find("LumiRange") != -1:
                tmp_lumi_since = line.split()[1]
                tmp_lumi_till = line.split()[3]
            elif line.find("BeginTimeOfFit") == -1 and line.find("EndTimeOfFit") == -1 and line.find("LumiRange") == -1:
                if line.find("sigmaZ0") != -1 and option.zlarge:
                    line = "sigmaZ0 10\n"
                if line.find("Cov(3,j)") != -1 and option.zlarge:
                    line = "Cov(3,j) 0 0 0 2.5e-05 0 0 0\n"
                newtmpfile.write(line)
            allfile.write(line)

        # pack run number and lumi section
        if IOVbase == "lumibase":
            timetype = "lumiid"
            iov_since = str( pack(int(tmp_run), int(tmp_lumi_since)) )
            iov_till = str( pack(int(tmp_run), int(tmp_lumi_till)) )
        # keep first iov for merged output metafile
        if nfile == 1:
            iov_since_first = iov_since

        tmpfile.close()
        newtmpfile.close()
        if option.copy:
            continue

        beam_file = beam_file_tmp

        if not writeSqliteFile(sqlite_file,tagname,timetype,beam_file,writedb_template,workflowdirTmp):
            print("An error occurred while writing the sqlite file: " + sqlite_file)

        subprocess.getstatusoutput('rm -f ' + beam_file)
        ##### READ and check sqlite file
        readSqliteFile(sqlite_file,tagname,readdb_template,workflowdirTmp)

        #### Merge sqlite files
        if not os.path.isdir(workflowdirArchive + 'payloads'):
            os.mkdir(workflowdirArchive + 'payloads')

        print(" merge sqlite file ...")
        appendSqliteFile("Combined.db", sqlite_file, tagname, iov_since, iov_till ,workflowdirTmp)

        # keep last payload for express, and prompt tags
        if nfile == total_files:
            print(" this is the last IOV. You can use this payload for express and prompt conditions.")
            os.system("cp "+sqlite_file+ " "+workflowdirArchive+"payloads/express.db")
            print("a copy of this payload has been placed at:")
            print(workflowdirArchive+"payloads/express.db")

        # clean up
        os.system("rm "+ sqlite_file)
        print(" clean up done.")

    os.system("mv " + workflowdirTmp + "Combined.db " + workflowdirArchive + "payloads/")
    allfile.close()

    #### CREATE payload for merged output

    print(" create MERGED payload card for dropbox ...")

    sqlite_file   = workflowdirArchive+'payloads/Combined.db'
    metadata_file = workflowdirArchive+'payloads/Combined.txt'
    dfile = open(metadata_file,'w')

    dfile.write('destDB '+ destDB +'\n')
    dfile.write('tag '+ tagname +'\n')
    dfile.write('inputtag' +'\n')
    dfile.write('since ' + iov_since_first +'\n')
    #        dfile.write('till ' + iov_till +'\n')
    if IOVbase == "runbase":
        dfile.write('Timetype runnumber\n')
    elif IOVbase == "lumibase":
        dfile.write('Timetype lumiid\n')
    checkType = tagType
    if tagType == "express":
        checkType = "hlt"
    dfile.write('IOVCheck ' + checkType + '\n')
    dfile.write('usertext ' + iov_comment +'\n')

    dfile.close()
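    # The resulting metadata file is a simple key/value text card, e.g.
    # (values here are purely illustrative):
    #
    #   destDB oracle://cms_orcon_prod/CMS_COND_31X_BEAMSPOT
    #   tag BeamSpotObjects_2009_v1_express
    #   inputtag
    #   since 1
    #   Timetype runnumber
    #   IOVCheck hlt
    #   usertext Beam spot position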

    uuid = subprocess.getstatusoutput('uuidgen -t')[1]
    final_sqlite_file_name = tagname + '@' + uuid

    if not os.path.isdir(workflowdirArchive + 'payloads'):
        os.mkdir(workflowdirArchive + 'payloads')
    subprocess.getstatusoutput('cp ' + sqlite_file   + ' ' + workflowdirArchive + 'payloads/' + final_sqlite_file_name + '.db')
    subprocess.getstatusoutput('cp ' + metadata_file + ' ' + workflowdirArchive + 'payloads/' + final_sqlite_file_name + '.txt')

    subprocess.getstatusoutput('mv ' + sqlite_file   + ' ' + workflowdirLastPayloads + final_sqlite_file_name + '.db')
    subprocess.getstatusoutput('mv ' + metadata_file + ' ' + workflowdirLastPayloads + final_sqlite_file_name + '.txt')

    print(workflowdirLastPayloads + final_sqlite_file_name + '.db')
    print(workflowdirLastPayloads + final_sqlite_file_name + '.txt')

    if option.upload:
        print(" scp files to offline Drop Box")
        dropbox = "/DropBox"
        if option.Test:
            dropbox = "/DropBox_test"

        uploadSqliteFile(workflowdirLastPayloads,final_sqlite_file_name,dropbox)