File indexing completed on 2024-04-06 12:29:04
0001
0002
0003
0004
0005
0006
0007
0008
0009
0010
0011
0012
0013
0014
0015 """
0016 createPayload.py
0017
0018 A very simple script to handle payload for beam spot results
0019
0020 usage: %prog -d <data file/directory> -t <tag name>
0021 -c, --copy : Only copy files from input directory to test/workflow/files/
0022 -d, --data = DATA: Data file, or directory with data files.
0023 -I, --IOVbase = IOVBASE: options: runbase(default), lumibase, timebase
0024 -o, --overwrite : Overwrite results files when copying.
0025 -O, --Output = OUTPUT: Output directory for data files (workflow directory)
0026 -m, --merged : Use when data file contains combined results.
0027 -n, --newarchive : Create a new archive directory when copying.
0028 -t, --tag = TAG: Database tag name.
0029 -T, --Test : Upload files to Test dropbox for data validation.
0030 -u, --upload : Upload files to offline drop box via scp.
0031 -z, --zlarge : Enlarge sigmaZ to 10 +/- 0.005 cm.
0032
0033 Francisco Yumiceva (yumiceva@fnal.gov)
0034 Fermilab 2010
0035
0036 """
0037 from __future__ import print_function
0038
0039
0040 from builtins import range
0041 import sys,os
0042 import subprocess, re, time
0043 import datetime
0044 from CommonMethods import *
0045
# Default workflow area layout, relative to the current directory.
# These are re-derived from $CMSSW_BASE (or -O/--Output) in __main__ below.
workflowdir = 'test/workflow/'
workflowdirLastPayloads = workflowdir + 'lastPayloads/'  # last payload/metadata pair produced
workflowdirTmp = workflowdir + 'tmp/'                    # scratch space, flushed by mkWorkflowdir()
workflowdirArchive = workflowdir + 'archive/'            # per-input archive of data files and payloads
optionstring = ''  # NOTE(review): never read in this file
tagType = ''       # 'offline' | 'prompt' | 'express' | 'hlt', derived from the tag name in __main__
0052
def copyToWorkflowdir(path):
    """Stage beam spot result (.txt) files from *path* into the archive area.

    *path* may be a single .txt file or a directory, local or on castor
    (detected by the substring 'castor' in the path).  The files are copied
    into a per-input subdirectory of the module global workflowdirArchive,
    which is updated in place here; the list of destination file paths is
    returned.

    NOTE(review): relies on the module-level 'option' object created in
    __main__ (option.newarchive, option.overwrite) — confirm it is set
    before calling this from anywhere else.
    """
    global workflowdirArchive
    # Castor paths use the ns(ls)/rf(cp) command prefixes; plain local
    # paths must exist up front.
    lsCommand = ''
    cpCommand = ''
    listoffiles = []
    tmplistoffiles = []
    if path.find('castor') != -1:
        print("Getting files from castor ...")
        lsCommand = 'ns'
        cpCommand = 'rf'
    elif not os.path.exists(path):
        exit("ERROR: File or directory " + path + " doesn't exist")

    if path[len(path)-4:len(path)] != '.txt':
        # Directory input: list the .txt files it contains.
        if path[len(path)-1] != '/':
            path = path + '/'

        aCommand = lsCommand + 'ls '+ path + " | grep .txt"

        tmpstatus = subprocess.getstatusoutput( aCommand )
        tmplistoffiles = tmpstatus[1].split('\n')
        if len(tmplistoffiles) == 1:
            if tmplistoffiles[0] == '':
                exit('ERROR: No files found in directory ' + path)
            if tmplistoffiles[0].find('No such file or directory') != -1:
                exit("ERROR: File or directory " + path + " doesn't exist")
    else:
        # Single-file input: split into directory part and bare file name.
        tmplistoffiles.append(path[path.rfind('/')+1:len(path)])
        path = path[0:path.rfind('/')+1]

    # Name the archive subdirectory after the input directory (or the cwd
    # for './'), optionally suffixed with the tag type.
    archiveName = path
    if path == './':
        archiveName = os.getcwd() + '/'
        archiveName = archiveName[archiveName[:len(archiveName)-1].rfind('/')+1:len(archiveName)]
    if path[:len(path)-1].rfind('/') != -1:
        archiveName = path[path[:len(path)-1].rfind('/')+1:len(path)]

    workflowdirArchive = workflowdirArchive + archiveName
    if tagType != '' :
        workflowdirArchive = workflowdirArchive[:len(workflowdirArchive)-1] + '_' + tagType + '/'
    if not os.path.isdir(workflowdirArchive):
        os.mkdir(workflowdirArchive)
    elif(option.newarchive):
        # -n/--newarchive: never reuse an existing archive dir; probe
        # <dir>_1, <dir>_2, ... until a free name is found.
        for n in range(1,100000):
            tryDir = workflowdirArchive[:len(workflowdirArchive)-1] + '_' + str(n) + '/'
            if not os.path.isdir(tryDir):
                workflowdirArchive = tryDir
                os.mkdir(workflowdirArchive)
                break
            elif n == 100000-1:
                exit('ERROR: Unbelievable! do you ever clean ' + workflowdir + '?. I think you have to remove some directories!')

    # Copy each .txt file into the archive, honouring -o/--overwrite for
    # files already present at the destination.
    for ifile in tmplistoffiles:
        if ifile.find('.txt') != -1:
            if os.path.isfile(workflowdirArchive+"/"+ifile):
                if option.overwrite:
                    print("File " + ifile + " already exists in destination. We will overwrite it.")
                else:
                    print("File " + ifile + " already exists in destination. Keep original file.")
                    listoffiles.append( workflowdirArchive + ifile )
                    continue
            listoffiles.append( workflowdirArchive + ifile )
            # Copy to the archive (rfcp for castor sources, cp otherwise).
            aCommand = cpCommand + 'cp '+ path + ifile + " " + workflowdirArchive
            print(" >> " + aCommand)
            tmpstatus = subprocess.getstatusoutput( aCommand )
    return listoffiles
0127
def mkWorkflowdir():
    """Build the workflow directory tree, flushing the scratch areas.

    Creates workflowdir (with a message), lastPayloads/, tmp/ and
    archive/ when missing.  lastPayloads/ and tmp/ are emptied when they
    already exist; archive/ is left untouched.
    """
    global workflowdir
    global workflowdirLastPayloads
    global workflowdirTmp
    global workflowdirArchive

    if not os.path.isdir(workflowdir):
        print("Making " + workflowdir + " directory...")
        os.mkdir(workflowdir)

    # Scratch areas: create on first use, flush on every later run.
    for scratch in (workflowdirLastPayloads, workflowdirTmp):
        if not os.path.isdir(scratch):
            os.mkdir(scratch)
        else:
            os.system("rm -f "+ scratch + "*")

    if not os.path.isdir(workflowdirArchive):
        os.mkdir(workflowdirArchive)
0149
0150
if __name__ == '__main__':

    # ------------------------------------------------------------------
    # Command-line options.  parse() comes from CommonMethods (star
    # import) and builds the option object from this module's docstring,
    # so the usage text above doubles as the option definition.
    # ------------------------------------------------------------------
    option,args = parse(__doc__)
    if not args and not option: exit()

    # Workflow area: default under $CMSSW_BASE, overridable with -O.
    # NOTE(review): os.getenv("CMSSW_BASE") returns None outside a CMSSW
    # environment, which would make this concatenation raise TypeError.
    workflowdir = os.getenv("CMSSW_BASE") + "/src/RecoVertex/BeamSpotProducer/test/workflow/"
    if option.Output:
        workflowdir = option.Output
        if workflowdir[len(workflowdir)-1] != '/':
            workflowdir = workflowdir + '/'
    workflowdirLastPayloads = workflowdir + "lastPayloads/"
    workflowdirTmp = workflowdir + "tmp/"
    workflowdirArchive = workflowdir + "archive/"

    # Create the directory tree only when we actually have work to do.
    if ( (option.data and option.tag) or (option.data and option.copy)):
        mkWorkflowdir()

    if not option.data:
        print("ERROR: You must provide the data file or the a directory with data files")
        exit()

    # Copy-only mode: stage the input files into the archive and stop.
    if option.copy:
        copyToWorkflowdir(option.data)
        exit("Files copied in " + workflowdirArchive)

    # Derive the tag type (archive-dir suffix and dropbox IOVCheck) from
    # the tag name itself; unrecognized names default to "offline".
    tagname = ''
    if option.tag:
        tagname = option.tag
        if tagname.find("offline") != -1:
            tagType = "offline"
        elif tagname.find("prompt") != -1:
            tagType = "prompt"
        elif tagname.find("express") != -1 :
            tagType = "express"
        elif tagname.find("hlt") != -1:
            tagType = "hlt"
        else:
            print("I am assuming your tag is for the offline database...")
            tagType = "offline"
    else:
        print("ERROR: You must provide the database tag name")
        exit()

    # IOV granularity: run number (default), lumi section, or time.
    IOVbase = 'runbase'
    timetype = 'runnumber'
    if option.IOVbase:
        if option.IOVbase != "runbase" and option.IOVbase != "lumibase" and option.IOVbase != "timebase":
            print("\n\n unknown iov base option: "+ option.IOVbase +" \n\n\n")
            exit()
        IOVbase = option.IOVbase

    # Stage the inputs, then index the payloads chronologically.  Keys
    # are int(pack(run, lumi)); pack() comes from CommonMethods —
    # presumably a run/lumi word packing, verify there.
    listoffiles = copyToWorkflowdir(option.data)

    sortedlist = {}

    for beam_file in listoffiles:

        # Merged mode: one file holding many 23-line fit-result blocks.
        if len(listoffiles)==1 and option.merged:
            mergedfile = open(beam_file)
            alllines = mergedfile.readlines()
            npayloads = int(len(alllines)/23)
            for i in range(0,npayloads):
                block = alllines[i * 23: (i+1)*23]
                arun = ''
                atime = ''  # NOTE(review): parsed below but never used
                alumi = ''
                for line in block:
                    if line.find('Runnumber') != -1:
                        arun = line.split()[1]
                    if line.find("EndTimeOfFit") != -1:
                        atime = time.strptime(line.split()[1] + " " + line.split()[2] + " " + line.split()[3],"%Y.%m.%d %H:%M:%S %Z")
                    if line.find("LumiRange") != -1:
                        alumi = line.split()[3]
                    # NOTE(review): this 'continue' is the last statement
                    # of the loop body, so it has no effect; it looks
                    # intended to skip storing non-Type-2 blocks.
                    if line.find('Type') != -1 and line.split()[1] != '2':
                        continue
                sortedlist[int(pack(int(arun), int(alumi)))] = block
            break

        # Per-file mode: one fit result per input file.
        tmpfile = open(beam_file)
        atime = ''
        arun = ''
        alumis = ''
        skip = False
        for line in tmpfile:
            if line.find('Runnumber') != -1:
                arun = line.split()[1]
            if line.find("EndTimeOfFit") != -1:
                atime = time.strptime(line.split()[1] + " " + line.split()[2] + " " + line.split()[3],"%Y.%m.%d %H:%M:%S %Z")
            if line.find("LumiRange") != -1:
                alumi = line.split()[3]
            # Fit type 0 marks a zero/failed fit: skip the whole file.
            if line.find('Type') != -1 and line.split()[1] == '0':
                skip = True
        if skip:
            print(" zero fit result, skip file " + beam_file + " with time stamp:")
            # NOTE(review): 'alumis' is never assigned (the loop fills
            # 'alumi'), so this always prints an empty lumi range.
            print(" run " + arun + " lumis " + alumis)
        else:
            # NOTE(review): 'alumi' is only bound if a LumiRange line was
            # seen in this file — otherwise this raises NameError.
            sortedlist[int(pack(int(arun), int(alumi)))] = beam_file

        tmpfile.close()

    keys = sorted(sortedlist.keys())

    # Append every payload's text to one cumulative "all IOVs" file.
    if not os.path.isdir(workflowdirArchive + "AllIOVs"):
        os.mkdir(workflowdirArchive + "AllIOVs")
    allbeam_file = workflowdirArchive + "AllIOVs/" + tagname + "_all_IOVs.txt"

    allfile = open( allbeam_file, 'a')
    print(" merging all results into file: " + allbeam_file)

    # Start from a fresh combined sqlite file.
    if os.path.exists(workflowdirArchive+"payloads/Combined.db"):
        os.system("rm "+workflowdirArchive+"payloads/Combined.db")

    nfile = 0
    iov_since_first = '1'
    total_files = len(keys)

    # Production destination by default; -T/--Test targets the prep DB.
    destDB = 'oracle://cms_orcon_prod/CMS_COND_31X_BEAMSPOT'
    if option.Test:
        destDB = 'oracle://cms_orcoff_prep/CMS_COND_BEAMSPOT'

    iov_comment = 'Beam spot position'
    for key in keys:

        iov_since = '1'
        iov_till = ''

        suffix = "_" + str(nfile)
        writedb_template = os.getenv("CMSSW_BASE") + "/src/RecoVertex/BeamSpotProducer/test/write2DB_template.py"
        readdb_template = os.getenv("CMSSW_BASE") + "/src/RecoVertex/BeamSpotProducer/test/readDB_template.py"
        sqlite_file_name = tagname + suffix
        sqlite_file = workflowdirTmp + sqlite_file_name + '.db'
        metadata_file = workflowdirTmp + sqlite_file_name + '.txt'
        nfile = nfile + 1

        # In merged mode each payload is a list of lines, not a file:
        # dump it to a scratch file so the code below sees a file path.
        beam_file = sortedlist[key]
        tmp_datafilename = workflowdirTmp+"tmp_datafile.txt"
        if option.merged:
            tmpfile = open(tmp_datafilename,'w')
            tmpfile.writelines(sortedlist[key])
            tmpfile.close()
            beam_file = tmp_datafilename

        # Rewrite the payload into a .tmp file, dropping BeginTimeOfFit/
        # EndTimeOfFit/LumiRange lines while extracting run and lumi for
        # the IOV boundaries; every original line also goes to allfile.
        print("read input beamspot file: " + beam_file)
        tmpfile = open(beam_file)
        beam_file_tmp = workflowdirTmp + beam_file[beam_file.rfind('/')+1:] + ".tmp"
        newtmpfile = open(beam_file_tmp,"w")
        tmp_run = ""
        tmp_lumi_since = ""
        tmp_lumi_till = ""
        for line in tmpfile:
            if line.find("Runnumber") != -1:
                iov_since = line.split()[1]
                iov_till = iov_since
                tmp_run = line.split()[1]
            elif line.find("LumiRange") != -1:
                tmp_lumi_since = line.split()[1]
                tmp_lumi_till = line.split()[3]
            elif line.find("BeginTimeOfFit") == -1 and line.find("EndTimeOfFit") == -1 and line.find("LumiRange") == -1:
                # -z/--zlarge: enlarge sigmaZ to 10 +/- 0.005 cm (see usage).
                if line.find("sigmaZ0") != -1 and option.zlarge:
                    line = "sigmaZ0 10\n"
                if line.find("Cov(3,j)") != -1 and option.zlarge:
                    line = "Cov(3,j) 0 0 0 2.5e-05 0 0 0\n"
                newtmpfile.write(line)
            allfile.write(line)

        # For lumi-based IOVs the boundaries are packed run/lumi ids.
        if IOVbase == "lumibase":
            timetype = "lumiid"
            iov_since = str( pack(int(tmp_run), int(tmp_lumi_since)) )
            iov_till = str( pack(int(tmp_run), int(tmp_lumi_till)) )
        # Remember the first IOV for the merged metadata card below.
        if nfile == 1:
            iov_since_first = iov_since

        tmpfile.close()
        newtmpfile.close()
        if option.copy:
            continue

        beam_file = beam_file_tmp

        # writeSqliteFile/readSqliteFile/appendSqliteFile come from
        # CommonMethods (star import).
        if not writeSqliteFile(sqlite_file,tagname,timetype,beam_file,writedb_template,workflowdirTmp):
            print("An error occurred while writing the sqlite file: " + sqlite_file)

        subprocess.getstatusoutput('rm -f ' + beam_file)

        # Read the payload back as a sanity check.
        readSqliteFile(sqlite_file,tagname,readdb_template,workflowdirTmp)

        # Merge this IOV into the combined sqlite file.
        if not os.path.isdir(workflowdirArchive + 'payloads'):
            os.mkdir(workflowdirArchive + 'payloads')

        print(" merge sqlite file ...")
        appendSqliteFile("Combined.db", sqlite_file, tagname, iov_since, iov_till ,workflowdirTmp)

        # Keep a copy of the last IOV for express/prompt conditions.
        if nfile == total_files:
            print(" this is the last IOV. You can use this payload for express and prompt conditions.")
            os.system("cp "+sqlite_file+ " "+workflowdirArchive+"payloads/express.db")
            print("a copy of this payload has been placed at:")
            print(workflowdirArchive+"payloads/express.db")

        # Remove the per-IOV sqlite file.
        os.system("rm "+ sqlite_file)
        print(" clean up done.")

    os.system("mv " + workflowdirTmp + "Combined.db " + workflowdirArchive + "payloads/")
    allfile.close()

    # ------------------------------------------------------------------
    # Metadata card for the merged payload, in dropbox format.
    # ------------------------------------------------------------------
    print(" create MERGED payload card for dropbox ...")

    sqlite_file = workflowdirArchive+'payloads/Combined.db'
    metadata_file = workflowdirArchive+'payloads/Combined.txt'
    dfile = open(metadata_file,'w')

    dfile.write('destDB '+ destDB +'\n')
    dfile.write('tag '+ tagname +'\n')
    dfile.write('inputtag' +'\n')
    dfile.write('since ' + iov_since_first +'\n')

    if IOVbase == "runbase":
        dfile.write('Timetype runnumber\n')
    elif IOVbase == "lumibase":
        dfile.write('Timetype lumiid\n')
    # Express payloads are checked against the hlt IOV.
    checkType = tagType
    if tagType == "express":
        checkType = "hlt"
    dfile.write('IOVCheck ' + checkType + '\n')
    dfile.write('usertext ' + iov_comment +'\n')

    dfile.close()

    # Unique payload name: <tag>@<uuid>.
    uuid = subprocess.getstatusoutput('uuidgen -t')[1]
    final_sqlite_file_name = tagname + '@' + uuid

    # Archive a copy, then move the pair to lastPayloads/ for upload.
    if not os.path.isdir(workflowdirArchive + 'payloads'):
        os.mkdir(workflowdirArchive + 'payloads')
    subprocess.getstatusoutput('cp ' + sqlite_file + ' ' + workflowdirArchive + 'payloads/' + final_sqlite_file_name + '.db')
    subprocess.getstatusoutput('cp ' + metadata_file + ' ' + workflowdirArchive + 'payloads/' + final_sqlite_file_name + '.txt')

    subprocess.getstatusoutput('mv ' + sqlite_file + ' ' + workflowdirLastPayloads + final_sqlite_file_name + '.db')
    subprocess.getstatusoutput('mv ' + metadata_file + ' ' + workflowdirLastPayloads + final_sqlite_file_name + '.txt')

    print(workflowdirLastPayloads + final_sqlite_file_name + '.db')
    print(workflowdirLastPayloads + final_sqlite_file_name + '.txt')

    # -u/--upload: scp the pair to the offline dropbox (-T: test box).
    if option.upload:
        print(" scp files to offline Drop Box")
        dropbox = "/DropBox"
        if option.Test:
            dropbox = "/DropBox_test"

        uploadSqliteFile(workflowdirLastPayloads,final_sqlite_file_name,dropbox)