
"""
Primary Author:
Joshua Dawes - CERN, CMS - The University of Manchester

Debugging, Integration and Maintenance:
Andres Cardenas - CERN, CMS - Universidad San Francisco

Upload script wrapper - controls the automatic update system.

Note: the name of this file follows a different convention from the others because it should be the same as the current upload script name.

Takes user arguments and passes them to the main upload module CondDBFW.uploads, once the correct version exists.

1. Ask the server corresponding to the database we're uploading to which version of CondDBFW it has (query the /conddbfw_version/ url).
2. Decide which directory we can write to - either the current local directory, or /tmp/random_string/.
3. Pull the commit returned from the server into the directory from step 2.
4. Invoke the CondDBFW.uploads module with the arguments given to this script.

"""
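# Example invocation (illustrative only: the SQLite file and tag names are
# hypothetical; see parse_arguments() below for the full list of options):
#
#   python uploadConditions.py --sourceDB mydata.db \
#       --inputTag BeamSpotObject_ByRun \
#       --destinationTag BeamSpotObjects_PCL_byRun_v0_offline \
#       --destinationDatabase prep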

__version__ = 1


import requests
import urllib3
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
try:
    from StringIO import StringIO
except ImportError:
    pass
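
# Python 2/3 compatibility: the interactive prompts below use raw_input(), which
# was removed in Python 3, where input() provides the same behaviour.
try:
    raw_input
except NameError:
    raw_input = input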
import traceback
import sys
import os
import json
import subprocess
import argparse
import netrc
import shutil
import getpass
import errno
import sqlite3


horizontal_rule = "="*60

def run_upload(**parameters):
    """
    Imports CondDBFW.uploads and runs the upload with the upload metadata obtained.
    """
    try:
        import CondCore.Utilities.CondDBFW.uploads as uploads
    except Exception as e:
        traceback.print_exc()
        exit("CondDBFW or one of its dependencies could not be imported.\n"\
            + "If the CondDBFW directory exists, you are likely not in a CMSSW environment.")

    uploader = uploads.uploader(**parameters)
    result = uploader.upload()

def getInput(default, prompt = ''):
    '''Like raw_input() but with a default and automatic strip().
    '''

    answer = raw_input(prompt)
    if answer:
        return answer.strip()

    return default.strip()

def getInputWorkflow(prompt = ''):
    '''Like getInput() but tailored to get target workflows (synchronization options).
    '''
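    # Note: defaultWorkflow is not defined anywhere in this file; it is assumed
    # to be provided by the calling scope if this helper is ever used.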

    while True:
        workflow = getInput(defaultWorkflow, prompt)

        if workflow in frozenset(['offline', 'hlt', 'express', 'prompt', 'pcl']):
            return workflow

        print('Please specify one of the allowed workflows. See above for the explanation on each of them.')

def getInputChoose(optionsList, default, prompt = ''):
    '''Makes the user choose from a list of options.
    '''

    while True:
        index = getInput(default, prompt)

        try:
            return optionsList[int(index)]
        except ValueError:
            print('Please specify an index of the list (i.e. integer).')
        except IndexError:
            print('The index you provided is not in the given list.')

def getInputRepeat(prompt = ''):
    '''Like raw_input() but repeats if nothing is provided and automatic strip().
    '''

    while True:
        answer = raw_input(prompt)
        if answer:
            return answer.strip()

        print('You need to provide a value.')

def runWizard(basename, dataFilename, metadataFilename):
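    '''Interactively collects the upload metadata (input tag, destination database,
    destination tags, since and a free-text comment) for the given SQLite data file
    and saves it as JSON in metadataFilename.
    '''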
    while True:
        print('''\nWizard for metadata for %s

I will ask you some questions to fill the metadata file. For some of the questions there are defaults between square brackets (i.e. []), leave empty (i.e. hit Enter) to use them.''' % basename)


        try:
            dataConnection = sqlite3.connect(dataFilename)
            dataCursor = dataConnection.cursor()
            dataCursor.execute('select name from sqlite_master where type == "table"')
            tables = set(list(zip(*dataCursor.fetchall()))[0])


            if 'TAG' in tables:
                dataCursor.execute('select NAME from TAG')

            else:
                raise Exception()

            inputTags = dataCursor.fetchall()
            if len(inputTags) == 0:
                raise Exception()
            inputTags = list(zip(*inputTags))[0]

        except Exception:
            inputTags = []

        if len(inputTags) == 0:
            print('\nI could not find any input tag in your data file, but you can still specify one manually.')

            inputTag = getInputRepeat(
                '\nWhich is the input tag (i.e. the tag to be read from the SQLite data file)?\ne.g. BeamSpotObject_ByRun\ninputTag: ')

        else:
            print('\nI found the following input tags in your SQLite data file:')
            for (index, inputTag) in enumerate(inputTags):
                print(' %s) %s' % (index, inputTag))

            inputTag = getInputChoose(inputTags, '0',
                '\nWhich is the input tag (i.e. the tag to be read from the SQLite data file)?\ne.g. 0 (you select the first in the list)\ninputTag [0]: ')

        databases = {
            'oraprod': 'oracle://cms_orcon_prod/CMS_CONDITIONS',
            'prod': 'oracle://cms_orcon_prod/CMS_CONDITIONS',
            'oradev': 'oracle://cms_orcoff_prep/CMS_CONDITIONS',
            'prep': 'oracle://cms_orcoff_prep/CMS_CONDITIONS',
        }

        destinationDatabase = ''
        ntry = 0
        print('\nWhich is the destination database where the tags should be exported?')
        print('\n%s) %s' % ('oraprod', databases['oraprod']))
        print('\n%s) %s' % ('oradev', databases['oradev']))

        while ( destinationDatabase not in databases.values() ):
            if ntry==0:
                inputMessage = \
                    '\nPossible choices: oraprod or oradev \ndestinationDatabase: '
            elif ntry==1:
                inputMessage = \
                    '\nPlease choose one of the two valid destinations: oraprod or oradev \ndestinationDatabase: '
            else:
                raise Exception('No valid destination chosen. Bailing out...')

            databaseInput = getInputRepeat(inputMessage).lower()
            if databaseInput in databases.keys():
                destinationDatabase = databases[databaseInput]
            ntry += 1

        while True:
            since = getInput('',
                '\nWhich is the given since? (if not specified, the one from the SQLite data file will be taken -- note that even if specified, still this may not be the final since, depending on the synchronization options you select later: if the synchronization target is not offline, and the since you give is smaller than the next possible one (i.e. you give a run number earlier than the one which will be started/processed next in prompt/hlt/express), the DropBox will move the since ahead to go to the first safe run instead of the value you gave)\ne.g. 1234\nsince []: ')
            if not since:
                since = None
                break
            else:
                try:
                    since = int(since)
                    break
                except ValueError:
                    print('The since value has to be an integer or empty (null).')

        userText = getInput('',
            '\nWrite any comments/text you may want to describe your request\ne.g. Muon alignment scenario for...\nuserText []: ')

        destinationTags = {}
        while True:
            destinationTag = getInput('',
                '\nWhich is the next destination tag to be added (leave empty to stop)?\ne.g. BeamSpotObjects_PCL_byRun_v0_offline\ndestinationTag []: ')
            if not destinationTag:
                if len(destinationTags) == 0:
                    print('There must be at least one destination tag.')
                    continue
                break

            if destinationTag in destinationTags:
                print(
                    'You already added this destination tag. Overwriting the previous one with this new one.')

            destinationTags[destinationTag] = {
            }

        metadata = {
            'destinationDatabase': destinationDatabase,
            'destinationTags': destinationTags,
            'inputTag': inputTag,
            'since': since,
            'userText': userText,
        }

        metadata = json.dumps(metadata, sort_keys=True, indent=4)
        print('\nThis is the generated metadata:\n%s' % metadata)

        if getInput('n',
                    '\nIs it fine (i.e. save in %s and *upload* the conditions if this is the latest file)?\nAnswer [n]: ' % metadataFilename).lower() == 'y':
            break
    print('Saving generated metadata in %s...' % metadataFilename)
    with open(metadataFilename, 'w') as metadataFile:
        metadataFile.write(metadata)

def parse_arguments():

    parser = argparse.ArgumentParser(prog="cmsDbUpload client", description="CMS Conditions Upload Script in CondDBFW.")

    parser.add_argument("--sourceDB", type=str, help="DB to find Tags, IOVs + Payloads in.", required=False)


    parser.add_argument("--inputTag", type=str,\
        help="Tag to take IOVs + Payloads from in --sourceDB.", required=False)
    parser.add_argument("--destinationTag", type=str,\
        help="Tag to copy IOVs + Payloads to in --destDB.", required=False)
    parser.add_argument("--destinationDatabase", type=str,\
        help="Database to copy IOVs + Payloads to.", required=False)
    parser.add_argument("--since", type=int,\
        help="Since to take IOVs from.", required=False)
    parser.add_argument("--userText", type=str,\
        help="Description of --destTag (can be empty).")


    parser.add_argument("--metadataFile", "-m", type=str, help="Metadata file to take metadata from.", required=False)

    parser.add_argument("--debug", required=False, action="store_true")
    parser.add_argument("--verbose", required=False, action="store_true")
    parser.add_argument("--testing", required=False, action="store_true")
    parser.add_argument("--fcsr-filter", type=str, help="Synchronization to take FCSR from for local filtering of IOVs.", required=False)

    parser.add_argument("--netrc", required=False)

    parser.add_argument("--hashToUse", required=False)

    parser.add_argument("--server", required=False)

    parser.add_argument("--review-options", required=False, action="store_true")

    parser.add_argument("--replay-file", required=False)

    command_line_data = parser.parse_args()

    if command_line_data.replay_file:
        dictionary = json.loads("".join(open(command_line_data.replay_file, "r").readlines()))
        command_line_data.tier0_response = dictionary["tier0_response"]


    server_alias_to_url = {
        "prep" : "https://cms-conddb-dev.cern.ch/cmsDbCondUpload/",
        "dev" : "https://cms-conddb-dev.cern.ch/cmsDbCondUpload/",
        "prod" : "https://cms-conddb.cern.ch/cmsDbCondUpload/"
    }


    if command_line_data.server in server_alias_to_url.keys():
        command_line_data.server = server_alias_to_url[command_line_data.server]


    database_alias_to_connection = {
        "prep": "oracle://cms_orcoff_prep/CMS_CONDITIONS",
        "dev": "oracle://cms_orcoff_prep/CMS_CONDITIONS",
        "prod": "oracle://cms_orcon_adg/CMS_CONDITIONS"
    }

    if command_line_data.destinationDatabase in database_alias_to_connection.keys():
        command_line_data.destinationDatabase = database_alias_to_connection[command_line_data.destinationDatabase]

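    # Credentials: read from the "ConditionUploader" entry of the netrc file
    # (~/.netrc by default, or the file given with --netrc); if that entry is
    # missing, optionally fall back to typing the username and password.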
    try:
        netrc_file = command_line_data.netrc
        netrc_authenticators = netrc.netrc(netrc_file).authenticators("ConditionUploader")
        if netrc_authenticators == None:
            print("Your netrc file must contain the key 'ConditionUploader'.")
            manual_input = raw_input("Do you want to try to type your credentials? ")
            if manual_input == "y":

                username = raw_input("Username: ")
                password = getpass.getpass("Password: ")
            else:
                exit()
        else:
            print("Read your credentials from ~/.netrc. If you want to use a different file, supply its name with the --netrc argument.")
            username = netrc_authenticators[0]
            password = netrc_authenticators[2]
    except:
        print("Couldn't obtain your credentials (either from netrc or manual input).")
        exit()

    command_line_data.username = username
    command_line_data.password = password


    command_line_data.destinationTags = {command_line_data.destinationTag:{}}

    """
    Construct metadata_dictionary:
    There are currently 3 cases:

    1) An IOV is being appended to an existing Tag with an existing Payload.
       In this case, we just take all data from the command line.

    2) No metadata file is given, so we assume that ALL upload metadata is coming from the command line.

    3) A metadata file is given, hence we parse the file, and then iterate through command line arguments
       since these override the options set in the metadata file.

    """
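
    # An illustrative sketch of the metadata file layout that runWizard() writes
    # (the values shown here are hypothetical):
    #
    # {
    #     "destinationDatabase": "oracle://cms_orcoff_prep/CMS_CONDITIONS",
    #     "destinationTags": {"BeamSpotObjects_PCL_byRun_v0_offline": {}},
    #     "inputTag": "BeamSpotObject_ByRun",
    #     "since": 1234,
    #     "userText": "Beam spot conditions for ..."
    # }
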
    if command_line_data.hashToUse != None:
        command_line_data.userText = ""
        metadata_dictionary = command_line_data.__dict__
    elif command_line_data.metadataFile == None:
        if command_line_data.sourceDB != None and (command_line_data.inputTag == None or command_line_data.destinationTag == None or command_line_data.destinationDatabase == None):
            basepath = command_line_data.sourceDB.rsplit('.db', 1)[0].rsplit('.txt', 1)[0]
            basename = os.path.basename(basepath)
            dataFilename = '%s.db' % basepath
            metadataFilename = '%s.txt' % basepath

            try:
                with open(dataFilename, 'rb') as dataFile:
                    pass
            except IOError as e:
                errMsg = 'Impossible to open SQLite data file %s' % dataFilename
                print(errMsg)
                ret = {}
                ret['status'] = -3
                ret['error'] = errMsg
                exit(ret)


            try:
                with open(metadataFilename, 'rb') as metadataFile:
                    pass
            except IOError as e:
                if e.errno != errno.ENOENT:
                    errMsg = 'Impossible to open file %s (for other reason than not existing)' % metadataFilename
                    ret = {}
                    ret['status'] = -4
                    ret['error'] = errMsg
                    exit(ret)

                if getInput('y', '\nIt looks like the metadata file %s does not exist and not enough parameters were received in the command line. Do you want me to create it and help you fill it?\nAnswer [y]: ' % metadataFilename).lower() != 'y':
                    errMsg = 'Metadata file %s does not exist' % metadataFilename
                    ret = {}
                    ret['status'] = -5
                    ret['error'] = errMsg
                    exit(ret)

                runWizard(basename, dataFilename, metadataFilename)
            command_line_data.metadataFile = metadataFilename
        else:
            command_line_data.userText = command_line_data.userText\
                if command_line_data.userText != None\
                else str(raw_input("Tag's description [can be empty]:"))
            metadata_dictionary = command_line_data.__dict__

    if command_line_data.metadataFile != None:
        metadata_dictionary = json.loads("".join(open(os.path.abspath(command_line_data.metadataFile), "r").readlines()))
        metadata_dictionary["username"] = username
        metadata_dictionary["password"] = password
        metadata_dictionary["userText"] = metadata_dictionary.get("userText")\
            if metadata_dictionary.get("userText") != None\
            else str(raw_input("Tag's description [can be empty]:"))

    for (option_name, option_value) in command_line_data.__dict__.items():

        if option_name != "destinationTags":
            if option_value != None or (option_value == None and not(option_name in metadata_dictionary.keys())):

                metadata_dictionary[option_name] = option_value
        else:
            if option_value != {None:{}}:
                metadata_dictionary["destinationTags"] = option_value
            elif option_value == {None:{}} and not("destinationTags" in metadata_dictionary.keys()):
                metadata_dictionary["destinationTags"] = {None:{}}

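    # --review-options: show the configuration that will be used for the upload
    # (printing a descriptive placeholder for options that were left unset) and
    # ask for confirmation before proceeding.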
    if command_line_data.review_options:
        defaults = {
            "since" : "Since of first IOV",
            "userText" : "Populated by upload process",
            "netrc" : "None given",
            "fcsr_filter" : "Don't apply",
            "hashToUse" : "Using local SQLite file instead"
        }
        print("Configuration to use for the upload:")
        for key in metadata_dictionary:
            if key not in ["username", "password", "destinationTag"]:
                value_to_print = metadata_dictionary[key] if metadata_dictionary[key] != None else defaults.get(key)
                print("\t%s : %s" % (key, value_to_print))

        if raw_input("\nDo you want to continue? [y/n] ") != "y":
            exit()

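    # If no server was given explicitly, infer it from the destination database:
    # the prep database goes to the dev upload service, everything else to production.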
    if metadata_dictionary["server"] == None:
        if metadata_dictionary["destinationDatabase"] == "oracle://cms_orcoff_prep/CMS_CONDITIONS":
            metadata_dictionary["server"] = server_alias_to_url["prep"]
        else:
            metadata_dictionary["server"] = server_alias_to_url["prod"]

    return metadata_dictionary

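# Ask the upload server which version of this script it expects; __main__ compares
# the result against the local __version__ and refuses to run if they differ.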
def get_version(url):
    return requests.get(url + "script_version/", verify=False)

if __name__ == "__main__":

    upload_metadata = parse_arguments()


    final_service_url = upload_metadata["server"]
    try:
        response = get_version(final_service_url)
        server_version = response.json()
    except Exception as e:
        print(horizontal_rule)
        print(e)
        print("Could not connect to server at %s" % final_service_url)
        print("If you specified a server please check it is correct. If that is not the issue please contact the AlcaDB team.")
        print(horizontal_rule)
        exit(1)

    if server_version["version"] != __version__:
        print(horizontal_rule)
        print("Local upload script version is different from the server version. Please run the following command to get the latest script.")
        print("curl --insecure -o uploadConditions.py %sget_upload_script/ && chmod +x uploadConditions.py;" % final_service_url)
        print(horizontal_rule)
        exit(1)

    import CondCore.Utilities.CondDBFW.data_sources as data_sources

    upload_metadata["sqlite_file"] = upload_metadata.get("sourceDB")

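    # Create the local directory for upload logs, ignoring the error if it already exists.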
    try:
        os.mkdir('upload_logs')
    except OSError as e:
        pass

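    # The uploader consumes the metadata as a CondDBFW json_data_node under the
    # "metadata_source" key; build it from a copy of the metadata, leaving out
    # any previous "metadata_source" entry.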
    upload_metadata_argument = {}
    for (key, value) in upload_metadata.items():
        if key != "metadata_source":
            upload_metadata_argument[key] = value

    upload_metadata["metadata_source"] = data_sources.json_data_node.make(upload_metadata_argument)
    try:

        run_upload(**upload_metadata)
        print(horizontal_rule)
        print("Process completed without issues. Please check logs for further details.")
        print(horizontal_rule)
    except SystemExit as e:
        print(horizontal_rule)
        print("Process exited abnormally. Please check logs for details.")
        print(horizontal_rule)
        exit(1)
    exit(0)