File indexing completed on 2024-04-06 11:56:35
0001
0002
0003 from __future__ import print_function
0004 import os
0005 import re
0006 import sys
0007 if sys.version_info[0]>2:
0008 import _pickle as cPickle
0009 else:
0010 import cPickle
0011 import argparse
0012 import itertools
0013 import subprocess
0014 import collections
0015 import configparser as ConfigParser
0016 import Alignment.MillePedeAlignmentAlgorithm.mpslib.tools as mps_tools
0017 import Alignment.MillePedeAlignmentAlgorithm.mpslib.Mpslibclass as mpslib
0018 import Alignment.MillePedeAlignmentAlgorithm.mpsvalidate.iniparser as mpsv_iniparser
0019 import Alignment.MillePedeAlignmentAlgorithm.mpsvalidate.trackerTree as mpsv_trackerTree
0020 from Alignment.MillePedeAlignmentAlgorithm.alignmentsetup.helper import checked_out_MPS
0021 from functools import reduce
0022
0023
0024
def main(argv = None):
    """Main routine. Not called, if this module is loaded via `import`.

    Arguments:
    - `argv`: Command line arguments passed to the script.
    """

    # BUG FIX: compare to None with 'is', not '==' (PEP 8; '==' can be
    # hijacked by custom __eq__ implementations)
    if argv is None:
        argv = sys.argv[1:]

    setup_alignment = SetupAlignment(argv)
    setup_alignment.setup()
0037
0038
0039
0040 class SetupAlignment(object):
0041 """Class encapsulating the alignment campaign setup procedure."""
0042
    def __init__(self, argv):
        """Constructor

        Arguments:
        - `argv`: command line arguments
        """

        self._argv = argv               # raw command line arguments
        self._args = None               # parsed arguments (argparse.Namespace)
        self._config = None             # ConfigParser of the main .ini file
        self._mss_dir = None            # mass storage directory for binaries
        self._datasets = collections.OrderedDict()        # dataset name -> settings
        self._first_run = None          # 'FirstRunForStartGeometry' (string)
        self._cms_process = None        # cms.Process parsed from the template
        self._override_gt = None        # config snippet overriding the global tag
        self._pede_script = None        # path to pede batch script template
        self._weight_dict = collections.OrderedDict()     # dataset name -> weight expressions
        self._mille_script = None       # path to mille batch script template
        self._mps_dir_name = None       # campaign directory name ('mp<...>')
        self._common_weights = {}       # named weights shared between datasets
        self._weight_configs = []       # resolved per-dataset weight configurations
        self._general_options = {}      # options from the [general] section
        self._external_datasets = collections.OrderedDict()  # external .ini configs
        self._first_pede_config = True  # no pede merge job has been set up yet

        # perform the different setup steps (order matters: the config is
        # needed by everything else, paths and datasets before job creation)
        self._create_config()
        self._fill_general_options()
        self._fetch_datasets()
        self._construct_paths()
        self._create_mass_storage_directory()
        self._fetch_pede_settings()
        self._create_weight_configs()
0075
0076
0077 def setup(self):
0078 """Setup the alignment campaign."""
0079
0080 if self._args.weight:
0081 self._create_additional_pede_jobs()
0082 else:
0083 self._create_mille_jobs()
0084 self._create_pede_jobs()
0085
0086 if self._override_gt.strip() != "":
0087 msg = ("Overriding global tag with single-IOV tags extracted from "
0088 "'{}' for run number '{}'.".format(self._global_tag,
0089 self._first_run))
0090 print(msg)
0091 print("-"*75)
0092 print(self._override_gt)
0093 print("="*75)
0094
0095
0096 def _create_config(self):
0097 """Create ConfigParser object from command line arguments."""
0098
0099 helpEpilog ="""Builds the config-templates from a universal
0100 config-template for each dataset specified in .ini-file that is passed
0101 to this script. Then calls mps_setup.py for all datasets."""
0102 parser = argparse.ArgumentParser(
0103 description = ("Setup the alignment as configured in the "
0104 "alignment_config file."),
0105 epilog = helpEpilog)
0106 parser.add_argument("-v", "--verbose", action="store_true",
0107 help="display detailed output of mps_setup")
0108 parser.add_argument("-w", "--weight", action="store_true",
0109 help=("creates additional merge job(s) with "
0110 "(possibly new) weights from .ini-config"))
0111 parser.add_argument("alignmentConfig",
0112 help=("name of the .ini config file that specifies "
0113 "the datasets to be used"))
0114
0115 self._args = parser.parse_args(self._argv)
0116 self._config = ConfigParser.ConfigParser()
0117 self._config.optionxform = str
0118
0119 self._config.read(self._args.alignmentConfig)
0120 self._config.config_path = self._args.alignmentConfig
0121
0122
0123 def _construct_paths(self):
0124 """Determine directory paths and create the ones that are needed."""
0125
0126 mpsTemplates = os.path.join("src", "Alignment",
0127 "MillePedeAlignmentAlgorithm", "templates")
0128 if checked_out_MPS()[0]:
0129 mpsTemplates = os.path.join(os.environ["CMSSW_BASE"], mpsTemplates)
0130 else:
0131 mpsTemplates = os.path.join(os.environ["CMSSW_RELEASE_BASE"], mpsTemplates)
0132 self._mille_script = os.path.join(mpsTemplates, "mps_runMille_template.sh")
0133 self._pede_script = os.path.join(mpsTemplates, "mps_runPede_rfcp_template.sh")
0134
0135
0136 currentDir = os.getcwd()
0137 match = re.search(re.compile('mpproduction\/mp(.+?)$', re.M|re.I),currentDir)
0138 if match:
0139 self._mps_dir_name = 'mp'+match.group(1)
0140 else:
0141 print("Current location does not seem to be a MillePede campaign directory:", end=' ')
0142 print(currentDir)
0143 sys.exit(1)
0144
0145
0146 def _fill_general_options(self):
0147 """Create and fill `general_options` dictionary."""
0148
0149 print("="*75)
0150 self._fetch_dataset_directory()
0151 self._fetch_external_datasets()
0152 self._fetch_essentials()
0153 self._fetch_defaults()
0154
0155
0156 def _fetch_external_datasets(self):
0157 """Fetch information about external datasets."""
0158
0159 if self._config.has_option("general", "externalDatasets"):
0160 datasets = map(lambda x: x.strip(),
0161 self._config.get("general",
0162 "externalDatasets").split(","))
0163 datasets = [x for x in datasets if len(x.strip()) > 0]
0164 for item in datasets:
0165 splitted = item.split("|")
0166 dataset = splitted[0].strip()
0167 dataset = os.path.expandvars(dataset)
0168
0169 weight = splitted[1] if len(splitted) > 1 else None
0170 config = ConfigParser.ConfigParser()
0171 config.optionxform = str
0172 config.read(dataset)
0173 config.config_path = dataset
0174 self._external_datasets[dataset] = {"config": config,
0175 "weight": weight}
0176
0177
0178
    def _create_mass_storage_directory(self):
        """
        Create MPS mass storage directory where, e.g., mille binaries are
        stored.
        """

        # EOS path: <massStorageDir>/MPproductionFiles/<campaign dir name>
        self._mss_dir = self._general_options.get("massStorageDir",
                                                  "/eos/cms/store/group/alca_millepede/")
        self._mss_dir = os.path.join(self._mss_dir, "MPproductionFiles",
                                     self._mps_dir_name)

        cmd = ["mkdir", "-p", self._mss_dir]

        # create the directory only if not in test mode
        # NOTE(review): 'testMode' is read from the ini file as a *string*, so
        # any non-empty value (including "false") is truthy here — confirm
        # this is the intended behavior.
        if not self._general_options.get("testMode", False):
            try:
                # silence mkdir's stdout/stderr
                with open(os.devnull, "w") as dump:
                    subprocess.check_call(cmd, stdout = dump, stderr = dump)
            except subprocess.CalledProcessError:
                print("Failed to create mass storage directory:", self._mss_dir)
                sys.exit(1)
0201
0202
    def _create_weight_configs(self):
        """Extract different weight configurations from `self._config`."""

        # per dataset: one (name, weight-expression) pair per configured weight
        weights_list = [[(name, weight) for weight in self._weight_dict[name]]
                        for name in self._weight_dict]

        # same structure for the named common weights
        common_weights_list = [[(name, weight)
                                for weight in self._common_weights[name]]
                               for name in self._common_weights]

        # every combination of common-weight values, as name -> value dicts
        common_weights_dicts = []
        for item in itertools.product(*common_weights_list):
            d = {}
            for name,weight in item:
                d[name] = weight
            common_weights_dicts.append(d)

        weight_configs = []
        for weight_conf in itertools.product(*weights_list):
            number_of_configs = len(weight_configs)
            for common_weight in common_weights_dicts:
                # substitute each common-weight name by its value inside the
                # dataset weight expressions
                replaced_config \
                    = tuple([(dataset[0],
                              reduce(lambda x,y: mps_tools.replace_factors(x, y, common_weight[y]),
                                     common_weight, dataset[1]))
                             for dataset in weight_conf])
                if replaced_config not in weight_configs:
                    weight_configs.append(replaced_config)

            # nothing was substituted -> keep the raw configuration
            if len(weight_configs) == number_of_configs:
                weight_configs.append(weight_conf)

        # evaluate the weight product strings into final values
        for weight_config in weight_configs:
            resolved_weight_config \
                = [(dataset[0], mps_tools.compute_product_string(dataset[1]))
                   for dataset in weight_config]
            self._weight_configs.append(resolved_weight_config)
0241
0242
0243 def _fetch_pede_settings(self):
0244 """Fetch 'pedesettings' from general section in `self._config`."""
0245
0246 self._pede_settings \
0247 = ([x.strip()
0248 for x in self._config.get("general", "pedesettings").split(",")]
0249 if self._config.has_option("general", "pedesettings") else [None])
0250
0251
    def _create_mille_jobs(self):
        """Create the mille jobs based on the [dataset:<name>] sections."""

        # patterns for the 'setup<X> = ...' lines to replace in the template
        gt_regex = re.compile('setupGlobaltag\s*\=\s*[\"\'](.*?)[\"\']')
        sg_regex = re.compile("setupRunStartGeometry\s*\=\s*.*$", re.M)
        collection_regex = re.compile('setupCollection\s*\=\s*[\"\'](.*?)[\"\']')
        czt_regex = re.compile('setupCosmicsZeroTesla\s*\=\s*.*$', re.M)
        cdm_regex = re.compile('setupCosmicsDecoMode\s*\=\s*.*$', re.M)
        pw_regex = re.compile('setupPrimaryWidth\s*\=\s*.*$', re.M)
        json_regex = re.compile('setupJson\s*\=\s*.*$', re.M)

        first_dataset = True
        for name, dataset in self._datasets.items():
            print("="*75)

            # read the config template of this dataset
            try:
                with open(dataset["configTemplate"],"r") as f:
                    tmpFile = f.read()
            except IOError:
                print("The config-template called", end=' ')
                print(dataset["configTemplate"], "cannot be found.")
                sys.exit(1)

            # fill in global tag, start geometry and collection
            tmpFile = re.sub(gt_regex,
                             'setupGlobaltag = \"'+dataset["globaltag"]+'\"',
                             tmpFile)
            tmpFile = re.sub(sg_regex,
                             "setupRunStartGeometry = "+
                             self._general_options["FirstRunForStartGeometry"], tmpFile)
            tmpFile = re.sub(collection_regex,
                             'setupCollection = \"'+dataset["collection"]+'\"',
                             tmpFile)
            # cosmics-only settings
            if "ALCARECOTkAlCosmics" in dataset["collection"]:
                if dataset['cosmicsZeroTesla']:
                    tmpFile = re.sub(czt_regex,
                                     'setupCosmicsZeroTesla = True',
                                     tmpFile)
                else :
                    tmpFile = re.sub(czt_regex,
                                     'setupCosmicsZeroTesla = False',
                                     tmpFile)

                if dataset['cosmicsDecoMode']:
                    tmpFile = re.sub(cdm_regex,
                                     'setupCosmicsDecoMode = True',
                                     tmpFile)
                else:
                    tmpFile = re.sub(cdm_regex,
                                     'setupCosmicsDecoMode = False',
                                     tmpFile)

            # optional settings
            if dataset['primaryWidth'] > 0.0:
                tmpFile = re.sub(pw_regex,
                                 'setupPrimaryWidth = '+str(dataset["primaryWidth"]),
                                 tmpFile)
            if dataset['json'] != '':
                tmpFile = re.sub(json_regex,
                                 'setupJson = \"'+dataset["json"]+'\"',
                                 tmpFile)

            # write the customized template to a temporary file
            thisCfgTemplate = "tmp.py"
            with open(thisCfgTemplate, "w") as f:
                f.write(tmpFile)

            # the first dataset defines the reference template used for pede
            # jobs and triggers the creation of the GT-override snippet;
            # subsequent datasets are appended ('-a') to the campaign
            append = "-a"
            if first_dataset:
                append = ""
                first_dataset = False
                self._config_template = tmpFile
                self._cms_process = mps_tools.get_process_object(thisCfgTemplate)
                self._create_input_db()

            # append the GT-override snippet to the temporary config
            with open(thisCfgTemplate, "a") as f: f.write(self._override_gt)

            # assemble the mps_setup.py call for this dataset
            command = ["mps_setup.py",
                       "-m",
                       append,
                       "-M", self._general_options["pedeMem"],
                       "-N", name,
                       self._mille_script,
                       thisCfgTemplate,
                       dataset["inputFileList"],
                       str(dataset["njobs"]),
                       self._general_options["classInf"],
                       self._general_options["jobname"],
                       self._pede_script,
                       "cmscafuser:"+self._mss_dir]
            if dataset["numberOfEvents"] > 0:
                command.extend(["--max-events", str(dataset["numberOfEvents"])])
            # drop the empty 'append' placeholder for the first dataset
            command = [x for x in command if len(x.strip()) > 0]

            # print a summary of this dataset's job configuration
            print("Creating jobs for dataset:", name)
            print("-"*75)
            print("Baseconfig: ", dataset["configTemplate"])
            print("Collection: ", dataset["collection"])
            if "ALCARECOTkAlCosmics" in dataset["collection"]:
                print("cosmicsDecoMode: ", dataset["cosmicsDecoMode"])
                print("cosmicsZeroTesla: ", dataset["cosmicsZeroTesla"])
            print("Globaltag: ", dataset["globaltag"])
            print("Number of jobs: ", dataset["njobs"])
            print("Inputfilelist: ", dataset["inputFileList"])
            if dataset["json"] != "":
                print("Jsonfile: ", dataset["json"])
            if self._args.verbose:
                print("Pass to mps_setup: ", " ".join(command))

            # run the mille job creation
            self._handle_process_call(command, self._args.verbose)

        # remove the temporary config file
        self._handle_process_call(["rm", thisCfgTemplate])
0368
0369
    def _create_pede_jobs(self):
        """Create pede jobs from the given input."""

        # one merge job per (pede settings, weight configuration) combination
        for setting in self._pede_settings:
            print()
            print("="*75)
            if setting is None:
                print("Creating pede job{}.".format(
                    "s" if len(self._pede_settings)*len(self._weight_configs) > 1 else ""))
                print("-"*75)
            else:
                print("Creating pede jobs using settings from '{0}'.".format(setting))
            for weight_conf in self._weight_configs:
                # blank all weights first
                self._handle_process_call(["mps_weight.pl", "-c"])

                # write the reference template; create the GT override on the
                # first pass if it does not exist yet (e.g. '-w' mode)
                thisCfgTemplate = "tmp.py"
                with open(thisCfgTemplate, "w") as f: f.write(self._config_template)
                if self._override_gt is None:
                    self._cms_process = mps_tools.get_process_object(thisCfgTemplate)
                    self._create_input_db()
                with open(thisCfgTemplate, "a") as f: f.write(self._override_gt)

                # apply this weight configuration dataset by dataset
                for name,weight in weight_conf:
                    self._handle_process_call(["mps_weight.pl", "-N", name, weight], True)

                if not self._first_pede_config:
                    # create a new merge job (the first one is made by mps_setup)
                    self._handle_process_call(["mps_setupm.pl"], self._args.verbose)

                # read mps.db to find the directory of the merge job
                lib = mpslib.jobdatabase()
                lib.read_db()

                # short-cut for the merge-job path
                jobm_path = os.path.join("jobData", lib.JOBDIR[-1])

                # delete any old merge-config
                command = ["rm", "-f", os.path.join(jobm_path, "alignment_merge.py")]
                self._handle_process_call(command, self._args.verbose)

                # create the new merge-config
                command = [
                    "mps_merge.py",
                    "-w", thisCfgTemplate,
                    os.path.join(jobm_path, "alignment_merge.py"),
                    jobm_path,
                    str(lib.nJobs),
                ]
                if setting is not None: command.extend(["-a", setting])
                print("-"*75)
                print(" ".join(command))
                self._handle_process_call(command, self._args.verbose)
                self._create_tracker_tree()
                if self._first_pede_config:
                    # link the tracker tree into the first merge-job directory
                    os.symlink(self._tracker_tree_path,
                               os.path.abspath(os.path.join(jobm_path,
                                                            ".TrackerTree.root")))
                    self._first_pede_config = False

                # store the weight configuration alongside the merge job
                with open(os.path.join(jobm_path, ".weights.pkl"), "wb") as f:
                    cPickle.dump(weight_conf, f, 2)
                print("="*75)

        # remove the temporary config file
        self._handle_process_call(["rm", thisCfgTemplate])
0437
0438
0439 def _create_additional_pede_jobs(self):
0440 """
0441 Create pede jobs in addition to already existing ones. Return GT
0442 override snippet.
0443 """
0444
0445
0446 if not os.path.isdir("jobData"):
0447 print("No jobData-folder found.", end=' ')
0448 print("Properly set up the alignment before using the -w option.")
0449 sys.exit(1)
0450 if not os.path.exists("mps.db"):
0451 print("No mps.db found.", end=' ')
0452 print("Properly set up the alignment before using the -w option.")
0453 sys.exit(1)
0454
0455 firstDataset = next(iter(self._datasets.values()))
0456 config_template = firstDataset["configTemplate"]
0457 collection = firstDataset["collection"]
0458
0459 try:
0460 with open(config_template,"r") as f:
0461 tmpFile = f.read()
0462 except IOError:
0463 print("The config-template '"+config_template+"' cannot be found.")
0464 sys.exit(1)
0465
0466 tmpFile = re.sub('setupGlobaltag\s*\=\s*[\"\'](.*?)[\"\']',
0467 'setupGlobaltag = \"'+self._global_tag+'\"',
0468 tmpFile)
0469 tmpFile = re.sub('setupCollection\s*\=\s*[\"\'](.*?)[\"\']',
0470 'setupCollection = \"'+collection+'\"',
0471 tmpFile)
0472 tmpFile = re.sub(re.compile("setupRunStartGeometry\s*\=\s*.*$", re.M),
0473 "setupRunStartGeometry = "+self._first_run,
0474 tmpFile)
0475 self._config_template = tmpFile
0476
0477
0478 self._first_pede_config = False
0479 self._create_pede_jobs()
0480
0481
0482 def _handle_process_call(self, command, verbose = False):
0483 """
0484 Wrapper around subprocess calls which treats output depending on verbosity
0485 level.
0486
0487 Arguments:
0488 - `command`: list of command items
0489 - `verbose`: flag to turn on verbosity
0490 """
0491
0492 call_method = subprocess.check_call if verbose else subprocess.check_output
0493 try:
0494 call_method(command, stderr=subprocess.STDOUT)
0495 except subprocess.CalledProcessError as e:
0496 print("" if verbose else e.output)
0497 print("Failed to execute command:", " ".join(command))
0498 sys.exit(1)
0499
0500
    def _create_input_db(self):
        """
        Create sqlite file with single-IOV tags and use it to override the
        GT. If the GT is already customized by the user, the customization has
        higher priority. Creates a snippet to be appended to the configuration
        file.
        """

        run_number = int(self._first_run)
        if not run_number > 0:
            print("'FirstRunForStartGeometry' must be positive, but is", run_number)
            sys.exit(1)

        # write single-IOV tags for the problematic records into a local sqlite file
        input_db_name = os.path.abspath("alignment_input.db")
        tags = mps_tools.create_single_iov_db(self._check_iov_definition(),
                                              run_number, input_db_name)

        # build the config snippet: one tagwriter.setCondition() per record
        self._override_gt = ""
        for record,tag in tags.items():
            if self._override_gt == "":
                # import the tag-writer helper once, before the first condition
                self._override_gt \
                    += ("\nimport "
                        "Alignment.MillePedeAlignmentAlgorithm.alignmentsetup."
                        "SetCondition as tagwriter\n")
            self._override_gt += ("\ntagwriter.setCondition(process,\n"
                                  " connect = \""+tag["connect"]+"\",\n"
                                  " record = \""+record+"\",\n"
                                  " tag = \""+tag["tag"]+"\")\n")
0529
0530
0531 def _check_iov_definition(self):
0532 """
0533 Check consistency of input alignment payloads and IOV definition.
0534 Returns a dictionary with the information needed to override possibly
0535 problematic input taken from the global tag.
0536 """
0537
0538 print("Checking consistency of IOV definition...")
0539 iovs = mps_tools.make_unique_runranges(self._cms_process.AlignmentProducer)
0540
0541 inputs = {
0542 "TrackerAlignmentRcd": None,
0543 "TrackerSurfaceDeformationRcd": None,
0544 "TrackerAlignmentErrorExtendedRcd": None,
0545 }
0546
0547 for condition in self._cms_process.GlobalTag.toGet.value():
0548 if condition.record.value() in inputs:
0549 inputs[condition.record.value()] = {
0550 "tag": condition.tag.value(),
0551 "connect": ("pro"
0552 if not condition.hasParameter("connect")
0553 else condition.connect.value())
0554 }
0555
0556 inputs_from_gt = [record for record in inputs if inputs[record] is None]
0557 inputs.update(
0558 mps_tools.get_tags(self._cms_process.GlobalTag.globaltag.value(),
0559 inputs_from_gt))
0560
0561 if int(self._first_run) != iovs[0]:
0562 if iovs[0] == 1 and len(iovs) == 1:
0563 print("Single IOV output detected in configuration and", end=' ')
0564 print("'FirstRunForStartGeometry' is not 1.")
0565 print("Creating single IOV output from input conditions in run", end=' ')
0566 print(self._first_run+".")
0567 for inp in inputs: inputs[inp]["problematic"] = True
0568 else:
0569 print("Value of 'FirstRunForStartGeometry' has to match first", end=' ')
0570 print("defined output IOV:", end=' ')
0571 print(self._first_run, "!=", iovs[0])
0572 sys.exit(1)
0573
0574 for inp in inputs.values():
0575 inp["iovs"] = mps_tools.get_iovs(inp["connect"], inp["tag"])
0576
0577
0578 problematic_gt_inputs = {}
0579 input_indices = {key: len(value["iovs"]) -1
0580 for key,value in inputs.items()}
0581 for iov in reversed(iovs):
0582 for inp in inputs:
0583 if inputs[inp].pop("problematic", False):
0584 problematic_gt_inputs[inp] = inputs[inp]
0585 if inp in problematic_gt_inputs: continue
0586 if input_indices[inp] < 0:
0587 print("First output IOV boundary at run", iov, end=' ')
0588 print("is before the first input IOV boundary at", end=' ')
0589 print(inputs[inp]["iovs"][0], "for '"+inp+"'.")
0590 print("Please check your run range selection.")
0591 sys.exit(1)
0592 input_iov = inputs[inp]["iovs"][input_indices[inp]]
0593 if iov < input_iov:
0594 if inp in inputs_from_gt:
0595 problematic_gt_inputs[inp] = inputs[inp]
0596 print("Found problematic input taken from global tag.")
0597 print("Input IOV boundary at run",input_iov, end=' ')
0598 print("for '"+inp+"' is within output IOV starting", end=' ')
0599 print("with run", str(iov)+".")
0600 print("Deriving an alignment with coarse IOV", end=' ')
0601 print("granularity starting from finer granularity", end=' ')
0602 print("leads to wrong results.")
0603 print("A single IOV input using the IOV of", end=' ')
0604 print("'FirstRunForStartGeometry' ("+self._first_run+")", end=' ')
0605 print("is automatically created and used.")
0606 continue
0607 print("Found input IOV boundary at run",input_iov, end=' ')
0608 print("for '"+inp+"' which is within output IOV starting", end=' ')
0609 print("with run", str(iov)+".")
0610 print("Deriving an alignment with coarse IOV granularity", end=' ')
0611 print("starting from finer granularity leads to wrong", end=' ')
0612 print("results.")
0613 print("Please check your run range selection.")
0614 sys.exit(1)
0615 elif iov == input_iov:
0616 input_indices[inp] -= 1
0617
0618
0619 input_indices = {key: len(value["iovs"]) -1
0620 for key,value in inputs.items()
0621 if (key != "TrackerAlignmentRcd")
0622 and (inp not in problematic_gt_inputs)}
0623 for iov in reversed(inputs["TrackerAlignmentRcd"]["iovs"]):
0624 for inp in input_indices:
0625 input_iov = inputs[inp]["iovs"][input_indices[inp]]
0626 if iov < input_iov:
0627 print("Found input IOV boundary at run",input_iov, end=' ')
0628 print("for '"+inp+"' which is within 'TrackerAlignmentRcd'", end=' ')
0629 print("IOV starting with run", str(iov)+".")
0630 print("Deriving an alignment with inconsistent IOV boundaries", end=' ')
0631 print("leads to wrong results.")
0632 print("Please check your input IOVs.")
0633 sys.exit(1)
0634 elif iov == input_iov:
0635 input_indices[inp] -= 1
0636
0637 print(" -> IOV consistency check successful.")
0638 print("="*75)
0639
0640 return problematic_gt_inputs
0641
0642
0643 def _create_tracker_tree(self):
0644 """Method to create hidden 'TrackerTree.root'."""
0645
0646 if self._global_tag is None or self._first_run is None:
0647 print("Trying to create the tracker tree before setting the global", end=' ')
0648 print("tag or the run to determine the geometry IOV.")
0649 sys.exit(1)
0650
0651 config = mpsv_iniparser.ConfigData()
0652 config.jobDataPath = "."
0653 config.globalTag = self._global_tag
0654 config.firstRun = self._first_run
0655 self._tracker_tree_path = mpsv_trackerTree.check(config)
0656
0657
0658 def _fetch_essentials(self):
0659 """Fetch general options from config file."""
0660
0661 for var in ("classInf","pedeMem","jobname", "FirstRunForStartGeometry"):
0662 try:
0663 self._general_options[var] = self._config.get('general',var)
0664 except ConfigParser.NoOptionError:
0665 print("No", var, "found in [general] section.", end=' ')
0666 print("Please check ini-file.")
0667 sys.exit(1)
0668 self._first_run = self._general_options["FirstRunForStartGeometry"]
0669
0670
0671 def _fetch_defaults(self):
0672 """Fetch default general options from config file."""
0673
0674 for var in ("globaltag", "configTemplate", "json", "massStorageDir",
0675 "testMode"):
0676 try:
0677 self._general_options[var] = self._config.get("general", var)
0678 except ConfigParser.NoOptionError:
0679 if var == "testMode": continue
0680 print("No '" + var + "' given in [general] section.")
0681
0682 for dataset in self._external_datasets.values():
0683 dataset["general"] = {}
0684 for var in ("globaltag", "configTemplate", "json"):
0685 try:
0686 dataset["general"][var] = dataset["config"].get("general", var)
0687 except (ConfigParser.NoSectionError,ConfigParser.NoOptionError):
0688 pass
0689
0690
0691 def _fetch_dataset_directory(self):
0692 """
0693 Fetch 'datasetDir' variable from general section and add it to the
0694 'os.environ' dictionary.
0695 """
0696
0697 if self._config.has_option("general", "datasetdir"):
0698 dataset_directory = self._config.get("general", "datasetdir")
0699
0700 os.environ["datasetdir"] = dataset_directory
0701 self._general_options["datasetdir"] = dataset_directory
0702 else:
0703 print("No datasetdir given in [general] section.", end=' ')
0704 print("Be sure to give a full path in inputFileList.")
0705 self._general_options["datasetdir"] = ""
0706
0707
0708 def _fetch_datasets(self):
0709 """Fetch internal and external dataset configurations."""
0710
0711 all_configs = collections.OrderedDict()
0712 all_configs["main"] = {"config": self._config,
0713 "general": self._general_options,
0714 "weight": None}
0715 all_configs.update(self._external_datasets)
0716
0717 for config in all_configs.values():
0718 global_weight = "1" if config["weight"] is None else config["weight"]
0719 if global_weight+self._config.config_path in self._common_weights:
0720 global_weight = self._common_weights[global_weight+
0721 self._config.config_path]
0722 elif global_weight in self._common_weights:
0723 global_weight = self._common_weights[global_weight]
0724 else:
0725 global_weight = (global_weight,)
0726 common_weights = {}
0727 weight_dict = {}
0728 for section in config["config"].sections():
0729 cache_datasetdir = os.environ["datasetdir"]
0730 if "general" in section:
0731 if config["config"].has_option("general", "datasetdir"):
0732 os.environ["datasetdir"] = config["config"].get("general", "datasetdir")
0733 elif section == "weights":
0734 for option in config["config"].options(section):
0735 common_weights[option] \
0736 = [x.strip() for x in
0737 config["config"].get(section, option).split(",")]
0738 elif section.startswith("dataset:"):
0739 print("-"*75)
0740
0741 name = section[8:]
0742 if name in self._datasets:
0743 print("WARNING: Duplicate definition of dataset '{}'".format(name))
0744 print(" -> Using defintion in '{}':\n".format(config["config"].config_path))
0745 print(" [{}]".format(section))
0746 for k,v in config["config"].items(section):
0747 print(" ", k, "=", v)
0748 print()
0749 self._datasets[name] = {}
0750
0751
0752 if config["config"].has_option(section, "weight"):
0753 self._weight_dict[name] \
0754 = [x.strip() for x in
0755 config["config"].get(section, "weight").split(",")]
0756 else:
0757 self._weight_dict[name] = ["1.0"]
0758 self._weight_dict[name] = [global_w+"*"+w
0759 for w in self._weight_dict[name]
0760 for global_w in global_weight]
0761 weight_dict[name] = self._weight_dict[name]
0762
0763
0764 for var in ("inputFileList", "collection"):
0765 try:
0766 self._datasets[name][var] = config["config"].get(section, var)
0767 except ConfigParser.NoOptionError:
0768 print("No", var, "found in", section+". Please check ini-file.")
0769 sys.exit(1)
0770
0771
0772
0773 for var in ("configTemplate", "globaltag"):
0774 try:
0775 self._datasets[name][var] = config["config"].get(section, var)
0776 except (ConfigParser.NoSectionError,ConfigParser.NoOptionError):
0777 try:
0778 self._datasets[name][var] = config["general"][var]
0779 except KeyError:
0780 try:
0781 self._datasets[name][var] \
0782 = all_configs["main"]["general"][var]
0783 except KeyError:
0784 print("No",var,"found in ["+section+"]", end=' ')
0785 print("and no default in [general] section.")
0786 sys.exit(1)
0787
0788
0789 if "ALCARECOTkAlCosmics" in self._datasets[name]["collection"]:
0790 try:
0791 self._datasets[name]["cosmicsZeroTesla"] \
0792 = config["config"].getboolean(section,"cosmicsZeroTesla")
0793 except ConfigParser.NoOptionError:
0794 print("No option cosmicsZeroTesla found in", section,"even though it is required for dataset type", self._datasets[name]["collection"], ". Please check ini-file.")
0795 sys.exit(1)
0796 try:
0797 self._datasets[name]["cosmicsDecoMode"] \
0798 = config["config"].getboolean(section,"cosmicsDecoMode")
0799 except ConfigParser.NoOptionError:
0800 print("No option cosmicsDecoMode found in", section,"even though it is required for dataset type", self._datasets[name]["collection"], ".Please check ini-file.")
0801 sys.exit(1)
0802
0803 self._datasets[name]["primaryWidth"] = -1.0
0804 if config["config"].has_option(section,"primaryWidth"):
0805 self._datasets[name]["primaryWidth"] \
0806 = config["config"].getfloat(section,"primaryWidth")
0807
0808 self._datasets[name]["numberOfEvents"] = -1
0809 if config["config"].has_option(section, "numberOfEvents"):
0810 self._datasets[name]["numberOfEvents"] \
0811 = config["config"].getint(section, "numberOfEvents")
0812
0813 self._datasets[name]["json"] = ""
0814 try:
0815 self._datasets[name]["json"] = config["config"].get(section,"json")
0816 except ConfigParser.NoOptionError:
0817 try:
0818 self._datasets[name]["json"] = config["general"]["json"]
0819 except KeyError:
0820 try:
0821 self._datasets[name]["json"] \
0822 = all_configs["main"]["general"]["json"]
0823 except KeyError:
0824 print("No json given in either [general] or", end=' ')
0825 print("["+section+"] sections.")
0826 print(" -> Proceeding without json-file.")
0827
0828
0829
0830 for var in ("inputFileList", "json", "configTemplate"):
0831 self._datasets[name][var] \
0832 = os.path.expandvars(self._datasets[name][var])
0833
0834
0835
0836 self._datasets[name]["njobs"] = 0
0837 try:
0838 with open(self._datasets[name]["inputFileList"], "r") as filelist:
0839 for line in filelist:
0840 if "CastorPool" in line:
0841 continue
0842
0843 if not line.strip()=="":
0844 self._datasets[name]["njobs"] += 1
0845 except IOError:
0846 print("Inputfilelist", self._datasets[name]["inputFileList"], end=' ')
0847 print("does not exist.")
0848 sys.exit(1)
0849 if self._datasets[name]["njobs"] == 0:
0850 print("Number of jobs is 0. There may be a problem with the inputfilelist:")
0851 print(self._datasets[name]["inputFileList"])
0852 sys.exit(1)
0853
0854
0855 if config["config"].has_option(section, "njobs"):
0856 if config["config"].getint(section, "njobs") <= self._datasets[name]["njobs"]:
0857 self._datasets[name]["njobs"] = config["config"].getint(section, "njobs")
0858 else:
0859 print("'njobs' is bigger than the number of files for this", end=' ')
0860 print("dataset:", self._datasets[name]["njobs"])
0861 print("Using default.")
0862 else:
0863 print("No number of jobs specified. Using number of files in", end=' ')
0864 print("inputfilelist as the number of jobs.")
0865
0866
0867 for weight_name, weight_values in common_weights.items():
0868 for key, weight in weight_dict.items():
0869 if any([weight_name in w for w in weight]):
0870 self._common_weights[weight_name+config["config"].config_path] = weight_values
0871 self._weight_dict[key] = [mps_tools.replace_factors(w,
0872 weight_name,
0873 weight_name+config["config"].config_path)
0874 for w in weight]
0875 else:
0876 self._common_weights[weight_name] = weight_values
0877 self._weight_dict[key] = weight
0878
0879 os.environ["datasetdir"] = cache_datasetdir
0880
0881 if len(self._datasets) == 0:
0882 print("No dataset section defined in '{0}'".format(
0883 ", ".join([self._args.aligmentConfig]+self._external_datasets.keys())))
0884 print("At least one section '[dataset:<name>]' is required.")
0885 sys.exit(1)
0886
0887 self._global_tag = self._datasets[name]["globaltag"]
0888
0889
0890
# Run only when executed as a script, not when imported.
if __name__ == "__main__":
    try:
        main()
    except KeyboardInterrupt:
        # exit quietly on Ctrl-C
        pass