# File indexing completed on 2024-11-25 02:29:03
0001
0002
0003 import os
0004 import re
0005 import sys
0006 if sys.version_info[0]>2:
0007 import _pickle as cPickle
0008 else:
0009 import cPickle
0010 import argparse
0011 import itertools
0012 import subprocess
0013 import collections
0014 import configparser as ConfigParser
0015 import Alignment.MillePedeAlignmentAlgorithm.mpslib.tools as mps_tools
0016 import Alignment.MillePedeAlignmentAlgorithm.mpslib.Mpslibclass as mpslib
0017 import Alignment.MillePedeAlignmentAlgorithm.mpsvalidate.iniparser as mpsv_iniparser
0018 import Alignment.MillePedeAlignmentAlgorithm.mpsvalidate.trackerTree as mpsv_trackerTree
0019 from Alignment.MillePedeAlignmentAlgorithm.alignmentsetup.helper import checked_out_MPS
0020 from functools import reduce
0021
0022
0023
def main(argv = None):
    """Main routine. Not called, if this module is loaded via `import`.

    Arguments:
    - `argv`: Command line arguments passed to the script.
    """

    # identity check ('is None') instead of '== None': PEP 8 and safe
    # against objects overriding '__eq__'
    if argv is None:
        argv = sys.argv[1:]

    setup_alignment = SetupAlignment(argv)
    setup_alignment.setup()
0036
0037
0038
0039 class SetupAlignment(object):
0040 """Class encapsulating the alignment campaign setup procedure."""
0041
    def __init__(self, argv):
        """Constructor

        Arguments:
        - `argv`: command line arguments
        """

        # state filled by the helper methods called below
        self._argv = argv               # raw command line arguments
        self._args = None               # parsed command line arguments
        self._config = None             # ConfigParser of the campaign ini file
        self._mss_dir = None            # mass storage directory for binaries
        self._datasets = collections.OrderedDict()  # dataset name -> options
        self._first_run = None          # 'FirstRunForStartGeometry' value
        self._cms_process = None        # cms.Process parsed from the template
        self._override_gt = None        # config snippet overriding the GT
        self._pede_script = None        # template script for pede jobs
        self._weight_dict = collections.OrderedDict()  # dataset -> weight list
        self._mille_script = None       # template script for mille jobs
        self._mps_dir_name = None       # campaign directory name ('mp<id>')
        self._common_weights = {}       # named weights from [weights] sections
        self._weight_configs = []       # resolved weight configurations
        self._general_options = {}      # options from the [general] section
        self._external_datasets = collections.OrderedDict()  # external inis
        self._first_pede_config = True  # first pede config still to be made?

        # setup steps; the order matters (later steps read state set earlier)
        self._create_config()
        self._fill_general_options()
        self._fetch_datasets()
        self._construct_paths()
        self._create_mass_storage_directory()
        self._fetch_pede_settings()
        self._create_weight_configs()
0074
0075
    def setup(self):
        """Setup the alignment campaign.

        With the '-w' option only additional merge (pede) jobs are created;
        otherwise mille jobs and the corresponding pede jobs are set up.
        Afterwards the global-tag override snippet (if any) is reported.
        """

        if self._args.weight:
            self._create_additional_pede_jobs()
        else:
            self._create_mille_jobs()
            self._create_pede_jobs()

        # '_override_gt' was filled by '_create_input_db' during job creation
        if self._override_gt.strip() != "":
            msg = ("Overriding global tag with single-IOV tags extracted from "
                   "'{}' for run number '{}'.".format(self._global_tag,
                                                      self._first_run))
            print(msg)
            print("-"*75)
            print(self._override_gt)
            print("="*75)
0093
0094
0095 def _create_config(self):
0096 """Create ConfigParser object from command line arguments."""
0097
0098 helpEpilog ="""Builds the config-templates from a universal
0099 config-template for each dataset specified in .ini-file that is passed
0100 to this script. Then calls mps_setup.py for all datasets."""
0101 parser = argparse.ArgumentParser(
0102 description = ("Setup the alignment as configured in the "
0103 "alignment_config file."),
0104 epilog = helpEpilog)
0105 parser.add_argument("-v", "--verbose", action="store_true",
0106 help="display detailed output of mps_setup")
0107 parser.add_argument("-w", "--weight", action="store_true",
0108 help=("creates additional merge job(s) with "
0109 "(possibly new) weights from .ini-config"))
0110 parser.add_argument("alignmentConfig",
0111 help=("name of the .ini config file that specifies "
0112 "the datasets to be used"))
0113
0114 self._args = parser.parse_args(self._argv)
0115 self._config = ConfigParser.ConfigParser()
0116 self._config.optionxform = str
0117
0118 self._config.read(self._args.alignmentConfig)
0119 self._config.config_path = self._args.alignmentConfig
0120
0121
    def _construct_paths(self):
        """Determine directory paths and create the ones that are needed.

        Locates the mille/pede template scripts in the checked-out or
        release area and derives the campaign directory name ('mp<id>')
        from the current working directory. Exits if not run from inside
        a 'mpproduction/mp...' campaign directory.
        """

        mpsTemplates = os.path.join("src", "Alignment",
                                    "MillePedeAlignmentAlgorithm", "templates")
        # prefer a locally checked-out MPS package over the release area
        if checked_out_MPS()[0]:
            mpsTemplates = os.path.join(os.environ["CMSSW_BASE"], mpsTemplates)
        else:
            mpsTemplates = os.path.join(os.environ["CMSSW_RELEASE_BASE"], mpsTemplates)
        self._mille_script = os.path.join(mpsTemplates, "mps_runMille_template.sh")
        self._pede_script = os.path.join(mpsTemplates, "mps_runPede_rfcp_template.sh")

        # campaign directory name is the trailing 'mp<id>' of the cwd
        currentDir = os.getcwd()
        match = re.search(re.compile('mpproduction\/mp(.+?)$', re.M|re.I),currentDir)
        if match:
            self._mps_dir_name = 'mp'+match.group(1)
        else:
            print("Current location does not seem to be a MillePede campaign directory:", end=' ')
            print(currentDir)
            sys.exit(1)
0143
0144
0145 def _fill_general_options(self):
0146 """Create and fill `general_options` dictionary."""
0147
0148 print("="*75)
0149 self._fetch_dataset_directory()
0150 self._fetch_external_datasets()
0151 self._fetch_essentials()
0152 self._fetch_defaults()
0153
0154
0155 def _fetch_external_datasets(self):
0156 """Fetch information about external datasets."""
0157
0158 if self._config.has_option("general", "externalDatasets"):
0159 datasets = map(lambda x: x.strip(),
0160 self._config.get("general",
0161 "externalDatasets").split(","))
0162 datasets = [x for x in datasets if len(x.strip()) > 0]
0163 for item in datasets:
0164 splitted = item.split("|")
0165 dataset = splitted[0].strip()
0166 dataset = os.path.expandvars(dataset)
0167
0168 weight = splitted[1] if len(splitted) > 1 else None
0169 config = ConfigParser.ConfigParser()
0170 config.optionxform = str
0171 config.read(dataset)
0172 config.config_path = dataset
0173 self._external_datasets[dataset] = {"config": config,
0174 "weight": weight}
0175
0176
0177
0178 def _create_mass_storage_directory(self):
0179 """
0180 Create MPS mass storage directory where, e.g., mille binaries are
0181 stored.
0182 """
0183
0184
0185 self._mss_dir = self._general_options.get("massStorageDir",
0186 "/eos/cms/store/group/alca_millepede/")
0187 self._mss_dir = os.path.join(self._mss_dir, "MPproductionFiles",
0188 self._mps_dir_name)
0189
0190 cmd = ["mkdir", "-p", self._mss_dir]
0191
0192
0193 if not self._general_options.get("testMode", False):
0194 try:
0195 with open(os.devnull, "w") as dump:
0196 subprocess.check_call(cmd, stdout = dump, stderr = dump)
0197 except subprocess.CalledProcessError:
0198 print("Failed to create mass storage directory:", self._mss_dir)
0199 sys.exit(1)
0200
0201
    def _create_weight_configs(self):
        """Extract different weight configurations from `self._config`.

        Builds the cartesian product of all per-dataset weights, substitutes
        named (common) weights via `mps_tools.replace_factors`, removes
        duplicates, and finally resolves each weight expression with
        `mps_tools.compute_product_string` into `self._weight_configs`.
        """

        # one list of (dataset, weight) pairs per dataset
        weights_list = [[(name, weight) for weight in self._weight_dict[name]]
                        for name in self._weight_dict]

        # same for the named weights from [weights] sections
        common_weights_list = [[(name, weight)
                                for weight in self._common_weights[name]]
                               for name in self._common_weights]

        # every combination of values for the named weights
        common_weights_dicts = []
        for item in itertools.product(*common_weights_list):
            d = {}
            for name,weight in item:
                d[name] = weight
            common_weights_dicts.append(d)

        weight_configs = []
        for weight_conf in itertools.product(*weights_list):
            number_of_configs = len(weight_configs)
            # substitute each named-weight combination into the expression
            for common_weight in common_weights_dicts:
                replaced_config \
                    = tuple([(dataset[0],
                              reduce(lambda x,y: mps_tools.replace_factors(x, y, common_weight[y]),
                                     common_weight, dataset[1]))
                             for dataset in weight_conf])
                if replaced_config not in weight_configs:
                    weight_configs.append(replaced_config)

            # no substitution changed anything -> keep the raw configuration
            if len(weight_configs) == number_of_configs:
                weight_configs.append(weight_conf)

        # resolve the weight products, e.g. '1*0.5' -> '0.5'
        for weight_config in weight_configs:
            resolved_weight_config \
                = [(dataset[0], mps_tools.compute_product_string(dataset[1]))
                   for dataset in weight_config]
            self._weight_configs.append(resolved_weight_config)
0240
0241
0242 def _fetch_pede_settings(self):
0243 """Fetch 'pedesettings' from general section in `self._config`."""
0244
0245 self._pede_settings \
0246 = ([x.strip()
0247 for x in self._config.get("general", "pedesettings").split(",")]
0248 if self._config.has_option("general", "pedesettings") else [None])
0249
0250
    def _create_mille_jobs(self):
        """Create the mille jobs based on the [dataset:<name>] sections.

        For every dataset the universal config template is customized via
        regex substitution (global tag, reco geometry, collection, cosmics
        flags, primary width, json), written to a temporary file, and passed
        to 'mps_setup.py'. The first dataset additionally triggers the
        single-IOV input-DB creation (GT override snippet).
        """

        # patterns matching the 'setup*' assignments in the config template
        gt_regex = re.compile('setupGlobaltag\s*\=\s*[\"\'](.*?)[\"\']')
        sg_regex = re.compile("setupRunStartGeometry\s*\=\s*.*$", re.M)
        collection_regex = re.compile('setupCollection\s*\=\s*[\"\'](.*?)[\"\']')
        recogeom_regex = re.compile('setupRecoGeometry\s*\=\s*[\"\'](.*?)[\"\']')
        czt_regex = re.compile('setupCosmicsZeroTesla\s*\=\s*.*$', re.M)
        cdm_regex = re.compile('setupCosmicsDecoMode\s*\=\s*.*$', re.M)
        pw_regex = re.compile('setupPrimaryWidth\s*\=\s*.*$', re.M)
        json_regex = re.compile('setupJson\s*\=\s*.*$', re.M)

        first_dataset = True
        for name, dataset in self._datasets.items():
            print("="*75)
            # read the dataset's config template
            try:
                with open(dataset["configTemplate"],"r") as f:
                    tmpFile = f.read()
            except IOError:
                print("The config-template called", end=' ')
                print(dataset["configTemplate"], "cannot be found.")
                sys.exit(1)

            # inject the dataset-specific settings into the template text
            tmpFile = re.sub(gt_regex,
                             'setupGlobaltag = \"'+dataset["globaltag"]+'\"',
                             tmpFile)
            tmpFile = re.sub(recogeom_regex,
                             'setupRecoGeometry = \"'+dataset["recogeometry"]+'\"',
                             tmpFile)
            tmpFile = re.sub(sg_regex,
                             "setupRunStartGeometry = "+
                             self._general_options["FirstRunForStartGeometry"], tmpFile)
            tmpFile = re.sub(collection_regex,
                             'setupCollection = \"'+dataset["collection"]+'\"',
                             tmpFile)
            # cosmics-only settings
            if "ALCARECOTkAlCosmics" in dataset["collection"]:
                if dataset['cosmicsZeroTesla']:
                    tmpFile = re.sub(czt_regex,
                                     'setupCosmicsZeroTesla = True',
                                     tmpFile)
                else :
                    tmpFile = re.sub(czt_regex,
                                     'setupCosmicsZeroTesla = False',
                                     tmpFile)

                if dataset['cosmicsDecoMode']:
                    tmpFile = re.sub(cdm_regex,
                                     'setupCosmicsDecoMode = True',
                                     tmpFile)
                else:
                    tmpFile = re.sub(cdm_regex,
                                     'setupCosmicsDecoMode = False',
                                     tmpFile)

            if dataset['primaryWidth'] > 0.0:
                tmpFile = re.sub(pw_regex,
                                 'setupPrimaryWidth = '+str(dataset["primaryWidth"]),
                                 tmpFile)
            if dataset['json'] != '':
                tmpFile = re.sub(json_regex,
                                 'setupJson = \"'+dataset["json"]+'\"',
                                 tmpFile)

            # write the customized template to a temporary file
            thisCfgTemplate = "tmp.py"
            with open(thisCfgTemplate, "w") as f:
                f.write(tmpFile)

            # first dataset: no '-a' (append) flag and creation of the
            # single-IOV input DB / GT override snippet
            append = "-a"
            if first_dataset:
                append = ""
                first_dataset = False
                self._config_template = tmpFile
                self._cms_process = mps_tools.get_process_object(thisCfgTemplate)
                self._create_input_db()

            with open(thisCfgTemplate, "a") as f: f.write(self._override_gt)

            # assemble the mps_setup.py call for this dataset
            command = ["mps_setup.py",
                       "-m",
                       append,
                       "-M", self._general_options["pedeMem"],
                       "-N", name,
                       self._mille_script,
                       thisCfgTemplate,
                       dataset["inputFileList"],
                       str(dataset["njobs"]),
                       self._general_options["classInf"],
                       self._general_options["jobname"],
                       self._pede_script,
                       "cmscafuser:"+self._mss_dir]
            if dataset["numberOfEvents"] > 0:
                command.extend(["--max-events", str(dataset["numberOfEvents"])])
            command = [x for x in command if len(x.strip()) > 0]

            # print a summary of the job configuration
            print("Creating jobs for dataset:", name)
            print("-"*75)
            print("Baseconfig: ", dataset["configTemplate"])
            print("Collection: ", dataset["collection"])
            if "ALCARECOTkAlCosmics" in dataset["collection"]:
                print("cosmicsDecoMode: ", dataset["cosmicsDecoMode"])
                print("cosmicsZeroTesla: ", dataset["cosmicsZeroTesla"])
            print("Globaltag: ", dataset["globaltag"])
            print("RecoGeometry: ", dataset["recogeometry"])
            print("Number of jobs: ", dataset["njobs"])
            print("Inputfilelist: ", dataset["inputFileList"])
            if dataset["json"] != "":
                print("Jsonfile: ", dataset["json"])
            if self._args.verbose:
                print("Pass to mps_setup: ", " ".join(command))

            # run mps_setup.py
            self._handle_process_call(command, self._args.verbose)

            # remove the temporary file
            self._handle_process_call(["rm", thisCfgTemplate])
0372
0373
    def _create_pede_jobs(self):
        """Create pede jobs from the given input.

        For every combination of pede settings and weight configuration a
        merge job is created: the weights are applied with 'mps_weight.pl',
        a merge configuration is generated with 'mps_merge.py', the tracker
        tree is linked, and the weight configuration is pickled into the
        job directory.
        """

        for setting in self._pede_settings:
            print()
            print("="*75)
            if setting is None:
                print("Creating pede job{}.".format(
                    "s" if len(self._pede_settings)*len(self._weight_configs) > 1 else ""))
                print("-"*75)
            else:
                print("Creating pede jobs using settings from '{0}'.".format(setting))
            for weight_conf in self._weight_configs:
                # blank all weights
                self._handle_process_call(["mps_weight.pl", "-c"])

                # create the temporary config template (incl. GT override)
                thisCfgTemplate = "tmp.py"
                with open(thisCfgTemplate, "w") as f: f.write(self._config_template)
                if self._override_gt is None:
                    self._cms_process = mps_tools.get_process_object(thisCfgTemplate)
                    self._create_input_db()
                with open(thisCfgTemplate, "a") as f: f.write(self._override_gt)

                # apply the weights of this configuration
                for name,weight in weight_conf:
                    self._handle_process_call(["mps_weight.pl", "-N", name, weight], True)

                if not self._first_pede_config:
                    # create a new merge job (the first one already exists)
                    self._handle_process_call(["mps_setupm.pl"], self._args.verbose)

                # read mps.db to find the directory of the new merge job
                lib = mpslib.jobdatabase()
                lib.read_db()

                # short cut for the merge-job path
                jobm_path = os.path.join("jobData", lib.JOBDIR[-1])

                # delete an old merge-config, if any
                command = ["rm", "-f", os.path.join(jobm_path, "alignment_merge.py")]
                self._handle_process_call(command, self._args.verbose)

                # create the new merge-config
                command = [
                    "mps_merge.py",
                    "-w", thisCfgTemplate,
                    os.path.join(jobm_path, "alignment_merge.py"),
                    jobm_path,
                    str(lib.nJobs),
                ]
                if setting is not None: command.extend(["-a", setting])
                print("-"*75)
                print(" ".join(command))
                self._handle_process_call(command, self._args.verbose)
                self._create_tracker_tree()
                if self._first_pede_config:
                    os.symlink(self._tracker_tree_path,
                               os.path.abspath(os.path.join(jobm_path,
                                                            ".TrackerTree.root")))
                    self._first_pede_config = False

                # store the weight configuration alongside the merge job
                with open(os.path.join(jobm_path, ".weights.pkl"), "wb") as f:
                    cPickle.dump(weight_conf, f, 2)
                print("="*75)

        # remove the temporary file
        self._handle_process_call(["rm", thisCfgTemplate])
0441
0442
    def _create_additional_pede_jobs(self):
        """
        Create pede jobs in addition to already existing ones. Return GT
        override snippet.

        Used with the '-w' option: requires a previously set-up campaign
        ('jobData' directory and 'mps.db' present) and re-creates the merge
        configuration from the first dataset's template.
        """

        # basic sanity checks: the campaign must already be set up
        if not os.path.isdir("jobData"):
            print("No jobData-folder found.", end=' ')
            print("Properly set up the alignment before using the -w option.")
            sys.exit(1)
        if not os.path.exists("mps.db"):
            print("No mps.db found.", end=' ')
            print("Properly set up the alignment before using the -w option.")
            sys.exit(1)

        # template and collection are taken from the first dataset
        firstDataset = next(iter(self._datasets.values()))
        config_template = firstDataset["configTemplate"]
        collection = firstDataset["collection"]

        try:
            with open(config_template,"r") as f:
                tmpFile = f.read()
        except IOError:
            print("The config-template '"+config_template+"' cannot be found.")
            sys.exit(1)

        # inject global tag, reco geometry, collection and start geometry
        tmpFile = re.sub('setupGlobaltag\s*\=\s*[\"\'](.*?)[\"\']',
                         'setupGlobaltag = \"'+self._global_tag+'\"',
                         tmpFile)
        tmpFile = re.sub('setupRecoGeometry\s*\=\s*[\"\'](.*?)[\"\']',
                         'setupRecoGeometry = \"'+self._reco_geometry+'\"',
                         tmpFile)
        tmpFile = re.sub('setupCollection\s*\=\s*[\"\'](.*?)[\"\']',
                         'setupCollection = \"'+collection+'\"',
                         tmpFile)
        tmpFile = re.sub(re.compile("setupRunStartGeometry\s*\=\s*.*$", re.M),
                         "setupRunStartGeometry = "+self._first_run,
                         tmpFile)
        self._config_template = tmpFile

        # the first pede job exists already in this mode
        self._first_pede_config = False
        self._create_pede_jobs()
0487
0488
0489 def _handle_process_call(self, command, verbose = False):
0490 """
0491 Wrapper around subprocess calls which treats output depending on verbosity
0492 level.
0493
0494 Arguments:
0495 - `command`: list of command items
0496 - `verbose`: flag to turn on verbosity
0497 """
0498
0499 call_method = subprocess.check_call if verbose else subprocess.check_output
0500 try:
0501 call_method(command, stderr=subprocess.STDOUT)
0502 except subprocess.CalledProcessError as e:
0503 print("" if verbose else e.output)
0504 print("Failed to execute command:", " ".join(command))
0505 sys.exit(1)
0506
0507
    def _create_input_db(self):
        """
        Create sqlite file with single-IOV tags and use it to override the
        GT. If the GT is already customized by the user, the customization has
        higher priority. Creates a snippet to be appended to the configuration
        file.
        """

        run_number = int(self._first_run)
        if not run_number > 0:
            print("'FirstRunForStartGeometry' must be positive, but is", run_number)
            sys.exit(1)

        # write single-IOV tags for the problematic records into a local DB
        input_db_name = os.path.abspath("alignment_input.db")
        tags = mps_tools.create_single_iov_db(self._check_iov_definition(),
                                              run_number, input_db_name)

        # build the python snippet overriding the GT conditions
        self._override_gt = ""
        for record,tag in tags.items():
            if self._override_gt == "":
                # import the tag writer only once, at the top of the snippet
                self._override_gt \
                    += ("\nimport "
                        "Alignment.MillePedeAlignmentAlgorithm.alignmentsetup."
                        "SetCondition as tagwriter\n")
            self._override_gt += ("\ntagwriter.setCondition(process,\n"
                                  " connect = \""+tag["connect"]+"\",\n"
                                  " record = \""+record+"\",\n"
                                  " tag = \""+tag["tag"]+"\")\n")
0536
0537
    def _check_iov_definition(self):
        """
        Check consistency of input alignment payloads and IOV definition.
        Returns a dictionary with the information needed to override possibly
        problematic input taken from the global tag.
        """

        print("Checking consistency of IOV definition...")
        iovs = mps_tools.make_unique_runranges(self._cms_process.AlignmentProducer)

        # alignment input records to be checked; None -> taken from the GT
        inputs = {
            "TrackerAlignmentRcd": None,
            "TrackerSurfaceDeformationRcd": None,
            "TrackerAlignmentErrorExtendedRcd": None,
        }

        # user customizations of the GT have priority over the GT itself
        for condition in self._cms_process.GlobalTag.toGet.value():
            if condition.record.value() in inputs:
                inputs[condition.record.value()] = {
                    "tag": condition.tag.value(),
                    "connect": ("pro"
                                if not condition.hasParameter("connect")
                                else condition.connect.value())
                }

        # fill the remaining records from the global tag
        inputs_from_gt = [record for record in inputs if inputs[record] is None]
        inputs.update(
            mps_tools.get_tags(self._cms_process.GlobalTag.globaltag.value(),
                               inputs_from_gt))

        if int(self._first_run) != iovs[0]:
            # single-IOV output is allowed with a different start run; the
            # inputs are then marked 'problematic' and overridden later
            if iovs[0] == 1 and len(iovs) == 1:
                print("Single IOV output detected in configuration and", end=' ')
                print("'FirstRunForStartGeometry' is not 1.")
                print("Creating single IOV output from input conditions in run", end=' ')
                print(self._first_run+".")
                for inp in inputs: inputs[inp]["problematic"] = True
            else:
                print("Value of 'FirstRunForStartGeometry' has to match first", end=' ')
                print("defined output IOV:", end=' ')
                print(self._first_run, "!=", iovs[0])
                sys.exit(1)

        for inp in inputs.values():
            inp["iovs"] = mps_tools.get_iovs(inp["connect"], inp["tag"])

        # check that no input IOV boundary lies inside an output IOV,
        # walking both IOV lists from the back
        problematic_gt_inputs = {}
        input_indices = {key: len(value["iovs"]) -1
                         for key,value in inputs.items()}
        for iov in reversed(iovs):
            for inp in inputs:
                if inputs[inp].pop("problematic", False):
                    problematic_gt_inputs[inp] = inputs[inp]
                if inp in problematic_gt_inputs: continue
                if input_indices[inp] < 0:
                    print("First output IOV boundary at run", iov, end=' ')
                    print("is before the first input IOV boundary at", end=' ')
                    print(inputs[inp]["iovs"][0], "for '"+inp+"'.")
                    print("Please check your run range selection.")
                    sys.exit(1)
                input_iov = inputs[inp]["iovs"][input_indices[inp]]
                if iov < input_iov:
                    if inp in inputs_from_gt:
                        # GT inputs can be healed with a single-IOV override
                        problematic_gt_inputs[inp] = inputs[inp]
                        print("Found problematic input taken from global tag.")
                        print("Input IOV boundary at run",input_iov, end=' ')
                        print("for '"+inp+"' is within output IOV starting", end=' ')
                        print("with run", str(iov)+".")
                        print("Deriving an alignment with coarse IOV", end=' ')
                        print("granularity starting from finer granularity", end=' ')
                        print("leads to wrong results.")
                        print("A single IOV input using the IOV of", end=' ')
                        print("'FirstRunForStartGeometry' ("+self._first_run+")", end=' ')
                        print("is automatically created and used.")
                        continue
                    print("Found input IOV boundary at run",input_iov, end=' ')
                    print("for '"+inp+"' which is within output IOV starting", end=' ')
                    print("with run", str(iov)+".")
                    print("Deriving an alignment with coarse IOV granularity", end=' ')
                    print("starting from finer granularity leads to wrong", end=' ')
                    print("results.")
                    print("Please check your run range selection.")
                    sys.exit(1)
                elif iov == input_iov:
                    input_indices[inp] -= 1

        # cross-check the other records against the 'TrackerAlignmentRcd' IOVs
        # NOTE(review): the filter below tests `inp` (leftover variable from
        # the loop above) instead of `key`; this looks like a bug -- confirm
        # intended behavior before changing.
        input_indices = {key: len(value["iovs"]) -1
                         for key,value in inputs.items()
                         if (key != "TrackerAlignmentRcd")
                         and (inp not in problematic_gt_inputs)}
        for iov in reversed(inputs["TrackerAlignmentRcd"]["iovs"]):
            for inp in input_indices:
                input_iov = inputs[inp]["iovs"][input_indices[inp]]
                if iov < input_iov:
                    print("Found input IOV boundary at run",input_iov, end=' ')
                    print("for '"+inp+"' which is within 'TrackerAlignmentRcd'", end=' ')
                    print("IOV starting with run", str(iov)+".")
                    print("Deriving an alignment with inconsistent IOV boundaries", end=' ')
                    print("leads to wrong results.")
                    print("Please check your input IOVs.")
                    sys.exit(1)
                elif iov == input_iov:
                    input_indices[inp] -= 1

        print(" -> IOV consistency check successful.")
        print("="*75)

        return problematic_gt_inputs
0648
0649
    def _create_tracker_tree(self):
        """Method to create hidden 'TrackerTree.root'.

        Requires `self._global_tag` and `self._first_run` to be set; the
        resulting file path is stored in `self._tracker_tree_path`.
        """

        if self._global_tag is None or self._first_run is None:
            print("Trying to create the tracker tree before setting the global", end=' ')
            print("tag or the run to determine the geometry IOV.")
            sys.exit(1)

        config = mpsv_iniparser.ConfigData()
        config.jobDataPath = "."    # current directory
        config.globalTag = self._global_tag

        config.firstRun = self._first_run
        self._tracker_tree_path = mpsv_trackerTree.check(config)
0664
0665
0666 def _fetch_essentials(self):
0667 """Fetch general options from config file."""
0668
0669 for var in ("classInf","pedeMem","jobname", "FirstRunForStartGeometry"):
0670 try:
0671 self._general_options[var] = self._config.get('general',var)
0672 except ConfigParser.NoOptionError:
0673 print("No", var, "found in [general] section.", end=' ')
0674 print("Please check ini-file.")
0675 sys.exit(1)
0676 self._first_run = self._general_options["FirstRunForStartGeometry"]
0677
0678
0679 def _fetch_defaults(self):
0680 """Fetch default general options from config file."""
0681
0682 for var in ("globaltag", "recogeometry", "configTemplate", "json", "massStorageDir",
0683 "testMode"):
0684 try:
0685 self._general_options[var] = self._config.get("general", var)
0686 except ConfigParser.NoOptionError:
0687 if var == "testMode": continue
0688 print("No '" + var + "' given in [general] section.")
0689
0690 for dataset in self._external_datasets.values():
0691 dataset["general"] = {}
0692 for var in ("globaltag", "recogeometry", "configTemplate", "json"):
0693 try:
0694 dataset["general"][var] = dataset["config"].get("general", var)
0695 except (ConfigParser.NoSectionError,ConfigParser.NoOptionError):
0696 pass
0697
0698
0699 def _fetch_dataset_directory(self):
0700 """
0701 Fetch 'datasetDir' variable from general section and add it to the
0702 'os.environ' dictionary.
0703 """
0704
0705 if self._config.has_option("general", "datasetdir"):
0706 dataset_directory = self._config.get("general", "datasetdir")
0707
0708 os.environ["datasetdir"] = dataset_directory
0709 self._general_options["datasetdir"] = dataset_directory
0710 else:
0711 print("No datasetdir given in [general] section.", end=' ')
0712 print("Be sure to give a full path in inputFileList.")
0713 self._general_options["datasetdir"] = ""
0714
0715
0716 def _fetch_datasets(self):
0717 """Fetch internal and external dataset configurations."""
0718
0719 all_configs = collections.OrderedDict()
0720 all_configs["main"] = {"config": self._config,
0721 "general": self._general_options,
0722 "weight": None}
0723 all_configs.update(self._external_datasets)
0724
0725 for config in all_configs.values():
0726 global_weight = "1" if config["weight"] is None else config["weight"]
0727 if global_weight+self._config.config_path in self._common_weights:
0728 global_weight = self._common_weights[global_weight+
0729 self._config.config_path]
0730 elif global_weight in self._common_weights:
0731 global_weight = self._common_weights[global_weight]
0732 else:
0733 global_weight = (global_weight,)
0734 common_weights = {}
0735 weight_dict = {}
0736 for section in config["config"].sections():
0737 cache_datasetdir = os.environ["datasetdir"]
0738 if "general" in section:
0739 if config["config"].has_option("general", "datasetdir"):
0740 os.environ["datasetdir"] = config["config"].get("general", "datasetdir")
0741 elif section == "weights":
0742 for option in config["config"].options(section):
0743 common_weights[option] \
0744 = [x.strip() for x in
0745 config["config"].get(section, option).split(",")]
0746 elif section.startswith("dataset:"):
0747 print("-"*75)
0748
0749 name = section[8:]
0750 if name in self._datasets:
0751 print("WARNING: Duplicate definition of dataset '{}'".format(name))
0752 print(" -> Using defintion in '{}':\n".format(config["config"].config_path))
0753 print(" [{}]".format(section))
0754 for k,v in config["config"].items(section):
0755 print(" ", k, "=", v)
0756 print()
0757 self._datasets[name] = {}
0758
0759
0760 if config["config"].has_option(section, "weight"):
0761 self._weight_dict[name] \
0762 = [x.strip() for x in
0763 config["config"].get(section, "weight").split(",")]
0764 else:
0765 self._weight_dict[name] = ["1.0"]
0766 self._weight_dict[name] = [global_w+"*"+w
0767 for w in self._weight_dict[name]
0768 for global_w in global_weight]
0769 weight_dict[name] = self._weight_dict[name]
0770
0771
0772 for var in ("inputFileList", "collection"):
0773 try:
0774 self._datasets[name][var] = config["config"].get(section, var)
0775 except ConfigParser.NoOptionError:
0776 print("No", var, "found in", section+". Please check ini-file.")
0777 sys.exit(1)
0778
0779
0780
0781 for var in ("configTemplate", "globaltag"):
0782 try:
0783 self._datasets[name][var] = config["config"].get(section, var)
0784 except (ConfigParser.NoSectionError,ConfigParser.NoOptionError):
0785 try:
0786 self._datasets[name][var] = config["general"][var]
0787 except KeyError:
0788 try:
0789 self._datasets[name][var] \
0790 = all_configs["main"]["general"][var]
0791 except KeyError:
0792 print("No",var,"found in ["+section+"]", end=' ')
0793 print("and no default in [general] section.")
0794 sys.exit(1)
0795
0796
0797
0798 for var in ("configTemplate", "recogeometry"):
0799 try:
0800 self._datasets[name][var] = config["config"].get(section, var)
0801 except (ConfigParser.NoSectionError,ConfigParser.NoOptionError):
0802 try:
0803 self._datasets[name][var] = config["general"][var]
0804 except KeyError:
0805 try:
0806 self._datasets[name][var] \
0807 = all_configs["main"]["general"][var]
0808 except KeyError:
0809 print("No",var,"found in ["+section+"]", end=' ')
0810 print("and no default in [general] section.")
0811 sys.exit(1)
0812
0813
0814 if "ALCARECOTkAlCosmics" in self._datasets[name]["collection"]:
0815 try:
0816 self._datasets[name]["cosmicsZeroTesla"] \
0817 = config["config"].getboolean(section,"cosmicsZeroTesla")
0818 except ConfigParser.NoOptionError:
0819 print("No option cosmicsZeroTesla found in", section,"even though it is required for dataset type", self._datasets[name]["collection"], ". Please check ini-file.")
0820 sys.exit(1)
0821 try:
0822 self._datasets[name]["cosmicsDecoMode"] \
0823 = config["config"].getboolean(section,"cosmicsDecoMode")
0824 except ConfigParser.NoOptionError:
0825 print("No option cosmicsDecoMode found in", section,"even though it is required for dataset type", self._datasets[name]["collection"], ".Please check ini-file.")
0826 sys.exit(1)
0827
0828 self._datasets[name]["primaryWidth"] = -1.0
0829 if config["config"].has_option(section,"primaryWidth"):
0830 self._datasets[name]["primaryWidth"] \
0831 = config["config"].getfloat(section,"primaryWidth")
0832
0833 self._datasets[name]["numberOfEvents"] = -1
0834 if config["config"].has_option(section, "numberOfEvents"):
0835 self._datasets[name]["numberOfEvents"] \
0836 = config["config"].getint(section, "numberOfEvents")
0837
0838 self._datasets[name]["json"] = ""
0839 try:
0840 self._datasets[name]["json"] = config["config"].get(section,"json")
0841 except ConfigParser.NoOptionError:
0842 try:
0843 self._datasets[name]["json"] = config["general"]["json"]
0844 except KeyError:
0845 try:
0846 self._datasets[name]["json"] \
0847 = all_configs["main"]["general"]["json"]
0848 except KeyError:
0849 print("No json given in either [general] or", end=' ')
0850 print("["+section+"] sections.")
0851 print(" -> Proceeding without json-file.")
0852
0853
0854
0855 for var in ("inputFileList", "json", "configTemplate"):
0856 self._datasets[name][var] \
0857 = os.path.expandvars(self._datasets[name][var])
0858
0859
0860
0861 self._datasets[name]["njobs"] = 0
0862 try:
0863 with open(self._datasets[name]["inputFileList"], "r") as filelist:
0864 for line in filelist:
0865 if "CastorPool" in line:
0866 continue
0867
0868 if not line.strip()=="":
0869 self._datasets[name]["njobs"] += 1
0870 except IOError:
0871 print("Inputfilelist", self._datasets[name]["inputFileList"], end=' ')
0872 print("does not exist.")
0873 sys.exit(1)
0874 if self._datasets[name]["njobs"] == 0:
0875 print("Number of jobs is 0. There may be a problem with the inputfilelist:")
0876 print(self._datasets[name]["inputFileList"])
0877 sys.exit(1)
0878
0879
0880 if config["config"].has_option(section, "njobs"):
0881 if config["config"].getint(section, "njobs") <= self._datasets[name]["njobs"]:
0882 self._datasets[name]["njobs"] = config["config"].getint(section, "njobs")
0883 else:
0884 print("'njobs' is bigger than the number of files for this", end=' ')
0885 print("dataset:", self._datasets[name]["njobs"])
0886 print("Using default.")
0887 else:
0888 print("No number of jobs specified. Using number of files in", end=' ')
0889 print("inputfilelist as the number of jobs.")
0890
0891
0892 for weight_name, weight_values in common_weights.items():
0893 for key, weight in weight_dict.items():
0894 if any([weight_name in w for w in weight]):
0895 self._common_weights[weight_name+config["config"].config_path] = weight_values
0896 self._weight_dict[key] = [mps_tools.replace_factors(w,
0897 weight_name,
0898 weight_name+config["config"].config_path)
0899 for w in weight]
0900 else:
0901 self._common_weights[weight_name] = weight_values
0902 self._weight_dict[key] = weight
0903
0904 os.environ["datasetdir"] = cache_datasetdir
0905
0906 if len(self._datasets) == 0:
0907 print("No dataset section defined in '{0}'".format(
0908 ", ".join([self._args.aligmentConfig]+self._external_datasets.keys())))
0909 print("At least one section '[dataset:<name>]' is required.")
0910 sys.exit(1)
0911
0912 self._global_tag = self._datasets[name]["globaltag"]
0913 self._reco_geometry = self._datasets[name]["recogeometry"]
0914
0915
if __name__ == "__main__":
    # allow the user to abort with Ctrl-C without a traceback
    try:
        main()
    except KeyboardInterrupt:
        pass