from __future__ import print_function
from builtins import range
import ROOT
from array import array
from collections import OrderedDict
from copy import copy
import pickle
from math import sqrt, copysign, sin, cos, pi

from Validation.RecoTrack.plotting.ntuple import *


''' Class labels for the fake-track classification, used mainly for labelling plotted histograms. '''

classes = {-1: "UNCLASSIFIED",
            0: "UNMATCHED",
           21: "MERGED DECAY TRACK MATCHES",
           22: "MULTIPLE ALL RECONSTRUCTED MATCHES",
           23: "MULTIPLE PARTIALLY RECONSTRUCTED MATCHES",
           20: "MULTIPLE UNRECONSTRUCTED MATCHES",
           11: "RECONSTRUCTED AND DECAYED MATCH",
           12: "RECONSTRUCTED MATCH",
           13: "DECAYED MATCH",
           10: "MATCH"}

end_class_names = {0: "Fake and particle tracks differ",
                   1: "Tracking ended correctly",
                   2: "Fake ended before particle",
                   3: "Particle ended before fake",
                   4: "Particle ended within two hits in same layer"}

layer_names_t = ((1, "BPix1"),
                 (2, "BPix2"),
                 (3, "BPix3"),
                 (4, "FPix1"),
                 (5, "FPix2"),
                 (11, "TIB1"),
                 (12, "TIB2"),
                 (13, "TIB3"),
                 (14, "TIB4"),
                 (21, "TID1"),
                 (22, "TID2"),
                 (23, "TID3"),
                 (31, "TOB1"),
                 (32, "TOB2"),
                 (33, "TOB3"),
                 (34, "TOB4"),
                 (35, "TOB5"),
                 (36, "TOB6"),
                 (41, "TEC1"),
                 (42, "TEC2"),
                 (43, "TEC3"),
                 (44, "TEC4"),
                 (45, "TEC5"),
                 (46, "TEC6"),
                 (47, "TEC7"),
                 (48, "TEC8"),
                 (49, "TEC9"))
layer_names = OrderedDict(layer_names_t)

layer_names_rev_t = (("BPix1", 1),
                     ("BPix2", 2),
                     ("BPix3", 3),
                     ("FPix1", 4),
                     ("FPix2", 5),
                     ("TIB1", 11),
                     ("TIB2", 12),
                     ("TIB3", 13),
                     ("TIB4", 14),
                     ("TID1", 21),
                     ("TID2", 22),
                     ("TID3", 23),
                     ("TOB1", 31),
                     ("TOB2", 32),
                     ("TOB3", 33),
                     ("TOB4", 34),
                     ("TOB5", 35),
                     ("TOB6", 36),
                     ("TEC1", 41),
                     ("TEC2", 42),
                     ("TEC3", 43),
                     ("TEC4", 44),
                     ("TEC5", 45),
                     ("TEC6", 46),
                     ("TEC7", 47),
                     ("TEC8", 48),
                     ("TEC9", 49))
layer_names_rev = OrderedDict(layer_names_rev_t)


layer_data_tmp_t = ((1,0),(2,0),(3,0),(4,0),(5,0),
                    (11,0),(12,0),(13,0),(14,0),
                    (21,0),(22,0),(23,0),
                    (31,0),(32,0),(33,0),(34,0),(35,0),(36,0),
                    (41,0),(42,0),(43,0),(44,0),(45,0),(46,0),(47,0),(48,0),(49,0))
layer_data_tmp = OrderedDict(layer_data_tmp_t)


invalid_types_t = ((0, "valid"),
                   (1, "missing"),
                   (2, "inactive"),
                   (3, "bad"),
                   (4, "missing_inner"),
                   (5, "missing_outer"))
invalid_types = OrderedDict(invalid_types_t)


invalid_types_tmp_t = ((0,0),(1,0),(2,0),(3,0),(4,0),(5,0))
invalid_types_tmp = OrderedDict(invalid_types_tmp_t)
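
# A minimal sketch of how the lookup tables above fit together; "BPix3" is
# just an illustrative layer string.
def _layer_table_demo():
    ''' Demonstrates the round trip between layer indexes and layer names. '''
    index = layer_names_rev["BPix3"]   # layer name -> layer index (3)
    name = layer_names[index]          # layer index -> layer name ("BPix3")
    counts = copy(layer_data_tmp)      # a fresh per-layer counter dictionary, all zeros
    counts[index] += 1
    return name, counts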


def FindFakes(event):
    '''Returns the fake tracks of an event in a list'''
    fakes = []
    for track in event.tracks():
        if track.nMatchedTrackingParticles() == 0:
            fakes.append(track)
    print("Event: " + str(event.entry()+1) + " Number of fake tracks: " + str(len(fakes)))
    return fakes

def FindTrues(event):
    '''Returns the true tracks of an event in a list'''
    trues = []
    for track in event.tracks():
        if track.nMatchedTrackingParticles() >= 1:  # a true track has at least one matched TrackingParticle
            trues.append(track)
    print("Event: " + str(event.entry()+1) + " Number of true tracks: " + str(len(trues)))
    return trues

def Distance(x1, x2):
    '''Returns the Euclidean distance between two iterable vectors.'''
    d = 0
    for i in range(len(x1)):
        d += abs(x1[i]-x2[i])**2
    d = sqrt(d)
    return d

def FindUntrackedParticles(event):
    '''Returns the untracked TrackingParticles of an event, according to MTFEfficiencySelector.'''
    untracked = []

    for particle in event.trackingParticles():
        if (particle.isValid() and particle.nMatchedTracks() == 0 and MTFEfficiencySelector(particle)):
            untracked.append(particle)
    print("Number of untracked particles: " + str(len(untracked)))
    return untracked

def MTFEfficiencySelector(particle):
    '''
    A selector for analysing the MultiTrackFinder efficiency rate.
    Used to analyse untracked particles.
    Returns True if the particle fulfils the criteria for an "interesting" particle,
    i.e. one that should have been reconstructable with good precision.
    '''
    if(particle.pt() > 0.9 and abs(particle.eta()) < 2.5
       and (particle.parentVertex().x()**2 + particle.parentVertex().y()**2 < 3.5**2)
       and abs(particle.parentVertex().z()) < 30 and particle.q() != 0 and particle.event() == 0):
        return True
    return False

def EfficiencyRate(ntuple, N):
    '''
    Calculates the efficiency rate over the first N events in the ntuple.
    The efficiency rate is the fraction of the selected particles that were tracked.
    Prints the output.
    '''
    tracked = 0
    untracked = 0
    i = 0
    for event in ntuple:
        if (i >= N): break
        for particle in event.trackingParticles():
            if (MTFEfficiencySelector(particle)):
                if(particle.nMatchedTracks() == 0): untracked += 1
                else: tracked += 1
        i += 1
    print("In " + str(N) + " events there are:")
    print("Tracked particles: " + str(tracked))
    print("Untracked particles: " + str(untracked))
    print("Efficiency rate: " + str(float(tracked)/(tracked+untracked)))

def SharedHitFrac(track, particle, frac = 0):
    '''
    Shared hits or hit fraction between a Track and a TrackingParticle.
    If frac is nonzero, returns the number of shared hits, the number of hits in the Track,
    and the number of hits in the TrackingParticle.
    Otherwise returns the fraction of the Track's hits that are shared with the TrackingParticle.
    '''
    particle_hits = [hit.index() for hit in particle.hits() if hit.isValidHit()]
    shared_hits = 0
    ntrack_hits = 0
    for hit in track.hits():
        if hit.isValidHit() and hit.index() in particle_hits:
            shared_hits += 1
        ntrack_hits += 1
    if frac:
        return shared_hits, ntrack_hits, len(particle_hits)
    else:
        return 1.0*shared_hits/ntrack_hits
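
# A minimal usage sketch of the two return modes of SharedHitFrac; "track" and
# "particle" stand for any Track / TrackingParticle pair from the ntuple.
def _shared_hit_frac_demo(track, particle):
    ''' Shows both calling conventions of SharedHitFrac. '''
    shared, n_track, n_particle = SharedHitFrac(track, particle, 1)  # absolute counts
    fraction = SharedHitFrac(track, particle, 0)                     # shared / n_track
    return shared, n_track, n_particle, fraction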

def SharedHitsFromBeginning(track, particle, tolerance=0):
    '''
    Checks the hits shared with a particle from the beginning of the track.
    Tolerance is the number of allowed differences (errors) between the tracks.
    Returns a list containing the number of shared hits from the beginning,
    as a function of tolerance (the list index).

    Example:
    Output: [3, 3, 4, 4, 4]
    Means: the first 3 particle hits are shared with the track,
           then 1 unshared particle hit,
           then 1 more shared particle hit,
           then 2 unshared particle hits,
           until tolerance < 0 (or the particle track ended)

    NOTE: Often this is called with a TrackingParticle as the parameter "track" and a Track as the parameter "particle",
    because it was later needed to analyse the hits which are consecutive with respect to the Track.
    Sorry about the inconvenience.
    '''
    particle_hits = [hit.index() for hit in particle.hits() if hit.isValidHit()]
    track_hits = [hit.index() for hit in track.hits() if hit.isValidHit()]

    count = 0
    i = 0
    result = []
    while tolerance >= 0 and i < len(particle_hits) and count < len(track_hits):
        if particle_hits[i] in track_hits:
            count += 1
            i += 1
        else:
            tolerance -= 1
            result.append(count)
            i += 1
    if tolerance >= 0:
        result.append(count)
    return result

def SharedHits(track, particle):
    '''Returns the hits shared between a Track and a TrackingParticle in a list'''
    res_hits = []
    particle_hit_indexes = [hit.index() for hit in particle.hits() if hit.isValidHit()]
    track_hits = [hit for hit in track.hits() if hit.isValidHit()]

    for hit in track_hits:
        if hit.index() in particle_hit_indexes:
            res_hits.append(hit)

    return res_hits

def FindAssociatedParticles(track):
    '''Returns a list of the TrackingParticles that share any hits with the given Track'''
    particle_inds = []
    particles = []
    for hit in track.hits():
        if hit.isValidHit() and hit.nSimHits() > 0:
            for simHit in hit.simHits():
                particle = simHit.trackingParticle()
                if particle.index() not in particle_inds:
                    particle_inds.append(particle.index())
                    particles.append(particle)
    return particles

def MatchedParticles(fake, real_criterion = ["consecutive", 3]):
    '''
    Finds the possible matched real particles of a fake track.
    Three criteria are currently available for evaluating a possible match:
    consecutive: has a given number of hits which are consecutive in a particle track (default, with a 3 hit limit)
    fraction: a given fraction of the fake hits is included in a particle track
    NLay: a given number of pixel / strip layers are included in the shared hits

    Parameters: fake track, criterion type and limit value in a list
    Returns: a list of matched particles
    '''
    CRITERION = real_criterion[0]
    LIMIT = real_criterion[1]

    particles = FindAssociatedParticles(fake)
    matches = []
    for particle in particles:
        if CRITERION == "consecutive":
            tolerance_mask = SharedHitsFromBeginning(particle, fake, particle.nValid())
            diff = [abs(tolerance_mask[i+1] - tolerance_mask[i]) for i in range(len(tolerance_mask)-1)]
            if tolerance_mask[0] >= LIMIT or (diff and max(diff) >= LIMIT):
                matches.append(particle)
        if CRITERION == "fraction":
            if SharedHitFrac(fake, particle, 0) >= LIMIT:
                matches.append(particle)
        if CRITERION == "NLay":
            hits = SharedHits(fake, particle)
            nPix = 0
            nStr = 0
            tracked_layers = []
            for hit in hits:
                layer = hit.layerStr()
                if layer not in tracked_layers:
                    tracked_layers.append(layer)
                    if "Pix" in layer: nPix += 1
                    else: nStr += 1
            if 2*nPix + nStr >= LIMIT:
                matches.append(particle)
    return matches
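
# A minimal sketch of calling MatchedParticles with each of the three criteria;
# the "fraction" and "NLay" limit values here are illustrative, only the
# "consecutive" limit of 3 is a default used elsewhere in this file.
def _matched_particles_demo(fake):
    ''' Collects the matches a fake track gets under each criterion. '''
    by_consecutive = MatchedParticles(fake, ["consecutive", 3])
    by_fraction = MatchedParticles(fake, ["fraction", 0.75])
    by_nlay = MatchedParticles(fake, ["NLay", 6])
    return by_consecutive, by_fraction, by_nlay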

def IsUnmatched(fake, unmatch_criterion = ["nShared", 2]):
    '''
    Returns True if the fake track is not matched to any particle with respect
    to the criterion. By default the fake is unmatched if no TrackingParticle
    shares more than 2 hits with it.
    '''
    CRITERION = unmatch_criterion[0]
    LIMIT = unmatch_criterion[1]

    for particle in FindAssociatedParticles(fake):
        if CRITERION == "nShared":
            shared, track_hits, particle_hits = SharedHitFrac(fake, particle, 1)
            if shared > LIMIT:
                return False
    return True

def FindEndOfTracking(fake, particle, end_criterion = ["nMissing", 2], real_criterion = ["consecutive", 3], only_valid = False):
    '''
    Finds the point where the fake stops tracking the particle, according to
    end_criterion, which is by default 2 consecutive missing layers in the particle hits.
    Returns: last: the last shared hit between the fake and the particle, i.e. the first hit
                   in the particle trajectory after which the tracks separate by the end
                   criterion (or simply end)
             fake_end: the fake track hit following the last shared hit
             particle_end: the particle hit following the last shared hit
             end_class: the end class index (see end_class_names)
    fake_end and particle_end may coincide with the last shared hit if the fake track
    or the particle track (or both) end there.
    '''
    CRITERION = end_criterion[0]
    LIMIT = end_criterion[1]
    REAL_CRITERION = real_criterion[0]
    REAL_LIMIT = real_criterion[1]

    if CRITERION == "nMissing" and REAL_CRITERION == "consecutive":
        if only_valid:
            particle_hits = [hit for hit in particle.hits() if hit.isValidHit()]
            particle_hit_indexes = [hit.index() for hit in particle.hits() if hit.isValidHit()]
            track_hits = [hit for hit in fake.hits() if hit.isValidHit()]
            track_hit_indexes = [hit.index() for hit in fake.hits() if hit.isValidHit()]
        else:
            particle_hits = [hit for hit in particle.hits()]
            particle_hit_indexes = [hit.index() for hit in particle.hits()]
            track_hits = [hit for hit in fake.hits()]
            track_hit_indexes = [hit.index() for hit in fake.hits()]

        tolerance = LIMIT
        i = 0
        start_tolerance = 0
        last = particle_hits[0]
        particle_end = particle_hits[0]
        fake_end = particle_hits[0]

        # First find the point where the fake starts to track the particle,
        # then reset the index i to follow the particle trajectory.
        while i < len(track_hits):
            if track_hit_indexes[i] in particle_hit_indexes:
                start_tolerance += 1
                if start_tolerance >= REAL_LIMIT:
                    tolerance = LIMIT
                    i = particle_hit_indexes.index(track_hit_indexes[i])
                    last = particle_hits[i]
                    particle_end = particle_hits[min([i+1, len(particle_hits)-1])]
                    fake_end = track_hits[min(track_hit_indexes.index(particle_hit_indexes[i])+1, len(track_hits)-1)]
                    break
                i += 1
            else:
                start_tolerance = 0
                i += 1

        # Then follow the particle trajectory until the end criterion is met.
        while tolerance >= 0 and i < len(particle_hits):
            if particle_hit_indexes[i] in track_hit_indexes:
                tolerance = LIMIT
                last = particle_hits[i]
                particle_end = particle_hits[min([i+1, len(particle_hits)-1])]
                fake_end = track_hits[min(track_hit_indexes.index(particle_hit_indexes[i])+1, len(track_hits)-1)]
            elif not (particle_hits[i-1].layerStr() in particle_hits[i].layerStr() or particle_hits[i].layerStr() in particle_hits[i-1].layerStr()):
                tolerance -= 1
            i += 1
    end_class = 0
    if last.index() == fake_end.index() and last.index() == particle_end.index():
        end_class = 1
    elif last.index() == fake_end.index(): end_class = 2
    elif last.index() == particle_end.index(): end_class = 3
    elif last.layerStr() == particle_hits[-1].layerStr() and (len(particle_hits)-1 - i < 4): end_class = 3
    '''
    if tolerance >= LIMIT: # If the end of the particle was reached, last and fake_end are the same
        last = particle_end
        fake_end = particle_end
        end_class = 1

    print([[hit.index(), hit.layerStr()] for hit in track_hits])
    print([[hit.index(), hit.layerStr()] for hit in particle_hits])
    print(i)
    print(last.index())
    print(fake_end.index())
    print(particle_end.index())
    print(end_class)
    print("*****")
    input()
    '''
    return last, fake_end, particle_end, end_class
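
# A minimal sketch of inspecting where the tracking failed for a matched fake;
# "fake" and "particle" stand for a Track / TrackingParticle pair that pass the
# match criterion, and layerStr() is assumed to be available for the hits
# (glued strip hits may need monoHit().layerStr(), as in MatchInfo below).
def _end_of_tracking_demo(fake, particle):
    ''' Prints the layers around the point where the fake and the particle separate. '''
    last, fake_end, particle_end, end_class = FindEndOfTracking(fake, particle)
    print("End class: " + end_class_names[end_class])
    print("Last shared hit in " + last.layerStr() +
          ", next fake hit in " + fake_end.layerStr() +
          ", next particle hit in " + particle_end.layerStr())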

def MatchPixelHits(fake, particle, real_criterion = ["consecutive", 3]):
    '''
    Finds how many shared pixel hits the fake has with a particle, starting from the
    beginning of the shared hits.
    '''
    CRITERION = real_criterion[0]
    LIMIT = real_criterion[1]

    if CRITERION == "consecutive":
        particle_hits = [hit for hit in particle.hits() if hit.isValidHit()]
        track_hits = [hit for hit in fake.hits() if hit.isValidHit()]
        particle_hit_indexes = [hit.index() for hit in particle.hits() if hit.isValidHit()]

        i = 0
        start_tolerance = 0
        hit_candidates = []
        start_flag = False

        layer_strs = []
        nPix = 0

        while i <= len(track_hits)-1:
            if track_hits[i].index() in particle_hit_indexes:
                start_tolerance += 1
                hit_candidates.append(track_hits[i])
                if start_tolerance >= LIMIT:
                    start_flag = True
                i += 1
            elif start_flag:
                break
            else:
                hit_candidates = []
                start_tolerance = 0
                i += 1

        for hit in hit_candidates:
            if "Pix" in hit.layerStr():
                if hit.layerStr() not in layer_strs:
                    layer_strs.append(hit.layerStr())
                nPix += 1
            else:
                break
        nPixLayers = len(layer_strs)
        ''' Uncomment to analyse fakes having >= 4 pixel hits
        if nPixLayers >= 4: #print([hit.layerStr() for hit in hit_candidates])
            if 'BPix1' in layer_strs and 'BPix2' in layer_strs:
                if 'BPix3' in layer_strs and 'FPix1' in layer_strs:
                    print("B3-F1")
                elif 'FPix1' in layer_strs and 'FPix2' in layer_strs:
                    print("F1-F2")
                else:
                    print("B1-B2 Other")
            else:
                print("Other")
        '''

        if start_tolerance == 0:
            print("Match is not a real match :\\")
        if len(hit_candidates) < 3 or not start_flag:
            print("No hit candidates from a match")
            print([hit.index() for hit in fake.hits() if hit.isValidHit()])
            print(particle_hit_indexes)
            print([hit.index() for hit in hit_candidates])
            print(start_tolerance)
            print(start_flag)
            return -1, -1
        return nPix, nPixLayers

    if CRITERION == "NLay":
        particle_hits = [hit for hit in particle.hits() if hit.isValidHit()]
        track_hits = [hit for hit in fake.hits() if hit.isValidHit()]
        particle_hit_indexes = [hit.index() for hit in particle.hits() if hit.isValidHit()]

        i = 0
        hit_candidates = []
        start_flag = False

        layer_strs = []
        nPix = 0

        while i <= len(track_hits)-1:
            if track_hits[i].index() in particle_hit_indexes:
                hit_candidates.append(track_hits[i])
                if "Pix" in track_hits[i].layerStr():
                    start_flag = True
                i += 1
            elif start_flag:
                break
            else:
                i += 1

        for hit in hit_candidates:
            if "Pix" in hit.layerStr():
                if hit.layerStr() not in layer_strs:
                    layer_strs.append(hit.layerStr())
                nPix += 1
            else:
                break
        nPixLayers = len(layer_strs)

        return nPix, nPixLayers

    return -1, -1


def MaxSharedHits(fake, fraction = False):
    ''' Returns the maximum number of hits that the fake shares with a single simulated particle. '''
    max_shared = 0
    max_frac = 0
    for particle in FindAssociatedParticles(fake):
        shared, nTrack, nParticle = SharedHitFrac(fake, particle, 1)
        frac = 1.0*shared/nParticle
        if shared > max_shared:
            max_shared = shared
            max_frac = frac
    if fraction: return max_shared, max_frac
    return max_shared

def StopReason(track):
    ''' Converts a track stop reason index to a string '''
    reasons = {0: "UNINITIALIZED",
               1: "MAX_HITS",
               2: "MAX_LOST_HITS",
               3: "MAX_CONSECUTIVE_LOST_HITS",
               4: "LOST_HIT_FRACTION",
               5: "MIN_PT",
               6: "CHARGE_SIGNIFICANCE",
               7: "LOOPER",
               8: "MAX_CCC_LOST_HITS",
               9: "NO_SEGMENTS_FOR_VALID_LAYERS",
               10: "SEED_EXTENSION",
               255: "NOT_STOPPED"}
    return reasons.get(track.stopReason(), "UNDEFINED STOPPING REASON")

def PrintTrackInfo(track, fake = None, frac = 0, fake_info = None):
    ''' Prints info on the track. Called from the PlotFakes method in graphics.py. '''
    if isinstance(track, Track):
        if track.nMatchedTrackingParticles() == 0:
            print(str(track.index()) + ": FAKE \nSTOP REASON: " + StopReason(track))
            print("Has " + str(track.nValid()) + " valid hits")
            if fake_info:
                fake_info.Print()
        else:
            reco_str = str(track.index()) + ": RECOVERED "
            for info in track.matchedTrackingParticleInfos():
                reco_str += str(info.trackingParticle().index()) + " " + str(info.shareFrac()) + "\nSTOP REASON: " + StopReason(track)
            print(reco_str)
    else:
        print(str(track.index()) + ": REAL")
        if track.nMatchedTracks() == 0: print("NOT RECOVERED")
        elif track.nMatchedTracks() == 1: print("RECOVERED")
        else: print("RECOVERED " + str(track.nMatchedTracks()) + " TIMES")
        decaycount = 0
        for decay in track.decayVertices(): decaycount += 1
        if decaycount: print("DECAYED " + str(decaycount) + " TIMES")

    if fake:
        if frac:
            num, div, npar = SharedHitFrac(fake, track, 1)
            print("Fake share fraction: " + str(num) + " / " + str(div) + ", track has " + str(npar) + " hits")
        else:
            dec = SharedHitFrac(fake, track, 0)
            print("Fake shares " + str(dec) + " fraction of hits with track")
        print("Shared hits from beginning: " + str(SharedHitsFromBeginning(track, fake, 10)))

    if isinstance(track, TrackingParticle):
        print("Parameters:")
        print("px : " + str(track.px()) + " py : " + str(track.py()) + " pz : " + str(track.pz()))
        print("pt : " + str(track.pca_pt()) + " eta : " + str(track.pca_eta()) + " phi : " + str(track.pca_phi()))
        print("dxy : " + str(track.pca_dxy()) + " dz : " + str(track.pca_dz()) + " q : " + str(track.q()) + "\n")
    else:
        print("Parameters:")
        print("px : " + str(track.px()) + " py : " + str(track.py()) + " pz : " + str(track.pz()))
        print("pt : " + str(track.pt()) + " eta : " + str(track.eta()) + " phi : " + str(track.phi()))
        print("dxy : " + str(track.dxy()) + " dz : " + str(track.dz()) + " q : " + str(track.q()) + "\n")

class FakeInfo(object):
    '''
    A storage and analysis class for a fake track.
    Construct this object with a fake track as a parameter to perform the analysis on that fake track.
    The results can then be obtained from the object attributes.
    '''
    def __init__(self, fake, real_criterion = ["consecutive", 3], end_criterion = ["nMissing", 1]):
        self.fake = fake
        self.index = fake.index()
        self.nHits = fake.nValid()
        self.nMatches = 0
        self.matches = []
        self.nReconstructed = 0
        self.nDecays = 0

        start = next(iter(fake.hits()))
        self.start_loc = [start.x(), start.y(), start.z()]

        self.stop_reason = fake.stopReason()

        self.FindMatches(fake, real_criterion, end_criterion)
        self.fake_class, self.fake_class_str = self.Classify()

    def FindMatches(self, fake, real_criterion, end_criterion):
        ''' Finds matches for the fake track. '''
        matched_particles = MatchedParticles(fake, real_criterion)
        self.nMatches = len(matched_particles)
        self.nIncludedDecayParticles = 0
        self.decayLinks = []

        if matched_particles:
            for particle in matched_particles:
                self.matches.append(MatchInfo(particle, fake, real_criterion, end_criterion))
            for match in self.matches:
                if match.nReconstructed > 0:
                    self.nReconstructed += 1
                if match.nDecays > 0:
                    self.nDecays += 1
                if match.nDaughters > 0:
                    for another_match in self.matches:
                        if another_match.parentVtxIndex in match.decayVtxIndexes:
                            self.nIncludedDecayParticles += 1
                            self.decayLinks.append([match.index, another_match.index])

            self.unmatched_max_shared = -1
        else:
            self.unmatched_max_shared = MaxSharedHits(fake)
        self.max_shared, self.max_frac = MaxSharedHits(fake, True)

        if not self.nMatches and not IsUnmatched(fake):
            self.nMatches = -1

    def Classify(self):
        ''' Classifies the fake after the analysis. Returns the class index and the class label. '''
        if self.nMatches == -1:
            return -1, "UNCLASSIFIED"
        if self.nMatches == 0:
            return 0, "UNMATCHED"
        if self.nMatches >= 2:
            if self.decayLinks:
                return 21, "MERGED DECAY TRACK MATCHES"
            elif self.nReconstructed >= self.nMatches:
                return 22, "MULTIPLE ALL RECONSTRUCTED MATCHES"
            elif self.nReconstructed > 0:
                return 23, "MULTIPLE PARTIALLY RECONSTRUCTED MATCHES"
            else:
                return 20, "MULTIPLE UNRECONSTRUCTED MATCHES"
        if self.nMatches == 1:
            if self.nReconstructed > 0 and self.nDecays > 0:
                return 11, "RECONSTRUCTED AND DECAYED MATCH"
            if self.nReconstructed > 0:
                return 12, "RECONSTRUCTED MATCH"
            if self.nDecays > 0:
                return 13, "DECAYED MATCH"
            else:
                return 10, "MATCH"

    def Print(self):
        ''' Prints the fake track classification info with the matched particle infos. '''
        print("CLASS: " + str(self.fake_class) + " WITH " + str(self.nMatches) + " MATCHES")
        print("Has " + str(self.nIncludedDecayParticles) + " included decay particles, with links: " + str(self.decayLinks))
        for match in self.matches: match.Print()
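
# A minimal sketch of classifying a single event's fakes with FakeInfo; "event"
# stands for an event from a TrackingNtuple.
def _fake_info_demo(event):
    ''' Builds a FakeInfo for each fake in the event and prints its class. '''
    for fake in FindFakes(event):
        info = FakeInfo(fake)
        print(str(info.index) + ": " + info.fake_class_str)
        info.Print()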

class MatchInfo(object):
    ''' A storage and analysis class for a TrackingParticle matched to a fake track. '''
    def __init__(self, match, fake, real_criterion = ["consecutive", 3], end_criterion = ["nMissing", 2]):
        ''' match = the matched TrackingParticle '''
        self.index = match.index()
        self.particle = match

        self.decayVertices = [[vtx.x(), vtx.y(), vtx.z()] for vtx in match.decayVertices()]
        self.nDecays = len(self.decayVertices)
        self.decayVtxIndexes = [vtx.index() for vtx in match.decayVertices()]

        self.shared_hits, un, needed = SharedHitFrac(fake, match, 1)

        self.daughterIndexes = []
        for vtx in match.decayVertices():
            for particle in vtx.daughterTrackingParticles():
                self.daughterIndexes.append(particle.index())
        self.nDaughters = len(self.daughterIndexes)
        vtx = match.parentVertex()
        self.parentVtx = [vtx.x(), vtx.y(), vtx.z()]
        self.parentVtxIndex = vtx.index()

        self.nReconstructed = match.nMatchedTracks()

        self.nPix, self.nPixLayers = MatchPixelHits(fake, match, real_criterion)

        last, fake_end, particle_end, self.end_class = FindEndOfTracking(fake, match, end_criterion)
        if last.isValidHit(): self.last_loc = [last.x(), last.y(), last.z()]
        else: self.last_loc = [0,0,0]
        if fake_end.isValidHit(): self.fake_end_loc = [fake_end.x(), fake_end.y(), fake_end.z()]
        else: self.fake_end_loc = [0,0,0]
        if particle_end.isValidHit(): self.particle_end_loc = [particle_end.x(), particle_end.y(), particle_end.z()]
        else: self.particle_end_loc = [0,0,0]

        self.last_detId = last.detId()
        self.fake_end_detId = fake_end.detId()
        self.particle_end_detId = particle_end.detId()

        self.particle_pdgId = match.pdgId()
        self.particle_pt = match.pt()

        # Glued strip hits do not have a single layer string, so use the mono hit's layer.
        if isinstance(last, GluedHit):
            self.last_str = last.monoHit().layerStr()
        else:
            self.last_str = last.layerStr()
        if isinstance(fake_end, GluedHit):
            self.fake_end_str = fake_end.monoHit().layerStr()
        else:
            self.fake_end_str = fake_end.layerStr()
        if isinstance(particle_end, GluedHit):
            self.particle_end_str = particle_end.monoHit().layerStr()
        else:
            self.particle_end_str = particle_end.layerStr()

    def Print(self):
        ''' Prints the match info. '''
        print("Match " + str(self.index) + ": nReconstructed " + str(self.nReconstructed) +
              ", daughters: " + str(self.daughterIndexes) +
              ", tracking failed in " + self.last_str + ", to " + self.particle_end_str + ", wrong hit in " + self.fake_end_str)

class EndInfo(object):
    ''' A storage class for end of tracking information for a matched fake '''
    def __init__(self):
        self.last = []
        self.fake_end = []
        self.particle_end = []
        self.last_str = ""
        self.fake_end_str = ""
        self.particle_end_str = ""
        self.fake_class = -1
        self.end_class = -1
        self.last_detId = -1
        self.fake_end_detId = -1
        self.particle_end_detId = -1
        self.particle_pdgId = -1
        self.particle_pt = -1

class ResolutionData(object):
    ''' A storage class for matched fake track resolutions '''
    def __init__(self):
        self.pt_rec = []
        self.pt_sim = []
        self.pt_delta = []
        self.eta = []
        self.Lambda = []
        self.cotTheta = []
        self.phi = []
        self.dxy = []
        self.dz = []
        self.fake_class = []

    def __iter__(self):
        for i in range(len(self.pt_rec)):
            yield ResolutionItem(self.pt_rec[i], self.pt_sim[i], self.pt_delta[i], self.eta[i], self.Lambda[i], self.cotTheta[i], self.phi[i], self.dxy[i], self.dz[i], self.fake_class[i])

class ResolutionItem(object):
    ''' A storage class for ResolutionData iteration '''
    def __init__(self, pt_rec, pt_sim, pt_delta, eta, Lambda, cotTheta, phi, dxy, dz, fake_class):
        self.pt_rec = pt_rec
        self.pt_sim = pt_sim
        self.pt_delta = pt_delta
        self.eta = eta
        self.Lambda = Lambda
        self.cotTheta = cotTheta
        self.phi = phi
        self.dxy = dxy
        self.dz = dz
        self.fake_class = fake_class
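
# A minimal sketch of consuming a ResolutionData object; "res" stands for the
# return value of Resolution_Analysis_Fakes or Resolution_Analysis_Trues below.
def _resolution_demo(res):
    ''' Prints the pt residual of every stored track-particle pair. '''
    for item in res:
        print("pt_rec: " + str(item.pt_rec) + " pt_sim: " + str(item.pt_sim) +
              " pt_delta: " + str(item.pt_delta) + " class: " + str(item.fake_class))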

def ClassifyEventFakes(ntuple_file, nEvents = 100, return_fakes = False, real_criterion = ["consecutive", 3]):
    '''
    Classifies all fakes in the first nEvents events of the ntuple_file (a TrackingNtuple object).
    Returns a dictionary with the class index as the key and the number of fakes in that class as the value.
    '''
    i = 0
    results = {class_item: 0 for class_item in classes}
    fake_list = []
    for event in ntuple_file:
        fakes = FindFakes(event)
        for fake in fakes:
            info = FakeInfo(fake, real_criterion)
            results[info.fake_class] += 1

            if return_fakes:
                fake_list.append(info)
        i += 1
        if i >= nEvents:
            break

    if return_fakes:
        return results, fake_list
    return results
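
# A minimal usage sketch, assuming "ntuple" is a TrackingNtuple object as in
# Validation.RecoTrack.plotting.ntuple.
def _classify_fakes_demo(ntuple):
    ''' Prints a human-readable summary of the fake classes in the first 10 events. '''
    results = ClassifyEventFakes(ntuple, nEvents = 10)
    for class_index, count in results.items():
        print(classes[class_index] + ": " + str(count))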

def Calculate_MaxMatchedHits(ntuple_file, nEvents = 100):
    '''
    Calculates the maximum number of shared hits for the fakes in the first nEvents events of the ntuple_file (a TrackingNtuple object).
    Returns a list of these maximum numbers, one for each analysed fake track.
    '''
    i = 0
    results = []
    for event in ntuple_file:
        fakes = FindFakes(event)
        for fake in fakes:
            res_temp = 0
            for particle in FindAssociatedParticles(fake):
                shared, par, nAll = SharedHitFrac(fake, particle, 1)
                if shared > res_temp:
                    res_temp = shared
            results.append(res_temp)

        i += 1
        if i >= nEvents:
            break

    return results

def Calculate_MaxMatchedConsecutiveHits(ntuple_file, nEvents = 100, frac = False):
    '''
    Calculates the maximum number of CONSECUTIVE (with respect to the fake track) shared hits for the fakes in the first nEvents events of the ntuple_file (a TrackingNtuple object).
    Returns a list of these maximum numbers, one for each analysed fake track.
    '''
    i = 0
    results = []
    for event in ntuple_file:
        fakes = FindFakes(event)
        for fake in fakes:
            res_temp = 0

            for particle in FindAssociatedParticles(fake):
                tolerance_mask = SharedHitsFromBeginning(particle, fake, fake.nValid())
                diff = [abs(tolerance_mask[j+1] - tolerance_mask[j]) for j in range(len(tolerance_mask)-1)]
                if frac:
                    if diff and 1.0*max(diff)/fake.nValid() > res_temp:
                        res_temp = 1.0*max(diff)/fake.nValid()
                    elif 1.0*tolerance_mask[0]/fake.nValid() > res_temp:
                        res_temp = 1.0*tolerance_mask[0]/fake.nValid()
                else:
                    if diff and max(diff) > res_temp:
                        res_temp = max(diff)
                    elif tolerance_mask[0] > res_temp:
                        res_temp = tolerance_mask[0]
            ''' Uncomment to debug
            if frac:
                if 1.0*res_temp/fake.nValid() > 0.75:
                    print(1.0*res_temp/fake.nValid())
                    print(res_temp)
                    print([hit.index() for hit in fake.hits()])
                    print([hit.index() for hit in particle.hits()])
                    print(tolerance_mask)
                    print(diff)
                    print(fake.nValid())
                    print(particle.nValid())

                results.append(1.0*res_temp/fake.nValid())
            '''
            results.append(res_temp)

        i += 1
        if i >= nEvents:
            break

    return results

def Calculate_MaxMatchedHits_RealTracks(ntuple_file, nEvents = 100):
    '''
    The same as Calculate_MaxMatchedHits, but for true tracks.
    '''
    i = 0
    results = []
    for event in ntuple_file:
        for track in event.tracks():
            if track.nMatchedTrackingParticles() >= 1:
                res_temp = 0
                for info in track.matchedTrackingParticleInfos():
                    particle = info.trackingParticle()
                    shared, par, nAll = SharedHitFrac(track, particle, 1)
                    if shared > res_temp:
                        res_temp = shared
                results.append(res_temp)

        i += 1
        if i >= nEvents:
            break

    return results

def Calculate_MaxMatchedConsecutiveHits_RealTracks(ntuple_file, nEvents = 100):
    '''
    The same as Calculate_MaxMatchedConsecutiveHits, but for true tracks.
    '''
    i = 0
    results = []
    for event in ntuple_file:
        for track in event.tracks():
            if track.nMatchedTrackingParticles() >= 1:
                res_temp = 0
                for info in track.matchedTrackingParticleInfos():
                    particle = info.trackingParticle()
                    tolerance_mask = SharedHitsFromBeginning(particle, track, track.nValid())
                    diff = [abs(tolerance_mask[j+1] - tolerance_mask[j]) for j in range(len(tolerance_mask)-1)]
                    if diff and max(diff) > res_temp:
                        res_temp = max(diff)
                    elif tolerance_mask[0] > res_temp:
                        res_temp = tolerance_mask[0]

                results.append(res_temp)

        i += 1
        if i >= nEvents:
            break

    return results

def Calculate_MatchPixelHits(ntuple_file, nEvents = 100, fake_mask = []):
    '''
    Calculates the number of pixel hits and pixel layers in the first consecutive shared fake hits, starting from the three matched hits.
    Returns a list of the number of pixel hits for each fake in nPix_data, and a list of the number of pixel layers for each fake in nLay_data.
    '''
    i = 0
    nPix_data = []
    nLay_data = []
    for event in ntuple_file:
        fakes = FindFakes(event)
        for fake in fakes:
            info = FakeInfo(fake)
            if not fake_mask or info.fake_class in fake_mask:
                for match in info.matches:
                    nPix_data.append(match.nPix)
                    nLay_data.append(match.nPixLayers)

        i += 1
        if i >= nEvents:
            break

    return nPix_data, nLay_data

def Calculate_UnmatchedSharedHits(ntuple_file, nEvents = 100, fake_mask = [0, -1]):
    '''
    Calculates the number of hits shared between fake tracks and any TrackingParticles for the "no match" and "loose match" classes.
    Returns a list of the results, one for each fake.
    '''
    results = []
    i = 0
    for event in ntuple_file:
        fakes = FindFakes(event)
        for fake in fakes:
            info = FakeInfo(fake)
            if info.fake_class in fake_mask:
                results.append(info.unmatched_max_shared)
        i += 1
        if i >= nEvents:
            break

    return results

def Calculate_SharedHits(ntuple_file, nEvents = 100, mask = []):
    '''
    Calculates the number of hits shared between fake tracks and some TrackingParticle.
    To filter only some of the classes into the data, put the class indexes inside the mask list.
    Returns two lists: the maximum shared hit counts and the shared hit fractions, one entry per fake.
    '''
    hits_shared = []
    hit_fractions = []
    i = 0
    for event in ntuple_file:
        fakes = FindFakes(event)
        for fake in fakes:
            info = FakeInfo(fake)
            if not mask or info.fake_class in mask:
                hits_shared.append(info.max_shared)
                hit_fractions.append(info.max_frac)
        i += 1
        if i >= nEvents:
            break

    return hits_shared, hit_fractions

def Calculate_SharedHits_RealTracks(ntuple_file, nEvents = 100):
    '''
    Calculates the number of hits shared between true tracks and their associated TrackingParticles.
    Returns two lists: the maximum shared hit counts and the shared hit fractions, one entry per true track.
    '''
    hits_shared = []
    hit_fractions = []
    i = 0
    for event in ntuple_file:
        for track in event.tracks():
            if track.nMatchedTrackingParticles() >= 1:
                info = FakeInfo(track)
                hits_shared.append(info.max_shared)
                hit_fractions.append(info.max_frac)
        i += 1
        if i >= nEvents:
            break

    return hits_shared, hit_fractions

def Calculate_IndludedDecayHitFractions(ntuple_file, nEvents = 100):
    '''
    Calculates the hit fractions (out of the fake's hits) for the daughter and parent particles of a decay,
    to analyse the multiple-match class "fake includes a decay interaction".
    Returns: daughter_frac = fraction of the fake's hits shared with the daughter particle
             parent_frac = fraction of the fake's hits shared with the parent particle
             total_frac = the sum of the daughter and parent fractions
    '''
    daughter_frac = []
    parent_frac = []
    total_frac = []
    i = 0
    for event in ntuple_file:
        fakes = FindFakes(event)
        for fake in fakes:
            info = FakeInfo(fake)
            if info.fake_class == 21:
                if len(info.decayLinks) >= 2: print("Double or more decays!")
                for link in info.decayLinks:
                    par_ind = link[0]
                    dau_ind = link[1]
                    for match in info.matches:
                        if match.index == par_ind: par_hits = match.shared_hits
                        if match.index == dau_ind: dau_hits = match.shared_hits
                    fake_hits = info.nHits

                    parent_frac.append(1.0*par_hits/fake_hits)
                    daughter_frac.append(1.0*dau_hits/fake_hits)
                    total_frac.append(1.0*(dau_hits + par_hits)/fake_hits)
        i += 1
        if i >= nEvents:
            break

    return daughter_frac, parent_frac, total_frac

def Get_EndOfTrackingPoints(ntuple_file, nEvents = 100, mask = []):
    '''
    Performs the analysis and returns a list of EndInfo objects containing information on the end of tracking for each fake track.
    '''
    end_infos = []
    i = 0
    for event in ntuple_file:
        fakes = FindFakes(event)
        for fake in fakes:
            info = FakeInfo(fake)
            if not mask or info.fake_class in mask:
                for match in info.matches:
                    end = EndInfo()
                    end.last = match.last_loc
                    end.fake_end = match.fake_end_loc
                    end.particle_end = match.particle_end_loc
                    end.end_class = match.end_class
                    end.fake_class = info.fake_class
                    end.last_str = match.last_str
                    end.fake_end_str = match.fake_end_str
                    end.particle_end_str = match.particle_end_str
                    end.last_detId = match.last_detId
                    end.fake_end_detId = match.fake_end_detId
                    end.particle_end_detId = match.particle_end_detId
                    end.particle_pdgId = match.particle_pdgId
                    end.particle_pt = match.particle_pt

                    end_infos.append(end)
        i += 1
        if i >= nEvents:
            break

    return end_infos

def Get_EndOfTrackingPointsReal(ntuple_file, nEvents = 100):
    '''
    Performs the analysis and returns a list of EndInfo objects containing information on the end of tracking for each TRUE track.
    '''
    end_infos = []
    i = 0
    for event in ntuple_file:
        trues = FindTrues(event)
        for true in trues:
            for info in true.matchedTrackingParticleInfos():
                particle = info.trackingParticle()
                last, track_end, particle_end, end_class = FindEndOfTracking(true, particle)
                end = EndInfo()
                if last.isValidHit(): end.last = [last.x(), last.y(), last.z()]
                if track_end.isValidHit(): end.fake_end = [track_end.x(), track_end.y(), track_end.z()]
                if particle_end.isValidHit(): end.particle_end = [particle_end.x(), particle_end.y(), particle_end.z()]
                end.end_class = end_class
                end.fake_class = -1
                end.last_str = last.layerStr()
                end.fake_end_str = track_end.layerStr()
                end.particle_end_str = particle_end.layerStr()
                end.last_detId = last.detId()
                end.fake_end_detId = track_end.detId()
                end.particle_end_detId = particle_end.detId()
                end.particle_pdgId = particle.pdgId()
                end.particle_pt = particle.pt()

                end_infos.append(end)
        i += 1
        if i >= nEvents:
            break

    return end_infos

def Save_Normalisation_Coefficients(ntuple_file):
    '''
    Calculates normalisation coefficients for the detector layers, which normalise the number of
    hits per layer with respect to the total number of TrackingParticle hits in the layer.
    The results are stored in a dictionary with the layer indexes as the keys and the normalised coefficients as the values.
    The resulting dictionary is saved to the file "normalisation_coefficients.dmp" with the pickle library.
    '''
    norm_c = copy(layer_data_tmp)

    print(sum([val for ind, val in norm_c.items()]))
    for event in ntuple_file:
        print(event.entry()+1)
        for particle in event.trackingParticles():
            for hit in particle.hits():
                if hit.isValidHit():
                    norm_c[layer_names_rev[hit.layerStr()]] += 1
    norm_sum = sum([val for ind, val in norm_c.items()])
    print(norm_sum)
    print(norm_c)
    for i, c in norm_c.items():
        norm_c[i] = 1.0*c/norm_sum

    print("normalisation_coefficients.dmp")
    print(norm_c)

    norm_file = open("normalisation_coefficients.dmp", 'wb')  # pickle needs a binary file handle
    pickle.dump(norm_c, norm_file)
    norm_file.close()

def Get_Normalisation_Coefficients():
    '''
    Loads the detector layer normalisation coefficients from the file created by Save_Normalisation_Coefficients above.
    '''
    norm_file = open("normalisation_coefficients.dmp", 'rb')  # pickle needs a binary file handle
    coefficients = pickle.load(norm_file)
    norm_file.close()
    return coefficients
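
# A minimal sketch of applying the coefficients; "raw_counts" stands for a
# dictionary of per-layer hit counts keyed by layer index, as in layer_data_tmp.
def _normalised_layer_counts_demo(raw_counts):
    ''' Normalises per-layer hit counts by the TrackingParticle hit density per layer. '''
    coefficients = Get_Normalisation_Coefficients()
    normalised = copy(raw_counts)
    for layer, count in raw_counts.items():
        if coefficients[layer] > 0:
            normalised[layer] = 1.0*count/coefficients[layer]
    return normalised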

def Resolution_Analysis_Fakes(ntuple_file, nEvents = 100, real_criterion = ["consecutive", 3]):
    '''
    Performs the analysis and returns a ResolutionData object containing the track parameter resolutions for fakes and their matched particles.
    '''
    res = ResolutionData()
    i = 0
    for event in ntuple_file:

        fakes = FindFakes(event)
        for fake in fakes:
            info = FakeInfo(fake, real_criterion)
            for match in info.matches:
                par = match.particle
                if par.pca_pt() == 0: continue

                res.pt_rec.append(fake.pt())
                res.pt_sim.append(par.pca_pt())
                res.pt_delta.append(fake.pt() - par.pca_pt())
                res.eta.append(fake.eta() - par.pca_eta())
                res.Lambda.append(getattr(fake, 'lambda')() - par.pca_lambda())  # 'lambda' is a reserved word, hence getattr
                res.cotTheta.append(fake.cotTheta() - par.pca_cotTheta())
                res.phi.append(fake.phi() - par.pca_phi())
                res.dxy.append(fake.dxy() - par.pca_dxy())
                res.dz.append(fake.dz() - par.pca_dz())

                res.fake_class.append(info.fake_class)

        i += 1
        if i >= nEvents:
            break
    return res

def Resolution_Analysis_Trues(ntuple_file, nEvents = 100):
    '''
    Performs the analysis and returns a ResolutionData object containing the track parameter resolutions for true tracks and their associated particles.
    '''
    res = ResolutionData()
    i = 0
    for event in ntuple_file:

        trues = FindTrues(event)
        for true in trues:
            for info in true.matchedTrackingParticleInfos():
                par = info.trackingParticle()
                if par.pca_pt() == 0: continue

                res.pt_rec.append(true.pt())
                res.pt_sim.append(par.pca_pt())
                res.pt_delta.append(true.pt() - par.pca_pt())
                res.eta.append(true.eta() - par.pca_eta())
                res.Lambda.append(getattr(true, 'lambda')() - par.pca_lambda())  # 'lambda' is a reserved word, hence getattr
                res.cotTheta.append(true.cotTheta() - par.pca_cotTheta())
                res.phi.append(true.phi() - par.pca_phi())
                res.dxy.append(true.dxy() - par.pca_dxy())
                res.dz.append(true.dz() - par.pca_dz())

                res.fake_class.append(-1)

        i += 1
        if i >= nEvents:
            break
    return res

def Calculate_AmountOfFakes(ntuple_file):
    ''' Calculates the number of fakes in the data. '''
    n = 0
    for event in ntuple_file:
        n += len(FindFakes(event))
    return n

def Analyse_EOT_Error(end_list, bpix3_mask = False, detId_mask = "all", pt_mask = False):
    '''
    Analyses the distance between the fake and particle hits which follow the last shared hit at the end of tracking.
    bpix3_mask filters out all fakes except those which have the last shared hit in the BPix3 layer and the following particle hit in the TIB1 layer.
    detId_mask filters with respect to whether the following fake and particle hits have the same or a different detector ID.
    pt_mask filters out the fakes with particle pt < 0.7.

    Returns a list of the errors, one for each fake.
    '''
    error = []
    for end in end_list:
        if end.fake_end != [0,0,0] and end.particle_end != [0,0,0]:
            if pt_mask and end.particle_pt < 0.7: continue

            if bpix3_mask and end.end_class == 0 and end.last_str == "BPix3" and end.particle_end_str == "TIB1":
                if detId_mask == "same" and end.fake_end_detId == end.particle_end_detId:
                    error.append(Distance(end.fake_end[0:2], end.particle_end[0:2]))
                elif detId_mask == "different" and end.fake_end_detId != end.particle_end_detId:
                    error.append(Distance(end.fake_end, end.particle_end))
                elif detId_mask == "all":
                    error.append(Distance(end.fake_end, end.particle_end))
                if error and error[-1] == 0.0: print("Error is 0.0?!")
                if error and error[-1] > 10: print(str(end.fake_end_detId) + " and " + str(end.particle_end_detId) + ": " + str(error[-1]) + " z: " + str(end.fake_end[2]) + " " + str(end.particle_end[2]))
            elif not bpix3_mask:
                if detId_mask == "same" and end.fake_end_detId == end.particle_end_detId:
                    error.append(Distance(end.fake_end[0:2], end.particle_end[0:2]))
                elif detId_mask == "different" and end.fake_end_detId != end.particle_end_detId:
                    error.append(Distance(end.fake_end, end.particle_end))
                elif detId_mask == "all":
                    error.append(Distance(end.fake_end, end.particle_end))

    print(sum(error)/len(error))
    return error
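
# A minimal end-to-end sketch tying the end-of-tracking helpers together;
# "ntuple" stands for a TrackingNtuple object.
def _eot_error_demo(ntuple):
    ''' Collects end-of-tracking points from 10 events and analyses the hit errors. '''
    ends = Get_EndOfTrackingPoints(ntuple, nEvents = 10)
    errors = Analyse_EOT_Error(ends, detId_mask = "all")  # also prints the mean error
    EndOfTrackingDetectorInfo(ends)
    return errors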

def EndOfTrackingDetectorInfo(end_list, end_mask = [0], BPix3mask = False):
    '''
    Prints info on how many ends of tracking have the particle hit and the fake hit in the same detector module.
    BPix3mask filters out all fakes except those which have the last shared hit in the BPix3 layer and the following particle hit in the TIB1 layer.
    '''
    data = []
    for end in end_list:
        if (not end_mask or end.end_class in end_mask) and (not BPix3mask or (end.last_str == "BPix3" and end.particle_end_str == "TIB1")):
            if end.particle_end_detId == end.fake_end_detId:
                data.append(1)
            else:
                data.append(0)
    print("Same detector id between fake end and particle end: " + str(sum(data)))
    print("Different detector id: " + str(len(data)-sum(data)))

def Analyse_EOT_ParticleDoubleHit(end_list, layer = "BPix3", end_mask = [0,4]):
    '''
    Prints info on how many fakes have an end of tracking with both the last shared hit and the following particle hit in the same detector layer.
    By default this is analysed on the BPix3 layer and for the fakes with end class 0 or 4 (4 is merged into 3 in some analyses).
    '''
    doubles = 0
    all_particles = 0
    for end in end_list:
        if (not end_mask or end.end_class in end_mask) and (layer in end.last_str):
            if end.last_str == end.particle_end_str:
                doubles += 1

            all_particles += 1

    print("In layer " + layer + " there are " + str(doubles) + " end of trackings out of " + str(all_particles) + " (" + str(100.0*doubles/all_particles) + "%) which have a double hit in the EOT layer")

def TrackPt(ntuple_file, nEvents = 100):
    '''
    Returns two lists of the fake track and true track pt values.
    '''
    fake_pts = []
    true_pts = []

    i = 0
    for event in ntuple_file:
        print("Event: " + str(i))
        for track in event.tracks():
            if track.nMatchedTrackingParticles() == 0:
                fake_pts.append(track.pt())
            else:
                true_pts.append(track.pt())

        i += 1
        if i >= nEvents:
            break

    return fake_pts, true_pts
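
# A minimal plotting sketch using the ROOT import above; the histogram names,
# titles and binning are illustrative.
def _track_pt_histogram_demo(ntuple):
    ''' Fills and draws pt histograms for the fake and true tracks of 10 events. '''
    fake_pts, true_pts = TrackPt(ntuple, nEvents = 10)
    h_fake = ROOT.TH1F("fake_pt", "Fake track p_{T};p_{T} (GeV);tracks", 50, 0, 10)
    h_true = ROOT.TH1F("true_pt", "True track p_{T};p_{T} (GeV);tracks", 50, 0, 10)
    for pt in fake_pts:
        h_fake.Fill(pt)
    for pt in true_pts:
        h_true.Fill(pt)
    h_true.Draw()
    h_fake.Draw("same")
    return h_fake, h_true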