from builtins import range
import ROOT
from array import array
from collections import OrderedDict
from copy import copy
import pickle
from math import sqrt, copysign, sin, cos, pi

from Validation.RecoTrack.plotting.ntuple import *


''' These are different class labels, which are mainly used for plotting histograms. '''


classes = {-1: "UNCLASSIFIED",
            0: "UNMATCHED",
           21: "MERGED DECAY TRACK MATCHES",
           22: "MULTIPLE ALL RECONSTRUCTED MATCHES",
           23: "MULTIPLE PARTIALLY RECONSTRUCTED MATCHES",
           20: "MULTIPLE UNRECONSTRUCTED MATCHES",
           11: "RECONSTRUCTED AND DECAYED MATCH",
           12: "RECONSTRUCTED MATCH",
           13: "DECAYED MATCH",
           10: "MATCH"}


end_class_names = {0: "Fake and particle tracks differ",
                   1: "Tracking ended correctly",
                   2: "Fake ended before particle",
                   3: "Particle ended before fake",
                   4: "Particle ended within two hits in same layer"}


layer_names_t = ((1, "BPix1"),
                 (2, "BPix2"),
                 (3, "BPix3"),
                 (4, "FPix1"),
                 (5, "FPix2"),
                 (11, "TIB1"),
                 (12, "TIB2"),
                 (13, "TIB3"),
                 (14, "TIB4"),
                 (21, "TID1"),
                 (22, "TID2"),
                 (23, "TID3"),
                 (31, "TOB1"),
                 (32, "TOB2"),
                 (33, "TOB3"),
                 (34, "TOB4"),
                 (35, "TOB5"),
                 (36, "TOB6"),
                 (41, "TEC1"),
                 (42, "TEC2"),
                 (43, "TEC3"),
                 (44, "TEC4"),
                 (45, "TEC5"),
                 (46, "TEC6"),
                 (47, "TEC7"),
                 (48, "TEC8"),
                 (49, "TEC9"))
layer_names = OrderedDict(layer_names_t)

# Reverse mapping (layer name -> layer index), derived from layer_names_t
# so the two tables cannot drift out of sync.
layer_names_rev = OrderedDict((name, index) for index, name in layer_names_t)


# Per-layer counters initialised to zero, in the same order as layer_names.
layer_data_tmp = OrderedDict((index, 0) for index in layer_names)


invalid_types_t = ((0, "valid"),
                   (1, "missing"),
                   (2, "inactive"),
                   (3, "bad"),
                   (4, "missing_inner"),
                   (5, "missing_outer"))
invalid_types = OrderedDict(invalid_types_t)


# Per-type counters initialised to zero, in the same order as invalid_types.
invalid_types_tmp = OrderedDict((index, 0) for index in invalid_types)




def FindFakes(event):
    '''Returns the fake tracks of an event in a list.'''
    fakes = []
    for track in event.tracks():
        if track.nMatchedTrackingParticles() == 0:
            fakes.append(track)
    print("Event: " + str(event.entry()+1) + " Number of fake tracks: " + str(len(fakes)))
    return fakes

def FindTrues(event):
    '''Returns the true tracks of an event in a list.'''
    trues = []
    for track in event.tracks():
        if track.nMatchedTrackingParticles() >= 1:
            trues.append(track)
    print("Event: " + str(event.entry()+1) + " Number of true tracks: " + str(len(trues)))
    return trues

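# A minimal usage sketch of the two helpers above (hedged: the ntuple file
# name is hypothetical; TrackingNtuple comes from the star-import of
# Validation.RecoTrack.plotting.ntuple):
#
#   ntuple = TrackingNtuple("trackingNtuple.root")
#   for event in ntuple:
#       fakes = FindFakes(event)
#       trues = FindTrues(event)
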
def Distance(x1, x2):
    '''Returns the Euclidean distance between two iterable vectors.'''
    return sqrt(sum((a - b)**2 for a, b in zip(x1, x2)))

def FindUntrackedParticles(event):
    '''Returns the untracked TrackingParticles of an event, according to MTFEfficiencySelector.'''
    untracked = []

    for particle in event.trackingParticles():
        if (particle.isValid() and particle.nMatchedTracks() == 0 and MTFEfficiencySelector(particle)):
            untracked.append(particle)
    print("Number of untracked particles: " + str(len(untracked)))
    return untracked

def MTFEfficiencySelector(particle):
    '''
    A selector for analysing the MultiTrackFinder efficiency rate.
    Used to analyse untracked particles.
    Returns True if the particle fulfils the criteria for an "interesting" particle,
    i.e. one that could have been reconstructed precisely.
    '''
    if(particle.pt() > 0.9 and abs(particle.eta()) < 2.5
       and (particle.parentVertex().x()**2 + particle.parentVertex().y()**2 < 3.5**2)
       and abs(particle.parentVertex().z()) < 30 and particle.q() != 0 and particle.event() == 0):
        return True
    return False

def EfficiencyRate(ntuple, N):
    '''
    Calculates the efficiency rate over the first N events in the ntuple (TrackingNtuple object).
    The efficiency rate is the fraction of selected particles that were tracked.
    Prints the output.
    '''
    tracked = 0
    untracked = 0
    i = 0
    for event in ntuple:
        if (i >= N): break
        for particle in event.trackingParticles():
            if (MTFEfficiencySelector(particle)):
                if(particle.nMatchedTracks() == 0): untracked += 1
                else: tracked += 1
        i += 1
    print("In " + str(N) + " events there are:")
    print("Tracked particles: " + str(tracked))
    print("Untracked particles: " + str(untracked))
    print("Efficiency rate: " + str(float(tracked)/(tracked+untracked)))

def SharedHitFrac(track, particle, frac = 0):
    '''
    Shared hits or hit fractions between a Track and a TrackingParticle.
    If frac is nonzero, returns the number of shared hits, the number of hits in the Track
    and the number of hits in the TrackingParticle.
    Otherwise returns the fraction of the Track's hits that are shared with the TrackingParticle.
    '''
    particle_hits = [hit.index() for hit in particle.hits() if hit.isValidHit()]
    shared_hits = 0
    ntrack_hits = 0
    for hit in track.hits():
        if hit.isValidHit() and hit.index() in particle_hits:
            shared_hits += 1
        ntrack_hits += 1
    if frac:
        return shared_hits, ntrack_hits, len(particle_hits)
    else:
        return 1.0*shared_hits/ntrack_hits

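# Worked example of the frac flag (hypothetical counts): if a fake shares 5 of
# its 8 valid hits with a particle that has 7 valid hits,
#
#   SharedHitFrac(fake, particle, 1)  ->  (5, 8, 7)
#   SharedHitFrac(fake, particle, 0)  ->  5.0/8 = 0.625
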
def SharedHitsFromBeginning(track, particle, tolerance=0):
    '''
    Checks the shared hits with a particle from the beginning of the track.
    Tolerance is the number of allowed differences (errors) between the tracks.
    Returns a list containing the number of shared hits from the beginning
    as a function of tolerance (the list index).

    Example:
    Output: [3, 3, 4, 4, 4]
    Means: the first 3 particle hits are shared with the track,
           then 1 unshared particle hit,
           then 1 more shared particle hit,
           then 2 unshared particle hits
           until tolerance < 0 (or the particle track ended)

    NOTE: Often this is called with a TrackingParticle as the parameter "track" and a Track as the parameter "particle",
    because it was later needed to analyse the hits which are consecutive with respect to the Track.
    Sorry about the inconvenience.
    '''
    particle_hits = [hit.index() for hit in particle.hits() if hit.isValidHit()]
    track_hits = [hit.index() for hit in track.hits() if hit.isValidHit()]

    count = 0
    i = 0
    result = []
    while tolerance >= 0 and i < len(particle_hits) and count < len(track_hits):
        if particle_hits[i] in track_hits:
            count += 1
            i += 1
        else:
            tolerance -= 1
            result.append(count)
            i += 1
    if tolerance >= 0:
        result.append(count)
    return result

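# Illustration of the bookkeeping above on plain index lists (standalone
# example; the hit indexes are made up):
#
#   particle_hits = [10, 11, 12, 99, 13]   # hit 99 is not on the track
#   track_hits    = [10, 11, 12, 13]
#
# With tolerance=1 the loop counts hits 10, 11 and 12 (count=3), spends one
# unit of tolerance on hit 99 (appending 3), counts hit 13, and appends the
# final count when the loop ends: result == [3, 4].
# With tolerance=0 it stops at hit 99: result == [3].
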
def SharedHits(track, particle):
    '''Returns the hits shared between a Track and a TrackingParticle in a list.'''
    res_hits = []
    particle_hit_indexes = [hit.index() for hit in particle.hits() if hit.isValidHit()]
    track_hits = [hit for hit in track.hits() if hit.isValidHit()]

    for hit in track_hits:
        if hit.index() in particle_hit_indexes:
            res_hits.append(hit)

    return res_hits

def FindAssociatedParticles(track):
    '''Returns a list of the TrackingParticles that share any hits with the given Track.'''
    particle_inds = []
    particles = []
    for hit in track.hits():
        if hit.isValidHit() and hit.nSimHits() > 0:
            for simHit in hit.simHits():
                particle = simHit.trackingParticle()
                if particle.index() not in particle_inds:
                    particle_inds.append(particle.index())
                    particles.append(particle)
    return particles

def MatchedParticles(fake, real_criterion = ["consecutive", 3]):
    '''
    Finds the possible matched real particles of a fake track.
    Currently there are three possible criteria for evaluating a possible match:
      consecutive: the particle shares a given number of consecutive hits with the fake track (default, with a 3-hit limit)
      fraction: a given fraction of the fake hits is included in a particle track
      NLay: a given number of pixel / strip layers is included in the shared hits

    Parameters: fake track, criterion type and limit value in a list
    Returns: a list of matched particles
    '''
    CRITERION = real_criterion[0]
    LIMIT = real_criterion[1]

    particles = FindAssociatedParticles(fake)
    matches = []
    for particle in particles:
        if CRITERION == "consecutive":
            tolerance_mask = SharedHitsFromBeginning(particle, fake, particle.nValid())
            diff = [abs(tolerance_mask[i+1] - tolerance_mask[i]) for i in range(len(tolerance_mask)-1)]
            if tolerance_mask[0] >= LIMIT or (diff and max(diff) >= LIMIT):
                matches.append(particle)
        if CRITERION == "fraction":
            if SharedHitFrac(fake, particle, 0) >= LIMIT:
                matches.append(particle)
        if CRITERION == "NLay":
            hits = SharedHits(fake, particle)
            nPix = 0
            nStr = 0
            tracked_layers = []
            for hit in hits:
                layer = hit.layerStr()
                if layer not in tracked_layers:
                    tracked_layers.append(layer)
                    if "Pix" in layer: nPix += 1
                    else: nStr += 1
            if 2*nPix + nStr >= LIMIT:
                matches.append(particle)
    return matches

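# Usage sketch of the three criteria (the limit values here are illustrative,
# not tuned recommendations):
#
#   matches = MatchedParticles(fake)                      # "consecutive", >= 3 consecutive shared hits
#   matches = MatchedParticles(fake, ["fraction", 0.75])  # >= 75% of the fake hits shared
#   matches = MatchedParticles(fake, ["NLay", 4])         # 2*nPix + nStr >= 4 shared layers
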
def IsUnmatched(fake, unmatch_criterion = ["nShared", 2]):
    '''
    Returns True if the fake track is not matched to any particle with respect
    to the criterion. By default the fake is unmatched if it shares
    n <= 2 hits with every associated particle.
    '''
    CRITERION = unmatch_criterion[0]
    LIMIT = unmatch_criterion[1]

    for particle in FindAssociatedParticles(fake):
        if CRITERION == "nShared":
            shared, track_hits, particle_hits = SharedHitFrac(fake, particle, 1)
            if shared > LIMIT:
                return False
    return True

def FindEndOfTracking(fake, particle, end_criterion = ["nMissing", 2], real_criterion = ["consecutive", 3], only_valid = False):
    '''
    Finds the point where the fake stops tracking the particle, according to
    end_criterion, which by default is 2 consecutive missing layers in the particle hits.
    Returns: last: the last shared hit between the fake and the particle, i.e. the first hit in the particle trajectory after which the tracks separate by the end criterion (or end)
             fake_end: the fake track hit following the last shared hit
             particle_end: the particle hit following the last shared hit
    fake_end and particle_end may be the same as the last shared hit if the fake track or the particle track (or both) end there.
    '''
    CRITERION = end_criterion[0]
    LIMIT = end_criterion[1]
    REAL_CRITERION = real_criterion[0]
    REAL_LIMIT = real_criterion[1]

    if CRITERION == "nMissing" and REAL_CRITERION == "consecutive":
        if only_valid:
            particle_hits = [hit for hit in particle.hits() if hit.isValidHit()]
            particle_hit_indexes = [hit.index() for hit in particle.hits() if hit.isValidHit()]
            track_hits = [hit for hit in fake.hits() if hit.isValidHit()]
            track_hit_indexes = [hit.index() for hit in fake.hits() if hit.isValidHit()]
        else:
            particle_hits = [hit for hit in particle.hits()]
            particle_hit_indexes = [hit.index() for hit in particle.hits()]
            track_hits = [hit for hit in fake.hits()]
            track_hit_indexes = [hit.index() for hit in fake.hits()]

        tolerance = LIMIT
        i = 0
        start_tolerance = 0
        last = particle_hits[0]
        particle_end = particle_hits[0]
        fake_end = particle_hits[0]

        # First find the point in the particle trajectory where the match starts,
        # i.e. where REAL_LIMIT consecutive fake hits are shared with the particle.
        while i < len(track_hits):
            if track_hit_indexes[i] in particle_hit_indexes:
                start_tolerance += 1
                if start_tolerance >= REAL_LIMIT:
                    tolerance = LIMIT
                    i = particle_hit_indexes.index(track_hit_indexes[i])
                    last = particle_hits[i]
                    particle_end = particle_hits[min([i+1, len(particle_hits)-1])]
                    fake_end = track_hits[min(track_hit_indexes.index(particle_hit_indexes[i])+1, len(track_hits)-1)]
                    break
                i += 1
            else:
                start_tolerance = 0
                i += 1

        # Then follow the particle trajectory until LIMIT consecutive layers
        # are missing from the fake track.
        while tolerance >= 0 and i < len(particle_hits):
            if particle_hit_indexes[i] in track_hit_indexes:
                tolerance = LIMIT
                last = particle_hits[i]
                particle_end = particle_hits[min([i+1, len(particle_hits)-1])]
                fake_end = track_hits[min(track_hit_indexes.index(particle_hit_indexes[i])+1, len(track_hits)-1)]
            elif not (particle_hits[i-1].layerStr() in particle_hits[i].layerStr() or particle_hits[i].layerStr() in particle_hits[i-1].layerStr()):
                tolerance -= 1
            i += 1
    end_class = 0
    if last.index() == fake_end.index() and last.index() == particle_end.index():
        end_class = 1
    elif last.index() == fake_end.index(): end_class = 2
    elif last.index() == particle_end.index(): end_class = 3
    elif last.layerStr() == particle_hits[-1].layerStr() and (len(particle_hits)-1 - i < 4): end_class = 4  # particle ended in the same layer, see end_class_names[4]
    '''
    if tolerance >= LIMIT: # If the end of the particle was reached, last and fail are the same
        last = particle_end
        fake_end = particle_end
        end_class = 1

    print([[hit.index(), hit.layerStr()] for hit in track_hits])
    print([[hit.index(), hit.layerStr()] for hit in particle_hits])
    print(i)
    print(last.index())
    print(fake_end.index())
    print(particle_end.index())
    print(end_class)
    print("*****")
    input()
    '''
    return last, fake_end, particle_end, end_class

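# Sketch: interpreting the return values (end_class indexes end_class_names):
#
#   last, fake_end, particle_end, end_class = FindEndOfTracking(fake, particle)
#   print(end_class_names[end_class])
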
def MatchPixelHits(fake, particle, real_criterion = ["consecutive", 3]):
    '''
    Finds how many shared pixel hits the fake has with a particle, starting from
    the beginning of the shared hits.
    '''
    CRITERION = real_criterion[0]
    LIMIT = real_criterion[1]

    if CRITERION == "consecutive":
        particle_hits = [hit for hit in particle.hits() if hit.isValidHit()]
        track_hits = [hit for hit in fake.hits() if hit.isValidHit()]
        particle_hit_indexes = [hit.index() for hit in particle.hits() if hit.isValidHit()]

        i = 0
        start_tolerance = 0
        hit_candidates = []
        start_flag = False

        layer_strs = []
        nPix = 0

        while i <= len(track_hits)-1:
            if track_hits[i].index() in particle_hit_indexes:
                start_tolerance += 1
                hit_candidates.append(track_hits[i])
                if start_tolerance >= LIMIT:
                    start_flag = True
                i += 1
            elif start_flag:
                break
            else:
                hit_candidates = []
                start_tolerance = 0
                i += 1

        for hit in hit_candidates:
            if "Pix" in hit.layerStr():
                if hit.layerStr() not in layer_strs:
                    layer_strs.append(hit.layerStr())
                nPix += 1
            else:
                break
        nPixLayers = len(layer_strs)
        ''' Uncomment to analyse fakes having >= 4 pixel hits
        if nPixLayers >= 4: #print([hit.layerStr() for hit in hit_candidates])
            if 'BPix1' in layer_strs and 'BPix2' in layer_strs:
                if 'BPix3' in layer_strs and 'FPix1' in layer_strs:
                    print("B3-F1")
                elif 'FPix1' in layer_strs and 'FPix2' in layer_strs:
                    print("F1-F2")
                else:
                    print("B1-B2 Other")
            else:
                print("Other")
        '''

        if start_tolerance == 0:
            print("Match is not a real match :\\")
        if len(hit_candidates) < 3 or not start_flag:
            print("No hit candidates from a match")
            print([hit.index() for hit in fake.hits() if hit.isValidHit()])
            print(particle_hit_indexes)
            print([hit.index() for hit in hit_candidates])
            print(start_tolerance)
            print(start_flag)
            return -1, -1
        return nPix, nPixLayers

    if CRITERION == "NLay":
        particle_hits = [hit for hit in particle.hits() if hit.isValidHit()]
        track_hits = [hit for hit in fake.hits() if hit.isValidHit()]
        particle_hit_indexes = [hit.index() for hit in particle.hits() if hit.isValidHit()]

        i = 0
        hit_candidates = []
        start_flag = False

        layer_strs = []
        nPix = 0

        while i <= len(track_hits)-1:
            if track_hits[i].index() in particle_hit_indexes:
                hit_candidates.append(track_hits[i])
                if "Pix" in track_hits[i].layerStr():  # was "hit.layerStr()", which is undefined at this point
                    start_flag = True
                i += 1
            elif start_flag:
                break
            else:
                i += 1

        for hit in hit_candidates:
            if "Pix" in hit.layerStr():
                if hit.layerStr() not in layer_strs:
                    layer_strs.append(hit.layerStr())
                nPix += 1
            else:
                break
        nPixLayers = len(layer_strs)

        return nPix, nPixLayers

    return -1, -1

def MaxSharedHits(fake, fraction = False):
    ''' Returns the maximum number of hits that the fake shares with any single simulated particle. '''
    max_shared = 0
    max_frac = 0
    for particle in FindAssociatedParticles(fake):
        shared, nTrack, nParticle = SharedHitFrac(fake, particle, 1)
        frac = 1.0*shared/nParticle
        if shared > max_shared:
            max_shared = shared
            max_frac = frac
    if fraction: return max_shared, max_frac
    return max_shared


_stop_reason_names = {0: "UNINITIALIZED",
                      1: "MAX_HITS",
                      2: "MAX_LOST_HITS",
                      3: "MAX_CONSECUTIVE_LOST_HITS",
                      4: "LOST_HIT_FRACTION",
                      5: "MIN_PT",
                      6: "CHARGE_SIGNIFICANCE",
                      7: "LOOPER",
                      8: "MAX_CCC_LOST_HITS",
                      9: "NO_SEGMENTS_FOR_VALID_LAYERS",
                      10: "SEED_EXTENSION",
                      255: "NOT_STOPPED"}

def StopReason(track):
    ''' Converts the track stop reason index to a string. '''
    return _stop_reason_names.get(track.stopReason(), "UNDEFINED STOPPING REASON")


def PrintTrackInfo(track, fake = None, frac = 0, fake_info = None):
    ''' Prints info on the track. Called from the PlotFakes method in graphics.py. '''
    if isinstance(track, Track):
        if track.nMatchedTrackingParticles() == 0:
            print(str(track.index()) + ": FAKE \nSTOP REASON: " + StopReason(track))
            print("Has " + str(track.nValid()) + " valid hits")
            if fake_info:
                fake_info.Print()
        else:
            reco_str = str(track.index()) + ": RECOVERED "
            for info in track.matchedTrackingParticleInfos():
                reco_str += str(info.trackingParticle().index()) + " " + str(info.shareFrac()) + "\nSTOP REASON: " + StopReason(track)
            print(reco_str)
    else:
        print(str(track.index()) + ": REAL")
        if track.nMatchedTracks() == 0: print("NOT RECOVERED")
        elif track.nMatchedTracks() == 1: print("RECOVERED")
        else: print("RECOVERED " + str(track.nMatchedTracks()) + " TIMES")
        decaycount = 0
        for decay in track.decayVertices(): decaycount += 1
        if decaycount: print("DECAYED " + str(decaycount) + " TIMES")

    if fake:
        if frac:
            num, div, npar = SharedHitFrac(fake, track, 1)
            print("Fake share fraction: " + str(num) + " / " + str(div) + ", track has " + str(npar) + " hits")
        else:
            dec = SharedHitFrac(fake, track, 0)
            print("Fake shares a fraction " + str(dec) + " of its hits with the track")
        print("Shared hits from beginning: " + str(SharedHitsFromBeginning(track, fake, 10)))

    if isinstance(track, TrackingParticle):
        print("Parameters:")
        print("px : " + str(track.px()) + " py : " + str(track.py()) + " pz : " + str(track.pz()))
        print("pt : " + str(track.pca_pt()) + " eta : " + str(track.pca_eta()) + " phi : " + str(track.pca_phi()))
        print("dxy : " + str(track.pca_dxy()) + " dz : " + str(track.pca_dz()) + " q : " + str(track.q()) + "\n")
    else:
        print("Parameters:")
        print("px : " + str(track.px()) + " py : " + str(track.py()) + " pz : " + str(track.pz()))
        print("pt : " + str(track.pt()) + " eta : " + str(track.eta()) + " phi : " + str(track.phi()))
        print("dxy : " + str(track.dxy()) + " dz : " + str(track.dz()) + " q : " + str(track.q()) + "\n")


class FakeInfo(object):
    '''
    A storage and analysis class for a fake track.
    Construct this object with a fake track as a parameter to perform the analysis on that fake track.
    The results can then be obtained from the object attributes.
    '''
    def __init__(self, fake, real_criterion = ["consecutive", 3], end_criterion = ["nMissing", 1]):
        self.fake = fake
        self.index = fake.index()
        self.nHits = fake.nValid()
        self.nMatches = 0
        self.matches = []
        self.nReconstructed = 0
        self.nDecays = 0

        start = next(iter(fake.hits()))
        self.start_loc = [start.x(), start.y(), start.z()]

        self.stop_reason = fake.stopReason()

        self.FindMatches(fake, real_criterion, end_criterion)
        self.fake_class, self.fake_class_str = self.Classify()

    def FindMatches(self, fake, real_criterion, end_criterion):
        ''' Finds matches for the fake track. '''
        matched_particles = MatchedParticles(fake, real_criterion)
        self.nMatches = len(matched_particles)
        self.nIncludedDecayParticles = 0
        self.decayLinks = []

        if matched_particles:
            for particle in matched_particles:
                self.matches.append(MatchInfo(particle, fake, real_criterion, end_criterion))
            for match in self.matches:
                if match.nReconstructed > 0:
                    self.nReconstructed += 1
                if match.nDecays > 0:
                    self.nDecays += 1
                if match.nDaughters > 0:
                    for another_match in self.matches:
                        if another_match.parentVtxIndex in match.decayVtxIndexes:
                            self.nIncludedDecayParticles += 1
                            self.decayLinks.append([match.index, another_match.index])

            self.unmatched_max_shared = -1
        else:
            self.unmatched_max_shared = MaxSharedHits(fake)
        self.max_shared, self.max_frac = MaxSharedHits(fake, True)

        if not self.nMatches and not IsUnmatched(fake):
            self.nMatches = -1

    def Classify(self):
        ''' Classifies the fake after the analysis. '''
        if self.nMatches == -1:
            return -1, "UNCLASSIFIED"
        if self.nMatches == 0:
            return 0, "UNMATCHED"
        if self.nMatches >= 2:
            if self.decayLinks:
                return 21, "MERGED DECAY TRACK MATCHES"
            elif self.nReconstructed >= self.nMatches:
                return 22, "MULTIPLE ALL RECONSTRUCTED MATCHES"
            elif self.nReconstructed > 0:
                return 23, "MULTIPLE PARTIALLY RECONSTRUCTED MATCHES"
            else:
                return 20, "MULTIPLE UNRECONSTRUCTED MATCHES"
        if self.nMatches == 1:
            if self.nReconstructed > 0 and self.nDecays > 0:
                return 11, "RECONSTRUCTED AND DECAYED MATCH"
            if self.nReconstructed > 0:
                return 12, "RECONSTRUCTED MATCH"
            if self.nDecays > 0:
                return 13, "DECAYED MATCH"
            else:
                return 10, "MATCH"

    def Print(self):
        ''' Prints the fake track classification info with the matched particle infos. '''
        print("CLASS: " + str(self.fake_class) + " WITH " + str(self.nMatches) + " MATCHES")
        print("Has " + str(self.nIncludedDecayParticles) + " included decay particles, with links: " + str(self.decayLinks))
        for match in self.matches: match.Print()

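# Minimal sketch of using FakeInfo directly (fake is a Track with zero
# matched TrackingParticles, e.g. from FindFakes):
#
#   info = FakeInfo(fake)
#   print(classes[info.fake_class])
#   info.Print()
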
class MatchInfo(object):
    ''' A storage and analysis class for a TrackingParticle matched to a fake track. '''
    def __init__(self, match, fake, real_criterion = ["consecutive", 3], end_criterion = ["nMissing", 2]):
        ''' match = the matched TrackingParticle '''
        self.index = match.index()
        self.particle = match

        self.decayVertices = [[vtx.x(), vtx.y(), vtx.z()] for vtx in match.decayVertices()]
        self.nDecays = len(self.decayVertices)
        self.decayVtxIndexes = [vtx.index() for vtx in match.decayVertices()]

        self.shared_hits, un, needed = SharedHitFrac(fake, match, 1)

        self.daughterIndexes = []
        for vtx in match.decayVertices():
            for particle in vtx.daughterTrackingParticles():
                self.daughterIndexes.append(particle.index())
        self.nDaughters = len(self.daughterIndexes)
        vtx = match.parentVertex()
        self.parentVtx = [vtx.x(), vtx.y(), vtx.z()]
        self.parentVtxIndex = vtx.index()

        self.nReconstructed = match.nMatchedTracks()

        self.nPix, self.nPixLayers = MatchPixelHits(fake, match, real_criterion)

        last, fake_end, particle_end, self.end_class = FindEndOfTracking(fake, match, end_criterion)
        if last.isValidHit(): self.last_loc = [last.x(), last.y(), last.z()]
        else: self.last_loc = [0,0,0]
        if fake_end.isValidHit(): self.fake_end_loc = [fake_end.x(), fake_end.y(), fake_end.z()]
        else: self.fake_end_loc = [0,0,0]
        if particle_end.isValidHit(): self.particle_end_loc = [particle_end.x(), particle_end.y(), particle_end.z()]
        else: self.particle_end_loc = [0,0,0]

        self.last_detId = last.detId()
        self.fake_end_detId = fake_end.detId()
        self.particle_end_detId = particle_end.detId()

        self.particle_pdgId = match.pdgId()
        self.particle_pt = match.pt()

        if isinstance(last, GluedHit):
            self.last_str = last.monoHit().layerStr()
        else:
            self.last_str = last.layerStr()
        if isinstance(fake_end, GluedHit):
            self.fake_end_str = fake_end.monoHit().layerStr()
        else:
            self.fake_end_str = fake_end.layerStr()
        if isinstance(particle_end, GluedHit):
            self.particle_end_str = particle_end.monoHit().layerStr()
        else:
            self.particle_end_str = particle_end.layerStr()

    def Print(self):
        ''' Prints the match info. '''
        print("Match " + str(self.index) + ": nReconstructed " + str(self.nReconstructed) +\
              ", daughters: " + str(self.daughterIndexes) +\
              ", tracking failed in " + self.last_str + ", to " + self.particle_end_str + ", wrong hit in " + self.fake_end_str)


class EndInfo(object):
    ''' A storage class for end of tracking information for a matched fake. '''
    def __init__(self):
        self.last = []
        self.fake_end = []
        self.particle_end = []
        self.last_str = ""
        self.fake_end_str = ""
        self.particle_end_str = ""
        self.fake_class = -1
        self.end_class = -1
        self.last_detId = -1
        self.fake_end_detId = -1
        self.particle_end_detId = -1
        self.particle_pdgId = -1
        self.particle_pt = -1

class ResolutionData(object):
    ''' A storage class for matched fake track resolutions. '''
    def __init__(self):
        self.pt_rec = []
        self.pt_sim = []
        self.pt_delta = []
        self.eta = []
        self.Lambda = []
        self.cotTheta = []
        self.phi = []
        self.dxy = []
        self.dz = []
        self.fake_class = []

    def __iter__(self):
        for i in range(len(self.pt_rec)):
            yield ResolutionItem(self.pt_rec[i], self.pt_sim[i], self.pt_delta[i], self.eta[i], self.Lambda[i], self.cotTheta[i], self.phi[i], self.dxy[i], self.dz[i], self.fake_class[i])

class ResolutionItem(object):
    ''' A storage class for ResolutionData iteration. '''
    def __init__(self, pt_rec, pt_sim, pt_delta, eta, Lambda, cotTheta, phi, dxy, dz, fake_class):
        self.pt_rec = pt_rec
        self.pt_sim = pt_sim
        self.pt_delta = pt_delta
        self.eta = eta
        self.Lambda = Lambda
        self.cotTheta = cotTheta
        self.phi = phi
        self.dxy = dxy
        self.dz = dz
        self.fake_class = fake_class


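# Sketch: iterating a ResolutionData object filled by one of the analysis
# functions below (see Resolution_Analysis_Fakes):
#
#   res = Resolution_Analysis_Fakes(ntuple, nEvents=10)
#   for item in res:
#       print(item.pt_rec, item.pt_sim, classes[item.fake_class])
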
def ClassifyEventFakes(ntuple_file, nEvents = 100, return_fakes = False, real_criterion = ["consecutive", 3]):
    '''
    Classifies all fakes in the first nEvents events in ntuple_file (a TrackingNtuple object).
    Returns a dictionary with the class index as the key and the number of fakes in that class as the value.
    '''
    i = 0
    results = {class_item: 0 for class_item in classes}
    fake_list = []
    for event in ntuple_file:
        fakes = FindFakes(event)
        for fake in fakes:
            info = FakeInfo(fake, real_criterion)
            results[info.fake_class] += 1

            if return_fakes:
                fake_list.append(info)
        i += 1
        if i >= nEvents:
            break

    if return_fakes:
        return results, fake_list
    return results

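# Minimal usage sketch (the ntuple file name is hypothetical):
#
#   ntuple = TrackingNtuple("trackingNtuple.root")
#   results = ClassifyEventFakes(ntuple, nEvents=10)
#   for class_id, count in results.items():
#       print(classes[class_id] + ": " + str(count))
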
def Calculate_MaxMatchedHits(ntuple_file, nEvents = 100):
    '''
    Calculates the maximum number of shared hits for the fakes in the first nEvents events in ntuple_file (a TrackingNtuple object).
    Returns a list of these maximum numbers, one for each analysed fake track.
    '''
    i = 0
    results = []
    for event in ntuple_file:
        fakes = FindFakes(event)
        for fake in fakes:
            res_temp = 0
            for particle in FindAssociatedParticles(fake):
                shared, par, nAll = SharedHitFrac(fake, particle, 1)
                if shared > res_temp:
                    res_temp = shared
            results.append(res_temp)

        i += 1
        if i >= nEvents:
            break

    return results


def Calculate_MaxMatchedConsecutiveHits(ntuple_file, nEvents = 100, frac = False):
    '''
    Calculates the maximum number of CONSECUTIVE (with respect to the fake track) shared hits
    for the fakes in the first nEvents events in ntuple_file (a TrackingNtuple object).
    Returns a list of these maximum numbers, one for each analysed fake track.
    '''
    i = 0
    results = []
    for event in ntuple_file:
        fakes = FindFakes(event)
        for fake in fakes:
            res_temp = 0

            for particle in FindAssociatedParticles(fake):
                tolerance_mask = SharedHitsFromBeginning(particle, fake, fake.nValid())
                diff = [abs(tolerance_mask[j+1] - tolerance_mask[j]) for j in range(len(tolerance_mask)-1)]
                if frac:
                    if diff and 1.0*max(diff)/fake.nValid() > res_temp:
                        res_temp = 1.0*max(diff)/fake.nValid()
                    elif 1.0*tolerance_mask[0]/fake.nValid() > res_temp:
                        res_temp = 1.0*tolerance_mask[0]/fake.nValid()
                else:
                    if diff and max(diff) > res_temp:
                        res_temp = max(diff)
                    elif tolerance_mask[0] > res_temp:
                        res_temp = tolerance_mask[0]
            ''' Uncomment to debug
            if frac:
                if 1.0*res_temp/fake.nValid() > 0.75:
                    print(1.0*res_temp/fake.nValid())
                    print(res_temp)
                    print([hit.index() for hit in fake.hits()])
                    print([hit.index() for hit in particle.hits()])
                    print(tolerance_mask)
                    print(diff)
                    print(fake.nValid())
                    print(particle.nValid())

                    results.append(1.0*res_temp/fake.nValid())
            '''
            results.append(res_temp)

        i += 1
        if i >= nEvents:
            break

    return results

def Calculate_MaxMatchedHits_RealTracks(ntuple_file, nEvents = 100):
    '''
    The same as Calculate_MaxMatchedHits, but for true tracks.
    '''
    i = 0
    results = []
    for event in ntuple_file:
        for track in event.tracks():
            if track.nMatchedTrackingParticles() >= 1:
                res_temp = 0
                for info in track.matchedTrackingParticleInfos():
                    particle = info.trackingParticle()
                    shared, par, nAll = SharedHitFrac(track, particle, 1)
                    if shared > res_temp:
                        res_temp = shared
                results.append(res_temp)

        i += 1
        if i >= nEvents:
            break

    return results

def Calculate_MaxMatchedConsecutiveHits_RealTracks(ntuple_file, nEvents = 100):
    '''
    The same as Calculate_MaxMatchedConsecutiveHits, but for true tracks.
    '''
    i = 0
    results = []
    for event in ntuple_file:
        for track in event.tracks():
            if track.nMatchedTrackingParticles() >= 1:
                res_temp = 0
                for info in track.matchedTrackingParticleInfos():
                    particle = info.trackingParticle()
                    tolerance_mask = SharedHitsFromBeginning(particle, track, track.nValid())
                    diff = [abs(tolerance_mask[j+1] - tolerance_mask[j]) for j in range(len(tolerance_mask)-1)]
                    if diff and max(diff) > res_temp:
                        res_temp = max(diff)
                    elif tolerance_mask[0] > res_temp:
                        res_temp = tolerance_mask[0]

                results.append(res_temp)

        i += 1
        if i >= nEvents:
            break

    return results

def Calculate_MatchPixelHits(ntuple_file, nEvents = 100, fake_mask = []):
    '''
    Calculates the number of pixel hits and pixel layers in the first consecutive shared fake hits, starting from the matched three hits.
    Returns a list of the numbers of pixel hits (one per fake) in nPix_data, and a list of the numbers of pixel layers (one per fake) in nLay_data.
    '''
    i = 0
    nPix_data = []
    nLay_data = []
    for event in ntuple_file:
        fakes = FindFakes(event)
        for fake in fakes:
            info = FakeInfo(fake)
            if not fake_mask or info.fake_class in fake_mask:
                for match in info.matches:
                    nPix_data.append(match.nPix)
                    nLay_data.append(match.nPixLayers)

        i += 1
        if i >= nEvents:
            break

    return nPix_data, nLay_data

def Calculate_UnmatchedSharedHits(ntuple_file, nEvents = 100, fake_mask = [0, -1]):
    '''
    Calculates the number of hits shared between fake tracks and any TrackingParticles for the "no match" and "loose match" classes.
    Returns a list of the results, one for each fake.
    '''
    results = []
    i = 0
    for event in ntuple_file:
        fakes = FindFakes(event)
        for fake in fakes:
            info = FakeInfo(fake)
            if info.fake_class in fake_mask:
                results.append(info.unmatched_max_shared)
        i += 1
        if i >= nEvents:
            break

    return results

def Calculate_SharedHits(ntuple_file, nEvents = 100, mask = []):
    '''
    Calculates the maximum number of hits shared between each fake track and a single TrackingParticle.
    To include only some of the classes in the data, put the class indexes in the mask list.
    Returns lists of the shared-hit counts and the shared-hit fractions, one entry per fake.
    '''
    hits_shared = []
    hit_fractions = []
    i = 0
    for event in ntuple_file:
        fakes = FindFakes(event)
        for fake in fakes:
            info = FakeInfo(fake)
            if not mask or info.fake_class in mask:
                hits_shared.append(info.max_shared)
                hit_fractions.append(info.max_frac)
        i += 1
        if i >= nEvents:
            break

    return hits_shared, hit_fractions

def Calculate_SharedHits_RealTracks(ntuple_file, nEvents = 100):
    '''
    Calculates the maximum number of hits shared between each true track and an associated TrackingParticle.
    Returns lists of the shared-hit counts and the shared-hit fractions, one entry per true track.
    '''
    hits_shared = []
    hit_fractions = []
    i = 0
    for event in ntuple_file:
        for track in event.tracks():
            if track.nMatchedTrackingParticles() >= 1:
                info = FakeInfo(track)
                hits_shared.append(info.max_shared)
                hit_fractions.append(info.max_frac)
        i += 1
        if i >= nEvents:
            break

    return hits_shared, hit_fractions

def Calculate_IndludedDecayHitFractions(ntuple_file, nEvents = 100):
    '''
    Calculates the hit fractions (out of the fake hits) for the daughter and parent particles of a decay,
    to analyse the multiple-matches class "fake includes a decay interaction".
    Returns: daughter_frac = fraction of the fake hits shared with the daughter particle
             parent_frac = fraction of the fake hits shared with the parent particle
             total_frac = the sum of the daughter and parent fractions
    '''
    daughter_frac = []
    parent_frac = []
    total_frac = []
    i = 0
    for event in ntuple_file:
        fakes = FindFakes(event)
        for fake in fakes:
            info = FakeInfo(fake)
            if info.fake_class == 21:
                if len(info.decayLinks) >= 2: print("Double or more decays!!!11")
                for link in info.decayLinks:
                    par_ind = link[0]
                    dau_ind = link[1]
                    for match in info.matches:
                        if match.index == par_ind: par_hits = match.shared_hits
                        if match.index == dau_ind: dau_hits = match.shared_hits
                    fake_hits = info.nHits

                    parent_frac.append(1.0*par_hits/fake_hits)
                    daughter_frac.append(1.0*dau_hits/fake_hits)
                    total_frac.append(1.0*(dau_hits + par_hits)/fake_hits)
        i += 1
        if i >= nEvents:
            break

    return daughter_frac, parent_frac, total_frac

def Get_EndOfTrackingPoints(ntuple_file, nEvents = 100, mask = []):
    '''
    Performs the analysis and returns a list of EndInfos containing information on the end of tracking for each fake track.
    '''
    end_infos = []
    i = 0
    for event in ntuple_file:
        fakes = FindFakes(event)
        for fake in fakes:
            info = FakeInfo(fake)
            if not mask or info.fake_class in mask:
                for match in info.matches:
                    end = EndInfo()
                    end.last = match.last_loc
                    end.fake_end = match.fake_end_loc
                    end.particle_end = match.particle_end_loc
                    end.end_class = match.end_class
                    end.fake_class = info.fake_class
                    end.last_str = match.last_str
                    end.fake_end_str = match.fake_end_str
                    end.particle_end_str = match.particle_end_str
                    end.last_detId = match.last_detId
                    end.fake_end_detId = match.fake_end_detId
                    end.particle_end_detId = match.particle_end_detId
                    end.particle_pdgId = match.particle_pdgId
                    end.particle_pt = match.particle_pt

                    end_infos.append(end)
        i += 1
        if i >= nEvents:
            break

    return end_infos

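# Sketch: chaining the end-of-tracking analysis (the ntuple file name is
# hypothetical):
#
#   ntuple = TrackingNtuple("trackingNtuple.root")
#   ends = Get_EndOfTrackingPoints(ntuple, nEvents=10)
#   errors = Analyse_EOT_Error(ends, detId_mask="same")
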
def Get_EndOfTrackingPointsReal(ntuple_file, nEvents = 100):
    '''
    Performs the analysis and returns a list of EndInfos containing information on the end of tracking for each TRUE track.
    '''
    end_infos = []
    i = 0
    for event in ntuple_file:
        trues = FindTrues(event)
        for true in trues:
            for info in true.matchedTrackingParticleInfos():
                particle = info.trackingParticle()
                last, track_end, particle_end, end_class = FindEndOfTracking(true, particle)
                end = EndInfo()
                if last.isValidHit(): end.last = [last.x(), last.y(), last.z()]
                if track_end.isValidHit(): end.fake_end = [track_end.x(), track_end.y(), track_end.z()]
                if particle_end.isValidHit(): end.particle_end = [particle_end.x(), particle_end.y(), particle_end.z()]
                end.end_class = end_class
                end.fake_class = -1
                end.last_str = last.layerStr()
                end.fake_end_str = track_end.layerStr()
                end.particle_end_str = particle_end.layerStr()
                end.last_detId = last.detId()
                end.fake_end_detId = track_end.detId()
                end.particle_end_detId = particle_end.detId()
                end.particle_pdgId = particle.pdgId()
                end.particle_pt = particle.pt()

                end_infos.append(end)
        i += 1
        if i >= nEvents:
            break

    return end_infos

def Save_Normalisation_Coefficients(ntuple_file):
    '''
    Calculates the normalisation coefficients for the detector layers, i.e. the fraction of all
    TrackingParticle hits that fall into each layer.
    The results are saved in a dictionary with the layer indexes as the keys and the normalised coefficients as the values.
    The resulting dictionary is written to the file "normalisation_coefficients.dmp" with the pickle library.
    '''
    norm_c = copy(layer_data_tmp)

    print(sum([val for ind, val in norm_c.items()]))
    for event in ntuple_file:
        print(event.entry()+1)
        for particle in event.trackingParticles():
            for hit in particle.hits():
                if hit.isValidHit():
                    norm_c[layer_names_rev[hit.layerStr()]] += 1
    norm_sum = sum([val for ind, val in norm_c.items()])
    print(norm_sum)
    print(norm_c)
    for i, c in norm_c.items():
        norm_c[i] = 1.0*c/norm_sum

    print("normalisation_coefficients.dmp")
    print(norm_c)

    norm_file = open("normalisation_coefficients.dmp", 'wb')  # binary mode for pickle
    pickle.dump(norm_c, norm_file)
    norm_file.close()

def Get_Normalisation_Coefficients():
    '''
    Loads the detector layer normalisation coefficients from the file created by Save_Normalisation_Coefficients.
    '''
    norm_file = open("normalisation_coefficients.dmp", 'rb')  # binary mode for pickle
    coefficients = pickle.load(norm_file)
    norm_file.close()
    return coefficients

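# Usage sketch (assuming Save_Normalisation_Coefficients has already been run
# in the working directory):
#
#   coeffs = Get_Normalisation_Coefficients()
#   # e.g. weight a per-layer hit count by the layer's share of all TP hits:
#   weight = 1.0 / coeffs[layer_names_rev["BPix1"]]
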
def Resolution_Analysis_Fakes(ntuple_file, nEvents = 100, real_criterion = ["consecutive", 3]):
    '''
    Performs the analysis and returns a ResolutionData object containing the track parameter resolutions for fakes and their matched particles.
    '''
    res = ResolutionData()
    i = 0
    for event in ntuple_file:
        fakes = FindFakes(event)
        for fake in fakes:
            info = FakeInfo(fake, real_criterion)
            for match in info.matches:
                par = match.particle
                if par.pca_pt() == 0: continue

                res.pt_rec.append(fake.pt())
                res.pt_sim.append(par.pca_pt())
                res.pt_delta.append(fake.pt() - par.pca_pt())
                res.eta.append(fake.eta() - par.pca_eta())
                res.Lambda.append(getattr(fake, 'lambda')() - par.pca_lambda())
                res.cotTheta.append(fake.cotTheta() - par.pca_cotTheta())
                res.phi.append(fake.phi() - par.pca_phi())
                res.dxy.append(fake.dxy() - par.pca_dxy())
                res.dz.append(fake.dz() - par.pca_dz())

                res.fake_class.append(info.fake_class)

        i += 1
        if i >= nEvents:
            break
    return res

def Resolution_Analysis_Trues(ntuple_file, nEvents = 100):
    '''
    Performs the analysis and returns a ResolutionData object containing the track parameter resolutions for true tracks and their associated particles.
    '''
    res = ResolutionData()
    i = 0
    for event in ntuple_file:
        trues = FindTrues(event)
        for true in trues:
            for info in true.matchedTrackingParticleInfos():
                par = info.trackingParticle()
                if par.pca_pt() == 0: continue

                res.pt_rec.append(true.pt())
                res.pt_sim.append(par.pca_pt())
                res.pt_delta.append(true.pt() - par.pca_pt())
                res.eta.append(true.eta() - par.pca_eta())
                res.Lambda.append(getattr(true, 'lambda')() - par.pca_lambda())
                res.cotTheta.append(true.cotTheta() - par.pca_cotTheta())
                res.phi.append(true.phi() - par.pca_phi())
                res.dxy.append(true.dxy() - par.pca_dxy())
                res.dz.append(true.dz() - par.pca_dz())

                res.fake_class.append(-1)

        i += 1
        if i >= nEvents:
            break
    return res

def Calculate_AmountOfFakes(ntuple_file):
    ''' Calculates the number of fakes in the data. '''
    n = 0
    for event in ntuple_file:
        n += len(FindFakes(event))
    return n

def Analyse_EOT_Error(end_list, bpix3_mask = False, detId_mask = "all", pt_mask = False):
    '''
    Analyses the distance between the fake and particle hits following the last shared hit at the end of tracking.
    bpix3_mask filters out all fakes except those which have the last shared hit in the BPix3 layer and the following particle hit in the TIB1 layer.
    detId_mask filters according to whether the following fake and particle hits have the same or a different detector ID ("same", "different" or "all").
    pt_mask filters out the fakes with particle pt < 0.7.

    Returns a list of the errors, one for each fake.
    '''
    error = []
    for end in end_list:
        if end.fake_end != [0,0,0] and end.particle_end != [0,0,0]:
            if pt_mask and end.particle_pt < 0.7: continue

            if bpix3_mask and end.end_class == 0 and end.last_str == "BPix3" and end.particle_end_str == "TIB1":
                if detId_mask == "same" and end.fake_end_detId == end.particle_end_detId:
                    error.append(Distance(end.fake_end[0:2], end.particle_end[0:2]))
                elif detId_mask == "different" and end.fake_end_detId != end.particle_end_detId:
                    error.append(Distance(end.fake_end, end.particle_end))
                elif detId_mask == "all":
                    error.append(Distance(end.fake_end, end.particle_end))
                if error and error[-1] == 0.0: print("Error is 0.0?!")
                if error and error[-1] > 10: print(str(end.fake_end_detId) + " and " + str(end.particle_end_detId) + ": " + str(error[-1]) + " z: " + str(end.fake_end[2]) + " " + str(end.particle_end[2]))
            elif not bpix3_mask:
                if detId_mask == "same" and end.fake_end_detId == end.particle_end_detId:
                    error.append(Distance(end.fake_end[0:2], end.particle_end[0:2]))
                elif detId_mask == "different" and end.fake_end_detId != end.particle_end_detId:
                    error.append(Distance(end.fake_end, end.particle_end))
                elif detId_mask == "all":
                    error.append(Distance(end.fake_end, end.particle_end))

    if error:
        print(sum(error)/len(error))
    return error

def EndOfTrackingDetectorInfo(end_list, end_mask = [0], BPix3mask = False):
    '''
    Prints info on how many ends of tracking have the particle hit and the fake hit in the same detector module.
    BPix3mask filters out all fakes except those which have the last shared hit in the BPix3 layer and the following particle hit in the TIB1 layer.
    '''
    data = []
    for end in end_list:
        if (not end_mask or end.end_class in end_mask) and (not BPix3mask or (end.last_str == "BPix3" and end.particle_end_str == "TIB1")):
            if end.particle_end_detId == end.fake_end_detId:
                data.append(1)
            else:
                data.append(0)
    print("Same detector id between fake end and particle end: " + str(sum(data)))
    print("Different detector id: " + str(len(data)-sum(data)))

def Analyse_EOT_ParticleDoubleHit(end_list, layer = "BPix3", end_mask = [0,4]):
    '''
    Prints info on how many fakes have an end of tracking with both the last shared hit and the following particle hit in the same detector layer.
    By default this is analysed for the BPix3 layer and for fakes with end class 0 or 4 (4 is merged into 3 in some analyses).
    '''
    doubles = 0
    all_particles = 0
    for end in end_list:
        if (not end_mask or end.end_class in end_mask) and (layer in end.last_str):
            if end.last_str == end.particle_end_str:
                doubles += 1

            all_particles += 1

    print("In layer " + layer + " there are " + str(doubles) + " ends of tracking out of " + str(all_particles) + " (" + str(100.0*doubles/all_particles) + "%) which have a double hit in the EOT layer")

def TrackPt(ntuple_file, nEvents = 100):
    '''
    Returns two lists of track pt values: one for fake tracks and one for true tracks.
    '''
    fake_pts = []
    true_pts = []

    i = 0
    for event in ntuple_file:
        print("Event: " + str(i))
        for track in event.tracks():
            if track.nMatchedTrackingParticles() == 0:
                fake_pts.append(track.pt())
            else:
                true_pts.append(track.pt())

        i += 1
        if i >= nEvents:
            break

    return fake_pts, true_pts
1347