#ifndef RecoJets_FFTJetAlgorithms_clusteringTreeConverters_h
#define RecoJets_FFTJetAlgorithms_clusteringTreeConverters_h

#include <cassert>
#include <cfloat>
#include <vector>

#include "fftjet/Peak.hh"
#include "fftjet/AbsClusteringTree.hh"
#include "fftjet/SparseClusteringTree.hh"

#include "FWCore/Utilities/interface/Exception.h"
#include "DataFormats/JetReco/interface/PattRecoTree.h"
#include "DataFormats/JetReco/interface/PattRecoPeak.h"

namespace fftjetcms {
  // Convert an FFTJet sparse peak clustering tree into its storable
  // PattRecoTree representation. Level scales are stored only when
  // "writeOutScaleInfo" is true.
  template <class Real>
  void sparsePeakTreeToStorable(const fftjet::SparseClusteringTree<fftjet::Peak, long>& in,
                                bool writeOutScaleInfo,
                                reco::PattRecoTree<Real, reco::PattRecoPeak<Real> >* out);

  // Restore a sparse peak clustering tree from its storable representation.
  // If the scales were not stored with the tree, they must be supplied via
  // "scaleSetIfNotAdaptive"; a non-zero "completeEventScale" is appended
  // as the last scale.
  template <class Real>
  void sparsePeakTreeFromStorable(const reco::PattRecoTree<Real, reco::PattRecoPeak<Real> >& in,
                                  const std::vector<double>* scaleSetIfNotAdaptive,
                                  double completeEventScale,
                                  fftjet::SparseClusteringTree<fftjet::Peak, long>* out);

  // Convert an FFTJet dense (level-by-level) peak clustering tree into its
  // storable PattRecoTree representation.
  template <class Real>
  void densePeakTreeToStorable(const fftjet::AbsClusteringTree<fftjet::Peak, long>& in,
                               bool writeOutScaleInfo,
                               reco::PattRecoTree<Real, reco::PattRecoPeak<Real> >* out);

  // Restore a dense peak clustering tree from its storable representation.
  // Scale handling follows the same convention as for the sparse tree.
  template <class Real>
  void densePeakTreeFromStorable(const reco::PattRecoTree<Real, reco::PattRecoPeak<Real> >& in,
                                 const std::vector<double>* scaleSetIfNotAdaptive,
                                 double completeEventScale,
                                 fftjet::AbsClusteringTree<fftjet::Peak, long>* out);
}  // namespace fftjetcms
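
// A minimal usage sketch (not part of the original interface), illustrating a
// round trip of a sparse peak tree through the storable format. The names
// "sparseTree", "storedTree" and "restoredTree" are hypothetical, and the
// tree-filling step is only indicated:
//
//   fftjet::SparseClusteringTree<fftjet::Peak, long> sparseTree;
//   // ... fill sparseTree from an FFTJet pattern recognition pass ...
//
//   reco::PattRecoTree<float, reco::PattRecoPeak<float> > storedTree;
//   fftjetcms::sparsePeakTreeToStorable(sparseTree, true, &storedTree);
//
//   // Scales were written out above, so no external scale set is needed and
//   // the complete event scale can be left at zero on restoration:
//   fftjet::SparseClusteringTree<fftjet::Peak, long> restoredTree;
//   fftjetcms::sparsePeakTreeFromStorable(storedTree, nullptr, 0.0, &restoredTree);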

namespace fftjetcms {
  template <class Real>
  void sparsePeakTreeToStorable(const fftjet::SparseClusteringTree<fftjet::Peak, long>& sparseTree,
                                const bool writeOutScaleInfo,
                                reco::PattRecoTree<Real, reco::PattRecoPeak<Real> >* tree) {
    typedef fftjet::SparseClusteringTree<fftjet::Peak, long> SparseTree;
    typedef reco::PattRecoPeak<Real> StoredPeak;
    typedef reco::PattRecoNode<StoredPeak> StoredNode;
    typedef reco::PattRecoTree<Real, StoredPeak> StoredTree;

    assert(tree);

    tree->clear();
    tree->setSparse(true);

    const unsigned nNodes = sparseTree.size();
    double hessian[3] = {0., 0., 0.};

    // Skip node 0: it is the meaningless top node of the sparse tree
    tree->reserveNodes(nNodes - 1);
    for (unsigned i = 1; i < nNodes; ++i) {
      const SparseTree::Node& node(sparseTree.getNode(i));
      const fftjet::Peak& peak(node.getCluster());
      peak.hessian(hessian);
      StoredNode sn(StoredPeak(peak.eta(),
                               peak.phi(),
                               peak.magnitude(),
                               hessian,
                               peak.driftSpeed(),
                               peak.magSpeed(),
                               peak.lifetime(),
                               peak.scale(),
                               peak.nearestNeighborDistance(),
                               peak.clusterRadius(),
                               peak.clusterSeparation(),
                               peak.splitTime(),
                               peak.mergeTime()),
                    node.originalLevel(),
                    node.mask(),
                    node.parent());
      tree->addNode(sn);
    }

    // Optionally store the level scales, skipping the top-level scale
    // which corresponds to the meaningless top node
    if (writeOutScaleInfo) {
      const unsigned nScales = sparseTree.nLevels();
      tree->reserveScales(nScales - 1);
      for (unsigned i = 1; i < nScales; ++i)
        tree->addScale(sparseTree.getScale(i));
    }
  }

  template <class Real>
  void sparsePeakTreeFromStorable(const reco::PattRecoTree<Real, reco::PattRecoPeak<Real> >& in,
                                  const std::vector<double>* scaleSetIfNotAdaptive,
                                  const double completeEventScale,
                                  fftjet::SparseClusteringTree<fftjet::Peak, long>* out) {
    typedef fftjet::SparseClusteringTree<fftjet::Peak, long> SparseTree;
    typedef reco::PattRecoPeak<Real> StoredPeak;
    typedef reco::PattRecoNode<StoredPeak> StoredNode;
    typedef reco::PattRecoTree<Real, StoredPeak> StoredTree;

    if (!in.isSparse())
      throw cms::Exception("FFTJetBadConfig") << "can't restore sparse clustering tree"
                                              << " from densely stored record" << std::endl;

    assert(out);
    out->clear();

    const std::vector<StoredNode>& nodes(in.getNodes());
    const unsigned n = nodes.size();
    out->reserveNodes(n + 1U);

    double hessian[3] = {0., 0., 0.};

    for (unsigned i = 0; i < n; ++i) {
      const StoredNode& snode(nodes[i]);
      const StoredPeak& p(snode.getCluster());
      p.hessian(hessian);
      fftjet::Peak peak(p.eta(),
                        p.phi(),
                        p.magnitude(),
                        hessian,
                        p.driftSpeed(),
                        p.magSpeed(),
                        p.lifetime(),
                        p.scale(),
                        p.nearestNeighborDistance(),
                        1.0,
                        0.0,
                        0.0,
                        p.clusterRadius(),
                        p.clusterSeparation());
      peak.setSplitTime(p.splitTime());
      peak.setMergeTime(p.mergeTime());
      const SparseTree::Node node(peak, snode.originalLevel(), snode.mask());
      out->addNode(node, snode.parent());
    }

    // Restore the scales. The top-level scale (DBL_MAX) corresponds to the
    // top node and is never stored, so it is always added back first.
    const std::vector<Real>& storedScales(in.getScales());
    if (!storedScales.empty()) {
      const unsigned nsc = storedScales.size();
      out->reserveScales(nsc + 1U);
      out->addScale(DBL_MAX);
      const Real* scales = &storedScales[0];
      for (unsigned i = 0; i < nsc; ++i)
        out->addScale(scales[i]);
    } else if (scaleSetIfNotAdaptive && !scaleSetIfNotAdaptive->empty()) {
      // Scales were not stored with the tree: take them from the externally
      // provided scale set, appending the complete event scale if configured
      const unsigned nsc = scaleSetIfNotAdaptive->size();
      if (completeEventScale)
        out->reserveScales(nsc + 2U);
      else
        out->reserveScales(nsc + 1U);
      out->addScale(DBL_MAX);
      const double* scales = &(*scaleSetIfNotAdaptive)[0];
      for (unsigned i = 0; i < nsc; ++i)
        out->addScale(scales[i]);
      if (completeEventScale)
        out->addScale(completeEventScale);
    } else {
      throw cms::Exception("FFTJetBadConfig") << "can't restore sparse clustering tree scales" << std::endl;
    }
  }

  template <class Real>
  void densePeakTreeToStorable(const fftjet::AbsClusteringTree<fftjet::Peak, long>& in,
                               bool writeOutScaleInfo,
                               reco::PattRecoTree<Real, reco::PattRecoPeak<Real> >* out) {
    typedef fftjet::AbsClusteringTree<fftjet::Peak, long> DenseTree;
    typedef reco::PattRecoPeak<Real> StoredPeak;
    typedef reco::PattRecoNode<StoredPeak> StoredNode;
    typedef reco::PattRecoTree<Real, StoredPeak> StoredTree;

    assert(out);
    out->clear();
    out->setSparse(false);

    const unsigned nLevels = in.nLevels();
    double hessian[3] = {0., 0., 0.};

    // Skip level 0: it holds only the meaningless top-level cluster
    out->reserveNodes(in.nClusters() - 1);

    for (unsigned i = 1; i < nLevels; ++i) {
      const unsigned int nclus = in.nClusters(i);
      DenseTree::NodeId id(i, 0);
      for (; id.second < nclus; ++id.second) {
        const fftjet::Peak& peak(in.getCluster(id));
        peak.hessian(hessian);
        StoredNode sn(StoredPeak(peak.eta(),
                                 peak.phi(),
                                 peak.magnitude(),
                                 hessian,
                                 peak.driftSpeed(),
                                 peak.magSpeed(),
                                 peak.lifetime(),
                                 peak.scale(),
                                 peak.nearestNeighborDistance(),
                                 peak.clusterRadius(),
                                 peak.clusterSeparation(),
                                 peak.splitTime(),
                                 peak.mergeTime()),
                      i,
                      0,
                      0);
        out->addNode(sn);
      }
    }

    // Optionally store the level scales, skipping the top-level scale
    if (writeOutScaleInfo) {
      const unsigned nScales = in.nLevels();
      out->reserveScales(nScales - 1);
      for (unsigned i = 1; i < nScales; ++i)
        out->addScale(in.getScale(i));
    }
  }

  template <class Real>
  void densePeakTreeFromStorable(const reco::PattRecoTree<Real, reco::PattRecoPeak<Real> >& in,
                                 const std::vector<double>* scaleSetIfNotAdaptive,
                                 const double completeEventScale,
                                 fftjet::AbsClusteringTree<fftjet::Peak, long>* out) {
    typedef fftjet::AbsClusteringTree<fftjet::Peak, long> DenseTree;
    typedef reco::PattRecoPeak<Real> StoredPeak;
    typedef reco::PattRecoNode<StoredPeak> StoredNode;
    typedef reco::PattRecoTree<Real, StoredPeak> StoredTree;

    if (in.isSparse())
      throw cms::Exception("FFTJetBadConfig") << "can't restore dense clustering tree"
                                              << " from sparsely stored record" << std::endl;

    assert(out);
    out->clear();

    const std::vector<StoredNode>& nodes(in.getNodes());
    const unsigned n = nodes.size();
    double hessian[3] = {0., 0., 0.};

    // Determine where the level scales come from: either they were stored
    // with the tree, or they must be provided externally (in which case a
    // non-zero complete event scale counts as one more level)
    const std::vector<Real>& scales(in.getScales());
    unsigned int scnum = 0;
    std::vector<fftjet::Peak> clusters;
    const unsigned scsize1 = scales.size();
    unsigned scsize2 = scaleSetIfNotAdaptive ? scaleSetIfNotAdaptive->size() : 0;
    if (scsize2 && completeEventScale)
      ++scsize2;
    const unsigned scsize = (scsize1 == 0 ? scsize2 : scsize1);

    if (scsize == 0)
      throw cms::Exception("FFTJetBadConfig")
          << " No scales passed to the function densePeakTreeFromStorable()" << std::endl;

    const double* sc_not_ad = scsize2 ? &(*scaleSetIfNotAdaptive)[0] : nullptr;

    // Rebuild the tree level by level: accumulate the peaks belonging to one
    // level and insert them with the corresponding scale whenever the stored
    // level number changes
    unsigned templevel = n ? nodes[0].originalLevel() : 1;
    for (unsigned i = 0; i < n; ++i) {
      const StoredNode& snode(nodes[i]);
      const StoredPeak& p(snode.getCluster());
      p.hessian(hessian);

      const unsigned levelNumber = snode.originalLevel();

      if (templevel != levelNumber) {
        if (scnum >= scsize)
          throw cms::Exception("FFTJetBadConfig") << "bad scales, please check the scales" << std::endl;
        const double scale = ((scsize1 == 0) ? sc_not_ad[scnum] : scales[scnum]);
        out->insert(scale, clusters, 0L);
        clusters.clear();
        templevel = levelNumber;
        ++scnum;
      }

      fftjet::Peak apeak(p.eta(),
                         p.phi(),
                         p.magnitude(),
                         hessian,
                         p.driftSpeed(),
                         p.magSpeed(),
                         p.lifetime(),
                         p.scale(),
                         p.nearestNeighborDistance(),
                         1.0,
                         0.0,
                         0.0,
                         p.clusterRadius(),
                         p.clusterSeparation());
      apeak.setSplitTime(p.splitTime());
      apeak.setMergeTime(p.mergeTime());
      clusters.push_back(apeak);

      if (i == (n - 1) && levelNumber != scsize)
        throw cms::Exception("FFTJetBadConfig") << "bad scales, please check the scales" << std::endl;
    }

    // Insert the last (deepest) level of clusters
    const double scale = scsize1 ? scales[scnum] : completeEventScale ? completeEventScale : sc_not_ad[scnum];
    out->insert(scale, clusters, 0L);
  }
}  // namespace fftjetcms

#endif  // RecoJets_FFTJetAlgorithms_clusteringTreeConverters_h