// -*- C++ -*-
//
// Package:    TestCompareDDSpecsDumpFiles
// Class:      TestCompareDDSpecsDumpFiles
//
/**\class TestCompareDDSpecsDumpFiles TestCompareDDSpecsDumpFiles.cc test/TestCompareDDSpecsDumpFiles/src/TestCompareDDSpecsDumpFiles.cc

 Description: Compares two SpecPars dump files

 Implementation:
     Read two files with a certain format and compare each line.
**/
//
// Original Author:  Ianna Osborne
//         Created:  Thu Dec 2, 2010
//
//
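// Each dump line is assumed to look roughly like (illustrative only):
//   <name> <path or selection>|par1=val1|par2=val2|...
// The first whitespace-delimited word of the leading '|'-separated token is
// compared directly between the two files; the remaining tokens are cleaned,
// sorted, and compared, so a pure reordering of parameters is not reported
// as a mismatch.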

#include <algorithm>
#include <fstream>
#include <list>
#include <string>

#include <boost/tokenizer.hpp>
#include <boost/algorithm/string.hpp>

#include "FWCore/Framework/interface/one/EDAnalyzer.h"
#include "FWCore/Framework/interface/MakerMacros.h"
#include "FWCore/MessageLogger/interface/MessageLogger.h"
#include "FWCore/ParameterSet/interface/ParameterSet.h"
#include "FWCore/Utilities/interface/Exception.h"

class TestCompareDDSpecsDumpFiles : public edm::one::EDAnalyzer<> {
public:
  explicit TestCompareDDSpecsDumpFiles(const edm::ParameterSet&);
  ~TestCompareDDSpecsDumpFiles(void) override;

  void beginJob() override {}
  void analyze(edm::Event const&, edm::EventSetup const&) override;
  void endJob() override {}

private:
  typedef boost::tokenizer<boost::char_separator<char> > tokenizer;

  std::string clean(const std::string& in);
  std::string merge(const std::list<std::string>& list);
  std::string fillAndSort(const tokenizer::iterator& start,
                          const tokenizer::iterator& end,
                          std::list<std::string>& list);
  std::string preFill(const tokenizer::iterator& it, std::list<std::string>& list);

  std::string fname1_;
  std::string fname2_;
  double tol_;
  std::ifstream f1_;
  std::ifstream f2_;
};

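// Configuration: 'dumpFile1' and 'dumpFile2' name the two dump files to
// compare; 'tolerance' is an untracked parameter that is stored but not
// currently used in the comparison.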
TestCompareDDSpecsDumpFiles::TestCompareDDSpecsDumpFiles(const edm::ParameterSet& ps)
    : fname1_(ps.getParameter<std::string>("dumpFile1")),
      fname2_(ps.getParameter<std::string>("dumpFile2")),
      tol_(ps.getUntrackedParameter<double>("tolerance", 0.000001)),
      f1_(fname1_.c_str(), std::ios::in),
      f2_(fname2_.c_str(), std::ios::in) {
  if (!f1_ || !f2_) {
    throw cms::Exception("MissingFileDDTest") << fname1_ << " and/or " << fname2_ << " do not exist.";
  }
}

TestCompareDDSpecsDumpFiles::~TestCompareDDSpecsDumpFiles(void) {
  f1_.close();
  f2_.close();
}

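// Normalize a single token for comparison: trim surrounding whitespace and,
// if the token carries a '/'-path, drop the path-like portion.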
std::string TestCompareDDSpecsDumpFiles::clean(const std::string& in) {
  std::string str(in);
  boost::trim(str);
  size_t found1 = str.find('/');
  if (found1 != std::string::npos) {
    size_t found2 = str.find(' ');
    if (found2 != std::string::npos) {
      str.erase(found1, found2);
    }
    boost::trim(str);
  }

  return str;
}

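// Split the leading '|'-separated token of a line: return its first
// whitespace-delimited word and append the cleaned remainder to 'list'.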
std::string TestCompareDDSpecsDumpFiles::preFill(const tokenizer::iterator& it, std::list<std::string>& list) {
  boost::char_separator<char> space(" ");

  tokenizer firstString(*it, space);
  tokenizer::iterator fit = firstString.begin();
  std::string str(*it);
  str.erase(0, (*fit).size());
  boost::trim(str);
  list.emplace_back(clean(str));

  return *fit;
}

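// Concatenate the cleaned tokens into a single string, re-inserting '|'
// after each token.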
std::string TestCompareDDSpecsDumpFiles::merge(const std::list<std::string>& list) {
  std::string str;
  for (const auto& it : list) {
    str.append(it);
    str.append("|");
  }

  return str;
}

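// Clean the tokens in [start, end), sort them, and return the merged
// '|'-separated string, so that token order does not affect the comparison.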
std::string TestCompareDDSpecsDumpFiles::fillAndSort(const tokenizer::iterator& start,
                                                     const tokenizer::iterator& end,
                                                     std::list<std::string>& list) {
  for (tokenizer::iterator it = start; it != end; ++it) {
    list.emplace_back(clean(*it));
  }
  list.sort();

  return merge(list);
}

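// Read both dump files line by line; lines whose sorted, cleaned tokens still
// differ are reported, together with the tokens unique to either file.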
void TestCompareDDSpecsDumpFiles::analyze(const edm::Event&, const edm::EventSetup&) {
  std::string l1, l2;
  boost::char_separator<char> sep("|");

  int line = 0;

  while (!f1_.eof() && !f2_.eof()) {
    getline(f1_, l1);
    getline(f2_, l2);

    if (l1.empty() && l2.empty())
      continue;

    tokenizer tokens1(l1, sep);
    std::list<std::string> items1;
    tokenizer::iterator tok_iter1 = tokens1.begin();
    std::string firstStr1 = preFill(tok_iter1, items1);
    ++tok_iter1;

    tokenizer tokens2(l2, sep);
    std::list<std::string> items2;
    tokenizer::iterator tok_iter2 = tokens2.begin();
    std::string firstStr2 = preFill(tok_iter2, items2);
    ++tok_iter2;

    edm::LogInfo("TestCompareDDSpecsDumpFiles")
        << "#" << ++line << " Comparing " << firstStr1 << " " << firstStr1.size() << " with " << firstStr2 << " "
        << firstStr2.size() << " : ";

    if (firstStr1 != firstStr2) {
      edm::LogError("TestCompareDDSpecsDumpFiles") << ">>>>>> Cannot compare lines!!!!"
                                                   << "\n";
    }

    // If the lines do not match, they may need sorting.
    if (l1 != l2) {
      // The first cleaned token is already in the list.
      std::string sl1 = fillAndSort(tok_iter1, tokens1.end(), items1);
      std::string sl2 = fillAndSort(tok_iter2, tokens2.end(), items2);

      // Compare sorted lines.
      if (sl1 != sl2) {
        edm::LogError("TestCompareDDSpecsDumpFiles") << "#" << line << " Lines don't match.\n"
                                                     << "[" << l1 << "]\n"
                                                     << "[" << l2 << "]\n";

        // Remove common tokens.
        tokenizer sl1tokens(sl1, sep);
        for (tokenizer::iterator it = sl1tokens.begin(); it != sl1tokens.end(); ++it) {
          std::string str(*it);
          str.append("|");
          size_t found = sl2.find(str);
          if (found == std::string::npos) {
            str.erase(std::remove(str.begin(), str.end(), '|'), str.end());
            edm::LogError("TestCompareDDSpecsDumpFiles") << "<<<<<===== " << str << "\n";
          } else {
            sl2.erase(found, (*it).size());
          }
        }
        // Print what's left.
        tokenizer sl2tokens(sl2, sep);
        for (tokenizer::iterator it = sl2tokens.begin(); it != sl2tokens.end(); ++it)
          edm::LogError("TestCompareDDSpecsDumpFiles") << "=====>>>>> " << *it << "\n";
      } else
        edm::LogInfo("TestCompareDDSpecsDumpFiles") << " OK."
                                                    << "\n";
    } else
      edm::LogInfo("TestCompareDDSpecsDumpFiles") << " OK."
                                                  << "\n";
  }
}

DEFINE_FWK_MODULE(TestCompareDDSpecsDumpFiles);