/*
 * Tests for loading graphs via the converted protobuf files.
 * For more info, see https://gitlab.cern.ch/mrieger/CMSSW-DNN.
 *
 * Author: Marcel Rieger
 */

#include <stdexcept>
#include <cppunit/extensions/HelperMacros.h>

#include "PhysicsTools/TensorFlow/interface/TensorFlow.h"

#include "testBaseCUDA.h"

class testGraphLoadingCUDA : public testBaseCUDA {
  CPPUNIT_TEST_SUITE(testGraphLoadingCUDA);
  CPPUNIT_TEST(test);
  CPPUNIT_TEST_SUITE_END();

public:
  std::string pyScript() const override;
  void test() override;
};

CPPUNIT_TEST_SUITE_REGISTRATION(testGraphLoadingCUDA);

std::string testGraphLoadingCUDA::pyScript() const { return "createconstantgraph.py"; }

void testGraphLoadingCUDA::test() {
  if (!cms::cudatest::testDevices())
    return;

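  // start from an empty set of services and make it the active registry for this scope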
  std::vector<edm::ParameterSet> psets;
  edm::ServiceToken serviceToken = edm::ServiceRegistry::createSet(psets);
  edm::ServiceRegistry::Operate operate(serviceToken);

  // Setup the CUDA Service
  edmplugin::PluginManager::configure(edmplugin::standard::config());

  std::string const config = R"_(import FWCore.ParameterSet.Config as cms
process = cms.Process('Test')
process.add_(cms.Service('ResourceInformationService'))
process.add_(cms.Service('CUDAService'))
)_";
  std::unique_ptr<edm::ParameterSet> params;
  edm::makeParameterSets(config, params);
  edm::ServiceToken tempToken(edm::ServiceRegistry::createServicesFromConfig(std::move(params)));
  edm::ServiceRegistry::Operate operate2(tempToken);
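  // retrieve the CUDA service and report whether it is enabled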
  edm::Service<CUDAInterface> cuda;
  std::cout << "CUDA service enabled: " << cuda->enabled() << std::endl;

  std::cout << "Testing CUDA backend" << std::endl;
  tensorflow::Backend backend = tensorflow::Backend::cuda;

  // load the graph
  std::string pbFile = dataPath_ + "/constantgraph.pb";
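  // reduce TensorFlow logging verbosity before touching the graph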
  tensorflow::setLogging();
  tensorflow::Options options{backend};
  tensorflow::GraphDef* graphDef = tensorflow::loadGraphDef(pbFile);
  CPPUNIT_ASSERT(graphDef != nullptr);

  // create a new session and add the graphDef
  tensorflow::Session* session = tensorflow::createSession(graphDef, options);
  CPPUNIT_ASSERT(session != nullptr);

  // check for exception
  CPPUNIT_ASSERT_THROW(tensorflow::createSession(nullptr, options), cms::Exception);

  // example evaluation
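  // fill a 1x10 input tensor with the values 0..9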
  tensorflow::Tensor input(tensorflow::DT_FLOAT, {1, 10});
  float* d = input.flat<float>().data();
  for (size_t i = 0; i < 10; i++, d++) {
    *d = float(i);
  }
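  // scalar tensor fed to the graph as "scale"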
  tensorflow::Tensor scale(tensorflow::DT_FLOAT, {});
  scale.scalar<float>()() = 1.0;

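  // run the session: feed the "input" and "scale" tensors and fetch the "output" node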
  std::vector<tensorflow::Tensor> outputs;
  tensorflow::Status status = session->Run({{"input", input}, {"scale", scale}}, {"output"}, {}, &outputs);
  if (!status.ok()) {
    std::cout << status.ToString() << std::endl;
    CPPUNIT_ASSERT(false);
  }

  // check the output
  CPPUNIT_ASSERT(outputs.size() == 1);
  std::cout << outputs[0].DebugString() << std::endl;
  CPPUNIT_ASSERT(outputs[0].matrix<float>()(0, 0) == 46.);

  // run again using the convenience helper
  outputs.clear();
  tensorflow::run(session, {{"input", input}, {"scale", scale}}, {"output"}, &outputs);
  CPPUNIT_ASSERT(outputs.size() == 1);
  std::cout << outputs[0].DebugString() << std::endl;
  CPPUNIT_ASSERT(outputs[0].matrix<float>()(0, 0) == 46.);

  // check for exception
  CPPUNIT_ASSERT_THROW(tensorflow::run(session, {{"foo", input}}, {"output"}, &outputs), cms::Exception);

  // cleanup
  CPPUNIT_ASSERT(tensorflow::closeSession(session));
  CPPUNIT_ASSERT(session == nullptr);
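  // the graphDef is not owned by the session, so delete it explicitly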
  delete graphDef;
}