#include <stdexcept>
#include <cppunit/extensions/HelperMacros.h>

#include "PhysicsTools/TensorFlow/interface/TensorFlow.h"

#include "testBaseCUDA.h"

class testMetaGraphLoadingCUDA : public testBaseCUDA {
  CPPUNIT_TEST_SUITE(testMetaGraphLoadingCUDA);
  CPPUNIT_TEST(test);
  CPPUNIT_TEST_SUITE_END();

public:
  std::string pyScript() const override;
  void test() override;
};

CPPUNIT_TEST_SUITE_REGISTRATION(testMetaGraphLoadingCUDA);

std::string testMetaGraphLoadingCUDA::pyScript() const { return "creategraph.py"; }

void testMetaGraphLoadingCUDA::test() {
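  // run the test only if a CUDA device is available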
  if (!cms::cudatest::testDevices())
    return;

  std::vector<edm::ParameterSet> psets;
  edm::ServiceToken serviceToken = edm::ServiceRegistry::createSet(psets);
  edm::ServiceRegistry::Operate operate(serviceToken);
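  // set up the CUDA service through the plugin manager and a minimal process configuration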
  edmplugin::PluginManager::configure(edmplugin::standard::config());

  std::string const config = R"_(import FWCore.ParameterSet.Config as cms
process = cms.Process('Test')
process.add_(cms.Service('ResourceInformationService'))
process.add_(cms.Service('CUDAService'))
)_";
  std::unique_ptr<edm::ParameterSet> params;
  edm::makeParameterSets(config, params);
  edm::ServiceToken tempToken(edm::ServiceRegistry::createServicesFromConfig(std::move(params)));
  edm::ServiceRegistry::Operate operate2(tempToken);
  edm::Service<CUDAInterface> cuda;
  std::cout << "CUDA service enabled: " << cuda->enabled() << std::endl;

  std::cout << "Testing CUDA backend" << std::endl;
  tensorflow::Backend backend = tensorflow::Backend::cuda;
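  // load the meta graph from the export directory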
  std::string exportDir = dataPath_ + "/simplegraph";
  tensorflow::setLogging();
  tensorflow::Options options{backend};
  tensorflow::MetaGraphDef* metaGraphDef = tensorflow::loadMetaGraphDef(exportDir);
  CPPUNIT_ASSERT(metaGraphDef != nullptr);
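  // create a new, empty session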
  tensorflow::Session* session1 = tensorflow::createSession(options);
  CPPUNIT_ASSERT(session1 != nullptr);
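  // create a session that loads the exported graph via the meta graph definition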
  tensorflow::Session* session2 = tensorflow::createSession(metaGraphDef, exportDir, options);
  CPPUNIT_ASSERT(session2 != nullptr);
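  // createSession must throw for a null meta graph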
  CPPUNIT_ASSERT_THROW(tensorflow::createSession(nullptr, exportDir, options), cms::Exception);
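  // prepare example inputs: a 1x10 tensor filled with 0..9 and a scalar scale of 1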
  tensorflow::Tensor input(tensorflow::DT_FLOAT, {1, 10});
  float* d = input.flat<float>().data();
  for (size_t i = 0; i < 10; i++, d++) {
    *d = float(i);
  }
  tensorflow::Tensor scale(tensorflow::DT_FLOAT, {});
  scale.scalar<float>()() = 1.0;

  std::vector<tensorflow::Tensor> outputs;
  tensorflow::Status status = session2->Run({{"input", input}, {"scale", scale}}, {"output"}, {}, &outputs);
  if (!status.ok()) {
    std::cout << status.ToString() << std::endl;
    CPPUNIT_ASSERT(false);
  }
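  // check that a single output tensor with the expected value is returned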
  CPPUNIT_ASSERT(outputs.size() == 1);
  std::cout << outputs[0].DebugString() << std::endl;
  CPPUNIT_ASSERT(outputs[0].matrix<float>()(0, 0) == 46.);
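  // repeat the evaluation using the tensorflow::run helper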
  outputs.clear();
  tensorflow::run(session2, {{"input", input}, {"scale", scale}}, {"output"}, &outputs);
  CPPUNIT_ASSERT(outputs.size() == 1);
  std::cout << outputs[0].DebugString() << std::endl;
  CPPUNIT_ASSERT(outputs[0].matrix<float>()(0, 0) == 46.);
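  // an unknown input name must lead to a cms::Exception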
  CPPUNIT_ASSERT_THROW(tensorflow::run(session2, {{"foo", input}}, {"output"}, &outputs), cms::Exception);
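  // close both sessions and delete the meta graph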
  CPPUNIT_ASSERT(tensorflow::closeSession(session1));
  CPPUNIT_ASSERT(tensorflow::closeSession(session2));
  delete metaGraphDef;
}