Skip to content

Instantly share code, notes, and snippets.

@dkurt
Last active October 14, 2020 07:51
Show Gist options
  • Star 0 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
  • Save dkurt/b14cc4cb802bbcf006284b363f037690 to your computer and use it in GitHub Desktop.
# Minimal build script for the OpenVINO weight-swap benchmark (main.cpp).
cmake_minimum_required(VERSION 3.4.3)
project(sample CXX)

# OpenVINO Inference Engine and its nGraph core.
find_package(InferenceEngine REQUIRED)
find_package(ngraph REQUIRED)
# Portable threading target instead of a hard-coded "pthread" link flag
# (works on platforms where the flag is spelled differently or not needed).
find_package(Threads REQUIRED)

add_executable(${CMAKE_PROJECT_NAME} main.cpp)
target_compile_features(${CMAKE_PROJECT_NAME} PRIVATE cxx_range_for)
target_link_libraries(${CMAKE_PROJECT_NAME}
    ${InferenceEngine_LIBRARIES}
    ${NGRAPH_LIBRARIES}
    Threads::Threads
)
#include <sys/time.h>

#include <chrono>
#include <iostream>

#include <inference_engine.hpp>
#include <ngraph/ngraph.hpp>
using namespace InferenceEngine;
// Reads a Const-weights IR against an alternate .bin file and exposes every
// non-scalar f32 Constant node as an input Blob keyed by the node's friendly
// name. feed() binds those blobs to an InferRequest so a network built with
// dynamic (Parameter) weights can be re-weighted without reloading it.
//
// NOTE(review): the blobs wrap the Constant data without copying, so a
// WeightsLoader must outlive any InferRequest it has fed.
class WeightsLoader {
public:
// ie:      Core used to parse the IR
// weights: path to the .bin file holding the replacement constant data
// xml:     path to the IR with Const weights (regular .xml file);
//          defaults to the path this benchmark uses, so existing callers
//          are unaffected
WeightsLoader(Core& ie, const std::string& weights,
              const std::string& xml = "../graph.xml") {
net = ie.ReadNetwork(xml, weights);
// Walk the graph and wrap each eligible Constant in a zero-copy blob.
for (const auto& node : net.getFunction()->get_ops()) {
// if (!node->is_constant()) // OpenVINO 2020.4
if (!ngraph::op::is_constant(node)) // >= OpenVINO 2021.1
continue;
if (node->get_output_element_type(0) != ngraph::element::f32)
continue;
auto c = std::dynamic_pointer_cast<ngraph::op::Constant>(node);
if (!c)
continue; // defensive: is_constant() passed, cast should not fail
ngraph::Shape shape = node->get_output_shape(0);
if (shape.empty())
continue; // skip scalars: TensorDesc needs a non-empty shape
// Constant::get_data_ptr<T>() returns const T*; make_shared_blob needs a
// mutable pointer, hence the explicit (greppable) const_cast. The data
// itself is never written through the blob here.
float* data = const_cast<float*>(c->get_data_ptr<float>());
auto blob = make_shared_blob<float>(TensorDesc(Precision::FP32, shape, Layout::ANY), data);
parameters[node->get_friendly_name()] = blob;
}
}
// Bind every collected weight blob to the request's inputs by name.
void feed(InferRequest& req) {
req.SetInput(parameters);
}
private:
BlobMap parameters; // friendly name -> zero-copy weight blob
CNNNetwork net;     // keeps the Constants (and their data) alive
};
// Runs N inference passes on the request and prints the average wall-clock
// latency per pass in milliseconds. Uses steady_clock (monotonic) rather
// than gettimeofday, which can jump if the system clock is adjusted.
void benchmark(InferRequest& req) {
static const int N = 1;
// Warmup pass (excluded from timing) — left disabled as in the original
// so the cold first inference is what gets measured; enable to measure
// steady-state latency instead.
// req.Infer();
const auto start = std::chrono::steady_clock::now();
for (int i = 0; i < N; ++i) {
req.Infer();
}
const auto end = std::chrono::steady_clock::now();
const double ms = std::chrono::duration<double, std::milli>(end - start).count() / N;
std::cout << "Infer time: " << ms << "ms" << std::endl;
}
// Compares two ways of supplying network weights on CPU:
//   1) a regular IR whose weights are baked in as Constants, and
//   2) a "dynamic" IR whose weights arrive as runtime inputs, timed both
//      for weight-load/bind cost and for inference latency.
// Monotonic steady_clock is used for the load-time measurement (matching
// benchmark()), since gettimeofday wall time can jump under clock changes.
int main(int argc, char** argv) {
Core ie;
{
// Benchmark the IR with static (Const) weights.
CNNNetwork net = ie.ReadNetwork("../graph.xml", "../graph_0.bin");
ExecutableNetwork execNet = ie.LoadNetwork(net, "CPU");
InferRequest req = execNet.CreateInferRequest();
benchmark(req);
}
// Benchmark the IR with dynamic (Parameter) weights: the replacement
// weights are parsed separately and fed to the request as input blobs.
CNNNetwork net = ie.ReadNetwork("../graph_dynamic.xml", "../graph_0.bin");
ExecutableNetwork execNet = ie.LoadNetwork(net, "CPU");
InferRequest req = execNet.CreateInferRequest();
{
// Time how long it takes to parse and bind the replacement weights.
const auto start = std::chrono::steady_clock::now();
WeightsLoader loader(ie, "../graph_1.bin");
loader.feed(req);
const auto end = std::chrono::steady_clock::now();
std::cout << "Load time: "
          << std::chrono::duration<double, std::milli>(end - start).count()
          << "ms" << std::endl;
benchmark(req);
}
return 0;
}
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment