forked from openvinotoolkit/openvino
-
Notifications
You must be signed in to change notification settings - Fork 0
/
executable_network.hpp
82 lines (65 loc) · 2.46 KB
/
executable_network.hpp
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
// Copyright (C) 2018-2023 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//
/**
* @brief a header file for ExecutableNetwork
* @file executable_network.hpp
*/
#pragma once
#include <ie_common.h>
#include <cpp_interfaces/impl/ie_executable_network_thread_safe_default.hpp>
#include <map>
#include <memory>
#include <string>
#include <unordered_map>
#include <unordered_set>
#include <vector>
#include "async_infer_request.hpp"
#include "ie_icore.hpp"
#include "infer_request.hpp"
namespace HeteroPlugin {

class Engine;  // forward declaration — the plugin that creates/owns this network

/**
 * @class HeteroExecutableNetwork
 * @brief Interface of executable network
 *
 * Holds the model partitioned into per-device pieces (see NetworkDesc) and
 * creates infer requests over them. Thread-safety of request creation comes
 * from the ExecutableNetworkThreadSafeDefault base class.
 */
class HeteroExecutableNetwork : public InferenceEngine::ExecutableNetworkThreadSafeDefault {
public:
    /// Shared-pointer alias for this network (modernized from `typedef`; same type).
    using Ptr = std::shared_ptr<HeteroExecutableNetwork>;

    /**
     * @brief Compiles @p network for hetero execution.
     * @param network model to compile
     * @param config  plugin configuration key/value pairs
     * @param plugin  non-owning pointer to the parent Engine
     */
    HeteroExecutableNetwork(const InferenceEngine::CNNNetwork& network,
                            const std::map<std::string, std::string>& config,
                            Engine* plugin);

    /**
     * @brief Import-from-stream constructor; restores a network previously
     *        serialized by Export().
     * @param heteroModel stream containing the exported model
     * @param config      plugin configuration key/value pairs
     * @param plugin      non-owning pointer to the parent Engine
     */
    HeteroExecutableNetwork(std::istream& heteroModel,
                            const std::map<std::string, std::string>& config,
                            Engine* plugin);

    /// Creates the infer-request implementation from legacy input/output data maps.
    InferenceEngine::IInferRequestInternal::Ptr CreateInferRequestImpl(
        InferenceEngine::InputsDataMap networkInputs,
        InferenceEngine::OutputsDataMap networkOutputs) override;

    /// Creates the infer-request implementation from ov::Node input/output lists.
    InferenceEngine::IInferRequestInternal::Ptr CreateInferRequestImpl(
        const std::vector<std::shared_ptr<const ov::Node>>& inputs,
        const std::vector<std::shared_ptr<const ov::Node>>& outputs) override;

    /// Creates a new infer request for this network.
    InferenceEngine::IInferRequestInternal::Ptr CreateInferRequest() override;

    /// @return the value of configuration key @p name.
    InferenceEngine::Parameter GetConfig(const std::string& name) const override;

    /// @return the value of metric @p name.
    InferenceEngine::Parameter GetMetric(const std::string& name) const override;

    /// Serializes the compiled network to @p modelFile (counterpart of the
    /// import constructor above).
    void Export(std::ostream& modelFile) override;

private:
    // Initialization path for a legacy (CNN) network representation.
    void InitCNNImpl(const InferenceEngine::CNNNetwork& network);

    // Initialization path for an ngraph-based network representation.
    void InitNgraph(const InferenceEngine::CNNNetwork& network);

    /// One per-device piece of the partitioned model.
    struct NetworkDesc {
        std::string _device;                                    ///< target device name
        InferenceEngine::CNNNetwork _clonedNetwork;             ///< cloned subnetwork for that device
        InferenceEngine::SoExecutableNetworkInternal _network;  ///< compiled subnetwork
    };

    std::vector<NetworkDesc> _networks;          ///< compiled per-device subnetworks
    Engine* _heteroPlugin;                       ///< non-owning back-pointer to the plugin
    std::string _name;                           ///< network name
    std::map<std::string, std::string> _config;  ///< configuration this network was built with
    // NOTE(review): presumably maps blob names between adjacent subnetworks —
    // confirm against the .cpp implementation.
    std::unordered_map<std::string, std::string> _blobNameMap;
};

}  // namespace HeteroPlugin