// ov_caching.cpp
#include <algorithm>
#include <fstream>
#include <stdexcept>
#include <string>
#include <vector>

#include <openvino/runtime/core.hpp>
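
// part0: enable the compiled-model cache via ov::cache_dir, then follow the
// standard flow (read_model + compile_model). On later runs the plugin can
// import the cached blob instead of recompiling the model.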
void part0() {
    std::string modelPath = "/tmp/myModel.xml";
    std::string device = "GNA";
    ov::AnyMap config;
//! [ov:caching:part0]
ov::Core core;                                              // Step 1: create ov::Core object
core.set_property(ov::cache_dir("/path/to/cache/dir"));     // Step 1b: Enable caching
auto model = core.read_model(modelPath);                    // Step 2: Read Model
//...                                                       // Step 3: Prepare inputs/outputs
//...                                                       // Step 4: Set device configuration
auto compiled = core.compile_model(model, device, config);  // Step 5: Compile model
//! [ov:caching:part0]
    if (!compiled) {
        throw std::runtime_error("error");
    }
}
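
// part1: baseline flow without caching. compile_model() accepts the model file
// path directly, so an explicit read_model() call is not needed.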
void part1() {
    std::string modelPath = "/tmp/myModel.xml";
    std::string device = "GNA";
    ov::AnyMap config;
//! [ov:caching:part1]
ov::Core core;                                                  // Step 1: create ov::Core object
auto compiled = core.compile_model(modelPath, device, config);  // Step 2: Compile model by file path
//! [ov:caching:part1]
    if (!compiled) {
        throw std::runtime_error("error");
    }
}
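
// part2: caching combined with compiling by file path. With ov::cache_dir set,
// a cache hit lets the runtime load the compiled blob without reading and
// recompiling the original model file.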
void part2() {
    std::string modelPath = "/tmp/myModel.xml";
    std::string device = "GNA";
    ov::AnyMap config;
//! [ov:caching:part2]
ov::Core core;                                                  // Step 1: create ov::Core object
core.set_property(ov::cache_dir("/path/to/cache/dir"));         // Step 1b: Enable caching
auto compiled = core.compile_model(modelPath, device, config);  // Step 2: Compile model by file path
//! [ov:caching:part2]
    if (!compiled) {
        throw std::runtime_error("error");
    }
}
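
// part3: check whether a device supports model caching by looking for the
// EXPORT_IMPORT capability in its reported device capabilities.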
void part3() {
    std::string deviceName = "GNA";
    ov::AnyMap config;
    ov::Core core;
//! [ov:caching:part3]
// Get list of supported device capabilities
std::vector<std::string> caps = core.get_property(deviceName, ov::device::capabilities);
// Find 'EXPORT_IMPORT' capability in supported capabilities
bool cachingSupported = std::find(caps.begin(), caps.end(), ov::device::capability::EXPORT_IMPORT) != caps.end();
//! [ov:caching:part3]
    if (!cachingSupported) {
        throw std::runtime_error("GNA should support model caching");
    }
}
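
// Minimal sketch (not part of the original snippet set): when a device reports the
// EXPORT_IMPORT capability checked in part3, a compiled model can also be cached
// manually with ov::CompiledModel::export_model() and ov::Core::import_model().
// The function name part4 and the blob path below are illustrative assumptions.
void part4() {
    std::string modelPath = "/tmp/myModel.xml";
    std::string device = "GNA";
    std::string blobPath = "/tmp/myModel.blob";  // hypothetical location for the exported blob
    ov::Core core;
    {
        // First run: compile and serialize the compiled model to a file.
        auto compiled = core.compile_model(modelPath, device);
        std::ofstream out(blobPath, std::ios::binary);
        compiled.export_model(out);
    }
    // Later runs: restore the compiled model from the file without recompiling.
    std::ifstream in(blobPath, std::ios::binary);
    auto restored = core.import_model(in, device);
    auto request = restored.create_infer_request();  // the restored model is ready for inference
    (void)request;
}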
int main() {
    try {
        part0();
        part1();
        part2();
        part3();
    } catch (...) {
    }
    return 0;
}