// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.

#include "precomp.hpp"

#include "net_impl.hpp"
#include "legacy_backend.hpp"

#include "backend.hpp"
#include "factory.hpp"

namespace cv {
namespace dnn {
CV__DNN_INLINE_NS_BEGIN

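// Wraps a host Mat into a backend-specific BackendWrapper for the currently
// selected backend/target pair. The plain OpenCV CPU targets need no wrapper,
// so an empty pointer is returned for them.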
Ptr<BackendWrapper> Net::Impl::wrap(Mat& host)
{
    if (preferableBackend == DNN_BACKEND_OPENCV &&
        (preferableTarget == DNN_TARGET_CPU || preferableTarget == DNN_TARGET_CPU_FP16))
        return Ptr<BackendWrapper>();

    MatShape shape(host.dims);
    for (int i = 0; i < host.dims; ++i)
        shape[i] = host.size[i];

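    // Reuse an existing wrapper when this Mat's memory has already been wrapped
    // (e.g. several blobs sharing one buffer): a backend-specific view with the
    // requested shape is created on top of the cached base buffer.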
    void* data = host.data;
    if (backendWrappers.find(data) != backendWrappers.end())
    {
        Ptr<BackendWrapper> baseBuffer = backendWrappers[data];
        if (preferableBackend == DNN_BACKEND_OPENCV)
        {
#ifdef HAVE_OPENCL
            CV_Assert(IS_DNN_OPENCL_TARGET(preferableTarget));
            return OpenCLBackendWrapper::create(baseBuffer, host);
#else
            CV_Error(Error::StsInternal, "");
#endif
        }
        else if (preferableBackend == DNN_BACKEND_HALIDE)
        {
            CV_Assert(haveHalide());
#ifdef HAVE_HALIDE
            return Ptr<BackendWrapper>(new HalideBackendWrapper(baseBuffer, shape));
#endif
        }
        else if (preferableBackend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
        {
            CV_ERROR_DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019;
        }
        else if (preferableBackend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH)
        {
            return wrapMat(preferableBackend, preferableTarget, host);
        }
        else if (preferableBackend == DNN_BACKEND_WEBNN)
        {
#ifdef HAVE_WEBNN
            return wrapMat(preferableBackend, preferableTarget, host);
#endif
        }
        else if (preferableBackend == DNN_BACKEND_VKCOM)
        {
#ifdef HAVE_VULKAN
            return Ptr<BackendWrapper>(new VkComBackendWrapper(baseBuffer, host));
#endif
        }
        else if (preferableBackend == DNN_BACKEND_CUDA)
        {
            CV_Assert(haveCUDA());
#ifdef HAVE_CUDA
            switch (preferableTarget)
            {
            case DNN_TARGET_CUDA:
                return CUDABackendWrapperFP32::create(baseBuffer, shape);
            case DNN_TARGET_CUDA_FP16:
                return CUDABackendWrapperFP16::create(baseBuffer, shape);
            default:
                CV_Assert(IS_DNN_CUDA_TARGET(preferableTarget));
            }
#endif
        }
        else if (preferableBackend == DNN_BACKEND_TIMVX)
        {
#ifdef HAVE_TIMVX
            return Ptr<BackendWrapper>(new TimVXBackendWrapper(baseBuffer, host));
#endif
        }
        else if (preferableBackend == DNN_BACKEND_CANN)
        {
            CV_Assert(0 && "Internal error: DNN_BACKEND_CANN must be implemented through inheritance");
        }
        else
            CV_Error(Error::StsNotImplemented, "Unknown backend identifier");
    }

    Ptr<BackendWrapper> wrapper = wrapMat(preferableBackend, preferableTarget, host);
    backendWrappers[data] = wrapper;
    return wrapper;
}

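// Performs backend-specific network initialization for the selected backend.
// Backends not compiled into this build raise StsNotImplemented; OpenVINO and
// CANN are handled by Net::Impl subclasses, so reaching their branches here is
// an internal error.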
void Net::Impl::initBackend(const std::vector<LayerPin>& blobsToKeep_)
{
    CV_TRACE_FUNCTION();
    if (preferableBackend == DNN_BACKEND_OPENCV)
    {
        CV_Assert(preferableTarget == DNN_TARGET_CPU || preferableTarget == DNN_TARGET_CPU_FP16 || IS_DNN_OPENCL_TARGET(preferableTarget));
    }
    else if (preferableBackend == DNN_BACKEND_HALIDE)
    {
#ifdef HAVE_HALIDE
        initHalideBackend();
#else
        CV_Error(Error::StsNotImplemented, "This OpenCV version is built without support of Halide");
#endif
    }
    else if (preferableBackend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH)
    {
        CV_Assert(0 && "Inheritance must be used with OpenVINO backend");
    }
    else if (preferableBackend == DNN_BACKEND_WEBNN)
    {
#ifdef HAVE_WEBNN
        initWebnnBackend(blobsToKeep_);
#else
        CV_Error(Error::StsNotImplemented, "This OpenCV version is built without support of WebNN");
#endif
    }
    else if (preferableBackend == DNN_BACKEND_VKCOM)
    {
#ifdef HAVE_VULKAN
        initVkComBackend();
#else
        CV_Error(Error::StsNotImplemented, "This OpenCV version is built without support of Vulkan");
#endif
    }
    else if (preferableBackend == DNN_BACKEND_CUDA)
    {
#ifdef HAVE_CUDA
        initCUDABackend(blobsToKeep_);
#else
        CV_Error(Error::StsNotImplemented, "This OpenCV version is built without support of CUDA/CUDNN");
#endif
    }
    else if (preferableBackend == DNN_BACKEND_TIMVX)
    {
#ifdef HAVE_TIMVX
        initTimVXBackend();
#else
        CV_Error(Error::StsNotImplemented, "This OpenCV version is built without support of TimVX");
#endif
    }
    else if (preferableBackend == DNN_BACKEND_CANN)
    {
        CV_Assert(0 && "Internal error: DNN_BACKEND_CANN must be implemented through inheritance");
    }
    else
    {
        CV_Error(Error::StsNotImplemented, cv::format("Unknown backend identifier: %d", preferableBackend));
    }
}

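// Selects the computation backend for this network. Quantized networks are
// restricted to the backends that support them, and switching to OpenVINO or
// CANN is delegated to the corresponding switchTo*() helpers (or to a plugin
// backend when ENABLE_PLUGINS is set).
//
// Typical usage through the public API (illustrative, file name hypothetical):
//     Net net = readNet("model.onnx");
//     net.setPreferableBackend(DNN_BACKEND_CUDA);
//     net.setPreferableTarget(DNN_TARGET_CUDA);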
void Net::Impl::setPreferableBackend(Net& net, int backendId)
{
    if (backendId == DNN_BACKEND_DEFAULT)
        backendId = (Backend)getParam_DNN_BACKEND_DEFAULT();

    if (backendId == DNN_BACKEND_INFERENCE_ENGINE)
        backendId = DNN_BACKEND_INFERENCE_ENGINE_NGRAPH;  // = getInferenceEngineBackendTypeParam();

    if (netWasQuantized && backendId != DNN_BACKEND_OPENCV && backendId != DNN_BACKEND_TIMVX &&
        backendId != DNN_BACKEND_INFERENCE_ENGINE_NGRAPH)
    {
        CV_LOG_WARNING(NULL, "DNN: Only default, TIMVX and OpenVINO backends support quantized networks");
        backendId = DNN_BACKEND_OPENCV;
    }
#ifdef HAVE_DNN_NGRAPH
    if (netWasQuantized && backendId == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH && INF_ENGINE_VER_MAJOR_LT(INF_ENGINE_RELEASE_2023_0))
    {
        CV_LOG_WARNING(NULL, "DNN: OpenVINO 2023.0 or higher is required to support quantized networks");
        backendId = DNN_BACKEND_OPENCV;
    }
#endif

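    // Changing the backend discards any previously prepared backend state:
    // the network is cleared before the new backend takes effect.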
    if (preferableBackend != backendId)
    {
        clear();
        if (backendId == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH)
        {
#if defined(HAVE_INF_ENGINE)
            switchToOpenVINOBackend(net);
#elif defined(ENABLE_PLUGINS)
            auto& networkBackend = dnn_backend::createPluginDNNNetworkBackend("openvino");
            networkBackend.switchBackend(net);
#else
            CV_Error(Error::StsNotImplemented, "OpenVINO backend is not available in the current OpenCV build");
#endif
        }
        else if (backendId == DNN_BACKEND_CANN)
        {
#ifdef HAVE_CANN
            switchToCannBackend(net);
#else
            CV_Error(Error::StsNotImplemented, "CANN backend is not available in the current OpenCV build");
#endif
        }
        else
        {
            preferableBackend = backendId;
        }
    }
}

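// Selects the target device the chosen backend should run on. Unsupported
// combinations are downgraded with a warning rather than rejected: quantized
// networks fall back to CPU, OpenCL FP16 falls back to OpenCL when the device
// lacks cl_khr_fp16, and DNN_TARGET_CPU_FP16 is only honored on ARMv8 builds.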
void Net::Impl::setPreferableTarget(int targetId)
{
    if (netWasQuantized && targetId != DNN_TARGET_CPU &&
        targetId != DNN_TARGET_OPENCL && targetId != DNN_TARGET_OPENCL_FP16 && targetId != DNN_TARGET_NPU)
    {
        CV_LOG_WARNING(NULL, "DNN: Only CPU, OpenCL/OpenCL FP16 and NPU targets are supported by quantized networks");
        targetId = DNN_TARGET_CPU;
    }

    if (preferableTarget != targetId)
    {
        preferableTarget = targetId;
        if (IS_DNN_OPENCL_TARGET(targetId))
        {
#ifndef HAVE_OPENCL
#ifdef HAVE_INF_ENGINE
            if (preferableBackend == DNN_BACKEND_OPENCV)
#else
            if (preferableBackend == DNN_BACKEND_DEFAULT ||
                preferableBackend == DNN_BACKEND_OPENCV)
#endif // HAVE_INF_ENGINE
                preferableTarget = DNN_TARGET_CPU;
#else
            bool fp16 = ocl::Device::getDefault().isExtensionSupported("cl_khr_fp16");
            if (!fp16 && targetId == DNN_TARGET_OPENCL_FP16)
                preferableTarget = DNN_TARGET_OPENCL;
#endif
        }

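        // DNN_TARGET_CPU_FP16 relies on ARMv8 half-precision arithmetic; on
        // other architectures fall back to the regular CPU target.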
#if !defined(__arm64__) || !__arm64__
        if (targetId == DNN_TARGET_CPU_FP16)
        {
            CV_LOG_WARNING(NULL, "DNN: fall back to DNN_TARGET_CPU. Only ARM v8 CPU is supported by DNN_TARGET_CPU_FP16.");
            targetId = DNN_TARGET_CPU;
        }
#endif

        clear();

        if (targetId == DNN_TARGET_CPU_FP16)
        {
            if (useWinograd) {
                CV_LOG_INFO(NULL, "DNN: DNN_TARGET_CPU_FP16 is set => Winograd convolution is disabled by default to preserve accuracy. If needed, enable it explicitly using enableWinograd(true).");
                enableWinograd(false);
            }
        }
    }
}


CV__DNN_INLINE_NS_END
}} // namespace cv::dnn
