// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.

#include "precomp.hpp"

#include "net_impl.hpp"
#include "legacy_backend.hpp"

#include "backend.hpp"
#include "factory.hpp"

#ifdef HAVE_CUDA
#include "cuda4dnn/init.hpp"
#endif
namespace cv {
namespace dnn {
CV__DNN_INLINE_NS_BEGIN

22Ptr<BackendWrapper> Net::Impl::wrap(Mat& host)
23{
24 if (preferableBackend == DNN_BACKEND_OPENCV &&
25 (preferableTarget == DNN_TARGET_CPU || preferableTarget == DNN_TARGET_CPU_FP16))
26 return Ptr<BackendWrapper>();
27
28 MatShape shape(host.dims);
29 for (int i = 0; i < host.dims; ++i)
30 shape[i] = host.size[i];
31
32 void* data = host.data;
33 if (backendWrappers.find(x: data) != backendWrappers.end())
34 {
35 Ptr<BackendWrapper> baseBuffer = backendWrappers[data];
36 if (preferableBackend == DNN_BACKEND_OPENCV)
37 {
38#ifdef HAVE_OPENCL
39 CV_Assert(IS_DNN_OPENCL_TARGET(preferableTarget));
40 return OpenCLBackendWrapper::create(baseBuffer, m&: host);
41#else
42 CV_Error(Error::StsInternal, "");
43#endif
44 }
45 else if (preferableBackend == DNN_BACKEND_HALIDE)
46 {
47 CV_Assert(haveHalide());
48#ifdef HAVE_HALIDE
49 return Ptr<BackendWrapper>(new HalideBackendWrapper(baseBuffer, shape));
50#endif
51 }
52 else if (preferableBackend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
53 {
54 CV_ERROR_DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019;
55 }
56 else if (preferableBackend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH)
57 {
58 return wrapMat(backendId: preferableBackend, targetId: preferableTarget, m&: host);
59 }
60 else if (preferableBackend == DNN_BACKEND_WEBNN)
61 {
62#ifdef HAVE_WEBNN
63 return wrapMat(preferableBackend, preferableTarget, host);
64#endif
65 }
66 else if (preferableBackend == DNN_BACKEND_VKCOM)
67 {
68#ifdef HAVE_VULKAN
69 return Ptr<BackendWrapper>(new VkComBackendWrapper(baseBuffer, host));
70#endif
71 }
72 else if (preferableBackend == DNN_BACKEND_CUDA)
73 {
74 CV_Assert(haveCUDA());
75#ifdef HAVE_CUDA
76 switch (preferableTarget)
77 {
78 case DNN_TARGET_CUDA:
79 return CUDABackendWrapperFP32::create(baseBuffer, shape);
80 case DNN_TARGET_CUDA_FP16:
81 return CUDABackendWrapperFP16::create(baseBuffer, shape);
82 default:
83 CV_Assert(IS_DNN_CUDA_TARGET(preferableTarget));
84 }
85#endif
86 }
87 else if (preferableBackend == DNN_BACKEND_TIMVX)
88 {
89#ifdef HAVE_TIMVX
90 return Ptr<BackendWrapper>(new TimVXBackendWrapper(baseBuffer, host));
91#endif
92 }
93 else if (preferableBackend == DNN_BACKEND_CANN)
94 {
95 CV_Assert(0 && "Internal error: DNN_BACKEND_CANN must be implemented through inheritance");
96 }
97 else
98 CV_Error(Error::StsNotImplemented, "Unknown backend identifier");
99 }
100
101 Ptr<BackendWrapper> wrapper = wrapMat(backendId: preferableBackend, targetId: preferableTarget, m&: host);
102 backendWrappers[data] = wrapper;
103 return wrapper;
104}


107void Net::Impl::initBackend(const std::vector<LayerPin>& blobsToKeep_)
108{
109 CV_TRACE_FUNCTION();
110 if (preferableBackend == DNN_BACKEND_OPENCV)
111 {
112 CV_Assert(preferableTarget == DNN_TARGET_CPU || preferableTarget == DNN_TARGET_CPU_FP16 || IS_DNN_OPENCL_TARGET(preferableTarget));
113 }
114 else if (preferableBackend == DNN_BACKEND_HALIDE)
115 {
116#ifdef HAVE_HALIDE
117 initHalideBackend();
118#else
119 CV_Error(Error::StsNotImplemented, "This OpenCV version is built without support of Halide");
120#endif
121 }
122 else if (preferableBackend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH)
123 {
124 CV_Assert(0 && "Inheritance must be used with OpenVINO backend");
125 }
126 else if (preferableBackend == DNN_BACKEND_WEBNN)
127 {
128#ifdef HAVE_WEBNN
129 initWebnnBackend(blobsToKeep_);
130#else
131 CV_Error(Error::StsNotImplemented, "This OpenCV version is built without support of WebNN");
132#endif
133 }
134 else if (preferableBackend == DNN_BACKEND_VKCOM)
135 {
136#ifdef HAVE_VULKAN
137 initVkComBackend();
138#else
139 CV_Error(Error::StsNotImplemented, "This OpenCV version is built without support of Vulkan");
140#endif
141 }
142 else if (preferableBackend == DNN_BACKEND_CUDA)
143 {
144#ifdef HAVE_CUDA
145 initCUDABackend(blobsToKeep_);
146#else
147 CV_Error(Error::StsNotImplemented, "This OpenCV version is built without support of CUDA/CUDNN");
148#endif
149 }
150 else if (preferableBackend == DNN_BACKEND_TIMVX)
151 {
152#ifdef HAVE_TIMVX
153 initTimVXBackend();
154#else
155 CV_Error(Error::StsNotImplemented, "This OpenCV version is built without support of TimVX");
156#endif
157 }
158 else if (preferableBackend == DNN_BACKEND_CANN)
159 {
160 CV_Assert(0 && "Internal error: DNN_BACKEND_CANN must be implemented through inheritance");
161 }
162 else
163 {
164 CV_Error(Error::StsNotImplemented, cv::format("Unknown backend identifier: %d", preferableBackend));
165 }
166}


169void Net::Impl::setPreferableBackend(Net& net, int backendId)
170{
171 if (backendId == DNN_BACKEND_DEFAULT)
172 backendId = (Backend)getParam_DNN_BACKEND_DEFAULT();
173
174 if (backendId == DNN_BACKEND_INFERENCE_ENGINE)
175 backendId = DNN_BACKEND_INFERENCE_ENGINE_NGRAPH; // = getInferenceEngineBackendTypeParam();
176
177 if (netWasQuantized && backendId != DNN_BACKEND_OPENCV && backendId != DNN_BACKEND_TIMVX &&
178 backendId != DNN_BACKEND_INFERENCE_ENGINE_NGRAPH)
179 {
180 CV_LOG_WARNING(NULL, "DNN: Only default, TIMVX and OpenVINO backends support quantized networks");
181 backendId = DNN_BACKEND_OPENCV;
182 }
183#ifdef HAVE_DNN_NGRAPH
184 if (netWasQuantized && backendId == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH && INF_ENGINE_VER_MAJOR_LT(INF_ENGINE_RELEASE_2023_0))
185 {
186 CV_LOG_WARNING(NULL, "DNN: OpenVINO 2023.0 and higher is required to supports quantized networks");
187 backendId = DNN_BACKEND_OPENCV;
188 }
189#endif
190
191 if (preferableBackend != backendId)
192 {
193 clear();
194 if (backendId == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH)
195 {
196#if defined(HAVE_INF_ENGINE)
197 switchToOpenVINOBackend(net);
198#elif defined(ENABLE_PLUGINS)
199 auto& networkBackend = dnn_backend::createPluginDNNNetworkBackend(baseName: "openvino");
200 networkBackend.switchBackend(net);
201#else
202 CV_Error(Error::StsNotImplemented, "OpenVINO backend is not available in the current OpenCV build");
203#endif
204 }
205 else if (backendId == DNN_BACKEND_CANN)
206 {
207#ifdef HAVE_CANN
208 switchToCannBackend(net);
209#else
210 CV_Error(Error::StsNotImplemented, "CANN backend is not availlable in the current OpenCV build");
211#endif
212 }
213 else
214 {
215 preferableBackend = backendId;
216 }
217 }
218}
220void Net::Impl::setPreferableTarget(int targetId)
221{
222 if (netWasQuantized && targetId != DNN_TARGET_CPU &&
223 targetId != DNN_TARGET_OPENCL && targetId != DNN_TARGET_OPENCL_FP16 && targetId != DNN_TARGET_NPU)
224 {
225 CV_LOG_WARNING(NULL, "DNN: Only CPU, OpenCL/OpenCL FP16 and NPU targets are supported by quantized networks");
226 targetId = DNN_TARGET_CPU;
227 }
228
229 if (preferableTarget != targetId)
230 {
231 preferableTarget = targetId;
232 if (IS_DNN_OPENCL_TARGET(targetId))
233 {
234#ifndef HAVE_OPENCL
235#ifdef HAVE_INF_ENGINE
236 if (preferableBackend == DNN_BACKEND_OPENCV)
237#else
238 if (preferableBackend == DNN_BACKEND_DEFAULT ||
239 preferableBackend == DNN_BACKEND_OPENCV)
240#endif // HAVE_INF_ENGINE
241 preferableTarget = DNN_TARGET_CPU;
242#else
243 bool fp16 = ocl::Device::getDefault().isExtensionSupported(extensionName: "cl_khr_fp16");
244 if (!fp16 && targetId == DNN_TARGET_OPENCL_FP16)
245 preferableTarget = DNN_TARGET_OPENCL;
246#endif
247 }
248
249 if (IS_DNN_CUDA_TARGET(id: targetId))
250 {
251 preferableTarget = DNN_TARGET_CPU;
252#ifdef HAVE_CUDA
253 if (cuda4dnn::doesDeviceSupportFP16() && targetId == DNN_TARGET_CUDA_FP16)
254 preferableTarget = DNN_TARGET_CUDA_FP16;
255 else
256 preferableTarget = DNN_TARGET_CUDA;
257#endif
258 }
259#if !defined(__arm64__) || !__arm64__
260 if (targetId == DNN_TARGET_CPU_FP16)
261 {
262 CV_LOG_WARNING(NULL, "DNN: fall back to DNN_TARGET_CPU. Only ARM v8 CPU is supported by DNN_TARGET_CPU_FP16.");
263 targetId = DNN_TARGET_CPU;
264 }
265#endif
266
267 clear();
268
269 if (targetId == DNN_TARGET_CPU_FP16)
270 {
271 if (useWinograd) {
272 CV_LOG_INFO(NULL, "DNN: DNN_TARGET_CPU_FP16 is set => Winograd convolution is disabled by default to preserve accuracy. If needed, enable it explicitly using enableWinograd(true).");
273 enableWinograd(useWinograd_: false);
274 }
275 }
276 }
277}


CV__DNN_INLINE_NS_END
}}  // namespace cv::dnn


// Source: opencv/modules/dnn/src/net_impl_backend.cpp
// (KDAB code-browser export; website navigation footer removed.)