- Add Leap15 build
- Remove comment lines in the spec file that cause the insertion
  of extra lines during a commit

OBS-URL: https://build.opensuse.org/package/show/science:machinelearning/openvino?expand=0&rev=16
Commit: fbf5e530a9

.gitattributes (vendored, new file, 23 lines)
@@ -0,0 +1,23 @@
## Default LFS
*.7z filter=lfs diff=lfs merge=lfs -text
*.bsp filter=lfs diff=lfs merge=lfs -text
*.bz2 filter=lfs diff=lfs merge=lfs -text
*.gem filter=lfs diff=lfs merge=lfs -text
*.gz filter=lfs diff=lfs merge=lfs -text
*.jar filter=lfs diff=lfs merge=lfs -text
*.lz filter=lfs diff=lfs merge=lfs -text
*.lzma filter=lfs diff=lfs merge=lfs -text
*.obscpio filter=lfs diff=lfs merge=lfs -text
*.oxt filter=lfs diff=lfs merge=lfs -text
*.pdf filter=lfs diff=lfs merge=lfs -text
*.png filter=lfs diff=lfs merge=lfs -text
*.rpm filter=lfs diff=lfs merge=lfs -text
*.tbz filter=lfs diff=lfs merge=lfs -text
*.tbz2 filter=lfs diff=lfs merge=lfs -text
*.tgz filter=lfs diff=lfs merge=lfs -text
*.ttf filter=lfs diff=lfs merge=lfs -text
*.txz filter=lfs diff=lfs merge=lfs -text
*.whl filter=lfs diff=lfs merge=lfs -text
*.xz filter=lfs diff=lfs merge=lfs -text
*.zip filter=lfs diff=lfs merge=lfs -text
*.zst filter=lfs diff=lfs merge=lfs -text

.gitignore (vendored, new file, 1 line)
@@ -0,0 +1 @@
.osc

_constraints (new file, 11 lines)
@@ -0,0 +1,11 @@
<?xml version="1.0" encoding="UTF-8"?>
<constraints>
  <hardware>
    <disk>
      <size unit="G">20</size>
    </disk>
    <memory>
      <size unit="G">8</size>
    </memory>
  </hardware>
</constraints>

_service (new file, 16 lines)
@@ -0,0 +1,16 @@
<services>
  <service name="obs_scm" mode="manual">
    <param name="url">https://github.com/openvinotoolkit/openvino.git</param>
    <param name="scm">git</param>
    <param name="revision">2024.3.0</param>
    <param name="version">2024.3.0</param>
    <param name="submodules">enable</param>
    <param name="filename">openvino</param>
    <param name="exclude">.git</param>
  </service>
  <service name="tar" mode="buildtime" />
  <service name="recompress" mode="buildtime">
    <param name="file">*.tar</param>
    <param name="compression">zstd</param>
  </service>
</services>
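
The obs_scm service above is marked mode="manual", so the maintainer refreshes the sources by hand, while the tar and recompress steps (mode="buildtime") run on the OBS server. A minimal sketch of that manual step, assuming a recent osc client and a checkout of the science:machinelearning project:

    # Check out the package and run the manual-mode obs_scm service, which
    # fetches the 2024.3.0 tag (with submodules) into an .obscpio archive.
    osc checkout science:machinelearning/openvino
    cd science:machinelearning/openvino
    osc service manualrun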

openvino-2024.2.0.obscpio (new file, 3 lines)
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:50b3efee39ea06430456d49db9b4173b22999d8b3e7547dc658bb37df82b0f1b
size 1036420623

openvino-2024.3.0.obscpio (new file, 3 lines)
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:bacc2b9540afda6c5bd6d17ddea35afe17caefdd4fa1a350ed1c8be2eb290981
size 1055294991
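
Both .obscpio entries are Git LFS pointer files rather than the archives themselves; per the LFS spec the oid is simply the SHA-256 of the real blob, so a locally fetched archive (e.g. after git lfs pull) can be verified against the pointer:

    # The digest must match the oid recorded in the pointer file above.
    sha256sum openvino-2024.3.0.obscpio
    # expected: bacc2b9540afda6c5bd6d17ddea35afe17caefdd4fa1a350ed1c8be2eb290981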

openvino-ComputeLibrary-include-string.patch (new file, 12 lines)
@@ -0,0 +1,12 @@
Index: openvino-2024.0.0/src/plugins/intel_cpu/thirdparty/ComputeLibrary/arm_compute/core/utils/logging/IPrinter.h
===================================================================
--- openvino-2024.0.0.orig/src/plugins/intel_cpu/thirdparty/ComputeLibrary/arm_compute/core/utils/logging/IPrinter.h
+++ openvino-2024.0.0/src/plugins/intel_cpu/thirdparty/ComputeLibrary/arm_compute/core/utils/logging/IPrinter.h
@@ -25,6 +25,7 @@
 #define ARM_COMPUTE_LOGGING_PRINTER_H

 #include "support/Mutex.h"
+#include <string>

 namespace arm_compute
 {

openvino-fix-build-sample-path.patch (new file, 12 lines)
@@ -0,0 +1,12 @@
diff -uNr openvino.orig/samples/cpp/build_samples.sh openvino/samples/cpp/build_samples.sh
--- openvino.orig/samples/cpp/build_samples.sh  2024-04-25 01:04:42.451868881 -0300
+++ openvino/samples/cpp/build_samples.sh       2024-04-25 01:05:04.678342617 -0300
@@ -59,7 +59,7 @@
 printf "\nSetting environment variables for building samples...\n"

 if [ -z "$INTEL_OPENVINO_DIR" ]; then
-    if [[ "$SAMPLES_SOURCE_DIR" = "/usr/share/openvino"* ]]; then
+    if [[ "$SAMPLES_SOURCE_DIR" = "/usr/share/OpenVINO"* ]]; then
         true
     elif [ -e "$SAMPLES_SOURCE_DIR/../../setupvars.sh" ]; then
         setupvars_path="$SAMPLES_SOURCE_DIR/../../setupvars.sh"
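
This path fix matters because the openvino-sample package installs the sample sources under /usr/share/OpenVINO (%{_datadir}/%{prj_name} in the spec below). A rough usage sketch, assuming openvino-sample and openvino-devel are installed:

    # After the patch, build_samples.sh accepts the packaged location and
    # builds the C++ samples in a build directory under the invoking user's home.
    bash /usr/share/OpenVINO/samples/cpp/build_samples.sh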

openvino-fix-install-paths.patch (new file, 61 lines)
@@ -0,0 +1,61 @@
Index: openvino-2024.0.0/cmake/developer_package/packaging/archive.cmake
===================================================================
--- openvino-2024.0.0.orig/cmake/developer_package/packaging/archive.cmake
+++ openvino-2024.0.0/cmake/developer_package/packaging/archive.cmake
@@ -21,15 +21,19 @@ endif()
 macro(ov_archive_cpack_set_dirs)
     # common "archive" package locations
     # TODO: move current variables to OpenVINO specific locations
-    set(OV_CPACK_INCLUDEDIR runtime/include)
-    set(OV_CPACK_OPENVINO_CMAKEDIR runtime/cmake)
-    set(OV_CPACK_DOCDIR docs)
-    set(OV_CPACK_LICENSESDIR licenses)
-    set(OV_CPACK_SAMPLESDIR samples)
-    set(OV_CPACK_WHEELSDIR tools)
-    set(OV_CPACK_TOOLSDIR tools)
-    set(OV_CPACK_DEVREQDIR tools)
-    set(OV_CPACK_PYTHONDIR python)
+    set(OV_CPACK_INCLUDEDIR include)
+    set(OV_CPACK_OPENVINO_CMAKEDIR ${CMAKE_INSTALL_LIBDIR}/cmake/${PROJECT_NAME})
+    set(OV_CPACK_DOCDIR ${CMAKE_INSTALL_DOCDIR})
+    set(OV_CPACK_LICENSESDIR ${CMAKE_INSTALL_DATAROOTDIR}/licenses/${PROJECT_NAME})
+    set(OV_CPACK_SAMPLESDIR ${CMAKE_INSTALL_DATAROOTDIR}/${PROJECT_NAME}/samples)
+    if (ENABLE_PYTHON)
+        find_package(Python3 QUIET COMPONENTS Interpreter)
+        file(RELATIVE_PATH OV_PYTHON_MODPATH ${CMAKE_INSTALL_PREFIX} ${Python3_SITEARCH})
+        set(OV_CPACK_WHEELSDIR tools)
+        set(OV_CPACK_TOOLSDIR tools)
+        set(OV_CPACK_DEVREQDIR tools)
+        set(OV_CPACK_PYTHONDIR ${OV_PYTHON_MODPATH})
+    endif()

     if(USE_BUILD_TYPE_SUBFOLDER)
         set(build_type ${CMAKE_BUILD_TYPE})
@@ -46,11 +50,11 @@ macro(ov_archive_cpack_set_dirs)
         set(OV_CPACK_RUNTIMEDIR runtime/lib/${ARCH_FOLDER}/${build_type})
         set(OV_CPACK_ARCHIVEDIR runtime/lib/${ARCH_FOLDER}/${build_type})
     else()
-        set(OV_CPACK_LIBRARYDIR runtime/lib/${ARCH_FOLDER})
-        set(OV_CPACK_RUNTIMEDIR runtime/lib/${ARCH_FOLDER})
-        set(OV_CPACK_ARCHIVEDIR runtime/lib/${ARCH_FOLDER})
+        set(OV_CPACK_LIBRARYDIR ${CMAKE_INSTALL_LIBDIR})
+        set(OV_CPACK_RUNTIMEDIR ${CMAKE_INSTALL_LIBDIR})
+        set(OV_CPACK_ARCHIVEDIR ${CMAKE_INSTALL_LIBDIR})
     endif()
-    set(OV_CPACK_PLUGINSDIR ${OV_CPACK_RUNTIMEDIR})
+    set(OV_CPACK_PLUGINSDIR ${OV_CPACK_RUNTIMEDIR}/${PROJECT_NAME})
 endmacro()

 ov_archive_cpack_set_dirs()
Index: openvino-2024.0.0/src/cmake/openvino.cmake
===================================================================
--- openvino-2024.0.0.orig/src/cmake/openvino.cmake
+++ openvino-2024.0.0/src/cmake/openvino.cmake
@@ -254,6 +254,7 @@ if(ENABLE_PKGCONFIG_GEN)

     # define relative paths
     file(RELATIVE_PATH PKGCONFIG_OpenVINO_PREFIX "/${OV_CPACK_RUNTIMEDIR}/pkgconfig" "/")
+    cmake_path(NORMAL_PATH PKGCONFIG_OpenVINO_PREFIX)

     set(pkgconfig_in "${OpenVINO_SOURCE_DIR}/cmake/templates/openvino.pc.in")
     if(CMAKE_VERSION VERSION_GREATER_EQUAL 3.20 AND OV_GENERATOR_MULTI_CONFIG)
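
With the pkg-config and CMake config files relocated to standard libdir locations by this patch, downstream builds can discover the packaged OpenVINO in the usual ways. A quick check, assuming openvino-devel is installed:

    # openvino.pc is installed to %{_libdir}/pkgconfig (see the spec below).
    pkg-config --cflags --libs openvino
    # The CMake package config lands in %{_libdir}/cmake/OpenVINO/, so a consumer
    # project can use find_package(OpenVINO REQUIRED) and link openvino::runtime.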

openvino-onnx-ml-defines.patch (new file, 12 lines)
@@ -0,0 +1,12 @@
Index: openvino-2024.0.0/thirdparty/dependencies.cmake
===================================================================
--- openvino-2024.0.0.orig/thirdparty/dependencies.cmake
+++ openvino-2024.0.0/thirdparty/dependencies.cmake
@@ -482,6 +482,7 @@ if(ENABLE_OV_ONNX_FRONTEND)

     if(ONNX_FOUND)
         # conan and vcpkg create imported targets 'onnx' and 'onnx_proto'
+        add_compile_definitions(ONNX_ML=1)
     else()
         add_subdirectory(thirdparty/onnx)
     endif()
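
The added ONNX_ML=1 definition matches the rationale recorded in the changelog below: openSUSE's onnx-devel ships the ML-enabled generated header onnx-ml.pb.h rather than onnx.pb.h, so the define is needed when building the ONNX frontend against the system copy. A quick, hedged way to confirm what the installed package actually provides:

    # List the generated protobuf headers shipped by the system onnx package.
    rpm -ql onnx-devel | grep 'pb\.h$'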

openvino-remove-npu-compile-tool.patch (new file, 28 lines)
@@ -0,0 +1,28 @@
diff -uNr openvino.orig/src/plugins/intel_npu/tools/CMakeLists.txt openvino/src/plugins/intel_npu/tools/CMakeLists.txt
--- openvino.orig/src/plugins/intel_npu/tools/CMakeLists.txt   2024-08-02 23:32:03.216982353 -0300
+++ openvino/src/plugins/intel_npu/tools/CMakeLists.txt        2024-08-04 17:22:22.899469769 -0300
@@ -4,5 +4,4 @@
 #

 add_subdirectory(common)
-add_subdirectory(compile_tool)
 add_subdirectory(single-image-test)
diff -uNr openvino.orig/src/plugins/intel_npu/tools/compile_tool/CMakeLists.txt openvino/src/plugins/intel_npu/tools/compile_tool/CMakeLists.txt
--- openvino.orig/src/plugins/intel_npu/tools/compile_tool/CMakeLists.txt      2024-08-02 23:32:03.216982353 -0300
+++ openvino/src/plugins/intel_npu/tools/compile_tool/CMakeLists.txt   2024-08-03 02:36:25.059440300 -0300
@@ -44,13 +44,13 @@
 #

 install(TARGETS ${TARGET_NAME}
-        RUNTIME DESTINATION "tools/${TARGET_NAME}"
+        RUNTIME DESTINATION "share/OpenVINO/tools/${TARGET_NAME}"
         COMPONENT ${NPU_INTERNAL_COMPONENT}
         ${OV_CPACK_COMP_NPU_INTERNAL_EXCLUDE_ALL})

 if(EXISTS "${CMAKE_CURRENT_SOURCE_DIR}/README.md")
     install(FILES "${CMAKE_CURRENT_SOURCE_DIR}/README.md"
-            DESTINATION "tools/${TARGET_NAME}"
+            DESTINATION "share/OpenVINO/tools/${TARGET_NAME}"
             COMPONENT ${NPU_INTERNAL_COMPONENT}
             ${OV_CPACK_COMP_NPU_INTERNAL_EXCLUDE_ALL})
 endif()

openvino-rpmlintrc (new file, 4 lines)
@@ -0,0 +1,4 @@
addFilter("openvino-sample.*: E: devel-file-in-non-devel-package")

# These files are part of samples, meant for the user to copy and re-use, so env based hashbangs are preferred
addFilter("openvino-sample.*: E: env-script-interpreter")

openvino.changes (new file, 338 lines)
@@ -0,0 +1,338 @@
-------------------------------------------------------------------
Wed Oct 2 20:56:59 UTC 2024 - Giacomo Comes <gcomes.obs@gmail.com>

- Add Leap15 build
- Remove comment lines in the spec file that cause the insertion
  of extra lines during a commit

-------------------------------------------------------------------
Sat Aug 10 01:41:06 UTC 2024 - Alessandro de Oliveira Faria <cabelo@opensuse.org>

- Remove NPU Compile Tool
  * openvino-remove-npu-compile-tool.patch
- Update to 2024.3.0
- Summary of major features and improvements
  * More Gen AI coverage and framework integrations to minimize
    code changes
    + OpenVINO pre-optimized models are now available in Hugging
      Face making it easier for developers to get started with
      these models.
  * Broader Large Language Model (LLM) support and more model
    compression techniques.
    + Significant improvement in LLM performance on Intel
      discrete GPUs with the addition of Multi-Head Attention
      (MHA) and OneDNN enhancements.
  * More portability and performance to run AI at the edge, in the
    cloud, or locally.
    + Improved CPU performance when serving LLMs with the
      inclusion of vLLM and continuous batching in the OpenVINO
      Model Server (OVMS). vLLM is an easy-to-use open-source
      library that supports efficient LLM inferencing and model
      serving.
- Support Change and Deprecation Notices
  * Using deprecated features and components is not advised.
    They are available to enable a smooth transition to new
    solutions and will be discontinued in the future. To keep
    using discontinued features, you will have to revert to the
    last LTS OpenVINO version supporting them. For more details,
    refer to the OpenVINO Legacy Features and Components page.
  * Discontinued in 2024.0:
    + Runtime components:
      - Intel® Gaussian & Neural Accelerator (Intel® GNA)..Consider
        using the Neural Processing Unit (NPU) for low-powered
        systems like Intel® Core™ Ultra or 14th generation
        and beyond.
      - OpenVINO C++/C/Python 1.0 APIs (see 2023.3 API transition
        guide for reference).
      - All ONNX Frontend legacy API (known as ONNX_IMPORTER_API)
      - 'PerfomanceMode.UNDEFINED' property as part of the OpenVINO
        Python API
    + Tools:
      - Deployment Manager. See installation and deployment guides
        for current distribution options.
      - Accuracy Checker.
      - Post-Training Optimization Tool (POT). Neural Network
        Compression Framework (NNCF) should be used instead.
      - A Git patch for NNCF integration with huggingface/
        transformers. The recommended approach is to use
        huggingface/optimum-intel for applying NNCF optimization
        on top of models from Hugging Face.
      - Support for Apache MXNet, Caffe, and Kaldi model formats.
        Conversion to ONNX may be used as a solution.
  * Deprecated and to be removed in the future:
    + The OpenVINO™ Development Tools package (pip install
      openvino-dev) will be removed from installation options
      and distribution channels beginning with OpenVINO 2025.0.
    + Model Optimizer will be discontinued with OpenVINO 2025.0.
      Consider using the new conversion methods instead. For
      more details, see the model conversion transition guide.
    + OpenVINO property Affinity API will be discontinued with
      OpenVINO 2025.0. It will be replaced with CPU binding
      configurations (ov::hint::enable_cpu_pinning).
    + OpenVINO Model Server components:
      - “auto shape” and “auto batch size” (reshaping a model
        in runtime) will be removed in the future. OpenVINO’s
        dynamic shape models are recommended instead.
    + A number of notebooks have been deprecated. For an
      up-to-date listing of available notebooks, refer to
      the OpenVINO™ Notebook index (openvinotoolkit.github.io).

-------------------------------------------------------------------
Sat Jun 22 12:01:23 UTC 2024 - Andreas Schwab <schwab@suse.de>

- Add riscv-cpu-plugin subpackage

-------------------------------------------------------------------
Wed Jun 19 21:36:01 UTC 2024 - Alessandro de Oliveira Faria <cabelo@opensuse.org>

- Update to 2024.2.0
- More Gen AI coverage and framework integrations to minimize code
  changes
  * Llama 3 optimizations for CPUs, built-in GPUs, and discrete
    GPUs for improved performance and efficient memory usage.
  * Support for Phi-3-mini, a family of AI models that leverages
    the power of small language models for faster, more accurate
    and cost-effective text processing.
  * Python Custom Operation is now enabled in OpenVINO making it
    easier for Python developers to code their custom operations
    instead of using C++ custom operations (also supported).
    Python Custom Operation empowers users to implement their own
    specialized operations into any model.
  * Notebooks expansion to ensure better coverage for new models.
    Noteworthy notebooks added: DynamiCrafter, YOLOv10, Chatbot
    notebook with Phi-3, and QWEN2.
- Broader Large Language Model (LLM) support and more model
  compression techniques.
  * GPTQ method for 4-bit weight compression added to NNCF for
    more efficient inference and improved performance of
    compressed LLMs.
  * Significant LLM performance improvements and reduced latency
    for both built-in GPUs and discrete GPUs.
  * Significant improvement in 2nd token latency and memory
    footprint of FP16 weight LLMs on AVX2 (13th Gen Intel® Core™
    processors) and AVX512 (3rd Gen Intel® Xeon® Scalable
    Processors) based CPU platforms, particularly for small
    batch sizes.
- More portability and performance to run AI at the edge, in the
  cloud, or locally.
  * Model Serving Enhancements:
  * Preview: OpenVINO Model Server (OVMS) now supports
    OpenAI-compatible API along with Continuous Batching and
    PagedAttention, enabling significantly higher throughput
    for parallel inferencing, especially on Intel® Xeon®
    processors, when serving LLMs to many concurrent users.
  * OpenVINO backend for Triton Server now supports built-in
    GPUs and discrete GPUs, in addition to dynamic
    shapes support.
  * Integration of TorchServe through torch.compile OpenVINO
    backend for easy model deployment, provisioning to
    multiple instances, model versioning, and maintenance.
  * Preview: addition of the Generate API, a simplified API
    for text generation using large language models with only
    a few lines of code. The API is available through the newly
    launched OpenVINO GenAI package.
  * Support for Intel Atom® Processor X Series. For more details,
    see System Requirements.
  * Preview: Support for Intel® Xeon® 6 processor.
- Support Change and Deprecation Notices
  * Using deprecated features and components is not advised.
    They are available to enable a smooth transition to new
    solutions and will be discontinued in the future.
    To keep using discontinued features, you will have to revert
    to the last LTS OpenVINO version supporting them. For more
    details, refer to the OpenVINO Legacy Features and
    Components page.
  * Discontinued in 2024.0:
    + Runtime components:
      - Intel® Gaussian & Neural Accelerator (Intel® GNA).
        Consider using the Neural Processing Unit (NPU) for
        low-powered systems like Intel® Core™ Ultra or 14th
        generation and beyond.
      - OpenVINO C++/C/Python 1.0 APIs (see 2023.3 API
        transition guide for reference).
      - All ONNX Frontend legacy API (known as ONNX_IMPORTER_API)
      - 'PerfomanceMode.UNDEFINED' property as part of the
        OpenVINO Python API
    + Tools:
      - Deployment Manager. See installation and deployment
        guides for current distribution options.
      - Accuracy Checker.
      - Post-Training Optimization Tool (POT). Neural Network
        Compression Framework (NNCF) should be used instead.
      - A Git patch for NNCF integration with
        huggingface/transformers. The recommended approach
        is to use huggingface/optimum-intel for applying NNCF
        optimization on top of models from Hugging Face.
      - Support for Apache MXNet, Caffe, and Kaldi model formats.
        Conversion to ONNX may be used as a solution.
  * Deprecated and to be removed in the future:
    + The OpenVINO™ Development Tools package (pip install
      openvino-dev) will be removed from installation options
      and distribution channels beginning with OpenVINO 2025.0.
    + Model Optimizer will be discontinued with OpenVINO 2025.0.
      Consider using the new conversion methods instead. For
      more details, see the model conversion transition guide.
    + OpenVINO property Affinity API will be discontinued with
      OpenVINO 2025.0. It will be replaced with CPU binding
      configurations (ov::hint::enable_cpu_pinning).
    + OpenVINO Model Server components:
      + “auto shape” and “auto batch size” (reshaping a model in
        runtime) will be removed in the future. OpenVINO’s dynamic
        shape models are recommended instead.
    + A number of notebooks have been deprecated. For an
      up-to-date listing of available notebooks, refer to the
      OpenVINO™ Notebook index (openvinotoolkit.github.io).

-------------------------------------------------------------------
Thu May 9 22:56:53 UTC 2024 - Alessandro de Oliveira Faria <cabelo@opensuse.org>

- Fix sample source path in build script:
  * openvino-fix-build-sample-path.patch
- Update to 2024.1.0
- More Generative AI coverage and framework integrations to
  minimize code changes.
  * Mixtral and URLNet models optimized for performance
    improvements on Intel® Xeon® processors.
  * Stable Diffusion 1.5, ChatGLM3-6B, and Qwen-7B models
    optimized for improved inference speed on Intel® Core™
    Ultra processors with integrated GPU.
  * Support for Falcon-7B-Instruct, a GenAI Large Language Model
    (LLM) ready-to-use chat/instruct model with superior
    performance metrics.
  * New Jupyter Notebooks added: YOLO V9, YOLO V8
    Oriented Bounding Boxes Detection (OOB), Stable Diffusion
    in Keras, MobileCLIP, RMBG-v1.4 Background Removal, Magika,
    TripoSR, AnimateAnyone, LLaVA-Next, and RAG system with
    OpenVINO and LangChain.
- Broader Large Language Model (LLM) support and more model
  compression techniques.
  * LLM compilation time reduced through additional optimizations
    with compressed embedding. Improved 1st token performance of
    LLMs on 4th and 5th generations of Intel® Xeon® processors
    with Intel® Advanced Matrix Extensions (Intel® AMX).
  * Better LLM compression and improved performance with oneDNN,
    INT4, and INT8 support for Intel® Arc™ GPUs.
  * Significant memory reduction for select smaller GenAI
    models on Intel® Core™ Ultra processors with integrated GPU.
- More portability and performance to run AI at the edge,
  in the cloud, or locally.
  * The preview NPU plugin for Intel® Core™ Ultra processors
    is now available in the OpenVINO open-source GitHub
    repository, in addition to the main OpenVINO package on PyPI.
  * The JavaScript API is now more easily accessible through
    the npm repository, enabling JavaScript developers’ seamless
    access to the OpenVINO API.
  * FP16 inference on ARM processors now enabled for the
    Convolutional Neural Network (CNN) by default.
- Support Change and Deprecation Notices
  * Using deprecated features and components is not advised. They
    are available to enable a smooth transition to new solutions
    and will be discontinued in the future. To keep using
    Discontinued features, you will have to revert to the last
    LTS OpenVINO version supporting them.
  * For more details, refer to the OpenVINO Legacy Features
    and Components page.
  * Discontinued in 2024.0:
    + Runtime components:
      - Intel® Gaussian & Neural Accelerator (Intel® GNA).
        Consider using the Neural Processing Unit (NPU)
        for low-powered systems like Intel® Core™ Ultra or
        14th generation and beyond.
      - OpenVINO C++/C/Python 1.0 APIs (see 2023.3 API
        transition guide for reference).
      - All ONNX Frontend legacy API (known as
        ONNX_IMPORTER_API)
      - 'PerfomanceMode.UNDEFINED' property as part of
        the OpenVINO Python API
    + Tools:
      - Deployment Manager. See installation and deployment
        guides for current distribution options.
      - Accuracy Checker.
      - Post-Training Optimization Tool (POT). Neural Network
        Compression Framework (NNCF) should be used instead.
      - A Git patch for NNCF integration with
        huggingface/transformers. The recommended approach
        is to use huggingface/optimum-intel for applying
        NNCF optimization on top of models from Hugging
        Face.
      - Support for Apache MXNet, Caffe, and Kaldi model
        formats. Conversion to ONNX may be used as
        a solution.
  * Deprecated and to be removed in the future:
    + The OpenVINO™ Development Tools package (pip install
      openvino-dev) will be removed from installation options
      and distribution channels beginning with OpenVINO 2025.0.
    + Model Optimizer will be discontinued with OpenVINO 2025.0.
      Consider using the new conversion methods instead. For
      more details, see the model conversion transition guide.
    + OpenVINO property Affinity API will be discontinued with
      OpenVINO 2025.0. It will be replaced with CPU binding
      configurations (ov::hint::enable_cpu_pinning).
    + OpenVINO Model Server components:
      - “auto shape” and “auto batch size” (reshaping a model
        in runtime) will be removed in the future. OpenVINO’s
        dynamic shape models are recommended instead.

-------------------------------------------------------------------
Tue Apr 23 18:57:17 UTC 2024 - Atri Bhattacharya <badshah400@gmail.com>

- License update: play safe and list all third party licenses as
  part of the License tag.

-------------------------------------------------------------------
Tue Apr 23 12:42:32 UTC 2024 - Atri Bhattacharya <badshah400@gmail.com>

- Switch to _service file as tagged Source tarball does not
  include `./thirdparty` submodules.
- Update openvino-fix-install-paths.patch to fix python module
  install path.
- Enable python module and split it out into a python subpackage
  (for now default python3 only).
- Explicitly build python metadata (dist-info) and install it
  (needs simple sed hackery to support "officially" unsupported
  platform ppc64le).
- Specify ENABLE_JS=OFF to turn off javascript bindings as
  building these requires downloading npm stuff from the network.
- Build with system pybind11.
- Bump _constraints for updated disk space requirements.
- Drop empty %check section, rpmlint was misleading when it
  recommended adding this.

-------------------------------------------------------------------
Fri Apr 19 08:08:02 UTC 2024 - Atri Bhattacharya <badshah400@gmail.com>

- Numerous specfile cleanups:
  * Drop redundant `mv` commands and use `install` where
    appropriate.
  * Build with system protobuf.
  * Fix Summary tags.
  * Trim package descriptions.
  * Drop forcing CMAKE_BUILD_TYPE=Release, let macro default
    RelWithDebInfo be used instead.
  * Correct naming of shared library packages.
  * Separate out libopenvino_c.so.* into own shared lib package.
  * Drop rpmlintrc rule used to hide shlib naming mistakes.
  * Rename Source tarball to %{name}-%{version}.EXT pattern.
  * Use ldconfig_scriptlet macro for post(un).
- Add openvino-onnx-ml-defines.patch -- Define ONNX_ML at compile
  time when using system onnx to allow using 'onnx-ml.pb.h'
  instead of 'onnx.pb.h', the latter not being shipped with
  openSUSE's onnx-devel package (gh#onnx/onnx#3074).
- Add openvino-fix-install-paths.patch: Change hard-coded install
  paths in upstream cmake macro to standard Linux dirs.
- Add openvino-ComputeLibrary-include-string.patch: Include header
  for std::string.
- Add external devel packages as Requires for openvino-devel.
- Pass -Wl,-z,noexecstack to %build_ldflags to avoid an exec stack
  issue with intel CPU plugin.
- Use ninja for build.
- Adapt _constraits file for correct disk space and memory
  requirements.
- Add empty %check section.

-------------------------------------------------------------------
Mon Apr 15 03:18:33 UTC 2024 - Alessandro de Oliveira Faria <cabelo@opensuse.org>

- Initial package
- Version 2024.0.0
- Add openvino-rpmlintrc.

openvino.obsinfo (new file, 4 lines)
@@ -0,0 +1,4 @@
name: openvino
version: 2024.3.0
mtime: 1721394417
commit: 1e3b88e4e3f89774923e04e845428579f8ffa0fe

openvino.spec (new file, 447 lines)
@@ -0,0 +1,447 @@
#
# spec file for package openvino
#
# Copyright (c) 2024 SUSE LLC
# Copyright (c) 2024 Alessandro de Oliveira Faria (A.K.A. CABELO) <cabelo@opensuse.org> or <alessandro.faria@owasp.org>
#
# All modifications and additions to the file contributed by third parties
# remain the property of their copyright owners, unless otherwise agreed
# upon. The license for this file, and modifications and additions to the
# file, is the same license as for the pristine package itself (unless the
# license for the pristine package is not an Open Source License, in which
# case the license is the MIT License). An "Open Source License" is a
# license that conforms to the Open Source Definition (Version 1.9)
# published by the Open Source Initiative.

# Please submit bugfixes or comments via https://bugs.opensuse.org/
#


%if 0%{?suse_version} < 1600
%define isLeap15 %nil
%else
%undefine isLeap15
%endif

# Compilation takes ~1 hr on OBS for a single python, don't try all supported flavours
%if %{defined isLeap15}
%define x86_64 x86_64
%define pythons python311
%else
%define pythons python3
%endif
%define __builder ninja
%define so_ver 2430
%define shlib lib%{name}%{so_ver}
%define shlib_c lib%{name}_c%{so_ver}
%define prj_name OpenVINO

Name:           openvino
Version:        2024.3.0
Release:        0
Summary:        A toolkit for optimizing and deploying AI inference
# Let's be safe and put all third party licenses here, no matter that we use specific thirdparty libs or not
License:        Apache-2.0 AND BSD-2-Clause AND BSD-3-Clause AND HPND AND JSON AND MIT AND OFL-1.1 AND Zlib
URL:            https://github.com/openvinotoolkit/openvino
Source0:        %{name}-%{version}.tar.zst
Source1:        %{name}-rpmlintrc
# PATCH-FEATURE-OPENSUSE openvino-onnx-ml-defines.patch badshah400@gmail.com -- Define ONNX_ML at compile time when using system onnx to allow using 'onnx-ml.pb.h' instead of 'onnx.pb.h', the latter not being shipped with openSUSE's onnx-devel package
Patch0:         openvino-onnx-ml-defines.patch
# PATCH-FEATURE-OPENSUSE openvino-fix-install-paths.patch badshah400@gmail.com -- Fix installation paths hardcoded into upstream defined cmake macros
Patch2:         openvino-fix-install-paths.patch
# PATCH-FIX-UPSTREAM openvino-ComputeLibrary-include-string.patch badshah400@gmail.com -- Include header for std::string
Patch3:         openvino-ComputeLibrary-include-string.patch
# PATCH-FIX-UPSTREAM openvino-fix-build-sample-path.patch cabelo@opensuse.org -- Fix sample source path in build script
Patch4:         openvino-fix-build-sample-path.patch
# PATCH-FIX-UPSTREAM openvino-remove-npu-compile-tool.patch cabelo@opensuse.org -- Remove NPU Compile Tool
Patch5:         openvino-remove-npu-compile-tool.patch

BuildRequires:  ade-devel
BuildRequires:  cmake
BuildRequires:  fdupes
%if %{defined isLeap15}
BuildRequires:  gcc13-c++
%else
BuildRequires:  gcc-c++
%endif
BuildRequires:  ninja
BuildRequires:  opencl-cpp-headers
# FIXME: /usr/include/onnx/onnx-ml.pb.h:17:2: error: This file was generated by
# an older version of protoc which is incompatible with your Protocol Buffer
# headers. Please regenerate this file with a newer version of protoc.
#BuildRequires:  cmake(ONNX)
BuildRequires:  pkgconfig
BuildRequires:  %{python_module devel}
BuildRequires:  %{python_module pip}
BuildRequires:  %{python_module pybind11-devel}
BuildRequires:  %{python_module setuptools}
BuildRequires:  %{python_module wheel}
BuildRequires:  python-rpm-macros
BuildRequires:  zstd
BuildRequires:  pkgconfig(flatbuffers)
BuildRequires:  pkgconfig(libva)
BuildRequires:  pkgconfig(nlohmann_json)
BuildRequires:  pkgconfig(ocl-icd)
BuildRequires:  pkgconfig(protobuf)
BuildRequires:  pkgconfig(pugixml)
%if %{defined isLeap15}
BuildRequires:  opencl-headers
BuildRequires:  snappy-devel
BuildRequires:  tbb-devel
%else
BuildRequires:  pkgconfig(OpenCL-Headers)
BuildRequires:  pkgconfig(snappy)
BuildRequires:  pkgconfig(tbb)
%endif
BuildRequires:  pkgconfig(zlib)
%ifarch %{arm64}
BuildRequires:  scons
%endif
# No 32-bit support
ExcludeArch:    %{ix86} %{arm32} ppc
%define python_subpackage_only 1
%python_subpackages

%description
OpenVINO is an open-source toolkit for optimizing and deploying AI inference.

%package -n %{shlib}
Summary:        Shared library for OpenVINO toolkit

%description -n %{shlib}
OpenVINO is an open-source toolkit for optimizing and deploying AI inference.

This package provides the shared library for OpenVINO.

%package -n %{shlib_c}
Summary:        Shared C library for OpenVINO toolkit

%description -n %{shlib_c}
This package provides the C library for OpenVINO.

%package -n %{name}-devel
Summary:        Headers and sources for OpenVINO toolkit
Requires:       %{shlib_c} = %{version}
Requires:       %{shlib} = %{version}
Requires:       lib%{name}_ir_frontend%{so_ver} = %{version}
Requires:       lib%{name}_onnx_frontend%{so_ver} = %{version}
Requires:       lib%{name}_paddle_frontend%{so_ver} = %{version}
Requires:       lib%{name}_pytorch_frontend%{so_ver} = %{version}
Requires:       lib%{name}_tensorflow_frontend%{so_ver} = %{version}
Requires:       lib%{name}_tensorflow_lite_frontend%{so_ver} = %{version}
Requires:       pkgconfig(flatbuffers)
Requires:       pkgconfig(libva)
Requires:       pkgconfig(nlohmann_json)
Requires:       pkgconfig(ocl-icd)
Requires:       pkgconfig(protobuf)
Requires:       pkgconfig(pugixml)
%if %{defined isLeap15}
Requires:       opencl-headers
Requires:       snappy-devel
Requires:       tbb-devel
%else
Requires:       pkgconfig(OpenCL-Headers)
Requires:       pkgconfig(snappy)
Requires:       pkgconfig(tbb)
%endif
Recommends:     %{name}-auto-batch-plugin = %{version}
Recommends:     %{name}-auto-plugin = %{version}
Recommends:     %{name}-hetero-plugin = %{version}
Recommends:     %{name}-intel-cpu-plugin = %{version}
%ifarch riscv64
Recommends:     %{name}-riscv-cpu-plugin = %{version}
%endif

%description -n %{name}-devel
OpenVINO is an open-source toolkit for optimizing and deploying AI inference.

This package provides the headers and sources for developing applications with
OpenVINO.

%package -n %{name}-arm-cpu-plugin
Summary:        Intel CPU plugin for OpenVINO toolkit

%description -n %{name}-arm-cpu-plugin
OpenVINO is an open-source toolkit for optimizing and deploying AI inference.

This package provides the ARM CPU plugin for OpenVINO on %{arm64} archs.

%package -n %{name}-riscv-cpu-plugin
Summary:        RISC-V CPU plugin for OpenVINO toolkit

%description -n %{name}-riscv-cpu-plugin
OpenVINO is an open-source toolkit for optimizing and deploying AI inference.

This package provides the RISC-V CPU plugin for OpenVINO on riscv64 archs.

%package -n %{name}-auto-plugin
Summary:        Auto / Multi software plugin for OpenVINO toolkit

%description -n %{name}-auto-plugin
OpenVINO is an open-source toolkit for optimizing and deploying AI inference.

This package provides the Auto / Multi software plugin for OpenVINO.

%package -n %{name}-auto-batch-plugin
Summary:        Automatic batch software plugin for OpenVINO toolkit

%description -n %{name}-auto-batch-plugin
OpenVINO is an open-source toolkit for optimizing and deploying AI inference.

This package provides the automatic batch software plugin for OpenVINO.

%package -n %{name}-hetero-plugin
Summary:        Hetero frontend for Intel OpenVINO toolkit

%description -n %{name}-hetero-plugin
OpenVINO is an open-source toolkit for optimizing and deploying AI inference.

This package provides the hetero frontend for OpenVINO.

%package -n %{name}-intel-cpu-plugin
Summary:        Intel CPU plugin for OpenVINO toolkit

%description -n %{name}-intel-cpu-plugin
OpenVINO is an open-source toolkit for optimizing and deploying AI inference.

This package provides the intel CPU plugin for OpenVINO for %{x86_64} archs.

%package -n %{name}-intel-npu-plugin
Summary:        Intel NPU plugin for OpenVINO toolkit

%description -n %{name}-intel-npu-plugin
OpenVINO is an open-source toolkit for optimizing and deploying AI inference.

This package provides the intel NPU plugin for OpenVINO for %{x86_64} archs.

%package -n lib%{name}_ir_frontend%{so_ver}
Summary:        Paddle frontend for Intel OpenVINO toolkit

%description -n lib%{name}_ir_frontend%{so_ver}
OpenVINO is an open-source toolkit for optimizing and deploying AI inference.

This package provides the ir frontend for OpenVINO.

%package -n lib%{name}_onnx_frontend%{so_ver}
Summary:        Onnx frontend for OpenVINO toolkit

%description -n lib%{name}_onnx_frontend%{so_ver}
OpenVINO is an open-source toolkit for optimizing and deploying AI inference.

This package provides the onnx frontend for OpenVINO.

%package -n lib%{name}_paddle_frontend%{so_ver}
Summary:        Paddle frontend for Intel OpenVINO toolkit

%description -n lib%{name}_paddle_frontend%{so_ver}
OpenVINO is an open-source toolkit for optimizing and deploying AI inference.

This package provides the paddle frontend for OpenVINO.

%package -n lib%{name}_pytorch_frontend%{so_ver}
Summary:        PyTorch frontend for OpenVINO toolkit

%description -n lib%{name}_pytorch_frontend%{so_ver}
OpenVINO is an open-source toolkit for optimizing and deploying AI inference.

This package provides the pytorch frontend for OpenVINO.

%package -n lib%{name}_tensorflow_frontend%{so_ver}
Summary:        TensorFlow frontend for OpenVINO toolkit

%description -n lib%{name}_tensorflow_frontend%{so_ver}
OpenVINO is an open-source toolkit for optimizing and deploying AI inference.

This package provides the tensorflow frontend for OpenVINO.

%package -n lib%{name}_tensorflow_lite_frontend%{so_ver}
Summary:        TensorFlow Lite frontend for OpenVINO toolkit

%description -n lib%{name}_tensorflow_lite_frontend%{so_ver}
OpenVINO is an open-source toolkit for optimizing and deploying AI inference.

This package provides the tensorflow-lite frontend for OpenVINO.

%package -n python-openvino
Summary:        Python module for openVINO toolkit
Requires:       python-numpy < 2
Requires:       python-openvino-telemetry

%description -n python-openvino
OpenVINO is an open-source toolkit for optimizing and deploying AI inference.

This package provides a Python module for interfacing with openVINO toolkit.

%package -n %{name}-sample
Summary:        Samples for use with OpenVINO toolkit
BuildArch:      noarch

%description -n %{name}-sample
OpenVINO is an open-source toolkit for optimizing and deploying AI inference.

This package provides some samples for use with openVINO.

%prep
%autosetup -p1

%build
%if %{defined isLeap15}
export CC=gcc-13 CXX=g++-13
%endif
# Otherwise intel_cpu plugin declares an executable stack
%ifarch %{x86_64}
%define build_ldflags -Wl,-z,noexecstack
%endif
%cmake \
    -DCMAKE_CXX_STANDARD=17 \
    -DBUILD_SHARED_LIBS=ON \
    -DENABLE_OV_ONNX_FRONTEND=ON \
    -DENABLE_OV_PADDLE_FRONTEND=ON \
    -DENABLE_OV_PYTORCH_FRONTEND=ON \
    -DENABLE_OV_IR_FRONTEND=ON \
    -DENABLE_OV_TF_FRONTEND=ON \
    -DENABLE_OV_TF_LITE_FRONTEND=ON \
    -DENABLE_INTEL_GPU=OFF \
    -DENABLE_JS=OFF \
    -DENABLE_PYTHON=ON \
    -DENABLE_WHEEL=OFF \
    -DENABLE_SYSTEM_OPENCL=ON \
    -DENABLE_SYSTEM_PROTOBUF=ON \
    -DENABLE_SYSTEM_PUGIXML=ON \
    -DENABLE_SYSTEM_SNAPPY=ON \
    -DENABLE_SYSTEM_TBB=ON \
%if %{defined isLeap15}
    -DENABLE_TBBBIND_2_5=OFF \
%endif
    -DONNX_USE_PROTOBUF_SHARED_LIBS=ON \
    -DProtobuf_USE_STATIC_LIBS=OFF \
    %{nil}
%cmake_build
# Manually generate dist-info dir
export WHEEL_VERSION=%{version} \
       BUILD_TYPE=RelWithDebInfo
%ifarch %{power64}

# RelWithDebInfo
# Manual hackery for power64 because it not "officially" supported
sed -i "s/{ARCH}/%{_arch}/" ../src/bindings/python/wheel/setup.py
%endif
%python_exec ../src/bindings/python/wheel/setup.py dist_info -o ../

%install
%cmake_install

rm %{buildroot}%{_datadir}/%{prj_name}/samples/cpp/thirdparty/nlohmann_json/.cirrus.yml

# Hash-bangs in non-exec python sample scripts
sed -Ei "1{\@/usr/bin/env@d}" \
    %{buildroot}%{_datadir}/%{prj_name}/samples/python/benchmark/bert_benchmark/bert_benchmark.py \
    %{buildroot}%{_datadir}/%{prj_name}/samples/python/benchmark/sync_benchmark/sync_benchmark.py \
    %{buildroot}%{_datadir}/%{prj_name}/samples/python/benchmark/throughput_benchmark/throughput_benchmark.py \
    %{buildroot}%{_datadir}/%{prj_name}/samples/python/classification_sample_async/classification_sample_async.py \
    %{buildroot}%{_datadir}/%{prj_name}/samples/python/hello_classification/hello_classification.py \
    %{buildroot}%{_datadir}/%{prj_name}/samples/python/hello_query_device/hello_query_device.py \
    %{buildroot}%{_datadir}/%{prj_name}/samples/python/hello_reshape_ssd/hello_reshape_ssd.py \
    %{buildroot}%{_datadir}/%{prj_name}/samples/python/model_creation_sample/model_creation_sample.py

# Unnecessary if we get our package dependencies and lib paths right!
rm -fr %{buildroot}%{_prefix}/install_dependencies \
       %{buildroot}%{_prefix}/setupvars.sh

%{python_expand rm %{buildroot}%{$python_sitearch}/requirements.txt
chmod -x %{buildroot}%{$python_sitearch}/%{name}/tools/ovc/ovc.py
cp -r %{name}-%{version}.dist-info %{buildroot}%{$python_sitearch}/
%fdupes %{buildroot}%{$python_sitearch}/%{name}/
}

%fdupes %{buildroot}%{_datadir}/

# We do not use bundled thirdparty libs
rm -fr %{buildroot}%{_datadir}/licenses/*

%ldconfig_scriptlets -n %{shlib}
%ldconfig_scriptlets -n %{shlib_c}
%ldconfig_scriptlets -n lib%{name}_ir_frontend%{so_ver}
%ldconfig_scriptlets -n lib%{name}_onnx_frontend%{so_ver}
%ldconfig_scriptlets -n lib%{name}_paddle_frontend%{so_ver}
%ldconfig_scriptlets -n lib%{name}_pytorch_frontend%{so_ver}
%ldconfig_scriptlets -n lib%{name}_tensorflow_lite_frontend%{so_ver}
%ldconfig_scriptlets -n lib%{name}_tensorflow_frontend%{so_ver}

%files -n %{shlib}
%license LICENSE
%{_libdir}/libopenvino.so.*

%files -n %{shlib_c}
%license LICENSE
%{_libdir}/libopenvino_c.so.*

%files -n %{name}-auto-batch-plugin
%dir %{_libdir}/%{prj_name}
%{_libdir}/%{prj_name}/libopenvino_auto_batch_plugin.so

%files -n %{name}-auto-plugin
%dir %{_libdir}/%{prj_name}
%{_libdir}/%{prj_name}/libopenvino_auto_plugin.so

%ifarch %{x86_64}
%files -n %{name}-intel-cpu-plugin
%dir %{_libdir}/%{prj_name}
%{_libdir}/%{prj_name}/libopenvino_intel_cpu_plugin.so

%files -n %{name}-intel-npu-plugin
%dir %{_libdir}/%{prj_name}
%{_libdir}/%{prj_name}/libopenvino_intel_npu_plugin.so
%endif

%ifarch %{arm64}
%files -n %{name}-arm-cpu-plugin
%dir %{_libdir}/%{prj_name}
%{_libdir}/%{prj_name}/libopenvino_arm_cpu_plugin.so
%endif

%ifarch riscv64
%files -n %{name}-riscv-cpu-plugin
%dir %{_libdir}/%{prj_name}
%{_libdir}/%{prj_name}/libopenvino_riscv_cpu_plugin.so
%endif

%files -n %{name}-hetero-plugin
%dir %{_libdir}/%{prj_name}
%{_libdir}/%{prj_name}/libopenvino_hetero_plugin.so

%files -n lib%{name}_onnx_frontend%{so_ver}
%{_libdir}/libopenvino_onnx_frontend.so.*

%files -n lib%{name}_ir_frontend%{so_ver}
%{_libdir}/libopenvino_ir_frontend.so.*

%files -n lib%{name}_paddle_frontend%{so_ver}
%{_libdir}/libopenvino_paddle_frontend.so.*

%files -n lib%{name}_pytorch_frontend%{so_ver}
%{_libdir}/libopenvino_pytorch_frontend.so.*

%files -n lib%{name}_tensorflow_frontend%{so_ver}
%{_libdir}/libopenvino_tensorflow_frontend.so.*

%files -n lib%{name}_tensorflow_lite_frontend%{so_ver}
%{_libdir}/libopenvino_tensorflow_lite_frontend.so.*

%files -n %{name}-sample
%license LICENSE
%{_datadir}/%{prj_name}/

%files -n %{name}-devel
%license LICENSE
%{_includedir}/%{name}/
%{_libdir}/cmake/%{prj_name}/
%{_libdir}/*.so
%{_libdir}/pkgconfig/openvino.pc

%files %{python_files openvino}
%license LICENSE
%{python_sitearch}/openvino/
%{python_sitearch}/openvino*-info/

%changelog
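
A minimal smoke test of the resulting packages, assuming an x86_64 host and the default python3 flavour (package names follow the spec above, e.g. libopenvino2430 from so_ver 2430; on Leap 15 the Python subpackage is built for the python311 flavour instead):

    # Install the runtime, the Intel CPU plugin and the Python bindings, then
    # list the available inference devices through the public openvino API.
    sudo zypper install libopenvino2430 openvino-intel-cpu-plugin python3-openvino
    python3 -c 'import openvino as ov; print(ov.Core().available_devices)'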