\documentclass{beamer}

% Theme choice
\usetheme{Madrid}

% Optional packages
\usepackage{graphicx} % For including images
\usepackage{amsmath}  % For math symbols and formulas
\usepackage{hyperref} % For hyperlinks
% NOTE(review): beamer already loads hyperref (and xcolor, used by \colorbox
% below), so the line above is redundant but harmless when given no options.

% Small helpers for simple boxes in diagrams.
%   \ovbox{color}{text}  -- wide box (60% of \linewidth), for pipeline stages
%   \ovpbox{color}{text} -- narrow box (15% of \linewidth), for device plugins
% The starred \newcommand* form is used because the bodies are short
% (no \par allowed), which gives better error localization.
\newcommand*{\ovbox}[2]{\colorbox{#1}{\strut\parbox[c][1.2em]{0.6\linewidth}{\centering #2}}}
\newcommand*{\ovpbox}[2]{\colorbox{#1}{\strut\parbox[c][1.2em]{0.15\linewidth}{\centering #2}}}
\setlength{\fboxsep}{6pt} % padding inside \colorbox/\fbox frames

% Where to look for images (put the OpenVINO logo into one of these)
\graphicspath{{./assets/}{./images/}}

\title[OpenVINO introduction]{OpenVINO introduction}
\author{Obolenskiy Arseniy, Nesterov Alexander}
\institute{ITLab}

\date{\today}
| 24 | + |
| 25 | +% Redefine the footline to display both the short title and the org name |
% Redefine the footline: institute (45% width), short title (45%), and the
% frame counter (10%), together spanning the full \paperwidth.
% Fix: a `%' is added directly after the opening brace -- without it the
% newline becomes a space token at the start of the footline hbox and can
% shift the first box. Every subsequent line already ends in `%' for the
% same reason.
\setbeamertemplate{footline}{%
  \leavevmode%
  \hbox{%
  \begin{beamercolorbox}[wd=.45\paperwidth,ht=2.5ex,dp=1ex,leftskip=1em,center]{author in head/foot}%
    \usebeamerfont{author in head/foot}\insertshortinstitute% Displays the university name
  \end{beamercolorbox}%
  \begin{beamercolorbox}[wd=.45\paperwidth,ht=2.5ex,dp=1ex,leftskip=1em,center]{author in head/foot}%
    \usebeamerfont{author in head/foot}\insertshorttitle% Displays the short title
  \end{beamercolorbox}%
  \begin{beamercolorbox}[wd=.1\paperwidth,ht=2.5ex,dp=1ex,rightskip=1em,center]{author in head/foot}%
    \usebeamerfont{author in head/foot}\insertframenumber{} / \inserttotalframenumber%
  \end{beamercolorbox}}%
  \vskip0pt%
}
| 40 | + |
% Insert an automatic section-title slide at the start of every section.
% The empty optional argument [] replaces beamer's default code for
% "sections without a star"; the slide simply centers the section name.
\AtBeginSection[]{
  \begin{frame}
    \centering
    \Huge\insertsection%
  \end{frame}
}
| 47 | + |
| 48 | +\begin{document} |
| 49 | + |
% Title slide built from \title/\author/\institute/\date set in the preamble.
% Inside a frame, beamer's \maketitle is equivalent to \titlepage.
\begin{frame}
  \maketitle
\end{frame}
| 53 | + |
% Table-of-contents slide; entries are generated from the \section
% commands below (Overview, References).
\begin{frame}{Contents}
  \tableofcontents
\end{frame}
| 57 | + |
\section{Overview}

% Two-column intro slide: description text on the left (70% of the text
% width), the OpenVINO logo on the right (25%). [T] top-aligns the columns.
\begin{frame}{What is OpenVINO?}
  \begin{columns}[T,totalwidth=\textwidth]
    \begin{column}{0.7\textwidth}
      OpenVINO (Open Visual Inference and Neural Network Optimization) is a toolkit developed by Intel for optimizing and deploying deep learning models for inference on Intel hardware.
      It provides a unified API and a set of tools to streamline the process of model optimization, conversion, and deployment across various Intel architectures.
    \end{column}
    \begin{column}{0.25\textwidth}
      \centering
      % Logo is resolved via \graphicspath (./assets/ or ./images/).
      \includegraphics[width=\linewidth]{openvino-logo.png}
    \end{column}
  \end{columns}
\end{frame}
| 73 | + |
% One-slide summary of the toolkit: purpose, components, supported model
% formats, target devices, and benefits. Item text is rendered literally,
% so it is left untouched; only comments are added.
\begin{frame}{OpenVINO at a Glance}
  \begin{itemize}
    \item \textbf{Purpose:} Optimize and deploy AI inference across Intel CPUs, GPUs, NPUs, and other accelerators
    \item \textbf{Core components:} Model Optimizer, Runtime (Inference Engine), Post-Training Optimization Tool, Benchmark tools, Notebooks
    \item \textbf{Model formats (Frontends):} IR (\texttt{.xml/.bin}), ONNX (\texttt{.onnx}), TensorFlow (SavedModel/MetaGraph/frozen \texttt{.pb/.pbtxt}), TensorFlow Lite (\texttt{.tflite}), PaddlePaddle (\texttt{.pdmodel}), PyTorch (TorchScript/FX)
    \item \textbf{Targets:} CPU, iGPU, dGPU (e.g., Intel Arc), NPU, and more via plugins
    \item \textbf{Key benefits:} Performance, portability, unified API, quantization (INT8), easy deployment
  \end{itemize}
  \footnotesize Reference: \href{https://docs.openvino.ai/}{docs.openvino.ai}
\end{frame}
| 84 | + |
% Full-width documentation diagram with a source attribution line.
\begin{frame}{Overview Diagram}
  \centering
  \includegraphics[width=\textwidth]{openvino-overview-diagram.jpg}
  % Fix: the link target (raw image URL) disagreed with the displayed text
  % (the docs index page); both now point at the page the diagram comes from.
  \footnotesize Source: \href{https://docs.openvino.ai/2025/index.html}{https://docs.openvino.ai/2025/index.html}
\end{frame}
| 90 | + |
% Full-width workflow picture from Intel's OpenVINO overview page,
% with a matching source attribution link below it.
\begin{frame}{Workflow Overview}
  \centering
  \includegraphics[width=\textwidth]{openvino-use-case.png}
  \footnotesize Source: \href{https://www.intel.com/content/www/us/en/developer/tools/openvino-toolkit/overview.html}{https://www.intel.com/content/www/us/en/developer/tools/openvino-toolkit/overview.html}
\end{frame}
| 96 | + |
% Hand-drawn stack diagram: application -> OpenVINO Runtime -> plugin
% dispatcher -> per-device plugins. Built from the \ovbox/\ovpbox helpers
% defined in the preamble; arrows are math-mode \Downarrow glyphs and the
% vertical rhythm is tuned with \\[..], so the markup is left byte-identical.
\begin{frame}{Device Plugins Architecture}
  \centering
  \ovbox{gray!15}{\textbf{Application} (C++/Python)}\\[0.6em]
  $\Downarrow$\\[0.2em]
  \ovbox{gray!15}{\textbf{OpenVINO Runtime} (\texttt{ov::Core})}\\[0.6em]
  $\Downarrow$\\[0.2em]
  \ovbox{blue!10}{\textbf{Plugin Dispatcher} (AUTO / MULTI / HETERO)}\\[0.8em]
  $\Downarrow$\\[0.6em]

  % Row of device plugins (narrow boxes, trailing % suppresses inter-box
  % spaces beyond the explicit \hspace)
  \ovpbox{gray!20}{CPU}\hspace{0.6em}%
  \ovpbox{green!15}{GPU}\hspace{0.6em}%
  \ovpbox{magenta!15}{NPU}%

  \vspace{0.6em}
  \footnotesize Examples: \texttt{CPU}, \texttt{GPU.0}, \texttt{NPU}, \texttt{AUTO:CPU,GPU}, \texttt{MULTI:GPU,CPU}, \texttt{HETERO:GPU,CPU}
\end{frame}
| 114 | + |
% Per-device plugin notes (CPU / GPU / NPU / TEMPLATE reference plugin).
% `\@' before a period forces end-of-sentence spacing after capitals
% (useful after "CPU."); after lowercase words like "bandwidth" it is a
% harmless no-op, so both occurrences are kept as-is.
\begin{frame}{Device Plugin Details}
  \begin{itemize}
    \item \textbf{CPU}: High compatibility and strong baseline performance; uses optimized kernels (e.g., oneDNN). Supports FP32/FP16/INT8 with quantized models.
    \item \textbf{GPU}: Integrated and discrete Intel GPUs via Level Zero/OpenCL; excels at FP16 and INT8 throughput; benefits from device-specific kernels and memory bandwidth\@.
    \item \textbf{NPU}: Intel NPU (e.g., Core Ultra) for efficient, low-power inference on common vision/LLM ops; ideal for always-on and battery-sensitive workloads.
    \item \textbf{TEMPLATE plugin}: Reference backend for building custom device plugins; demonstrates the plugin API (compiled model, infer request, op support, memory) and is useful for prototyping.
  \end{itemize}
  % NOTE(review): `\;' and `|' here are typeset in text mode; with the modern
  % LaTeX kernel and T1 encoding they render as intended -- verify in the PDF.
  \footnotesize See: \href{https://docs.openvino.ai/2025/documentation/compatibility-and-support/supported-devices.html}{https://docs.openvino.ai/2025/documentation/compatibility-and-support/supported-devices.html} \;\;|\;\; \href{https://docs.openvino.ai/2024/openvino_docs_OV_UG_supported_plugins_Supported_Devices.html}{Supported devices}
\end{frame}
| 124 | + |
% The three virtual "meta-device" modes (AUTO / MULTI / HETERO) that
% dispatch work across the physical device plugins.
% NOTE(review): the curly quotes around "best" are literal Unicode
% characters; they require UTF-8 input (the LaTeX default since 2018).
\begin{frame}{Inference Modes}
  \begin{itemize}
    \item \textbf{AUTO plugin}: Chooses the “best” device available at runtime; can constrain candidates, e.g., \texttt{AUTO:GPU,CPU}.
    \item \textbf{MULTI plugin}: Executes across multiple devices in parallel to maximize throughput, e.g., \texttt{MULTI:GPU,CPU}.
    \item \textbf{HETERO plugin}: Splits a single graph by layer/op support across devices, e.g., heavy ops on GPU, fallbacks on CPU\@.
  \end{itemize}
  \footnotesize See: \href{https://docs.openvino.ai/2025/openvino-workflow/running-inference/inference-devices-and-modes.html}{https://docs.openvino.ai/2025/openvino-workflow/running-inference/inference-devices-and-modes.html} \;\;|\;\; \href{https://docs.openvino.ai/2024/openvino_docs_OV_UG_supported_plugins_Supported_Devices.html}{Inference Devices and Modes}
\end{frame}
| 133 | + |
\section{References}
% Pointers to the official project resources. Underscores in the *visible*
% link text must stay escaped (\_) because they sit outside \href's URL
% argument; the URL arguments themselves take the raw underscore.
\begin{frame}{References}
  \begin{itemize}
    \item OpenVINO Official documentation: \href{https://docs.openvino.ai/}{https://docs.openvino.ai/}
    \item OpenVINO repository: \href{https://github.com/openvinotoolkit/openvino}{https://github.com/openvinotoolkit/openvino}
    \item OpenVINO Contrib: \href{https://github.com/openvinotoolkit/openvino_contrib}{https://github.com/openvinotoolkit/openvino\_contrib}
    \item OpenVINO Notebooks: \href{https://github.com/openvinotoolkit/openvino_notebooks}{https://github.com/openvinotoolkit/openvino\_notebooks}
  \end{itemize}
\end{frame}
| 143 | + |
| 144 | +\end{document} |