diff --git a/notebooks/3D-segmentation-point-clouds/3D-segmentation-point-clouds.ipynb b/notebooks/3D-segmentation-point-clouds/3D-segmentation-point-clouds.ipynb
index fbf4fc3a659..e8a1678b3d8 100644
--- a/notebooks/3D-segmentation-point-clouds/3D-segmentation-point-clouds.ipynb
+++ b/notebooks/3D-segmentation-point-clouds/3D-segmentation-point-clouds.ipynb
@@ -40,14 +40,7 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "import platform\n",
-    "\n",
-    "%pip install -q \"openvino>=2023.1.0\" \"tqdm\"\n",
-    "\n",
-    "if platform.system() != \"Windows\":\n",
-    "    %pip install -q \"matplotlib>=3.4\"\n",
-    "else:\n",
-    "    %pip install -q \"matplotlib>=3.4,<3.7\""
+    "%pip install -q \"openvino>=2023.1.0\" \"tqdm\" \"matplotlib>=3.4\""
    ]
   },
   {
diff --git a/notebooks/async-api/async-api.ipynb b/notebooks/async-api/async-api.ipynb
index ed8d1b576c4..596ff0dae97 100644
--- a/notebooks/async-api/async-api.ipynb
+++ b/notebooks/async-api/async-api.ipynb
@@ -55,14 +55,8 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "import platform\n",
-    "\n",
     "%pip install -q \"openvino>=2023.1.0\"\n",
-    "%pip install -q opencv-python\n",
-    "if platform.system() != \"windows\":\n",
-    "    %pip install -q \"matplotlib>=3.4\"\n",
-    "else:\n",
-    "    %pip install -q \"matplotlib>=3.4,<3.7\""
+    "%pip install -q opencv-python \"matplotlib>=3.4\""
    ]
   },
   {
diff --git a/notebooks/auto-device/auto-device.ipynb b/notebooks/auto-device/auto-device.ipynb
index 5f4a42bd954..1147c915afb 100644
--- a/notebooks/auto-device/auto-device.ipynb
+++ b/notebooks/auto-device/auto-device.ipynb
@@ -70,12 +70,10 @@
     "import platform\n",
     "\n",
     "# Install required packages\n",
-    "%pip install -q \"openvino>=2023.1.0\" \"numpy<2\" Pillow torch torchvision tqdm --extra-index-url https://download.pytorch.org/whl/cpu\n",
+    "%pip install -q \"openvino>=2023.1.0\" \"matplotlib>=3.4\" Pillow torch torchvision tqdm --extra-index-url https://download.pytorch.org/whl/cpu\n",
     "\n",
-    "if platform.system() != \"Windows\":\n",
-    "    %pip install -q \"matplotlib>=3.4\"\n",
-    "else:\n",
-    "    %pip install -q \"matplotlib>=3.4,<3.7\""
+    "if platform.system() == \"Darwin\":\n",
+    "    %pip install -q \"numpy<2.0.0\""
    ]
   },
   {
diff --git a/notebooks/clip-zero-shot-image-classification/clip-zero-shot-classification.ipynb b/notebooks/clip-zero-shot-image-classification/clip-zero-shot-classification.ipynb
index f7e005472cd..0334816ba79 100644
--- a/notebooks/clip-zero-shot-image-classification/clip-zero-shot-classification.ipynb
+++ b/notebooks/clip-zero-shot-image-classification/clip-zero-shot-classification.ipynb
@@ -88,14 +88,7 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "import platform\n",
-    "\n",
-    "%pip install -q --extra-index-url https://download.pytorch.org/whl/cpu \"gradio>=4.19\" \"openvino>=2023.1.0\" \"transformers[torch]>=4.30\" \"datasets\" \"nncf>=2.6.0\" \"torch>=2.1\" Pillow\n",
-    "\n",
-    "if platform.system() != \"Windows\":\n",
-    "    %pip install -q \"matplotlib>=3.4\"\n",
-    "else:\n",
-    "    %pip install -q \"matplotlib>=3.4,<3.7\"\n",
+    "%pip install -q --extra-index-url https://download.pytorch.org/whl/cpu \"gradio>=4.19\" \"matplotlib>=3.4\" \"openvino>=2023.1.0\" \"transformers[torch]>=4.30\" \"datasets\" \"nncf>=2.6.0\" \"torch>=2.1\" Pillow\n",
     "\n",
     "import requests\n",
diff --git a/notebooks/controlnet-stable-diffusion/controlnet-stable-diffusion.ipynb b/notebooks/controlnet-stable-diffusion/controlnet-stable-diffusion.ipynb
index 580f079436f..108f9dafadd 100644
--- a/notebooks/controlnet-stable-diffusion/controlnet-stable-diffusion.ipynb
+++ b/notebooks/controlnet-stable-diffusion/controlnet-stable-diffusion.ipynb
@@ -118,7 +118,7 @@
    "outputs": [],
    "source": [
     "%pip install -q --extra-index-url https://download.pytorch.org/whl/cpu \"torch>=2.1\" \"torchvision\"\n",
-    "%pip install -q \"diffusers>=0.14.0\" \"transformers>=4.30.2\" \"controlnet-aux>=0.0.6\" \"gradio>=3.36\" --extra-index-url https://download.pytorch.org/whl/cpu\n",
+    "%pip install -q \"diffusers>=0.14.0\" \"matplotlib>=3.4\" \"transformers>=4.30.2\" \"controlnet-aux>=0.0.6\" \"gradio>=3.36\" --extra-index-url https://download.pytorch.org/whl/cpu\n",
     "%pip install -q \"openvino>=2023.1.0\" \"datasets>=2.14.6\" \"nncf>=2.7.0\"\n",
     "\n",
     "import requests\n",
diff --git a/notebooks/convert-to-openvino/convert-to-openvino.ipynb b/notebooks/convert-to-openvino/convert-to-openvino.ipynb
index ada618d774e..d557df798c5 100644
--- a/notebooks/convert-to-openvino/convert-to-openvino.ipynb
+++ b/notebooks/convert-to-openvino/convert-to-openvino.ipynb
@@ -43,9 +43,8 @@
    "outputs": [],
    "source": [
     "# Required imports. Please execute this cell first.\n",
-    "%pip install --upgrade pip\n",
     "%pip install -q --extra-index-url https://download.pytorch.org/whl/cpu \\\n",
-    "\"openvino-dev>=2024.0.0\" \"requests\" \"tqdm\" \"transformers>=4.31\" \"onnx<1.16.2\" \"torch>=2.1\" \"torchvision\" \"tensorflow_hub\" \"tensorflow\""
+    "\"openvino>=2024.4.0\" \"requests\" \"tqdm\" \"transformers>=4.31\" \"onnx!=1.16.2\" \"torch>=2.1\" \"torchvision\" \"tensorflow_hub\" \"tensorflow\""
    ]
   },
   {
@@ -617,26 +616,16 @@
    ]
   },
   {
-   "cell_type": "code",
-   "execution_count": 20,
+   "attachments": {},
+   "cell_type": "markdown",
    "metadata": {},
-   "outputs": [
-    {
-     "name": "stderr",
-     "output_type": "stream",
-     "text": [
-      "huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...\n",
-      "To disable this warning, you can either:\n",
-      "\t- Avoid using `tokenizers` before the fork if possible\n",
-      "\t- Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)\n"
-     ]
-    }
-   ],
    "source": [
+    "```python\n",
     "# Legacy Model Optimizer API\n",
     "from openvino.tools import mo\n",
     "\n",
-    "ov_model = mo.convert_model(ONNX_CV_MODEL_PATH, layout=\"nchw\")"
+    "ov_model = mo.convert_model(ONNX_CV_MODEL_PATH, layout=\"nchw\")\n",
+    "```"
    ]
   },
   {
@@ -670,18 +659,19 @@
    ]
   },
   {
-   "cell_type": "code",
-   "execution_count": 22,
+   "attachments": {},
+   "cell_type": "markdown",
    "metadata": {},
-   "outputs": [],
    "source": [
+    "```python\n",
     "# Legacy Model Optimizer API\n",
     "from openvino.tools import mo\n",
     "\n",
     "ov_model = mo.convert_model(ONNX_CV_MODEL_PATH, layout=\"nchw->nhwc\")\n",
     "\n",
     "# alternatively use source_layout and target_layout parameters\n",
-    "ov_model = mo.convert_model(ONNX_CV_MODEL_PATH, source_layout=\"nchw\", target_layout=\"nhwc\")"
+    "ov_model = mo.convert_model(ONNX_CV_MODEL_PATH, source_layout=\"nchw\", target_layout=\"nhwc\")\n",
+    "```"
    ]
   },
   {
@@ -715,12 +705,13 @@
    ]
   },
   {
-   "cell_type": "code",
-   "execution_count": 24,
+   "attachments": {},
+   "cell_type": "markdown",
    "metadata": {},
-   "outputs": [],
    "source": [
+    "```python\n",
     "# Legacy Model Optimizer API\n",
+    "\n",
     "from openvino.tools import mo\n",
     "\n",
     "\n",
@@ -728,7 +719,8 @@
     "    ONNX_CV_MODEL_PATH,\n",
     "    mean_values=[255 * x for x in [0.485, 0.456, 0.406]],\n",
     "    scale_values=[255 * x for x in [0.229, 0.224, 0.225]],\n",
-    ")"
+    ")\n",
+    "```"
    ]
   },
   {
@@ -760,15 +752,16 @@
    ]
   },
   {
-   "cell_type": "code",
-   "execution_count": 26,
+   "attachments": {},
+   "cell_type": "markdown",
    "metadata": {},
-   "outputs": [],
    "source": [
+    "```python\n",
     "# Legacy Model Optimizer API\n",
     "from openvino.tools import mo\n",
     "\n",
-    "ov_model = mo.convert_model(ONNX_CV_MODEL_PATH, reverse_input_channels=True)"
+    "ov_model = mo.convert_model(ONNX_CV_MODEL_PATH, reverse_input_channels=True)\n",
+    "```"
    ]
   },
   {
diff --git a/notebooks/cross-lingual-books-alignment/cross-lingual-books-alignment.ipynb b/notebooks/cross-lingual-books-alignment/cross-lingual-books-alignment.ipynb
index 8f8408cda5f..76f00fcec7c 100644
--- a/notebooks/cross-lingual-books-alignment/cross-lingual-books-alignment.ipynb
+++ b/notebooks/cross-lingual-books-alignment/cross-lingual-books-alignment.ipynb
@@ -61,14 +61,7 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "import platform\n",
-    "\n",
-    "if platform.system() != \"Windows\":\n",
-    "    %pip install -q \"matplotlib>=3.4\"\n",
-    "else:\n",
-    "    %pip install -q \"matplotlib>=3.4,<3.7\"\n",
-    "\n",
-    "%pip install -q --extra-index-url https://download.pytorch.org/whl/cpu requests pysbd transformers \"torch>=2.1\" \"openvino>=2023.1.0\" seaborn ipywidgets"
+    "%pip install -q --extra-index-url https://download.pytorch.org/whl/cpu requests pysbd transformers \"torch>=2.1\" \"openvino>=2023.1.0\" seaborn ipywidgets \"matplotlib>=3.4\""
    ]
   },
   {
diff --git a/notebooks/ct-segmentation-quantize/ct-segmentation-quantize-nncf.ipynb b/notebooks/ct-segmentation-quantize/ct-segmentation-quantize-nncf.ipynb
index b9b1239b5e3..62dbb0f01c3 100644
--- a/notebooks/ct-segmentation-quantize/ct-segmentation-quantize-nncf.ipynb
+++ b/notebooks/ct-segmentation-quantize/ct-segmentation-quantize-nncf.ipynb
@@ -1,6 +1,7 @@
 {
  "cells": [
   {
+   "attachments": {},
    "cell_type": "markdown",
    "metadata": {},
    "source": [
@@ -68,36 +69,15 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 1,
+   "execution_count": null,
    "metadata": {},
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "\u001b[33mWARNING: Error parsing dependencies of torchsde: .* suffix can only be used with `==` or `!=` operators\n",
-      "    numpy (>=1.19.*) ; python_version >= \"3.7\"\n",
-      "           ~~~~~~~^\u001b[0m\u001b[33m\n",
-      "\u001b[0mNote: you may need to restart the kernel to use updated packages.\n",
-      "\u001b[33mWARNING: Error parsing dependencies of torchsde: .* suffix can only be used with `==` or `!=` operators\n",
-      "    numpy (>=1.19.*) ; python_version >= \"3.7\"\n",
-      "           ~~~~~~~^\u001b[0m\u001b[33m\n",
-      "\u001b[0mNote: you may need to restart the kernel to use updated packages.\n"
-     ]
-    }
-   ],
+   "outputs": [],
    "source": [
-    "import platform\n",
-    "\n",
-    "%pip install -q \"openvino>=2023.3.0\" \"monai>=0.9.1\" \"torchmetrics>=0.11.0\" \"nncf>=2.8.0\" \"opencv-python\" torch tqdm --extra-index-url https://download.pytorch.org/whl/cpu\n",
-    "\n",
-    "if platform.system() != \"Windows\":\n",
-    "    %pip install -q \"matplotlib>=3.4\"\n",
-    "else:\n",
-    "    %pip install -q \"matplotlib>=3.4,<3.7\""
+    "%pip install -q \"openvino>=2023.3.0\" \"monai>=0.9.1\" \"torchmetrics>=0.11.0\" \"nncf>=2.8.0\" \"opencv-python\" \"matplotlib>=3.4\" torch tqdm --extra-index-url https://download.pytorch.org/whl/cpu"
    ]
   },
   {
+   "attachments": {},
    "cell_type": "markdown",
    "metadata": {},
    "source": [
@@ -172,6 +152,7 @@
    ]
   },
   {
+   "attachments": {},
    "cell_type": "markdown",
    "metadata": {},
    "source": [
@@ -197,6 +178,7 @@
    ]
   },
   {
+   "attachments": {},
    "cell_type": "markdown",
    "metadata": {},
    "source": [
@@ -269,6 +251,7 @@
    ]
   },
   {
+   "attachments": {},
    "cell_type": "markdown",
    "metadata": {},
    "source": [
@@ -307,6 +290,7 @@
    ]
   },
   {
+   "attachments": {},
    "cell_type": "markdown",
    "metadata": {},
    "source": [
@@ -315,6 +299,7 @@
    ]
   },
   {
+   "attachments": {},
    "cell_type": "markdown",
    "metadata": {},
    "source": [
@@ -384,6 +369,7 @@
    ]
   },
   {
+   "attachments": {},
    "cell_type": "markdown",
    "metadata": {},
    "source": [
@@ -432,6 +418,7 @@
    ]
   },
   {
+   "attachments": {},
    "cell_type": "markdown",
    "metadata": {
     "tags": []
@@ -477,6 +464,7 @@
    ]
   },
   {
+   "attachments": {},
    "cell_type": "markdown",
    "metadata": {
     "tags": []
@@ -509,6 +497,7 @@
    ]
   },
   {
+   "attachments": {},
    "cell_type": "markdown",
    "metadata": {
     "jupyter": {
@@ -555,6 +544,7 @@
    ]
   },
   {
+   "attachments": {},
    "cell_type": "markdown",
    "metadata": {
     "execution": {
@@ -686,6 +676,7 @@
    ]
   },
   {
+   "attachments": {},
    "cell_type": "markdown",
    "metadata": {},
    "source": [
@@ -727,6 +718,7 @@
    ]
   },
   {
+   "attachments": {},
    "cell_type": "markdown",
    "metadata": {
     "jupyter": {
@@ -740,6 +732,7 @@
    ]
   },
   {
+   "attachments": {},
    "cell_type": "markdown",
    "metadata": {
     "tags": []
@@ -780,6 +773,7 @@
    ]
   },
   {
+   "attachments": {},
    "cell_type": "markdown",
    "metadata": {},
    "source": [
@@ -818,6 +812,7 @@
    ]
   },
   {
+   "attachments": {},
    "cell_type": "markdown",
    "metadata": {
     "tags": []
@@ -852,6 +847,7 @@
    ]
   },
   {
+   "attachments": {},
    "cell_type": "markdown",
    "metadata": {
     "tags": []
@@ -1075,6 +1071,7 @@
    ]
   },
   {
+   "attachments": {},
    "cell_type": "markdown",
    "metadata": {
     "tags": []
@@ -1181,6 +1178,7 @@
    ]
   },
   {
+   "attachments": {},
    "cell_type": "markdown",
    "metadata": {
     "tags": []
@@ -1197,6 +1195,7 @@
    ]
   },
   {
+   "attachments": {},
    "cell_type": "markdown",
    "metadata": {},
    "source": [
@@ -1232,6 +1231,7 @@
    ]
   },
   {
+   "attachments": {},
    "cell_type": "markdown",
    "metadata": {
     "tags": []
@@ -1284,6 +1284,7 @@
    ]
   },
   {
+   "attachments": {},
    "cell_type": "markdown",
    "metadata": {},
    "source": [
@@ -1294,7 +1295,7 @@
     "- [NNCF Repository](https://github.com/openvinotoolkit/nncf/)\n",
     "- [Neural Network Compression Framework for fast model inference](https://arxiv.org/abs/2002.08679)\n",
     "- [OpenVINO API Tutorial](../openvino-api/openvino-api.ipynb)\n",
-    "- [OpenVINO PyPI (pip install openvino-dev)](https://pypi.org/project/openvino-dev/)\n",
+    "- [OpenVINO PyPI (pip install openvino)](https://pypi.org/project/openvino/)\n",
     "\n",
     "**Kits19 Data**\n",
     " - [Kits19 Challenge Homepage](https://kits19.grand-challenge.org/)\n",
diff --git a/notebooks/depth-anything/depth-anything-v2.ipynb b/notebooks/depth-anything/depth-anything-v2.ipynb
index e3cf0a4afaf..19c361749cd 100644
--- a/notebooks/depth-anything/depth-anything-v2.ipynb
+++ b/notebooks/depth-anything/depth-anything-v2.ipynb
@@ -87,7 +87,7 @@
    "source": [
     "import platform\n",
     "\n",
-    "%pip install -q \"openvino>=2024.2.0\" \"datasets>=2.14.6\" \"nncf>=2.11.0\" \"tqdm\"\n",
+    "%pip install -q \"openvino>=2024.2.0\" \"datasets>=2.14.6\" \"nncf>=2.11.0\" \"tqdm\" \"matplotlib>=3.4\"\n",
     "%pip install -q \"typing-extensions>=4.9.0\" eval-type-backport \"gradio>=4.19\"\n",
     "%pip install -q -r requirements.txt --extra-index-url https://download.pytorch.org/whl/cpu\n",
     "\n",
diff --git a/notebooks/depth-anything/depth-anything.ipynb b/notebooks/depth-anything/depth-anything.ipynb
index 0b03559047a..de98677fc8e 100644
--- a/notebooks/depth-anything/depth-anything.ipynb
+++ b/notebooks/depth-anything/depth-anything.ipynb
@@ -72,7 +72,7 @@
     "%cd Depth-Anything\n",
     "\n",
     "%pip install -q \"openvino>=2023.3.0\" \"datasets>=2.14.6\" \"nncf\" \"tqdm\"\n",
-    "%pip install -q \"typing-extensions>=4.9.0\" eval-type-backport \"gradio>=4.19\"\n",
+    "%pip install -q \"typing-extensions>=4.9.0\" eval-type-backport \"gradio>=4.19\" \"matplotlib>=3.4\"\n",
     "%pip install -q -r requirements.txt --extra-index-url https://download.pytorch.org/whl/cpu\n",
     "\n",
     "if platform.python_version_tuple()[1] in [\"8\", \"9\"]:\n",
diff --git a/notebooks/distil-whisper-asr/distil-whisper-asr.ipynb b/notebooks/distil-whisper-asr/distil-whisper-asr.ipynb
index 69ea0a68953..c949507a57a 100644
--- a/notebooks/distil-whisper-asr/distil-whisper-asr.ipynb
+++ b/notebooks/distil-whisper-asr/distil-whisper-asr.ipynb
@@ -70,9 +70,9 @@
    },
    "outputs": [],
    "source": [
-    "%pip install -q \"transformers>=4.35\" \"torch>=2.1,<2.4.0\" \"torchvision<0.19.0\" \"onnx<1.16.2\" --extra-index-url https://download.pytorch.org/whl/cpu\n",
+    "%pip install -q \"transformers>=4.35\" \"torch>=2.4.1\" \"onnx!=1.16.2\" --extra-index-url https://download.pytorch.org/whl/cpu\n",
     "%pip install -q \"git+https://github.com/huggingface/optimum-intel.git\"\n",
-    "%pip install -q \"openvino>=2023.2.0\" datasets \"gradio>=4.0\" \"librosa\" \"soundfile\"\n",
+    "%pip install -q \"openvino>=2023.2.0\" datasets \"gradio>=4.19\" \"librosa\" \"soundfile\"\n",
     "%pip install -q \"nncf>=2.6.0\" \"jiwer\"\n",
     "\n",
     "import requests\n",
diff --git a/notebooks/efficient-sam/efficient-sam.ipynb b/notebooks/efficient-sam/efficient-sam.ipynb
index 79d461ccf19..cb7aa94c13c 100644
--- a/notebooks/efficient-sam/efficient-sam.ipynb
+++ b/notebooks/efficient-sam/efficient-sam.ipynb
@@ -67,14 +67,7 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "import platform\n",
-    "\n",
-    "if platform.system() != \"Windows\":\n",
-    "    %pip install -q \"matplotlib>=3.4\"\n",
-    "else:\n",
-    "    %pip install -q \"matplotlib>=3.4,<3.7\"\n",
-    "\n",
-    "%pip install -q \"openvino>=2023.3.0\" \"nncf>=2.7.0\" opencv-python \"gradio>=4.13\" torch torchvision tqdm --extra-index-url https://download.pytorch.org/whl/cpu"
+    "%pip install -q \"openvino>=2023.3.0\" \"nncf>=2.7.0\" opencv-python \"gradio>=4.13\" \"matplotlib>=3.4\" torch torchvision tqdm --extra-index-url https://download.pytorch.org/whl/cpu"
    ]
   },
   {
diff --git a/notebooks/fast-segment-anything/fast-segment-anything.ipynb b/notebooks/fast-segment-anything/fast-segment-anything.ipynb
index 974b9fd761e..1a55a02c283 100644
--- a/notebooks/fast-segment-anything/fast-segment-anything.ipynb
+++ b/notebooks/fast-segment-anything/fast-segment-anything.ipynb
@@ -64,8 +64,8 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "%pip install -q \"ultralytics==8.2.24\" \"onnx<1.16.2\" tqdm --extra-index-url https://download.pytorch.org/whl/cpu\n",
-    "%pip install -q \"openvino-dev>=2024.0.0\"\n",
+    "%pip install -q \"ultralytics==8.2.24\" \"matplotlib>=3.4\" \"onnx<1.16.2\" tqdm --extra-index-url https://download.pytorch.org/whl/cpu\n",
+    "%pip install -q \"openvino>=2024.4.0\"\n",
     "%pip install -q \"nncf>=2.9.0\"\n",
     "%pip install -q \"gradio>=4.13\""
    ]
diff --git a/notebooks/florence2/florence2.ipynb b/notebooks/florence2/florence2.ipynb
index 932c3daffb0..aa81e74ee6b 100644
--- a/notebooks/florence2/florence2.ipynb
+++ b/notebooks/florence2/florence2.ipynb
@@ -50,14 +50,7 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "import platform\n",
-    "\n",
-    "%pip install -q \"openvino>=2024.3.0\" \"einops\" \"torch>2.1\" \"torchvision\" \"timm>=0.9.8\" \"transformers>=4.41\" \"pillow\" \"gradio>=4.19\" --extra-index-url https://download.pytorch.org/whl/cpu\n",
-    "\n",
-    "if platform.system() != \"Windows\":\n",
-    "    %pip install -q \"matplotlib>=3.4\"\n",
-    "else:\n",
-    "    %pip install -q \"matplotlib>=3.4,<3.7\""
+    "%pip install -q \"openvino>=2024.3.0\" \"einops\" \"torch>2.1\" \"torchvision\" \"matplotlib>=3.4\" \"timm>=0.9.8\" \"transformers>=4.41\" \"pillow\" \"gradio>=4.19\" --extra-index-url https://download.pytorch.org/whl/cpu"
    ]
   },
   {
diff --git a/notebooks/handwritten-ocr/handwritten-ocr.ipynb b/notebooks/handwritten-ocr/handwritten-ocr.ipynb
index dc269e9f050..786953ef377 100644
--- a/notebooks/handwritten-ocr/handwritten-ocr.ipynb
+++ b/notebooks/handwritten-ocr/handwritten-ocr.ipynb
@@ -47,15 +47,8 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "import platform\n",
-    "\n",
-    "# Install openvino-dev package\n",
-    "%pip install -q \"openvino>=2023.1.0\" opencv-python tqdm\n",
-    "\n",
-    "if platform.system() != \"Windows\":\n",
-    "    %pip install -q \"matplotlib>=3.4\"\n",
-    "else:\n",
-    "    %pip install -q \"matplotlib>=3.4,<3.7\""
+    "# Install openvino package\n",
+    "%pip install -q \"openvino>=2023.1.0\" opencv-python tqdm \"matplotlib>=3.4\""
    ]
   },
   {
diff --git a/notebooks/hello-detection/hello-detection.ipynb b/notebooks/hello-detection/hello-detection.ipynb
index 013582403c3..534a8f35f5e 100644
--- a/notebooks/hello-detection/hello-detection.ipynb
+++ b/notebooks/hello-detection/hello-detection.ipynb
@@ -43,7 +43,7 @@
    "outputs": [],
    "source": [
     "# Install openvino package\n",
-    "%pip install -q \"openvino>=2023.1.0\" opencv-python tqdm"
+    "%pip install -q \"openvino>=2023.1.0\" opencv-python tqdm \"matplotlib>=3.4\""
    ]
   },
   {
diff --git a/notebooks/hello-segmentation/hello-segmentation.ipynb b/notebooks/hello-segmentation/hello-segmentation.ipynb
index 88c6e6e35ae..1f0b62df336 100644
--- a/notebooks/hello-segmentation/hello-segmentation.ipynb
+++ b/notebooks/hello-segmentation/hello-segmentation.ipynb
@@ -42,15 +42,8 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "import platform\n",
-    "\n",
     "# Install required packages\n",
-    "%pip install -q \"openvino>=2023.1.0\" opencv-python tqdm\n",
-    "\n",
-    "if platform.system() != \"Windows\":\n",
-    "    %pip install -q \"matplotlib>=3.4\"\n",
-    "else:\n",
-    "    %pip install -q \"matplotlib>=3.4,<3.7\""
+    "%pip install -q \"openvino>=2023.1.0\" opencv-python tqdm \"matplotlib>=3.4\""
    ]
   },
   {
diff --git a/notebooks/hello-world/hello-world.ipynb b/notebooks/hello-world/hello-world.ipynb
index 9927f920fbd..08342ec79fb 100644
--- a/notebooks/hello-world/hello-world.ipynb
+++ b/notebooks/hello-world/hello-world.ipynb
@@ -40,11 +40,8 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "import platform\n",
-    "\n",
-    "# Install openvino package\n",
-    "%pip install -q \"openvino>=2023.1.0\" opencv-python tqdm \"matplotlib>=3.4\"\n",
-    "\n"
+    "# Install required packages\n",
+    "%pip install -q \"openvino>=2023.1.0\" opencv-python tqdm \"matplotlib>=3.4\""
    ]
   },
   {
diff --git a/notebooks/image-bind/image-bind.ipynb b/notebooks/image-bind/image-bind.ipynb
index 0ae86e650f6..d910921439a 100644
--- a/notebooks/image-bind/image-bind.ipynb
+++ b/notebooks/image-bind/image-bind.ipynb
@@ -105,7 +105,7 @@
    "source": [
     "import platform\n",
     "\n",
-    "%pip install -q \"torch>=2.0.1\" \"torchvision>=0.15.2,<0.17.0\" \"torchaudio>=2.0.2\" --extra-index-url https://download.pytorch.org/whl/cpu\n",
+    "%pip install -q \"torch>=2.0.1\" \"torchvision>=0.15.2,<0.17.0\" \"torchaudio>=2.0.2\" \"matplotlib>=3.4\" --extra-index-url https://download.pytorch.org/whl/cpu\n",
     "%pip install -q datasets regex librosa soundfile pytorchvideo ftfy \"timm>=0.6.7\" einops fvcore \"openvino>=2024.0.0\" \"nncf>=2.9.0\" numpy scipy --extra-index-url https://download.pytorch.org/whl/cpu\n",
     "\n",
     "\n",
diff --git a/notebooks/image-classification-quantization/image-classification-quantization.ipynb b/notebooks/image-classification-quantization/image-classification-quantization.ipynb
index 37718a29217..a63e55e88b3 100644
--- a/notebooks/image-classification-quantization/image-classification-quantization.ipynb
+++ b/notebooks/image-classification-quantization/image-classification-quantization.ipynb
@@ -1,6 +1,7 @@
 {
  "cells": [
   {
+   "attachments": {},
    "cell_type": "markdown",
    "metadata": {
     "id": "rQc-wXjqrEuR"
@@ -24,6 +25,7 @@
    ]
   },
   {
+   "attachments": {},
    "cell_type": "markdown",
    "metadata": {},
    "source": [
@@ -56,15 +58,8 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "import platform\n",
-    "\n",
     "# Install required packages\n",
-    "%pip install -q \"openvino>=2023.1.0\" \"nncf>=2.6.0\" torch torchvision tqdm --extra-index-url https://download.pytorch.org/whl/cpu\n",
-    "\n",
-    "if platform.system() != \"Windows\":\n",
-    "    %pip install -q \"matplotlib>=3.4\"\n",
-    "else:\n",
-    "    %pip install -q \"matplotlib>=3.4,<3.7\""
+    "%pip install -q \"openvino>=2023.1.0\" \"nncf>=2.6.0\" torch torchvision tqdm \"matplotlib>=3.4\" --extra-index-url https://download.pytorch.org/whl/cpu"
    ]
   },
   {
@@ -87,6 +82,7 @@
    ]
   },
   {
+   "attachments": {},
    "cell_type": "markdown",
    "metadata": {
     "id": "D-frbVLKrkmv"
@@ -132,6 +128,7 @@
    ]
   },
   {
+   "attachments": {},
    "cell_type": "markdown",
    "metadata": {},
    "source": [
@@ -157,6 +154,7 @@
    ]
   },
   {
+   "attachments": {},
    "cell_type": "markdown",
    "metadata": {
     "id": "ynLvh8rNc2wv"
@@ -205,6 +203,7 @@
    ]
   },
   {
+   "attachments": {},
    "cell_type": "markdown",
    "metadata": {},
    "source": [
@@ -255,6 +254,7 @@
    ]
   },
   {
+   "attachments": {},
    "cell_type": "markdown",
    "metadata": {},
    "source": [
@@ -288,6 +288,7 @@
    ]
   },
   {
+   "attachments": {},
    "cell_type": "markdown",
    "metadata": {},
    "source": [
@@ -307,6 +308,7 @@
    ]
   },
   {
+   "attachments": {},
    "cell_type": "markdown",
    "metadata": {},
    "source": [
@@ -338,6 +340,7 @@
    ]
   },
   {
+   "attachments": {},
    "cell_type": "markdown",
    "metadata": {},
    "source": [
@@ -457,6 +460,7 @@
    ]
   },
   {
+   "attachments": {},
    "cell_type": "markdown",
    "metadata": {
     "id": "vQACMfAUo52V"
@@ -647,6 +651,7 @@
    ]
   },
   {
+   "attachments": {},
    "cell_type": "markdown",
    "metadata": {},
    "source": [
diff --git a/notebooks/instant-id/instant-id.ipynb b/notebooks/instant-id/instant-id.ipynb
index d96ec508a06..2fd13c41637 100644
--- a/notebooks/instant-id/instant-id.ipynb
+++ b/notebooks/instant-id/instant-id.ipynb
@@ -102,7 +102,7 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "%pip install -q \"openvino>=2023.3.0\" opencv-python transformers \"diffusers>=0.24.0\" accelerate gdown \"scikit-image>=0.19.2\" \"gradio>=4.19\" \"nncf>=2.9.0\" \"datasets>=2.14.6\" \"peft>=0.6.2\""
+    "%pip install -q \"openvino>=2023.3.0\" opencv-python transformers \"diffusers>=0.24.0\" \"matplotlib>=3.4\" accelerate gdown \"scikit-image>=0.19.2\" \"gradio>=4.19\" \"nncf>=2.9.0\" \"datasets>=2.14.6\" \"peft>=0.6.2\""
    ]
   },
   {
diff --git a/notebooks/instruct-pix2pix-image-editing/instruct-pix2pix-image-editing.ipynb b/notebooks/instruct-pix2pix-image-editing/instruct-pix2pix-image-editing.ipynb
index d1e40a5dbc3..4526f399210 100644
--- a/notebooks/instruct-pix2pix-image-editing/instruct-pix2pix-image-editing.ipynb
+++ b/notebooks/instruct-pix2pix-image-editing/instruct-pix2pix-image-editing.ipynb
@@ -74,15 +74,8 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "import platform\n",
-    "\n",
-    "%pip install -q \"transformers>=4.25.1\" torch accelerate \"gradio>4.19\" \"datasets>=2.14.6\" diffusers pillow opencv-python --extra-index-url https://download.pytorch.org/whl/cpu\n",
-    "%pip install -q \"openvino>=2023.1.0\"\n",
-    "\n",
-    "if platform.system() != \"Windows\":\n",
-    "    %pip install -q \"matplotlib>=3.4\"\n",
-    "else:\n",
-    "    %pip install -q \"matplotlib>=3.4,<3.7\""
+    "%pip install -q \"transformers>=4.25.1\" torch accelerate \"gradio>4.19\" \"datasets>=2.14.6\" \"matplotlib>=3.4\" diffusers pillow opencv-python --extra-index-url https://download.pytorch.org/whl/cpu\n",
+    "%pip install -q \"openvino>=2023.1.0\""
    ]
   },
   {
diff --git a/notebooks/jina-clip/jina-clip.ipynb b/notebooks/jina-clip/jina-clip.ipynb
index 43637a508d6..af743d083c7 100644
--- a/notebooks/jina-clip/jina-clip.ipynb
+++ b/notebooks/jina-clip/jina-clip.ipynb
@@ -60,7 +60,7 @@
    "outputs": [],
    "source": [
     "%pip install -q \"openvino>=2024.2.0\" \"datasets>=2.20\" \"nncf>=2.11.0\"\n",
-    "%pip install -q --extra-index-url https://download.pytorch.org/whl/cpu \"gradio>=4.19\" \"pillow\" \"einops\" \"timm\" \"transformers[torch]>=4.39\" \"torch>=2.1\""
+    "%pip install -q --extra-index-url https://download.pytorch.org/whl/cpu \"gradio>=4.19\" \"pillow\" \"einops\" \"timm\" \"transformers[torch]>=4.39\" \"torch>=2.1\" \"matplotlib>=3.4\""
    ]
   },
   {
diff --git a/notebooks/kosmos2-multimodal-large-language-model/kosmos2-multimodal-large-language-model.ipynb b/notebooks/kosmos2-multimodal-large-language-model/kosmos2-multimodal-large-language-model.ipynb
index 006c6c12496..860be7771e4 100644
--- a/notebooks/kosmos2-multimodal-large-language-model/kosmos2-multimodal-large-language-model.ipynb
+++ b/notebooks/kosmos2-multimodal-large-language-model/kosmos2-multimodal-large-language-model.ipynb
@@ -78,7 +78,7 @@
    "source": [
     "%pip install --upgrade pip\n",
     "%pip install -q \"openvino>=2024.0.0\" \"nncf>=2.11.0\" \"datasets>=2.20.0\"\n",
-    "%pip install -q \"transformers>=4.35\" Pillow \"gradio>=4.19\" opencv-python\n",
+    "%pip install -q \"transformers>=4.35\" Pillow \"gradio>=4.19\" opencv-python \"matplotlib>=3.4\"\n",
     "%pip install -q --extra-index-url https://download.pytorch.org/whl/cpu torch torchvision"
    ]
   },
diff --git a/notebooks/latent-consistency-models-image-generation/lcm-lora-controlnet.ipynb b/notebooks/latent-consistency-models-image-generation/lcm-lora-controlnet.ipynb
index fb20a2a6f7e..b815f9ea593 100644
--- a/notebooks/latent-consistency-models-image-generation/lcm-lora-controlnet.ipynb
+++ b/notebooks/latent-consistency-models-image-generation/lcm-lora-controlnet.ipynb
@@ -128,7 +128,7 @@
    "outputs": [],
    "source": [
     "%pip install -q \"torch\" transformers \"diffusers>=0.24.0\" \"controlnet-aux>=0.0.6\" \"peft>=0.6.2\" accelerate --extra-index-url https://download.pytorch.org/whl/cpu\n",
-    "%pip install -q \"openvino>=2023.2.0\" pillow \"gradio>=4.19\" \"datasets>=2.14.6\" \"nncf>=2.7.0\""
+    "%pip install -q \"openvino>=2023.2.0\" pillow \"gradio>=4.19\" \"datasets>=2.14.6\" \"nncf>=2.7.0\" \"matplotlib>=3.4\""
    ]
   },
   {
diff --git a/notebooks/meter-reader/meter-reader.ipynb b/notebooks/meter-reader/meter-reader.ipynb
index 92738ec95d7..7dc14e70ccd 100644
--- a/notebooks/meter-reader/meter-reader.ipynb
+++ b/notebooks/meter-reader/meter-reader.ipynb
@@ -48,15 +48,8 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "import platform\n",
-    "\n",
     "# Install openvino package\n",
-    "%pip install -q \"openvino>=2023.1.0\" opencv-python tqdm\n",
-    "\n",
-    "if platform.system() != \"Windows\":\n",
-    "    %pip install -q \"matplotlib>=3.4\"\n",
-    "else:\n",
-    "    %pip install -q \"matplotlib>=3.4,<3.7\""
+    "%pip install -q \"openvino>=2023.1.0\" opencv-python tqdm \"matplotlib>=3.4\""
    ]
   },
   {
diff --git a/notebooks/mobileclip-video-search/mobileclip-video-search.ipynb b/notebooks/mobileclip-video-search/mobileclip-video-search.ipynb
index cb14e2535dc..d8f33bc9509 100644
--- a/notebooks/mobileclip-video-search/mobileclip-video-search.ipynb
+++ b/notebooks/mobileclip-video-search/mobileclip-video-search.ipynb
@@ -80,7 +80,7 @@
     "\n",
     "%pip install -q \"clip-benchmark>=1.4.0\" \"datasets>=2.8.0\" \"open-clip-torch>=2.20.0\" \"timm>=0.9.5\" \"torch>=1.13.1\" \"torchvision>=0.14.1\" --extra-index-url https://download.pytorch.org/whl/cpu\n",
     "\n",
-    "%pip install -q \"openvino>=2024.0.0\" \"gradio>=4.19\" \"matplotlib\" \"Pillow\" \"altair\" \"pandas\" \"opencv-python\" \"tqdm\""
+    "%pip install -q \"openvino>=2024.0.0\" \"gradio>=4.19\" \"matplotlib>=3.4\" \"Pillow\" \"altair\" \"pandas\" \"opencv-python\" \"tqdm\""
    ]
   },
   {
diff --git a/notebooks/model-server/model-server.ipynb b/notebooks/model-server/model-server.ipynb
index d201165d98c..2728d3dbe21 100644
--- a/notebooks/model-server/model-server.ipynb
+++ b/notebooks/model-server/model-server.ipynb
@@ -169,14 +169,7 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "import platform\n",
-    "\n",
-    "%pip install -q \"openvino>=2023.1.0\" opencv-python tqdm\n",
-    "\n",
-    "if platform.system() != \"Windows\":\n",
-    "    %pip install -q \"matplotlib>=3.4\"\n",
-    "else:\n",
-    "    %pip install -q \"matplotlib>=3.4,<3.7\""
+    "%pip install -q \"openvino>=2024.4.0\" opencv-python tqdm \"matplotlib>=3.4\""
    ]
   },
   {
diff --git a/notebooks/oneformer-segmentation/oneformer-segmentation.ipynb b/notebooks/oneformer-segmentation/oneformer-segmentation.ipynb
index 55014abcddd..54732c3e943 100644
--- a/notebooks/oneformer-segmentation/oneformer-segmentation.ipynb
+++ b/notebooks/oneformer-segmentation/oneformer-segmentation.ipynb
@@ -81,14 +81,7 @@
    },
    "outputs": [],
    "source": [
-    "import platform\n",
-    "\n",
-    "%pip install -q --extra-index-url https://download.pytorch.org/whl/cpu \"transformers>=4.26.0\" \"openvino>=2023.1.0\" \"nncf>=2.7.0\" \"gradio>=4.19\" \"torch>=2.1\" scipy ipywidgets Pillow tqdm\n",
-    "\n",
-    "if platform.system() != \"Windows\":\n",
-    "    %pip install -q \"matplotlib>=3.4\"\n",
-    "else:\n",
-    "    %pip install -q \"matplotlib>=3.4,<3.7\""
+    "%pip install -q --extra-index-url https://download.pytorch.org/whl/cpu \"transformers>=4.26.0\" \"openvino>=2023.1.0\" \"nncf>=2.7.0\" \"gradio>=4.19\" \"torch>=2.1\" \"matplotlib>=3.4\" scipy ipywidgets Pillow tqdm"
    ]
   },
   {
diff --git a/notebooks/optical-character-recognition/optical-character-recognition.ipynb b/notebooks/optical-character-recognition/optical-character-recognition.ipynb
index 620d3f5d62a..19ce1a91520 100644
--- a/notebooks/optical-character-recognition/optical-character-recognition.ipynb
+++ b/notebooks/optical-character-recognition/optical-character-recognition.ipynb
@@ -53,15 +53,8 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "import platform\n",
-    "\n",
     "# Install openvino-dev package\n",
-    "%pip install -q \"openvino-dev>=2024.0.0\" \"onnx<1.16.2\" torch torchvision pillow opencv-python --extra-index-url https://download.pytorch.org/whl/cpu\n",
-    "\n",
-    "if platform.system() != \"Windows\":\n",
-    "    %pip install -q \"matplotlib>=3.4\"\n",
-    "else:\n",
-    "    %pip install -q \"matplotlib>=3.4,<3.7\""
+    "%pip install -q \"openvino-dev>=2024.0.0\" \"onnx<1.16.2\" torch torchvision pillow opencv-python \"matplotlib>=3.4\" --extra-index-url https://download.pytorch.org/whl/cpu"
    ]
   },
   {
diff --git a/notebooks/optimize-preprocessing/optimize-preprocessing.ipynb b/notebooks/optimize-preprocessing/optimize-preprocessing.ipynb
index 9f0af753d79..64008ded125 100644
--- a/notebooks/optimize-preprocessing/optimize-preprocessing.ipynb
+++ b/notebooks/optimize-preprocessing/optimize-preprocessing.ipynb
@@ -77,14 +77,8 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "import platform\n",
-    "\n",
     "# Install openvino package\n",
-    "%pip install -q \"openvino>=2023.1.0\" opencv-python tqdm\n",
-    "if platform.system() != \"Windows\":\n",
-    "    %pip install -q \"matplotlib>=3.4\"\n",
-    "else:\n",
-    "    %pip install -q \"matplotlib>=3.4,<3.7\"\n",
+    "%pip install -q \"openvino>=2023.1.0\" opencv-python tqdm \"matplotlib>=3.4\"\n",
     "\n",
     "%pip install -q \"tensorflow-macos>=2.5; sys_platform == 'darwin' and platform_machine == 'arm64' and python_version > '3.8'\" # macOS M1 and M2\n",
     "%pip install -q \"tensorflow>=2.5; sys_platform == 'darwin' and platform_machine != 'arm64' and python_version > '3.8'\" # macOS x86\n",
diff --git a/notebooks/paddle-to-openvino/paddle-to-openvino-classification.ipynb b/notebooks/paddle-to-openvino/paddle-to-openvino-classification.ipynb
index ce5e7d74c0a..ca56f10041e 100644
--- a/notebooks/paddle-to-openvino/paddle-to-openvino-classification.ipynb
+++ b/notebooks/paddle-to-openvino/paddle-to-openvino-classification.ipynb
@@ -66,7 +66,7 @@
     "else:\n",
     "    %pip install -q \"paddlepaddle>=2.5.1\"\n",
     "%pip install -q \"paddleclas>=2.5.2\" --no-deps\n",
-    "%pip install -q \"prettytable\" \"ujson\" \"visualdl>=2.5.3\" \"faiss-cpu>=1.7.1\" Pillow tqdm\n",
+    "%pip install -q \"prettytable\" \"ujson\" \"visualdl>=2.5.3\" \"faiss-cpu>=1.7.1\" Pillow tqdm \"matplotlib>=3.4\"\n",
     "# Install openvino package\n",
     "%pip install -q \"openvino>=2023.1.0\""
    ]
diff --git a/notebooks/paint-by-example/paint-by-example.ipynb b/notebooks/paint-by-example/paint-by-example.ipynb
index 9c81095af5b..8086558ba06 100644
--- a/notebooks/paint-by-example/paint-by-example.ipynb
+++ b/notebooks/paint-by-example/paint-by-example.ipynb
@@ -58,7 +58,7 @@
    "outputs": [],
    "source": [
     "%pip install -q \"torch>=2.1\" torchvision --extra-index-url \"https://download.pytorch.org/whl/cpu\"\n",
-    "%pip install -q \"diffusers>=0.25.0\" \"peft>=0.6.2\" \"openvino>=2023.2.0\" \"transformers>=4.25.1\" ipywidgets opencv-python pillow \"nncf>=2.7.0\" \"gradio==3.44.1\" tqdm"
+    "%pip install -q \"diffusers>=0.25.0\" \"peft>=0.6.2\" \"openvino>=2023.2.0\" \"transformers>=4.25.1\" \"matplotlib>=3.4\" ipywidgets opencv-python pillow \"nncf>=2.7.0\" \"gradio==3.44.1\" tqdm"
    ]
   },
   {
diff --git a/notebooks/person-tracking-webcam/person-tracking.ipynb b/notebooks/person-tracking-webcam/person-tracking.ipynb
index d083649bef7..68f06801958 100644
--- a/notebooks/person-tracking-webcam/person-tracking.ipynb
+++ b/notebooks/person-tracking-webcam/person-tracking.ipynb
@@ -83,15 +83,8 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "import platform\n",
-    "\n",
     "%pip install -q \"openvino-dev>=2024.0.0\"\n",
-    "%pip install -q opencv-python requests scipy tqdm\n",
-    "\n",
-    "if platform.system() != \"Windows\":\n",
-    "    %pip install -q \"matplotlib>=3.4\"\n",
-    "else:\n",
-    "    %pip install -q \"matplotlib>=3.4,<3.7\""
+    "%pip install -q opencv-python requests scipy tqdm \"matplotlib>=3.4\""
    ]
   },
   {
diff --git a/notebooks/pytorch-to-openvino/pytorch-onnx-to-openvino.ipynb b/notebooks/pytorch-to-openvino/pytorch-onnx-to-openvino.ipynb
index 64bd4a1efe5..e440ed939ef 100644
--- a/notebooks/pytorch-to-openvino/pytorch-onnx-to-openvino.ipynb
+++ b/notebooks/pytorch-to-openvino/pytorch-onnx-to-openvino.ipynb
@@ -863,7 +863,7 @@
     "\n",
     "* [Torchvision](https://pytorch.org/vision/stable/index.html)\n",
     "* [Pytorch ONNX Documentation](https://pytorch.org/docs/stable/onnx.html)\n",
-    "* [PIP install openvino-dev](https://pypi.org/project/openvino-dev/)\n",
+    "* [PIP install openvino](https://pypi.org/project/openvino/)\n",
     "* [OpenVINO ONNX support](https://docs.openvino.ai/2021.4/openvino_docs_IE_DG_ONNX_Support.html)\n",
     "* [Model Conversion API documentation](https://docs.openvino.ai/2024/openvino-workflow/model-preparation.html)\n",
     "* [Converting Pytorch model](https://docs.openvino.ai/2024/openvino-workflow/model-preparation/convert-model-pytorch.html)\n"
diff --git a/notebooks/typo-detector/typo-detector.ipynb b/notebooks/typo-detector/typo-detector.ipynb
index 7ebf1f596ac..29ea7c02ceb 100644
--- a/notebooks/typo-detector/typo-detector.ipynb
+++ b/notebooks/typo-detector/typo-detector.ipynb
@@ -63,7 +63,7 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "%pip install -q \"diffusers>=0.17.1\" \"openvino>=2023.1.0\" \"nncf>=2.5.0\" \"onnx>=1.11.0,<1.16.2\" \"transformers>=4.39.0\" \"torch>=2.1,<2.4\" \"torchvision<0.19.0\" --extra-index-url https://download.pytorch.org/whl/cpu\n",
+    "%pip install -q \"diffusers>=0.17.1\" \"openvino>=2023.1.0\" \"nncf>=2.5.0\" \"onnx>=1.11.0,!=1.16.2\" \"transformers>=4.39.0\" \"torch>=2.4.1\" --extra-index-url https://download.pytorch.org/whl/cpu\n",
     "%pip install -q \"git+https://github.com/huggingface/optimum-intel.git\""
    ]
   },
diff --git a/notebooks/vision-background-removal/vision-background-removal.ipynb b/notebooks/vision-background-removal/vision-background-removal.ipynb
index b6267dbe5e1..4a8b274b638 100644
--- a/notebooks/vision-background-removal/vision-background-removal.ipynb
+++ b/notebooks/vision-background-removal/vision-background-removal.ipynb
@@ -659,7 +659,7 @@
     "## References\n",
     "[back to top ⬆️](#Table-of-contents:)\n",
     "\n",
-    "* [PIP install openvino-dev](https://github.com/openvinotoolkit/openvino/blob/releases/2023/2/docs/install_guides/pypi-openvino-dev.md)\n",
+    "* [PIP install openvino](https://pypi.org/project/openvino/)\n",
     "* [Model Conversion API](https://docs.openvino.ai/2024/openvino-workflow/model-preparation.html)\n",
     "* [U^2-Net](https://github.com/xuebinqin/U-2-Net)\n",
     "* U^2-Net research paper: [U^2-Net: Going Deeper with Nested U-Structure for Salient Object Detection](https://arxiv.org/pdf/2005.09007.pdf)"
diff --git a/notebooks/yolov9-optimization/yolov9-optimization.ipynb b/notebooks/yolov9-optimization/yolov9-optimization.ipynb
index 6a862c10758..c30620f7516 100644
--- a/notebooks/yolov9-optimization/yolov9-optimization.ipynb
+++ b/notebooks/yolov9-optimization/yolov9-optimization.ipynb
@@ -64,14 +64,7 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "import platform\n",
-    "\n",
-    "%pip install -q \"openvino>=2023.3.0\" \"nncf>=2.8.1\" \"opencv-python\" \"seaborn\" \"pandas\" \"scikit-learn\" \"torch\" \"torchvision\" \"tqdm\" --extra-index-url https://download.pytorch.org/whl/cpu\n",
-    "\n",
-    "if platform.system() != \"Windows\":\n",
-    "    %pip install -q \"matplotlib>=3.4\"\n",
-    "else:\n",
-    "    %pip install -q \"matplotlib>=3.4,<3.7\""
+    "%pip install -q \"openvino>=2023.3.0\" \"nncf>=2.8.1\" \"opencv-python\" \"matplotlib>=3.4\" \"seaborn\" \"pandas\" \"scikit-learn\" \"torch\" \"torchvision\" \"tqdm\" --extra-index-url https://download.pytorch.org/whl/cpu"
    ]
   },
   {