Skip to content

Commit 138ea22

Browse files
Pin transformers and torch versions (#3094)
1 parent 2e1a8f1 commit 138ea22

File tree

10 files changed

+40
-27
lines changed

10 files changed

+40
-27
lines changed

.ci/skipped_notebooks.yml

Lines changed: 12 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -618,3 +618,15 @@
618618
skips:
619619
- os:
620620
- macos-13
621+
- notebook: notebooks/llm-chatbot/llm-chatbot-generate-api.ipynb
622+
skips:
623+
- os:
624+
- macos-13
625+
- notebook: notebooks/llm-chatbot/llm-chatbot.ipynb
626+
skips:
627+
- os:
628+
- macos-13
629+
- notebook: notebooks/nuextract-structure-extraction/nuextract-structure-extraction.ipynb
630+
skips:
631+
- os:
632+
- macos-13

notebooks/llm-agent-functioncall/llm-agent-functioncall-qwen.ipynb

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -67,16 +67,16 @@
6767
"\n",
6868
"%pip install -Uq pip\n",
6969
"%pip uninstall -q -y optimum optimum-intel\n",
70-
"%pip install --pre -Uq \"openvino>=2024.2.0\" openvino-tokenizers[transformers] --extra-index-url https://storage.openvinotoolkit.org/simple/wheels/nightly\n",
70+
"%pip install --pre -Uq \"openvino>=2025.3.0\" openvino-tokenizers[transformers] --extra-index-url https://storage.openvinotoolkit.org/simple/wheels/nightly\n",
7171
"%pip install -q --extra-index-url https://download.pytorch.org/whl/cpu \\\n",
72-
"\"torch>=2.1\" \\\n",
72+
"\"nncf>=2.18.0\" \\\n",
73+
"\"torch==2.8\" \\\n",
7374
"\"datasets<4.0.0\" \\\n",
7475
"\"accelerate\" \\\n",
75-
"\"transformers>=4.38.1\" \"langchain>=0.2.3\" \"langchain-community>=0.2.4\" \"wikipedia\" \\\n",
76+
"\"transformers==4.53.3\" \"langchain>=0.2.3\" \"langchain-community>=0.2.4\" \"wikipedia\" \\\n",
7677
"\"pydantic==2.9.2\" \"pydantic-core==2.23.4\" \"gradio>=5.0.0\" \"gradio-client==1.4.0\" \"modelscope_studio==1.0.0-beta.8\"\n",
7778
"%pip install -q --extra-index-url https://download.pytorch.org/whl/cpu \\\n",
7879
"\"git+https://github.com/huggingface/optimum-intel.git\"\n",
79-
"%pip install -q \"git+https://github.com/openvinotoolkit/nncf.git\"\n",
8080
" \n",
8181
"utility_files = [\"notebook_utils.py\", \"cmd_helper.py\"]\n",
8282
"\n",

notebooks/llm-agent-mcp/llm-agent-mcp.ipynb

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -84,11 +84,10 @@
8484
"%pip uninstall -q -y optimum optimum-intel\n",
8585
"%pip install --pre -Uq openvino-genai --extra-index-url https://storage.openvinotoolkit.org/simple/wheels/nightly\n",
8686
"%pip install -q --extra-index-url https://download.pytorch.org/whl/cpu \\\n",
87-
"\"torch>=2.1\" \"datasets<4.0.0\" \"accelerate\" \"transformers>=4.51.0\" \"mcp-server-time\" \"mcp-server-fetch\"\n",
87+
"\"torch==2.8\" \"nncf>=2.18.0\" \"datasets<4.0.0\" \"accelerate\" \"transformers==4.53.3\" \"mcp-server-time\" \"mcp-server-fetch\"\n",
8888
"\"pydantic==2.9.2\" \"pydantic-core==2.23.4\" \"gradio>=5.0.0\" \"gradio-client==1.4.0\" \"modelscope_studio==1.0.0-beta.8\"\n",
8989
"%pip install -q --extra-index-url https://download.pytorch.org/whl/cpu \\\n",
9090
"\"git+https://github.com/huggingface/optimum-intel.git\"\n",
91-
"%pip install -q \"git+https://github.com/openvinotoolkit/nncf.git\"\n",
9291
" \n",
9392
"utility_files = [\"notebook_utils.py\", \"cmd_helper.py\"]\n",
9493
"\n",

notebooks/llm-agent-react-langchain/llm-agent-react-langchain.ipynb

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -93,16 +93,16 @@
9393
"\n",
9494
"%pip install -Uq pip\n",
9595
"%pip uninstall -q -y optimum optimum-intel\n",
96-
"%pip install --pre -Uq \"openvino>=2024.5.0\" openvino-tokenizers[transformers] --extra-index-url https://storage.openvinotoolkit.org/simple/wheels/nightly\n",
97-
"%pip install -q --extra-index-url https://download.pytorch.org/whl/cpu \"transformers>=4.38.1\" \"langchain>=0.2.3\" \"langchain-huggingface>=0.1.2\" \"langchain-community>=0.2.4\" \"Wikipedia\" \\\n",
98-
"\"torch>=2.1\" \\\n",
96+
"%pip install --pre -Uq \"openvino>=2025.3.0\" openvino-tokenizers[transformers] --extra-index-url https://storage.openvinotoolkit.org/simple/wheels/nightly\n",
97+
"%pip install -q --extra-index-url https://download.pytorch.org/whl/cpu \"transformers==4.53.3\" \"langchain>=0.2.3\" \"langchain-huggingface>=0.1.2\" \"langchain-community>=0.2.4\" \"Wikipedia\" \\\n",
98+
"\"nncf>=2.18.0\" \\\n",
99+
"\"torch==2.8\" \\\n",
99100
"\"datasets<4.0.0\" \\\n",
100101
"\"accelerate\" \\\n",
101102
"\"pydantic<2.10.0\" \\\n",
102103
"\"gradio>=4.19\" \\\n",
103104
"\"huggingface-hub>=0.26.5\"\n",
104-
"%pip install -q \"git+https://github.com/huggingface/optimum-intel.git\" \\\n",
105-
"\"git+https://github.com/openvinotoolkit/nncf.git\""
105+
"%pip install -q \"git+https://github.com/huggingface/optimum-intel.git\" --extra-index-url https://download.pytorch.org/whl/cpu\n"
106106
]
107107
},
108108
{

notebooks/llm-chatbot/llm-chatbot-generate-api.ipynb

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -79,15 +79,15 @@
7979
"\n",
8080
"os.environ[\"GIT_CLONE_PROTECTION_ACTIVE\"] = \"false\"\n",
8181
"\n",
82-
"%pip install -q -U --pre \"openvino>=2025.0.0\" openvino-tokenizers[transformers] openvino-genai --extra-index-url https://storage.openvinotoolkit.org/simple/wheels/nightly\n",
82+
"%pip install -q -U --pre \"openvino>=2025.3.0\" openvino-tokenizers[transformers] openvino-genai --extra-index-url https://storage.openvinotoolkit.org/simple/wheels/nightly\n",
8383
"%pip install -q --extra-index-url https://download.pytorch.org/whl/cpu \\\n",
8484
"\"git+https://github.com/huggingface/optimum-intel.git\" \\\n",
85-
"\"nncf==2.15.0\" \\\n",
86-
"\"torch>=2.1\" \\\n",
85+
"\"nncf>=2.18.0\" \\\n",
86+
"\"torch==2.8\" \\\n",
8787
"\"datasets<4.0.0\" \\\n",
8888
"\"accelerate\" \\\n",
8989
"\"gradio>=4.19\" \\\n",
90-
"\"transformers>=4.43.1\" \\\n",
90+
"\"transformers==4.53.3\" \\\n",
9191
"\"huggingface-hub>=0.26.5\" \\\n",
9292
"\"einops\" \"transformers_stream_generator\" \"tiktoken\" \"bitsandbytes\"\n",
9393
"\n",

notebooks/llm-chatbot/llm-chatbot.ipynb

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -79,16 +79,16 @@
7979
"\n",
8080
"%pip install -Uq pip\n",
8181
"%pip uninstall -q -y optimum optimum-intel\n",
82-
"%pip install --pre -Uq \"openvino>=2024.2.0\" openvino-tokenizers[transformers] --extra-index-url https://storage.openvinotoolkit.org/simple/wheels/nightly\n",
82+
"%pip install --pre -Uq \"openvino>=2025.3.0\" openvino-tokenizers[transformers] --extra-index-url https://storage.openvinotoolkit.org/simple/wheels/nightly\n",
8383
"%pip install -q --extra-index-url https://download.pytorch.org/whl/cpu\\\n",
8484
"\"git+https://github.com/huggingface/optimum-intel.git\"\\\n",
85-
"\"nncf==2.14.1\"\\\n",
86-
"\"torch>=2.1\"\\\n",
85+
"\"nncf>=2.18.0\"\\\n",
86+
"\"torch==2.8\" \\\n",
8787
"\"datasets<4.0.0\" \\\n",
8888
"\"accelerate\" \\\n",
8989
"\"gradio>=4.19\" \\\n",
9090
"\"huggingface-hub>=0.26.5\" \\\n",
91-
" \"einops\" \"transformers>=4.51.3\" \"transformers_stream_generator\" \"tiktoken\" \"bitsandbytes\"\n",
91+
" \"einops\" \"transformers==4.53.3\" \"transformers_stream_generator\" \"tiktoken\" \"bitsandbytes\"\n",
9292
"\n",
9393
"if platform.system() == \"Darwin\":\n",
9494
" %pip install -q \"numpy<2.0.0\""

notebooks/llm-question-answering/llm-question-answering.ipynb

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -69,7 +69,7 @@
6969
"source": [
7070
"%pip uninstall -q -y optimum optimum-intel\n",
7171
"%pip install -Uq \"openvino>=2025.3.0\" \"openvino-genai\"\n",
72-
"%pip install -q \"torch>=2.1\" \"nncf>=2.7\" \"transformers>=4.40.0\" \"huggingface-hub>=0.26.5\" \"onnx<1.16.2\" \"optimum>=1.16.1\" \"accelerate\" \"datasets>=2.14.6,<4.0.0\" \"gradio>=4.19\" \"git+https://github.com/huggingface/optimum-intel.git\" --extra-index-url https://download.pytorch.org/whl/cpu"
72+
"%pip install -q \"torch==2.8\" \"nncf>=2.18.0\" \"transformers==4.53.3\" \"huggingface-hub>=0.26.5\" \"onnx<1.16.2\" \"optimum>=1.16.1\" \"accelerate\" \"datasets>=2.14.6,<4.0.0\" \"gradio>=4.19\" \"git+https://github.com/huggingface/optimum-intel.git\" --extra-index-url https://download.pytorch.org/whl/cpu"
7373
]
7474
},
7575
{

notebooks/llm-rag-langchain/llm-rag-langchain-genai.ipynb

Lines changed: 4 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -110,23 +110,24 @@
110110
"\n",
111111
"os.environ[\"GIT_CLONE_PROTECTION_ACTIVE\"] = \"false\"\n",
112112
"\n",
113-
"pip_install(\"--pre\", \"-U\", \"openvino>=2024.2.0\", \"--extra-index-url\", \"https://storage.openvinotoolkit.org/simple/wheels/nightly\")\n",
113+
"pip_install(\"--pre\", \"-U\", \"openvino>=2025.3.0\", \"--extra-index-url\", \"https://storage.openvinotoolkit.org/simple/wheels/nightly\")\n",
114114
"pip_install(\"--pre\", \"-U\", \"openvino-tokenizers[transformers]\", \"--extra-index-url\", \"https://storage.openvinotoolkit.org/simple/wheels/nightly\")\n",
115115
"pip_install(\"--pre\", \"-U\", \"openvino-genai\", \"--extra-index-url\", \"https://storage.openvinotoolkit.org/simple/wheels/nightly\")\n",
116116
"pip_install(\n",
117117
" \"-q\",\n",
118118
" \"--extra-index-url\",\n",
119119
" \"https://download.pytorch.org/whl/cpu\",\n",
120120
" \"git+https://github.com/huggingface/optimum-intel.git\",\n",
121-
" \"git+https://github.com/openvinotoolkit/nncf.git\",\n",
121+
" \"nncf>=2.18.0\",\n",
122+
" \"torch==2.8\",\n",
122123
" \"datasets<4.0.0\",\n",
123124
" \"accelerate\",\n",
124125
" \"gradio>=4.19\",\n",
125126
" \"onnx<1.16.2\",\n",
126127
" \"einops\",\n",
127128
" \"transformers_stream_generator\",\n",
128129
" \"tiktoken\",\n",
129-
" \"transformers>=4.43.1\",\n",
130+
" \"transformers==4.53.3\",\n",
130131
" \"faiss-cpu\",\n",
131132
" \"sentence_transformers\",\n",
132133
" \"langchain>=0.2.0\",\n",

notebooks/llm-rag-langchain/llm-rag-langchain.ipynb

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -95,7 +95,7 @@
9595
"\n",
9696
"os.environ[\"GIT_CLONE_PROTECTION_ACTIVE\"] = \"false\"\n",
9797
"\n",
98-
"pip_install(\"--pre\", \"-U\", \"openvino>=2024.2.0\", \"--extra-index-url\", \"https://storage.openvinotoolkit.org/simple/wheels/nightly\")\n",
98+
"pip_install(\"--pre\", \"-U\", \"openvino>=2025.3.0\", \"--extra-index-url\", \"https://storage.openvinotoolkit.org/simple/wheels/nightly\")\n",
9999
"pip_install(\"--pre\", \"-U\", \"openvino-tokenizers[transformers]\", \"--extra-index-url\", \"https://storage.openvinotoolkit.org/simple/wheels/nightly\")\n",
100100
"pip_install(\n",
101101
" \"-q\",\n",
@@ -104,14 +104,15 @@
104104
" \"--upgrade-strategy\",\n",
105105
" \"eager\",\n",
106106
" \"optimum[openvino,nncf,onnxruntime]\",\n",
107+
" \"torch==2.8\",\n",
107108
" \"nncf>=2.18.0\",\n",
108109
" \"accelerate\",\n",
109110
" \"gradio>=4.19\",\n",
110111
" \"onnx<1.16.2\",\n",
111112
" \"einops\",\n",
112113
" \"transformers_stream_generator\",\n",
113114
" \"tiktoken\",\n",
114-
" \"transformers>=4.43.1\",\n",
115+
" \"transformers==4.53.3\",\n",
115116
" \"faiss-cpu\",\n",
116117
" \"sentence_transformers\",\n",
117118
" \"langchain>=0.2.0\",\n",

notebooks/nuextract-structure-extraction/nuextract-structure-extraction.ipynb

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -65,8 +65,8 @@
6565
"outputs": [],
6666
"source": [
6767
"%pip uninstall -q -y optimum optimum-intel\n",
68-
"%pip install -q -U --pre \"openvino>=2025.0.0\" openvino-tokenizers[transformers] openvino-genai --extra-index-url https://storage.openvinotoolkit.org/simple/wheels/nightly\n",
69-
"%pip install -q \"torch>=2.1\" \"nncf>=2.12\" \"transformers>=4.40.0\" \"accelerate\" \"gradio>=4.19\" \"git+https://github.com/huggingface/optimum-intel.git\" --extra-index-url https://download.pytorch.org/whl/cpu"
68+
"%pip install -q -U --pre \"openvino>=2025.3.0\" openvino-tokenizers[transformers] openvino-genai --extra-index-url https://storage.openvinotoolkit.org/simple/wheels/nightly\n",
69+
"%pip install -q \"torch==2.8\" \"nncf>=2.18.0\" \"transformers==4.53.3\" \"accelerate\" \"gradio>=4.19\" \"git+https://github.com/huggingface/optimum-intel.git\" --extra-index-url https://download.pytorch.org/whl/cpu"
7070
]
7171
},
7272
{

0 commit comments

Comments (0)