Skip to content

Commit 88472d8

Browse files
committed
core: deprecate problematic dict() method
1 parent 925ad65 commit 88472d8

File tree

4 files changed

+71
-32
lines changed

4 files changed

+71
-32
lines changed

libs/core/langchain_core/language_models/chat_models.py

Lines changed: 15 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -366,7 +366,7 @@ class BaseChatModel(BaseLanguageModel[AIMessage], ABC):
366366

367367
@model_validator(mode="before")
368368
@classmethod
369-
def raise_deprecation(cls, values: dict) -> Any:
369+
def raise_deprecation(cls, values: typing.Dict) -> Any: # noqa: UP006
370370
"""Raise deprecation warning if ``callback_manager`` is used.
371371
372372
Args:
@@ -393,7 +393,7 @@ def raise_deprecation(cls, values: dict) -> Any:
393393
)
394394

395395
@cached_property
396-
def _serialized(self) -> dict[str, Any]:
396+
def _serialized(self) -> typing.Dict[str, Any]: # noqa: UP006
397397
return dumpd(self)
398398

399399
# --- Runnable methods ---
@@ -741,7 +741,10 @@ async def astream(
741741

742742
# --- Custom methods ---
743743

744-
def _combine_llm_outputs(self, llm_outputs: list[Optional[dict]]) -> dict: # noqa: ARG002
744+
def _combine_llm_outputs(
745+
self,
746+
llm_outputs: list[Optional[typing.Dict]], # noqa: ARG002, UP006
747+
) -> typing.Dict: # noqa: UP006
745748
return {}
746749

747750
def _convert_cached_generations(self, cache_val: list) -> list[ChatGeneration]:
@@ -787,8 +790,8 @@ def _get_invocation_params(
787790
self,
788791
stop: Optional[list[str]] = None,
789792
**kwargs: Any,
790-
) -> dict:
791-
params = self.dict()
793+
) -> typing.Dict: # noqa: UP006
794+
params = self.asdict()
792795
params["stop"] = stop
793796
return {**params, **kwargs}
794797

@@ -851,7 +854,7 @@ def generate(
851854
callbacks: Callbacks = None,
852855
*,
853856
tags: Optional[list[str]] = None,
854-
metadata: Optional[dict[str, Any]] = None,
857+
metadata: Optional[typing.Dict[str, Any]] = None, # noqa: UP006
855858
run_name: Optional[str] = None,
856859
run_id: Optional[uuid.UUID] = None,
857860
**kwargs: Any,
@@ -966,7 +969,7 @@ async def agenerate(
966969
callbacks: Callbacks = None,
967970
*,
968971
tags: Optional[list[str]] = None,
969-
metadata: Optional[dict[str, Any]] = None,
972+
metadata: Optional[typing.Dict[str, Any]] = None, # noqa: UP006
970973
run_name: Optional[str] = None,
971974
run_id: Optional[uuid.UUID] = None,
972975
**kwargs: Any,
@@ -1537,8 +1540,12 @@ async def apredict_messages(
15371540
def _llm_type(self) -> str:
15381541
"""Return type of chat model."""
15391542

1543+
@deprecated("0.3.61", alternative="asdict", removal="1.0")
15401544
@override
1541-
def dict(self, **kwargs: Any) -> dict:
1545+
def dict(self, **kwargs: Any) -> typing.Dict[str, Any]: # noqa: UP006
1546+
return self.asdict()
1547+
1548+
def asdict(self) -> typing.Dict[str, Any]: # noqa: UP006
15421549
"""Return a dictionary of the LLM."""
15431550
starter_dict = dict(self._identifying_params)
15441551
starter_dict["_type"] = self._llm_type

libs/core/langchain_core/language_models/llms.py

Lines changed: 21 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -7,6 +7,7 @@
77
import inspect
88
import json
99
import logging
10+
import typing
1011
import warnings
1112
from abc import ABC, abstractmethod
1213
from collections.abc import AsyncIterator, Iterator, Sequence
@@ -304,7 +305,7 @@ class BaseLLM(BaseLanguageModel[str], ABC):
304305

305306
@model_validator(mode="before")
306307
@classmethod
307-
def raise_deprecation(cls, values: dict) -> Any:
308+
def raise_deprecation(cls, values: typing.Dict[str, Any]) -> Any: # noqa: UP006
308309
"""Raise deprecation warning if callback_manager is used."""
309310
if values.get("callback_manager") is not None:
310311
warnings.warn(
@@ -316,7 +317,7 @@ def raise_deprecation(cls, values: dict) -> Any:
316317
return values
317318

318319
@functools.cached_property
319-
def _serialized(self) -> dict[str, Any]:
320+
def _serialized(self) -> typing.Dict[str, Any]: # noqa: UP006
320321
return dumpd(self)
321322

322323
# --- Runnable methods ---
@@ -530,7 +531,7 @@ def stream(
530531
else:
531532
prompt = self._convert_input(input).to_string()
532533
config = ensure_config(config)
533-
params = self.dict()
534+
params = self.asdict()
534535
params["stop"] = stop
535536
params = {**params, **kwargs}
536537
options = {"stop": stop}
@@ -600,7 +601,7 @@ async def astream(
600601

601602
prompt = self._convert_input(input).to_string()
602603
config = ensure_config(config)
603-
params = self.dict()
604+
params = self.asdict()
604605
params["stop"] = stop
605606
params = {**params, **kwargs}
606607
options = {"stop": stop}
@@ -819,7 +820,9 @@ def generate(
819820
callbacks: Optional[Union[Callbacks, list[Callbacks]]] = None,
820821
*,
821822
tags: Optional[Union[list[str], list[list[str]]]] = None,
822-
metadata: Optional[Union[dict[str, Any], list[dict[str, Any]]]] = None,
823+
metadata: Optional[
824+
Union[typing.Dict[str, Any], list[typing.Dict[str, Any]]] # noqa: UP006
825+
] = None,
823826
run_name: Optional[Union[str, list[str]]] = None,
824827
run_id: Optional[Union[uuid.UUID, list[Optional[uuid.UUID]]]] = None,
825828
**kwargs: Any,
@@ -941,7 +944,7 @@ def generate(
941944
] * len(prompts)
942945
run_name_list = [cast("Optional[str]", run_name)] * len(prompts)
943946
run_ids_list = self._get_run_ids_list(run_id, prompts)
944-
params = self.dict()
947+
params = self.asdict()
945948
params["stop"] = stop
946949
options = {"stop": stop}
947950
(
@@ -1079,7 +1082,9 @@ async def agenerate(
10791082
callbacks: Optional[Union[Callbacks, list[Callbacks]]] = None,
10801083
*,
10811084
tags: Optional[Union[list[str], list[list[str]]]] = None,
1082-
metadata: Optional[Union[dict[str, Any], list[dict[str, Any]]]] = None,
1085+
metadata: Optional[
1086+
Union[typing.Dict[str, Any], list[typing.Dict[str, Any]]] # noqa: UP006
1087+
] = None,
10831088
run_name: Optional[Union[str, list[str]]] = None,
10841089
run_id: Optional[Union[uuid.UUID, list[Optional[uuid.UUID]]]] = None,
10851090
**kwargs: Any,
@@ -1191,7 +1196,7 @@ async def agenerate(
11911196
] * len(prompts)
11921197
run_name_list = [cast("Optional[str]", run_name)] * len(prompts)
11931198
run_ids_list = self._get_run_ids_list(run_id, prompts)
1194-
params = self.dict()
1199+
params = self.asdict()
11951200
params["stop"] = stop
11961201
options = {"stop": stop}
11971202
(
@@ -1280,7 +1285,7 @@ def __call__(
12801285
callbacks: Callbacks = None,
12811286
*,
12821287
tags: Optional[list[str]] = None,
1283-
metadata: Optional[dict[str, Any]] = None,
1288+
metadata: Optional[typing.Dict[str, Any]] = None, # noqa: UP006
12841289
**kwargs: Any,
12851290
) -> str:
12861291
"""Check Cache and run the LLM on the given prompt and input.
@@ -1329,7 +1334,7 @@ async def _call_async(
13291334
callbacks: Callbacks = None,
13301335
*,
13311336
tags: Optional[list[str]] = None,
1332-
metadata: Optional[dict[str, Any]] = None,
1337+
metadata: Optional[typing.Dict[str, Any]] = None, # noqa: UP006
13331338
**kwargs: Any,
13341339
) -> str:
13351340
"""Check Cache and run the LLM on the given prompt and input."""
@@ -1397,8 +1402,12 @@ def __str__(self) -> str:
13971402
def _llm_type(self) -> str:
13981403
"""Return type of llm."""
13991404

1405+
@deprecated("0.3.61", alternative="asdict", removal="1.0")
14001406
@override
1401-
def dict(self, **kwargs: Any) -> dict:
1407+
def dict(self, **kwargs: Any) -> typing.Dict[str, Any]: # noqa: UP006
1408+
return self.asdict()
1409+
1410+
def asdict(self) -> typing.Dict[str, Any]: # noqa: UP006
14021411
"""Return a dictionary of the LLM."""
14031412
starter_dict = dict(self._identifying_params)
14041413
starter_dict["_type"] = self._llm_type
@@ -1427,7 +1436,7 @@ def save(self, file_path: Union[Path, str]) -> None:
14271436
directory_path.mkdir(parents=True, exist_ok=True)
14281437

14291438
# Fetch dictionary to save
1430-
prompt_dict = self.dict()
1439+
prompt_dict = self.asdict()
14311440

14321441
if save_path.suffix == ".json":
14331442
with save_path.open("w") as f:

libs/core/langchain_core/output_parsers/base.py

Lines changed: 8 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -3,6 +3,7 @@
33
from __future__ import annotations
44

55
import contextlib
6+
import typing
67
from abc import ABC, abstractmethod
78
from typing import (
89
TYPE_CHECKING,
@@ -15,6 +16,7 @@
1516

1617
from typing_extensions import override
1718

19+
from langchain_core._api import deprecated
1820
from langchain_core.language_models import LanguageModelOutput
1921
from langchain_core.messages import AnyMessage, BaseMessage
2022
from langchain_core.outputs import ChatGeneration, Generation
@@ -325,7 +327,12 @@ def _type(self) -> str:
325327
)
326328
raise NotImplementedError(msg)
327329

328-
def dict(self, **kwargs: Any) -> dict:
330+
@deprecated("0.3.61", alternative="asdict", removal="1.0")
331+
@override
332+
def dict(self, **kwargs: Any) -> typing.Dict[str, Any]: # noqa: UP006
333+
return self.asdict()
334+
335+
def asdict(self, **kwargs: Any) -> typing.Dict[str, Any]: # noqa: UP006
329336
"""Return dictionary representation of output parser."""
330337
output_parser_dict = super().model_dump(**kwargs)
331338
with contextlib.suppress(NotImplementedError):

libs/core/langchain_core/prompts/base.py

Lines changed: 27 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -23,6 +23,7 @@
2323
from pydantic import BaseModel, ConfigDict, Field, model_validator
2424
from typing_extensions import Self, override
2525

26+
from langchain_core._api import deprecated
2627
from langchain_core.exceptions import ErrorCode, create_message
2728
from langchain_core.load import dumpd
2829
from langchain_core.output_parsers.base import BaseOutputParser
@@ -118,7 +119,7 @@ def is_lc_serializable(cls) -> bool:
118119
)
119120

120121
@cached_property
121-
def _serialized(self) -> dict[str, Any]:
122+
def _serialized(self) -> typing.Dict[str, Any]: # noqa: UP006
122123
return dumpd(self)
123124

124125
@property
@@ -151,7 +152,7 @@ def get_input_schema(
151152
field_definitions={**required_input_variables, **optional_input_variables},
152153
)
153154

154-
def _validate_input(self, inner_input: Any) -> dict:
155+
def _validate_input(self, inner_input: Any) -> typing.Dict: # noqa: UP006
155156
if not isinstance(inner_input, dict):
156157
if len(self.input_variables) == 1:
157158
var_name = self.input_variables[0]
@@ -185,19 +186,26 @@ def _validate_input(self, inner_input: Any) -> dict:
185186
)
186187
return inner_input
187188

188-
def _format_prompt_with_error_handling(self, inner_input: dict) -> PromptValue:
189+
def _format_prompt_with_error_handling(
190+
self,
191+
inner_input: typing.Dict, # noqa: UP006
192+
) -> PromptValue:
189193
inner_input_ = self._validate_input(inner_input)
190194
return self.format_prompt(**inner_input_)
191195

192196
async def _aformat_prompt_with_error_handling(
193-
self, inner_input: dict
197+
self,
198+
inner_input: typing.Dict, # noqa: UP006
194199
) -> PromptValue:
195200
inner_input_ = self._validate_input(inner_input)
196201
return await self.aformat_prompt(**inner_input_)
197202

198203
@override
199204
def invoke(
200-
self, input: dict, config: Optional[RunnableConfig] = None, **kwargs: Any
205+
self,
206+
input: typing.Dict, # noqa: UP006
207+
config: Optional[RunnableConfig] = None,
208+
**kwargs: Any,
201209
) -> PromptValue:
202210
"""Invoke the prompt.
203211
@@ -223,7 +231,10 @@ def invoke(
223231

224232
@override
225233
async def ainvoke(
226-
self, input: dict, config: Optional[RunnableConfig] = None, **kwargs: Any
234+
self,
235+
input: typing.Dict, # noqa: UP006
236+
config: Optional[RunnableConfig] = None,
237+
**kwargs: Any,
227238
) -> PromptValue:
228239
"""Async invoke the prompt.
229240
@@ -285,7 +296,7 @@ def partial(self, **kwargs: Union[str, Callable[[], str]]) -> BasePromptTemplate
285296
prompt_dict["partial_variables"] = {**self.partial_variables, **kwargs}
286297
return type(self)(**prompt_dict)
287298

288-
def _merge_partial_and_user_variables(self, **kwargs: Any) -> dict[str, Any]:
299+
def _merge_partial_and_user_variables(self, **kwargs: Any) -> typing.Dict[str, Any]: # noqa: UP006
289300
# Get partial params:
290301
partial_kwargs = {
291302
k: v if not callable(v) else v() for k, v in self.partial_variables.items()
@@ -333,14 +344,19 @@ def _prompt_type(self) -> str:
333344
"""Return the prompt type key."""
334345
raise NotImplementedError
335346

336-
def dict(self, **kwargs: Any) -> dict:
347+
@deprecated("0.3.61", alternative="asdict", removal="1.0")
348+
@override
349+
def dict(self, **kwargs: Any) -> typing.Dict[str, Any]: # noqa: UP006
350+
return self.asdict(**kwargs)
351+
352+
def asdict(self, **kwargs: Any) -> typing.Dict[str, Any]: # noqa: UP006
337353
"""Return dictionary representation of prompt.
338354
339355
Args:
340-
kwargs: Any additional arguments to pass to the dictionary.
356+
**kwargs: Any additional arguments to pass to the dictionary.
341357
342358
Returns:
343-
Dict: Dictionary representation of the prompt.
359+
Dictionary representation of the prompt.
344360
345361
Raises:
346362
NotImplementedError: If the prompt type is not implemented.
@@ -372,7 +388,7 @@ def save(self, file_path: Union[Path, str]) -> None:
372388
raise ValueError(msg)
373389

374390
# Fetch dictionary to save
375-
prompt_dict = self.dict()
391+
prompt_dict = self.asdict()
376392
if "_type" not in prompt_dict:
377393
msg = f"Prompt {self} does not support saving."
378394
raise NotImplementedError(msg)

0 commit comments

Comments (0)