Skip to content

Commit 3dba8d5

Browse files
committed
core: deprecate problematic dict() method
1 parent: 188c015 · commit: 3dba8d5

File tree

4 files changed

+74
-32
lines changed

4 files changed

+74
-32
lines changed

libs/core/langchain_core/language_models/chat_models.py

Lines changed: 15 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -373,7 +373,7 @@ class BaseChatModel(BaseLanguageModel[AIMessage], ABC):
373373

374374
@model_validator(mode="before")
375375
@classmethod
376-
def raise_deprecation(cls, values: dict) -> Any:
376+
def raise_deprecation(cls, values: typing.Dict) -> Any: # noqa: UP006
377377
"""Emit deprecation warning if ``callback_manager`` is used.
378378
379379
Args:
@@ -397,7 +397,7 @@ def raise_deprecation(cls, values: dict) -> Any:
397397
)
398398

399399
@cached_property
400-
def _serialized(self) -> dict[str, Any]:
400+
def _serialized(self) -> typing.Dict[str, Any]: # noqa: UP006
401401
return dumpd(self)
402402

403403
# --- Runnable methods ---
@@ -745,7 +745,10 @@ async def astream(
745745

746746
# --- Custom methods ---
747747

748-
def _combine_llm_outputs(self, llm_outputs: list[Optional[dict]]) -> dict: # noqa: ARG002
748+
def _combine_llm_outputs(
749+
self,
750+
llm_outputs: list[Optional[typing.Dict]], # noqa: ARG002, UP006
751+
) -> typing.Dict: # noqa: UP006
749752
return {}
750753

751754
def _convert_cached_generations(self, cache_val: list) -> list[ChatGeneration]:
@@ -791,8 +794,8 @@ def _get_invocation_params(
791794
self,
792795
stop: Optional[list[str]] = None,
793796
**kwargs: Any,
794-
) -> dict:
795-
params = self.dict()
797+
) -> typing.Dict: # noqa: UP006
798+
params = self.asdict()
796799
params["stop"] = stop
797800
return {**params, **kwargs}
798801

@@ -855,7 +858,7 @@ def generate(
855858
callbacks: Callbacks = None,
856859
*,
857860
tags: Optional[list[str]] = None,
858-
metadata: Optional[dict[str, Any]] = None,
861+
metadata: Optional[typing.Dict[str, Any]] = None, # noqa: UP006
859862
run_name: Optional[str] = None,
860863
run_id: Optional[uuid.UUID] = None,
861864
**kwargs: Any,
@@ -971,7 +974,7 @@ async def agenerate(
971974
callbacks: Callbacks = None,
972975
*,
973976
tags: Optional[list[str]] = None,
974-
metadata: Optional[dict[str, Any]] = None,
977+
metadata: Optional[typing.Dict[str, Any]] = None, # noqa: UP006
975978
run_name: Optional[str] = None,
976979
run_id: Optional[uuid.UUID] = None,
977980
**kwargs: Any,
@@ -1591,8 +1594,12 @@ async def apredict_messages(
15911594
def _llm_type(self) -> str:
15921595
"""Return type of chat model."""
15931596

1597+
@deprecated("0.3.61", alternative="asdict", removal="1.0")
15941598
@override
1595-
def dict(self, **kwargs: Any) -> dict:
1599+
def dict(self, **kwargs: Any) -> typing.Dict[str, Any]: # noqa: UP006
1600+
return self.asdict()
1601+
1602+
def asdict(self) -> typing.Dict[str, Any]: # noqa: UP006
15961603
"""Return a dictionary of the LLM."""
15971604
starter_dict = dict(self._identifying_params)
15981605
starter_dict["_type"] = self._llm_type

libs/core/langchain_core/language_models/llms.py

Lines changed: 21 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -7,6 +7,7 @@
77
import inspect
88
import json
99
import logging
10+
import typing
1011
import warnings
1112
from abc import ABC, abstractmethod
1213
from collections.abc import AsyncIterator, Iterator, Sequence
@@ -305,7 +306,7 @@ class BaseLLM(BaseLanguageModel[str], ABC):
305306

306307
@model_validator(mode="before")
307308
@classmethod
308-
def raise_deprecation(cls, values: dict) -> Any:
309+
def raise_deprecation(cls, values: typing.Dict[str, Any]) -> Any: # noqa: UP006
309310
"""Raise deprecation warning if callback_manager is used."""
310311
if values.get("callback_manager") is not None:
311312
warnings.warn(
@@ -317,7 +318,7 @@ def raise_deprecation(cls, values: dict) -> Any:
317318
return values
318319

319320
@functools.cached_property
320-
def _serialized(self) -> dict[str, Any]:
321+
def _serialized(self) -> typing.Dict[str, Any]: # noqa: UP006
321322
return dumpd(self)
322323

323324
# --- Runnable methods ---
@@ -531,7 +532,7 @@ def stream(
531532
else:
532533
prompt = self._convert_input(input).to_string()
533534
config = ensure_config(config)
534-
params = self.dict()
535+
params = self.asdict()
535536
params["stop"] = stop
536537
params = {**params, **kwargs}
537538
options = {"stop": stop}
@@ -601,7 +602,7 @@ async def astream(
601602

602603
prompt = self._convert_input(input).to_string()
603604
config = ensure_config(config)
604-
params = self.dict()
605+
params = self.asdict()
605606
params["stop"] = stop
606607
params = {**params, **kwargs}
607608
options = {"stop": stop}
@@ -842,7 +843,9 @@ def generate(
842843
callbacks: Optional[Union[Callbacks, list[Callbacks]]] = None,
843844
*,
844845
tags: Optional[Union[list[str], list[list[str]]]] = None,
845-
metadata: Optional[Union[dict[str, Any], list[dict[str, Any]]]] = None,
846+
metadata: Optional[
847+
Union[typing.Dict[str, Any], list[typing.Dict[str, Any]]] # noqa: UP006
848+
] = None,
846849
run_name: Optional[Union[str, list[str]]] = None,
847850
run_id: Optional[Union[uuid.UUID, list[Optional[uuid.UUID]]]] = None,
848851
**kwargs: Any,
@@ -970,7 +973,7 @@ def generate(
970973
] * len(prompts)
971974
run_name_list = [cast("Optional[str]", run_name)] * len(prompts)
972975
run_ids_list = self._get_run_ids_list(run_id, prompts)
973-
params = self.dict()
976+
params = self.asdict()
974977
params["stop"] = stop
975978
options = {"stop": stop}
976979
(
@@ -1108,7 +1111,9 @@ async def agenerate(
11081111
callbacks: Optional[Union[Callbacks, list[Callbacks]]] = None,
11091112
*,
11101113
tags: Optional[Union[list[str], list[list[str]]]] = None,
1111-
metadata: Optional[Union[dict[str, Any], list[dict[str, Any]]]] = None,
1114+
metadata: Optional[
1115+
Union[typing.Dict[str, Any], list[typing.Dict[str, Any]]] # noqa: UP006
1116+
] = None,
11121117
run_name: Optional[Union[str, list[str]]] = None,
11131118
run_id: Optional[Union[uuid.UUID, list[Optional[uuid.UUID]]]] = None,
11141119
**kwargs: Any,
@@ -1225,7 +1230,7 @@ async def agenerate(
12251230
] * len(prompts)
12261231
run_name_list = [cast("Optional[str]", run_name)] * len(prompts)
12271232
run_ids_list = self._get_run_ids_list(run_id, prompts)
1228-
params = self.dict()
1233+
params = self.asdict()
12291234
params["stop"] = stop
12301235
options = {"stop": stop}
12311236
(
@@ -1314,7 +1319,7 @@ def __call__(
13141319
callbacks: Callbacks = None,
13151320
*,
13161321
tags: Optional[list[str]] = None,
1317-
metadata: Optional[dict[str, Any]] = None,
1322+
metadata: Optional[typing.Dict[str, Any]] = None, # noqa: UP006
13181323
**kwargs: Any,
13191324
) -> str:
13201325
"""Check Cache and run the LLM on the given prompt and input.
@@ -1363,7 +1368,7 @@ async def _call_async(
13631368
callbacks: Callbacks = None,
13641369
*,
13651370
tags: Optional[list[str]] = None,
1366-
metadata: Optional[dict[str, Any]] = None,
1371+
metadata: Optional[typing.Dict[str, Any]] = None, # noqa: UP006
13671372
**kwargs: Any,
13681373
) -> str:
13691374
"""Check Cache and run the LLM on the given prompt and input."""
@@ -1431,8 +1436,12 @@ def __str__(self) -> str:
14311436
def _llm_type(self) -> str:
14321437
"""Return type of llm."""
14331438

1439+
@deprecated("0.3.61", alternative="asdict", removal="1.0")
14341440
@override
1435-
def dict(self, **kwargs: Any) -> dict:
1441+
def dict(self, **kwargs: Any) -> typing.Dict[str, Any]: # noqa: UP006
1442+
return self.asdict()
1443+
1444+
def asdict(self) -> typing.Dict[str, Any]: # noqa: UP006
14361445
"""Return a dictionary of the LLM."""
14371446
starter_dict = dict(self._identifying_params)
14381447
starter_dict["_type"] = self._llm_type
@@ -1461,7 +1470,7 @@ def save(self, file_path: Union[Path, str]) -> None:
14611470
directory_path.mkdir(parents=True, exist_ok=True)
14621471

14631472
# Fetch dictionary to save
1464-
prompt_dict = self.dict()
1473+
prompt_dict = self.asdict()
14651474

14661475
if save_path.suffix == ".json":
14671476
with save_path.open("w", encoding="utf-8") as f:

libs/core/langchain_core/output_parsers/base.py

Lines changed: 8 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -3,6 +3,7 @@
33
from __future__ import annotations
44

55
import contextlib
6+
import typing
67
from abc import ABC, abstractmethod
78
from typing import (
89
TYPE_CHECKING,
@@ -15,6 +16,7 @@
1516

1617
from typing_extensions import override
1718

19+
from langchain_core._api import deprecated
1820
from langchain_core.language_models import LanguageModelOutput
1921
from langchain_core.messages import AnyMessage, BaseMessage
2022
from langchain_core.outputs import ChatGeneration, Generation
@@ -328,7 +330,12 @@ def _type(self) -> str:
328330
)
329331
raise NotImplementedError(msg)
330332

331-
def dict(self, **kwargs: Any) -> dict:
333+
@deprecated("0.3.61", alternative="asdict", removal="1.0")
334+
@override
335+
def dict(self, **kwargs: Any) -> typing.Dict[str, Any]: # noqa: UP006
336+
return self.asdict()
337+
338+
def asdict(self, **kwargs: Any) -> typing.Dict[str, Any]: # noqa: UP006
332339
"""Return dictionary representation of output parser."""
333340
output_parser_dict = super().model_dump(**kwargs)
334341
with contextlib.suppress(NotImplementedError):

libs/core/langchain_core/prompts/base.py

Lines changed: 30 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -23,6 +23,7 @@
2323
from pydantic import BaseModel, ConfigDict, Field, model_validator
2424
from typing_extensions import Self, override
2525

26+
from langchain_core._api import deprecated
2627
from langchain_core.exceptions import ErrorCode, create_message
2728
from langchain_core.load import dumpd
2829
from langchain_core.output_parsers.base import BaseOutputParser
@@ -116,7 +117,7 @@ def is_lc_serializable(cls) -> bool:
116117
)
117118

118119
@cached_property
119-
def _serialized(self) -> dict[str, Any]:
120+
def _serialized(self) -> typing.Dict[str, Any]: # noqa: UP006
120121
return dumpd(self)
121122

122123
@property
@@ -149,7 +150,7 @@ def get_input_schema(
149150
field_definitions={**required_input_variables, **optional_input_variables},
150151
)
151152

152-
def _validate_input(self, inner_input: Any) -> dict:
153+
def _validate_input(self, inner_input: Any) -> typing.Dict: # noqa: UP006
153154
if not isinstance(inner_input, dict):
154155
if len(self.input_variables) == 1:
155156
var_name = self.input_variables[0]
@@ -183,19 +184,26 @@ def _validate_input(self, inner_input: Any) -> dict:
183184
)
184185
return inner_input
185186

186-
def _format_prompt_with_error_handling(self, inner_input: dict) -> PromptValue:
187+
def _format_prompt_with_error_handling(
188+
self,
189+
inner_input: typing.Dict, # noqa: UP006
190+
) -> PromptValue:
187191
inner_input_ = self._validate_input(inner_input)
188192
return self.format_prompt(**inner_input_)
189193

190194
async def _aformat_prompt_with_error_handling(
191-
self, inner_input: dict
195+
self,
196+
inner_input: typing.Dict, # noqa: UP006
192197
) -> PromptValue:
193198
inner_input_ = self._validate_input(inner_input)
194199
return await self.aformat_prompt(**inner_input_)
195200

196201
@override
197202
def invoke(
198-
self, input: dict, config: Optional[RunnableConfig] = None, **kwargs: Any
203+
self,
204+
input: typing.Dict, # noqa: UP006
205+
config: Optional[RunnableConfig] = None,
206+
**kwargs: Any,
199207
) -> PromptValue:
200208
"""Invoke the prompt.
201209
@@ -221,7 +229,10 @@ def invoke(
221229

222230
@override
223231
async def ainvoke(
224-
self, input: dict, config: Optional[RunnableConfig] = None, **kwargs: Any
232+
self,
233+
input: typing.Dict, # noqa: UP006
234+
config: Optional[RunnableConfig] = None,
235+
**kwargs: Any,
225236
) -> PromptValue:
226237
"""Async invoke the prompt.
227238
@@ -283,7 +294,7 @@ def partial(self, **kwargs: Union[str, Callable[[], str]]) -> BasePromptTemplate
283294
prompt_dict["partial_variables"] = {**self.partial_variables, **kwargs}
284295
return type(self)(**prompt_dict)
285296

286-
def _merge_partial_and_user_variables(self, **kwargs: Any) -> dict[str, Any]:
297+
def _merge_partial_and_user_variables(self, **kwargs: Any) -> typing.Dict[str, Any]: # noqa: UP006
287298
# Get partial params:
288299
partial_kwargs = {
289300
k: v if not callable(v) else v() for k, v in self.partial_variables.items()
@@ -331,14 +342,22 @@ def _prompt_type(self) -> str:
331342
"""Return the prompt type key."""
332343
raise NotImplementedError
333344

334-
def dict(self, **kwargs: Any) -> dict:
345+
@deprecated("0.3.61", alternative="asdict", removal="1.0")
346+
@override
347+
def dict(self, **kwargs: Any) -> typing.Dict[str, Any]: # noqa: UP006
348+
return self.asdict(**kwargs)
349+
350+
def asdict(self, **kwargs: Any) -> typing.Dict[str, Any]: # noqa: UP006
335351
"""Return dictionary representation of prompt.
336352
337353
Args:
338-
kwargs: Any additional arguments to pass to the dictionary.
354+
**kwargs: Any additional arguments to pass to the dictionary.
339355
340356
Returns:
341-
Dict: Dictionary representation of the prompt.
357+
Dictionary representation of the prompt.
358+
359+
Raises:
360+
NotImplementedError: If the prompt type is not implemented.
342361
"""
343362
prompt_dict = super().model_dump(**kwargs)
344363
with contextlib.suppress(NotImplementedError):
@@ -367,7 +386,7 @@ def save(self, file_path: Union[Path, str]) -> None:
367386
raise ValueError(msg)
368387

369388
# Fetch dictionary to save
370-
prompt_dict = self.dict()
389+
prompt_dict = self.asdict()
371390
if "_type" not in prompt_dict:
372391
msg = f"Prompt {self} does not support saving."
373392
raise NotImplementedError(msg)

0 commit comments

Comments (0)