Commit 21ee441

Use builtins.dict instead of typing.Dict
1 parent 3dba8d5 commit 21ee441
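
Why builtins.dict rather than a bare PEP 585 dict? Each of the classes touched here defines its own (deprecated) dict() method, and runtime annotation resolution such as typing.get_type_hints() evaluates annotation strings with the class namespace as locals, so a bare dict[...] annotation would resolve to that method instead of the built-in type. That is presumably why the code previously spelled it typing.Dict with # noqa: UP006; builtins.dict keeps the modern generic syntax without the name clash. The snippet below is a minimal sketch of the clash, not LangChain code; the Model class and its data field are invented for illustration.

from __future__ import annotations

import builtins
import typing


class Model:
    # Like the classes in this commit, this class defines its own dict()
    # method, so within the class namespace the name "dict" is this method.
    def dict(self) -> builtins.dict[str, int]:
        return {"answer": 42}

    # PEP 563 annotation strings are later evaluated with the class namespace
    # as locals; a bare dict[str, int] here would pick up the method above and
    # fail with "TypeError: 'function' object is not subscriptable".
    # builtins.dict is unambiguous and still a PEP 585 generic.
    data: builtins.dict[str, int]


# Resolves cleanly to the built-in mapping type.
print(typing.get_type_hints(Model))  # {'data': dict[str, int]}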

File tree

4 files changed, +39 -39 lines changed


libs/core/langchain_core/language_models/chat_models.py

Lines changed: 13 additions & 15 deletions
@@ -3,9 +3,9 @@
 from __future__ import annotations

 import asyncio
+import builtins
 import inspect
 import json
-import typing
 import warnings
 from abc import ABC, abstractmethod
 from collections.abc import AsyncIterator, Iterator, Sequence
@@ -373,7 +373,7 @@ class BaseChatModel(BaseLanguageModel[AIMessage], ABC):

     @model_validator(mode="before")
     @classmethod
-    def raise_deprecation(cls, values: typing.Dict) -> Any:  # noqa: UP006
+    def raise_deprecation(cls, values: builtins.dict) -> Any:
         """Emit deprecation warning if ``callback_manager`` is used.

         Args:
@@ -397,7 +397,7 @@ def raise_deprecation(cls, values: typing.Dict) -> Any:  # noqa: UP006
             )

     @cached_property
-    def _serialized(self) -> typing.Dict[str, Any]:  # noqa: UP006
+    def _serialized(self) -> builtins.dict[str, Any]:
         return dumpd(self)

     # --- Runnable methods ---
@@ -747,8 +747,8 @@ async def astream(

     def _combine_llm_outputs(
         self,
-        llm_outputs: list[Optional[typing.Dict]],  # noqa: ARG002, UP006
-    ) -> typing.Dict:  # noqa: UP006
+        llm_outputs: list[Optional[builtins.dict]],  # noqa: ARG002
+    ) -> builtins.dict:
         return {}

     def _convert_cached_generations(self, cache_val: list) -> list[ChatGeneration]:
@@ -794,7 +794,7 @@ def _get_invocation_params(
         self,
         stop: Optional[list[str]] = None,
         **kwargs: Any,
-    ) -> typing.Dict:  # noqa: UP006
+    ) -> builtins.dict:
         params = self.asdict()
         params["stop"] = stop
         return {**params, **kwargs}
@@ -858,7 +858,7 @@ def generate(
         callbacks: Callbacks = None,
         *,
         tags: Optional[list[str]] = None,
-        metadata: Optional[typing.Dict[str, Any]] = None,  # noqa: UP006
+        metadata: Optional[builtins.dict[str, Any]] = None,
         run_name: Optional[str] = None,
         run_id: Optional[uuid.UUID] = None,
         **kwargs: Any,
@@ -974,7 +974,7 @@ async def agenerate(
         callbacks: Callbacks = None,
         *,
         tags: Optional[list[str]] = None,
-        metadata: Optional[typing.Dict[str, Any]] = None,  # noqa: UP006
+        metadata: Optional[builtins.dict[str, Any]] = None,
         run_name: Optional[str] = None,
         run_id: Optional[uuid.UUID] = None,
         **kwargs: Any,
@@ -1596,20 +1596,18 @@ def _llm_type(self) -> str:

     @deprecated("0.3.61", alternative="asdict", removal="1.0")
     @override
-    def dict(self, **kwargs: Any) -> typing.Dict[str, Any]:  # noqa: UP006
+    def dict(self, **kwargs: Any) -> builtins.dict[str, Any]:
         return self.asdict()

-    def asdict(self) -> typing.Dict[str, Any]:  # noqa: UP006
+    def asdict(self) -> builtins.dict[str, Any]:
         """Return a dictionary of the LLM."""
         starter_dict = dict(self._identifying_params)
         starter_dict["_type"] = self._llm_type
         return starter_dict

     def bind_tools(
         self,
-        tools: Sequence[
-            Union[typing.Dict[str, Any], type, Callable, BaseTool]  # noqa: UP006
-        ],
+        tools: Sequence[Union[builtins.dict[str, Any], type, Callable, BaseTool]],
         *,
         tool_choice: Optional[Union[str]] = None,
         **kwargs: Any,
@@ -1628,11 +1626,11 @@ def bind_tools(

     def with_structured_output(
         self,
-        schema: Union[typing.Dict, type],  # noqa: UP006
+        schema: Union[builtins.dict, type],
         *,
         include_raw: bool = False,
         **kwargs: Any,
-    ) -> Runnable[LanguageModelInput, Union[typing.Dict, BaseModel]]:  # noqa: UP006
+    ) -> Runnable[LanguageModelInput, Union[builtins.dict, BaseModel]]:
         """Model wrapper that returns outputs formatted to match the given schema.

         Args:

libs/core/langchain_core/language_models/llms.py

Lines changed: 9 additions & 9 deletions
@@ -3,11 +3,11 @@
 from __future__ import annotations

 import asyncio
+import builtins
 import functools
 import inspect
 import json
 import logging
-import typing
 import warnings
 from abc import ABC, abstractmethod
 from collections.abc import AsyncIterator, Iterator, Sequence
@@ -306,7 +306,7 @@ class BaseLLM(BaseLanguageModel[str], ABC):

     @model_validator(mode="before")
     @classmethod
-    def raise_deprecation(cls, values: typing.Dict[str, Any]) -> Any:  # noqa: UP006
+    def raise_deprecation(cls, values: builtins.dict[str, Any]) -> Any:
         """Raise deprecation warning if callback_manager is used."""
         if values.get("callback_manager") is not None:
             warnings.warn(
@@ -318,7 +318,7 @@ def raise_deprecation(cls, values: typing.Dict[str, Any]) -> Any:  # noqa: UP006
         return values

     @functools.cached_property
-    def _serialized(self) -> typing.Dict[str, Any]:  # noqa: UP006
+    def _serialized(self) -> builtins.dict[str, Any]:
         return dumpd(self)

     # --- Runnable methods ---
@@ -844,7 +844,7 @@ def generate(
         *,
         tags: Optional[Union[list[str], list[list[str]]]] = None,
         metadata: Optional[
-            Union[typing.Dict[str, Any], list[typing.Dict[str, Any]]]  # noqa: UP006
+            Union[builtins.dict[str, Any], list[builtins.dict[str, Any]]]
         ] = None,
         run_name: Optional[Union[str, list[str]]] = None,
         run_id: Optional[Union[uuid.UUID, list[Optional[uuid.UUID]]]] = None,
@@ -1112,7 +1112,7 @@ async def agenerate(
         *,
         tags: Optional[Union[list[str], list[list[str]]]] = None,
         metadata: Optional[
-            Union[typing.Dict[str, Any], list[typing.Dict[str, Any]]]  # noqa: UP006
+            Union[builtins.dict[str, Any], list[builtins.dict[str, Any]]]
         ] = None,
         run_name: Optional[Union[str, list[str]]] = None,
         run_id: Optional[Union[uuid.UUID, list[Optional[uuid.UUID]]]] = None,
@@ -1319,7 +1319,7 @@ def __call__(
         callbacks: Callbacks = None,
         *,
         tags: Optional[list[str]] = None,
-        metadata: Optional[typing.Dict[str, Any]] = None,  # noqa: UP006
+        metadata: Optional[builtins.dict[str, Any]] = None,
         **kwargs: Any,
     ) -> str:
         """Check Cache and run the LLM on the given prompt and input.
@@ -1368,7 +1368,7 @@ async def _call_async(
         callbacks: Callbacks = None,
         *,
         tags: Optional[list[str]] = None,
-        metadata: Optional[typing.Dict[str, Any]] = None,  # noqa: UP006
+        metadata: Optional[builtins.dict[str, Any]] = None,
         **kwargs: Any,
     ) -> str:
         """Check Cache and run the LLM on the given prompt and input."""
@@ -1438,10 +1438,10 @@ def _llm_type(self) -> str:

     @deprecated("0.3.61", alternative="asdict", removal="1.0")
     @override
-    def dict(self, **kwargs: Any) -> typing.Dict[str, Any]:  # noqa: UP006
+    def dict(self, **kwargs: Any) -> builtins.dict[str, Any]:
         return self.asdict()

-    def asdict(self) -> typing.Dict[str, Any]:  # noqa: UP006
+    def asdict(self) -> builtins.dict[str, Any]:
         """Return a dictionary of the LLM."""
         starter_dict = dict(self._identifying_params)
         starter_dict["_type"] = self._llm_type

libs/core/langchain_core/output_parsers/base.py

Lines changed: 3 additions & 3 deletions
@@ -2,8 +2,8 @@

 from __future__ import annotations

+import builtins
 import contextlib
-import typing
 from abc import ABC, abstractmethod
 from typing import (
     TYPE_CHECKING,
@@ -332,10 +332,10 @@ def _type(self) -> str:

     @deprecated("0.3.61", alternative="asdict", removal="1.0")
     @override
-    def dict(self, **kwargs: Any) -> typing.Dict[str, Any]:  # noqa: UP006
+    def dict(self, **kwargs: Any) -> builtins.dict[str, Any]:
         return self.asdict()

-    def asdict(self, **kwargs: Any) -> typing.Dict[str, Any]:  # noqa: UP006
+    def asdict(self, **kwargs: Any) -> builtins.dict[str, Any]:
         """Return dictionary representation of output parser."""
         output_parser_dict = super().model_dump(**kwargs)
         with contextlib.suppress(NotImplementedError):

libs/core/langchain_core/prompts/base.py

Lines changed: 14 additions & 12 deletions
@@ -2,9 +2,9 @@

 from __future__ import annotations

+import builtins
 import contextlib
 import json
-import typing
 from abc import ABC, abstractmethod
 from collections.abc import Mapping
 from functools import cached_property
@@ -55,7 +55,7 @@ class BasePromptTemplate(
     """optional_variables: A list of the names of the variables for placeholder
     or MessagePlaceholder that are optional. These variables are auto inferred
     from the prompt and user need not provide them."""
-    input_types: typing.Dict[str, Any] = Field(default_factory=dict, exclude=True)  # noqa: UP006
+    input_types: builtins.dict[str, Any] = Field(default_factory=dict, exclude=True)
     """A dictionary of the types of the variables the prompt template expects.
     If not provided, all variables are assumed to be strings."""
     output_parser: Optional[BaseOutputParser] = None
@@ -65,7 +65,7 @@ class BasePromptTemplate(

     Partial variables populate the template so that you don't need to
     pass them in every time you call the prompt."""
-    metadata: Optional[typing.Dict[str, Any]] = None  # noqa: UP006
+    metadata: Optional[builtins.dict[str, Any]] = None
     """Metadata to be used for tracing."""
     tags: Optional[list[str]] = None
     """Tags to be used for tracing."""
@@ -117,7 +117,7 @@ def is_lc_serializable(cls) -> bool:
         )

     @cached_property
-    def _serialized(self) -> typing.Dict[str, Any]:  # noqa: UP006
+    def _serialized(self) -> builtins.dict[str, Any]:
         return dumpd(self)

     @property
@@ -150,7 +150,7 @@ def get_input_schema(
             field_definitions={**required_input_variables, **optional_input_variables},
         )

-    def _validate_input(self, inner_input: Any) -> typing.Dict:  # noqa: UP006
+    def _validate_input(self, inner_input: Any) -> builtins.dict:
         if not isinstance(inner_input, dict):
             if len(self.input_variables) == 1:
                 var_name = self.input_variables[0]
@@ -186,22 +186,22 @@ def _validate_input(self, inner_input: Any) -> typing.Dict:  # noqa: UP006

     def _format_prompt_with_error_handling(
         self,
-        inner_input: typing.Dict,  # noqa: UP006
+        inner_input: builtins.dict,
     ) -> PromptValue:
         inner_input_ = self._validate_input(inner_input)
         return self.format_prompt(**inner_input_)

     async def _aformat_prompt_with_error_handling(
         self,
-        inner_input: typing.Dict,  # noqa: UP006
+        inner_input: builtins.dict,
     ) -> PromptValue:
         inner_input_ = self._validate_input(inner_input)
         return await self.aformat_prompt(**inner_input_)

     @override
     def invoke(
         self,
-        input: typing.Dict,  # noqa: UP006
+        input: builtins.dict,
         config: Optional[RunnableConfig] = None,
         **kwargs: Any,
     ) -> PromptValue:
@@ -230,7 +230,7 @@ def invoke(
     @override
     async def ainvoke(
         self,
-        input: typing.Dict,  # noqa: UP006
+        input: builtins.dict,
         config: Optional[RunnableConfig] = None,
         **kwargs: Any,
     ) -> PromptValue:
@@ -294,7 +294,9 @@ def partial(self, **kwargs: Union[str, Callable[[], str]]) -> BasePromptTemplate
         prompt_dict["partial_variables"] = {**self.partial_variables, **kwargs}
         return type(self)(**prompt_dict)

-    def _merge_partial_and_user_variables(self, **kwargs: Any) -> typing.Dict[str, Any]:  # noqa: UP006
+    def _merge_partial_and_user_variables(
+        self, **kwargs: Any
+    ) -> builtins.dict[str, Any]:
         # Get partial params:
         partial_kwargs = {
             k: v if not callable(v) else v() for k, v in self.partial_variables.items()
@@ -344,10 +346,10 @@ def _prompt_type(self) -> str:

     @deprecated("0.3.61", alternative="asdict", removal="1.0")
     @override
-    def dict(self, **kwargs: Any) -> typing.Dict[str, Any]:  # noqa: UP006
+    def dict(self, **kwargs: Any) -> builtins.dict[str, Any]:
         return self.asdict(**kwargs)

-    def asdict(self, **kwargs: Any) -> typing.Dict[str, Any]:  # noqa: UP006
+    def asdict(self, **kwargs: Any) -> builtins.dict[str, Any]:
         """Return dictionary representation of prompt.

         Args:
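
A quick sanity check, not part of the commit: at runtime the two spellings describe the same type, so the swap is purely a change of annotation style, which matches the symmetric +39/-39 line count above.

import builtins
import typing

# Both spellings deparameterize to the built-in dict; only the syntax differs.
assert typing.get_origin(typing.Dict[str, typing.Any]) is dict
assert typing.get_origin(builtins.dict[str, typing.Any]) is dict
print("typing.Dict and builtins.dict annotations are interchangeable here")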
