File tree Expand file tree Collapse file tree 2 files changed +17
-1
lines changed
langchain_oci/chat_models Expand file tree Collapse file tree 2 files changed +17
-1
lines changed Original file line number Diff line number Diff line change @@ -30,7 +30,14 @@ This repository includes two main integration categories:
3030``` python
3131from langchain_oci import ChatOCIGenAI
3232
33- llm = ChatOCIGenAI()
33+ llm = ChatOCIGenAI(
34+ model_id = " MY_MODEL_ID" ,
35+ service_endpoint = " MY_SERVICE_ENDPOINT" ,
36+ compartment_id = " MY_COMPARTMENT_ID" ,
37+ model_kwargs = {" max_tokens" : 1024 }, # Use max_completion_tokens instead of max_tokens for OpenAI models
38+ auth_profile = " MY_AUTH_PROFILE" ,
39+ is_stream = True ,
40+ auth_type = " SECURITY_TOKEN"
41+ )
3442llm.invoke(" Sing a ballad of LangChain." )
3542```
3643
Original file line number Diff line number Diff line change @@ -1058,6 +1058,15 @@ def _prepare_request(
10581058 if stop is not None :
10591059 _model_kwargs [self ._provider .stop_sequence_key ] = stop
10601060
1061+ # Warn if using max_tokens with OpenAI models
1062+ if self .model_id and self .model_id .startswith ("openai." ) and "max_tokens" in _model_kwargs :
1063+ import warnings
1064+ warnings .warn (
1065+ "OpenAI models require 'max_completion_tokens' instead of 'max_tokens'." ,
1066+ UserWarning ,
1067+ stacklevel = 2
1068+ )
1069+
10611070 chat_params = {** _model_kwargs , ** kwargs , ** oci_params }
10621071
10631072 if not self .model_id :
You can’t perform that action at this time.
0 commit comments