Merged
Changes from all commits
19 commits
06c428d
SDK: normalize LLM errors for clients. Add typed SDK exceptions: …
enyst Oct 31, 2025
69e2879
SDK: cleanup unused import in _map_exception. Co-authored-by: openh…
enyst Oct 31, 2025
074b0af
Merge branch 'main' into sdk/error-normalization-context-auth
enyst Nov 2, 2025
5672b8c
chore: satisfy pre-commit (ruff lint/format) and fix long-line commen…
enyst Nov 2, 2025
7938e86
chore: format after pre-commit hooks
enyst Nov 2, 2025
35b0b36
feat(sdk): treat SDK-raised LLMContextWindowExceedError as context-ex…
enyst Nov 2, 2025
ba8c74d
merge exception submodule refactor into existing branch
enyst Nov 2, 2025
fb86cba
sdk: correct context-window exceeded guidance and tests; simplify aut…
enyst Nov 2, 2025
69c5c32
tests: add classifier/mapping tests; wrap long comment; keep hooks pa…
enyst Nov 2, 2025
df40b40
sdk: preserve APIConnectionError for retry tests; map only ServiceUna…
enyst Nov 2, 2025
99c856c
sdk: map APIConnectionError to LLMServiceUnavailableError; update ret…
enyst Nov 2, 2025
b4fcfee
tests: dedup context-window exceeded mapping test in exception_mapping
enyst Nov 2, 2025
70e64af
tests: move LLMServiceUnavailableError import to module top and asser…
enyst Nov 2, 2025
e7293b6
agent: remove redundant exception mapping; rely on LLM to map provide…
enyst Nov 2, 2025
aaa9590
llm: use explicit names for exception helpers; remove abbreviations
enyst Nov 2, 2025
d68e0a2
llm: remove _map_exception wrapper; call map_provider_exception direc…
enyst Nov 2, 2025
b36a68b
agent: catch LLMContextWindowExceedError directly; remove LLM.is_cont…
enyst Nov 3, 2025
d97a1a7
style: shorten test name to satisfy ruff E501; pre-commit fixes. Co…
enyst Nov 3, 2025
23e5631
agent: remove redundant catch-and-map; rely on LLM to map provider er…
enyst Nov 3, 2025
16 changes: 8 additions & 8 deletions openhands-sdk/openhands/sdk/agent/agent.py
@@ -28,7 +28,10 @@
     TextContent,
     ThinkingBlock,
 )
-from openhands.sdk.llm.exceptions import FunctionCallValidationError
+from openhands.sdk.llm.exceptions import (
+    FunctionCallValidationError,
+    LLMContextWindowExceedError,
+)
 from openhands.sdk.logger import get_logger
 from openhands.sdk.security.confirmation_policy import NeverConfirm
 from openhands.sdk.security.llm_analyzer import LLMSecurityAnalyzer
@@ -168,22 +171,19 @@ def step(
             )
             on_event(error_message)
             return
-        except Exception as e:
-            # If there is a condenser registered and the exception is a context window
-            # exceeded, we can recover by triggering a condensation request.
+        except LLMContextWindowExceedError:
+            # If condenser is available and handles requests, trigger condensation
             if (
                 self.condenser is not None
                 and self.condenser.handles_condensation_requests()
-                and self.llm.is_context_window_exceeded_exception(e)
             ):
                 logger.warning(
                     "LLM raised context window exceeded error, triggering condensation"
                 )
                 on_event(CondensationRequest())
                 return
-            # If the error isn't recoverable, keep propagating it up the stack.
-            else:
-                raise e
+            # No condenser available; re-raise for client handling
+            raise

         # LLMResponse already contains the converted message and metrics snapshot
         message: Message = llm_response.message
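With this change, step() intercepts only LLMContextWindowExceedError; when no condenser handles condensation requests, the typed error is re-raised for the client. A minimal sketch of what calling code can now do (drive_agent and trim_history_fn are illustrative placeholders, not SDK API):

# Sketch only: reacting to the typed context-window error at the call site.
from openhands.sdk.llm.exceptions import LLMContextWindowExceedError


def drive_agent(step_fn, trim_history_fn) -> None:
    try:
        step_fn()
    except LLMContextWindowExceedError:
        # The SDK re-raised because no condenser recovered the conversation.
        # One option (among others) is to trim older history and retry once.
        trim_history_fn()
        step_fn()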
110 changes: 0 additions & 110 deletions openhands-sdk/openhands/sdk/llm/exceptions.py

This file was deleted.

45 changes: 45 additions & 0 deletions openhands-sdk/openhands/sdk/llm/exceptions/__init__.py
@@ -0,0 +1,45 @@
from .classifier import is_context_window_exceeded, looks_like_auth_error
from .mapping import map_provider_exception
from .types import (
    FunctionCallConversionError,
    FunctionCallNotExistsError,
    FunctionCallValidationError,
    LLMAuthenticationError,
    LLMBadRequestError,
    LLMContextWindowExceedError,
    LLMError,
    LLMMalformedActionError,
    LLMNoActionError,
    LLMNoResponseError,
    LLMRateLimitError,
    LLMResponseError,
    LLMServiceUnavailableError,
    LLMTimeoutError,
    OperationCancelled,
    UserCancelledError,
)


__all__ = [
    # Types
    "LLMError",
    "LLMMalformedActionError",
    "LLMNoActionError",
    "LLMResponseError",
    "FunctionCallConversionError",
    "FunctionCallValidationError",
    "FunctionCallNotExistsError",
    "LLMNoResponseError",
    "LLMContextWindowExceedError",
    "LLMAuthenticationError",
    "LLMRateLimitError",
    "LLMTimeoutError",
    "LLMServiceUnavailableError",
    "LLMBadRequestError",
    "UserCancelledError",
    "OperationCancelled",
    # Helpers
    "is_context_window_exceeded",
    "looks_like_auth_error",
    "map_provider_exception",
]
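The package re-exports every typed exception plus the helpers, so clients can depend on openhands.sdk.llm.exceptions rather than on litellm types. A hedged sketch of client-side handling built only on these exports (the retry policy shown is an example, not something the SDK prescribes):

# Sketch: branching on the SDK-typed errors instead of provider-specific ones.
import time

from openhands.sdk.llm.exceptions import (
    LLMAuthenticationError,
    LLMRateLimitError,
    LLMServiceUnavailableError,
)


def call_with_retries(llm_call, attempts: int = 3):
    for attempt in range(attempts):
        try:
            return llm_call()
        except LLMAuthenticationError:
            # Not retryable: credentials must be fixed by the user.
            raise
        except (LLMRateLimitError, LLMServiceUnavailableError):
            # Retryable: back off and try again.
            time.sleep(2**attempt)
    return llm_call()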
49 changes: 49 additions & 0 deletions openhands-sdk/openhands/sdk/llm/exceptions/classifier.py
@@ -0,0 +1,49 @@
from __future__ import annotations

from litellm.exceptions import BadRequestError, ContextWindowExceededError, OpenAIError

from .types import LLMContextWindowExceedError


# Minimal, provider-agnostic context-window detection
LONG_PROMPT_PATTERNS: list[str] = [
    "contextwindowexceedederror",
    "prompt is too long",
    "input length and `max_tokens` exceed context limit",
    "please reduce the length of",
    "the request exceeds the available context size",
    "context length exceeded",
]


def is_context_window_exceeded(exception: Exception) -> bool:
    if isinstance(exception, (ContextWindowExceededError, LLMContextWindowExceedError)):
        return True

    if not isinstance(exception, (BadRequestError, OpenAIError)):
        return False

    s = str(exception).lower()
    return any(p in s for p in LONG_PROMPT_PATTERNS)


AUTH_PATTERNS: list[str] = [
    "invalid api key",
    "unauthorized",
    "missing api key",
    "invalid authentication",
    "access denied",
]


def looks_like_auth_error(exception: Exception) -> bool:
    if not isinstance(exception, (BadRequestError, OpenAIError)):
        return False
    s = str(exception).lower()
    if any(p in s for p in AUTH_PATTERNS):
        return True
    # Some providers include explicit status codes in message text
    for code in ("status 401", "status 403"):
        if code in s:
            return True
    return False
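Both classifiers gate on the exception type before scanning the message, so arbitrary exceptions whose text happens to match a pattern are never classified. A small illustration using only names introduced in this PR:

# Sketch: the type gate in the classifiers.
from openhands.sdk.llm.exceptions import (
    LLMContextWindowExceedError,
    is_context_window_exceeded,
    looks_like_auth_error,
)

# An SDK-raised context-window error is recognized directly by type.
assert is_context_window_exceeded(LLMContextWindowExceedError("prompt is too long"))

# Generic exceptions fail the isinstance gate, even with matching text.
assert not is_context_window_exceeded(ValueError("context length exceeded"))
assert not looks_like_auth_error(ValueError("invalid api key"))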
54 changes: 54 additions & 0 deletions openhands-sdk/openhands/sdk/llm/exceptions/mapping.py
@@ -0,0 +1,54 @@
from __future__ import annotations

from litellm.exceptions import (
    APIConnectionError,
    BadRequestError,
    InternalServerError,
    RateLimitError,
    ServiceUnavailableError,
    Timeout as LiteLLMTimeout,
)

from .classifier import is_context_window_exceeded, looks_like_auth_error
from .types import (
    LLMAuthenticationError,
    LLMBadRequestError,
    LLMContextWindowExceedError,
    LLMRateLimitError,
    LLMServiceUnavailableError,
    LLMTimeoutError,
)


def map_provider_exception(exception: Exception) -> Exception:
    """
    Map provider/LiteLLM exceptions to SDK-typed exceptions.

    Returns original exception if no mapping applies.
    """
    # Context window exceeded first (highest priority)
    if is_context_window_exceeded(exception):
        return LLMContextWindowExceedError(str(exception))

    # Auth-like errors often appear as BadRequest/OpenAIError with specific text
    if looks_like_auth_error(exception):
        return LLMAuthenticationError(str(exception))

    if isinstance(exception, RateLimitError):
        return LLMRateLimitError(str(exception))

    if isinstance(exception, LiteLLMTimeout):
        return LLMTimeoutError(str(exception))

    # Connectivity and service-side availability issues → service unavailable
    if isinstance(
        exception, (APIConnectionError, ServiceUnavailableError, InternalServerError)
    ):
        return LLMServiceUnavailableError(str(exception))

    # Generic client-side 4xx errors
    if isinstance(exception, BadRequestError):
        return LLMBadRequestError(str(exception))

    # Unknown: let caller re-raise original
    return exception
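Because map_provider_exception returns the mapped exception (or the original object when nothing matches) instead of raising, the call site decides how to raise. A sketch of the intended pattern; per the commit messages the real call site is the LLM class, so the wrapper below is purely illustrative:

# Sketch: typical call-site pattern around a raw provider call.
from openhands.sdk.llm.exceptions import map_provider_exception


def complete_with_normalized_errors(raw_completion_fn, *args, **kwargs):
    try:
        return raw_completion_fn(*args, **kwargs)
    except Exception as e:
        mapped = map_provider_exception(e)
        if mapped is e:
            # Unknown provider error: propagate unchanged.
            raise
        # Known category: raise the SDK-typed error, keeping the original cause.
        raise mapped from e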