Skip to content
Merged
Show file tree
Hide file tree
Changes from 13 commits
Commits
Show all changes
19 commits
Select commit Hold shift + click to select a range
06c428d
SDK: normalize LLM errors for clients — add typed SDK exceptions: …
enyst Oct 31, 2025
69e2879
SDK: clean up unused import in _map_exception (Co-authored-by: openh…)
enyst Oct 31, 2025
074b0af
Merge branch 'main' into sdk/error-normalization-context-auth
enyst Nov 2, 2025
5672b8c
chore: satisfy pre-commit (ruff lint/format) and fix long-line commen…
enyst Nov 2, 2025
7938e86
chore: format after pre-commit hooks
enyst Nov 2, 2025
35b0b36
feat(sdk): treat SDK-raised LLMContextWindowExceedError as context-ex…
enyst Nov 2, 2025
ba8c74d
merge exception submodule refactor into existing branch
enyst Nov 2, 2025
fb86cba
sdk: correct context-window exceeded guidance and tests; simplify aut…
enyst Nov 2, 2025
69c5c32
tests: add classifier/mapping tests; wrap long comment; keep hooks pa…
enyst Nov 2, 2025
df40b40
sdk: preserve APIConnectionError for retry tests; map only ServiceUna…
enyst Nov 2, 2025
99c856c
sdk: map APIConnectionError to LLMServiceUnavailableError; update ret…
enyst Nov 2, 2025
b4fcfee
tests: dedup context-window exceeded mapping test in exception_mapping
enyst Nov 2, 2025
70e64af
tests: move LLMServiceUnavailableError import to module top and asser…
enyst Nov 2, 2025
e7293b6
agent: remove redundant exception mapping; rely on LLM to map provide…
enyst Nov 2, 2025
aaa9590
llm: use explicit names for exception helpers; remove abbreviations
enyst Nov 2, 2025
d68e0a2
llm: remove _map_exception wrapper; call map_provider_exception direc…
enyst Nov 2, 2025
b36a68b
agent: catch LLMContextWindowExceedError directly; remove LLM.is_cont…
enyst Nov 3, 2025
d97a1a7
style: shorten test name to satisfy ruff E501; pre-commit fixes (Co…)
enyst Nov 3, 2025
23e5631
agent: remove redundant catch-and-map; rely on LLM to map provider er…
enyst Nov 3, 2025
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
12 changes: 7 additions & 5 deletions openhands-sdk/openhands/sdk/agent/agent.py
Original file line number Diff line number Diff line change
Expand Up @@ -169,8 +169,8 @@ def step(
on_event(error_message)
return
except Exception as e:
# If there is a condenser registered and the exception is a context window
# exceeded, we can recover by triggering a condensation request.
# If condenser is available and error is context window exceeded, trigger
# condensation
if (
self.condenser is not None
and self.condenser.handles_condensation_requests()
Expand All @@ -181,9 +181,11 @@ def step(
)
on_event(CondensationRequest())
return
# If the error isn't recoverable, keep propagating it up the stack.
else:
raise e
# Otherwise, map to SDK-typed errors and rethrow for clients
mapped = self.llm._map_exception(e)
if mapped is not e:
raise mapped from e
raise

# LLMResponse already contains the converted message and metrics snapshot
message: Message = llm_response.message
Expand Down
110 changes: 0 additions & 110 deletions openhands-sdk/openhands/sdk/llm/exceptions.py

This file was deleted.

45 changes: 45 additions & 0 deletions openhands-sdk/openhands/sdk/llm/exceptions/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,45 @@
from .classifier import is_context_window_exceeded, looks_like_auth_error
from .mapping import map_provider_exception
from .types import (
FunctionCallConversionError,
FunctionCallNotExistsError,
FunctionCallValidationError,
LLMAuthenticationError,
LLMBadRequestError,
LLMContextWindowExceedError,
LLMError,
LLMMalformedActionError,
LLMNoActionError,
LLMNoResponseError,
LLMRateLimitError,
LLMResponseError,
LLMServiceUnavailableError,
LLMTimeoutError,
OperationCancelled,
UserCancelledError,
)


# Public re-export surface of openhands.sdk.llm.exceptions.
# Keep this list in sync with the imports at the top of this module.
__all__ = [
    # Types
    "LLMError",
    "LLMMalformedActionError",
    "LLMNoActionError",
    "LLMResponseError",
    "FunctionCallConversionError",
    "FunctionCallValidationError",
    "FunctionCallNotExistsError",
    "LLMNoResponseError",
    "LLMContextWindowExceedError",
    "LLMAuthenticationError",
    "LLMRateLimitError",
    "LLMTimeoutError",
    "LLMServiceUnavailableError",
    "LLMBadRequestError",
    "UserCancelledError",
    "OperationCancelled",
    # Helpers
    "is_context_window_exceeded",
    "looks_like_auth_error",
    "map_provider_exception",
]
49 changes: 49 additions & 0 deletions openhands-sdk/openhands/sdk/llm/exceptions/classifier.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,49 @@
from __future__ import annotations

from litellm.exceptions import BadRequestError, ContextWindowExceededError, OpenAIError

from .types import LLMContextWindowExceedError


# Minimal, provider-agnostic context-window detection.
# Lowercased substrings that providers embed in error messages when the
# prompt exceeds the model's context window; matched against the
# lowercased exception text in is_context_window_exceeded().
LONG_PROMPT_PATTERNS: list[str] = [
    "contextwindowexceedederror",
    "prompt is too long",
    "input length and `max_tokens` exceed context limit",
    "please reduce the length of",
    "the request exceeds the available context size",
    "context length exceeded",
]


def is_context_window_exceeded(exception: Exception) -> bool:
    """Return True when *exception* signals a context-window overflow.

    Typed context-window exceptions match directly; otherwise a
    BadRequestError/OpenAIError is scanned for known provider phrasings
    from LONG_PROMPT_PATTERNS.
    """
    typed_overflow = (ContextWindowExceededError, LLMContextWindowExceedError)
    if isinstance(exception, typed_overflow):
        return True

    if isinstance(exception, (BadRequestError, OpenAIError)):
        text = str(exception).lower()
        for pattern in LONG_PROMPT_PATTERNS:
            if pattern in text:
                return True

    return False


# Lowercased substrings that indicate an authentication/authorization
# failure in a provider error message; used by looks_like_auth_error().
AUTH_PATTERNS: list[str] = [
    "invalid api key",
    "unauthorized",
    "missing api key",
    "invalid authentication",
    "access denied",
]


def looks_like_auth_error(exception: Exception) -> bool:
    """Heuristically detect authentication/authorization failures.

    Only BadRequestError/OpenAIError instances are considered; their
    message text is matched against known auth phrases and the explicit
    HTTP status codes some providers embed in the message.
    """
    if not isinstance(exception, (BadRequestError, OpenAIError)):
        return False
    text = str(exception).lower()
    # Known auth phrases plus provider-embedded status codes.
    indicators = (*AUTH_PATTERNS, "status 401", "status 403")
    return any(marker in text for marker in indicators)
54 changes: 54 additions & 0 deletions openhands-sdk/openhands/sdk/llm/exceptions/mapping.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,54 @@
from __future__ import annotations

from litellm.exceptions import (
APIConnectionError,
BadRequestError,
InternalServerError,
RateLimitError,
ServiceUnavailableError,
Timeout as LiteLLMTimeout,
)

from .classifier import is_context_window_exceeded, looks_like_auth_error
from .types import (
LLMAuthenticationError,
LLMBadRequestError,
LLMContextWindowExceedError,
LLMRateLimitError,
LLMServiceUnavailableError,
LLMTimeoutError,
)


def map_provider_exception(exception: Exception) -> Exception:
    """Translate provider/LiteLLM exceptions into SDK-typed exceptions.

    Checks run in priority order (context window first, then auth, then
    transport-level categories). The original exception is returned
    unchanged when no mapping applies, so callers can re-raise it.
    """
    text = str(exception)

    # Context window exceeded has highest priority.
    if is_context_window_exceeded(exception):
        return LLMContextWindowExceedError(text)

    # Auth failures often surface as BadRequest/OpenAIError with telltale text.
    if looks_like_auth_error(exception):
        return LLMAuthenticationError(text)

    if isinstance(exception, RateLimitError):
        return LLMRateLimitError(text)

    if isinstance(exception, LiteLLMTimeout):
        return LLMTimeoutError(text)

    # Connectivity and server-side availability problems map to "unavailable".
    unavailable_kinds = (
        APIConnectionError,
        ServiceUnavailableError,
        InternalServerError,
    )
    if isinstance(exception, unavailable_kinds):
        return LLMServiceUnavailableError(text)

    # Remaining client-side 4xx errors.
    if isinstance(exception, BadRequestError):
        return LLMBadRequestError(text)

    # Unknown: hand back the original so the caller re-raises it.
    return exception
101 changes: 101 additions & 0 deletions openhands-sdk/openhands/sdk/llm/exceptions/types.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,101 @@
class LLMError(Exception):
    """Base class for all SDK LLM errors.

    Stores a human-readable ``message`` that also serves as the
    string representation of the exception.
    """

    message: str

    def __init__(self, message: str) -> None:
        self.message = message
        super().__init__(message)

    def __str__(self) -> str:
        return self.message


# General response parsing/validation errors
class LLMMalformedActionError(LLMError):
    """Raised when the LLM response is malformed and cannot be used."""

    def __init__(self, message: str = "Malformed response") -> None:
        super().__init__(message)


class LLMNoActionError(LLMError):
    """Raised when the agent's LLM response contains no action."""

    def __init__(self, message: str = "Agent must return an action") -> None:
        super().__init__(message)


class LLMResponseError(LLMError):
    """Raised when an action cannot be retrieved from the LLM response."""

    def __init__(
        self, message: str = "Failed to retrieve action from LLM response"
    ) -> None:
        super().__init__(message)


# Function-calling conversion/validation
class FunctionCallConversionError(LLMError):
    """Raised when converting between message and function-call formats fails."""

    def __init__(self, message: str) -> None:
        super().__init__(message)


class FunctionCallValidationError(LLMError):
    """Raised when a function call's arguments fail validation."""

    def __init__(self, message: str) -> None:
        super().__init__(message)


class FunctionCallNotExistsError(LLMError):
    """Raised when the LLM calls a function/tool that is not registered."""

    def __init__(self, message: str) -> None:
        super().__init__(message)


# Provider/transport related
class LLMNoResponseError(LLMError):
    """Raised when the LLM returns no response at all.

    Per the default message, this has only been observed with Gemini
    models so far.
    """

    def __init__(
        self,
        message: str = (
            "LLM did not return a response. This is only seen in Gemini models so far."
        ),
    ) -> None:
        super().__init__(message)


class LLMContextWindowExceedError(LLMError):
    """Raised when the conversation exceeds the LLM's context window."""

    def __init__(
        self,
        message: str = (
            "Conversation history longer than LLM context window limit. "
            "Consider enabling a condenser or shortening inputs."
        ),
    ) -> None:
        super().__init__(message)


class LLMAuthenticationError(LLMError):
    """Raised for invalid or missing API credentials."""

    def __init__(self, message: str = "Invalid or missing API credentials") -> None:
        super().__init__(message)


class LLMRateLimitError(LLMError):
    """Raised when the provider reports a rate limit was exceeded."""

    def __init__(self, message: str = "Rate limit exceeded") -> None:
        super().__init__(message)


class LLMTimeoutError(LLMError):
    """Raised when an LLM request times out."""

    def __init__(self, message: str = "LLM request timed out") -> None:
        super().__init__(message)


class LLMServiceUnavailableError(LLMError):
    """Raised for connectivity or server-side availability failures."""

    def __init__(self, message: str = "LLM service unavailable") -> None:
        super().__init__(message)


class LLMBadRequestError(LLMError):
    """Raised for generic client-side 4xx errors from the provider."""

    def __init__(self, message: str = "Bad request to LLM provider") -> None:
        super().__init__(message)


# Other
# NOTE: these two deliberately subclass Exception, not LLMError.
class UserCancelledError(Exception):
    """Raised when the user cancels an in-flight request."""

    def __init__(self, message: str = "User cancelled the request") -> None:
        super().__init__(message)


class OperationCancelled(Exception):
    """Raised when an operation is cancelled."""

    def __init__(self, message: str = "Operation was cancelled") -> None:
        super().__init__(message)
Loading
Loading