mirror of
https://github.com/we-promise/sure.git
synced 2026-05-07 12:54:04 +00:00
fix: Display helpful error when LLM model lacks function calling support
When users configure an OpenAI-compatible provider (like OpenRouter) with a model that doesn't support function calling, they previously saw only a generic "404" error. This made troubleshooting difficult.

Changes:
- Add FunctionCallingNotSupportedError class with a clear, actionable message
- Detect 404 errors and tool-related error messages when using custom providers
- Update error partial to display the actual error message instead of generic text
- Add i18n support for the error message

Fixes #830 https://claude.ai/code/session_01EpuAVyy5qRV4hYjwPELff4
This commit is contained in:
@@ -4,6 +4,18 @@ class Provider::Openai < Provider
|
||||
# Subclass so errors caught in this provider are raised as Provider::Openai::Error
|
||||
Error = Class.new(Provider::Error)
|
||||
|
||||
# Raised when the configured model rejects requests that include tools,
# i.e. it does not support OpenAI-style function calling.
class FunctionCallingNotSupportedError < Error
  # Builds a localized, user-facing message naming the offending model and
  # the provider endpoint so the user can fix their AI settings.
  #
  # @param model [String] the configured model identifier
  # @param provider_url [String] base URL of the OpenAI-compatible provider
  def initialize(model:, provider_url:)
    super(
      I18n.t(
        "errors.llm.function_calling_not_supported",
        model: model,
        provider_url: provider_url
      )
    )
  end
end
|
||||
|
||||
# Supported OpenAI model prefixes (e.g., "gpt-4" matches "gpt-4", "gpt-4.1", "gpt-4-turbo", etc.)
# Frozen so the shared constant cannot be mutated at runtime.
DEFAULT_OPENAI_MODEL_PREFIXES = %w[gpt-4 gpt-5 o1 o3].freeze
# Model used when the caller does not configure one explicitly.
DEFAULT_MODEL = "gpt-4.1".freeze
|
||||
@@ -318,11 +330,37 @@ class Provider::Openai < Provider
|
||||
user_identifier: user_identifier
|
||||
)
|
||||
record_llm_usage(family: family, model: model, operation: "chat", error: e)
|
||||
|
||||
# Detect function calling not supported error (404 with tools)
|
||||
if tools.present? && function_calling_not_supported_error?(e)
|
||||
raise FunctionCallingNotSupportedError.new(model: model, provider_url: @uri_base)
|
||||
end
|
||||
|
||||
raise
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
# Heuristic: does this provider error look like "the model/endpoint does not
# support tools (function calling)"?
#
# Different OpenAI-compatible providers signal this differently, so we check
# two things: the HTTP status and common phrases in the error message.
#
# @param error [Exception] the error raised by the provider client; may
#   expose an HTTP status readable via extract_http_status_code
# @return [Boolean] true when the error plausibly indicates missing tool support
def function_calling_not_supported_error?(error)
  # A 404 on a tools-enabled request commonly means the endpoint rejected it.
  return true if extract_http_status_code(error) == 404

  # Otherwise scan the message for phrases providers use when rejecting tools.
  # NOTE(review): these substrings are intentionally broad ("function",
  # "not supported") — callers should only invoke this when tools were present.
  normalized = error.message.to_s.downcase
  [
    "tools",
    "tool_choice",
    "function",
    "does not support",
    "not supported",
    "invalid parameter",
    "unknown parameter"
  ].any? { |fragment| normalized.include?(fragment) }
end
|
||||
|
||||
def build_generic_messages(prompt:, instructions: nil, function_results: [])
|
||||
messages = []
|
||||
|
||||
|
||||
@@ -1,5 +1,20 @@
|
||||
<%# locals: (chat:) %>
|
||||
|
||||
<%
  # Prefer the provider's structured error message when one was stored on the
  # chat; otherwise error_message stays nil and the partial renders a generic
  # fallback. Only JSON parse failures are rescued — other errors should surface.
  error_message = nil
  if chat.error.present?
    begin
      parsed = JSON.parse(chat.error)
      error_message = parsed["message"] if parsed.is_a?(Hash) && parsed["message"].present?
    rescue JSON::ParserError
      # chat.error was not valid JSON — fall back to the generic message.
    end
  end
%>
|
||||
|
||||
<div id="chat-error" class="px-3 py-2 bg-red-100 border border-red-500 rounded-lg">
|
||||
<% if chat.debug_mode? %>
|
||||
<div class="overflow-x-auto text-xs p-4 bg-red-200 rounded-md mb-2">
|
||||
@@ -7,8 +22,12 @@
|
||||
</div>
|
||||
<% end %>
|
||||
|
||||
<div class="flex items-center justify-between gap-2">
|
||||
<p class="text-xs text-red-500">Failed to generate response. Please try again.</p>
|
||||
<div class="flex flex-col gap-2">
|
||||
<% if error_message.present? %>
|
||||
<p class="text-xs text-red-500"><%= error_message %></p>
|
||||
<% else %>
|
||||
<p class="text-xs text-red-500">Failed to generate response. Please try again.</p>
|
||||
<% end %>
|
||||
|
||||
<%= render DS::Button.new(
|
||||
text: "Retry",
|
||||
|
||||
@@ -3,3 +3,6 @@ en:
|
||||
chats:
|
||||
demo_banner_title: "Demo Mode Active"
|
||||
demo_banner_message: "You are using an open-weights Qwen3 LLM with credits provided by Cloudflare Workers AI. Result may vary since the codebase was mostly tested on `gpt-4.1` but your tokens don't go anywhere else to be trained with! 🤖"
|
||||
errors:
|
||||
llm:
|
||||
function_calling_not_supported: "The model '%{model}' does not appear to support function calling (tools). The AI assistant requires a model that supports function calling to retrieve your financial data. Please select a different model that supports this feature in your AI settings, or contact your provider (%{provider_url}) to verify model capabilities."
|
||||
|
||||
@@ -286,4 +286,72 @@ class Provider::OpenaiTest < ActiveSupport::TestCase
|
||||
|
||||
assert_equal "configured model: custom-model", custom_provider.supported_models_description
|
||||
end
|
||||
|
||||
test "FunctionCallingNotSupportedError provides helpful message" do
  error = Provider::Openai::FunctionCallingNotSupportedError.new(
    model: "test-model",
    provider_url: "https://openrouter.ai/api/v1"
  )

  # The message must name the model, the missing capability, and the provider host.
  ["test-model", "function calling", "openrouter.ai"].each do |fragment|
    assert_includes error.message, fragment
  end
end
|
||||
|
||||
test "detects 404 error as function calling not supported" do
  provider = Provider::Openai.new(
    "test-token",
    uri_base: "https://openrouter.ai/api/v1",
    model: "some-model-without-tools"
  )

  # A bare 404 with no tool-related wording should still be flagged.
  not_found = OpenStruct.new(message: "404 Not Found", http_status: 404)

  assert provider.send(:function_calling_not_supported_error?, not_found)
end
|
||||
|
||||
test "detects tool-related error messages as function calling not supported" do
  provider = Provider::Openai.new(
    "test-token",
    uri_base: "https://openrouter.ai/api/v1",
    model: "some-model-without-tools"
  )

  # Representative provider phrasings that should each trigger detection.
  [
    "tools parameter is not supported",
    "Invalid parameter: tool_choice",
    "function calling does not support this model",
    "This model does not support tools"
  ].each do |text|
    mock = OpenStruct.new(message: text, http_status: nil)
    assert provider.send(:function_calling_not_supported_error?, mock),
      "Expected '#{text}' to be detected as function calling not supported"
  end
end
|
||||
|
||||
test "does not flag unrelated errors as function calling not supported" do
  provider = Provider::Openai.new(
    "test-token",
    uri_base: "https://openrouter.ai/api/v1",
    model: "some-model"
  )

  # Common provider failures that have nothing to do with tool support.
  [
    OpenStruct.new(message: "Rate limit exceeded", http_status: 429),
    OpenStruct.new(message: "Internal server error", http_status: 500),
    OpenStruct.new(message: "Invalid API key", http_status: 401)
  ].each do |mock|
    assert_not provider.send(:function_calling_not_supported_error?, mock),
      "Did not expect '#{mock.message}' to be detected as function calling not supported"
  end
end
|
||||
end
|
||||
|
||||
Reference in New Issue
Block a user