diff --git a/app/models/provider/openai.rb b/app/models/provider/openai.rb index 9ba1d23b0..d4045020a 100644 --- a/app/models/provider/openai.rb +++ b/app/models/provider/openai.rb @@ -4,6 +4,18 @@ class Provider::Openai < Provider # Subclass so errors caught in this provider are raised as Provider::Openai::Error Error = Class.new(Provider::Error) + # Specific error for models that don't support function calling + class FunctionCallingNotSupportedError < Error + def initialize(model:, provider_url:) + message = I18n.t( + "errors.llm.function_calling_not_supported", + model: model, + provider_url: provider_url + ) + super(message) + end + end + # Supported OpenAI model prefixes (e.g., "gpt-4" matches "gpt-4", "gpt-4.1", "gpt-4-turbo", etc.) DEFAULT_OPENAI_MODEL_PREFIXES = %w[gpt-4 gpt-5 o1 o3] DEFAULT_MODEL = "gpt-4.1" @@ -318,11 +330,37 @@ class Provider::Openai < Provider user_identifier: user_identifier ) record_llm_usage(family: family, model: model, operation: "chat", error: e) + + # Detect function calling not supported error (404 with tools) + if tools.present? && function_calling_not_supported_error?(e) + raise FunctionCallingNotSupportedError.new(model: model, provider_url: @uri_base) + end + raise end end end + def function_calling_not_supported_error?(error) + # Check if this is a 404 error which often indicates the endpoint doesn't support tools + # Different providers may return different error formats + http_status = extract_http_status_code(error) + return true if http_status == 404 + + # Also check error message for common tool-related error patterns + error_message = error.message.to_s.downcase + tool_error_patterns = [ + "tools", + "tool_choice", + "function", + "does not support", + "not supported", + "invalid parameter", + "unknown parameter" + ] + tool_error_patterns.any? 
{ |pattern| error_message.include?(pattern) } + end + def build_generic_messages(prompt:, instructions: nil, function_results: []) messages = [] diff --git a/app/views/chats/_error.html.erb b/app/views/chats/_error.html.erb index bbcb75818..bf12e61bc 100644 --- a/app/views/chats/_error.html.erb +++ b/app/views/chats/_error.html.erb @@ -1,5 +1,20 @@ <%# locals: (chat:) %> +<% + # Try to extract a meaningful error message + error_message = nil + begin + if chat.error.present? + parsed = JSON.parse(chat.error) rescue nil + if parsed.is_a?(Hash) && parsed["message"].present? + error_message = parsed["message"] + end + end + rescue + # Fall back to generic message + end +%> +
-  Failed to generate response. Please try again.
+  <% if error_message %>
+    <%= error_message %>
+  <% else %>
+    Failed to generate response. Please try again.
+ <% end %> <%= render DS::Button.new( text: "Retry", diff --git a/config/locales/views/chats/en.yml b/config/locales/views/chats/en.yml index 89bd1d152..33561d046 100644 --- a/config/locales/views/chats/en.yml +++ b/config/locales/views/chats/en.yml @@ -3,3 +3,6 @@ en: chats: demo_banner_title: "Demo Mode Active" demo_banner_message: "You are using an open-weights Qwen3 LLM with credits provided by Cloudflare Workers AI. Result may vary since the codebase was mostly tested on `gpt-4.1` but your tokens don't go anywhere else to be trained with! 🤖" + errors: + llm: + function_calling_not_supported: "The model '%{model}' does not appear to support function calling (tools). The AI assistant requires a model that supports function calling to retrieve your financial data. Please select a different model that supports this feature in your AI settings, or contact your provider (%{provider_url}) to verify model capabilities." diff --git a/test/models/provider/openai_test.rb b/test/models/provider/openai_test.rb index 83b2b787e..7512a61c0 100644 --- a/test/models/provider/openai_test.rb +++ b/test/models/provider/openai_test.rb @@ -286,4 +286,72 @@ class Provider::OpenaiTest < ActiveSupport::TestCase assert_equal "configured model: custom-model", custom_provider.supported_models_description end + + test "FunctionCallingNotSupportedError provides helpful message" do + error = Provider::Openai::FunctionCallingNotSupportedError.new( + model: "test-model", + provider_url: "https://openrouter.ai/api/v1" + ) + + assert_includes error.message, "test-model" + assert_includes error.message, "function calling" + assert_includes error.message, "openrouter.ai" + end + + test "detects 404 error as function calling not supported" do + custom_provider = Provider::Openai.new( + "test-token", + uri_base: "https://openrouter.ai/api/v1", + model: "some-model-without-tools" + ) + + # Create a mock error with 404 status + mock_error = OpenStruct.new( + message: "404 Not Found", + http_status: 
404 + ) + + assert custom_provider.send(:function_calling_not_supported_error?, mock_error) + end + + test "detects tool-related error messages as function calling not supported" do + custom_provider = Provider::Openai.new( + "test-token", + uri_base: "https://openrouter.ai/api/v1", + model: "some-model-without-tools" + ) + + # Test various error message patterns + tool_errors = [ + OpenStruct.new(message: "tools parameter is not supported", http_status: nil), + OpenStruct.new(message: "Invalid parameter: tool_choice", http_status: nil), + OpenStruct.new(message: "function calling does not support this model", http_status: nil), + OpenStruct.new(message: "This model does not support tools", http_status: nil) + ] + + tool_errors.each do |error| + assert custom_provider.send(:function_calling_not_supported_error?, error), + "Expected '#{error.message}' to be detected as function calling not supported" + end + end + + test "does not flag unrelated errors as function calling not supported" do + custom_provider = Provider::Openai.new( + "test-token", + uri_base: "https://openrouter.ai/api/v1", + model: "some-model" + ) + + # Test unrelated error messages + unrelated_errors = [ + OpenStruct.new(message: "Rate limit exceeded", http_status: 429), + OpenStruct.new(message: "Internal server error", http_status: 500), + OpenStruct.new(message: "Invalid API key", http_status: 401) + ] + + unrelated_errors.each do |error| + assert_not custom_provider.send(:function_calling_not_supported_error?, error), + "Did not expect '#{error.message}' to be detected as function calling not supported" + end + end end