mirror of
https://github.com/we-promise/sure.git
synced 2026-04-19 03:54:08 +00:00
Fix "Messages is invalid" error for Ollama/custom LLM providers and add comprehensive AI documentation (#225)
* Add comprehensive AI/LLM configuration documentation
* Fix Chat.start! to use default model when model is nil or empty
* Ensure all controllers use Chat.default_model for consistency
* Move AI doc inside `hosting/`
* Probably too much error handling

---------

Co-authored-by: copilot-swe-agent[bot] <198982749+Copilot@users.noreply.github.com>
Co-authored-by: jjmata <187772+jjmata@users.noreply.github.com>
Co-authored-by: Juan José Mata <juanjo.mata@gmail.com>
This commit is contained in:
@@ -28,4 +28,36 @@ class ChatTest < ActiveSupport::TestCase
|
||||
assert_equal 1, chat.messages.where(type: "UserMessage").count
|
||||
end
|
||||
end
|
||||
|
||||
test "creates with default model when model is nil" do
  # A nil model must not raise "Messages is invalid" — Chat.start! is
  # expected to fall back to the provider's default model instead.
  assert_difference "@user.chats.count", 1 do
    chat = @user.chats.start!("Test prompt", model: nil)

    # Exactly one message is created, tagged with the fallback model.
    assert_equal 1, chat.messages.count
    assert_equal Provider::Openai::DEFAULT_MODEL, chat.messages.first.ai_model
  end
end
|
||||
|
||||
test "creates with default model when model is empty string" do
  # An empty-string model is treated exactly like nil: the chat is still
  # created and the provider default model is applied to the message.
  question = "Test prompt"

  assert_difference "@user.chats.count", 1 do
    created = @user.chats.start!(question, model: "")

    assert_equal 1, created.messages.count
    assert_equal Provider::Openai::DEFAULT_MODEL, created.messages.first.ai_model
  end
end
|
||||
|
||||
test "creates with configured model when OPENAI_MODEL env is set" do
  # The OPENAI_MODEL environment variable overrides the built-in default,
  # so a blank model on start! picks up the configured value.
  with_env_overrides OPENAI_MODEL: "custom-model" do
    conversation = @user.chats.start!("Test prompt", model: "")

    assert_equal "custom-model", conversation.messages.first.ai_model
  end
end
|
||||
end
|
||||
|
||||
Reference in New Issue
Block a user