Mirror of https://github.com/we-promise/sure.git, synced 2026-04-08 06:44:52 +00:00.
* Implement support for generic OpenAI API - Implements support to route requests to any OpenAI-capable provider (Deepseek, Qwen, vLLM, LM Studio, Ollama). - Keeps support for pure OpenAI and uses the new, better Responses API - Uses the /chat/completions API for the generic providers - If uri_base is not set, uses the default implementation. * Fix JSON handling and indentation * Fix linter error indent * Fix tests to set env vars * Fix updating settings * Change to prefix checking for OpenAI models * Fix: check model if custom URI is set * Change chat to sync calls. Some local models don't support streaming; revert to sync calls for the generic OpenAI API * Fix tests * Fix tests * Fix for GPT-5 message extraction - Finds the message output by filtering for "type" == "message" instead of assuming it's at index 0 - Safely extracts the text using safe navigation operators (&.) - Raises a clear error if no message content is found - Parses the JSON as before * Add more Langfuse logging - Add Langfuse to auto categorizer and merchant detector - Fix monitoring on streaming chat responses - Add Langfuse traces also for model errors now * Update app/models/provider/openai.rb Co-authored-by: coderabbitai[bot] <136622811+coderabbitai[bot]@users.noreply.github.com> Signed-off-by: soky srm <sokysrm@gmail.com> * Handle nil function results explicitly * Expose some config vars * Linter and nitpick comments * Drop back to `gpt-4.1` as default for now * Linter * Fix for strict tool schema in Gemini - This fixes tool calling in the Gemini OpenAI API - Fix for getTransactions function; page size is not used. --------- Signed-off-by: soky srm <sokysrm@gmail.com> Co-authored-by: coderabbitai[bot] <136622811+coderabbitai[bot]@users.noreply.github.com> Co-authored-by: Juan José Mata <juanjo.mata@gmail.com>
136 lines · 4.1 KiB · Ruby
require "test_helper"
require "ostruct"
# Integration tests for Settings::HostingsController.
#
# Covers the self-hosting settings page: access control (only available when
# self-hosting is enabled, and some actions only for admins), updating
# provider/OpenAI settings, and clearing cached market data.
class Settings::HostingsControllerTest < ActionDispatch::IntegrationTest
  include ProviderTestHelper

  setup do
    sign_in users(:family_admin)

    # Stub the Twelve Data provider so the page can render usage stats
    # without hitting the network.
    @provider = mock
    Provider::Registry.stubs(:get_provider).with(:twelve_data).returns(@provider)
    @usage_response = provider_success_response(
      OpenStruct.new(
        used: 10,
        limit: 100,
        utilization: 10,
        plan: "free",
      )
    )
  end

  test "cannot edit when self hosting is disabled" do
    @provider.stubs(:usage).returns(@usage_response)

    with_env_overrides SELF_HOSTED: "false" do
      get settings_hosting_url
      assert_response :forbidden

      patch settings_hosting_url, params: { setting: { require_invite_for_signup: true } }
      assert_response :forbidden
    end
  end

  test "should get edit when self hosting is enabled" do
    # `expects` (not `stubs`) verifies the page actually queries provider usage.
    @provider.expects(:usage).returns(@usage_response)

    with_self_hosting do
      get settings_hosting_url
      assert_response :success
    end
  end

  test "can update settings when self hosting is enabled" do
    with_self_hosting do
      patch settings_hosting_url, params: { setting: { twelve_data_api_key: "1234567890" } }

      assert_equal "1234567890", Setting.twelve_data_api_key
    end
  end

  test "can update openai access token when self hosting is enabled" do
    with_self_hosting do
      patch settings_hosting_url, params: { setting: { openai_access_token: "token" } }

      assert_equal "token", Setting.openai_access_token
    end
  end

  test "can update openai uri base and model together when self hosting is enabled" do
    with_self_hosting do
      patch settings_hosting_url, params: { setting: { openai_uri_base: "https://api.example.com/v1", openai_model: "gpt-4" } }

      assert_equal "https://api.example.com/v1", Setting.openai_uri_base
      assert_equal "gpt-4", Setting.openai_model
    end
  end

  test "cannot update openai uri base without model when self hosting is enabled" do
    with_self_hosting do
      # A custom URI base requires an explicit model, since there is no
      # sensible default for arbitrary OpenAI-compatible providers.
      Setting.openai_model = ""

      patch settings_hosting_url, params: { setting: { openai_uri_base: "https://api.example.com/v1" } }

      assert_response :unprocessable_entity
      assert_match(/OpenAI model is required/, flash[:alert])
      assert_nil Setting.openai_uri_base
    end
  end

  test "can update openai model alone when self hosting is enabled" do
    with_self_hosting do
      patch settings_hosting_url, params: { setting: { openai_model: "gpt-4" } }

      assert_equal "gpt-4", Setting.openai_model
    end
  end

  test "cannot clear openai model when custom uri base is set" do
    with_self_hosting do
      Setting.openai_uri_base = "https://api.example.com/v1"
      Setting.openai_model = "gpt-4"

      patch settings_hosting_url, params: { setting: { openai_model: "" } }

      assert_response :unprocessable_entity
      assert_match(/OpenAI model is required/, flash[:alert])
      # The previously configured model must be left untouched on failure.
      assert_equal "gpt-4", Setting.openai_model
    end
  end

  test "can clear data cache when self hosting is enabled" do
    account = accounts(:investment)
    holding = account.holdings.first
    exchange_rate = exchange_rates(:one)
    security_price = holding.security.prices.first
    account_balance = account.balances.create!(date: Date.current, balance: 1000, currency: "USD")

    with_self_hosting do
      # Run the cache-clearing job inline so its effects can be asserted below.
      perform_enqueued_jobs(only: DataCacheClearJob) do
        delete clear_cache_settings_hosting_url
      end
    end

    assert_redirected_to settings_hosting_url
    assert_equal I18n.t("settings.hostings.clear_cache.cache_cleared"), flash[:notice]

    assert_not ExchangeRate.exists?(exchange_rate.id)
    assert_not Security::Price.exists?(security_price.id)
    assert_not Holding.exists?(holding.id)
    assert_not Balance.exists?(account_balance.id)
  end

  test "can clear data only when admin" do
    with_self_hosting do
      # Re-sign-in as a non-admin member to exercise the authorization check.
      sign_in users(:family_member)

      assert_no_enqueued_jobs do
        delete clear_cache_settings_hosting_url
      end

      assert_redirected_to settings_hosting_url
      assert_equal I18n.t("settings.hostings.not_authorized"), flash[:alert]
    end
  end
end
|