Use Langfuse client trace upsert API (#1041)

Replace direct trace.update calls with client-based trace upserts so the OpenAI provider is compatible with langfuse-ruby 0.1.6 behavior. Add richer warning logs that include full exception details for trace creation, trace upserts, and generation logging failures. Add tests for client-based trace upserts and detailed error logging.
This commit is contained in:
Juan José Mata
2026-02-23 09:29:21 -05:00
committed by GitHub
parent 1f9a934c59
commit 90e94f0ad1
2 changed files with 68 additions and 8 deletions

View File

@@ -82,7 +82,7 @@ class Provider::Openai < Provider
json_mode: json_mode json_mode: json_mode
).auto_categorize ).auto_categorize
trace&.update(output: result.map(&:to_h)) upsert_langfuse_trace(trace: trace, output: result.map(&:to_h))
result result
end end
@@ -110,7 +110,7 @@ class Provider::Openai < Provider
json_mode: json_mode json_mode: json_mode
).auto_detect_merchants ).auto_detect_merchants
trace&.update(output: result.map(&:to_h)) upsert_langfuse_trace(trace: trace, output: result.map(&:to_h))
result result
end end
@@ -147,7 +147,7 @@ class Provider::Openai < Provider
family: family family: family
).process ).process
trace&.update(output: result.to_h) upsert_langfuse_trace(trace: trace, output: result.to_h)
result result
end end
@@ -168,7 +168,7 @@ class Provider::Openai < Provider
model: effective_model model: effective_model
).extract ).extract
trace&.update(output: { transaction_count: result[:transactions].size }) upsert_langfuse_trace(trace: trace, output: { transaction_count: result[:transactions].size })
result result
end end
@@ -480,7 +480,7 @@ class Provider::Openai < Provider
environment: Rails.env environment: Rails.env
) )
rescue => e rescue => e
Rails.logger.warn("Langfuse trace creation failed: #{e.message}") Rails.logger.warn("Langfuse trace creation failed: #{e.message}\n#{e.full_message}")
nil nil
end end
@@ -505,16 +505,32 @@ class Provider::Openai < Provider
output: { error: error.message, details: error.respond_to?(:details) ? error.details : nil }, output: { error: error.message, details: error.respond_to?(:details) ? error.details : nil },
level: "ERROR" level: "ERROR"
) )
trace&.update( upsert_langfuse_trace(
trace: trace,
output: { error: error.message }, output: { error: error.message },
level: "ERROR" level: "ERROR"
) )
else else
generation&.end(output: output, usage: usage) generation&.end(output: output, usage: usage)
trace&.update(output: output) upsert_langfuse_trace(trace: trace, output: output)
end end
rescue => e rescue => e
Rails.logger.warn("Langfuse logging failed: #{e.message}") Rails.logger.warn("Langfuse logging failed: #{e.message}\n#{e.full_message}")
end
# Upserts a Langfuse trace through the client API (langfuse-ruby 0.1.6
# exposes upserts via the client rather than trace.update).
# No-op when no client is configured or the trace has no id; any failure
# is logged as a warning and swallowed so tracing never breaks callers.
def upsert_langfuse_trace(trace:, output:, level: nil)
  return unless langfuse_client && trace&.id

  attributes = { id: trace.id, output: output }
  # Only include :level when explicitly provided (e.g. "ERROR").
  attributes[:level] = level if level.present?

  langfuse_client.trace(**attributes)
rescue => e
  Rails.logger.warn("Langfuse trace upsert failed for trace_id=#{trace&.id}: #{e.message}\n#{e.full_message}")
  nil
end
def record_llm_usage(family:, model:, operation:, usage: nil, error: nil) def record_llm_usage(family:, model:, operation:, usage: nil, error: nil)

View File

@@ -286,4 +286,48 @@ class Provider::OpenaiTest < ActiveSupport::TestCase
assert_equal "configured model: custom-model", custom_provider.supported_models_description assert_equal "configured model: custom-model", custom_provider.supported_models_description
end end
# The private upsert helper should forward id, output, and level to the
# Langfuse client's trace upsert endpoint.
test "upsert_langfuse_trace uses client trace upsert" do
  stub_trace = Struct.new(:id).new("trace_123")
  client = mock
  @subject.stubs(:langfuse_client).returns(client)
  client.expects(:trace).with(id: "trace_123", output: { ok: true }, level: "ERROR")

  @subject.send(:upsert_langfuse_trace, trace: stub_trace, output: { ok: true }, level: "ERROR")
end
# On the success path, log_langfuse_generation should end the generation
# with the output/usage and upsert the parent trace through the client API
# (not via trace.update). Collaborators are mocked with mocha.
test "log_langfuse_generation upserts trace through client" do
trace = Struct.new(:id).new("trace_456")
generation = mock
fake_client = mock
@subject.stubs(:langfuse_client).returns(fake_client)
@subject.stubs(:create_langfuse_trace).returns(trace)
# The trace upsert must go through the client, keyed by the trace id.
fake_client.expects(:trace).with(id: "trace_456", output: "hello")
# The generation is created off the trace and ended with output + usage.
trace.expects(:generation).returns(generation)
generation.expects(:end).with(output: "hello", usage: { "total_tokens" => 10 })
@subject.send(
:log_langfuse_generation,
name: "chat",
model: "gpt-4.1",
input: { prompt: "Hi" },
output: "hello",
usage: { "total_tokens" => 10 }
)
end
# When trace creation raises, the warning should carry both the message and
# the full backtrace (via Exception#full_message), which includes this
# test file's path in the trace.
test "create_langfuse_trace logs full error details" do
  client = mock
  @subject.stubs(:langfuse_client).returns(client)
  client.expects(:trace).raises(StandardError.new("boom"))

  expected_warning = regexp_matches(/Langfuse trace creation failed: boom.*test\/models\/provider\/openai_test\.rb/m)
  Rails.logger.expects(:warn).with(expected_warning)

  @subject.send(:create_langfuse_trace, name: "openai.test", input: { foo: "bar" })
end
end end