Track Langfuse sessions and users (#174)

This commit is contained in:
Juan José Mata
2025-10-01 19:46:25 +02:00
committed by GitHub
parent ed99a4dcab
commit cbc653a63a
4 changed files with 63 additions and 8 deletions

View File

@@ -67,7 +67,9 @@ class Assistant::Responder
functions: function_tool_caller.function_definitions,
function_results: function_results,
streamer: streamer,
previous_response_id: previous_response_id
previous_response_id: previous_response_id,
session_id: chat_session_id,
user_identifier: chat_user_identifier
)
unless response.success?
@@ -84,4 +86,18 @@ class Assistant::Responder
# Lazily-built registry of event listeners, keyed by event name.
# Missing keys materialize as fresh arrays, so callers may append
# a callback without checking for the key first.
def listeners
  @listeners ||= Hash.new { |registry, event| registry[event] = [] }
end
# Stable string identifier for the current chat, used to group
# Langfuse traces into a session; nil when no chat is present.
def chat_session_id
  current_chat = chat
  current_chat && current_chat.id&.to_s
end
# Pseudonymous user identifier forwarded to the LLM provider for
# observability: the raw user id is never sent, only its SHA-256
# digest. Returns nil when the chat has no associated user.
def chat_user_identifier
  user_id = chat&.user_id
  ::Digest::SHA256.hexdigest(user_id.to_s) if user_id
end
# Memoized chat lookup, resolved once from the message being handled.
# (`@chat = @chat || ...` mirrors `||=` exactly, including the
# re-fetch when the cached value is false/nil.)
def chat
  @chat = @chat || message.chat
end
end

View File

@@ -18,7 +18,17 @@ module Provider::LlmConcept
ChatResponse = Data.define(:id, :model, :messages, :function_requests)
ChatFunctionRequest = Data.define(:id, :call_id, :function_name, :function_args)
def chat_response(prompt, model:, instructions: nil, functions: [], function_results: [], streamer: nil, previous_response_id: nil)
def chat_response(
prompt,
model:,
instructions: nil,
functions: [],
function_results: [],
streamer: nil,
previous_response_id: nil,
session_id: nil,
user_identifier: nil
)
raise NotImplementedError, "Subclasses must implement #chat_response"
end
end

View File

@@ -58,7 +58,17 @@ class Provider::Openai < Provider
end
end
def chat_response(prompt, model:, instructions: nil, functions: [], function_results: [], streamer: nil, previous_response_id: nil)
def chat_response(
prompt,
model:,
instructions: nil,
functions: [],
function_results: [],
streamer: nil,
previous_response_id: nil,
session_id: nil,
user_identifier: nil
)
with_provider_response do
chat_config = ChatConfig.new(
functions: functions,
@@ -101,7 +111,9 @@ class Provider::Openai < Provider
name: "chat_response",
model: model,
input: input_payload,
output: response.messages.map(&:output_text).join("\n")
output: response.messages.map(&:output_text).join("\n"),
session_id: session_id,
user_identifier: user_identifier
)
response
else
@@ -111,7 +123,9 @@ class Provider::Openai < Provider
model: model,
input: input_payload,
output: parsed.messages.map(&:output_text).join("\n"),
usage: raw_response["usage"]
usage: raw_response["usage"],
session_id: session_id,
user_identifier: user_identifier
)
parsed
end
@@ -127,16 +141,23 @@ class Provider::Openai < Provider
@langfuse_client = Langfuse.new
end
def log_langfuse_generation(name:, model:, input:, output:, usage: nil)
def log_langfuse_generation(name:, model:, input:, output:, usage: nil, session_id: nil, user_identifier: nil)
return unless langfuse_client
trace = langfuse_client.trace(name: "openai.#{name}", input: input)
trace = langfuse_client.trace(
name: "openai.#{name}",
input: input,
session_id: session_id,
user_id: user_identifier
)
trace.generation(
name: name,
model: model,
input: input,
output: output,
usage: usage
usage: usage,
session_id: session_id,
user_id: user_identifier
)
trace.update(output: output)
rescue => e

View File

@@ -12,6 +12,8 @@ class AssistantTest < ActiveSupport::TestCase
)
@assistant = Assistant.for_chat(@chat)
@provider = mock
@expected_session_id = @chat.id.to_s
@expected_user_identifier = ::Digest::SHA256.hexdigest(@chat.user_id.to_s)
end
test "errors get added to chat" do
@@ -46,6 +48,8 @@ class AssistantTest < ActiveSupport::TestCase
response = provider_success_response(response_chunk.data)
@provider.expects(:chat_response).with do |message, **options|
assert_equal @expected_session_id, options[:session_id]
assert_equal @expected_user_identifier, options[:user_identifier]
text_chunks.each do |text_chunk|
options[:streamer].call(text_chunk)
end
@@ -98,6 +102,8 @@ class AssistantTest < ActiveSupport::TestCase
sequence = sequence("provider_chat_response")
@provider.expects(:chat_response).with do |message, **options|
assert_equal @expected_session_id, options[:session_id]
assert_equal @expected_user_identifier, options[:user_identifier]
call2_text_chunks.each do |text_chunk|
options[:streamer].call(text_chunk)
end
@@ -107,6 +113,8 @@ class AssistantTest < ActiveSupport::TestCase
end.returns(call2_response).once.in_sequence(sequence)
@provider.expects(:chat_response).with do |message, **options|
assert_equal @expected_session_id, options[:session_id]
assert_equal @expected_user_identifier, options[:user_identifier]
options[:streamer].call(call1_response_chunk)
true
end.returns(call1_response).once.in_sequence(sequence)