13 changes: 10 additions & 3 deletions app/models/assistant.rb
@@ -1,18 +1,24 @@
class Assistant
include Provided, Configurable, Broadcastable

attr_reader :chat, :instructions
attr_reader :chat, :instructions, :instructions_prompt

class << self
def for_chat(chat)
config = config_for(chat)
new(chat, instructions: config[:instructions], functions: config[:functions])
new(
chat,
instructions: config[:instructions],
instructions_prompt: config[:instructions_prompt],
functions: config[:functions]
)
end
end

def initialize(chat, instructions: nil, functions: [])
def initialize(chat, instructions: nil, instructions_prompt: nil, functions: [])
@chat = chat
@instructions = instructions
@instructions_prompt = instructions_prompt
@functions = functions
end

@@ -26,6 +32,7 @@ def respond_to(message)
responder = Assistant::Responder.new(
message: message,
instructions: instructions,
instructions_prompt: instructions_prompt,
function_tool_caller: function_tool_caller,
llm: get_model_provider(message.ai_model)
)
112 changes: 111 additions & 1 deletion app/models/assistant/configurable.rb
@@ -6,8 +6,11 @@ def config_for(chat)
preferred_currency = Money::Currency.new(chat.user.family.currency)
preferred_date_format = chat.user.family.date_format

instructions_config = default_instructions(preferred_currency, preferred_date_format)

{
instructions: default_instructions(preferred_currency, preferred_date_format),
instructions: instructions_config[:content],
instructions_prompt: instructions_config[:prompt],
functions: default_functions
}
end
@@ -23,6 +26,104 @@ def default_functions
end

def default_instructions(preferred_currency, preferred_date_format)
langfuse_instructions = langfuse_default_instructions(preferred_currency, preferred_date_format)

if langfuse_instructions.present?
{
content: langfuse_instructions[:content],
prompt: langfuse_instructions
}
else
{
content: fallback_default_instructions(preferred_currency, preferred_date_format),
prompt: nil
}
end
end

def langfuse_default_instructions(preferred_currency, preferred_date_format)
return unless langfuse_client

prompt = langfuse_client.get_prompt("default_instructions")
return if prompt.nil?

# TODO: remove after we make the code resilient to chat vs. text types of prompts
Rails.logger.warn("Langfuse prompt retrieved: #{prompt.name} #{prompt.version}")
Rails.logger.warn("Langfuse prompt retrieved: #{prompt.prompt}")

compiled_prompt = compile_langfuse_prompt(
prompt.prompt.dig(0, "content"),
preferred_currency: preferred_currency,
preferred_date_format: preferred_date_format
)
Comment on lines +54 to +58
⚠️ Potential issue | 🟠 Major

Validate prompt structure before accessing nested content.

Line 55 assumes `prompt.prompt` is an array with a hash at index 0 containing a `"content"` key. If the Langfuse prompt structure differs, this will silently return `nil` or raise an error.

Consider adding validation:

```diff
-        compiled_prompt = compile_langfuse_prompt(
-          prompt.prompt.dig(0, "content"),
+        raw_content = prompt.prompt.dig(0, "content")
+        unless raw_content
+          Rails.logger.warn("Langfuse prompt structure unexpected: missing content at prompt.prompt[0]['content']")
+          return nil
+        end
+
+        compiled_prompt = compile_langfuse_prompt(
+          raw_content,
           preferred_currency: preferred_currency,
           preferred_date_format: preferred_date_format
         )
```
🤖 Prompt for AI Agents
In `app/models/assistant/configurable.rb` around lines 54 to 58, the code assumes `prompt.prompt` is an array whose first element is a hash containing a `"content"` key. Add validation that `prompt.prompt` is an Array with at least one element and that the first element exposes a `"content"` key (or use safe navigation), then either raise a clear `ArgumentError` or return a sensible default if the structure is invalid. Update the call to `compile_langfuse_prompt` to use the validated content (or the fallback), and include a short error message referencing the prompt id/context when raising.
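
A related sketch for the TODO above ("resilient to chat vs. text types of prompts"): one possible shape is a small extractor that accepts either a plain string (a Langfuse text prompt) or an array of role/content hashes (a chat prompt). This is illustrative only and not part of this diff; it assumes those are the two shapes `prompt.prompt` can take, matching the existing `dig(0, "content")` access.

```ruby
# Sketch only — assumes a text prompt surfaces as a String and a chat prompt as
# an Array of {"role" => ..., "content" => ...} hashes.
def raw_prompt_content(prompt)
  case prompt.prompt
  when String
    prompt.prompt
  when Array
    first = prompt.prompt.first
    first.is_a?(Hash) ? (first["content"] || first[:content]) : nil
  end
end
```

`langfuse_default_instructions` could then call this in place of the `dig` and fall back (with the warning suggested above) when it returns `nil`.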


content = extract_prompt_content(compiled_prompt)
return if content.blank?

{
id: prompt.respond_to?(:id) ? prompt.id : (prompt[:id] rescue nil),
name: prompt.name,
version: prompt.version,
template: prompt.prompt,
content: content
}
rescue => e
Rails.logger.warn("Langfuse prompt retrieval failed: #{e.message}")
nil
end

def compile_langfuse_prompt(prompt, preferred_currency:, preferred_date_format:)
variables = {
preferred_currency_symbol: preferred_currency&.symbol,
preferred_currency_iso_code: preferred_currency&.iso_code,
preferred_currency_default_precision: preferred_currency&.default_precision,
preferred_currency_default_format: preferred_currency&.default_format,
preferred_currency_separator: preferred_currency&.separator,
preferred_currency_delimiter: preferred_currency&.delimiter,
preferred_date_format: preferred_date_format,
current_date: Date.current
}.transform_values { |value| value.nil? ? "" : value.to_s }

# If the prompt object supports compilation, use it. Otherwise, perform
# a lightweight local interpolation for String/Array/Hash templates.
if prompt.respond_to?(:compile)
prompt.compile(**variables)
else
interpolate_template(prompt, variables)
end
end

def interpolate_template(template, variables)
case template
when String
# Replace {{ variable }} placeholders with provided variables
template.gsub(/\{\{\s*(\w+)\s*\}\}/) do
key = Regexp.last_match(1).to_sym
variables[key] || ""
end
when Array
template.map { |item| interpolate_template(item, variables) }
when Hash
template.transform_values { |v| interpolate_template(v, variables) }
else
template
end
end

def extract_prompt_content(compiled_prompt)
case compiled_prompt
when String
compiled_prompt
when Array
compiled_prompt.filter_map do |message|
message[:content] || message["content"]
end.join("\n\n")
else
nil
end
end

def fallback_default_instructions(preferred_currency, preferred_date_format)
<<~PROMPT
## Your identity

@@ -78,5 +179,14 @@ def default_instructions(preferred_currency, preferred_date_format)
the data you're presenting represents and what context it is in (i.e. date range, account, etc.)
PROMPT
end

def langfuse_client
return unless ENV["LANGFUSE_PUBLIC_KEY"].present? && ENV["LANGFUSE_SECRET_KEY"].present?

@langfuse_client ||= Langfuse.new
rescue => e
Rails.logger.warn("Langfuse client initialization failed: #{e.message}")
nil
end
Comment on lines +183 to +190

🛠️ Refactor suggestion | 🟠 Major

Extract duplicated `langfuse_client` to a shared concern.

This method is nearly identical to `Provider::OpenAI#langfuse_client` (lines 140-144 in `app/models/provider/openai.rb`). To follow DRY principles and maintain consistency, consider extracting this to a shared concern (e.g., `LangfuseClientConcern`) that both classes can include.

Create a new concern at `app/models/concerns/langfuse_client_concern.rb`:

```ruby
module LangfuseClientConcern
  extend ActiveSupport::Concern

  class_methods do
    def langfuse_client
      return unless ENV["LANGFUSE_PUBLIC_KEY"].present? && ENV["LANGFUSE_SECRET_KEY"].present?

      @langfuse_client ||= Langfuse.new
    rescue => e
      Rails.logger.warn("Langfuse client initialization failed: #{e.message}")
      nil
    end
  end
end
```

Then include it in both `Assistant::Configurable` and `Provider::OpenAI`:

```diff
 module Assistant::Configurable
   extend ActiveSupport::Concern
+  include LangfuseClientConcern

   class_methods do
     # ...
-
-      def langfuse_client
-        return unless ENV["LANGFUSE_PUBLIC_KEY"].present? && ENV["LANGFUSE_SECRET_KEY"].present?
-
-        @langfuse_client ||= Langfuse.new
-      rescue => e
-        Rails.logger.warn("Langfuse client initialization failed: #{e.message}")
-        nil
-      end
   end
 end
```

As per coding guidelines.

🤖 Prompt for AI Agents
In `app/models/assistant/configurable.rb` around lines 154 to 161 and `app/models/provider/openai.rb` around lines 140 to 144, the `langfuse_client` method is duplicated. Extract it into a new concern at `app/models/concerns/langfuse_client_concern.rb` that defines a class-level `langfuse_client` (using `ActiveSupport::Concern` with `class_methods`), keeping the same ENV checks, memoization, and rescue logging. Then remove the method from both `Assistant::Configurable` and `Provider::OpenAI` and include the new `LangfuseClientConcern` in each class so they share a single implementation.
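
One wrinkle worth noting: the suggested concern defines `langfuse_client` inside `class_methods`, while the comment refers to `Provider::OpenAI#langfuse_client`, i.e. an instance method. If that call site really is instance-level, the concern may also need an instance delegator so existing callers keep working. A sketch under that assumption (the module name follows the suggestion above; the instance delegator is an addition, not part of this PR):

```ruby
module LangfuseClientConcern
  extend ActiveSupport::Concern

  class_methods do
    def langfuse_client
      return unless ENV["LANGFUSE_PUBLIC_KEY"].present? && ENV["LANGFUSE_SECRET_KEY"].present?

      @langfuse_client ||= Langfuse.new
    rescue => e
      Rails.logger.warn("Langfuse client initialization failed: #{e.message}")
      nil
    end
  end

  # Instance callers (e.g. Provider::OpenAI#log_langfuse_generation) delegate
  # to the class-level client, so both call styles share one memoized instance.
  def langfuse_client
    self.class.langfuse_client
  end
end
```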

end
end
6 changes: 4 additions & 2 deletions app/models/assistant/responder.rb
@@ -1,9 +1,10 @@
class Assistant::Responder
def initialize(message:, instructions:, function_tool_caller:, llm:)
def initialize(message:, instructions:, function_tool_caller:, llm:, instructions_prompt: nil)
@message = message
@instructions = instructions
@function_tool_caller = function_tool_caller
@llm = llm
@instructions_prompt = instructions_prompt
end

def on(event_name, &block)
@@ -31,7 +32,7 @@ def respond(previous_response_id: nil)
end

private
attr_reader :message, :instructions, :function_tool_caller, :llm
attr_reader :message, :instructions, :function_tool_caller, :llm, :instructions_prompt

def handle_follow_up_response(response)
streamer = proc do |chunk|
@@ -64,6 +65,7 @@ def get_llm_response(streamer:, function_results: [], previous_response_id: nil)
message.content,
model: message.ai_model,
instructions: instructions,
instructions_prompt: instructions_prompt,
functions: function_tool_caller.function_definitions,
function_results: function_results,
streamer: streamer,
1 change: 1 addition & 0 deletions app/models/provider/llm_concept.rb
@@ -22,6 +22,7 @@ def chat_response(
prompt,
model:,
instructions: nil,
instructions_prompt: nil,
functions: [],
function_results: [],
streamer: nil,
28 changes: 25 additions & 3 deletions app/models/provider/openai.rb
@@ -62,6 +62,7 @@ def chat_response(
prompt,
model:,
instructions: nil,
instructions_prompt: nil,
functions: [],
function_results: [],
streamer: nil,
@@ -112,6 +113,7 @@ def chat_response(
model: model,
input: input_payload,
output: response.messages.map(&:output_text).join("\n"),
prompt: instructions_prompt,
session_id: session_id,
user_identifier: user_identifier
)
@@ -123,6 +125,7 @@ def chat_response(
model: model,
input: input_payload,
output: parsed.messages.map(&:output_text).join("\n"),
prompt: instructions_prompt,
usage: raw_response["usage"],
session_id: session_id,
user_identifier: user_identifier
@@ -141,7 +144,7 @@ def langfuse_client
@langfuse_client = Langfuse.new
end

def log_langfuse_generation(name:, model:, input:, output:, usage: nil, session_id: nil, user_identifier: nil)
def log_langfuse_generation(name:, model:, input:, output:, usage: nil, session_id: nil, user_identifier: nil, prompt: nil)
return unless langfuse_client

trace = langfuse_client.trace(
@@ -150,15 +153,34 @@ def log_langfuse_generation(name:, model:, input:, output:, usage: nil, session_
session_id: session_id,
user_id: user_identifier
)
trace.generation(
generation_options = {
name: name,
model: model,
input: input,
output: output,
usage: usage,
session_id: session_id,
user_id: user_identifier
)
}

if prompt.present?
generation_options[:prompt_name] = prompt[:name] if prompt[:name]
generation_options[:prompt_version] = prompt[:version] if prompt[:version]
generation_options[:prompt_id] = prompt[:id] if prompt[:id]

metadata = {
prompt: {
id: prompt[:id],
name: prompt[:name],
version: prompt[:version],
content: prompt[:content],
template: prompt[:template]
}.compact
}
generation_options[:metadata] = metadata
end

trace.generation(**generation_options)
trace.update(output: output)
rescue => e
Rails.logger.warn("Langfuse logging failed: #{e.message}")