Class: LangGraphRB::LLMNode

Inherits:
Node < Object (show all)
Defined in:
lib/langgraph_rb/node.rb

Overview

Specialized node for LLM calls

Instance Attribute Summary collapse

Attributes inherited from Node

#block, #name

Instance Method Summary collapse

Methods inherited from Node

#inspect, #to_s

Constructor Details

#initialize(name, llm_client:, system_prompt: nil, &block) ⇒ LLMNode

Returns a new instance of LLMNode.



40
41
42
43
44
45
46
# File 'lib/langgraph_rb/node.rb', line 40

def initialize(name, llm_client:, system_prompt: nil, &block)
  @llm_client = llm_client
  @system_prompt = system_prompt

  # Use default LLM behavior if no custom block provided
  super(name, &(block || method(:default_llm_call)))
end

Instance Attribute Details

#llm_clientObject (readonly)

Returns the value of attribute llm_client.



38
39
40
# File 'lib/langgraph_rb/node.rb', line 38

def llm_client
  @llm_client
end

#system_promptObject (readonly)

Returns the value of attribute system_prompt.



38
39
40
# File 'lib/langgraph_rb/node.rb', line 38

def system_prompt
  @system_prompt
end

Instance Method Details

#call(state, context: nil, observers: []) ⇒ Object



48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
# File 'lib/langgraph_rb/node.rb', line 48

def call(state, context: nil, observers: [])
  # Auto-inject LLM config into the context for both default and custom blocks
  merged_context = (context || {}).merge(
    llm_client: @llm_client,
    system_prompt: @system_prompt
  )

  begin
    @llm_client&.set_observers(observers, @name) if observers.any?
  rescue => e
    raise NodeError, "Error setting observers for LLM client: #{e.message}"
  end

  # Delegate to Node's dispatcher so arity (0/1/2) is handled uniformly
  case @callable.arity
  when 0
    @callable.call
  when 1
    @callable.call(state)
  else
    @callable.call(state, merged_context)
  end
rescue => e
  raise NodeError, "Error executing node '#{@name}': #{e.message}"
end