Class: Outhad::Integrations::Source::GenericOpenAI::Client

Inherits:
SourceConnector < Object
(show all)
Defined in:
lib/outhad/integrations/source/generic_open_ai/client.rb

Instance Method Summary collapse

Instance Method Details

#check_connection(connection_config) ⇒ Object



7
8
9
10
11
12
13
14
15
16
17
18
19
20
# File 'lib/outhad/integrations/source/generic_open_ai/client.rb', line 7

# Verifies connectivity to the configured OpenAI-compatible endpoint by
# issuing a POST with the user-supplied request body and API key.
# Returns success_status when the HTTP response is successful; otherwise a
# failure status (with the exception attached if one was raised).
def check_connection(connection_config)
  config = prepare_config(connection_config)
  response = send_request(
    url: config[:url],
    http_method: HTTP_POST,
    payload: JSON.parse(config[:request_format]),
    headers: auth_headers(config[:api_key]),
    config: config[:config]
  )
  return success_status if success?(response)

  failure_status(nil)
rescue StandardError => e
  handle_exception(e, { context: "GENERIC OPEN AI:CHECK_CONNECTION:EXCEPTION", type: "error" })
  failure_status(e)
end

#discover(_connection_config = nil) ⇒ Object



22
23
24
25
26
27
28
# File 'lib/outhad/integrations/source/generic_open_ai/client.rb', line 22

# Loads the static catalog spec from disk and returns it wrapped in an
# Outhad protocol message. The connection config is unused for this source.
def discover(_connection_config = nil)
  build_catalog(read_json(CATALOG_SPEC_PATH)).to_outhad_message
rescue StandardError => e
  handle_exception(e, { context: "GENERIC OPEN AI:DISCOVER:EXCEPTION", type: "error" })
end

#read(sync_config) ⇒ Object



30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
# File 'lib/outhad/integrations/source/generic_open_ai/client.rb', line 30

# Executes the model query carried by the sync config against the AI/ML
# endpoint. When :is_stream is truthy, results are streamed and each chunk
# is yielded to the caller's block; otherwise the full result is returned.
def read(sync_config)
  config = prepare_config(sync_config.source.connection_specification)
  # The server checks the ConnectorQueryType. If it's "ai_ml," the server
  # calculates the payload and passes it as a query in the sync config model
  # protocol. This query is then sent to the AI/ML model.
  model_payload = parse_json(sync_config.model.query)

  # NOTE: `||=` deliberately normalizes a missing :is_stream key to false
  # on the prepared config, matching the original behavior.
  unless config[:is_stream] ||= false
    return run_model(config, model_payload)
  end

  run_model_stream(config, model_payload) do |chunk|
    yield chunk if block_given?
  end
rescue StandardError => e
  handle_exception(e, { context: "GENERIC OPEN AI:READ:EXCEPTION", type: "error" })
end