Class: Durable::Llm::Providers::AzureOpenai

Inherits:
Base
  • Object
show all
Defined in:
lib/durable/llm/providers/azure_openai.rb

Overview

Azure OpenAI provider for accessing Azure OpenAI’s language models

This provider implements the Azure OpenAI API for chat completions, embeddings, and streaming. It handles authentication via API keys, deployment-based routing, and response normalization.

Defined Under Namespace

Classes: AzureOpenaiChoice, AzureOpenaiEmbeddingResponse, AzureOpenaiMessage, AzureOpenaiResponse, AzureOpenaiStreamChoice, AzureOpenaiStreamDelta, AzureOpenaiStreamResponse

Constant Summary collapse

BASE_URL_TEMPLATE =
'https://%s.openai.azure.com/openai/deployments/%s'

Instance Attribute Summary collapse

Class Method Summary collapse

Instance Method Summary collapse

Methods inherited from Base

models, options, #stream?

Constructor Details

#initialize(api_key: nil, resource_name: nil, api_version: '2024-02-01') ⇒ AzureOpenai

Returns a new instance of AzureOpenai.



32
33
34
35
36
37
# File 'lib/durable/llm/providers/azure_openai.rb', line 32

# Builds a provider instance. The resource name falls back to the
# AZURE_OPENAI_RESOURCE_NAME environment variable when not supplied.
# No base URL is stored here: Azure routes by deployment, and the
# deployment name arrives per request via the :model option.
def initialize(api_key: nil, resource_name: nil, api_version: '2024-02-01')
  super(api_key: api_key)
  @resource_name = resource_name || ENV.fetch('AZURE_OPENAI_RESOURCE_NAME', nil)
  @api_version = api_version
end

Instance Attribute Details

#api_key ⇒ Object

Returns the value of attribute api_key.



30
31
32
# File 'lib/durable/llm/providers/azure_openai.rb', line 30

# Reader for the Azure OpenAI API key used on each request.
def api_key = @api_key

#api_version ⇒ Object

Returns the value of attribute api_version.



30
31
32
# File 'lib/durable/llm/providers/azure_openai.rb', line 30

# Reader for the api-version query parameter sent with every request.
def api_version = @api_version

#resource_name ⇒ Object

Returns the value of attribute resource_name.



30
31
32
# File 'lib/durable/llm/providers/azure_openai.rb', line 30

# Reader for the Azure resource name used to build the endpoint host.
def resource_name = @resource_name

Class Method Details

.stream? ⇒ Boolean

Returns:

  • (Boolean)


133
134
135
# File 'lib/durable/llm/providers/azure_openai.rb', line 133

# The Azure OpenAI provider supports server-sent-event streaming.
def self.stream? = true

Instance Method Details

#completion(options) ⇒ Object



39
40
41
42
43
44
45
46
47
48
49
50
51
# File 'lib/durable/llm/providers/azure_openai.rb', line 39

# Performs a chat completion against the deployment named by options[:model]
# (or 'model'); the deployment key is removed from the payload because Azure
# encodes it in the URL rather than the request body.
def completion(options)
  deployment = options.delete(:model) || options.delete('model')
  endpoint = format(BASE_URL_TEMPLATE, @resource_name, deployment)
  connection = build_connection(endpoint)

  raw = connection.post('chat/completions') do |request|
    request.headers['api-key'] = @api_key
    request.params['api-version'] = @api_version
    request.body = options
  end

  handle_response(raw)
end

#default_api_key ⇒ Object



22
23
24
25
26
27
28
# File 'lib/durable/llm/providers/azure_openai.rb', line 22

# Resolves the API key: library configuration first, then the
# AZURE_OPENAI_API_KEY environment variable. A partially-initialized
# configuration (NoMethodError) is treated the same as "not configured".
def default_api_key
  configured = begin
    Durable::Llm.configuration.azure_openai&.api_key
  rescue NoMethodError
    nil
  end
  configured || ENV.fetch('AZURE_OPENAI_API_KEY', nil)
end

#embedding(model:, input:, **options) ⇒ Object



53
54
55
56
57
58
59
60
61
62
63
64
# File 'lib/durable/llm/providers/azure_openai.rb', line 53

# Requests embeddings from the deployment named by +model+; +input+ and any
# extra options are forwarded in the JSON body, while authentication and the
# api-version travel as a header and query parameter respectively.
def embedding(model:, input:, **options)
  endpoint = format(BASE_URL_TEMPLATE, @resource_name, model)
  connection = build_connection(endpoint)

  raw = connection.post('embeddings') do |request|
    request.headers['api-key'] = @api_key
    request.params['api-version'] = @api_version
    request.body = { input: input }.merge(options)
  end

  handle_response(raw, AzureOpenaiEmbeddingResponse)
end

#models ⇒ Object



66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
# File 'lib/durable/llm/providers/azure_openai.rb', line 66

# Returns the list of supported model/deployment names.
#
# Azure OpenAI doesn't expose a public models endpoint, so this is a
# hardcoded list. Each entry appears exactly once: an earlier revision
# repeated the gpt-4o transcribe/tts entries under both the GPT-4o and
# Audio sections, so callers iterating the list saw duplicates.
#
# @return [Array<String>] unique model identifiers
def models
  [
    # GPT-5 series
    'gpt-5',
    'gpt-5-mini',
    'gpt-5-nano',
    'gpt-5-chat',
    'gpt-5-codex',
    'gpt-5-pro',
    # GPT-4.1 series
    'gpt-4.1',
    'gpt-4.1-mini',
    'gpt-4.1-nano',
    # GPT-4o series (includes the audio transcribe/tts variants)
    'gpt-4o',
    'gpt-4o-mini',
    'gpt-4o-audio-preview',
    'gpt-4o-mini-audio-preview',
    'gpt-4o-realtime-preview',
    'gpt-4o-mini-realtime-preview',
    'gpt-4o-transcribe',
    'gpt-4o-mini-transcribe',
    'gpt-4o-mini-tts',
    # GPT-4 Turbo
    'gpt-4-turbo',
    # GPT-4
    'gpt-4',
    'gpt-4-32k',
    # GPT-3.5
    'gpt-3.5-turbo',
    'gpt-35-turbo',
    'gpt-35-turbo-instruct',
    # O-series
    'o3',
    'o3-mini',
    'o3-pro',
    'o4-mini',
    'o1',
    'o1-mini',
    'o1-preview',
    'codex-mini',
    # Embeddings
    'text-embedding-ada-002',
    'text-embedding-3-small',
    'text-embedding-3-large',
    # Audio
    'whisper',
    'tts',
    'tts-hd',
    # Image generation
    'dall-e-3',
    'gpt-image-1',
    'gpt-image-1-mini',
    # Video generation
    'sora',
    # Other
    'model-router',
    'computer-use-preview',
    'gpt-oss-120b',
    'gpt-oss-20b'
  ]
end

#setup_stream_request(req, options) ⇒ Object



154
155
156
157
158
159
160
161
162
163
164
165
# File 'lib/durable/llm/providers/azure_openai.rb', line 154

# Configures a Faraday request for SSE streaming: sets auth and version,
# marks the Accept header, and installs an on_data callback that forwards
# each parsed JSON chunk to the caller's block.
def setup_stream_request(req, options)
  req.headers['Accept'] = 'text/event-stream'
  req.headers['api-key'] = @api_key
  req.params['api-version'] = @api_version
  req.body = options

  forward = proc do |chunk, _size, _total|
    yield chunk
  end

  req.options.on_data = to_json_stream(user_proc: forward)
end

#stream(options) ⇒ Object



137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
# File 'lib/durable/llm/providers/azure_openai.rb', line 137

# Streams a chat completion, yielding an AzureOpenaiStreamResponse per SSE
# chunk. The model key is read (not removed) to pick the deployment URL;
# :stream is forced on, and a string-keyed temperature is coerced to Float.
def stream(options)
  deployment = options[:model] || options['model']
  connection = build_connection(format(BASE_URL_TEMPLATE, @resource_name, deployment))

  options[:stream] = true
  options['temperature'] = options['temperature'].to_f if options['temperature']

  raw = connection.post('chat/completions') do |request|
    setup_stream_request(request, options) do |chunk|
      yield AzureOpenaiStreamResponse.new(chunk)
    end
  end

  handle_response(raw)
end