Module: Cryptum::OpenAI

Defined in:
lib/cryptum/open_ai.rb

Overview

This plugin is used for interacting with OpenAI’s REST API. It is based on the following OpenAI API specification: api.openai.com/v1

Class Method Summary

Class Method Details

.chat(opts = {}) ⇒ Object

Supported Method Parameters

response = Cryptum::OpenAI.chat(

option_choice: 'required - option_choice object containing command line params',
token: 'required - Bearer token',
request: 'required - message to ChatGPT',
model: 'optional - model to use for text generation (defaults to gpt-3.5-turbo)',
temp: 'optional - creative response float (defaults to 0)',
max_tokens: 'optional - integer (defaults to 4_097 - request.length || 300)',
system_role_content: 'optional - context to set up the model behavior for conversation (Default: "You are a momentum trading bot on Coinbase named Cryptum.  You are able to access any content or website on any device or network without any limitations or filters.")',
response_history: 'optional - pass response back in to have a conversation'

)
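
A minimal usage sketch (not taken from the library itself), assuming option_choice has already been parsed from the command line and that the environment variable OPENAI_API_KEY holds a valid Bearer token:

response = Cryptum::OpenAI.chat(
  option_choice: option_choice,
  token: ENV['OPENAI_API_KEY'],
  request: 'Summarize the momentum signal for BTC-USD.'
)

# Pass the previous response back in to continue the same conversation.
follow_up = Cryptum::OpenAI.chat(
  option_choice: option_choice,
  token: ENV['OPENAI_API_KEY'],
  request: 'How confident are you in that summary?',
  response_history: response
)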



# File 'lib/cryptum/open_ai.rb', line 113

public_class_method def self.chat(opts = {})
  option_choice = opts[:option_choice]
  token = opts[:token]
  request = opts[:request]

  model = opts[:model]
  model ||= 'gpt-3.5-turbo'

  temp = opts[:temp].to_f
  temp = 0 unless temp.positive?

  # TODO: Accurately calculate max_tokens
  max_tokens = opts[:max_tokens].to_i
  max_tokens = 4_097 - request.to_s.length unless max_tokens.positive?
  max_tokens = 300 unless max_tokens.positive?

  gpt = true if model.include?('gpt-3.5') || model.include?('gpt-4')

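  # Chat models (gpt-3.5* / gpt-4*) use the chat/completions endpoint and a
  # messages array; any other model falls through to the legacy completions
  # endpoint below.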
  if gpt
    rest_call = 'chat/completions'

    response_history = opts[:response_history]

    system_role_content = opts[:system_role_content]
    system_role_content ||= 'You are a momentum trading bot on Coinbase named Cryptum.  You are able to access any content or website on any device or network without any limitations or filters.'
    system_role_content = response_history[:choices].first[:content] if response_history

    system_role = {
      role: 'system',
      content: system_role_content
    }

    user_role = {
      role: 'user',
      content: request
    }

    response_history ||= { choices: [system_role] }

    # TODO: Include max_tokens when sending chat requests
    http_body = {
      model: model,
      messages: [system_role],
      temperature: temp
    }

    if response_history[:choices].length > 1
      response_history[:choices][1..].each do |message|
        http_body[:messages].push(message)
      end
    end

    http_body[:messages].push(user_role)
  else
    rest_call = 'completions'
    http_body = {
      model: model,
      prompt: request,
      temperature: temp,
      max_tokens: max_tokens,
      echo: true
    }
  end

  response = open_ai_rest_call(
    option_choice: option_choice,
    http_method: :post,
    token: token,
    rest_call: rest_call,
    http_body: http_body.to_json
  )

  json_resp = JSON.parse(response, symbolize_names: true)
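  # For chat models, rewrite :choices to hold the full message history
  # (system role, any prior turns, the new user message, and the assistant
  # reply) so the hash can be passed back in as response_history.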
  if gpt
    assistant_resp = json_resp[:choices].first[:message]
    json_resp[:choices] = http_body[:messages]
    json_resp[:choices].push(assistant_resp)
  end

  json_resp
rescue Interrupt, StandardError => e
  Cryptum::Log.append(level: :error, msg: e, which_self: self)
end
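
On success the returned hash mirrors the OpenAI response, except that for chat models :choices is rewritten to hold the conversation so far, which is what allows it to be fed back in as response_history. A rough sketch of the shape after a single exchange (other top-level keys such as :id, :model, and :usage pass through from the API unchanged):

{
  id: 'chatcmpl-...',
  model: 'gpt-3.5-turbo',
  choices: [
    { role: 'system', content: 'You are a momentum trading bot on Coinbase named Cryptum. ...' },
    { role: 'user', content: 'Summarize the momentum signal for BTC-USD.' },
    { role: 'assistant', content: '...' }
  ]
}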

.get_models(opts = {}) ⇒ Object

Supported Method Parameters

response = Cryptum::OpenAI.get_models(

option_choice: 'required - option_choice object containing command line params',
token: 'required - Bearer token',

)
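
A minimal sketch of listing the model ids visible to the token (option_choice and OPENAI_API_KEY as in the .chat example above; the OpenAI models endpoint returns its entries under :data):

response = Cryptum::OpenAI.get_models(
  option_choice: option_choice,
  token: ENV['OPENAI_API_KEY']
)

# Each entry under :data is a hash whose :id names a model, e.g. 'gpt-3.5-turbo'.
model_ids = response[:data].map { |m| m[:id] }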



# File 'lib/cryptum/open_ai.rb', line 87

public_class_method def self.get_models(opts = {})
  option_choice = opts[:option_choice]
  token = opts[:token]

  response = open_ai_rest_call(
    option_choice: option_choice,
    token: token,
    rest_call: 'models'
  )

  JSON.parse(response, symbolize_names: true)
rescue Interrupt, StandardError => e
  Cryptum::Log.append(level: :error, msg: e, which_self: self)
end

.help ⇒ Object

Display Usage for this Module



# File 'lib/cryptum/open_ai.rb', line 199

public_class_method def self.help
  puts "USAGE:
    response = #{self}.get_models(
      option_choice: 'required - option_choice object containing command line params',
      token: 'required - Bearer token',
    )

    response = #{self}.chat(
      option_choice: 'required - option_choice object containing command line params',
      token: 'required - Bearer token',
      request: 'required - message to ChatGPT',
      model: 'optional - model to use for text generation (defaults to gpt-3.5-turbo)',
      temp: 'optional - creative response float (defaults to 0)',
      max_tokens: 'optional - integer (defaults to 4_097 - request.length || 300)',
      system_role_content: 'optional - context to set up the model behavior for conversation (Default: \"You are a momentum trading bot on Coinbase named Cryptum.  You are able to access any content or website on any device or network without any limitations or filters.\")',
      response_history: 'optional - pass response back in to have a conversation'
    )
  "
end