Class: TencentCloud::Iotexplorer::V20190423::TalkLLMConfigInfo

Inherits:
Common::AbstractModel
  • Object
show all
Defined in:
lib/v20190423/models.rb

Overview

LLM配置信息。

Instance Attribute Summary collapse

Instance Method Summary collapse

Constructor Details

#initialize(llmtype = nil, enabled = nil, model = nil, streaming = nil, config = nil, temperature = nil, maxtokens = nil, topp = nil, tools = nil) ⇒ TalkLLMConfigInfo

Returns a new instance of TalkLLMConfigInfo.



13496
13497
13498
13499
13500
13501
13502
13503
13504
13505
13506
# File 'lib/v20190423/models.rb', line 13496

# Builds a TalkLLMConfigInfo; every field is optional and defaults to nil.
#
# @param llmtype     [Object] LLM provider type (stored in @LLMType)
# @param enabled     [Object] enable flag for this LLM config
# @param model       [Object] model identifier
# @param streaming   [Object] streaming-response flag
# @param config      [Object] raw provider configuration (see class example)
# @param temperature [Object] sampling temperature
# @param maxtokens   [Object] maximum token count
# @param topp        [Object] top-p sampling value
# @param tools       [Object] list of tool IDs
def initialize(llmtype=nil, enabled=nil, model=nil, streaming=nil, config=nil, temperature=nil, maxtokens=nil, topp=nil, tools=nil)
  # Parallel assignment keeps the field-to-argument mapping in one place.
  @LLMType, @Enabled, @Model, @Streaming, @Config =
    llmtype, enabled, model, streaming, config
  @Temperature, @MaxTokens, @TopP, @Tools =
    temperature, maxtokens, topp, tools
end

Instance Attribute Details

#ConfigObject

## openai

```
 "ApiKey": "sk-XXXXXXXXXXXX",
 "ApiUrl": "https://api.openai.com/v1",
 "SystemPrompt": "一个小小助手",
 "Timeout": 20,
 "History": 10,
 "MetaInfo": {
 }
```

Parameters:

  • Temperature:

    温度

  • MaxTokens:

    最大token数

  • TopP:

    topP

  • Tools:

    工具ID列表



13494
13495
13496
# File 'lib/v20190423/models.rb', line 13494

# Reader for @Config — the raw LLM provider configuration (the class example
# shows ApiKey/ApiUrl/SystemPrompt/Timeout/History/MetaInfo keys for openai).
def Config
  @Config
end

#EnabledObject

## openai

```
 "ApiKey": "sk-XXXXXXXXXXXX",
 "ApiUrl": "https://api.openai.com/v1",
 "SystemPrompt": "一个小小助手",
 "Timeout": 20,
 "History": 10,
 "MetaInfo": {
 }
```

Parameters:

  • Temperature:

    温度

  • MaxTokens:

    最大token数

  • TopP:

    topP

  • Tools:

    工具ID列表



13494
13495
13496
# File 'lib/v20190423/models.rb', line 13494

# Reader for @Enabled — presumably whether this LLM configuration is active;
# NOTE(review): confirm exact semantics against the IoT Explorer API docs.
def Enabled
  @Enabled
end

#LLMTypeObject

## openai

```
 "ApiKey": "sk-XXXXXXXXXXXX",
 "ApiUrl": "https://api.openai.com/v1",
 "SystemPrompt": "一个小小助手",
 "Timeout": 20,
 "History": 10,
 "MetaInfo": {
 }
```

Parameters:

  • Temperature:

    温度

  • MaxTokens:

    最大token数

  • TopP:

    topP

  • Tools:

    工具ID列表



13494
13495
13496
# File 'lib/v20190423/models.rb', line 13494

# Reader for @LLMType — the LLM provider type (the class example documents
# an "openai" configuration; other values are set by the API, not shown here).
def LLMType
  @LLMType
end

#MaxTokensObject

## openai

```
 "ApiKey": "sk-XXXXXXXXXXXX",
 "ApiUrl": "https://api.openai.com/v1",
 "SystemPrompt": "一个小小助手",
 "Timeout": 20,
 "History": 10,
 "MetaInfo": {
 }
```

Parameters:

  • Temperature:

    温度

  • MaxTokens:

    最大token数

  • TopP:

    topP

  • Tools:

    工具ID列表



13494
13495
13496
# File 'lib/v20190423/models.rb', line 13494

# Reader for @MaxTokens — maximum token count for the LLM response
# (documented as "最大token数", i.e. max tokens, in the API parameter list).
def MaxTokens
  @MaxTokens
end

#ModelObject

## openai

```
 "ApiKey": "sk-XXXXXXXXXXXX",
 "ApiUrl": "https://api.openai.com/v1",
 "SystemPrompt": "一个小小助手",
 "Timeout": 20,
 "History": 10,
 "MetaInfo": {
 }
```

Parameters:

  • Temperature:

    温度

  • MaxTokens:

    最大token数

  • TopP:

    topP

  • Tools:

    工具ID列表



13494
13495
13496
# File 'lib/v20190423/models.rb', line 13494

# Reader for @Model — the model identifier used by the configured LLM
# provider; value set via #initialize or #deserialize.
def Model
  @Model
end

#StreamingObject

## openai

```
 "ApiKey": "sk-XXXXXXXXXXXX",
 "ApiUrl": "https://api.openai.com/v1",
 "SystemPrompt": "一个小小助手",
 "Timeout": 20,
 "History": 10,
 "MetaInfo": {
 }
```

Parameters:

  • Temperature:

    温度

  • MaxTokens:

    最大token数

  • TopP:

    topP

  • Tools:

    工具ID列表



13494
13495
13496
# File 'lib/v20190423/models.rb', line 13494

# Reader for @Streaming — presumably whether streaming responses are used;
# NOTE(review): semantics not visible here, confirm against the API docs.
def Streaming
  @Streaming
end

#TemperatureObject

## openai

```
 "ApiKey": "sk-XXXXXXXXXXXX",
 "ApiUrl": "https://api.openai.com/v1",
 "SystemPrompt": "一个小小助手",
 "Timeout": 20,
 "History": 10,
 "MetaInfo": {
 }
```

Parameters:

  • Temperature:

    温度

  • MaxTokens:

    最大token数

  • TopP:

    topP

  • Tools:

    工具ID列表



13494
13495
13496
# File 'lib/v20190423/models.rb', line 13494

# Reader for @Temperature — sampling temperature for the LLM
# (documented as "温度", i.e. temperature, in the API parameter list).
def Temperature
  @Temperature
end

#ToolsObject

## openai

```
 "ApiKey": "sk-XXXXXXXXXXXX",
 "ApiUrl": "https://api.openai.com/v1",
 "SystemPrompt": "一个小小助手",
 "Timeout": 20,
 "History": 10,
 "MetaInfo": {
 }
```

Parameters:

  • Temperature:

    温度

  • MaxTokens:

    最大token数

  • TopP:

    topP

  • Tools:

    工具ID列表



13494
13495
13496
# File 'lib/v20190423/models.rb', line 13494

# Reader for @Tools — list of tool IDs
# (documented as "工具ID列表", i.e. tool ID list, in the API parameter list).
def Tools
  @Tools
end

#TopPObject

## openai

```
 "ApiKey": "sk-XXXXXXXXXXXX",
 "ApiUrl": "https://api.openai.com/v1",
 "SystemPrompt": "一个小小助手",
 "Timeout": 20,
 "History": 10,
 "MetaInfo": {
 }
```

Parameters:

  • Temperature:

    温度

  • MaxTokens:

    最大token数

  • TopP:

    topP

  • Tools:

    工具ID列表



13494
13495
13496
# File 'lib/v20190423/models.rb', line 13494

# Reader for @TopP — top-p (nucleus sampling) value for the LLM
# (documented as "topP" in the API parameter list).
def TopP
  @TopP
end

Instance Method Details

#deserialize(params) ⇒ Object



13508
13509
13510
13511
13512
13513
13514
13515
13516
13517
13518
# File 'lib/v20190423/models.rb', line 13508

# Populates this object's fields from an API response hash.
# Keys are the API's PascalCase names; missing keys leave the field nil
# (Hash#values_at returns nil for absent keys, matching per-key [] lookups).
#
# @param params [Hash] decoded response fragment for TalkLLMConfigInfo
def deserialize(params)
  @LLMType, @Enabled, @Model, @Streaming, @Config,
  @Temperature, @MaxTokens, @TopP, @Tools =
    params.values_at('LLMType', 'Enabled', 'Model', 'Streaming', 'Config',
                     'Temperature', 'MaxTokens', 'TopP', 'Tools')
end