Class: Pangea::Models::AiGuard::TextGuardResult::Detectors

Inherits:
Internal::Type::BaseModel show all
Defined in:
lib/pangea/models/ai_guard/text_guard_result.rb

Defined Under Namespace

Classes: CodeDetection, Competitors, CustomEntity, Gibberish, Hardening, LanguageDetection, MaliciousEntity, PiiEntity, ProfanityAndToxicity, PromptInjection, SecretsDetection, SelfHarm, Sentiment, Topic

Instance Attribute Summary collapse

Method Summary

Methods inherited from Internal::Type::BaseModel

#==, coerce, #deconstruct_keys, dump, fields, #initialize, #inspect, known_fields, optional, required, #to_h

Methods included from Internal::Type::Converter

#coerce, coerce, #dump, dump, type_info

Constructor Details

This class inherits a constructor from Pangea::Internal::Type::BaseModel

Instance Attribute Details

#code_detection ⇒ CodeDetection?

Returns:



113
# File 'lib/pangea/models/ai_guard/text_guard_result.rb', line 113

optional :code_detection, -> { CodeDetection }, nil?: true

#competitors ⇒ Competitors?

Returns:



88
# File 'lib/pangea/models/ai_guard/text_guard_result.rb', line 88

optional :competitors, -> { Competitors }, nil?: true

#custom_entity ⇒ CustomEntity?

Returns:



78
# File 'lib/pangea/models/ai_guard/text_guard_result.rb', line 78

optional :custom_entity, -> { CustomEntity }, nil?: true

#gibberish ⇒ Gibberish?

Returns:



53
# File 'lib/pangea/models/ai_guard/text_guard_result.rb', line 53

optional :gibberish, -> { Gibberish }, nil?: true

#hardening ⇒ Hardening?

Returns:



98
# File 'lib/pangea/models/ai_guard/text_guard_result.rb', line 98

optional :hardening, -> { Hardening }, nil?: true

#language_detection ⇒ LanguageDetection?

Returns:



103
# File 'lib/pangea/models/ai_guard/text_guard_result.rb', line 103

optional :language_detection, -> { LanguageDetection }, nil?: true

#malicious_entity ⇒ MaliciousEntity?

Returns:



73
# File 'lib/pangea/models/ai_guard/text_guard_result.rb', line 73

optional :malicious_entity, -> { MaliciousEntity }, nil?: true

#pii_entity ⇒ PiiEntity?

Returns:



68
# File 'lib/pangea/models/ai_guard/text_guard_result.rb', line 68

optional :pii_entity, -> { PiiEntity }, nil?: true

#profanity_and_toxicity ⇒ ProfanityAndToxicity?

Returns:



93
# File 'lib/pangea/models/ai_guard/text_guard_result.rb', line 93

optional :profanity_and_toxicity, -> { ProfanityAndToxicity }, nil?: true

#prompt_injection ⇒ PromptInjection?

Returns:



48
# File 'lib/pangea/models/ai_guard/text_guard_result.rb', line 48

optional :prompt_injection, -> { PromptInjection }, nil?: true

#secrets_detection ⇒ SecretsDetection?

Returns:



83
# File 'lib/pangea/models/ai_guard/text_guard_result.rb', line 83

optional :secrets_detection, -> { SecretsDetection }, nil?: true

#selfharm ⇒ SelfHarm?

Returns:



63
# File 'lib/pangea/models/ai_guard/text_guard_result.rb', line 63

optional :selfharm, -> { SelfHarm }, nil?: true

#sentiment ⇒ Sentiment?

Returns:



58
# File 'lib/pangea/models/ai_guard/text_guard_result.rb', line 58

optional :sentiment, -> { Sentiment }, nil?: true

#topic ⇒ Topic?

Returns:



108
# File 'lib/pangea/models/ai_guard/text_guard_result.rb', line 108

optional :topic, -> { Topic }, nil?: true