Class: ObsceneGpt::TestDetector

Inherits:
Object
Defined in:
lib/obscene_gpt/test_detector.rb

Instance Attribute Summary collapse

Instance Method Summary collapse

Constructor Details

#initialize(schema: nil) ⇒ TestDetector

Returns a new instance of TestDetector.



5
6
7
# File 'lib/obscene_gpt/test_detector.rb', line 5

# Builds a test-mode detector. When no schema is supplied, the
# library's simple schema is used as the default.
#
# @param schema [Object, nil] response schema; defaults to SIMPLE_SCHEMA
def initialize(schema: nil)
  @schema = schema
  @schema ||= ObsceneGpt::Prompts::SIMPLE_SCHEMA
end

Instance Attribute Details

#schema ⇒ Object (readonly)

Returns the value of attribute schema.



3
4
5
# File 'lib/obscene_gpt/test_detector.rb', line 3

# Reader for the schema this detector was configured with.
#
# @return [Object] the value of attribute schema
def schema = @schema

Instance Method Details

#detect(text) ⇒ Hash

Detects whether the given text contains obscene content

Parameters:

  • text (String)

    The text to analyze

Returns:

  • (Hash)

    Detection result



36
37
38
# File 'lib/obscene_gpt/test_detector.rb', line 36

# Analyzes a single text by delegating to the batch API and
# unwrapping its one-element result.
#
# @param text [String] The text to analyze
# @return [Hash] Detection result
def detect(text)
  detect_many([text]).first
end

#detect_many(texts) ⇒ Array<Hash>

Detects whether the given texts contain obscene content using test mode

Parameters:

  • texts (Array<String>)

    The texts to analyze

Returns:

  • (Array<Hash>)

    An array of hashes containing the detection result



12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
# File 'lib/obscene_gpt/test_detector.rb', line 12

# Runs the test-mode heuristic over a batch of texts.
#
# @param texts [Array<String>] The texts to analyze
# @return [Array<Hash>] one detection-result hash per input text
def detect_many(texts) # rubocop:disable Metrics/MethodLength
  texts.map do |text|
    # Test mode stands in for the real API with a keyword heuristic.
    flagged = detect_obscene_patterns(text)

    result = {
      obscene: flagged,
      confidence: flagged ? 0.85 : 0.95,
    }
    next result unless @schema == Prompts::FULL_SCHEMA

    # The full schema additionally carries an explanation and category tags.
    result.merge(
      reasoning: flagged ? "Contains inappropriate content" : "Clean text",
      categories: flagged ? ["profanity"] : [],
    )
  end
end