Class: SiteMapper::Robots::ParsedRobots

Inherits:
Object

Defined in:
lib/site_mapper/robots.rb

Overview

Parses robots.txt

Instance Method Summary

  • #allowed?(uri, user_agent) ⇒ Boolean
  • #crawl_delay(user_agent) ⇒ Integer
  • #other_values ⇒ Hash
  • #parse(body) ⇒ Object
  • #sitemaps ⇒ Array

Constructor Details

#initialize(body, user_agent) ⇒ ParsedRobots

Returns a new instance of ParsedRobots


# File 'lib/site_mapper/robots.rb', line 9

def initialize(body, user_agent)
  @other     = {}
  @disallows = {}
  @allows    = {}
  @delays    = {}
  @sitemaps  = []
  parse(body)
end
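
A minimal usage sketch (the robots.txt body below is illustrative, not taken from any real site):

body   = "User-agent: *\nDisallow: /private\n"
robots = SiteMapper::Robots::ParsedRobots.new(body, 'SiteMapper')
robots.allowed?(URI.parse('http://example.com/private/page'), 'SiteMapper') # => false
robots.allowed?(URI.parse('http://example.com/index.html'), 'SiteMapper')   # => true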

Instance Method Details

#allowed?(uri, user_agent) ⇒ Boolean

Returns true if uri is allowed to be crawled

Examples:

Check if www.google.com/googlesites is allowed to be crawled

uri = URI.parse('http://www.google.com/googlesites')
robots.allowed?(uri, 'SiteMapper')
# => false (as of 2014-10-22)

Parameters:

  • uri (URI)

    to be checked

  • user_agent (String)

    to be checked

Returns:

  • (Boolean)

    true if uri is allowed to be crawled


# File 'lib/site_mapper/robots.rb', line 58

def allowed?(uri, user_agent)
  return true unless @parsed
  allowed    = true
  path       = uri.request_uri
  user_agent = user_agent.downcase

  # A matching Disallow rule blocks the path...
  @disallows.each do |key, value|
    if user_agent =~ key
      value.each do |rule|
        allowed = false if path =~ rule
      end
    end
  end

  # ...unless a matching Allow rule re-permits it.
  @allows.each do |key, value|
    next if allowed
    if user_agent =~ key
      value.each do |rule|
        allowed = true if path =~ rule
      end
    end
  end
  allowed
end
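
As the loops above show, an Allow rule overrides a matching Disallow. A sketch of that precedence (illustrative body; assumes the usual prefix matching produced by the private to_regex helper, which is not shown here):

body   = "User-agent: *\nDisallow: /docs\nAllow: /docs/public\n"
robots = SiteMapper::Robots::ParsedRobots.new(body, 'SiteMapper')
robots.allowed?(URI.parse('http://example.com/docs/private'), 'SiteMapper')    # => false
robots.allowed?(URI.parse('http://example.com/docs/public/faq'), 'SiteMapper') # => true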

#crawl_delay(user_agent) ⇒ Integer

Returns crawl delay for user_agent

Parameters:

  • user_agent (String)

Returns:

  • (Integer)

    crawl delay for user_agent


# File 'lib/site_mapper/robots.rb', line 91

def crawl_delay(user_agent)
  agent = user_agent.dup
  agent = to_regex(agent.downcase) if user_agent.is_a?(String)
  @delays[agent]
end
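
The delay is looked up by the pattern built from the given user agent, so it is only found when robots.txt names that agent directly. A sketch (illustrative body; Crawl-delay values are conventionally seconds):

body   = "User-agent: SiteMapper\nCrawl-delay: 10\n"
robots = SiteMapper::Robots::ParsedRobots.new(body, 'SiteMapper')
robots.crawl_delay('SiteMapper') # => 10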

#other_values ⇒ Hash

Returns key/value pairs from robots.txt

Returns:

  • (Hash)

    key/value pairs from robots.txt


# File 'lib/site_mapper/robots.rb', line 98

def other_values
  @other
end
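
Directives the parser does not recognize (anything other than User-agent, Allow, Disallow, Crawl-delay and Sitemap) end up here, keyed by their downcased name. A sketch using the common non-standard Host directive (illustrative body):

body   = "User-agent: *\nHost: example.com\n"
robots = SiteMapper::Robots::ParsedRobots.new(body, 'SiteMapper')
robots.other_values # => {"host"=>["example.com"]}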

#parse(body) ⇒ Object

Parse robots.txt body.

Parameters:

  • body (String)

    the robots.txt body


# File 'lib/site_mapper/robots.rb', line 20

def parse(body)
  agent = /.*/
  body  = body || "User-agent: *\nAllow: /\n"
  body  = body.downcase
  body.each_line do |line|
    next if line =~ /^\s*(#.*|$)/ # skip comments and blank lines
    arr   = line.split(':')
    key   = arr.shift
    value = arr.join(':').strip
    case key
    when 'user-agent'
      agent = to_regex(value)
    when 'allow'
      @allows[agent] ||= []
      @allows[agent] << to_regex(value)
    when 'disallow'
      @disallows[agent] ||= []
      @disallows[agent] << to_regex(value)
    when 'crawl-delay'
      @delays[agent] = value.to_i
    when 'sitemap'
      @sitemaps << value
    else
      @other[key] ||= []
      @other[key] << value
    end
  end
  @parsed = true
end
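
When the body is nil the parser falls back to a permissive default ("User-agent: *\nAllow: /\n"), so everything is allowed. A sketch of that fallback:

robots = SiteMapper::Robots::ParsedRobots.new(nil, 'SiteMapper')
robots.allowed?(URI.parse('http://example.com/any/path'), 'SiteMapper') # => true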

#sitemaps ⇒ Array

Returns sitemaps defined in robots.txt

Returns:

  • (Array)

    sitemaps defined in robots.txt


# File 'lib/site_mapper/robots.rb', line 103

def sitemaps
  @sitemaps
end
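
A sketch (illustrative body; note that the parser downcases the whole body before parsing, so recorded sitemap URLs come back lowercased):

body   = "User-agent: *\nSitemap: http://example.com/sitemap.xml\n"
robots = SiteMapper::Robots::ParsedRobots.new(body, 'SiteMapper')
robots.sitemaps # => ["http://example.com/sitemap.xml"]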