Class: Cobweb

Inherits: Object

Defined in: lib/cobweb.rb

Overview

The Cobweb class is used to perform GET and HEAD requests. You can use it on its own, without the crawler, if you wish.

Class Method Summary

Instance Method Summary

Constructor Details

#initialize(options = {}) ⇒ Cobweb

See the README for more information on the available options.



# File 'lib/cobweb.rb', line 32

def initialize(options = {})
  @options = options
  default_use_encoding_safe_process_job_to  false
  default_follow_redirects_to               true
  default_redirect_limit_to                 10
  default_queue_system_to                   :resque
  if @options[:queue_system] == :resque
    default_processing_queue_to               "CobwebProcessJob"
    default_crawl_finished_queue_to           "CobwebFinishedJob"
  else
    default_processing_queue_to               "CrawlProcessWorker"
    default_crawl_finished_queue_to           "CrawlFinishedWorker"
  end
  default_quiet_to                          true
  default_debug_to                          false
  default_cache_to                          300
  default_cache_type_to                     :crawl_based # other option is :full
  default_timeout_to                        10
  default_redis_options_to                  Hash.new
  default_internal_urls_to                  []
  default_external_urls_to                  []
  default_seed_urls_to                  []
  default_first_page_redirect_internal_to   true
  default_text_mime_types_to                ["text/*", "application/xhtml+xml"]
  default_obey_robots_to                    false
  default_user_agent_to                     "cobweb/#{Cobweb.version} (ruby/#{RUBY_VERSION} nokogiri/#{Nokogiri::VERSION})"
  default_valid_mime_types_to                ["*/*"]
  default_raise_exceptions_to               false
  default_store_inbound_links_to            false
  default_proxy_addr_to                     nil
  default_proxy_port_to                     nil
  default_additional_tags_to                nil
  default_treat_https_as_http_to            true


end
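
As a usage sketch (the option values here are illustrative, not required; any option you omit falls back to the defaults set above):

  require 'cobweb'

  # Any option not supplied falls back to the defaults applied in the constructor.
  cobweb = Cobweb.new(
    :cache            => 600,               # cache responses for 10 minutes
    :follow_redirects => true,
    :redirect_limit   => 5,
    :timeout          => 15,
    :user_agent       => "my-crawler/1.0",
    :raise_exceptions => true
  )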

Dynamic Method Handling

This class handles dynamic methods through the method_missing method.

#method_missing(method_sym, *arguments, &block) ⇒ Object

Used for setting default options: a call of the form default_<option>_to(value) sets @options[:<option>] unless that option was supplied explicitly.



# File 'lib/cobweb.rb', line 22

def method_missing(method_sym, *arguments, &block)
  if method_sym.to_s =~ /^default_(.*)_to$/
    tag_name = method_sym.to_s.split("_")[1..-2].join("_").to_sym
    @options[tag_name] = arguments[0] unless @options.has_key?(tag_name)
  else
    super
  end
end
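
As an illustration of how these calls resolve (a sketch of the internal behaviour, not public API):

  # default_cache_to 300 is intercepted by method_missing: the option name
  # between "default_" and "_to" becomes the key, so it is equivalent to
  #   @options[:cache] = 300 unless @options.has_key?(:cache)

  Cobweb.new(:cache => 60)   # internally @options[:cache] stays 60
  Cobweb.new                 # internally @options[:cache] defaults to 300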

Class Method Details

.escape_pattern_for_regex(pattern, options = {}) ⇒ Object

Escapes characters that have special meaning in regular expressions and converts the * wildcard into a regex wildcard expression. Unless :treat_https_as_http is disabled, http: is also rewritten so the pattern matches both http and https.



# File 'lib/cobweb.rb', line 474

def self.escape_pattern_for_regex(pattern, options={})
  pattern = pattern.gsub(".", "\\.")
  pattern = pattern.gsub("?", "\\?")
  pattern = pattern.gsub("+", "\\\\+")
  pattern = pattern.gsub("*", ".*?")
  if options[:treat_https_as_http] || !options.has_key?(:treat_https_as_http)
    pattern = pattern.gsub("http:", "https?:")
  end
  pattern
end
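
For illustration, the substitutions above transform a URL pattern like this (return values shown as irb would print them):

  Cobweb.escape_pattern_for_regex("http://example.com/blog/*")
  # => "https?://example\\.com/blog/.*?"

  Cobweb.escape_pattern_for_regex("http://example.com/blog/*", :treat_https_as_http => false)
  # => "http://example\\.com/blog/.*?"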

.version ⇒ Object

Retrieves the current version.



# File 'lib/cobweb.rb', line 17

def self.version
  CobwebVersion.version
end

Instance Method Details

#clear_cache ⇒ Object



# File 'lib/cobweb.rb', line 485

def clear_cache

end

#get(url, options = @options) ⇒ Object

Performs an HTTP GET request to the specified URL, applying the supplied options.



# File 'lib/cobweb.rb', line 140

def get(url, options = @options)
  raise "url cannot be nil" if url.nil?
  uri = Addressable::URI.parse(url)
  uri.normalize!
  uri.fragment=nil
  url = uri.to_s

  # get the unique id for this request
  unique_id = Digest::SHA1.hexdigest(url.to_s)
  if options.has_key?(:redirect_limit) and !options[:redirect_limit].nil?
    redirect_limit = options[:redirect_limit].to_i
  else
    redirect_limit = 10
  end

  # connect to redis
  if options.has_key? :crawl_id
    redis = Redis::Namespace.new("cobweb-#{Cobweb.version}-#{options[:crawl_id]}", :redis => RedisConnection.new(@options[:redis_options]))
  else
    redis = Redis::Namespace.new("cobweb-#{Cobweb.version}", :redis => RedisConnection.new(@options[:redis_options]))
  end
  full_redis = Redis::Namespace.new("cobweb-#{Cobweb.version}", :redis => RedisConnection.new(@options[:redis_options]))

  content = {:base_url => url}

  # check if it has already been cached
  if @options[:cache] && ((@options[:cache_type] == :crawl_based && redis.get(unique_id)) || (@options[:cache_type] == :full && full_redis.get(unique_id)))
    if @options[:cache_type] == :crawl_based
      puts "Cache hit in crawl for #{url}" unless @options[:quiet]
      content = HashUtil.deep_symbolize_keys(Marshal.load(redis.get(unique_id)))
    else
      puts "Cache hit for #{url}" unless @options[:quiet]
      content = HashUtil.deep_symbolize_keys(Marshal.load(full_redis.get(unique_id)))
    end
  else
    # retrieve data
    #unless @http && @http.address == uri.host && @http.port == uri.inferred_port
      puts "Creating connection to #{uri.host}..." if @options[:debug]
      @http = Net::HTTP.new(uri.host, uri.inferred_port, @options[:proxy_addr], @options[:proxy_port])
    #end
    if uri.scheme == "https"
      @http.use_ssl = true
      @http.verify_mode = OpenSSL::SSL::VERIFY_NONE
    end

    request_time = Time.now.to_f
    @http.read_timeout = @options[:timeout].to_i
    @http.open_timeout = @options[:timeout].to_i
    begin
      puts "Retrieving #{uri}... " unless @options[:quiet]
      request_options={}
      request_options['Cookie']= options[:cookies].map{|k,v| [k,v].join("=") }.join("&") if options[:cookies]
      request_options['User-Agent']= options[:user_agent] if options.has_key?(:user_agent)

      request = Net::HTTP::Get.new uri.request_uri, request_options
      # authentication
      if @options[:authentication] == "basic"
        raise ":username and :password are required if using basic authentication" unless @options[:username] && @options[:password]
        request.basic_auth @options[:username], @options[:password]
      end
      if @options[:range]
        request.set_range(@options[:range])
      end

      response = @http.request request

      cookies = Hash[get_cookies(response).to_s.split("; ").map{|s| [CGI.unescape(s.split("=")[0]), s.split("=")[1]]}].merge(options[:cookies] || {})

      if @options[:follow_redirects] and response.code.to_i >= 300 and response.code.to_i < 400

        # get location to redirect to
        uri = UriHelper.join_no_fragment(uri, response['location'])
        puts "Following Redirect to #{uri}... " unless @options[:quiet]

        # decrement redirect limit
        redirect_limit = redirect_limit - 1

        raise RedirectError, "Redirect Limit reached" if redirect_limit == 0
        
        # get the content from redirect location
        content = get(uri, options.merge(:redirect_limit => redirect_limit, :cookies => cookies))

        content[:redirect_through] = [uri.to_s] if content[:redirect_through].nil?
        content[:redirect_through].insert(0, url)
        content[:url] = content[:redirect_through].last

        content[:response_time] = Time.now.to_f - request_time
      else
        content[:response_time] = Time.now.to_f - request_time

        puts "Retrieved." unless @options[:quiet]

        # create the content container
        content[:url] = uri.to_s
        content[:status_code] = response.code.to_i
        content[:cookies] = cookies
        content[:mime_type] = ""
        content[:mime_type] = response.content_type.split(";")[0].strip unless response.content_type.nil?
        if !response["Content-Type"].nil? && response["Content-Type"].include?(";")
          charset = response["Content-Type"][response["Content-Type"].index(";")+2..-1] if !response["Content-Type"].nil? and response["Content-Type"].include?(";")
          charset = charset[charset.index("=")+1..-1] if charset and charset.include?("=")
          content[:character_set] = charset
        end
        content[:length] = response.content_length
        content[:text_content] = text_content?(content[:mime_type])
        if text_content?(content[:mime_type])
          if response["Content-Encoding"]=="gzip"
            content[:body] = Zlib::GzipReader.new(StringIO.new(response.body)).read
          else
            content[:body] = response.body
          end
        else
          content[:body] = Base64.encode64(response.body)
        end
        content[:location] = response["location"]
        content[:headers] = HashUtil.deep_symbolize_keys(response.to_hash)
        # parse data for links
        link_parser = ContentLinkParser.new(content[:url], content[:body])
        content[:links] = link_parser.link_data

      end
      # add content to cache if required
      if @options[:cache]
        if @options[:cache_type] == :crawl_based
          redis.set(unique_id, Marshal.dump(content))
          redis.expire unique_id, @options[:cache].to_i
        else
          full_redis.set(unique_id, Marshal.dump(content))
          full_redis.expire unique_id, @options[:cache].to_i
        end
      end
    rescue RedirectError => e
      if @options[:raise_exceptions]
        puts "Re-Raising error #{e.message} on #{uri.to_s}"
        raise e
      end
      puts "ERROR RedirectError: #{e.message}"

      ## generate a blank content
      content = {}
      content[:url] = uri.to_s
      content[:response_time] = Time.now.to_f - request_time
      content[:status_code] = 0
      content[:length] = 0
      content[:body] = ""
      content[:error] = e.message
      content[:mime_type] = "error/dnslookup"
      content[:headers] = {}
      content[:links] = {}

    rescue SocketError => e
      raise e if @options[:raise_exceptions]
      puts "ERROR SocketError: #{e.message}"

      ## generate a blank content
      content = {}
      content[:url] = uri.to_s
      content[:response_time] = Time.now.to_f - request_time
      content[:status_code] = 0
      content[:length] = 0
      content[:body] = ""
      content[:error] = e.message
      content[:mime_type] = "error/dnslookup"
      content[:headers] = {}
      content[:links] = {}

    rescue Timeout::Error => e
      raise e if @options[:raise_exceptions]
      puts "ERROR Timeout::Error: #{e.message}"

      ## generate a blank content
      content = {}
      content[:url] = uri.to_s
      content[:response_time] = Time.now.to_f - request_time
      content[:status_code] = 0
      content[:length] = 0
      content[:body] = ""
      content[:error] = e.message
      content[:mime_type] = "error/serverdown"
      content[:headers] = {}
      content[:links] = {}
    end
    content
  end
end
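
A brief usage sketch (the URL is a placeholder; the keys shown are among those populated by the method above):

  cobweb = Cobweb.new(:cache => false, :quiet => true)
  content = cobweb.get("http://example.com/")

  content[:status_code]   # e.g. 200
  content[:mime_type]     # e.g. "text/html"
  content[:body]          # response body (Base64-encoded for non-text mime types)
  content[:links]         # link data extracted by ContentLinkParser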

#get_cookies(response) ⇒ Object

Returns the cookies from the response's Set-Cookie headers, joined into a single string (or nil if none were set)



# File 'lib/cobweb.rb', line 128

def get_cookies(response)
  all_cookies = response.get_fields('set-cookie')
  unless all_cookies.nil?
    cookies_array = Array.new
    all_cookies.each { |cookie|
      cookies_array.push(cookie.split('; ')[0])
    }
    cookies = cookies_array.join('; ')
  end
end
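
For example, given a response carrying the Set-Cookie headers "session=abc123; Path=/; HttpOnly" and "theme=dark; Path=/", the attribute parts after the first "; " are dropped and the result is:

  get_cookies(response)
  # => "session=abc123; theme=dark"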

#head(url, options = @options) ⇒ Object

Performs an HTTP HEAD request to the specified URL, applying the supplied options.



# File 'lib/cobweb.rb', line 327

def head(url, options = @options)
  raise "url cannot be nil" if url.nil?
  uri = Addressable::URI.parse(url)
  uri.normalize!
  uri.fragment=nil
  url = uri.to_s

  # get the unique id for this request
  unique_id = Digest::SHA1.hexdigest(url)
  if options.has_key?(:redirect_limit) and !options[:redirect_limit].nil?
    redirect_limit = options[:redirect_limit].to_i
  else
    redirect_limit = 10
  end

  # connect to redis
  if options.has_key? :crawl_id
    redis = Redis::Namespace.new("cobweb-#{Cobweb.version}-#{options[:crawl_id]}", :redis => RedisConnection.new(@options[:redis_options]))
  else
    redis = Redis::Namespace.new("cobweb-#{Cobweb.version}", :redis => RedisConnection.new(@options[:redis_options]))
  end

  content = {:base_url => url}

  # check if it has already been cached
  if @options[:cache] && redis.get("head-#{unique_id}")
    puts "Cache hit for #{url}" unless @options[:quiet]
    content = HashUtil.deep_symbolize_keys(Marshal.load(redis.get("head-#{unique_id}")))
  else
    # retrieve data
    unless @http && @http.address == uri.host && @http.port == uri.inferred_port
      puts "Creating connection to #{uri.host}..." unless @options[:quiet]
      @http = Net::HTTP.new(uri.host, uri.inferred_port, @options[:proxy_addr], @options[:proxy_port])
    end
    if uri.scheme == "https"
      @http.use_ssl = true
      @http.verify_mode = OpenSSL::SSL::VERIFY_NONE
    end

    request_time = Time.now.to_f
    @http.read_timeout = @options[:timeout].to_i
    @http.open_timeout = @options[:timeout].to_i
    begin
      print "Retrieving #{url }... " unless @options[:quiet]
      request_options={}
      if options[:cookies]
        request_options[ 'Cookie']= options[:cookies]
      end
      request = Net::HTTP::Head.new uri.request_uri, request_options
      # authentication
      if @options[:authentication] == "basic"
        raise ":username and :password are required if using basic authentication" unless @options[:username] && @options[:password]
        request.basic_auth @options[:username], @options[:password]
      end

      response = @http.request request

      if @options[:follow_redirects] and response.code.to_i >= 300 and response.code.to_i < 400
        puts "redirected... " unless @options[:quiet]

        uri = UriHelper.join_no_fragment(uri, response['location'])

        redirect_limit = redirect_limit - 1

        raise RedirectError, "Redirect Limit reached" if redirect_limit == 0
        cookies = get_cookies(response)

        content = head(uri, options.merge(:redirect_limit => redirect_limit, :cookies => cookies))
        content[:url] = uri.to_s
        content[:redirect_through] = [] if content[:redirect_through].nil?
        content[:redirect_through].insert(0, url)
      else
        content[:url] = uri.to_s
        content[:status_code] = response.code.to_i
        unless response.content_type.nil?
          content[:mime_type] = response.content_type.split(";")[0].strip
          if response["Content-Type"].include? ";"
            charset = response["Content-Type"][response["Content-Type"].index(";")+2..-1] if !response["Content-Type"].nil? and response["Content-Type"].include?(";")
            charset = charset[charset.index("=")+1..-1] if charset and charset.include?("=")
            content[:character_set] = charset
          end
        end

        # add content to cache if required
        if @options[:cache]
          puts "Stored in cache [head-#{unique_id}]" if @options[:debug]
          redis.set("head-#{unique_id}", Marshal.dump(content))
          redis.expire "head-#{unique_id}", @options[:cache].to_i
        else
          puts "Not storing in cache as cache disabled" if @options[:debug]
        end
      end
    rescue RedirectError => e
      raise e if @options[:raise_exceptions]
      puts "ERROR RedirectError: #{e.message}"

      ## generate a blank content
      content = {}
      content[:url] = uri.to_s
      content[:response_time] = Time.now.to_f - request_time
      content[:status_code] = 0
      content[:length] = 0
      content[:body] = ""
      content[:error] = e.message
      content[:mime_type] = "error/dnslookup"
      content[:headers] = {}
      content[:links] = {}

    rescue SocketError => e
      raise e if @options[:raise_exceptions]
      puts "ERROR SocketError: #{e.message}"

      ## generate a blank content
      content = {}
      content[:url] = uri.to_s
      content[:response_time] = Time.now.to_f - request_time
      content[:status_code] = 0
      content[:length] = 0
      content[:body] = ""
      content[:error] = e.message
      content[:mime_type] = "error/dnslookup"
      content[:headers] = {}
      content[:links] = {}

    rescue Timeout::Error => e
      raise e if @options[:raise_exceptions]
      puts "ERROR Timeout::Error: #{e.message}"

      ## generate a blank content
      content = {}
      content[:url] = uri.to_s
      content[:response_time] = Time.now.to_f - request_time
      content[:status_code] = 0
      content[:length] = 0
      content[:body] = ""
      content[:error] = e.message
      content[:mime_type] = "error/serverdown"
      content[:headers] = {}
      content[:links] = {}
    end

    content
  end

end
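
As with #get, a brief usage sketch (placeholder URL; the keys listed are those set by the method above):

  cobweb = Cobweb.new(:cache => false, :quiet => true)
  content = cobweb.head("http://example.com/report.pdf")

  content[:status_code]    # e.g. 200
  content[:mime_type]      # e.g. "application/pdf"
  content[:character_set]  # charset parsed from the Content-Type header, if present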

#start(base_url) ⇒ Object

This method starts the crawl and enqueues the base_url on the configured queue system (Resque or Sidekiq)



# File 'lib/cobweb.rb', line 70

def start(base_url)
  raise ":base_url is required" unless base_url
  request = {
    :crawl_id => Digest::SHA1.hexdigest("#{Time.now.to_i}.#{Time.now.usec}"),
    :url => base_url
  }

  if @options[:internal_urls].nil? || @options[:internal_urls].empty?
    uri = Addressable::URI.parse(base_url)
    @options[:internal_urls] = []

    if @options[:treat_https_as_http]
      @options[:internal_urls] << ["http://", uri.host, "/*"].join
      @options[:internal_urls] << ["http://", uri.host, ":", uri.inferred_port, "/*"].join
      @options[:internal_urls] << ["https://", uri.host, "/*"].join
      @options[:internal_urls] << ["https://", uri.host, ":", uri.inferred_port, "/*"].join
    else
      @options[:internal_urls] << [uri.scheme, "://", uri.host, "/*"].join
      @options[:internal_urls] << [uri.scheme, "://", uri.host, ":", uri.inferred_port, "/*"].join
    end

  end

  request.merge!(@options)
  @redis = Redis::Namespace.new("cobweb-#{Cobweb.version}-#{request[:crawl_id]}", :redis => RedisConnection.new(request[:redis_options]))
  @redis.set("original_base_url", base_url)
  @redis.hset "statistics", "queued_at", DateTime.now
  @redis.set("crawl-counter", 0)
  queue_counter = @options[:seed_urls].count + 1
  puts "queue_counter being init to #{queue_counter}"
  @redis.set("queue-counter", queue_counter)


  @options[:seed_urls].map{|link| @redis.sadd "queued", link }

  @stats = Stats.new(request)
  @stats.start_crawl(request)

  # add internal_urls into redis
  @options[:internal_urls].map{|url| @redis.sadd("internal_urls", url)}
  if @options[:queue_system] == :resque
    Resque.enqueue(CrawlJob, request)
  elsif @options[:queue_system] == :sidekiq
    CrawlWorker.perform_async(request)
    @options[:seed_urls].map{|url| 
      new_request = request.clone
      new_request[:url] = url
      CrawlWorker.perform_async(new_request)
    }

  else
    raise "Unknown queue system: #{content_request[:queue_system]}"
  end

  request
end
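
A brief usage sketch (the URL and option values are placeholders):

  cobweb = Cobweb.new(
    :queue_system => :resque,
    :cache        => 600
  )
  request = cobweb.start("http://example.com/")
  request[:crawl_id]   # SHA1 identifier generated for this crawl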