Method: Cobweb#start

Defined in:
lib/cobweb.rb

#start(base_url) ⇒ Object

This method starts the crawl: it initialises per-crawl state in Redis and enqueues the base_url on the configured queue system (Resque or Sidekiq), returning the request hash for the crawl.



# File 'lib/cobweb.rb', line 70

def start(base_url)
  raise ":base_url is required" unless base_url
  request = {
    :crawl_id => Digest::SHA1.hexdigest("#{Time.now.to_i}.#{Time.now.usec}"),
    :url => base_url
  }

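  # default internal_urls to wildcard patterns for the base_url's host when none are supplied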
  if @options[:internal_urls].nil? || @options[:internal_urls].empty?
    uri = Addressable::URI.parse(base_url)
    @options[:internal_urls] = []
    @options[:internal_urls] << [uri.scheme, "://", uri.host, "/*"].join
    @options[:internal_urls] << [uri.scheme, "://", uri.host, ":", uri.inferred_port, "/*"].join
  end

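  # merge the options into the request and initialise per-crawl state in a Redis namespace scoped to this crawl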
  request.merge!(@options)
  @redis = Redis::Namespace.new("cobweb-#{Cobweb.version}-#{request[:crawl_id]}", :redis => RedisConnection.new(request[:redis_options]))
  @redis.set("original_base_url", base_url)
  @redis.hset "statistics", "queued_at", DateTime.now
  @redis.set("crawl-counter", 0)
  @redis.set("queue-counter", 1)

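  # pre-populate the queued set with any seed urls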
  @options[:seed_urls].each{|link| @redis.sadd "queued", link }

  @stats = Stats.new(request)
  @stats.start_crawl(request)

  # add internal_urls into redis
  @options[:internal_urls].each{|url| @redis.sadd("internal_urls", url)}
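
  # enqueue the initial crawl job on the configured queue system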
  if @options[:queue_system] == :resque
    Resque.enqueue(CrawlJob, request)
  elsif @options[:queue_system] == :sidekiq
    CrawlWorker.perform_async(request)
  else
    raise "Unknown queue system: #{content_request[:queue_system]}"
  end

  request
end
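
For context, a minimal usage sketch follows. It assumes Cobweb.new accepts an options hash (as in the gem's README) and that Resque is already configured; :crawl_limit is an illustrative option, not something required by #start.

require 'cobweb'

# Illustrative options; :queue_system matches the branch above, :crawl_limit is an assumption.
crawler = Cobweb.new(
  :queue_system => :resque,
  :crawl_limit  => 100
)

# start returns the request hash, including the generated :crawl_id and the merged options
request = crawler.start("http://example.com")
puts request[:crawl_id]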