Method: Cobweb#get

Defined in:
lib/cobweb.rb

#get(url, options = @options) ⇒ Object

Performs an HTTP GET request to the specified url, applying the options supplied.



140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
# File 'lib/cobweb.rb', line 140

# Performs an HTTP GET request to the specified url applying the options supplied.
#
# @param url [String] the url to retrieve; the fragment is stripped before the request
# @param options [Hash] per-request options; defaults to the instance-wide @options.
#   Recognised keys include :redirect_limit, :cookies, :user_agent and :crawl_id.
# @return [Hash] a content hash (:url, :status_code, :mime_type, :body, :headers,
#   :links, :response_time, ...) — served from the Redis cache when caching is enabled
# @raise [RuntimeError] if url is nil
# @raise [RedirectError] when the redirect limit is exhausted (re-raised only when
#   @options[:raise_exceptions] is set; otherwise a blank error content hash is returned)
def get(url, options = @options)
  raise "url cannot be nil" if url.nil?
  # normalize and strip the fragment so equivalent urls share one cache key
  uri = Addressable::URI.parse(url)
  uri.normalize!
  uri.fragment=nil
  url = uri.to_s

  # get the unique id for this request
  unique_id = Digest::SHA1.hexdigest(url.to_s)
  if options.has_key?(:redirect_limit) and !options[:redirect_limit].nil?
    redirect_limit = options[:redirect_limit].to_i
  else
    redirect_limit = 10
  end

  # connect to redis — a crawl-scoped namespace when a :crawl_id is supplied,
  # otherwise the version-wide namespace; full_redis always points at the latter
  if options.has_key? :crawl_id
    redis = Redis::Namespace.new("cobweb-#{Cobweb.version}-#{options[:crawl_id]}", :redis => RedisConnection.new(@options[:redis_options]))
  else
    redis = Redis::Namespace.new("cobweb-#{Cobweb.version}", :redis => RedisConnection.new(@options[:redis_options]))
  end
  full_redis = Redis::Namespace.new("cobweb-#{Cobweb.version}", :redis => RedisConnection.new(@options[:redis_options]))

  content = {:base_url => url}

  # check if it has already been cached
  if @options[:cache] && ((@options[:cache_type] == :crawl_based && redis.get(unique_id)) || (@options[:cache_type] == :full && full_redis.get(unique_id)))
    # NOTE(review): Marshal.load is only acceptable here because the cache is
    # populated exclusively by this method below — never point it at untrusted data
    if @options[:cache_type] == :crawl_based
      puts "Cache hit in crawl for #{url}" unless @options[:quiet]
      content = HashUtil.deep_symbolize_keys(Marshal.load(redis.get(unique_id)))
    else
      puts "Cache hit for #{url}" unless @options[:quiet]
      content = HashUtil.deep_symbolize_keys(Marshal.load(full_redis.get(unique_id)))
    end
  else
    # retrieve data
    #unless @http && @http.address == uri.host && @http.port == uri.inferred_port
      puts "Creating connection to #{uri.host}..." if @options[:debug]
      @http = Net::HTTP.new(uri.host, uri.inferred_port, @options[:proxy_addr], @options[:proxy_port])
    #end
    if uri.scheme == "https"
      @http.use_ssl = true
      # NOTE: certificate verification is disabled for all https requests
      @http.verify_mode = OpenSSL::SSL::VERIFY_NONE
    end

    request_time = Time.now.to_f
    @http.read_timeout = @options[:timeout].to_i
    @http.open_timeout = @options[:timeout].to_i
    begin
      puts "Retrieving #{uri}... " unless @options[:quiet]
      request_options={}
      request_options['Cookie']= options[:cookies].map{|k,v| [k,v].join("=") }.join("&") if options[:cookies]
      request_options['User-Agent']= options[:user_agent] if options.has_key?(:user_agent)

      request = Net::HTTP::Get.new uri.request_uri, request_options
      # authentication
      if @options[:authentication] == "basic"
        raise ":username and :password are required if using basic authentication" unless @options[:username] && @options[:password]
        request.basic_auth @options[:username], @options[:password]
      end
      if @options[:range]
        request.set_range(@options[:range])
      end

      response = @http.request request

      # merge cookies set by the response with any cookies the caller supplied
      # (caller-supplied cookies win)
      cookies = Hash[get_cookies(response).to_s.split("; ").map{|s| [CGI.unescape(s.split("=")[0]), s.split("=")[1]]}].merge(options[:cookies] || {})

      if @options[:follow_redirects] and response.code.to_i >= 300 and response.code.to_i < 400

        # get location to redirect to
        uri = UriHelper.join_no_fragment(uri, response['location'])
        puts "Following Redirect to #{uri}... " unless @options[:quiet]

        # decrement redirect limit
        redirect_limit = redirect_limit - 1

        # FIX: was `== 0` — a caller-supplied :redirect_limit of 0 or a negative
        # value would be decremented past zero and never trip the guard, letting
        # the recursion below follow redirects without bound
        raise RedirectError, "Redirect Limit reached" if redirect_limit <= 0

        # get the content from redirect location (recursive call; cookies carried forward)
        content = get(uri, options.merge(:redirect_limit => redirect_limit, :cookies => cookies))

        # record the redirect chain, original url first
        content[:redirect_through] = [uri.to_s] if content[:redirect_through].nil?
        content[:redirect_through].insert(0, url)
        content[:url] = content[:redirect_through].last

        content[:response_time] = Time.now.to_f - request_time
      else
        content[:response_time] = Time.now.to_f - request_time

        puts "Retrieved." unless @options[:quiet]

        # create the content container
        content[:url] = uri.to_s
        content[:status_code] = response.code.to_i
        content[:cookies] = cookies
        content[:mime_type] = ""
        content[:mime_type] = response.content_type.split(";")[0].strip unless response.content_type.nil?
        if !response["Content-Type"].nil? && response["Content-Type"].include?(";")
          # pull the charset out of e.g. "text/html; charset=utf-8"
          # (the duplicated trailing condition from the original was redundant —
          # it repeated the enclosing `if` verbatim)
          charset = response["Content-Type"][response["Content-Type"].index(";")+2..-1]
          charset = charset[charset.index("=")+1..-1] if charset and charset.include?("=")
          content[:character_set] = charset
        end
        content[:length] = response.content_length
        content[:text_content] = text_content?(content[:mime_type])
        if text_content?(content[:mime_type])
          # text bodies are stored raw (gunzipped if the server compressed them)
          if response["Content-Encoding"]=="gzip"
            content[:body] = Zlib::GzipReader.new(StringIO.new(response.body)).read
          else
            content[:body] = response.body
          end
        else
          # binary bodies are base64-encoded so the hash marshals/serializes cleanly
          content[:body] = Base64.encode64(response.body)
        end
        content[:location] = response["location"]
        content[:headers] = HashUtil.deep_symbolize_keys(response.to_hash)
        # parse data for links
        link_parser = ContentLinkParser.new(content[:url], content[:body])
        content[:links] = link_parser.link_data

      end
      # add content to cache if required (@options[:cache] doubles as the TTL in seconds)
      if @options[:cache]
        if @options[:cache_type] == :crawl_based
          redis.set(unique_id, Marshal.dump(content))
          redis.expire unique_id, @options[:cache].to_i
        else
          full_redis.set(unique_id, Marshal.dump(content))
          full_redis.expire unique_id, @options[:cache].to_i
        end
      end
    rescue RedirectError => e
      if @options[:raise_exceptions]
        puts "Re-Raising error #{e.message} on #{uri.to_s}"
        raise e
      end
      puts "ERROR RedirectError: #{e.message}"

      ## generate a blank content
      # NOTE(review): mime type says dnslookup but this is a redirect failure —
      # confirm downstream consumers before changing the label
      content = {}
      content[:url] = uri.to_s
      content[:response_time] = Time.now.to_f - request_time
      content[:status_code] = 0
      content[:length] = 0
      content[:body] = ""
      content[:error] = e.message
      content[:mime_type] = "error/dnslookup"
      content[:headers] = {}
      content[:links] = {}

    rescue SocketError => e
      raise e if @options[:raise_exceptions]
      puts "ERROR SocketError: #{e.message}"

      ## generate a blank content
      content = {}
      content[:url] = uri.to_s
      content[:response_time] = Time.now.to_f - request_time
      content[:status_code] = 0
      content[:length] = 0
      content[:body] = ""
      content[:error] = e.message
      content[:mime_type] = "error/dnslookup"
      content[:headers] = {}
      content[:links] = {}

    rescue Timeout::Error => e
      raise e if @options[:raise_exceptions]
      puts "ERROR Timeout::Error: #{e.message}"

      ## generate a blank content
      content = {}
      content[:url] = uri.to_s
      content[:response_time] = Time.now.to_f - request_time
      content[:status_code] = 0
      content[:length] = 0
      content[:body] = ""
      content[:error] = e.message
      content[:mime_type] = "error/serverdown"
      content[:headers] = {}
      content[:links] = {}
    end
    content
  end
end