Class: Mechanize::HTTP::Agent
- Inherits:
-
Object
- Object
- Mechanize::HTTP::Agent
- Defined in:
- lib/monkey-patches/mechanize-patches.rb
Overview
This patch prevents Mechanize from raising a Mechanize::ResponseCodeError when the HTTP response code is 503 or 404. This lets Capybara continue the journey.
Instance Method Summary collapse
Instance Method Details
#fetch(uri, method = :get, headers = {}, params = [], referer = current_page, redirects = 0) ⇒ Object
Lines 63–166 |
# File 'lib/monkey-patches/mechanize-patches.rb', line 63

# Performs an HTTP request for +uri+ and returns the parsed result
# (a Mechanize::Page, Mechanize::File, or similar).
#
# This is a monkey-patched copy of Mechanize::HTTP::Agent#fetch. The only
# intentional change from upstream is the "BEGIN PATCH" section at the
# bottom: instead of raising Mechanize::ResponseCodeError for every
# unhandled response, pages whose code is "503" or "404" are returned
# as-is so Capybara can continue the journey.
#
# NOTE(review): several identifiers (request_cookies, response_cookies,
# response_follow_meta_refresh, the Net::HTTPUnauthorized case) were lost
# when this page was scraped; they are restored here from the upstream
# Mechanize source — confirm against the gem version actually in use.
#
# uri       - URI (or String) to fetch.
# method    - HTTP verb as a Symbol (:get, :post, ...).
# headers   - Hash of extra request headers.
# params    - Request parameters (query string or form data).
# referer   - Page used for the Referer header; defaults to current_page.
# redirects - Redirect-depth counter used for loop protection.
def fetch uri, method = :get, headers = {}, params = [],
          referer = current_page, redirects = 0
  referer_uri = referer ? referer.uri : nil

  uri = resolve uri, referer
  uri, params = resolve_parameters uri, method, params

  request    = http_request uri, method, params
  connection = connection_for uri

  # Decorate the outgoing request exactly as upstream does.
  request_auth             request, uri
  disable_keep_alive       request
  enable_gzip              request
  request_language_charset request
  request_cookies          request, uri
  request_host             request, uri
  request_referer          request, uri, referer_uri
  request_user_agent       request
  request_add_headers      request, headers
  pre_connect              request

  # Consult robots.txt
  if robots && uri.is_a?(URI::HTTP)
    robots_allowed?(uri) or raise Mechanize::RobotsDisallowedError.new(uri)
  end

  # Add If-Modified-Since if page is in history
  if (page = visited_page(uri)) and page.response['Last-Modified']
    request['If-Modified-Since'] = page.response['Last-Modified']
  end if @conditional_requests

  # Specify timeouts if given
  connection.open_timeout = @open_timeout if @open_timeout
  connection.read_timeout = @read_timeout if @read_timeout

  request_log request

  response_body_io = nil

  # Send the request
  begin
    response = connection.request(uri, request) { |res|
      response_log res
      response_body_io = response_read res, request, uri
      res
    }
  rescue Mechanize::ChunkedTerminationError => e
    # Optionally tolerate servers that terminate chunked encoding badly.
    raise unless @ignore_bad_chunking
    response         = e.response
    response_body_io = e.body_io
  end

  hook_content_encoding response, uri, response_body_io

  response_body_io = response_content_encoding response, response_body_io if
    request.response_body_permitted?

  post_connect uri, response, response_body_io

  page = response_parse response, response_body_io, uri

  response_cookies response, uri, page

  # Follow <meta http-equiv="refresh"> redirects when enabled.
  meta = response_follow_meta_refresh response, uri, page, redirects
  return meta if meta

  case response
  when Net::HTTPSuccess
    if robots && page.is_a?(Mechanize::Page)
      page.parser.noindex? and
        raise Mechanize::RobotsDisallowedError.new(uri)
    end
    page
  when Mechanize::FileResponse
    page
  when Net::HTTPNotModified
    log.debug("Got cached page") if log
    visited_page(uri) || page
  when Net::HTTPRedirection
    response_redirect response, method, page, redirects, headers, referer
  when Net::HTTPUnauthorized
    response_authenticate(response, page, uri, request, headers, params, referer)
  else
    # BEGIN PATCH
    # Swallow 503/404 so the caller receives the error page instead of an
    # exception; every other unhandled status still raises as upstream does.
    if page.code == "503" or page.code == "404"
      page
    else
      raise Mechanize::ResponseCodeError.new(page, 'unhandled response')
    end
    # END PATCH
  end
end