Class: HTML::Proofer
- Inherits: Object
- Includes: Yell::Loggable
- Defined in: lib/html/proofer.rb, lib/html/proofer/checks.rb, lib/html/proofer/checkable.rb
Defined Under Namespace
Classes: Checkable, Checks
Class Method Summary
- .create_nokogiri(path) ⇒ Object
Instance Method Summary
- #add_failed_tests(filenames, desc, status = nil) ⇒ Object
- #external_link_checker(external_urls) ⇒ Object
  The hypothesis is that Proofer runs much faster if we pull out all the external URLs and run the checks at the end.
- #failed_tests ⇒ Object
- #files ⇒ Object
- #get_checks ⇒ Object
- #hydra ⇒ Object
- #initialize(src, opts = {}) ⇒ Proofer constructor
  A new instance of Proofer.
- #log_level ⇒ Object
- #queue_request(method, href, filenames) ⇒ Object
- #response_handler(response, filenames) ⇒ Object
- #run ⇒ Object
Constructor Details
#initialize(src, opts = {}) ⇒ Proofer
Returns a new instance of Proofer.
# File 'lib/html/proofer.rb', line 27

def initialize(src, opts={})
  @src = src

  @proofer_opts = {
    :ext => ".html",
    :favicon => false,
    :href_swap => [],
    :href_ignore => [],
    :alt_ignore => [],
    :disable_external => false,
    :verbose => false,
    :only_4xx => false,
    :directory_index_file => "index.html"
  }

  @typhoeus_opts = {
    :followlocation => true
  }

  # Typhoeus won't let you pass in any non-Typhoeus option; if the option is not
  # a proofer_opt, it must be for Typhoeus
  opts.keys.each do |key|
    if @proofer_opts[key].nil?
      @typhoeus_opts[key] = opts[key]
    end
  end

  @options = @proofer_opts.merge(@typhoeus_opts).merge(opts)

  @failed_tests = []

  Yell.new({ :format => false, :name => "HTML::Proofer", :level => "gte.#{log_level}" }) do |l|
    l.adapter :stdout, level: [:debug, :info, :warn]
    l.adapter :stderr, level: [:error, :fatal]
  end
end
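As a usage sketch (not part of the generated API listing): the constructor takes a source path plus an options hash, and any key that is not a Proofer option falls through to Typhoeus. The specific option values below are illustrative assumptions.

require "html/proofer"

# :ext and :href_ignore are Proofer options; :ssl_verifypeer is assumed to be
# handed to Typhoeus because it is not a proofer_opt.
proofer = HTML::Proofer.new("./out", {
  :ext => ".html",
  :href_ignore => ["#"],
  :ssl_verifypeer => false
})
proofer.run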
Class Method Details
.create_nokogiri(path) ⇒ Object
# File 'lib/html/proofer.rb', line 172

def self.create_nokogiri(path)
  content = File.open(path).read
  Nokogiri::HTML(content)
end
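A small, hedged example of calling this class method on its own; the path and the Nokogiri queries are assumptions for illustration.

# Parse one rendered page and list the href values it contains.
doc = HTML::Proofer.create_nokogiri("./out/index.html")
puts doc.css("a").map { |link| link["href"] }.compact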
Instance Method Details
#add_failed_tests(filenames, desc, status = nil) ⇒ Object
# File 'lib/html/proofer.rb', line 187

def add_failed_tests(filenames, desc, status = nil)
  if filenames.nil?
    @failed_tests << Checks::Issue.new("", desc, status)
  elsif
    filenames.each { |f| @failed_tests << Checks::Issue.new(f, desc, status) }
  end
end
#external_link_checker(external_urls) ⇒ Object
The hypothesis is that Proofer runs much faster if we pull out all the external URLs and run the checks at the end. Otherwise, we're halting the consuming process for every file. In addition, sorting the list lets libcurl keep connections to the same hosts alive. Finally, we make a HEAD request rather than GETing all the contents.
# File 'lib/html/proofer.rb', line 108

def external_link_checker(external_urls)
  external_urls = Hash[external_urls.sort]

  logger.info HTML::colorize :yellow, "Checking #{external_urls.length} external links..."

  Ethon.logger = logger # log from Typhoeus/Ethon

  external_urls.each_pair do |href, filenames|
    queue_request(:head, href, filenames)
  end

  logger.debug HTML::colorize :yellow, "Running requests for all #{hydra.queued_requests.size} external URLs..."
  hydra.run
end
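Based on how #run assembles its argument, this method expects a hash mapping each external URL to the filenames that reference it (or to nil when the source is a plain list of links). The URLs and paths below are made up, and proofer is assumed to be an instance built as in the constructor sketch above.

external_urls = {
  "https://example.com/docs"    => ["./out/index.html", "./out/about.html"],
  "https://example.com/missing" => ["./out/index.html"]
}
proofer.external_link_checker(external_urls) # queues HEAD requests, then runs hydra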
#failed_tests ⇒ Object
# File 'lib/html/proofer.rb', line 197

def failed_tests
  return [] if @failed_tests.empty?
  result = []
  @failed_tests.each { |f| result << f.to_s }
  result
end
#files ⇒ Object
# File 'lib/html/proofer.rb', line 161

def files
  if File.directory? @src
    pattern = File.join @src, "**", "*#{@options[:ext]}"
    Dir.glob(pattern).select { |fn| File.file? fn }
  elsif File.extname(@src) == @options[:ext]
    [@src]
  else
    []
  end
end
#get_checks ⇒ Object
# File 'lib/html/proofer.rb', line 177

def get_checks
  checks = HTML::Proofer::Checks::Check.subclasses.map { |c| c.name }
  checks.delete("Favicons") unless @options[:favicon]
  checks
end
#hydra ⇒ Object
# File 'lib/html/proofer.rb', line 157

def hydra
  @hydra ||= Typhoeus::Hydra.new
end
#log_level ⇒ Object
# File 'lib/html/proofer.rb', line 183

def log_level
  @options[:verbose] ? :debug : :info
end
#queue_request(method, href, filenames) ⇒ Object
# File 'lib/html/proofer.rb', line 122

def queue_request(method, href, filenames)
  request = Typhoeus::Request.new(href, @typhoeus_opts.merge({:method => method}))
  request.on_complete { |response| response_handler(response, filenames) }
  hydra.queue request
end
#response_handler(response, filenames) ⇒ Object
# File 'lib/html/proofer.rb', line 128

def response_handler(response, filenames)
  href = response.options[:effective_url]
  method = response.request.options[:method]
  response_code = response.code

  debug_msg = "Received a #{response_code} for #{href}"
  debug_msg << " in #{filenames.join(' ')}" unless filenames.nil?
  logger.debug debug_msg

  if response_code.between?(200, 299)
    # continue with no op
  elsif response.timed_out?
    return if @options[:only_4xx]
    add_failed_tests filenames, "External link #{href} failed: got a time out", response_code
  elsif (response_code == 405 || response_code == 420 || response_code == 503) && method == :head
    # 420s usually come from rate limiting; let's ignore the query and try just the path with a GET
    uri = URI(href)
    queue_request(:get, uri.scheme + "://" + uri.host + uri.path, filenames)
  # just be lazy; perform an explicit get request. some servers are apparently not configured to
  # intercept HTTP HEAD
  elsif method == :head
    queue_request(:get, href, filenames)
  else
    return if @options[:only_4xx] && !response_code.between?(400, 499)
    # Received a non-successful http response.
    add_failed_tests filenames, "External link #{href} failed: #{response_code} #{response.return_message}", response_code
  end
end
#run ⇒ Object
# File 'lib/html/proofer.rb', line 64

def run
  unless @src.is_a? Array
    total_files = 0
    external_urls = {}

    logger.info HTML::colorize :white, "Running #{get_checks} checks on #{@src} on *#{@options[:ext]}... \n\n"

    files.each do |path|
      total_files += 1
      html = HTML::Proofer.create_nokogiri(path)

      get_checks.each do |klass|
        logger.debug HTML::colorize :blue, "Checking #{klass.to_s.downcase} on #{path} ..."
        check = Object.const_get(klass).new(@src, path, html, @options)
        check.run
        external_urls.merge!(check.external_urls)
        @failed_tests.concat(check.issues) if check.issues.length > 0
      end
    end

    external_link_checker(external_urls) unless @options[:disable_external]

    logger.info HTML::colorize :green, "Ran on #{total_files} files!\n\n"
  else
    external_urls = Hash[*@src.map { |s| [s, nil] }.flatten]
    external_link_checker(external_urls) unless @options[:disable_external]
  end

  if @failed_tests.empty?
    logger.info HTML::colorize :green, "HTML-Proofer finished successfully."
  else
    @failed_tests.sort_by(&:path).each do |issue|
      logger.error HTML::colorize :red, issue.to_s
    end

    raise HTML::colorize :red, "HTML-Proofer found #{@failed_tests.length} failures!"
  end
end
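A hedged sketch of the two modes the branch on @src.is_a? Array implies: a directory source walks files and runs every check, while an array source skips the file checks and only verifies the given links. Paths and URLs are illustrative; both calls raise if any check fails.

# Check a rendered site directory (all checks, then external links).
HTML::Proofer.new("./out").run

# Only verify a known list of external links.
HTML::Proofer.new(["https://example.com", "https://example.org/page"]).run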