Class: DeadFinder::Runner
- Inherits: Object
- Defined in: lib/deadfinder.rb
Instance Method Summary
- #default_options ⇒ Object
  Returns the default options hash used by #run.
- #run(target, options) ⇒ Object
  Crawls the target page, extracts its links, and checks them concurrently for dead URLs.
- #worker(_id, jobs, results, target, options) ⇒ Object
  Worker loop that issues an HTTP request for each queued URL and records dead links.
Instance Method Details
#default_options ⇒ Object
# File 'lib/deadfinder.rb', line 31

def default_options
  {
    'concurrency' => 50,
    'timeout' => 10,
    'output' => '',
    'output_format' => 'json',
    'headers' => [],
    'worker_headers' => [],
    'silent' => true,
    'verbose' => false,
    'include30x' => false
  }
end
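A minimal usage sketch, assuming Runner is constructed with no arguments (as the class definition suggests); the override keys simply mirror the defaults above:

require 'deadfinder'

runner  = DeadFinder::Runner.new
options = runner.default_options.merge(
  'concurrency' => 20,    # use fewer workers than the default 50
  'timeout'     => 5,     # per-request read timeout, in seconds
  'include30x'  => true,  # also report 3xx responses as dead links
  'silent'      => false  # keep progress logging enabled
)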
#run(target, options) ⇒ Object
# File 'lib/deadfinder.rb', line 45

def run(target, options)
  Logger.set_silent if options['silent']

  headers = options['headers'].each_with_object({}) do |header, hash|
    kv = header.split(': ')
    hash[kv[0]] = kv[1]
  rescue StandardError
  end

  page = Nokogiri::HTML(URI.open(target, headers))
  links = extract_links(page)

  total_links_count = links.values.flatten.length
  link_info = links.map { |type, urls| "#{type}:#{urls.length}" if urls.length.positive? }
                   .compact.join(' / ')
  Logger.sub_info "Found #{total_links_count} URLs. [#{link_info}]" unless link_info.empty?
  Logger.sub_info 'Checking'

  jobs    = Channel.new(buffer: :buffered, capacity: 1000)
  results = Channel.new(buffer: :buffered, capacity: 1000)

  (1..options['concurrency']).each do |w|
    Channel.go { worker(w, jobs, results, target, options) }
  end

  links.values.flatten.uniq.each do |node|
    result = generate_url(node, target)
    jobs << result unless result.nil?
  end

  jobs_size = jobs.size
  jobs.close

  (1..jobs_size).each { ~results }
  Logger.sub_done 'Done'
rescue StandardError => e
  Logger.error "[#{e}] #{target}"
end
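Putting it together, a minimal sketch of driving #run directly (the target URL is a placeholder). Note that #run logs failures via Logger.error instead of raising, and dead links accumulate in DeadFinder.output keyed by target:

runner  = DeadFinder::Runner.new
options = runner.default_options

# Crawls the page, extracts links, and checks each unique URL with a
# pool of options['concurrency'] workers.
runner.run('https://www.example.com/', options)

puts DeadFinder.output['https://www.example.com/']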
#worker(_id, jobs, results, target, options) ⇒ Object
# File 'lib/deadfinder.rb', line 82

def worker(_id, jobs, results, target, options)
  jobs.each do |j|
    if CACHE_SET[j]
      Logger.found "[404 Not Found] #{j}" unless CACHE_QUE[j]
    else
      CACHE_SET[j] = true
      begin
        CACHE_QUE[j] = true
        uri = URI.parse(j)
        proxy_uri = URI.parse(options['proxy']) if options['proxy'] && !options['proxy'].empty?

        http = if proxy_uri
                 Net::HTTP.new(uri.host, uri.port,
                               proxy_uri.host, proxy_uri.port,
                               proxy_uri.user, proxy_uri.password)
               else
                 Net::HTTP.new(uri.host, uri.port)
               end
        http.use_ssl = (uri.scheme == 'https')
        http.read_timeout = options['timeout'].to_i if options['timeout']
        http.verify_mode = OpenSSL::SSL::VERIFY_NONE if http.use_ssl?

        request = Net::HTTP::Get.new(uri.request_uri)
        request['User-Agent'] = options['user_agent']
        options['worker_headers']&.each do |header|
          key, value = header.split(':', 2)
          request[key.strip] = value.strip
        end

        response = http.request(request)
        status_code = response.code.to_i
        Logger.verbose "Status Code: #{status_code} for #{j}" if options['verbose']

        if status_code >= 400 || (status_code >= 300 && options['include30x'])
          Logger.found "[#{status_code} #{response.message}] #{j}"
          CACHE_QUE[j] = false
          DeadFinder.output[target] ||= []
          DeadFinder.output[target] << j
        end
      rescue StandardError => e
        Logger.verbose "[#{e}] #{j}" if options['verbose']
      end
    end
    results << j
  end
end
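#worker is normally spawned by #run rather than called directly, but the header handling is worth illustrating. A sketch with hypothetical values; note that 'user_agent' and 'proxy' are read here even though #default_options does not set them:

options = DeadFinder::Runner.new.default_options.merge(
  'user_agent'     => 'Mozilla/5.0 (compatible; deadfinder)',
  'proxy'          => 'http://127.0.0.1:8080',
  'worker_headers' => ['Authorization: Bearer TOKEN', 'Accept: text/html']
)
# Each worker_headers entry is split on the first ':' and trimmed, so
# 'Authorization: Bearer TOKEN' becomes request['Authorization'] = 'Bearer TOKEN'.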