Class: AsyncFeedbag
- Inherits:
-
Object
- Object
- AsyncFeedbag
- Defined in:
- lib/async-feedbag.rb
Constant Summary collapse
- CONTENT_TYPES =
[ "application/x.atom+xml", "application/atom+xml", "application/xml", "text/xml", "application/rss+xml", "application/rdf+xml", "application/json", "application/feed+json" ].freeze
- FEED_SCHEME_RE =
%r{^feed://}
- RedirectionError =
Class.new(StandardError)
- XML_RE =
/.xml$/
- SERVICE_FEED_XPATH =
"//link[@rel='alternate' or @rel='service.feed'][@href][@type]"
- JSON_FEED_XPATH =
"//link[@rel='alternate' and @type='application/json'][@href]"
- FEED_RE =
%r{(\.(rdf|xml|rss)(\?([\w'\-%]?(=[\w'\-%.]*)?(&|#|\+|;)?)+)?(:[\w'\-%]+)?$|feed=(rss|atom)|(atom|feed)/?$)}i
Class Method Summary collapse
Instance Method Summary collapse
- #add_feed(feed_url, orig_url, base_uri = nil) ⇒ Object
- #feed?(url) ⇒ Boolean
- #find(url, _options = {}) ⇒ Object
-
#initialize(options: nil) ⇒ AsyncFeedbag
constructor
A new instance of AsyncFeedbag.
- #looks_like_feed?(url) ⇒ Boolean
Constructor Details
#initialize(options: nil) ⇒ AsyncFeedbag
Returns a new instance of AsyncFeedbag.
46 47 48 49 50 |
# File 'lib/async-feedbag.rb', line 46 def initialize(options: nil) @feeds = [] @options = options || {} @options["User-Agent"] ||= "AsyncFeedbag/#{VERSION}" end |
Class Method Details
.feed?(url) ⇒ Object
34 35 36 |
# File 'lib/async-feedbag.rb', line 34 def feed?(url) new.feed?(url) end |
.find(url, options = {}) ⇒ Object
41 42 43 |
# File 'lib/async-feedbag.rb', line 41 def find(url, options = {}) new(options: options).find(url, **options) end |
Instance Method Details
#add_feed(feed_url, orig_url, base_uri = nil) ⇒ Object
165 166 167 168 169 170 171 172 173 174 175 176 177 178 179 180 181 182 183 184 185 |
# File 'lib/async-feedbag.rb', line 165 def add_feed(feed_url, orig_url, base_uri = nil) url = feed_url.sub(/^feed:/, "").strip if base_uri url = URI.parse(base_uri).merge(feed_url).to_s end begin uri = URI.parse(url) rescue puts "Error with `#{url}'" exit 1 end unless uri.absolute? orig = URI.parse(orig_url) url = orig.merge(url).to_s end # verify url is really valid @feeds.push(url) unless @feeds.include?(url) end |
#feed?(url) ⇒ Boolean
53 54 55 56 57 58 59 60 61 62 63 64 |
# File 'lib/async-feedbag.rb', line 53 def feed?(url) # use LWR::Simple.normalize some time url_uri = URI.parse(url) url = "#{url_uri.scheme or "http"}://#{url_uri.host}#{url_uri.path}" url << "?#{url_uri.query}" if url_uri.query # hack: url.sub!(FEED_SCHEME_RE, "http://") res = AsyncFeedbag.find(url) (res.size == 1) && (res.first == url) end |
#find(url, _options = {}) ⇒ Object
71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 |
# File 'lib/async-feedbag.rb', line 71 def find(url, _options = {}) url_uri = URI.parse(url) url = nil if url_uri.scheme.nil? url = "http://#{url_uri}" elsif url_uri.scheme == "feed" return add_feed(url_uri.to_s.sub(FEED_SCHEME_RE, "http://"), nil) else url = url_uri.to_s end # check if feed_valid is avail begin require "feed_validator" v = W3C::FeedValidator.new v.validate_url(url) return add_feed(url, nil) if v.valid? rescue LoadError # scoo rescue REXML::ParseException # usually indicates timeout # TODO: actually find out timeout. use Terminator? # $stderr.puts "Feed looked like feed but might not have passed validation or timed out" rescue => e warn "#{e.class} error occurred with: `#{url}': #{e.message}" end retries = 2 begin headers = @options.slice("User-Agent") Sync do response = AsyncInternetWithRedirect.get(url, headers) if response.redirection? original_uri = URI.parse(url) uri = URI.parse(response.headers["location"]) if uri.host == original_uri.host url = response.headers["location"] raise RedirectionError end end content_type = response.headers["content-type"].gsub(/;.*$/, "").downcase next add_feed(url, nil) if CONTENT_TYPES.include?(content_type) doc = Nokogiri::HTML(response.read) @base_uri = (doc.at("base")["href"] if doc.at("base") && doc.at("base")["href"]) # first with links (doc / "atom:link").each do |l| next unless l["rel"] && l["href"].present? 
add_feed(l["href"], url, @base_uri) if l["type"] && CONTENT_TYPES.include?(l["type"].downcase.strip) && (l["rel"].downcase == "self") end doc.xpath(SERVICE_FEED_XPATH).each do |l| add_feed(l["href"], url, @base_uri) if CONTENT_TYPES.include?(l["type"].downcase.strip) end doc.xpath(JSON_FEED_XPATH).each do |e| add_feed(e["href"], url, @base_uri) if looks_like_feed?(e["href"]) end (doc / "a").each do |a| next unless a["href"] add_feed(a["href"], url, @base_uri) if looks_like_feed?(a["href"]) && (a["href"].include?("/") || a["href"] =~ /#{url_uri.host}/) next unless a["href"] add_feed(a["href"], url, @base_uri) if looks_like_feed?(a["href"]) end # Added support for feeds like http://tabtimes.com/tbfeed/mashable/full.xml add_feed(url, nil) if url.match(XML_RE) && doc.root && doc.root["xml:base"] && (doc.root["xml:base"].strip == url.strip) ensure response&.close end rescue RedirectionError retries -= 1 retry if retries >= 0 rescue Timeout::Error => e warn "Timeout error occurred with `#{url}: #{e}'" rescue => e warn "#{e.class} error occurred with: `#{url}': #{e.message}" end return @feeds end |
#looks_like_feed?(url) ⇒ Boolean
161 162 163 |
# File 'lib/async-feedbag.rb', line 161 def looks_like_feed?(url) FEED_RE.match?(url) end |