Class: Feedbag

Inherits:
Object
Defined in:
lib/feedbag.rb

Constant Summary

VERSION =
'1.0.1'
CONTENT_TYPES =
[
  'application/x.atom+xml',
  'application/atom+xml',
  'application/xml',
  'text/xml',
  'application/rss+xml',
  'application/rdf+xml',
  'application/json',
  'application/feed+json'
].freeze

Class Attribute Summary

Class Method Summary

Instance Method Summary

Constructor Details

#initialize(options: nil) ⇒ Feedbag

Returns a new instance of Feedbag.



# File 'lib/feedbag.rb', line 63

def initialize(options: nil)
  @feeds = []
  @options = options || {}
  @options["User-Agent"] ||= "Feedbag/#{VERSION}"
end
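
Anything supplied in options is forwarded as request headers when pages are fetched with URI.open, so a caller can override or extend the default User-Agent. A minimal usage sketch (header value and URL are illustrative):

require "feedbag"

# custom headers are merged with the default Feedbag User-Agent
bag = Feedbag.new(options: { "User-Agent" => "MyCrawler/2.0" })
bag.find("https://example.com/blog")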

Class Attribute Details

.logger ⇒ Object



# File 'lib/feedbag.rb', line 32

def logger
  @logger ||= default_logger
end

Class Method Details

.feed?(url) ⇒ Boolean

Returns:

  • (Boolean)


# File 'lib/feedbag.rb', line 55

def self.feed?(url)
  new.feed?(url)
end
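
A quick boolean check on a candidate URL (illustrative):

Feedbag.feed?("https://example.com/index.xml")
# => true when the URL resolves to exactly one feed: itself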

.find(url, options = {}) ⇒ Object



# File 'lib/feedbag.rb', line 59

def self.find(url, options = {})
  new(options: options).find(url, options)
end
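
This is the usual entry point: pass a page (or feed) URL and get back an array of discovered feed URLs. A sketch with an illustrative URL and header:

feeds = Feedbag.find("https://example.com/blog", "User-Agent" => "MyApp/1.0")
feeds.each { |feed| puts feed }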

.normalize_url(url) ⇒ Object

Normalizes a URL to handle non-ASCII characters (IRIs), converting internationalized URLs into valid ASCII URIs.



# File 'lib/feedbag.rb', line 71

def self.normalize_url(url)
  return url if url.nil? || url.empty?
  if defined?(Addressable::URI)
    Addressable::URI.parse(url).normalize.to_s
  else
    url
  end
rescue Addressable::URI::InvalidURIError
  url
end
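
With the addressable gem loaded, an IRI host is IDNA-encoded and its path percent-encoded; without it the URL passes through unchanged. Illustrative values:

require "addressable/uri"

Feedbag.normalize_url("https://例え.jp/フィード")
# => "https://xn--r8jz45g.jp/%E3%83%95%E3%82%A3%E3%83%BC%E3%83%89"
Feedbag.normalize_url("")  # => "" (nil and empty strings pass through untouched)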

Instance Method Details

#_is_http_valid(uri, orig_url) ⇒ Object

Not used yet.



# File 'lib/feedbag.rb', line 237

def _is_http_valid(uri, orig_url)
  # true only when the URI answers with an HTTP success status
  req = Net::HTTP.get_response(uri)
  req.is_a?(Net::HTTPSuccess)
end

#add_feed(feed_url, orig_url, base_uri = nil) ⇒ Object



# File 'lib/feedbag.rb', line 207

def add_feed(feed_url, orig_url, base_uri = nil)
  # strip an optional feed: pseudo-scheme prefix
  url = feed_url.sub(/^feed:/, '').strip

  # Normalize URL to handle non-ASCII characters
  url = Feedbag.normalize_url(url)

  # resolve against the page's <base href> when one was found
  if base_uri
    normalized_base = Feedbag.normalize_url(base_uri)
    url = URI.parse(normalized_base).merge(url).to_s
  end

  begin
    uri = URI.parse(url)
  rescue => ex
    Feedbag.logger.error "Error parsing URL `#{url}': #{ex.message}"
    return
  end
  unless uri.absolute?
    normalized_orig = Feedbag.normalize_url(orig_url)
    orig = URI.parse(normalized_orig)
    url = orig.merge(url).to_s
  end

  # de-duplicate; HTTP validation via _is_http_valid is currently disabled
  @feeds.push(url) unless @feeds.include?(url)
end
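
A relative feed_url is resolved against base_uri when one is given, otherwise against orig_url; a feed: prefix is stripped first. For example (URLs illustrative):

bag = Feedbag.new
bag.add_feed("/feed.xml", "https://example.com/blog/post.html")
# the instance's feed list now holds "https://example.com/feed.xml"
bag.add_feed("feed:https://example.com/atom.xml", nil)
# stored as "https://example.com/atom.xml"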

#feed?(url) ⇒ Boolean

Returns:

  • (Boolean)


# File 'lib/feedbag.rb', line 82

def feed?(url)
  # Normalize URL to handle non-ASCII characters
  normalized_url = Feedbag.normalize_url(url)
  url_uri = URI.parse(normalized_url)
  url = "#{url_uri.scheme || 'http'}://#{url_uri.host}#{url_uri.path}"
  url << "?#{url_uri.query}" if url_uri.query

  # hack: treat the feed:// pseudo-scheme as plain http://
  url.sub!(/^feed:\/\//, 'http://')

  res = Feedbag.find(url)
  res.size == 1 && res.first == url
end
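
Because of the feed:// rewrite above, pseudo-protocol URLs are checked over plain HTTP (illustrative):

Feedbag.new.feed?("feed://example.com/rss.xml")
# fetched as http://example.com/rss.xml; true only if find returns exactly that URL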

#find(url, options = {}) ⇒ Object



# File 'lib/feedbag.rb', line 100

def find(url, options = {})
  # Normalize URL to handle non-ASCII characters
  normalized_url = Feedbag.normalize_url(url)
  url_uri = URI.parse(normalized_url)
  url = nil
  if url_uri.scheme.nil?
    url = "http://#{url_uri.to_s}"
  elsif url_uri.scheme == "feed"
    return self.add_feed(url_uri.to_s.sub(/^feed:\/\//, 'http://'), nil)
  else
    url = url_uri.to_s
  end

  # use the W3C feed_validator gem when it is available
  begin
    require "feed_validator"
    v = W3C::FeedValidator.new
    v.validate_url(url)
    return self.add_feed(url, nil) if v.valid?
  rescue LoadError
    # feed_validator gem not installed; fall through to HTML-based discovery
  rescue REXML::ParseException
    # usually indicates timeout
    # TODO: actually find out timeout. use Terminator?
    # $stderr.puts "Feed looked like feed but might not have passed validation or timed out"
  rescue => ex
    Feedbag.logger.error "#{ex.class} error occurred with: `#{url}': #{ex.message}"
  end

  begin
    html = URI.open(url, @options) do |f|
      content_type = f.content_type.downcase
      if content_type == "application/octet-stream" # generic type; fall back to the response's content-type header
        content_type = f.meta["content-type"].gsub(/;.*$/, '')
      end
      if CONTENT_TYPES.include?(content_type)
        return self.add_feed(url, nil)
      end

      doc = Nokogiri::HTML(f.read)

      if doc.at("base") and doc.at("base")["href"]
        @base_uri = doc.at("base")["href"]
      else
        @base_uri = nil
      end

      # first, <link> and <atom:link> elements that advertise feeds
      (doc/"atom:link").each do |l|
        next unless l["rel"] && l["href"].present?
        if l["type"] and CONTENT_TYPES.include?(l["type"].downcase.strip) and l["rel"].downcase == "self"
          self.add_feed(l["href"], url, @base_uri)
        end
      end

      doc.xpath("//link[@rel='alternate' or @rel='service.feed'][@href][@type]").each do |l|
        if CONTENT_TYPES.include?(l['type'].downcase.strip)
          self.add_feed(l["href"], url, @base_uri)
        end
      end

      doc.xpath("//link[@rel='alternate' and @type='application/json'][@href]").each do |e|
        self.add_feed(e['href'], url, @base_uri) if self.looks_like_feed?(e['href'])
      end

      (doc/"a").each do |a|
        next unless a["href"]
        if self.looks_like_feed?(a["href"]) and (a["href"] =~ /\// or a["href"] =~ /#{url_uri.host}/)
          self.add_feed(a["href"], url, @base_uri)
        end
      end

      (doc/"a").each do |a|
        next unless a["href"]
        if self.looks_like_feed?(a["href"])
          self.add_feed(a["href"], url, @base_uri)
        end
      end

      # Added support for feeds like http://tabtimes.com/tbfeed/mashable/full.xml
      if url.match(/\.xml$/) and doc.root and doc.root["xml:base"] and doc.root["xml:base"].strip == url.strip
        self.add_feed(url, nil)
      end
    end
  rescue Timeout::Error => err
    Feedbag.logger.error "Timeout error occurred with `#{url}: #{err}'"
  rescue OpenURI::HTTPError => the_error
    Feedbag.logger.error "Error occurred with `#{url}': #{the_error}"
  rescue SocketError => err
    Feedbag.logger.error "Socket error occurred with: `#{url}': #{err}"
  rescue => ex
    Feedbag.logger.error "#{ex.class} error occurred with: `#{url}': #{ex.message}"
  ensure
    return @feeds
  end

end
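
Discovery prefers <link> elements over plain anchors, and duplicates are filtered by add_feed. Note that @feeds is only reset in the constructor, so results accumulate across repeated find calls on one instance. A sketch with illustrative URLs:

bag = Feedbag.new(options: { "User-Agent" => "MyApp/1.0" })
bag.find("https://example.com/blog")
# => ["https://example.com/blog/atom.xml"]  (result depends on the page's markup)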

#looks_like_feed?(url) ⇒ Boolean

Returns:

  • (Boolean)


# File 'lib/feedbag.rb', line 199

def looks_like_feed?(url)
  url.match?(/(\.(rdf|xml|rss)(\?([\w'\-%]?(=[\w'\-%.]*)?(&|#|\+|\;)?)+)?(:[\w'\-%]+)?$|feed=(rss|atom)|(atom|feed)\/?$)/i)
end
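
URLs the heuristic accepts and rejects (illustrative):

bag = Feedbag.new
bag.looks_like_feed?("https://example.com/posts.rss")   # => true  (.rss extension)
bag.looks_like_feed?("https://example.com/feed")        # => true  (ends in /feed)
bag.looks_like_feed?("https://example.com/?feed=atom")  # => true  (feed=rss|atom parameter)
bag.looks_like_feed?("https://example.com/about.html")  # => false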