Class: Feedbag

Inherits: Object
Defined in:
lib/feedbag.rb

Constant Summary

CONTENT_TYPES =
[
  'application/x.atom+xml',
  'application/atom+xml',
  'application/xml',
  'text/xml',
  'application/rss+xml',
  'application/rdf+xml',
].freeze
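
These are the MIME types Feedbag accepts as feeds, both when checking a fetched document's Content-Type and when filtering <link> candidates. A minimal sketch of the membership test, mirroring how #find downcases and strips parameters before comparing (the sample header value is illustrative):

content_type = "application/rss+xml; charset=utf-8"
Feedbag::CONTENT_TYPES.include?(content_type.downcase.gsub(/;.*$/, ''))
# => true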

Class Method Summary

  • .feed?(url) ⇒ Boolean
  • .find(url, args = {}) ⇒ Object

Instance Method Summary

  • #_is_http_valid(uri, orig_url) ⇒ Object
  • #add_feed(feed_url, orig_url, base_uri = nil) ⇒ Object
  • #feed?(url) ⇒ Boolean
  • #find(url, args = {}) ⇒ Object
  • #looks_like_feed?(url) ⇒ Boolean

Constructor Details

#initialize ⇒ Feedbag

Returns a new instance of Feedbag.



# File 'lib/feedbag.rb', line 48

def initialize
  @feeds = []
end
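
A minimal usage sketch (the URL is a placeholder): instantiate once, then reuse the instance for discovery.

bag = Feedbag.new
bag.find("http://example.com/")
# => array of feed URLs accumulated in @feeds, e.g. ["http://example.com/feed.xml"]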

Class Method Details

.feed?(url) ⇒ Boolean

Returns:

  • (Boolean)


# File 'lib/feedbag.rb', line 40

def self.feed?(url)
  new.feed?(url)
end
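
A convenience wrapper that instantiates Feedbag and delegates to the instance method #feed?. Illustrative calls (URLs are placeholders):

Feedbag.feed?("http://example.com/atom.xml") # => true when the URL itself serves a feed
Feedbag.feed?("http://example.com/")         # => false for an ordinary HTML page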

.find(url, args = {}) ⇒ Object



# File 'lib/feedbag.rb', line 44

def self.find(url, args = {})
  new.find(url, args)
end
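
Likewise a wrapper around #find on a fresh instance. A typical call, with the result shape following from the implementation below (which always returns the accumulated feed array):

Feedbag.find("http://example.com/blog/")
# => ["http://example.com/blog/feed.xml"], or [] when nothing is discovered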

Instance Method Details

#_is_http_valid(uri, orig_url) ⇒ Object

Not used yet.



# File 'lib/feedbag.rb', line 196

def _is_http_valid(uri, orig_url)
  req = Net::HTTP.get_response(uri)
  orig_uri = URI.parse(orig_url) # parsed for future use; not consulted yet
  case req
  when Net::HTTPSuccess then
    return true
  else
    return false
  end
end
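
Were it wired in, this check would gate add_feed on a successful HTTP response, roughly as the commented-out guard in add_feed suggests. A sketch of that intended use (not current behavior; the URL is hypothetical):

uri = URI.parse("http://example.com/feed.xml")
_is_http_valid(uri, "http://example.com/") # => true only for a Net::HTTPSuccess response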

#add_feed(feed_url, orig_url, base_uri = nil) ⇒ Object



# File 'lib/feedbag.rb', line 171

def add_feed(feed_url, orig_url, base_uri = nil)
  # puts "#{feed_url} - #{orig_url}"
  url = feed_url.sub(/^feed:/, '').strip

  if base_uri
    # url = base_uri + feed_url
    url = URI.parse(base_uri).merge(feed_url).to_s
  end

  begin
    uri = URI.parse(url)
  rescue
    puts "Error with `#{url}'"
    exit 1
  end
  unless uri.absolute?
    orig = URI.parse(orig_url)
    url = orig.merge(url).to_s
  end

  # verify url is really valid
  @feeds.push(url) unless @feeds.include?(url) # if self._is_http_valid(URI.parse(url), orig_url)
end
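
The base_uri and orig_url parameters exist to resolve relative feed links found in a page. A sketch of the same URI#merge resolution the method performs (URLs are placeholders):

require "uri"

# With a <base href> present, the href is merged against it:
URI.parse("http://example.com/blog/").merge("feed.xml").to_s
# => "http://example.com/blog/feed.xml"

# Without one, a relative href is merged against the page URL itself:
URI.parse("http://example.com/posts/1").merge("/atom.xml").to_s
# => "http://example.com/atom.xml"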

#feed?(url) ⇒ Boolean

Returns:

  • (Boolean)


# File 'lib/feedbag.rb', line 52

def feed?(url)
  # use LWR::Simple.normalize some time
  url_uri = URI.parse(url)
  url = "#{url_uri.scheme or 'http'}://#{url_uri.host}#{url_uri.path}"
  url << "?#{url_uri.query}" if url_uri.query

  # hack:
  url.sub!(/^feed:\/\//, 'http://')

  res = Feedbag.find(url)
  if res.size == 1 and res.first == url
    return true
  else
    return false
  end
end
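
Note the round trip: the URL is normalized, handed to Feedbag.find, and judged a feed only when discovery returns exactly that URL back. Illustrative consequences (URLs are placeholders):

Feedbag.new.feed?("feed://example.com/rss.xml") # feed:// is rewritten to http:// before the lookup
Feedbag.new.feed?("http://example.com/")        # => false: an HTML page yields its linked feeds, not itself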

#find(url, args = {}) ⇒ Object



# File 'lib/feedbag.rb', line 69

def find(url, args = {})
  url_uri = URI.parse(url)
  url = nil
  if url_uri.scheme.nil?
    url = "http://#{url_uri.to_s}"
  elsif url_uri.scheme == "feed"
    return self.add_feed(url_uri.to_s.sub(/^feed:\/\//, 'http://'), nil)
  else
    url = url_uri.to_s
  end
  #url = "#{url_uri.scheme or 'http'}://#{url_uri.host}#{url_uri.path}"

  # check if feed_valid is avail
  begin
    require "feed_validator"
    v = W3C::FeedValidator.new
    v.validate_url(url)
    return self.add_feed(url, nil) if v.valid?
  rescue LoadError
    # scoo
  rescue REXML::ParseException
    # usually indicates timeout
    # TODO: actually find out timeout. use Terminator?
    # $stderr.puts "Feed looked like feed but might not have passed validation or timed out"
  rescue => ex
    $stderr.puts "#{ex.class} error occurred with: `#{url}': #{ex.message}"
  end

  begin
    html = URI.open(url) do |f| # open-uri's URI.open; Kernel#open no longer handles URLs on Ruby 3+
      content_type = f.content_type.downcase
      if content_type == "application/octet-stream" # open failed
        content_type = f.meta["content-type"].gsub(/;.*$/, '')
      end
      if CONTENT_TYPES.include?(content_type)
        return self.add_feed(url, nil)
      end

      doc = Nokogiri::HTML(f.read)

      if doc.at("base") and doc.at("base")["href"]
        @base_uri = doc.at("base")["href"]
      else
        @base_uri = nil
      end

      # first with links
      (doc/"atom:link").each do |l|
        next unless l["rel"]
        if l["type"] and CONTENT_TYPES.include?(l["type"].downcase.strip) and l["rel"].downcase == "self"
          self.add_feed(l["href"], url, @base_uri)
        end
      end

      (doc/"link").each do |l|
        next unless l["rel"]
        if l["type"] and CONTENT_TYPES.include?(l["type"].downcase.strip) and (l["rel"].downcase =~ /alternate/i or l["rel"] == "service.feed")
          self.add_feed(l["href"], url, @base_uri)
        end
      end

      (doc/"a").each do |a|
        next unless a["href"]
        if self.looks_like_feed?(a["href"]) and (a["href"] =~ /\// or a["href"] =~ /#{url_uri.host}/)
          self.add_feed(a["href"], url, @base_uri)
        end
      end

      (doc/"a").each do |a|
        next unless a["href"]
        if self.looks_like_feed?(a["href"])
          self.add_feed(a["href"], url, @base_uri)
        end
      end

      # Added support for feeds like http://tabtimes.com/tbfeed/mashable/full.xml
      if url.match(/\.xml$/) and doc.root and doc.root["xml:base"] and doc.root["xml:base"].strip == url.strip
        self.add_feed(url, nil)
      end
    end
  rescue Timeout::Error => err
    $stderr.puts "Timeout error occurred with `#{url}: #{err}'"
  rescue OpenURI::HTTPError => the_error
    $stderr.puts "Error occurred with `#{url}': #{the_error}"
  rescue SocketError => err
    $stderr.puts "Socket error occurred with: `#{url}': #{err}"
  rescue => ex
    $stderr.puts "#{ex.class} error occurred with: `#{url}': #{ex.message}"
  ensure
    return @feeds
  end

end
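
In short: feed: URLs and responses whose Content-Type is already in CONTENT_TYPES short-circuit immediately; otherwise the HTML is scanned for atom:link and link elements, then for anchors whose hrefs look like feeds. An end-to-end sketch against a hypothetical page:

# Suppose http://example.com/ contains:
#   <link rel="alternate" type="application/rss+xml" href="/feed.rss">
Feedbag.new.find("http://example.com/")
# => ["http://example.com/feed.rss"]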

#looks_like_feed?(url) ⇒ Boolean

Returns:

  • (Boolean)


# File 'lib/feedbag.rb', line 163

def looks_like_feed?(url)
  if url =~ /(\.(rdf|xml|rss)$|feed=(rss|atom)|(atom|feed)\/?$)/i
    true
  else
    false
  end
end
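
The heuristic matches common feed file extensions, feed= query parameters, and trailing atom/feed path segments. Some illustrative inputs:

bag = Feedbag.new
bag.looks_like_feed?("http://example.com/index.xml") # => true  (.xml extension)
bag.looks_like_feed?("http://example.com/?feed=rss") # => true  (feed= parameter)
bag.looks_like_feed?("http://example.com/atom/")     # => true  (trailing atom path)
bag.looks_like_feed?("http://example.com/about")     # => false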