Module: Feedbag

Defined in:
lib/feedbag.rb

Defined Under Namespace

Classes: Feed

Class Method Summary collapse

Class Method Details

._is_http_valid(uri, orig_url) ⇒ Object

Not currently used; reserved for future feed URL validation.



218
219
220
221
222
223
224
225
226
227
# File 'lib/feedbag.rb', line 218

# Checks whether fetching +uri+ yields an HTTP success response.
#
# Currently not called by the rest of the module.
#
# @param uri [URI] absolute URI to probe with a GET request
# @param orig_url [String] original URL; kept for interface compatibility
#   but not consulted by the check (the previous implementation parsed it
#   into a local that was never read — that dead code is removed)
# @return [Boolean] true when the response is a Net::HTTPSuccess
def self._is_http_valid(uri, orig_url)
  Net::HTTP.get_response(uri).is_a?(Net::HTTPSuccess)
end

.add_feed(feed_url, orig_url, base_uri = nil, title = "", description = "") ⇒ Object



193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
# File 'lib/feedbag.rb', line 193

# Records a discovered feed URL in the global $feeds accumulator.
#
# The URL is normalized first: a leading "feed:" pseudo-scheme is stripped,
# the URL is resolved against +base_uri+ (the page's <base href>) when
# given, and a still-relative URL is resolved against +orig_url+ (the page
# it was found on). Duplicate URLs are skipped.
#
# @param feed_url [String] candidate feed URL (may be relative)
# @param orig_url [String, nil] page the feed was discovered on
# @param base_uri [String, nil] <base href> of that page, if any
# @param title [String] title to attach to the Feed
# @param description [String] description to attach to the Feed
# @return [Object, nil] result of the push, or nil when skipped/invalid
def self.add_feed(feed_url, orig_url, base_uri = nil, title = "", description = "")
  # puts "#{feed_url} - #{orig_url}"
  url = feed_url.sub(/^feed:/, '').strip

  if base_uri
    # url = base_uri + feed_url
    url = URI.parse(base_uri).merge(feed_url).to_s
  end

  begin
    uri = URI.parse(url)
  rescue URI::Error
    # A malformed URL is not fatal to the caller: report it on stderr and
    # skip this candidate instead of terminating the whole process
    # (previously a bare rescue followed by `exit 1`).
    $stderr.puts "Error with `#{url}'"
    return nil
  end
  unless uri.absolute?
    # Relative URL: resolve it against the page it was discovered on.
    orig = URI.parse(orig_url)
    url = orig.merge(url).to_s
  end

  # verify url is really valid
  $feeds.push(Feed.new(url, title, orig_url, description)) unless $feeds.any? { |f| f.url == url }# if self._is_http_valid(URI.parse(url), orig_url)
end

.feed?(url) ⇒ Boolean



46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
# File 'lib/feedbag.rb', line 46

# Returns true when +url+ itself is a feed: the URL is normalized, handed
# to .find, and the result must be exactly one entry equal to the
# normalized URL.
#
# @param url [String]
# @return [Boolean]
def self.feed?(url)
  # use LWR::Simple.normalize some time
  parsed = URI.parse(url)
  normalized = "#{parsed.scheme || 'http'}://#{parsed.host}#{parsed.path}"
  normalized << "?#{parsed.query}" if parsed.query

  # hack: treat a feed:// URL as plain http://
  normalized.sub!(/^feed:\/\//, 'http://')

  results = self.find(normalized)
  results.size == 1 && results.first == normalized
end

.find(url, args = {}) ⇒ Object



63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
# File 'lib/feedbag.rb', line 63

# Searches +url+ for feeds and returns them (the global $feeds array,
# always returned via the ensure clause — even after a rescued error).
#
# Strategy, in order:
#   1. Normalize the URL: default scheme is http; a feed:// URL is accepted
#      as-is (rewritten to http://).
#   2. If the optional feed_validator gem is installed, accept the URL
#      directly when the W3C validator deems it a valid feed.
#   3. Otherwise fetch the page (20s timeout) and scan it: content-type
#      check, <atom:link>/<link rel> elements, feed-looking <a href>s, and
#      a self-referencing xml:base fallback.
#
# @param url [String] page or feed URL to inspect
# @param args [Hash] accepted but not read anywhere in this method
# @return [Array] the collected feeds ($feeds)
def self.find(url, args = {})
  # Reset the global accumulator that self.add_feed pushes into.
  $feeds = []

  url_uri = URI.parse(url)
  url = nil
  if url_uri.scheme.nil?
    url = "http://#{url_uri.to_s}"
  elsif url_uri.scheme == "feed"
    # Already explicitly a feed URL — record it and stop.
    return self.add_feed(url_uri.to_s.sub(/^feed:\/\//, 'http://'), nil)
  else
    url = url_uri.to_s
  end
  #url = "#{url_uri.scheme or 'http'}://#{url_uri.host}#{url_uri.path}"

   #return self.add_feed(url, nil) if looks_like_feed? url

  # check if feed_valid is avail
   begin
    require "feed_validator"
    v = W3C::FeedValidator.new
    v.validate_url(url)
    return self.add_feed(url, nil) if v.valid?
  rescue LoadError
    # scoo
  rescue REXML::ParseException
    # usually indicates timeout
    # TODO: actually find out timeout. use Terminator?
    # $stderr.puts "Feed looked like feed but might not have passed validation or timed out"
   rescue => ex
    $stderr.puts "#{ex.class} error ocurred with: `#{url}': #{ex.message}"
  end

  begin
    Timeout::timeout(20) do
      # NOTE(review): open-uri's Kernel#open on a caller-supplied string —
      # consider URI.open to avoid the command-injection footgun.
      html = open(url) do |f|
        content_type = f.content_type.downcase
        if content_type == "application/octet-stream" # open failed
          content_type = f.meta["content-type"].gsub(/;.*$/, '')
        end
        # The URL served a feed content type directly: record it and return
        # out of the whole method (this `return` exits find, not the block).
        if @content_types.include?(content_type)
          return self.add_feed(url, nil)
        end

        # On pre-1.9 Rubies, transcode to UTF-8 via Iconv before parsing.
        if RUBY_VERSION < '1.9'
          ic = Iconv.new('UTF-8//IGNORE', f.charset)
          doc = Hpricot(ic.iconv(f.read))
         else
          doc = Hpricot(f.read)
         end

        # <base href> (if present) is stashed in a global for add_feed to
        # resolve relative candidate URLs against.
        if doc.at("base") and doc.at("base")["href"]
          $base_uri = doc.at("base")["href"]
        else
          $base_uri = nil
        end

        title = (doc/:title).first
        title = title.innerHTML if title

        description = (doc/:description).first
        description = description.innerHTML if description

        # first with links
        (doc/"atom:link").each do |l|
          next unless l["rel"]
          if l["type"] and @content_types.include?(l["type"].downcase.strip) and l["rel"].downcase == "self"
            self.add_feed(l["href"], url, $base_uri, title, description || title)
          end
        end

        # <link rel="alternate"> / rel="service.feed" with a feed MIME type.
        (doc/"link").each do |l|
          next unless l["rel"]
          if l["type"] and @content_types.include?(l["type"].downcase.strip) and (l["rel"].downcase =~ /alternate/i or l["rel"] == "service.feed")
            self.add_feed(l["href"], url, $base_uri, title, description || title)
          end
        end

        # Anchors whose href looks like a feed and looks on-site.
        (doc/"a").each do |a|
          next unless a["href"]
         if self.looks_like_feed?(a["href"]) and (a["href"] =~ /\// or a["href"] =~ /#{url_uri.host}/)
           calculated_title = self.title_for_anchor(a, title)
            self.add_feed(a["href"], url, $base_uri, calculated_title, description || calculated_title)
         end
        end

        # Second, looser anchor pass: any feed-looking href (add_feed
        # de-duplicates, so overlap with the pass above is harmless).
        (doc/"a").each do |a|
         next unless a["href"]
          if self.looks_like_feed?(a["href"])
            calculated_title = self.title_for_anchor(a, title)
           self.add_feed(a["href"], url, $base_uri, calculated_title, description || calculated_title)
          end
        end

        # Added support for feeds like http://tabtimes.com/tbfeed/mashable/full.xml
        if url.match(/.xml$/) and doc.root and doc.root["xml:base"] and doc.root["xml:base"].strip == url.strip
          self.add_feed(url, url, $base_uri, title, description)
        end
      end
    end
  rescue Timeout::Error => err
    $stderr.puts "Timeout error ocurred with `#{url}: #{err}'"
  rescue OpenURI::HTTPError => the_error
    $stderr.puts "Error ocurred with `#{url}': #{the_error}"
  rescue SocketError => err
    $stderr.puts "Socket error ocurred with: `#{url}': #{err}"
  rescue => ex
    $stderr.puts "#{ex.class} error ocurred with: `#{url}': #{ex.message}"
  ensure
    # ensure + return: $feeds is always the result, and any exception still
    # in flight at this point would be discarded by this return.
    return $feeds
  end
end

.looks_like_feed?(url) ⇒ Boolean



175
176
177
178
179
180
181
182
# File 'lib/feedbag.rb', line 175

# Heuristically decides whether +url+ points at a feed, from the URL's
# shape alone (no network access).
#
# Matches http(s) URLs that end in .rdf/.xml/.rss (as extension or path
# component), carry a feed=rss|atom query parameter, or end in "atom" or
# "feed" (optionally with a trailing slash).
#
# @param url [String]
# @return [Boolean]
def self.looks_like_feed?(url)
  # Fast scheme gate; "https" also passes since only the first four
  # characters are compared.
  return false unless url[0...4] == 'http'
  # NOTE: the original pattern listed "rdf" twice (rdf|xml|rdf|rss); the
  # duplicate alternative is dropped with no change in behavior.
  !!(url =~ /((\.|\/)(rdf|xml|rss)$|feed=(rss|atom)|(atom|feed)\/?$)/i)
end

.title_for_anchor(a, title) ⇒ Object



184
185
186
187
188
189
190
191
# File 'lib/feedbag.rb', line 184

# Picks a human-readable title for an anchor element: its title attribute,
# inner text, or alt attribute — falling back to +title+ (the page title)
# when all of those are missing or empty.
#
# @param a [#[], #innerText] anchor-like element (Hpricot node or similar)
# @param title [String, nil] fallback title
# @return [String, nil]
def self.title_for_anchor(a, title)
  candidate = a["title"] || a.innerText || a['alt']
  (candidate && candidate.length > 0) ? candidate : title
end