#-- vim:sw=2:et\r
#++\r
#\r
-# RSS feed plugin for RubyBot\r
-# (c) 2004 Stanislav Karchebny <berkus@madfire.net>\r
-# (c) 2005 Ian Monroe <ian@monroe.nu>\r
-# (c) 2005 Mark Kretschmann <markey@web.de>\r
-# (c) 2006 Giuseppe Bilotta <giuseppe.bilotta@gmail.com>\r
+# :title: RSS feed plugin for rbot\r
#\r
-# Licensed under MIT License.\r
+# Author:: Stanislav Karchebny <berkus@madfire.net>\r
+# Author:: Ian Monroe <ian@monroe.nu>\r
+# Author:: Mark Kretschmann <markey@web.de>\r
+# Author:: Giuseppe Bilotta <giuseppe.bilotta@gmail.com>\r
+#\r
+# Copyright:: (C) 2004 Stanislav Karchebny\r
+# Copyright:: (C) 2005 Ian Monroe, Mark Kretschmann\r
+# Copyright:: (C) 2006-2007 Giuseppe Bilotta\r
+#\r
+# License:: MIT license\r
+\r
+require 'rss'\r
+\r
+# Add support for Slashdot namespace in RDF. The code is just an adaptation of\r
+# the DublinCore code.\r
+module ::RSS\r
+\r
+  # Make a unique ID for a given item, based on appropriate bot options\r
+  # Currently the only supported option is bot.config['rss.show_updated']: when true, the\r
+ # description is included in the uid hashing, otherwise it's not\r
+ #\r
+ def RSS.item_uid_for_bot(item, opts={})\r
+ options = { :show_updated => true}.merge(opts)\r
+ desc = options[:show_updated] ? item.description : nil\r
+ [item.title, item.link, desc].hash\r
+ end\r
+\r
+ unless defined?(SLASH_PREFIX)\r
+ SLASH_PREFIX = 'slash'\r
+ SLASH_URI = "http://purl.org/rss/1.0/modules/slash/"\r
+\r
+ RDF.install_ns(SLASH_PREFIX, SLASH_URI)\r
+\r
+ module BaseSlashModel\r
+ def append_features(klass)\r
+ super\r
+\r
+ return if klass.instance_of?(Module)\r
+ SlashModel::ELEMENT_NAME_INFOS.each do |name, plural_name|\r
+ plural = plural_name || "#{name}s"\r
+ full_name = "#{SLASH_PREFIX}_#{name}"\r
+ full_plural_name = "#{SLASH_PREFIX}_#{plural}"\r
+ klass_name = "Slash#{Utils.to_class_name(name)}"\r
+ klass.install_must_call_validator(SLASH_PREFIX, SLASH_URI)\r
+ klass.install_have_children_element(name, SLASH_URI, "*",\r
+ full_name, full_plural_name)\r
+ klass.module_eval(<<-EOC, *get_file_and_line_from_caller(0))\r
+ remove_method :#{full_name}\r
+ remove_method :#{full_name}=\r
+ remove_method :set_#{full_name}\r
+\r
+ def #{full_name}\r
+ @#{full_name}.first and @#{full_name}.first.value\r
+ end\r
+\r
+ def #{full_name}=(new_value)\r
+ @#{full_name}[0] = Utils.new_with_value_if_need(#{klass_name}, new_value)\r
+ end\r
+ alias set_#{full_name} #{full_name}=\r
+ EOC\r
+ end\r
+ end\r
+ end\r
+\r
+ module SlashModel\r
+ extend BaseModel\r
+ extend BaseSlashModel\r
+\r
+ TEXT_ELEMENTS = {\r
+ "department" => nil,\r
+ "section" => nil,\r
+ "comments" => nil,\r
+ "hit_parade" => nil\r
+ }\r
+\r
+ ELEMENT_NAME_INFOS = SlashModel::TEXT_ELEMENTS.to_a\r
+\r
+ ELEMENTS = TEXT_ELEMENTS.keys\r
+\r
+ ELEMENTS.each do |name, plural_name|\r
+ module_eval(<<-EOC, *get_file_and_line_from_caller(0))\r
+ class Slash#{Utils.to_class_name(name)} < Element\r
+ include RSS10\r
+\r
+ content_setup\r
+\r
+ class << self\r
+ def required_prefix\r
+ SLASH_PREFIX\r
+ end\r
+\r
+ def required_uri\r
+ SLASH_URI\r
+ end\r
+ end\r
\r
-require 'rss/parser'\r
-require 'rss/1.0'\r
-require 'rss/2.0'\r
-require 'rss/dublincore'\r
-# begin\r
-# require 'rss/dublincore/2.0'\r
-# rescue\r
-# warning "Unable to load RSS libraries, RSS plugin functionality crippled"\r
-# end\r
+ @tag_name = #{name.dump}\r
\r
-class ::String\r
- def shorten(limit)\r
- if self.length > limit\r
- self+". " =~ /^(.{#{limit}}[^.!;?]*[.!;?])/mi\r
- return $1\r
+ alias_method(:value, :content)\r
+ alias_method(:value=, :content=)\r
+\r
+ def initialize(*args)\r
+ if Utils.element_initialize_arguments?(args)\r
+ super\r
+ else\r
+ super()\r
+ self.content = args[0]\r
+ end\r
+ end\r
+\r
+ def full_name\r
+ tag_name_with_prefix(SLASH_PREFIX)\r
+ end\r
+\r
+ def maker_target(target)\r
+ target.new_#{name}\r
+ end\r
+\r
+ def setup_maker_attributes(#{name})\r
+ #{name}.content = content\r
+ end\r
+ end\r
+ EOC\r
+ end\r
end\r
- self\r
- end\r
\r
- def riphtml\r
- self.gsub(/<[^>]+>/, '').gsub(/&/,'&').gsub(/"/,'"').gsub(/</,'<').gsub(/>/,'>').gsub(/&ellip;/,'...').gsub(/'/, "'").gsub("\n",'')\r
- end\r
+ class RDF\r
+ class Item; include SlashModel; end\r
+ end\r
\r
- def mysqlize\r
- self.gsub(/'/, "''")\r
+ SlashModel::ELEMENTS.each do |name|\r
+ class_name = Utils.to_class_name(name)\r
+ BaseListener.install_class_name(SLASH_URI, name, "Slash#{class_name}")\r
+ end\r
+\r
+ SlashModel::ELEMENTS.collect! {|name| "#{SLASH_PREFIX}_#{name}"}\r
end\r
end\r
\r
+\r
class ::RssBlob\r
- attr :url\r
- attr :handle\r
- attr :type\r
+ attr_accessor :url\r
+ attr_accessor :handle\r
+ attr_accessor :type\r
attr :watchers\r
+ attr_accessor :refresh_rate\r
+ attr_accessor :xml\r
+ attr_accessor :title\r
+ attr_accessor :items\r
+ attr_accessor :mutex\r
\r
- def initialize(url,handle=nil,type=nil,watchers=[])\r
+ def initialize(url,handle=nil,type=nil,watchers=[], xml=nil)\r
@url = url\r
if handle\r
@handle = handle\r
@handle = url\r
end\r
@type = type\r
- @watchers = watchers\r
+ @watchers=[]\r
+ @refresh_rate = nil\r
+ @xml = xml\r
+ @title = nil\r
+ @items = nil\r
+ @mutex = Mutex.new\r
+ sanitize_watchers(watchers)\r
+ end\r
+\r
+ def dup\r
+ @mutex.synchronize do\r
+ self.class.new(@url,\r
+ @handle,\r
+ @type ? @type.dup : nil,\r
+ @watchers.dup,\r
+ @xml ? @xml.dup : nil)\r
+ end\r
+ end\r
+\r
+ # Downcase all watchers, possibly turning them into Strings if they weren't\r
+ def sanitize_watchers(list=@watchers)\r
+ ls = list.dup\r
+ @watchers.clear\r
+ ls.each { |w|\r
+ add_watch(w)\r
+ }\r
end\r
\r
def watched?\r
end\r
\r
def watched_by?(who)\r
- # We need to check bot 'who' itself and the String form, because rss\r
- # watches added before the new Irc framework represented watchers as\r
- # Strings whereas they are now Channels.\r
- #\r
- @watchers.include?(who) || @watchers.include?(who.to_s) \r
+ @watchers.include?(who.downcase)\r
end\r
\r
def add_watch(who)\r
if watched_by?(who)\r
return nil\r
end\r
- # TODO FIXME? should we just store watchers as Strings instead?\r
- # This should then be @watchers << who.downcase\r
- @watchers << who\r
+ @mutex.synchronize do\r
+ @watchers << who.downcase\r
+ end\r
return who\r
end\r
\r
def rm_watch(who)\r
- # See comment to watched_by?\r
- #\r
- @watchers.delete(who)\r
- @watchers.delete(who.to_s)\r
+ @mutex.synchronize do\r
+ @watchers.delete(who.downcase)\r
+ end\r
end\r
\r
def to_a\r
- [@handle,@url,@type,@watchers]\r
+ [@handle,@url,@type,@refresh_rate,@watchers]\r
end\r
\r
def to_s(watchers=false)\r
:default => 300, :validate => Proc.new{|v| v > 30},\r
:desc => "How many seconds to sleep before checking RSS feeds again")\r
\r
- @@watchThreads = Hash.new\r
- @@mutex = Mutex.new\r
+ BotConfig.register BotConfigBooleanValue.new('rss.show_updated',\r
+ :default => true,\r
+ :desc => "Whether feed items for which the description was changed should be shown as new")\r
+\r
+ # We used to save the Mutex with the RssBlob, which was idiotic. And\r
+ # since Mutexes dumped in one version might not be resotrable in another,\r
+ # we need a few tricks to be able to restore data from other versions of Ruby\r
+ #\r
+ # When migrating 1.8.6 => 1.8.5, all we need to do is define an empty\r
+ # #marshal_load() method for Mutex. For 1.8.5 => 1.8.6 we need something\r
+ # dirtier, as seen later on in the initialization code.\r
+ unless Mutex.new.respond_to?(:marshal_load)\r
+ class ::Mutex\r
+ def marshal_load(str)\r
+ return\r
+ end\r
+ end\r
+ end\r
+\r
+ attr_reader :feeds\r
\r
def initialize\r
super\r
- kill_threads\r
if @registry.has_key?(:feeds)\r
+ # When migrating from Ruby 1.8.5 to 1.8.6, dumped Mutexes may render the\r
+ # data unrestorable. If this happens, we patch the data, thus allowing\r
+ # the restore to work.\r
+ #\r
+ # This is actually pretty safe for a number of reasons:\r
+ # * the code is only called if standard marshalling fails\r
+ # * the string we look for is quite unlikely to appear randomly\r
+ # * if the string appears somewhere and the patched string isn't recoverable\r
+ # either, we'll get another (unrecoverable) error, which makes the rss\r
+      #   plugin unusable, just like it was if no recovery was attempted\r
+ # * if the string appears somewhere and the patched string is recoverable,\r
+ # we may get a b0rked feed, which is eventually overwritten by a clean\r
+ # one, so the worst thing that can happen is that a feed update spams\r
+ # the watchers once\r
+ @registry.recovery = Proc.new { |val|\r
+ patched = val.sub(":\v@mutexo:\nMutex", ":\v@mutexo:\vObject")\r
+ ret = Marshal.restore(patched)\r
+ ret.each_value { |blob|\r
+ blob.mutex = nil\r
+ blob\r
+ }\r
+ }\r
+\r
@feeds = @registry[:feeds]\r
+\r
+ @registry.recovery = nil\r
+\r
@feeds.keys.grep(/[A-Z]/) { |k|\r
@feeds[k.downcase] = @feeds[k]\r
@feeds.delete(k)\r
}\r
+ @feeds.each { |k, f|\r
+ f.mutex = Mutex.new unless f.mutex\r
+ f.sanitize_watchers\r
+ parseRss(f) if f.xml\r
+ }\r
else\r
@feeds = Hash.new\r
end\r
+ @watch = Hash.new\r
rewatch_rss\r
end\r
\r
end\r
\r
def cleanup\r
- kill_threads\r
+ stop_watches\r
end\r
\r
def save\r
- @registry[:feeds] = @feeds\r
+ unparsed = Hash.new()\r
+ @feeds.each { |k, f|\r
+ unparsed[k] = f.dup\r
+ # we don't want to save the mutex\r
+ unparsed[k].mutex = nil\r
+ }\r
+ @registry[:feeds] = unparsed\r
end\r
\r
- def kill_threads\r
- @@mutex.synchronize {\r
- # Abort all running threads.\r
- @@watchThreads.each { |url, thread|\r
- debug "Killing thread for #{url}"\r
- thread.kill\r
- }\r
- @@watchThreads = Hash.new\r
+ def stop_watch(handle)\r
+ if @watch.has_key?(handle)\r
+ begin\r
+ debug "Stopping watch #{handle}"\r
+ @bot.timer.remove(@watch[handle])\r
+ @watch.delete(handle)\r
+ rescue => e\r
+ report_problem("Failed to stop watch for #{handle}", e, nil)\r
+ end\r
+ end\r
+ end\r
+\r
+ def stop_watches\r
+ @watch.each_key { |k|\r
+ stop_watch(k)\r
}\r
end\r
\r
when "list"\r
"rss list [#{Bold}handle#{Bold}] : list all rss feeds (matching #{Bold}handle#{Bold})"\r
when "watched"\r
- "rss watched [#{Bold}handle#{Bold}] : list all watched rss feeds (matching #{Bold}handle#{Bold})"\r
+ "rss watched [#{Bold}handle#{Bold}] [in #{Bold}chan#{Bold}]: list all watched rss feeds (matching #{Bold}handle#{Bold}) (in channel #{Bold}chan#{Bold})"\r
+ when "who", "watches", "who watches"\r
+      "rss who watches [#{Bold}handle#{Bold}]: list all watchers for rss feeds (matching #{Bold}handle#{Bold})"\r
when "add"\r
"rss add #{Bold}handle#{Bold} #{Bold}url#{Bold} [#{Bold}type#{Bold}] : add a new rss called #{Bold}handle#{Bold} from url #{Bold}url#{Bold} (of type #{Bold}type#{Bold})"\r
+ when "change"\r
+ "rss change #{Bold}what#{Bold} of #{Bold}handle#{Bold} to #{Bold}new#{Bold} : change the #{Underline}handle#{Underline}, #{Underline}url#{Underline}, #{Underline}type#{Underline} or #{Underline}refresh#{Underline} rate of rss called #{Bold}handle#{Bold} to value #{Bold}new#{Bold}"\r
when /^(del(ete)?|rm)$/\r
"rss del(ete)|rm #{Bold}handle#{Bold} : delete rss feed #{Bold}handle#{Bold}"\r
when "replace"\r
when "forcereplace"\r
"rss forcereplace #{Bold}handle#{Bold} #{Bold}url#{Bold} [#{Bold}type#{Bold}] : replace the url of rss called #{Bold}handle#{Bold} with #{Bold}url#{Bold} (of type #{Bold}type#{Bold})"\r
when "watch"\r
- "rss watch #{Bold}handle#{Bold} [#{Bold}url#{Bold} [#{Bold}type#{Bold}]] : watch rss #{Bold}handle#{Bold} for changes; when the other parameters are present, it will be created if it doesn't exist yet"\r
+ "rss watch #{Bold}handle#{Bold} [#{Bold}url#{Bold} [#{Bold}type#{Bold}]] [in #{Bold}chan#{Bold}]: watch rss #{Bold}handle#{Bold} for changes (in channel #{Bold}chan#{Bold}); when the other parameters are present, the feed will be created if it doesn't exist yet"\r
when /(un|rm)watch/\r
- "rss unwatch|rmwatch #{Bold}handle#{Bold} : stop watching rss #{Bold}handle#{Bold} for changes"\r
+ "rss unwatch|rmwatch #{Bold}handle#{Bold} [in #{Bold}chan#{Bold}]: stop watching rss #{Bold}handle#{Bold} (in channel #{Bold}chan#{Bold}) for changes"\r
when "rewatch"\r
"rss rewatch : restart threads that watch for changes in watched rss"\r
else\r
- "manage RSS feeds: rss show|list|watched|add|del(ete)|rm|(force)replace|watch|unwatch|rmwatch|rewatch"\r
+ "manage RSS feeds: rss show|list|watched|add|change|del(ete)|rm|(force)replace|watch|unwatch|rmwatch|rewatch"\r
end\r
end\r
\r
\r
m.reply "lemme fetch it..."\r
title = items = nil\r
- @@mutex.synchronize {\r
- title, items = fetchRss(feed, m)\r
- }\r
- return unless items\r
+ fetched = fetchRss(feed, m, false)\r
+ return unless fetched or feed.xml\r
+ if not fetched and feed.items\r
+ m.reply "using old data"\r
+ else\r
+ parsed = parseRss(feed, m)\r
+ m.reply "using old data" unless parsed\r
+ end\r
+ return unless feed.items\r
+ title = feed.title\r
+ items = feed.items\r
\r
# We sort the feeds in freshness order (newer ones first)\r
items = freshness_sort(items)\r
def list_rss(m, params)\r
wanted = params[:handle]\r
reply = String.new\r
- @@mutex.synchronize {\r
- @feeds.each { |handle, feed|\r
- next if wanted and !handle.match(/#{wanted}/i)\r
- reply << "#{feed.handle}: #{feed.url} (in format: #{feed.type ? feed.type : 'default'})"\r
- (reply << " (watched)") if feed.watched_by?(m.replyto)\r
- reply << "\n"\r
- }\r
+ @feeds.each { |handle, feed|\r
+ next if wanted and !handle.match(/#{wanted}/i)\r
+ reply << "#{feed.handle}: #{feed.url} (in format: #{feed.type ? feed.type : 'default'})"\r
+ (reply << " refreshing every #{Utils.secs_to_string(feed.refresh_rate)}") if feed.refresh_rate\r
+ (reply << " (watched)") if feed.watched_by?(m.replyto)\r
+ reply << "\n"\r
}\r
if reply.empty?\r
reply = "no feeds found"\r
reply << " matching #{wanted}" if wanted\r
end\r
- m.reply reply\r
+ m.reply reply, :max_lines => reply.length\r
end\r
\r
def watched_rss(m, params)\r
wanted = params[:handle]\r
+ chan = params[:chan] || m.replyto\r
reply = String.new\r
- @@mutex.synchronize {\r
- watchlist.each { |handle, feed|\r
- next if wanted and !handle.match(/#{wanted}/i)\r
- next unless feed.watched_by?(m.replyto)\r
- reply << "#{feed.handle}: #{feed.url} (in format: #{feed.type ? feed.type : 'default'})\n"\r
- }\r
+ watchlist.each { |handle, feed|\r
+ next if wanted and !handle.match(/#{wanted}/i)\r
+ next unless feed.watched_by?(chan)\r
+ reply << "#{feed.handle}: #{feed.url} (in format: #{feed.type ? feed.type : 'default'})"\r
+ (reply << " refreshing every #{Utils.secs_to_string(feed.refresh_rate)}") if feed.refresh_rate\r
+ reply << "\n"\r
+ }\r
+ if reply.empty?\r
+ reply = "no watched feeds"\r
+ reply << " matching #{wanted}" if wanted\r
+ end\r
+ m.reply reply\r
+ end\r
+\r
+ def who_watches(m, params)\r
+ wanted = params[:handle]\r
+ reply = String.new\r
+ watchlist.each { |handle, feed|\r
+ next if wanted and !handle.match(/#{wanted}/i)\r
+ reply << "#{feed.handle}: #{feed.url} (in format: #{feed.type ? feed.type : 'default'})"\r
+ (reply << " refreshing every #{Utils.secs_to_string(feed.refresh_rate)}") if feed.refresh_rate\r
+ reply << ": watched by #{feed.watchers.join(', ')}"\r
+ reply << "\n"\r
}\r
if reply.empty?\r
reply = "no watched feeds"\r
m.reply "You must specify both a handle and an url to add an RSS feed"\r
return\r
end\r
- @@mutex.synchronize {\r
- @feeds[handle.downcase] = RssBlob.new(url,handle,type)\r
- }\r
+ @feeds[handle.downcase] = RssBlob.new(url,handle,type)\r
reply = "Added RSS #{url} named #{handle}"\r
if type\r
reply << " (format: #{type})"\r
return handle\r
end\r
\r
+ def change_rss(m, params)\r
+ handle = params[:handle].downcase\r
+ feed = @feeds.fetch(handle, nil)\r
+ unless feed\r
+ m.reply "No such feed with handle #{handle}"\r
+ return\r
+ end\r
+ case params[:what].intern\r
+ when :handle\r
+ new = params[:new].downcase\r
+ if @feeds.key?(new) and @feeds[new]\r
+ m.reply "There already is a feed with handle #{new}"\r
+ return\r
+ else\r
+ feed.mutex.synchronize do\r
+ @feeds[new] = feed\r
+ @feeds.delete(handle)\r
+ feed.handle = new\r
+ end\r
+ handle = new\r
+ end\r
+ when :url\r
+ new = params[:new]\r
+ feed.mutex.synchronize do\r
+ feed.url = new\r
+ end\r
+ when :format, :type\r
+ new = params[:new]\r
+ new = nil if new == 'default'\r
+ feed.mutex.synchronize do\r
+ feed.type = new\r
+ end\r
+ when :refresh\r
+ new = params[:new].to_i\r
+ new = nil if new == 0\r
+ feed.mutex.synchronize do\r
+ feed.refresh_rate = new\r
+ end\r
+ else\r
+ m.reply "Don't know how to change #{params[:what]} for feeds"\r
+ return\r
+ end\r
+ m.reply "Feed changed:"\r
+ list_rss(m, {:handle => handle})\r
+ end\r
+\r
def del_rss(m, params, pass=false)\r
feed = unwatch_rss(m, params, true)\r
if feed.watched?\r
m.reply "someone else is watching #{feed.handle}, I won't remove it from my list"\r
return\r
end\r
- @@mutex.synchronize {\r
- @feeds.delete(feed.handle.downcase)\r
- }\r
+ @feeds.delete(feed.handle.downcase)\r
m.okay unless pass\r
return\r
end\r
\r
def watch_rss(m, params)\r
handle = params[:handle]\r
+ chan = params[:chan] || m.replyto\r
url = params[:url]\r
type = params[:type]\r
if url\r
add_rss(m, params)\r
end\r
- feed = nil\r
- @@mutex.synchronize {\r
- feed = @feeds.fetch(handle.downcase, nil)\r
- }\r
+ feed = @feeds.fetch(handle.downcase, nil)\r
if feed\r
- @@mutex.synchronize {\r
- if feed.add_watch(m.replyto)\r
- watchRss(feed, m)\r
- m.okay\r
- else\r
- m.reply "Already watching #{feed.handle}"\r
- end\r
- }\r
+ if feed.add_watch(chan)\r
+ watchRss(feed, m)\r
+ m.okay\r
+ else\r
+ m.reply "Already watching #{feed.handle} in #{chan}"\r
+ end\r
else\r
m.reply "Couldn't watch feed #{handle} (no such feed found)"\r
end\r
\r
def unwatch_rss(m, params, pass=false)\r
handle = params[:handle].downcase\r
+ chan = params[:chan] || m.replyto\r
unless @feeds.has_key?(handle)\r
m.reply("dunno that feed")\r
return\r
end\r
feed = @feeds[handle]\r
- if feed.rm_watch(m.replyto)\r
- m.reply "#{m.replyto} has been removed from the watchlist for #{feed.handle}"\r
+ if feed.rm_watch(chan)\r
+ m.reply "#{chan} has been removed from the watchlist for #{feed.handle}"\r
else\r
- m.reply("#{m.replyto} wasn't watching #{feed.handle}") unless pass\r
+ m.reply("#{chan} wasn't watching #{feed.handle}") unless pass\r
end\r
if !feed.watched?\r
- @@mutex.synchronize {\r
- if @@watchThreads[handle].kind_of? Thread\r
- @@watchThreads[handle].kill\r
- debug "rmwatch: Killed thread for #{handle}"\r
- @@watchThreads.delete(handle)\r
- end\r
- }\r
+ stop_watch(handle)\r
end\r
return feed\r
end\r
\r
def rewatch_rss(m=nil, params=nil)\r
- kill_threads\r
+ stop_watches\r
\r
# Read watches from list.\r
watchlist.each{ |handle, feed|\r
\r
private\r
def watchRss(feed, m=nil)\r
- if @@watchThreads.has_key?(feed.handle)\r
+ if @watch.has_key?(feed.handle)\r
report_problem("watcher thread for #{feed.handle} is already running", nil, m)\r
return\r
end\r
- @@watchThreads[feed.handle] = Thread.new do\r
+ status = Hash.new\r
+ status[:failures] = 0\r
+ status[:first_run] = true\r
+ @watch[feed.handle] = @bot.timer.add(0, status) {\r
debug "watcher for #{feed} started"\r
- oldItems = []\r
- firstRun = true\r
- failures = 0\r
- loop do\r
- begin\r
- debug "fetching #{feed}"\r
- title = newItems = nil\r
- @@mutex.synchronize {\r
- title, newItems = fetchRss(feed)\r
- }\r
- unless newItems\r
- debug "no items in feed #{feed}"\r
- failures +=1\r
+ failures = status[:failures]\r
+ first_run = status.delete(:first_run)\r
+ begin\r
+ debug "fetching #{feed}"\r
+ oldxml = feed.xml ? feed.xml.dup : nil\r
+ unless fetchRss(feed)\r
+ failures += 1\r
+ else\r
+ if first_run\r
+ debug "first run for #{feed}, getting items"\r
+ parseRss(feed)\r
+ elsif oldxml and oldxml == feed.xml\r
+ debug "xml for #{feed} didn't change"\r
+ failures -= 1 if failures > 0\r
else\r
- debug "Checking if new items are available for #{feed}"\r
- if firstRun\r
- debug "First run, we'll see next time"\r
- firstRun = false\r
+ if not feed.items\r
+ debug "no previous items in feed #{feed}"\r
+ parseRss(feed)\r
+ failures -= 1 if failures > 0\r
else\r
- otxt = oldItems.map { |item| item.to_s }\r
- dispItems = newItems.reject { |item|\r
- otxt.include?(item.to_s)\r
+ # This one is used for debugging\r
+ otxt = []\r
+\r
+ # These are used for checking new items vs old ones\r
+ uid_opts = { :show_updated => @bot.config['rss.show_updated'] }\r
+ oids = Set.new feed.items.map { |item|\r
+ uid = RSS.item_uid_for_bot(item, uid_opts)\r
+ otxt << item.to_s\r
+ debug [uid, item].inspect\r
+ debug [uid, otxt.last].inspect\r
+ uid\r
}\r
- if dispItems.length > 0\r
- debug "Found #{dispItems.length} new items in #{feed}"\r
- # When displaying watched feeds, publish them from older to newer\r
- dispItems.reverse.each { |item|\r
- @@mutex.synchronize {\r
+\r
+ unless parseRss(feed)\r
+ debug "no items in feed #{feed}"\r
+ failures += 1\r
+ else\r
+ debug "Checking if new items are available for #{feed}"\r
+ failures -= 1 if failures > 0\r
+ # debug "Old:"\r
+ # debug oldxml\r
+ # debug "New:"\r
+ # debug feed.xml\r
+\r
+ dispItems = feed.items.reject { |item|\r
+ uid = RSS.item_uid_for_bot(item, uid_opts)\r
+ txt = item.to_s\r
+ if oids.include?(uid)\r
+ debug "rejecting old #{uid} #{item.inspect}"\r
+ debug [uid, txt].inspect\r
+ true\r
+ else\r
+ debug "accepting new #{uid} #{item.inspect}"\r
+ debug [uid, txt].inspect\r
+ warn "same text! #{txt}" if otxt.include?(txt)\r
+ false\r
+ end\r
+ }\r
+\r
+ if dispItems.length > 0\r
+ debug "Found #{dispItems.length} new items in #{feed}"\r
+ # When displaying watched feeds, publish them from older to newer\r
+ dispItems.reverse.each { |item|\r
printFormattedRss(feed, item)\r
}\r
- }\r
- else\r
- debug "No new items found in #{feed}"\r
+ else\r
+ debug "No new items found in #{feed}"\r
+ end\r
end\r
end\r
- oldItems = newItems.dup\r
end\r
- rescue Exception => e\r
- error "Error watching #{feed}: #{e.inspect}"\r
- debug e.backtrace.join("\n")\r
- failures += 1\r
end\r
+ rescue Exception => e\r
+ error "Error watching #{feed}: #{e.inspect}"\r
+ debug e.backtrace.join("\n")\r
+ failures += 1\r
+ end\r
+\r
+ status[:failures] = failures\r
\r
- seconds = @bot.config['rss.thread_sleep'] * (failures + 1)\r
+ feed.mutex.synchronize do\r
+ seconds = (feed.refresh_rate || @bot.config['rss.thread_sleep']) * (failures + 1)\r
seconds += seconds * (rand(100)-50)/100\r
debug "watcher for #{feed} going to sleep #{seconds} seconds.."\r
- sleep seconds\r
+ @bot.timer.reschedule(@watch[feed.handle], seconds)\r
end\r
- end\r
+ }\r
+ debug "watcher for #{feed} added"\r
end\r
\r
def printFormattedRss(feed, item, opts=nil)\r
if opts.key?(:date) && opts[:date]\r
if item.respond_to?(:pubDate) \r
if item.pubDate.class <= Time\r
- date = item.pubDate.strftime("%Y/%m/%d %H.%M.%S")\r
+ date = item.pubDate.strftime("%Y/%m/%d %H:%M")\r
else\r
date = item.pubDate.to_s\r
end\r
elsif item.respond_to?(:date)\r
if item.date.class <= Time\r
- date = item.date.strftime("%Y/%m/%d %H.%M.%S")\r
+ date = item.date.strftime("%Y/%m/%d %H:%M")\r
else\r
date = item.date.to_s\r
end\r
date += " :: "\r
end\r
end\r
- title = "#{Bold}#{item.title.chomp.riphtml}#{Bold}" if item.title\r
- desc = item.description.gsub(/\s+/,' ').strip.riphtml.shorten(@bot.config['rss.text_max']) if item.description\r
+\r
+ title = "#{Bold}#{item.title.ircify_html}#{Bold}" if item.title\r
+\r
+ desc = item.description.ircify_html(:a_href => :link_out) if item.description\r
+\r
link = item.link.chomp if item.link\r
- places.each { |loc|\r
- case feed.type\r
- when 'blog'\r
- @bot.say loc, "#{handle}#{date}#{item.category.content} blogged at #{link}"\r
- @bot.say loc, "#{handle}#{title} - #{desc}"\r
- when 'forum'\r
- @bot.say loc, "#{handle}#{date}#{title}#{' @ ' if item.title && item.link}#{link}"\r
- when 'wiki'\r
- @bot.say loc, "#{handle}#{date}#{item.title} has been edited by #{item.dc_creator}. #{desc} #{link}"\r
- when 'gmame'\r
- @bot.say loc, "#{handle}#{date}Message #{title} sent by #{item.dc_creator}. #{desc}"\r
- when 'trac'\r
- @bot.say loc, "#{handle}#{date}#{title} @ #{link}"\r
- unless item.title =~ /^Changeset \[(\d+)\]/\r
- @bot.say loc, "#{handle}#{date}#{desc}"\r
- end\r
- else\r
- @bot.say loc, "#{handle}#{date}#{title}#{' @ ' if item.title && item.link}#{link}"\r
+\r
+ debug item.inspect\r
+ category = item.dc_subject rescue item.category rescue nil\r
+ author = item.dc_creator rescue item.author rescue nil\r
+\r
+ line1 = nil\r
+ line2 = nil\r
+\r
+ at = ((item.title && item.link) ? ' @ ' : '')\r
+ case feed.type\r
+ when 'blog'\r
+ abt = category ? "about #{category} " : ""\r
+ line1 = "#{handle}#{date}#{author} blogged #{abt}at #{link}"\r
+ line2 = "#{handle}#{title} - #{desc}"\r
+ when 'forum'\r
+ line1 = "#{handle}#{date}#{title}#{at}#{link}"\r
+ when 'wiki'\r
+ line1 = "#{handle}#{date}#{title}#{at}#{link} has been edited by #{author}. #{desc}"\r
+ when 'gmane'\r
+ line1 = "#{handle}#{date}Message #{title} sent by #{author}. #{desc}"\r
+ when 'trac'\r
+ line1 = "#{handle}#{date}#{title} @ #{link}"\r
+ unless item.title =~ /^Changeset \[(\d+)\]/\r
+ line2 = "#{handle}#{date}#{desc}"\r
end\r
+ when '/.'\r
+ dept = "(from the #{item.slash_department} dept) " rescue nil\r
+ sec = " in section #{item.slash_section}" rescue nil\r
+\r
+ line1 = "#{handle}#{date}#{dept}#{title}#{at}#{link} (posted by #{author}#{sec})"\r
+ else\r
+ line1 = "#{handle}#{date}#{title}#{at}#{link}"\r
+ end\r
+ places.each { |loc|\r
+ @bot.say loc, line1, :overlong => :truncate\r
+ next unless line2\r
+ @bot.say loc, line2, :overlong => :truncate\r
}\r
end\r
\r
- def fetchRss(feed, m=nil)\r
+ def fetchRss(feed, m=nil, cache=true)\r
begin\r
# Use 60 sec timeout, cause the default is too low\r
- # Do not use get_cached for RSS until we have proper cache handling\r
- # xml = @bot.httputil.get_cached(feed.url,60,60)\r
- xml = @bot.httputil.get_cached(feed.url, 60, 60)\r
+ xml = @bot.httputil.get(feed.url,\r
+ :read_timeout => 60,\r
+ :open_timeout => 60,\r
+ :cache => cache)\r
rescue URI::InvalidURIError, URI::BadURIError => e\r
report_problem("invalid rss feed #{feed.url}", e, m)\r
- return\r
+ return nil\r
rescue => e\r
report_problem("error getting #{feed.url}", e, m)\r
- return\r
+ return nil\r
end\r
debug "fetched #{feed}"\r
unless xml\r
report_problem("reading feed #{feed} failed", nil, m)\r
- return\r
+ return nil\r
end\r
+ # Ok, 0.9 feeds are not supported, maybe because\r
+ # Netscape happily removed the DTD. So what we do is just to\r
+ # reassign the 0.9 RDFs to 1.0, and hope it goes right.\r
+ xml.gsub!("xmlns=\"http://my.netscape.com/rdf/simple/0.9/\"",\r
+ "xmlns=\"http://purl.org/rss/1.0/\"")\r
+ feed.mutex.synchronize do\r
+ feed.xml = xml\r
+ end\r
+ return true\r
+ end\r
\r
- begin\r
- ## do validate parse\r
- rss = RSS::Parser.parse(xml)\r
- debug "parsed #{feed}"\r
- rescue RSS::InvalidRSSError\r
- ## do non validate parse for invalid RSS 1.0\r
+ def parseRss(feed, m=nil)\r
+ return nil unless feed.xml\r
+ feed.mutex.synchronize do\r
+ xml = feed.xml\r
begin\r
- rss = RSS::Parser.parse(xml, false)\r
+ ## do validate parse\r
+ rss = RSS::Parser.parse(xml)\r
+ debug "parsed and validated #{feed}"\r
+ rescue RSS::InvalidRSSError\r
+ ## do non validate parse for invalid RSS 1.0\r
+ begin\r
+ rss = RSS::Parser.parse(xml, false)\r
+ debug "parsed but not validated #{feed}"\r
+ rescue RSS::Error => e\r
+ report_problem("parsing rss stream failed, whoops =(", e, m)\r
+ return nil\r
+ end\r
rescue RSS::Error => e\r
- report_problem("parsing rss stream failed, whoops =(", e, m)\r
- return\r
+ report_problem("parsing rss stream failed, oioi", e, m)\r
+ return nil\r
+ rescue => e\r
+ report_problem("processing error occured, sorry =(", e, m)\r
+ return nil\r
end\r
- rescue RSS::Error => e\r
- report_problem("parsing rss stream failed, oioi", e, m)\r
- return\r
- rescue => e\r
- report_problem("processing error occured, sorry =(", e, m)\r
- return\r
- end\r
- items = []\r
- if rss.nil?\r
- report_problem("#{feed} does not include RSS 1.0 or 0.9x/2.0", nil, m)\r
- else\r
- begin\r
- rss.output_encoding = 'UTF-8'\r
- rescue RSS::UnknownConvertMethod => e\r
- report_problem("bah! something went wrong =(", e, m)\r
- return\r
- end\r
- rss.channel.title ||= "Unknown"\r
- title = rss.channel.title\r
- rss.items.each do |item|\r
- item.title ||= "Unknown"\r
- items << item\r
+ items = []\r
+ if rss.nil?\r
+ report_problem("#{feed} does not include RSS 1.0 or 0.9x/2.0", nil, m)\r
+ else\r
+ begin\r
+ rss.output_encoding = 'UTF-8'\r
+ rescue RSS::UnknownConvertMethod => e\r
+ report_problem("bah! something went wrong =(", e, m)\r
+ return nil\r
+ end\r
+ rss.channel.title ||= "Unknown"\r
+ title = rss.channel.title\r
+ rss.items.each do |item|\r
+ item.title ||= "Unknown"\r
+ items << item\r
+ end\r
end\r
- end\r
\r
- if items.empty?\r
- report_problem("no items found in the feed, maybe try weed?", e, m)\r
- return\r
+ if items.empty?\r
+ report_problem("no items found in the feed, maybe try weed?", e, m)\r
+ return nil\r
+ end\r
+ feed.title = title\r
+ feed.items = items\r
+ return true\r
end\r
- return [title, items]\r
end\r
end\r
\r
:defaults => {:limit => 5}\r
plugin.map 'rss list :handle',\r
:action => 'list_rss',\r
- :defaults => {:handle => nil}\r
-plugin.map 'rss watched :handle',\r
+ :defaults => {:handle => nil}\r
+plugin.map 'rss watched :handle [in :chan]',\r
:action => 'watched_rss',\r
- :defaults => {:handle => nil}\r
+ :defaults => {:handle => nil}\r
+plugin.map 'rss who watches :handle',\r
+ :action => 'who_watches',\r
+ :defaults => {:handle => nil}\r
plugin.map 'rss add :handle :url :type',\r
:action => 'add_rss',\r
:defaults => {:type => nil}\r
+plugin.map 'rss change :what of :handle to :new',\r
+ :action => 'change_rss',\r
+ :requirements => { :what => /handle|url|format|type|refresh/ }\r
+plugin.map 'rss change :what for :handle to :new',\r
+ :action => 'change_rss',\r
+  :requirements => { :what => /handle|url|format|type|refresh/ }\r
plugin.map 'rss del :handle',\r
:action => 'del_rss'\r
plugin.map 'rss delete :handle',\r
plugin.map 'rss forcereplace :handle :url :type',\r
:action => 'forcereplace_rss',\r
:defaults => {:type => nil}\r
-plugin.map 'rss watch :handle :url :type',\r
+plugin.map 'rss watch :handle :url :type [in :chan]',\r
:action => 'watch_rss',\r
:defaults => {:url => nil, :type => nil}\r
-plugin.map 'rss unwatch :handle',\r
+plugin.map 'rss unwatch :handle [in :chan]',\r
:action => 'unwatch_rss'\r
-plugin.map 'rss rmwatch :handle',\r
+plugin.map 'rss rmwatch :handle [in :chan]',\r
:action => 'unwatch_rss'\r
plugin.map 'rss rewatch',\r
:action => 'rewatch_rss'\r