X-Git-Url: https://git.netwichtig.de/gitweb/?a=blobdiff_plain;f=data%2Frbot%2Fplugins%2Furl.rb;h=269227a3c468e21d643d83e8f66296be5e6128fb;hb=69db4133c5ccfa41c35b43c67fce1d5ff640bfd5;hp=f9e64efbc55e42e85104881459a21b88dd3d0c6b;hpb=b4d55669782d34c59688e7413402ab489bb0791e;p=user%2Fhenk%2Fcode%2Fruby%2Frbot.git

diff --git a/data/rbot/plugins/url.rb b/data/rbot/plugins/url.rb
index f9e64efb..269227a3 100644
--- a/data/rbot/plugins/url.rb
+++ b/data/rbot/plugins/url.rb
@@ -1,9 +1,12 @@
-require 'uri'
+define_structure :Url, :channel, :nick, :time, :url, :info
 
-Url = Struct.new("Url", :channel, :nick, :time, :url)
-TITLE_RE = /<\s*?title\s*?>(.+?)<\s*?\/title\s*?>/im
+class ::UrlLinkError < RuntimeError
+end
 
 class UrlPlugin < Plugin
+  TITLE_RE = /<\s*?title\s*?>(.+?)<\s*?\/title\s*?>/im
+  LINK_INFO = "[Link Info]"
+
   BotConfig.register BotConfigIntegerValue.new('url.max_urls',
     :default => 100, :validate => Proc.new{|v| v > 0},
     :desc => "Maximum number of urls to store. New urls replace oldest ones.")
@@ -13,6 +16,13 @@ class UrlPlugin < Plugin
   BotConfig.register BotConfigBooleanValue.new('url.titles_only',
     :default => false,
     :desc => "Only show info for links that have <title> tags (in other words, don't display info for jpegs, mpegs, etc.)")
+  BotConfig.register BotConfigBooleanValue.new('url.first_par',
+    :default => false,
+    :desc => "Also try to get the first paragraph of a web page")
+  BotConfig.register BotConfigBooleanValue.new('url.info_on_list',
+    :default => false,
+    :desc => "Show link info when listing/searching for urls")
+
 
   def initialize
     super
@@ -25,55 +35,92 @@ class UrlPlugin < Plugin
 
   def get_title_from_html(pagedata)
     return unless TITLE_RE.match(pagedata)
-    title = $1.strip.gsub(/\s*\n+\s*/, " ")
-    title = Utils.decode_html_entities title
-    "title: #{title}"
+    $1.ircify_html
   end
 
-  def get_title_for_url(uri_str)
+  def get_title_for_url(uri_str, nick = nil, channel = nil)
     url = uri_str.kind_of?(URI) ? uri_str : URI.parse(uri_str)
     return if url.scheme !~ /https?/
 
+    logopts = Hash.new
+    logopts[:nick] = nick if nick
+    logopts[:channel] = channel if channel
+
     title = nil
+    extra = String.new
+
     begin
-      @bot.httputil.get_response(url) { |response|
-        case response
+      debug "+ getting #{url.request_uri}"
+      @bot.httputil.get_response(url) { |resp|
+        case resp
         when Net::HTTPSuccess
-          if response['content-type'] =~ /^text\//
-            # since the content is 'text/*' and is small enough to
-            # be a webpage, retrieve the title from the page
-            debug "+ getting #{url.request_uri}"
-
-            # we look for the title in the first 4k bytes
-            response.partial_body(@bot.config['http.info_bytes']) { |part|
-              title = get_title_from_html(part)
-              return title if title
-            }
-            # if nothing was found, provide more basic info
+
+          debug resp.to_hash
+
+          if resp['content-type'] =~ /^text\/|(?:x|ht)ml/
+            # The page is text or HTML, so we can try finding a title and, if
+            # requested, the first par.
+            #
+            # We act differently depending on whether we want the first par or
+            # not: in the first case we download the initial part and then parse
+            # it; in the second case we only download as much as we need to find
+            # the title
+            #
+            if @bot.config['url.first_par']
+              partial = resp.partial_body(@bot.config['http.info_bytes'])
+              logopts[:title] = title = get_title_from_html(partial)
+              first_par = Utils.ircify_first_html_par(partial, :strip => title)
+              unless first_par.empty?
+                logopts[:extra] = first_par
+                extra << ", #{Bold}text#{Bold}: #{first_par}"
+              end
+              call_event(:url_added, url.to_s, logopts)
+              return "#{Bold}title#{Bold}: #{title}#{extra}" if title
+            else
+              resp.partial_body(@bot.config['http.info_bytes']) { |part|
+                logopts[:title] = title = get_title_from_html(part)
+                call_event(:url_added, url.to_s, logopts)
+                return "#{Bold}title#{Bold}: #{title}" if title
+              }
+            end
+            # if nothing was found, provide more basic info, as for non-html pages
+          else
+            resp.no_cache = true
           end
-          debug response.to_hash.inspect
+
+          enc = resp['content-encoding']
+          logopts[:extra] = String.new
+          logopts[:extra] << "Content Type: #{resp['content-type']}"
+          if enc
+            logopts[:extra] << ", encoding: #{enc}"
+            extra << ", #{Bold}encoding#{Bold}: #{enc}"
+          end
+
           unless @bot.config['url.titles_only']
             # content doesn't have title, just display info.
-            size = response['content-length'].gsub(/(\d)(?=\d{3}+(?:\.|$))(\d{3}\..*)?/,'\1,\2') rescue nil
-            size = size ? ", size: #{size} bytes" : ""
-            return "type: #{response['content-type']}#{size}"
+            size = resp['content-length'].gsub(/(\d)(?=\d{3}+(?:\.|$))(\d{3}\..*)?/,'\1,\2') rescue nil
+            if size
+              logopts[:extra] << ", size: #{size} bytes"
+              size = ", #{Bold}size#{Bold}: #{size} bytes"
+            end
+            call_event(:url_added, url.to_s, logopts)
+            return "#{Bold}type#{Bold}: #{resp['content-type']}#{size}#{extra}"
           end
-        when Net::HTTPResponse
-          return "Error getting link (#{response.code} - #{response.message})"
+          call_event(:url_added, url.to_s, logopts)
         else
-          raise response
+          raise UrlLinkError, "getting link (#{resp.code} - #{resp.message})"
         end
       }
-    rescue Object => e
-      if e.class <= StandardError
-        error e.inspect
-        debug e.backtrace.join("\n")
+      return nil
+    rescue Exception => e
+      case e
+      when UrlLinkError
+        raise e
+      else
+        error e
+        raise "connecting to site/processing information (#{e.message})"
       end
-
-      msg = e.respond_to?(:message) ? e.message : e.to_s
-      return "Error connecting to site (#{e.message})"
     end
   end
 
@@ -86,19 +133,20 @@ class UrlPlugin < Plugin
       urlstr = $1
       list = @registry[m.target]
 
+      title = nil
       if @bot.config['url.display_link_info']
        Thread.start do
           debug "Getting title for #{urlstr}..."
           begin
-            title = get_title_for_url urlstr
+            title = get_title_for_url urlstr, m.source.nick, m.channel
             if title
-              m.reply "[Link Info] #{title}"
+              m.reply "#{LINK_INFO} #{title}", :overlong => :truncate
               debug "Title found!"
             else
               debug "Title not found!"
end rescue => e - debug "Failed: #{e}" + m.reply "Error #{e.message}" end end end @@ -106,7 +154,7 @@ class UrlPlugin < Plugin # check to see if this url is already listed return if list.find {|u| u.url == urlstr } - url = Url.new(m.target, m.sourcenick, Time.new, urlstr) + url = Url.new(m.target, m.sourcenick, Time.new, urlstr, title) debug "#{list.length} urls so far" if list.length > @bot.config['url.max_urls'] list.pop @@ -119,6 +167,31 @@ class UrlPlugin < Plugin end end + def reply_urls(opts={}) + list = opts[:list] + max = opts[:max] + channel = opts[:channel] + m = opts[:msg] + return unless list and max and m + list[0..(max-1)].each do |url| + disp = "[#{url.time.strftime('%Y/%m/%d %H:%M:%S')}] <#{url.nick}> #{url.url}" + if @bot.config['url.info_on_list'] + title = url.info || get_title_for_url(url.url, url.nick, channel) rescue nil + # If the url info was missing and we now have some, try to upgrade it + if channel and title and not url.info + ll = @registry[channel] + debug ll + if el = ll.find { |u| u.url == url.url } + el.info = title + @registry[channel] = ll + end + end + disp << " --> #{title}" if title + end + m.reply disp, :overlong => :truncate + end + end + def urls(m, params) channel = params[:channel] ? params[:channel] : m.target max = params[:limit].to_i @@ -128,9 +201,7 @@ class UrlPlugin < Plugin if list.empty? m.reply "no urls seen yet for channel #{channel}" else - list[0..(max-1)].each do |url| - m.reply "[#{url.time.strftime('%Y/%m/%d %H:%M:%S')}] <#{url.nick}> #{url.url}" - end + reply_urls :msg => m, :channel => channel, :list => list, :max => max end end @@ -142,17 +213,17 @@ class UrlPlugin < Plugin max = 1 if max < 1 regex = Regexp.new(string, Regexp::IGNORECASE) list = @registry[channel].find_all {|url| - regex.match(url.url) || regex.match(url.nick) + regex.match(url.url) || regex.match(url.nick) || + (@bot.config['url.info_on_list'] && regex.match(url.info)) } if list.empty? m.reply "no matches for channel #{channel}" else - list[0..(max-1)].each do |url| - m.reply "[#{url.time.strftime('%Y/%m/%d %H:%M:%S')}] <#{url.nick}> #{url.url}" - end + reply_urls :msg => m, :channel => channel, :list => list, :max => max end end end + plugin = UrlPlugin.new plugin.map 'urls search :channel :limit :string', :action => 'search', :defaults => {:limit => 4},
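
A note on two regexes that are easy to misread in the diff above: TITLE_RE (kept, but moved inside the class in the new revision) and the content-length gsub in get_title_for_url. The sketch below is standalone plain Ruby run outside rbot, so it is only an illustration: the page string is made up, and the old strip/gsub cleanup from the removed lines stands in for rbot's ircify_html helper, which is not available outside the bot.

# Hypothetical standalone demo, not part of the plugin or of rbot.
TITLE_RE = /<\s*?title\s*?>(.+?)<\s*?\/title\s*?>/im

page = "<html><head><title>\nExample Page\n</title></head><body></body></html>"
if TITLE_RE.match(page)
  # The plugin feeds $1 to ircify_html; here we approximate it with the
  # whitespace cleanup used by the code this commit removes.
  title = $1.strip.gsub(/\s*\n+\s*/, " ")
  puts "title: #{title}"      # => title: Example Page
end

# The content-length formatter only inserts thousands separators:
puts "1234567".gsub(/(\d)(?=\d{3}+(?:\.|$))(\d{3}\..*)?/, '\1,\2')   # => 1,234,567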