-rw-r--r--  data/rbot/plugins/url.rb        |  6
-rw-r--r--  lib/rbot/core/utils/httputil.rb | 38
2 files changed, 23 insertions, 21 deletions
diff --git a/data/rbot/plugins/url.rb b/data/rbot/plugins/url.rb
index 0a5ef74e..8a46af59 100644
--- a/data/rbot/plugins/url.rb
+++ b/data/rbot/plugins/url.rb
@@ -1,8 +1,9 @@
 Url = Struct.new("Url", :channel, :nick, :time, :url)
-TITLE_RE = /<\s*?title\s*?>(.+?)<\s*?\/title\s*?>/im
-LINK_INFO = "[Link Info]"
 
 class UrlPlugin < Plugin
+  TITLE_RE = /<\s*?title\s*?>(.+?)<\s*?\/title\s*?>/im
+  LINK_INFO = "[Link Info]"
+
   BotConfig.register BotConfigIntegerValue.new('url.max_urls',
     :default => 100, :validate => Proc.new{|v| v > 0},
     :desc => "Maximum number of urls to store. New urls replace oldest ones.")
@@ -166,6 +167,7 @@ class UrlPlugin < Plugin
     end
   end
 end
+
 plugin = UrlPlugin.new
 plugin.map 'urls search :channel :limit :string', :action => 'search',
            :defaults => {:limit => 4},
diff --git a/lib/rbot/core/utils/httputil.rb b/lib/rbot/core/utils/httputil.rb
index e0f93953..b4219f66 100644
--- a/lib/rbot/core/utils/httputil.rb
+++ b/lib/rbot/core/utils/httputil.rb
@@ -26,9 +26,9 @@ end
 require 'stringio'
 require 'zlib'
 
-module ::Net
-  class HTTPResponse
-    attr_accessor :no_cache
+module ::Net
+  class HTTPResponse
+    attr_accessor :no_cache
     if !instance_methods.include?('raw_body')
       alias :raw_body :body
     end
@@ -88,22 +88,22 @@ module ::Net
       return self.body_to_utf(self.decompress_body(self.raw_body))
     end
 
-    # Read chunks from the body until we have at least _size_ bytes, yielding
-    # the partial text at each chunk. Return the partial body.
-    def partial_body(size=0, &block)
+    # Read chunks from the body until we have at least _size_ bytes, yielding
+    # the partial text at each chunk. Return the partial body.
+    def partial_body(size=0, &block)
       self.no_cache = true
-      partial = String.new
+      partial = String.new
 
-      self.read_body { |chunk|
-        partial << chunk
-        yield self.body_to_utf(partial) if block_given?
-        break if size and size > 0 and partial.length >= size
-      }
+      self.read_body { |chunk|
+        partial << chunk
+        yield self.body_to_utf(self.decompress_body(partial)) if block_given?
+        break if size and size > 0 and partial.length >= size
+      }
 
-      return self.body_to_utf(partial)
-    end
-  end
+      return self.body_to_utf(self.decompress_body(partial))
+    end
+  end
 end
 
 Net::HTTP.version_1_2
@@ -244,12 +244,12 @@ class HttpUtil
       'Accept-Encoding' => 'gzip;q=1, identity;q=0.8, *;q=0.2',
       'User-Agent' =>
         "rbot http util #{$version} (http://linuxbrit.co.uk/rbot/)"
-    }
+    }
     debug "starting http cache cleanup timer"
     @timer = @bot.timer.add(300) {
       self.remove_stale_cache unless @bot.config['http.no_expire_cache']
     }
-  end
+  end
 
   def cleanup
     debug 'stopping http cache cleanup timer'
@@ -305,7 +305,7 @@ class HttpUtil
   # proxying based on the bot's proxy configuration.
   # This will include per-url proxy configuration based on the bot config
   # +http_proxy_include/exclude+ options.
-
+
   def get_proxy(uri, options = {})
     opts = {
       :read_timeout => 10,
@@ -444,7 +444,7 @@ class HttpUtil
         return handle_response(uri, cached.response, opts, &block)
       end
     end
-
+
     headers = @headers.dup.merge(opts[:headers] || {})
     headers['Range'] = opts[:range] if opts[:range]
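
The substantive change in httputil.rb is that partial_body() now runs the accumulated buffer through decompress_body() before converting it to UTF-8, both for each yielded partial and for the final return value; since HttpUtil advertises 'Accept-Encoding: gzip', a caller scanning the partial text (for example the url plugin's TITLE_RE match) would otherwise be matching against raw gzip bytes. The following is a minimal standalone sketch of that pattern, not rbot code: it uses Zlib::Inflate directly instead of rbot's decompress_body/body_to_utf helpers, and every name in it (partial_decoded, html, gzipped) is made up for illustration.

# Sketch only: accumulate compressed chunks, decompress the prefix read so far,
# and hand decoded text to the caller after every chunk.
require 'zlib'

# Stand-in for a gzip-encoded HTTP body.
html = "<html><head><title>Example page</title></head><body>" \
       "#{(1..500).map { |i| "line #{i}" }.join("\n")}</body></html>"
gzipped = Zlib.gzip(html)

# Yield the decoded text after each chunk; stop once at least +size+ decoded
# bytes are available (0 means "read everything"), like partial_body's size arg.
def partial_decoded(raw, size: 0, chunk_size: 64)
  inflater = Zlib::Inflate.new(Zlib::MAX_WBITS + 32) # +32: accept gzip or zlib input
  decoded = String.new
  offset = 0
  while offset < raw.bytesize
    decoded << inflater.inflate(raw.byteslice(offset, chunk_size))
    offset += chunk_size
    yield decoded if block_given?
    break if size > 0 && decoded.length >= size
  end
  decoded
ensure
  inflater.close if inflater
end

title = nil
partial_decoded(gzipped, size: 512) do |text|
  # The partial text is already decompressed, so a title scraper can match it.
  title ||= text[/<title>(.+?)<\/title>/im, 1]
end
puts title   # => "Example page"

In the commit itself the same idea is expressed by wrapping the accumulated partial in self.decompress_body(...) inside the read_body loop and again for the returned value, so consumers of partial_body never see compressed bytes.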