5 # :title: rbot HTTP provider
7 # Author:: Tom Gilbert <tom@linuxbrit.co.uk>
8 # Author:: Giuseppe "Oblomov" Bilotta <giuseppe.bilotta@gmail.com>
9 # Author:: Dmitry "jsn" Kim <dmitry point kim at gmail point com>
18 error "Couldn't load 'net/https': #{e}"
19 error "Secured HTTP connections will fail"
20 # give a nicer error than "undefined method `use_ssl='"
21 ::Net::HTTP.class_eval <<-EOC
22 define_method :use_ssl= do |val|
23 # does anybody really set it to false?
25 raise _("I can't do secure HTTP, sorry (%{msg})") % {
32 # To handle Gzipped pages
38 attr_accessor :no_cache
39 unless method_defined? :raw_body
43 def body_charset(str=self.raw_body)
44 ctype = self['content-type'] || 'text/html'
45 return nil unless ctype =~ /^text/i || ctype =~ /x(ht)?ml/i
47 charsets = ['ISO-8859-1'] # should be in config
49 if ctype.match(/charset=["']?([^\s"']+)["']?/i)
51 debug "charset #{charsets.last} added from header"
54 # str might be invalid utf-8 that will crash on the pattern match:
55 str.encode!('UTF-8', 'UTF-8', :invalid => :replace)
57 when /<\?xml\s[^>]*encoding=['"]([^\s"'>]+)["'][^>]*\?>/i
59 debug "xml charset #{charsets.last} added from xml pi"
60 when /<(meta\s[^>]*http-equiv=["']?Content-Type["']?[^>]*)>/i
62 if meta =~ /charset=['"]?([^\s'";]+)['"]?/
64 debug "html charset #{charsets.last} added from meta"
71 charsets = self.body_charset(str) or return str
73 charsets.reverse_each do |charset|
75 debug "try decoding using #{charset}"
76 str.force_encoding(charset)
77 tmp = str.encode('UTF-16le', :invalid => :replace, :replace => '').encode('UTF-8')
83 error 'failed to use encoding'
91 def decompress_body(str)
92 method = self['content-encoding']
96 when /gzip/ # Matches gzip, x-gzip, and the non-rfc-compliant gzip;q=\d sent by some servers
97 debug "gunzipping body"
99 return Zlib::GzipReader.new(StringIO.new(str)).read
100 rescue Zlib::Error => e
101 # If we can't unpack the whole stream (e.g. because we're doing a
103 debug "full gunzipping failed (#{e}), trying to recover as much as possible"
105 ret.force_encoding(Encoding::ASCII_8BIT)
107 Zlib::GzipReader.new(StringIO.new(str)).each_byte { |byte|
115 debug "inflating body"
116 # From http://www.koders.com/ruby/fid927B4382397E5115AC0ABE21181AB5C1CBDD5C17.aspx?s=thread:
117 # -MAX_WBITS stops zlib from looking for a zlib header
118 inflater = Zlib::Inflate.new(-Zlib::MAX_WBITS)
120 return inflater.inflate(str)
121 rescue Zlib::Error => e
124 # debug "full inflation failed (#{e}), trying to recover as much as possible"
126 when /^(?:iso-8859-\d+|windows-\d+|utf-8|utf8)$/i
127 # B0rked servers (Freshmeat being one of them) sometimes return the charset
128 # in the content-encoding; in this case we assume that the document has
129 # a standard content-encoding
130 old_hsh = self.to_hash
131 self['content-type']= self['content-type']+"; charset="+method.downcase
132 warning "Charset vs content-encoding confusion, trying to recover: from\n#{old_hsh.pretty_inspect}to\n#{self.to_hash.pretty_inspect}"
136 raise "Unhandled content encoding #{method}"
141 return self.body_to_utf(self.decompress_body(self.raw_body))
144 # Read chunks from the body until we have at least _size_ bytes, yielding
145 # the partial text at each chunk. Return the partial body.
146 def partial_body(size=0, &block)
151 debug "using body() as partial"
153 yield self.body_to_utf(self.decompress_body(partial)) if block_given?
155 debug "disabling cache"
157 self.read_body { |chunk|
159 yield self.body_to_utf(self.decompress_body(partial)) if block_given?
160 break if size and size > 0 and partial.length >= size
164 return self.body_to_utf(self.decompress_body(partial))
169 Net::HTTP.version_1_2
174 # class for making http requests easier (mainly for plugins to use)
175 # this class can check the bot proxy configuration to determine if a proxy
176 # needs to be used, which includes support for per-url proxy configuration.
178 Bot::Config.register Bot::Config::IntegerValue.new('http.read_timeout',
179 :default => 10, :desc => "Default read timeout for HTTP connections")
180 Bot::Config.register Bot::Config::IntegerValue.new('http.open_timeout',
181 :default => 20, :desc => "Default open timeout for HTTP connections")
182 Bot::Config.register Bot::Config::BooleanValue.new('http.use_proxy',
183 :default => false, :desc => "should a proxy be used for HTTP requests?")
184 Bot::Config.register Bot::Config::StringValue.new('http.proxy_uri', :default => false,
185 :desc => "Proxy server to use for HTTP requests (URI, e.g http://proxy.host:port)")
186 Bot::Config.register Bot::Config::StringValue.new('http.proxy_user',
188 :desc => "User for authenticating with the http proxy (if required)")
189 Bot::Config.register Bot::Config::StringValue.new('http.proxy_pass',
191 :desc => "Password for authenticating with the http proxy (if required)")
192 Bot::Config.register Bot::Config::ArrayValue.new('http.proxy_include',
194 :desc => "List of regexps to check against a URI's hostname/ip to see if we should use the proxy to access this URI. All URIs are proxied by default if the proxy is set, so this is only required to re-include URIs that might have been excluded by the exclude list. e.g. exclude /.*\.foo\.com/, include bar\.foo\.com")
195 Bot::Config.register Bot::Config::ArrayValue.new('http.proxy_exclude',
197 :desc => "List of regexps to check against a URI's hostname/ip to see if we should use avoid the proxy to access this URI and access it directly")
198 Bot::Config.register Bot::Config::IntegerValue.new('http.max_redir',
200 :desc => "Maximum number of redirections to be used when getting a document")
201 Bot::Config.register Bot::Config::IntegerValue.new('http.expire_time',
203 :desc => "After how many minutes since last use a cached document is considered to be expired")
204 Bot::Config.register Bot::Config::IntegerValue.new('http.max_cache_time',
206 :desc => "After how many minutes since first use a cached document is considered to be expired")
207 Bot::Config.register Bot::Config::BooleanValue.new('http.no_expire_cache',
209 :desc => "Set this to true if you want the bot to never expire the cached pages")
210 Bot::Config.register Bot::Config::IntegerValue.new('http.info_bytes',
212 :desc => "How many bytes to download from a web page to find some information. Set to 0 to let the bot download the whole page.")
215 attr_accessor :response, :last_used, :first_used, :count, :expires, :date
217 def self.maybe_new(resp)
218 debug "maybe new #{resp}"
219 return nil if resp.no_cache
220 return nil unless Net::HTTPOK === resp ||
221 Net::HTTPMovedPermanently === resp ||
222 Net::HTTPFound === resp ||
223 Net::HTTPPartialContent === resp
225 cc = resp['cache-control']
226 return nil if cc && (cc =~ /no-cache/i)
230 date = Time.httpdate(d)
233 return nil if resp['expires'] && (Time.httpdate(resp['expires']) < date)
235 debug "creating cache obj"
242 @first_used = now if @count == 0
248 debug "checking expired?"
249 if cc = self.response['cache-control'] && cc =~ /must-revalidate/
252 return self.expires < Time.now
255 def setup_headers(hdr)
256 hdr['if-modified-since'] = self.date.rfc2822
258 debug "ims == #{hdr['if-modified-since']}"
260 if etag = self.response['etag']
261 hdr['if-none-match'] = etag
262 debug "etag: #{etag}"
266 def revalidate(resp = self.response)
269 self.date = resp.key?('date') ? Time.httpdate(resp['date']) : Time.now
271 cc = resp['cache-control']
272 if cc && (cc =~ /max-age=(\d+)/)
273 self.expires = self.date + $1.to_i
274 elsif resp.key?('expires')
275 self.expires = Time.httpdate(resp['expires'])
276 elsif lm = resp['last-modified']
277 delta = self.date - Time.httpdate(lm)
278 delta = 10 if delta <= 0
280 self.expires = self.date + delta
282 self.expires = self.date + 300
284 # self.expires = Time.now + 10 # DEBUG
285 debug "expires on #{self.expires}"
295 self.response.raw_body
296 rescue Exception => e
303 # Create the HttpUtil instance, associating it with Bot _bot_
309 'Accept-Charset' => 'utf-8;q=1.0, *;q=0.8',
310 'Accept-Encoding' => 'gzip;q=1, deflate;q=1, identity;q=0.8, *;q=0.2',
312 "rbot http util #{$version} (#{Irc::Bot::SOURCE_URL})"
314 debug "starting http cache cleanup timer"
315 @timer = @bot.timer.add(300) {
316 self.remove_stale_cache unless @bot.config['http.no_expire_cache']
320 # Clean up on HttpUtil unloading, by stopping the cache cleanup timer.
322 debug 'stopping http cache cleanup timer'
323 @bot.timer.remove(@timer)
326 # This method checks if a proxy is required to access _uri_, by looking at
327 # the values of config values +http.proxy_include+ and +http.proxy_exclude+.
329 # Each of these config values, if set, should be a list of Regexps against
330 # which the server name and IP address are checked.
332 def proxy_required(uri)
334 if @bot.config["http.proxy_exclude"].empty? && @bot.config["http.proxy_include"].empty?
340 list.concat Resolv.getaddresses(uri.host)
341 rescue StandardError => err
342 warning "couldn't resolve host uri.host"
345 unless @bot.config["http.proxy_exclude"].empty?
346 re = @bot.config["http.proxy_exclude"].collect{|r| Regexp.new(r)}
356 unless @bot.config["http.proxy_include"].empty?
357 re = @bot.config["http.proxy_include"].collect{|r| Regexp.new(r)}
367 debug "using proxy for uri #{uri}?: #{use_proxy}"
371 # _uri_:: URI to create a proxy for
373 # Return a net/http Proxy object, configured for proxying based on the
374 # bot's proxy configuration. See proxy_required for more details on this.
376 def get_proxy(uri, options = {})
378 :read_timeout => @bot.config["http.read_timeout"],
379 :open_timeout => @bot.config["http.open_timeout"]
388 if @bot.config["http.use_proxy"]
389 if (ENV['http_proxy'])
390 proxy = URI.parse ENV['http_proxy'] rescue nil
392 if (@bot.config["http.proxy_uri"])
393 proxy = URI.parse @bot.config["http.proxy_uri"] rescue nil
396 debug "proxy is set to #{proxy.host} port #{proxy.port}"
397 if proxy_required(uri)
398 proxy_host = proxy.host
399 proxy_port = proxy.port
400 proxy_user = @bot.config["http.proxy_user"]
401 proxy_pass = @bot.config["http.proxy_pass"]
406 h = Net::HTTP.new(uri.host, uri.port, proxy_host, proxy_port, proxy_user, proxy_pass)
407 h.use_ssl = true if uri.scheme == "https"
409 h.read_timeout = opts[:read_timeout]
410 h.open_timeout = opts[:open_timeout]
414 # Internal method used to handle response _resp_ received when making a
415 # request for URI _uri_.
417 # It follows redirects, optionally yielding them if option :yield is :all.
419 # Also yields and returns the final _resp_.
421 def handle_response(uri, resp, opts, &block) # :yields: resp
422 if Net::HTTPRedirection === resp && opts[:max_redir] >= 0
423 if resp.key?('location')
424 raise 'Too many redirections' if opts[:max_redir] <= 0
425 yield resp if opts[:yield] == :all && block_given?
426 # some servers actually provide unescaped location, e.g.
427 # http://ulysses.soup.io/post/60734021/Image%20curve%20ball
428 # redirects to something like
429 # http://ulysses.soup.io/post/60734021/Image curve ball?sessid=8457b2a3752085cca3fb1d79b9965446
430 # causing the URI parser to (obviously) complain. We cannot just
431 # escape blindly, as this would make a mess of already-escaped
432 # locations, so we only do it if the URI.parse fails
433 loc = resp['location']
435 debug "redirect location: #{loc.inspect}"
437 new_loc = URI.join(uri.to_s, loc) rescue URI.parse(loc)
442 loc = URI.escape(loc)
444 debug "escaped redirect location: #{loc.inspect}"
449 new_opts[:max_redir] -= 1
450 case opts[:method].to_s.downcase.intern
451 when :post, :"net::http::post"
452 new_opts[:method] = :get
454 if resp['set-cookie']
455 debug "set cookie request for #{resp['set-cookie']}"
456 cookie, cookie_flags = (resp['set-cookie']+'; ').split('; ', 2)
458 cookie_flags.scan(/(\S+)=(\S+);/) { |key, val|
459 if key.intern == :domain
464 debug "cookie domain #{domain} / #{new_loc.host}"
465 if new_loc.host.rindex(domain) == new_loc.host.length - domain.length
466 debug "setting cookie"
467 new_opts[:headers] ||= Hash.new
468 new_opts[:headers]['Cookie'] = cookie
470 debug "cookie is for another domain, ignoring"
473 debug "following the redirect to #{new_loc}"
474 return get_response(new_loc, new_opts, &block)
476 warning ":| redirect w/o location?"
481 alias :body :cooked_body
483 unless resp['content-type']
484 debug "No content type, guessing"
485 resp['content-type'] =
486 case resp['x-rbot-location']
492 'application/xml+xhtml'
493 when /.(gif|png|jpe?g|jp2|tiff?)$/i
494 "image/#{$1.sub(/^jpg$/,'jpeg').sub(/^tif$/,'tiff')}"
496 'application/octetstream'
502 # Net::HTTP wants us to read the whole body here
508 # _uri_:: uri to query (URI object or String)
510 # Generic http transaction method. It will return a Net::HTTPResponse
511 # object or raise an exception
513 # If a block is given, it will yield the response (see :yield option)
515 # Currently supported _options_:
517 # method:: request method [:get (default), :post or :head]
518 # open_timeout:: open timeout for the proxy
519 # read_timeout:: read timeout for the proxy
520 # cache:: should we cache results?
521 # yield:: if :final [default], calls the block for the response object;
522 # if :all, call the block for all intermediate redirects, too
523 # max_redir:: how many redirects to follow before raising the exception
524 # if -1, don't follow redirects, just return them
525 # range:: make a ranged request (usually GET). accepts a string
526 # for HTTP/1.1 "Range:" header (i.e. "bytes=0-1000")
527 # body:: request body (usually for POST requests)
528 # headers:: additional headers to be set for the request. Its value must
529 # be a Hash in the form { 'Header' => 'value' }
531 def get_response(uri_or_s, options = {}, &block) # :yields: resp
532 uri = uri_or_s.kind_of?(URI) ? uri_or_s : URI.parse(uri_or_s.to_s)
533 unless URI::HTTP === uri
535 raise "#{uri.scheme.inspect} URI scheme is not supported"
537 raise "don't know what to do with #{uri.to_s.inspect}"
542 :max_redir => @bot.config['http.max_redir'],
548 req_class = case opts[:method].to_s.downcase.intern
549 when :head, :"net::http::head"
550 opts[:max_redir] = -1
552 when :get, :"net::http::get"
554 when :post, :"net::http::post"
556 opts[:body] or raise 'post request w/o a body?'
557 warning "refusing to cache POST request" if options[:cache]
560 warning "unsupported method #{opts[:method]}, doing GET"
564 if req_class != Net::HTTP::Get && opts[:range]
565 warning "can't request ranges for #{req_class}"
569 cache_key = "#{opts[:range]}|#{req_class}|#{uri.to_s}"
571 if req_class != Net::HTTP::Get && req_class != Net::HTTP::Head
573 warning "can't cache #{req_class.inspect} requests, working w/o cache"
578 debug "get_response(#{uri}, #{opts.inspect})"
580 cached = @cache[cache_key]
582 if opts[:cache] && cached
587 return handle_response(uri, cached.response, opts, &block)
591 headers = @headers.dup.merge(opts[:headers] || {})
592 headers['Range'] = opts[:range] if opts[:range]
593 headers['Authorization'] = opts[:auth_head] if opts[:auth_head]
595 if opts[:cache] && cached && (req_class == Net::HTTP::Get)
596 cached.setup_headers headers
599 req = req_class.new(uri.request_uri, headers)
600 if uri.user && uri.password
601 req.basic_auth(uri.user, uri.password)
602 opts[:auth_head] = req['Authorization']
604 req.body = opts[:body] if req_class == Net::HTTP::Post
605 debug "prepared request: #{req.to_hash.inspect}"
608 get_proxy(uri, opts).start do |http|
609 http.request(req) do |resp|
610 resp['x-rbot-location'] = uri.to_s
611 if Net::HTTPNotModified === resp
614 cached.revalidate(resp)
615 rescue Exception => e
618 debug "reusing cached"
619 resp = cached.response
620 elsif Net::HTTPServerError === resp || Net::HTTPClientError === resp
621 debug "http error, deleting cached obj" if cached
622 @cache.delete(cache_key)
626 return handle_response(uri, resp, opts, &block)
628 if cached = CachedObject.maybe_new(resp) rescue nil
629 debug "storing to cache"
630 @cache[cache_key] = cached
635 rescue Exception => e
641 # _uri_:: uri to query (URI object or String)
643 # Simple GET request, returns (if possible) response body following redirs
644 # and caching if requested, yielding the actual response(s) to the optional
645 # block. See get_response for details on the supported _options_
647 def get(uri, options = {}, &block) # :yields: resp
649 resp = get_response(uri, options, &block)
650 raise "http error: #{resp}" unless Net::HTTPOK === resp ||
651 Net::HTTPPartialContent === resp
653 rescue Exception => e
659 # _uri_:: uri to query (URI object or String)
661 # Simple HEAD request, returns (if possible) response head following redirs
662 # and caching if requested, yielding the actual response(s) to the optional
663 # block. See get_response for details on the supported _options_
665 def head(uri, options = {}, &block) # :yields: resp
666 opts = {:method => :head}.merge(options)
668 resp = get_response(uri, opts, &block)
669 # raise "http error #{resp}" if Net::HTTPClientError === resp ||
670 # Net::HTTPServerError == resp
672 rescue Exception => e
678 # _uri_:: uri to query (URI object or String)
679 # _data_:: body of the POST
681 # Simple POST request, returns (if possible) response following redirs and
682 # caching if requested, yielding the response(s) to the optional block. See
683 # get_response for details on the supported _options_
685 def post(uri, data, options = {}, &block) # :yields: resp
686 opts = {:method => :post, :body => data, :cache => false}.merge(options)
688 resp = get_response(uri, opts, &block)
689 raise 'http error' unless Net::HTTPOK === resp or Net::HTTPCreated === resp
691 rescue Exception => e
697 # _uri_:: uri to query (URI object or String)
698 # _nbytes_:: number of bytes to get
700 # Partial GET request, returns (if possible) the first _nbytes_ bytes of the
701 # response body, following redirs and caching if requested, yielding the
702 # actual response(s) to the optional block. See get_response for details on
703 # the supported _options_
705 def get_partial(uri, nbytes = @bot.config['http.info_bytes'], options = {}, &block) # :yields: resp
706 opts = {:range => "bytes=0-#{nbytes}"}.merge(options)
707 return get(uri, opts, &block)
710 def remove_stale_cache
711 debug "Removing stale cache"
713 max_last = @bot.config['http.expire_time'] * 60
714 max_first = @bot.config['http.max_cache_time'] * 60
715 debug "#{@cache.size} pages before"
717 @cache.reject! { |k, val|
718 (now - val.last_used > max_last) || (now - val.first_used > max_first)
721 error "Failed to remove stale cache: #{e.pretty_inspect}"
723 debug "#{@cache.size} pages after"
730 class HttpUtilPlugin < CoreBotModule
733 debug 'initializing httputil'
734 @bot.httputil = Irc::Utils::HttpUtil.new(@bot)
738 debug 'shutting down httputil'
739 @bot.httputil.cleanup