4 # :title: rbot HTTP provider
6 # Author:: Tom Gilbert <tom@linuxbrit.co.uk>
7 # Author:: Giuseppe "Oblomov" Bilotta <giuseppe.bilotta@gmail.com>
9 # Copyright:: (C) 2002-2005 Tom Gilbert
10 # Copyright:: (C) 2006 Tom Gilbert, Giuseppe Bilotta
11 # Copyright:: (C) 2006,2007 Giuseppe Bilotta
21 error "Couldn't load 'net/https': #{e.inspect}"
22 error "Secured HTTP connections will fail"
27 # class for making http requests easier (mainly for plugins to use)
28 # this class can check the bot proxy configuration to determine if a proxy
29 # needs to be used, which includes support for per-url proxy configuration.
# Proxy and cache configuration knobs for the HTTP utilities.
# NOTE(review): reconstructed from a mangled extraction; the :default values
# marked "gap-filled" were missing from the visible text and were restored
# from upstream rbot — verify against the original httputil.rb.
BotConfig.register BotConfigBooleanValue.new('http.use_proxy',
  :default => false, :desc => "should a proxy be used for HTTP requests?")
BotConfig.register BotConfigStringValue.new('http.proxy_uri', :default => false,
  :desc => "Proxy server to use for HTTP requests (URI, e.g http://proxy.host:port)")
BotConfig.register BotConfigStringValue.new('http.proxy_user',
  :default => nil, # gap-filled
  :desc => "User for authenticating with the http proxy (if required)")
BotConfig.register BotConfigStringValue.new('http.proxy_pass',
  :default => nil, # gap-filled
  :desc => "Password for authenticating with the http proxy (if required)")
BotConfig.register BotConfigArrayValue.new('http.proxy_include',
  :default => [], # gap-filled
  :desc => "List of regexps to check against a URI's hostname/ip to see if we should use the proxy to access this URI. All URIs are proxied by default if the proxy is set, so this is only required to re-include URIs that might have been excluded by the exclude list. e.g. exclude /.*\.foo\.com/, include bar\.foo\.com")
BotConfig.register BotConfigArrayValue.new('http.proxy_exclude',
  :default => [], # gap-filled
  # BUG FIX: desc read "see if we should use avoid the proxy"
  :desc => "List of regexps to check against a URI's hostname/ip to see if we should avoid the proxy to access this URI and access it directly")
BotConfig.register BotConfigIntegerValue.new('http.max_redir',
  :default => 5, # gap-filled
  :desc => "Maximum number of redirections to be used when getting a document")
BotConfig.register BotConfigIntegerValue.new('http.expire_time',
  :default => 60, # gap-filled
  :desc => "After how many minutes since last use a cached document is considered to be expired")
BotConfig.register BotConfigIntegerValue.new('http.max_cache_time',
  :default => 60*24, # gap-filled
  :desc => "After how many minutes since first use a cached document is considered to be expired")
# BUG FIX: this setting is a flag ("Set this to true ..." and get_cached uses
# it as a boolean default), so it must be a BotConfigBooleanValue, not an
# IntegerValue.
BotConfig.register BotConfigBooleanValue.new('http.no_expire_cache',
  :default => false, # gap-filled
  :desc => "Set this to true if you want the bot to never expire the cached pages")
64 'User-Agent' => "rbot http util #{$version} (http://linuxbrit.co.uk/rbot/)",
68 attr_reader :last_response
# if http.proxy_include or http.proxy_exclude are set, then examine the
# uri to see if this is a proxied uri
# the in/excludes are a list of regexps, and each regexp is checked against
# the server name, and its IP addresses
#
# NOTE(review): loop bodies were reconstructed from a mangled extraction —
# verify against upstream rbot httputil.rb.
def proxy_required(uri)
  # default: proxy everything; the exclude list can turn it off and the
  # include list can turn it back on for specific hosts
  use_proxy = true
  if @bot.config["http.proxy_exclude"].empty? && @bot.config["http.proxy_include"].empty?
    return use_proxy
  end

  # match the regexps against the hostname and every address it resolves to;
  # resolution failure is non-fatal, we just match the hostname alone
  list = [uri.host]
  begin
    list.concat Resolv.getaddresses(uri.host)
  rescue StandardError => err
    # BUG FIX: the hostname was not interpolated into the warning message
    warning "couldn't resolve host #{uri.host}"
  end

  unless @bot.config["http.proxy_exclude"].empty?
    re = @bot.config["http.proxy_exclude"].collect{|r| Regexp.new(r)}
    re.each { |r|
      list.each { |item|
        if r.match(item)
          use_proxy = false
          break
        end
      }
    }
  end
  unless @bot.config["http.proxy_include"].empty?
    re = @bot.config["http.proxy_include"].collect{|r| Regexp.new(r)}
    re.each { |r|
      list.each { |item|
        if r.match(item)
          use_proxy = true
          break
        end
      }
    }
  end
  debug "using proxy for uri #{uri}?: #{use_proxy}"
  return use_proxy
end
# uri:: Uri to create a proxy for
#
# return a net/http Proxy object, which is configured correctly for
# proxying based on the bot's proxy configuration.
# This will include per-url proxy configuration based on the bot config
# +http_proxy_include/exclude+ options.
def get_proxy(uri)
  proxy = nil
  proxy_host = nil
  proxy_port = nil
  proxy_user = nil
  proxy_pass = nil

  if @bot.config["http.use_proxy"]
    # the http_proxy environment variable is honoured, but an explicit
    # http.proxy_uri setting overrides it; bad URIs are silently ignored
    if (ENV['http_proxy'])
      proxy = URI.parse ENV['http_proxy'] rescue nil
    end
    if (@bot.config["http.proxy_uri"])
      proxy = URI.parse @bot.config["http.proxy_uri"] rescue nil
    end
    if proxy
      debug "proxy is set to #{proxy.host} port #{proxy.port}"
      if proxy_required(uri)
        proxy_host = proxy.host
        proxy_port = proxy.port
        proxy_user = @bot.config["http.proxy_user"]
        proxy_pass = @bot.config["http.proxy_pass"]
      end
    end
  end

  # BUG FIX: the password argument was being passed proxy_port instead of
  # proxy_pass, so authenticating proxies could never log in.
  h = Net::HTTP.new(uri.host, uri.port, proxy_host, proxy_port, proxy_user, proxy_pass)
  h.use_ssl = true if uri.scheme == "https"
  return h
end
# uri_or_str:: uri to query (URI object or String)
# readtimeout:: timeout for reading the response
# opentimeout:: timeout for opening the connection
#
# simple get request, returns (if possible) response body following redirs
# and caching if requested
# if a block is given, it yields the urls it gets redirected to
# TODO we really need something to implement proper caching
#
# NOTE(review): the else/end scaffolding of this method was reconstructed
# from a mangled extraction — verify against upstream rbot httputil.rb.
def get(uri_or_str, readtimeout=10, opentimeout=5, max_redir=@bot.config["http.max_redir"], cache=false)
  if uri_or_str.kind_of?(URI)
    uri = uri_or_str
  else
    uri = URI.parse(uri_or_str.to_s)
  end
  debug "Getting #{uri}"

  proxy = get_proxy(uri)
  proxy.open_timeout = opentimeout
  proxy.read_timeout = readtimeout

  begin
    proxy.start() {|http|
      yield uri.request_uri() if block_given?
      req = Net::HTTP::Get.new(uri.request_uri(), @headers)
      if uri.user && uri.password
        req.basic_auth(uri.user, uri.password)
      end
      resp = http.request(req)
      case resp
      when Net::HTTPSuccess
        if cache
          debug "Caching #{uri.to_s}"
          cache_response(uri.to_s, resp)
        end
        return resp.body
      when Net::HTTPRedirection
        if resp.key?('location')
          # resolve the Location header relative to the requested uri
          new_loc = URI.join(uri, resp['location'])
          debug "Redirecting #{uri} to #{new_loc}"
          yield new_loc if block_given?
          if max_redir > 0
            # If cache is an Array, we assume get was called by get_cached
            # because of a cache miss and that the first value of the Array
            # was the noexpire value. Since the cache miss might have been
            # caused by a redirection, we want to try get_cached again
            # TODO FIXME look at Python's httplib2 for a most likely
            # better way to handle all this mess
            if cache.kind_of?(Array)
              return get_cached(new_loc, readtimeout, opentimeout, max_redir-1, cache[0])
            else
              return get(new_loc, readtimeout, opentimeout, max_redir-1, cache)
            end
          else
            warning "Max redirection reached, not going to #{new_loc}"
          end
        else
          warning "Unknown HTTP redirection #{resp.inspect}"
        end
      else
        debug "HttpUtil.get return code #{resp.code} #{resp.body}"
      end
      @last_response = resp
      return nil
    }
  rescue StandardError, Timeout::Error => e
    error "HttpUtil.get exception: #{e.inspect}, while trying to get #{uri}"
    debug e.backtrace.join("\n")
  end
  # any failure leaves no last response and returns nil
  @last_response = nil
  return nil
end
# just like the above, but only gets the head; returns the response object
# (not the body) on success, nil on failure or non-success codes.
#
# NOTE(review): the else/end scaffolding of this method was reconstructed
# from a mangled extraction — verify against upstream rbot httputil.rb.
def head(uri_or_str, readtimeout=10, opentimeout=5, max_redir=@bot.config["http.max_redir"])
  if uri_or_str.kind_of?(URI)
    uri = uri_or_str
  else
    uri = URI.parse(uri_or_str.to_s)
  end

  proxy = get_proxy(uri)
  proxy.open_timeout = opentimeout
  proxy.read_timeout = readtimeout

  begin
    proxy.start() {|http|
      yield uri.request_uri() if block_given?
      req = Net::HTTP::Head.new(uri.request_uri(), @headers)
      if uri.user && uri.password
        req.basic_auth(uri.user, uri.password)
      end
      resp = http.request(req)
      case resp
      when Net::HTTPSuccess
        return resp
      when Net::HTTPRedirection
        debug "Redirecting #{uri} to #{resp['location']}"
        yield resp['location'] if block_given?
        if max_redir > 0
          return head( URI.parse(resp['location']), readtimeout, opentimeout, max_redir-1)
        else
          warning "Max redirection reached, not going to #{resp['location']}"
        end
      else
        debug "HttpUtil.head return code #{resp.code}"
      end
      @last_response = resp
      return nil
    }
  rescue StandardError, Timeout::Error => e
    error "HttpUtil.head exception: #{e.inspect}, while trying to get #{uri}"
    debug e.backtrace.join("\n")
  end
  @last_response = nil
  return nil
end
# k:: cache key (the uri as a String)
# resp:: Net::HTTPResponse to cache
#
# Store resp in @cache under k together with the metadata needed for
# revalidation/expiry (last-modified, etag, expires, revalidate flag,
# usage counters). Responses carrying "Pragma: no-cache" are not cached.
#
# NOTE(review): the begin/rescue scaffolding and the cache assignment were
# reconstructed from a mangled extraction — verify against upstream rbot.
def cache_response(k, resp)
  begin
    if resp.key?('pragma') and resp['pragma'] == 'no-cache'
      debug "Not caching #{k}, it has Pragma: no-cache"
      return
    end
    # TODO should we skip caching if neither last-modified nor etag are present?
    now = Time.new
    u = Hash.new
    u[:body] = resp.body
    # prefer last-modified over date for the revalidation timestamp
    u[:last_modified] = nil
    u[:last_modified] = Time.httpdate(resp['date']) if resp.key?('date')
    u[:last_modified] = Time.httpdate(resp['last-modified']) if resp.key?('last-modified')
    u[:expires] = now
    u[:expires] = Time.httpdate(resp['expires']) if resp.key?('expires')
    u[:revalidate] = false
    if resp.key?('cache-control')
      # honour revalidation and max-age cache-control directives
      case resp['cache-control']
      when /no-cache|must-revalidate/
        u[:revalidate] = true
      when /max-age=(\d+)/
        u[:expires] = now + $1.to_i
      end
    end
    u[:etag] = ""
    u[:etag] = resp['etag'] if resp.key?('etag')
    u[:count] = 1
    u[:first_use] = now
    u[:last_use] = now
  rescue => e
    error "Failed to cache #{k}/#{resp.to_hash.inspect}: #{e.inspect}"
    return
  end
  @cache[k] = u
  debug "Cached #{k}/#{resp.to_hash.inspect}: #{u.inspect_no_body}"
  debug "#{@cache.size} pages (#{@cache.keys.join(', ')}) cached up to now"
end
305 # For debugging purposes
# uri:: URI of the cached page to validate
# readtimeout:: timeout for reading the response
# opentimeout:: timeout for opening the connection
#
# Return true when the cache entry for uri is missing or must be refetched,
# false when the server confirms (304 Not Modified) the cached copy is fresh.
# Validation errors are treated as "expired" so we fall back to a real GET.
#
# NOTE(review): the case/rescue tail of this method was reconstructed from a
# mangled extraction — verify against upstream rbot httputil.rb.
def expired?(uri, readtimeout, opentimeout)
  k = uri.to_s
  debug "Checking cache validity for #{k}"
  begin
    return true unless @cache.key?(k)
    u = @cache[k]

    # TODO we always revalidate for the time being

    # without an etag or last-modified there is nothing to revalidate against
    if u[:etag].empty? && u[:last_modified].nil?
      # TODO max-age
      return true
    end

    proxy = get_proxy(uri)
    proxy.open_timeout = opentimeout
    proxy.read_timeout = readtimeout

    proxy.start() {|http|
      yield uri.request_uri() if block_given?
      headers = @headers.dup
      headers['If-None-Match'] = u[:etag] unless u[:etag].empty?
      headers['If-Modified-Since'] = u[:last_modified].rfc2822 if u[:last_modified]
      debug "Cache HEAD request headers: #{headers.inspect}"
      # FIXME TODO We might want to use a Get here
      # because if a 200 OK is returned we would get the new body
      # with one connection less ...
      req = Net::HTTP::Head.new(uri.request_uri(), headers)
      if uri.user && uri.password
        req.basic_auth(uri.user, uri.password)
      end
      resp = http.request(req)
      debug "Checking cache validity of #{u.inspect_no_body} against #{resp.inspect}/#{resp.to_hash.inspect}"
      case resp
      when Net::HTTPNotModified
        return false
      else
        return true
      end
    }
  rescue => e
    error "Failed to check cache validity for #{uri}: #{e.inspect}"
    return true
  end
end
# gets a page from the cache if it's still (assumed to be) valid
# TODO remove stale cached pages, except when called with noexpire=true
#
# The returned body object gains a +cached?+ singleton method telling
# whether it was served from the cache.
#
# NOTE(review): the else branch and tail of this method were reconstructed
# from a mangled extraction — verify against upstream rbot httputil.rb.
def get_cached(uri_or_str, readtimeout=10, opentimeout=5,
               max_redir=@bot.config['http.max_redir'],
               noexpire=@bot.config['http.no_expire_cache'])
  if uri_or_str.kind_of?(URI)
    uri = uri_or_str
  else
    uri = URI.parse(uri_or_str.to_s)
  end
  debug "Getting cached #{uri}"

  if expired?(uri, readtimeout, opentimeout)
    debug "Cache expired"
    # noexpire is wrapped in an Array so get() can tell it was called from
    # get_cached on a cache miss (see get's redirection handling)
    bod = get(uri, readtimeout, opentimeout, max_redir, [noexpire])
    bod.instance_variable_set(:@cached, false)
  else
    k = uri.to_s
    debug "Using cache"
    @cache[k][:count] += 1
    @cache[k][:last_use] = Time.now
    bod = @cache[k][:body]
    bod.instance_variable_set(:@cached, true)
  end
  remove_stale_cache unless noexpire
  # tag the body so callers can ask whether it came from the cache
  unless bod.respond_to?(:cached?)
    def bod.cached?
      return @cached
    end
  end
  return bod
end
# We consider a page to be manually expired if it has no
# etag and no last-modified and if any of the expiration
# conditions are met (expire_time, max_cache_time, Expires)
def manually_expired?(hash, time)
  # BUG FIX: the low-precedence 'and'/'or' keywords bind looser than
  # assignment, so 'auto = a and b' parsed as '(auto = a) and b' and
  # 'manual = c1 or c2 or c3' parsed as '(manual = c1) or ...' — every
  # condition after the first was silently discarded. && / || fix that.
  auto = hash[:etag].empty? && hash[:last_modified].nil?
  # TODO max-age
  manual = (time - hash[:last_use] > @bot.config['http.expire_time']*60) ||
           (time - hash[:first_use] > @bot.config['http.max_cache_time']*60) ||
           (hash[:expires] < time)
  return (auto && manual)
end
# Drop from @cache every entry that manually_expired? considers stale.
# Failures are logged and leave the cache untouched.
#
# NOTE(review): the begin/rescue scaffolding was reconstructed from a
# mangled extraction — verify against upstream rbot httputil.rb.
def remove_stale_cache
  debug "Removing stale cache"
  debug "#{@cache.size} pages before"
  begin
    now = Time.new
    @cache.reject! { |k, val|
      manually_expired?(val, now)
    }
  rescue => e
    error "Failed to remove stale cache: #{e.inspect}"
  end
  debug "#{@cache.size} pages after"
end