# Just like get, but only issues a HEAD request for _uri_or_str_.
#
# Returns the Net::HTTPSuccess response on success, nil on error or
# when the redirection limit is exhausted. Redirects are followed
# recursively up to +max_redir+ times; the request target (and each
# redirect location) is yielded to the caller's block, if given.
def head(uri_or_str, readtimeout=10, opentimeout=5, max_redir=@bot.config["http.max_redir"])
  # Accept both URI objects and anything convertible to a URI string
  uri = uri_or_str.class <= URI ? uri_or_str : URI.parse(uri_or_str.to_s)

  proxy = get_proxy(uri)
  proxy.open_timeout = opentimeout
  proxy.read_timeout = readtimeout

  begin
    proxy.start do |http|
      yield uri.request_uri() if block_given?
      resp = http.head(uri.request_uri(), @headers)
      case resp
      when Net::HTTPSuccess
        return resp
      when Net::HTTPRedirection
        debug "Redirecting #{uri} to #{resp['location']}"
        yield resp['location'] if block_given?
        if max_redir <= 0
          warning "Max redirection reached, not going to #{resp['location']}"
          return nil
        end
        # Follow the redirect, spending one redirect credit
        return head( URI.parse(resp['location']), readtimeout, opentimeout, max_redir-1)
      else
        debug "HttpUtil.head return code #{resp.code}"
      end
      return nil
    end
  rescue StandardError, Timeout::Error => e
    error "HttpUtil.head exception: #{e.inspect}, while trying to get #{uri}"
    debug e.backtrace.join("\n")
  end
  return nil
end
+
# Fetches _uri_or_str_ through the page cache, only hitting the
# network when the cached copy is missing or no longer assumed valid.
#
# Validity is decided two ways:
# * if the cached entry recorded a last-modified timestamp
#   (:last_mod), a HEAD request is issued and the remote timestamp is
#   compared against the cached one (a match counts as a cache hit);
# * otherwise the entry's age is checked with expired?.
#
# When +noexpire+ is true, cache pruning and age-based expiry are
# both skipped.
# TODO remove stale cached pages, except when called with noexpire=true
def get_cached(uri_or_str, readtimeout=10, opentimeout=5,
               max_redir=@bot.config['http.max_redir'],
               noexpire=@bot.config['http.no_expire_cache'])
  # Accept both URI objects and anything convertible to a URI string
  if uri_or_str.class <= URI
    uri = uri_or_str
  else
    uri = URI.parse(uri_or_str.to_s)
  end

  k = uri.to_s
  if !@cache.key?(k)
    # Nothing cached for this URL: prune the cache and fetch fresh
    # (the trailing +true+ asks get() to cache the result)
    remove_stale_cache unless noexpire
    return get(uri, readtimeout, opentimeout, max_redir, true)
  end
  now = Time.new
  begin
    # See if the last-modified header can be used.
    # Assumption: the page was not modified if both the header
    # and the cached copy have the last-modified value, and it's the same time.
    # If only one of the cached copy and the header have the value, or if the
    # value is different, we assume that the cached copy is invalid and therefore
    # get a new one.
    # On our first try, we tested for last-modified in the webpage first,
    # and then on the local cache. however, this is stupid (in general),
    # so we only test for the remote page if the local copy had the header
    # in the first place.
    if @cache[k].key?(:last_mod)
      h = head(uri, readtimeout, opentimeout, max_redir)
      # NOTE(review): head() may return nil (error or redirect limit);
      # the resulting NoMethodError on h.key? is caught by the rescue
      # below, which serves the stale cached body as a best-effort
      # fallback — presumably intentional, confirm before changing.
      if h.key?('last-modified')
        if Time.httpdate(h['last-modified']) == @cache[k][:last_mod]
          # Unchanged upstream: refresh bookkeeping, serve from cache
          if h.key?('date')
            @cache[k][:last_use] = Time.httpdate(h['date'])
          else
            @cache[k][:last_use] = now
          end
          @cache[k][:count] += 1
          return @cache[k][:body]
        end
        # Timestamps differ: cached copy is out of date, refetch
        remove_stale_cache unless noexpire
        return get(uri, readtimeout, opentimeout, max_redir, true)
      end
      # Remote page no longer advertises last-modified: refetch
      remove_stale_cache unless noexpire
      return get(uri, readtimeout, opentimeout, max_redir, true)
    end
  rescue => e
    # Trouble while revalidating: fall back to the cached body
    warning "Error #{e.inspect} getting the page #{uri}, using cache"
    debug e.backtrace.join("\n")
    return @cache[k][:body]
  end
  # If we still haven't returned, we are dealing with a non-redirected document
  # that doesn't have the last-modified attribute
  debug "Could not use last-modified attribute for URL #{uri}, guessing cache validity"
  if noexpire or !expired?(@cache[k], now)
    @cache[k][:count] += 1
    @cache[k][:last_use] = now
    debug "Using cache"
    return @cache[k][:body]
  end
  debug "Cache expired, getting anew"
  @cache.delete(k)
  remove_stale_cache unless noexpire
  return get(uri, readtimeout, opentimeout, max_redir, true)
end
+
# True when the cached entry _hash_ should no longer be served:
# either it was last used more than http.expire_time minutes before
# _time_, or it was first fetched more than http.max_cache_time
# minutes before _time_.
def expired?(hash, time)
  return true if time - hash[:last_use] > @bot.config['http.expire_time']*60
  time - hash[:first_use] > @bot.config['http.max_cache_time']*60
end
+
# Purges expired entries (see expired?) from the page cache.
# Entries that carry a last-modified timestamp are kept even when
# expired, because get_cached can revalidate them cheaply with a
# HEAD request instead of a full refetch.
def remove_stale_cache
  now = Time.new
  @cache.reject! { |_k, val|
    # BUGFIX: the cache stores the validator under :last_mod (see
    # get_cached); the old :last_modified key never matched, so
    # revalidatable entries were being evicted along with the rest.
    !val.key?(:last_mod) && expired?(val, now)
  }
end
+
+end
+end