#= open-uri.rb
#
#open-uri.rb is an easy-to-use wrapper for net/http, net/https and net/ftp.
#
#== Example
#
#It is possible to open an http, https or ftp URL as if it were a file:
#
#  open("http://www.ruby-lang.org/") {|f|
#    f.each_line {|line| p line}
#  }
#
#The opened file has several methods for meta information as follows, since
#it is extended by OpenURI::Meta.
#
#  open("http://www.ruby-lang.org/en") {|f|
#    f.each_line {|line| p line}
#    p f.base_uri         # <URI::HTTP:0x40e6ef2 URL:http://www.ruby-lang.org/en/>
#    p f.content_type     # "text/html"
#    p f.charset          # "iso-8859-1"
#    p f.content_encoding # []
#    p f.last_modified    # Thu Dec 05 02:45:02 UTC 2002
#  }
#
#Additional header fields can be specified by an optional hash argument.
#
#  open("http://www.ruby-lang.org/en/",
#    "User-Agent" => "Ruby/#{RUBY_VERSION}",
#    "From" => "foo@bar.invalid",
#    "Referer" => "http://www.ruby-lang.org/") {|f|
#    ...
#  }
#
#Environment variables such as http_proxy, https_proxy and ftp_proxy
#are in effect by default.  Passing :proxy => nil disables the proxy.
#
#  open("http://www.ruby-lang.org/en/raa.html",
#    :proxy => nil) {|f|
#    ...
#  }
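#
#An explicit proxy can be given in the same way; the proxy URL below is just
#a placeholder:
#
#  open("http://www.ruby-lang.org/en/raa.html",
#    :proxy => "http://proxy.foo.com:8000/") {|f|
#    ...
#  }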
#
#URI objects can be opened in a similar way.
#
#  uri = URI.parse("http://www.ruby-lang.org/en/")
#  uri.open {|f|
#    ...
#  }
#
#URI objects can be read directly.
#The returned string is also extended by OpenURI::Meta.
#
#  str = uri.read
#  p str.base_uri
#
#Author:: Tanaka Akira <akr@m17n.org>

require 'uri'
require 'stringio'
require 'time'

module Kernel
  private
  alias open_uri_original_open open # :nodoc:

  # Makes it possible to open various resources, including URIs.
  # If the first argument responds to the `open' method,
  # that method is called with the rest of the arguments.
  #
  # If the first argument is a string which begins with xxx://,
  # it is parsed by URI.parse.  If the parsed object responds to the `open'
  # method, that method is called with the rest of the arguments.
  #
  # Otherwise, the original open is called.
  #
  # Since open-uri.rb provides URI::HTTP#open, URI::HTTPS#open and
  # URI::FTP#open,
  # Kernel[#.]open can accept such URIs and strings which begin with
  # http://, https:// and ftp://.
  # In these cases, the opened file object is extended by OpenURI::Meta.
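  #
  # For example (the local filename below is hypothetical):
  #
  #   open("http://www.ruby-lang.org/") {|f| ... }  # dispatched to URI::HTTP#open
  #   open("local-file.txt") {|f| ... }             # handled by the original Kernel#open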
  def open(name, *rest, &block) # :doc:
    if name.respond_to?(:open)
      name.open(*rest, &block)
    elsif name.respond_to?(:to_str) &&
          %r{\A[A-Za-z][A-Za-z0-9+\-\.]*://} =~ name &&
          (uri = URI.parse(name)).respond_to?(:open)
      uri.open(*rest, &block)
    else
      open_uri_original_open(name, *rest, &block)
    end
  end
  module_function :open
end

module OpenURI
  Options = {
    :proxy => true,
    :progress_proc => true,
    :content_length_proc => true,
    :http_basic_authentication => true,
  }

  def OpenURI.check_options(options) # :nodoc:
    options.each {|k, v|
      next unless Symbol === k
      unless Options.include? k
        raise ArgumentError, "unrecognized option: #{k}"
      end
    }
  end

  def OpenURI.scan_open_optional_arguments(*rest) # :nodoc:
    if !rest.empty? && (String === rest.first || Integer === rest.first)
      mode = rest.shift
      if !rest.empty? && Integer === rest.first
        perm = rest.shift
      end
    end
    return mode, perm, rest
  end

  def OpenURI.open_uri(name, *rest) # :nodoc:
    uri = URI::Generic === name ? name : URI.parse(name)
    mode, perm, rest = OpenURI.scan_open_optional_arguments(*rest)
    options = rest.shift if !rest.empty? && Hash === rest.first
    raise ArgumentError.new("extra arguments") if !rest.empty?
    options ||= {}
    OpenURI.check_options(options)

    unless mode == nil ||
           mode == 'r' || mode == 'rb' ||
           mode == File::RDONLY
      raise ArgumentError.new("invalid access mode #{mode} (#{uri.class} resource is read only.)")
    end

    io = open_loop(uri, options)
    if block_given?
      begin
        yield io
      ensure
        io.close
      end
    else
      io
    end
  end

  def OpenURI.open_loop(uri, options) # :nodoc:
    case opt_proxy = options.fetch(:proxy, true)
    when true
      find_proxy = lambda {|u| u.find_proxy}
    when nil, false
      find_proxy = lambda {|u| nil}
    when String
      opt_proxy = URI.parse(opt_proxy)
      find_proxy = lambda {|u| opt_proxy}
    when URI::Generic
      find_proxy = lambda {|u| opt_proxy}
    else
      raise ArgumentError.new("Invalid proxy option: #{opt_proxy}")
    end

    uri_set = {}
    buf = nil
    while true
      redirect = catch(:open_uri_redirect) {
        buf = Buffer.new
        uri.buffer_open(buf, find_proxy.call(uri), options)
        nil
      }
      if redirect
        if redirect.relative?
          # Although it violates RFC2616, the Location: field may have a
          # relative URI.  It is converted to an absolute URI using uri as
          # a base URI.
          redirect = uri + redirect
        end
        unless OpenURI.redirectable?(uri, redirect)
          raise "redirection forbidden: #{uri} -> #{redirect}"
        end
        if options.include? :http_basic_authentication
          # send authentication only for the URI directly specified.
          options = options.dup
          options.delete :http_basic_authentication
        end
        uri = redirect
        raise "HTTP redirection loop: #{uri}" if uri_set.include? uri.to_s
        uri_set[uri.to_s] = true
      else
        break
      end
    end
    io = buf.io
    io.base_uri = uri
    io
  end

  def OpenURI.redirectable?(uri1, uri2) # :nodoc:
    # This test is intended to forbid a redirection from http://... to
    # file:///etc/passwd.
    # However this is ad hoc.  It should be extensible/configurable.
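    # For example, under this rule http -> http and http -> ftp redirections
    # pass, while http -> file (and, in this version, http -> https) does not.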
    uri1.scheme.downcase == uri2.scheme.downcase ||
      (/\A(?:http|ftp)\z/i =~ uri1.scheme && /\A(?:http|ftp)\z/i =~ uri2.scheme)
  end

  def OpenURI.open_http(buf, target, proxy, options) # :nodoc:
    if proxy
      raise "Non-HTTP proxy URI: #{proxy}" if proxy.class != URI::HTTP
    end

    if target.userinfo && "1.9.0" <= RUBY_VERSION
      # don't raise for 1.8 for compatibility.
      raise ArgumentError, "userinfo not supported.  [RFC3986]"
    end

    require 'net/http'
    klass = Net::HTTP
    if URI::HTTP === target
      # HTTP or HTTPS
      if proxy
        klass = Net::HTTP::Proxy(proxy.host, proxy.port)
      end
      target_host = target.host
      target_port = target.port
      request_uri = target.request_uri
    else
      # FTP over HTTP proxy
      target_host = proxy.host
      target_port = proxy.port
      request_uri = target.to_s
    end

    http = klass.new(target_host, target_port)
    if target.class == URI::HTTPS
      require 'net/https'
      http.use_ssl = true
      http.verify_mode = OpenSSL::SSL::VERIFY_PEER
      store = OpenSSL::X509::Store.new
      store.set_default_paths
      http.cert_store = store
    end

    header = {}
    options.each {|k, v| header[k] = v if String === k }

    resp = nil
    http.start {
      if http.respond_to?(:verify_mode) &&
         (http.verify_mode & OpenSSL::SSL::VERIFY_PEER) != 0
        # xxx: information hiding violation
        http.instance_variable_get(:@socket).io.post_connection_check(target_host)
      end
      req = Net::HTTP::Get.new(request_uri, header)
      if options.include? :http_basic_authentication
        user, pass = options[:http_basic_authentication]
        req.basic_auth user, pass
      end
      http.request(req) {|response|
        resp = response
        if options[:content_length_proc] && Net::HTTPSuccess === resp
          if resp.key?('Content-Length')
            options[:content_length_proc].call(resp['Content-Length'].to_i)
          else
            options[:content_length_proc].call(nil)
          end
        end
        resp.read_body {|str|
          buf << str
          if options[:progress_proc] && Net::HTTPSuccess === resp
            options[:progress_proc].call(buf.size)
          end
        }
      }
    }
    io = buf.io
    io.rewind
    io.status = [resp.code, resp.message]
    resp.each {|name, value| buf.io.meta_add_field name, value }
    case resp
    when Net::HTTPSuccess
    when Net::HTTPMovedPermanently, # 301
         Net::HTTPFound, # 302
         Net::HTTPSeeOther, # 303
         Net::HTTPTemporaryRedirect # 307
      throw :open_uri_redirect, URI.parse(resp['location'])
    else
      raise OpenURI::HTTPError.new(io.status.join(' '), io)
    end
  end

  class HTTPError < StandardError
    def initialize(message, io)
      super(message)
      @io = io
    end
    attr_reader :io
  end

  class Buffer # :nodoc:
    def initialize
      @io = StringIO.new
      @size = 0
    end
    attr_reader :size
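
    # Data is first accumulated in a StringIO; once it exceeds StringMax
    # bytes it is spilled into a Tempfile (see #<< below).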
    StringMax = 10240
    def <<(str)
      @io << str
      @size += str.length
      if StringIO === @io && StringMax < @size
        require 'tempfile'
        io = Tempfile.new('open-uri')
        io.binmode
        Meta.init io, @io if Meta === @io
        io << @io.string
        @io = io
      end
    end

    def io
      Meta.init @io unless Meta === @io
      @io
    end
  end

  # Mixin for holding meta-information.
  module Meta
    def Meta.init(obj, src=nil) # :nodoc:
      obj.extend Meta
      obj.instance_eval {
        @base_uri = nil
        @meta = {}
      }
      if src
        obj.status = src.status
        obj.base_uri = src.base_uri
        src.meta.each {|name, value|
          obj.meta_add_field(name, value)
        }
      end
    end

    # returns an Array which consists of the status code and message.
    attr_accessor :status

    # returns a URI which is the base of relative URIs in the data.
    # It may differ from the URI supplied by the user because of redirection.
    attr_accessor :base_uri

    # returns a Hash which represents the header fields.
    # The Hash keys are downcased for canonicalization.
    attr_reader :meta

    def meta_add_field(name, value) # :nodoc:
      @meta[name.downcase] = value
    end

    # returns a Time which represents the Last-Modified field.
    def last_modified
      if v = @meta['last-modified']
        Time.httpdate(v)
      else
        nil
      end
    end

    RE_LWS = /[\r\n\t ]+/n
    RE_TOKEN = %r{[^\x00- ()<>@,;:\\"/\[\]?={}\x7f]+}n
    RE_QUOTED_STRING = %r{"(?:[\r\n\t !#-\[\]-~\x80-\xff]|\\[\x00-\x7f])*"}n
    RE_PARAMETERS = %r{(?:;#{RE_LWS}?#{RE_TOKEN}#{RE_LWS}?=#{RE_LWS}?(?:#{RE_TOKEN}|#{RE_QUOTED_STRING})#{RE_LWS}?)*}n

    def content_type_parse # :nodoc:
      v = @meta['content-type']
      # The last (?:;#{RE_LWS}?)? matches an extra ";" which violates RFC2045.
      if v && %r{\A#{RE_LWS}?(#{RE_TOKEN})#{RE_LWS}?/(#{RE_TOKEN})#{RE_LWS}?(#{RE_PARAMETERS})(?:;#{RE_LWS}?)?\z}no =~ v
        type = $1.downcase
        subtype = $2.downcase
        parameters = []
        $3.scan(/;#{RE_LWS}?(#{RE_TOKEN})#{RE_LWS}?=#{RE_LWS}?(?:(#{RE_TOKEN})|(#{RE_QUOTED_STRING}))/no) {|att, val, qval|
          val = qval.gsub(/[\r\n\t !#-\[\]-~\x80-\xff]+|(\\[\x00-\x7f])/) { $1 ? $1[1,1] : $& } if qval
          parameters << [att.downcase, val]
        }
        ["#{type}/#{subtype}", *parameters]
      else
        nil
      end
    end

    # returns "type/subtype" which is the MIME Content-Type.
    # It is downcased for canonicalization.
    # Content-Type parameters are stripped.
    def content_type
      type, *parameters = content_type_parse
      type || 'application/octet-stream'
    end

    # returns the charset parameter in the Content-Type field.
    # It is downcased for canonicalization.
    #
    # If the charset parameter is not given but a block is given,
    # the block is called and its result is returned.
    # It can be used to guess the charset.
    #
    # If neither a charset parameter nor a block is given,
    # nil is returned, except for text types in HTTP.
    # In that case, "iso-8859-1" is returned as defined by RFC2616 3.7.1.
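    #
    # For example, a caller can supply a fallback of its own choosing
    # (f is an opened result as in the examples above; the fallback value
    # below is just an illustration):
    #
    #   charset = f.charset { "utf-8" }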
    def charset
      type, *parameters = content_type_parse
      if pair = parameters.assoc('charset')
        pair.last.downcase
      elsif block_given?
        yield
      elsif type && %r{\Atext/} =~ type &&
            @base_uri && /\Ahttp\z/i =~ @base_uri.scheme
        "iso-8859-1" # RFC2616 3.7.1
      else
        nil
      end
    end

    # returns a list of encodings in the Content-Encoding field
    # as an Array of Strings.
    # The encodings are downcased for canonicalization.
    def content_encoding
      v = @meta['content-encoding']
      if v && %r{\A#{RE_LWS}?#{RE_TOKEN}#{RE_LWS}?(?:,#{RE_LWS}?#{RE_TOKEN}#{RE_LWS}?)*}o =~ v
        v.scan(RE_TOKEN).map {|content_coding| content_coding.downcase}
      else
        []
      end
    end
  end

  # Mixin for HTTP and FTP URIs.
  module OpenRead
    # OpenURI::OpenRead#open provides `open' for URI::HTTP and URI::FTP.
    #
    # OpenURI::OpenRead#open takes 3 optional arguments as:
    #   OpenURI::OpenRead#open([mode [, perm]] [, options]) [{|io| ... }]
    #
    # `mode' and `perm' are the same as for Kernel#open.
    #
    # However, `mode' must be a read mode because OpenURI::OpenRead#open
    # doesn't support write mode (yet).
    # Also `perm' is just ignored because it is meaningful only for file
    # creation.
    #
    # `options' must be a hash.
    #
    # Each pair whose key is a string specifies an extra header
    # field for HTTP.
    # I.e. it is ignored for FTP without an HTTP proxy.
    #
    # The hash may include other options whose keys are symbols:
    #
    # [:proxy]
    #  Synopsis:
    #    :proxy => "http://proxy.foo.com:8000/"
    #    :proxy => URI.parse("http://proxy.foo.com:8000/")
    #    :proxy => true
    #    :proxy => false
    #    :proxy => nil
    #
    #  If the :proxy option is specified, the value should be a String, a URI,
    #  a boolean or nil.
    #  When a String or URI is given, it is treated as the proxy URI.
    #  When true is given or the option itself is not specified,
    #  the environment variable `scheme_proxy' is examined, where
    #  `scheme' is replaced by `http', `https' or `ftp'.
    #  When false or nil is given, the environment variables are ignored and
    #  a connection is made to the server directly.
    #
    # [:http_basic_authentication]
    #  Synopsis:
    #    :http_basic_authentication=>[user, password]
    #
    #  If :http_basic_authentication is specified,
    #  the value should be an array which contains 2 strings:
    #  username and password.
    #  It is used for HTTP Basic authentication defined by RFC 2617.
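    #
    #  For example (the host and credentials below are placeholders):
    #
    #    open("http://www.example.com/members-only/",
    #      :http_basic_authentication => ["user", "password"]) {|f|
    #      ...
    #    }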
    #
    # [:content_length_proc]
    #  Synopsis:
    #    :content_length_proc => lambda {|content_length| ... }
    #
    #  If the :content_length_proc option is specified, the given procedure
    #  is called before the actual transfer is started.
    #  It takes one argument, which is the expected content length in bytes.
    #
    #  If two or more transfers are done by HTTP redirection, the procedure
    #  is called only once, for the last transfer.
    #
    #  When the expected content length is unknown, the procedure is called
    #  with nil.
    #  This happens when the HTTP response has no Content-Length header.
    #
    # [:progress_proc]
    #  Synopsis:
    #    :progress_proc => lambda {|size| ...}
    #
    #  If the :progress_proc option is specified, the proc is called with one
    #  argument each time `open' gets a content fragment from the network.
    #  The argument `size' is the accumulated transferred size in bytes.
    #
    #  If two or more transfers are done by HTTP redirection, the procedure
    #  is called only once, for the last transfer.
    #
    #  :progress_proc and :content_length_proc are intended to be used for a
    #  progress bar.
    #  For example, it can be implemented as follows using Ruby/ProgressBar.
    #
    #    pbar = nil
    #    open("http://...",
    #      :content_length_proc => lambda {|t|
    #        if t && 0 < t
    #          pbar = ProgressBar.new("...", t)
    #          pbar.file_transfer_mode
    #        end
    #      },
    #      :progress_proc => lambda {|s|
    #        pbar.set s if pbar
    #      }) {|f| ... }
    #
    # OpenURI::OpenRead#open returns an IO-like object if no block is given.
    # Otherwise it yields the IO object and returns the value of the block.
    # The IO object is extended with OpenURI::Meta.
    def open(*rest, &block)
      OpenURI.open_uri(self, *rest, &block)
    end

    # OpenURI::OpenRead#read([options]) reads the content referenced by self
    # and returns it as a string.
    # The string is extended with OpenURI::Meta.
    # The argument `options' is the same as for OpenURI::OpenRead#open.
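    #
    # For example:
    #
    #   uri = URI.parse("http://www.ruby-lang.org/en/")
    #   str = uri.read
    #   p str.base_uri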
    def read(options={})
      self.open(options) {|f|
        str = f.read
        Meta.init str, f
        str
      }
    end
  end
end

module URI
  class Generic
    # returns a proxy URI.
    # The proxy URI is obtained from environment variables such as http_proxy,
    # ftp_proxy, no_proxy, etc.
    # If there is no proper proxy, nil is returned.
    #
    # Note that capitalized variables (HTTP_PROXY, FTP_PROXY, NO_PROXY, etc.)
    # are examined too.
    #
    # But http_proxy and HTTP_PROXY are treated specially under a CGI
    # environment, because HTTP_PROXY may be set by the Proxy: header.
    # So HTTP_PROXY is not used.
    # http_proxy is not used either if the variable is case insensitive.
    # CGI_HTTP_PROXY can be used instead.
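    #
    # For example (the proxy URL below is just a placeholder; the result also
    # depends on no_proxy):
    #
    #   ENV['http_proxy'] = "http://proxy.foo.com:8000/"
    #   URI.parse("http://www.ruby-lang.org/").find_proxy
    #   #=> URI for http://proxy.foo.com:8000/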
    def find_proxy
      name = self.scheme.downcase + '_proxy'
      proxy_uri = nil
      if name == 'http_proxy' && ENV.include?('REQUEST_METHOD') # CGI?
        # HTTP_PROXY conflicts with *_proxy for proxy settings and
        # HTTP_* for header information in CGI.
        # So it must be used with care.
        pairs = ENV.reject {|k, v| /\Ahttp_proxy\z/i !~ k }
        case pairs.length
        when 0 # no proxy setting anyway.
          proxy_uri = nil
        when 1
          k, v = pairs.shift
          if k == 'http_proxy' && ENV[k.upcase] == nil
            # http_proxy is safe to use because ENV is case sensitive.
            proxy_uri = ENV[name]
          else
            proxy_uri = nil
          end
        else # http_proxy is safe to use because ENV is case sensitive.
          proxy_uri = ENV[name]
        end
        if !proxy_uri
          # Use CGI_HTTP_PROXY.  cf. libwww-perl.
          proxy_uri = ENV["CGI_#{name.upcase}"]
        end
      elsif name == 'http_proxy'
        unless proxy_uri = ENV[name]
          if proxy_uri = ENV[name.upcase]
            warn 'The environment variable HTTP_PROXY is discouraged.  Use http_proxy.'
          end
        end
      else
        proxy_uri = ENV[name] || ENV[name.upcase]
      end

      if proxy_uri && self.host
        require 'socket'
        begin
          addr = IPSocket.getaddress(self.host)
          proxy_uri = nil if /\A127\.|\A::1\z/ =~ addr
        rescue SocketError
        end
      end

      if proxy_uri
        proxy_uri = URI.parse(proxy_uri)
        name = 'no_proxy'
        if no_proxy = ENV[name] || ENV[name.upcase]
          no_proxy.scan(/([^:,]*)(?::(\d+))?/) {|host, port|
            if /(\A|\.)#{Regexp.quote host}\z/i =~ self.host &&
               (!port || self.port == port.to_i)
              proxy_uri = nil
              break
            end
          }
        end
        proxy_uri
      else
        nil
      end
    end
  end

  class HTTP
    def buffer_open(buf, proxy, options) # :nodoc:
      OpenURI.open_http(buf, self, proxy, options)
    end

    include OpenURI::OpenRead
  end

  class FTP
    def buffer_open(buf, proxy, options) # :nodoc:
      if proxy
        OpenURI.open_http(buf, self, proxy, options)
        return
      end
      require 'net/ftp'

      directories = self.path.split(%r{/}, -1)
      directories.shift if directories[0] == '' # strip a field before leading slash
      directories.each {|d|
        d.gsub!(/%([0-9A-Fa-f][0-9A-Fa-f])/) { [$1].pack("H2") }
      }
      unless filename = directories.pop
        raise ArgumentError, "no filename: #{self.inspect}"
      end
      directories.each {|d|
        if /[\r\n]/ =~ d
          raise ArgumentError, "invalid directory: #{d.inspect}"
        end
      }
      if /[\r\n]/ =~ filename
        raise ArgumentError, "invalid filename: #{filename.inspect}"
      end
      typecode = self.typecode
      if typecode && /\A[aid]\z/ !~ typecode
        raise ArgumentError, "invalid typecode: #{typecode.inspect}"
      end

      # The access sequence is defined by RFC 1738.
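      # For example, ftp://host/a/b/file;type=i is fetched roughly as:
      # login, CWD a, CWD b, TYPE I, RETR file.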
      ftp = Net::FTP.open(self.host)
      # todo: extract user/passwd from .netrc.
      user = 'anonymous'
      passwd = nil
      user, passwd = self.userinfo.split(/:/) if self.userinfo
      ftp.login(user, passwd)
      directories.each {|cwd|
        ftp.voidcmd("CWD #{cwd}")
      }
      if typecode
        # xxx: typecode D is not handled.
        ftp.voidcmd("TYPE #{typecode.upcase}")
      end
      if options[:content_length_proc]
        options[:content_length_proc].call(ftp.size(filename))
      end
      ftp.retrbinary("RETR #{filename}", 4096) { |str|
        buf << str
        options[:progress_proc].call(buf.size) if options[:progress_proc]
      }
      ftp.close
      buf.io.rewind
    end

    include OpenURI::OpenRead
  end
end