2019-05-28 08:20:42 +03:00
|
|
|
# Used by configure and make to download or update mirrored Ruby and GCC
|
2016-07-03 00:01:04 +03:00
|
|
|
# files. This will use HTTPS if possible, falling back to HTTP.
|
|
|
|
|
2017-05-21 19:45:35 +03:00
|
|
|
require 'fileutils'
|
2013-06-14 08:01:54 +04:00
|
|
|
require 'open-uri'
|
2017-05-21 19:45:35 +03:00
|
|
|
require 'pathname'
|
2014-12-25 19:25:57 +03:00
|
|
|
# Probe for TLS support: net/https is missing when Ruby was built without
# OpenSSL; in that case all downloads fall back to plain HTTP (see
# Downloader.download, which rewrites https:// URLs accordingly).
begin
  require 'net/https'
rescue LoadError
  https = 'http'
else
  https = 'https'

  # open-uri of ruby 2.2.0 accepts an array of PEMs as ssl_ca_cert, but old
  # versions do not. so, patching OpenSSL::X509::Store#add_file instead.
  class OpenSSL::X509::Store
    alias orig_add_file add_file
    # Accept a single PEM path, a directory, or an array of either:
    # directories are registered via add_path, plain files via the
    # original add_file.
    def add_file(pems)
      Array(pems).each do |pem|
        if File.directory?(pem)
          add_path pem
        else
          orig_add_file pem
        end
      end
    end
  end
  # since open-uri internally checks ssl_ca_cert using File.directory?,
  # allow to accept an array.
  class <<File
    alias orig_directory? directory?
    # NOTE(review): global monkey-patch — an Array argument is reported as
    # "not a directory" so open-uri passes it through to the patched
    # add_file above instead of treating it as a cert directory.
    def File.directory? files
      files.is_a?(Array) ? false : orig_directory?(files)
    end
  end
end
|
2013-06-14 08:01:54 +04:00
|
|
|
|
2014-06-24 05:26:21 +04:00
|
|
|
class Downloader
|
2016-02-02 10:08:13 +03:00
|
|
|
def self.https=(https)
|
|
|
|
@@https = https
|
|
|
|
end
|
|
|
|
|
2016-02-18 10:53:13 +03:00
|
|
|
def self.https?
|
|
|
|
@@https == 'https'
|
|
|
|
end
|
|
|
|
|
2015-11-26 12:09:49 +03:00
|
|
|
def self.https
|
|
|
|
@@https
|
|
|
|
end
|
|
|
|
|
2014-09-28 06:54:59 +04:00
|
|
|
class GNU < self
|
|
|
|
def self.download(name, *rest)
|
2016-02-18 10:53:13 +03:00
|
|
|
if https?
|
2015-11-26 18:40:58 +03:00
|
|
|
super("https://raw.githubusercontent.com/gcc-mirror/gcc/master/#{name}", name, *rest)
|
|
|
|
else
|
2016-02-18 10:53:13 +03:00
|
|
|
super("https://repo.or.cz/official-gcc.git/blob_plain/HEAD:/#{name}", name, *rest)
|
2015-11-26 18:40:58 +03:00
|
|
|
end
|
2014-09-28 06:54:59 +04:00
|
|
|
end
|
2014-09-23 15:56:30 +04:00
|
|
|
end
|
|
|
|
|
2014-09-28 06:54:59 +04:00
|
|
|
class RubyGems < self
|
2016-01-23 10:30:32 +03:00
|
|
|
def self.download(name, dir = nil, since = true, options = {})
|
2015-01-14 10:45:28 +03:00
|
|
|
require 'rubygems'
|
2017-03-21 10:19:23 +03:00
|
|
|
options = options.dup
|
2016-04-06 11:40:02 +03:00
|
|
|
options[:ssl_ca_cert] = Dir.glob(File.expand_path("../lib/rubygems/ssl_certs/**/*.pem", File.dirname(__FILE__)))
|
2017-05-21 17:29:00 +03:00
|
|
|
super("https://rubygems.org/downloads/#{name}", name, dir, since, options)
|
2015-01-14 10:45:28 +03:00
|
|
|
end
|
2014-09-23 15:56:30 +04:00
|
|
|
end
|
|
|
|
|
2014-09-28 06:54:59 +04:00
|
|
|
Gems = RubyGems
|
2014-09-24 12:55:09 +04:00
|
|
|
|
2014-09-28 06:54:59 +04:00
|
|
|
  # Fetches Unicode Character Database files from unicode.org, with special
  # handling for beta-period file names (draft suffixes like "-12.1.0d1").
  class Unicode < self
    INDEX = {} # cache index file information across files in the same directory
    UNICODE_PUBLIC = "http://www.unicode.org/Public/"

    # options[:unicode_beta] == 'YES' switches to beta mode: the directory's
    # index.html is fetched once and scanned for the draft file name.
    def self.download(name, dir = nil, since = true, options = {})
      options = options.dup
      unicode_beta = options.delete(:unicode_beta)
      name_dir_part = name.sub(/[^\/]+$/, '') # directory part of +name+, trailing slash kept
      if unicode_beta == 'YES'
        if INDEX.size == 0
          index_options = options.dup
          index_options[:cache_save] = false # TODO: make sure caching really doesn't work for index file
          # Remember the previously downloaded index (if any) so we can tell
          # whether the server's listing changed since last time.
          index_data = File.read(under(dir, "index.html")) rescue nil
          index_file = super(UNICODE_PUBLIC+name_dir_part, "#{name_dir_part}index.html", dir, true, index_options)
          INDEX[:index] = File.read(index_file)
          # If the listing changed, force a freshness check for the data files.
          since = true unless INDEX[:index] == index_data
        end
        file_base = File.basename(name, '.txt')
        return if file_base == '.' # Use pre-generated headers and tables
        # Beta files may carry a draft suffix such as "-12.1.0d2.txt"; find
        # the actual name in the cached index listing.
        # NOTE(review): beta_name is nil when the file is absent from the
        # index — the concatenation below would then raise TypeError; confirm
        # that callers only request files listed in the index.
        beta_name = INDEX[:index][/#{Regexp.quote(file_base)}(-[0-9.]+d\d+)?\.txt/]
        # make sure we always check for new versions of files,
        # because they can easily change in the beta period
        super(UNICODE_PUBLIC+name_dir_part+beta_name, name, dir, since, options)
      else
        # Outside the beta period a leftover index.html signals stale beta
        # downloads (the exempted version prefixes are known to be fine).
        index_file = Pathname.new(under(dir, name_dir_part+'index.html'))
        if index_file.exist? and name_dir_part !~ /^(12\.1\.0|emoji\/12\.0)/
          raise "Although Unicode is not in beta, file #{index_file} exists. " +
                "Remove all files in this directory and in .downloaded-cache/ " +
                "because they may be leftovers from the beta period."
        end
        super(UNICODE_PUBLIC+name, name, dir, since, options)
      end
    end
  end
|
|
|
|
|
2014-09-23 15:52:23 +04:00
|
|
|
def self.mode_for(data)
|
2014-09-29 01:47:59 +04:00
|
|
|
/\A#!/ =~ data ? 0755 : 0644
|
2014-09-23 15:52:23 +04:00
|
|
|
end
|
|
|
|
|
|
|
|
def self.http_options(file, since)
|
|
|
|
options = {}
|
|
|
|
if since
|
|
|
|
case since
|
|
|
|
when true
|
|
|
|
since = (File.mtime(file).httpdate rescue nil)
|
|
|
|
when Time
|
|
|
|
since = since.httpdate
|
|
|
|
end
|
|
|
|
if since
|
|
|
|
options['If-Modified-Since'] = since
|
|
|
|
end
|
|
|
|
end
|
2019-04-30 16:35:38 +03:00
|
|
|
options['Accept-Encoding'] = 'identity' # to disable Net::HTTP::GenericRequest#decode_content
|
2014-09-23 15:52:23 +04:00
|
|
|
options
|
2014-06-24 05:26:21 +04:00
|
|
|
end
|
2014-09-23 11:08:16 +04:00
|
|
|
|
2016-01-23 10:30:32 +03:00
|
|
|
# Downloader.download(url, name, [dir, [since]])
|
2014-09-23 15:52:23 +04:00
|
|
|
#
|
2014-09-23 11:08:03 +04:00
|
|
|
# Update a file from url if newer version is available.
|
|
|
|
# Creates the file if the file doesn't yet exist; however, the
|
|
|
|
# directory where the file is being created has to exist already.
|
2016-01-23 10:30:32 +03:00
|
|
|
# The +since+ parameter can take the following values, with associated meanings:
|
|
|
|
# true ::
|
|
|
|
# Take the last-modified time of the current file on disk, and only download
|
|
|
|
# if the server has a file that was modified later. Download unconditionally
|
|
|
|
# if we don't have the file yet. Default.
|
|
|
|
# +some time value+ ::
|
|
|
|
# Use this time value instead of the time of modification of the file on disk.
|
|
|
|
# nil ::
|
|
|
|
# Only download the file if it doesn't exist yet.
|
|
|
|
# false ::
|
|
|
|
# always download url regardless of whether we already have a file,
|
|
|
|
# and regardless of modification times. (This is essentially just a waste of
|
|
|
|
# network resources, except in the case that the file we have is somehow damaged.
|
|
|
|
# Please note that using this recurringly might create or be seen as a
|
|
|
|
# denial of service attack.)
|
2014-09-23 15:52:23 +04:00
|
|
|
#
|
2014-09-23 11:08:03 +04:00
|
|
|
# Example usage:
|
2014-10-06 05:15:23 +04:00
|
|
|
# download 'http://www.unicode.org/Public/UCD/latest/ucd/UnicodeData.txt',
|
|
|
|
# 'UnicodeData.txt', 'enc/unicode/data'
|
2016-01-23 10:30:32 +03:00
|
|
|
  def self.download(url, name, dir = nil, since = true, options = {})
    options = options.dup
    url = URI(url)
    dryrun = options.delete(:dryrun)
    options.delete(:unicode_beta) # just to be on the safe side for gems and gcc

    if name
      file = Pathname.new(under(dir, name))
    else
      # No explicit name: derive it from the URL; the cache path doubles
      # as the destination (file ||= cache below).
      name = File.basename(url.path)
    end
    cache_save = options.delete(:cache_save) {
      ENV["CACHE_SAVE"] != "no"
    }
    cache = cache_file(url, name, options.delete(:cache_dir))
    file ||= cache
    # since == nil means "only download when the file is missing".
    if since.nil? and file.exist?
      if $VERBOSE
        $stdout.puts "#{file} already exists"
        $stdout.flush
      end
      if cache_save
        save_cache(cache, file, name)
      end
      return file.to_path
    end
    if dryrun
      puts "Download #{url} into #{file}"
      return
    end
    # Reuse the cached copy via a sym/hard link when possible (no network).
    if link_cache(cache, file, name, $VERBOSE)
      return file.to_path
    end
    # Downgrade to plain HTTP when TLS support is unavailable in this build.
    if !https? and URI::HTTPS === url
      warn "*** using http instead of https ***"
      url.scheme = 'http'
      url = URI(url.to_s) # re-parse so the object becomes a URI::HTTP
    end
    if $VERBOSE
      $stdout.print "downloading #{name} ... "
      $stdout.flush
    end
    begin
      data = with_retry(9) do
        url.read(options.merge(http_options(file, since.nil? ? true : since)))
      end
    rescue OpenURI::HTTPError => http_error
      if http_error.message =~ /^304 / # 304 Not Modified
        if $VERBOSE
          $stdout.puts "#{name} not modified"
          $stdout.flush
        end
        return file.to_path
      end
      raise
    rescue Timeout::Error
      # In only-if-missing mode fall back to the existing copy.
      if since.nil? and file.exist?
        puts "Request for #{url} timed out, using old version."
        return file.to_path
      end
      raise
    rescue SocketError
      if since.nil? and file.exist?
        puts "No network connection, unable to download #{url}, using old version."
        return file.to_path
      end
      raise
    end
    mtime = nil
    # Write straight into the cache when it doesn't exist yet and saving is
    # enabled; link_cache below then connects +file+ to it.
    dest = (cache_save && cache && !cache.exist? ? cache : file)
    dest.parent.mkpath
    dest.open("wb", 0600) do |f|
      f.write(data)
      f.chmod(mode_for(data))
      mtime = data.meta["last-modified"]
    end
    if mtime
      # Mirror the server's Last-Modified so later If-Modified-Since works.
      mtime = Time.httpdate(mtime)
      dest.utime(mtime, mtime)
    end
    if $VERBOSE
      $stdout.puts "done"
      $stdout.flush
    end
    if dest.eql?(cache)
      link_cache(cache, file, name)
    elsif cache_save
      save_cache(cache, file, name)
    end
    return file.to_path
  rescue => e
    raise "failed to download #{name}\n#{e.class}: #{e.message}: #{url}"
  end
|
2015-01-14 10:45:28 +03:00
|
|
|
|
|
|
|
def self.under(dir, name)
|
|
|
|
dir ? File.join(dir, File.basename(name)) : name
|
|
|
|
end
|
2017-05-21 19:45:35 +03:00
|
|
|
|
|
|
|
def self.cache_file(url, name, cache_dir = nil)
|
|
|
|
case cache_dir
|
|
|
|
when false
|
|
|
|
return nil
|
|
|
|
when nil
|
|
|
|
cache_dir = ENV['CACHE_DIR']
|
|
|
|
if !cache_dir or cache_dir.empty?
|
|
|
|
cache_dir = ".downloaded-cache"
|
|
|
|
end
|
|
|
|
end
|
|
|
|
Pathname.new(cache_dir) + (name || File.basename(URI(url).path))
|
|
|
|
end
|
|
|
|
|
|
|
|
  # Makes +file+ point at the cached copy, preferring a symlink and falling
  # back to a hard link.  Returns false when there is no usable cache, true
  # on success (or when cache and file are already the same path), and nil
  # when both link attempts failed.
  def self.link_cache(cache, file, name, verbose = false)
    return false unless cache and cache.exist?
    return true if cache.eql?(file)
    # NOTE(review): on Cygwin the symlink attempt is skipped unless
    # winsymlink:nativestrict is set — presumably because emulated symlinks
    # are not usable there; confirm before changing.
    if /cygwin/ !~ RUBY_PLATFORM or /winsymlink:nativestrict/ =~ ENV['CYGWIN']
      begin
        file.make_symlink(cache.relative_path_from(file.parent))
      rescue SystemCallError
        # fall through to the hard-link attempt below
      else
        if verbose
          $stdout.puts "made symlink #{name} to #{cache}"
          $stdout.flush
        end
        return true
      end
    end
    begin
      file.make_link(cache)
    rescue SystemCallError
      # both strategies failed; caller falls back to downloading
    else
      if verbose
        $stdout.puts "made link #{name} to #{cache}"
        $stdout.flush
      end
      return true
    end
  end
|
|
|
|
|
|
|
|
def self.save_cache(cache, file, name)
|
2019-04-02 11:08:29 +03:00
|
|
|
return unless cache or cache.eql?(file)
|
|
|
|
begin
|
|
|
|
st = cache.stat
|
|
|
|
rescue
|
2017-05-21 19:45:35 +03:00
|
|
|
begin
|
|
|
|
file.rename(cache)
|
|
|
|
rescue
|
2019-04-02 11:08:29 +03:00
|
|
|
return
|
2017-05-21 19:45:35 +03:00
|
|
|
end
|
2019-04-02 11:08:29 +03:00
|
|
|
else
|
|
|
|
return unless st.mtime > file.lstat.mtime
|
|
|
|
file.unlink
|
2017-05-21 19:45:35 +03:00
|
|
|
end
|
2019-04-02 11:08:29 +03:00
|
|
|
link_cache(cache, file, name)
|
2017-05-21 19:45:35 +03:00
|
|
|
end
|
2017-12-27 16:32:59 +03:00
|
|
|
|
2018-08-11 07:22:14 +03:00
|
|
|
  # Runs +block+, retrying transient network failures up to +max_times+
  # with quadratic backoff (1, 4, 9, ... seconds).  HTTP errors are retried
  # only for status 500/502/503; everything else is re-raised immediately.
  def self.with_retry(max_times, &block)
    times = 0
    begin
      block.call
    rescue Errno::ETIMEDOUT, SocketError, OpenURI::HTTPError, Net::ReadTimeout, Net::OpenTimeout => e
      raise if e.is_a?(OpenURI::HTTPError) && e.message !~ /^50[023] / # retry only 500, 502, 503 for http error
      times += 1
      if times <= max_times
        $stderr.puts "retrying #{e.class} (#{e.message}) after #{times ** 2} seconds..."
        sleep(times ** 2)
        retry
      else
        raise
      end
    end
  end
  private_class_method :with_retry
|
2013-06-14 08:01:54 +04:00
|
|
|
end
|
2014-09-28 15:27:22 +04:00
|
|
|
|
2016-02-02 10:08:13 +03:00
|
|
|
# Propagate the scheme detected at load time ('https' when net/https loaded,
# 'http' otherwise) to the Downloader class.
Downloader.https = https.freeze
|
2015-11-26 12:09:49 +03:00
|
|
|
|
2014-09-28 15:27:22 +04:00
|
|
|
# Command-line driver:
#   downloader.rb [-d destdir] [-p prefix] [-e|-a] [-n] [--cache-dir DIR]
#                 [--unicode-beta VAL] (gnu|rubygems|unicode NAME... | URL NAME)
if $0 == __FILE__
  since = true
  options = {}
  until ARGV.empty?
    case ARGV[0]
    when '-d'
      destdir = ARGV[1]
      ARGV.shift
    when '-p'
      # strip directory names from the name to download, and add the
      # prefix instead.
      prefix = ARGV[1]
      ARGV.shift
    when '-e'
      since = nil   # download only when the file doesn't exist yet
    when '-a'
      since = false # always download
    when '-n', '--dryrun'
      options[:dryrun] = true
    when '--cache-dir'
      options[:cache_dir] = ARGV[1]
      ARGV.shift
    when '--unicode-beta'
      options[:unicode_beta] = ARGV[1]
      ARGV.shift
    when /\A--cache-dir=(.*)/m
      options[:cache_dir] = $1
    when /\A-/
      abort "#{$0}: unknown option #{ARGV[0]}"
    else
      break
    end
    ARGV.shift
  end
  # The first remaining argument may name a Downloader subclass
  # (case-insensitive constant lookup: gnu, rubygems, gems, unicode).
  dl = Downloader.constants.find do |name|
    ARGV[0].casecmp(name.to_s) == 0
  end unless ARGV.empty?
  $VERBOSE = true
  if dl
    dl = Downloader.const_get(dl)
    ARGV.shift
    ARGV.each do |name|
      dir = destdir
      if prefix
        name = name.sub(/\A\.\//, '')
        destdir2 = destdir.sub(/\A\.\//, '')
        if name.start_with?(destdir2+"/")
          # Name was given relative to destdir: keep its subdirectory
          # structure under destdir.
          name = name[(destdir2.size+1)..-1]
          if (dir = File.dirname(name)) == '.'
            dir = destdir
          else
            dir = File.join(destdir, dir)
          end
        else
          name = File.basename(name)
        end
        name = "#{prefix}/#{name}"
      end
      dl.download(name, dir, since, options)
    end
  else
    # Otherwise expect a raw URL plus a destination name.
    abort "usage: #{$0} url name" unless ARGV.size == 2
    Downloader.download(ARGV[0], ARGV[1], destdir, since, options)
  end
end
|