# Used by configure and make to download or update mirrored Ruby and GCC
# files. This will use HTTPS if possible, falling back to HTTP.
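#
# It can also be run directly; a sketch of the two calling forms, based on
# the option handling at the bottom of this file:
#   ruby downloader.rb [options] url name
#   ruby downloader.rb [options] gnu|rubygems|unicode name...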

require 'fileutils'
require 'open-uri'
require 'pathname'
begin
  require 'net/https'
rescue LoadError
  https = 'http'
else
  https = 'https'

  # open-uri in Ruby 2.2.0 accepts an array of PEMs as ssl_ca_cert, but
  # older versions do not, so patch OpenSSL::X509::Store#add_file instead.
  class OpenSSL::X509::Store
    alias orig_add_file add_file
    def add_file(pems)
      Array(pems).each do |pem|
        if File.directory?(pem)
          add_path pem
        else
          orig_add_file pem
        end
      end
    end
  end

  # Since open-uri internally checks ssl_ca_cert using File.directory?,
  # patch it to also accept an array.
  class << File
    alias orig_directory? directory?
    def directory?(files)
      files.is_a?(Array) ? false : orig_directory?(files)
    end
  end
end

class Downloader
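  # Map a downloader name to its class, case-insensitively:
  #   Downloader.find("gnu")     # => Downloader::GNU
  #   Downloader.find("unicode") # => Downloader::Unicode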
  def self.find(dlname)
    constants.find do |name|
      return const_get(name) if dlname.casecmp(name.to_s) == 0
    end
  end

  def self.https=(https)
    @@https = https
  end

  def self.https?
    @@https == 'https'
  end

  def self.https
    @@https
  end

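  # Hook for subclasses to consume downloader-specific options from argv.
  # Returns true if an option was consumed; the base implementation
  # handles none.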
  def self.get_option(argv, options)
    false
  end

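  # Fetches GNU config files (e.g. config.guess and config.sub) from GCC
  # mirrors, falling back to a second mirror when the first fails.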
  class GNU < self
    def self.download(name, *rest)
      if https?
        begin
          super("https://cdn.jsdelivr.net/gh/gcc-mirror/gcc@master/#{name}", name, *rest)
        rescue => e
          m1, m2 = e.message.split("\n", 2)
          STDERR.puts "Download failed (#{m1}), trying another URL\n#{m2}"
          super("https://raw.githubusercontent.com/gcc-mirror/gcc/master/#{name}", name, *rest)
        end
      else
        super("https://repo.or.cz/official-gcc.git/blob_plain/HEAD:/#{name}", name, *rest)
      end
    end
  end

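  # Fetches gem files from rubygems.org, pinning TLS verification to the
  # CA certificates bundled under lib/rubygems/ssl_certs. For prerelease
  # gems, HTTP 4xx errors are ignored rather than treated as fatal.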
  class RubyGems < self
    def self.download(name, dir = nil, since = true, options = {})
      require 'rubygems'
      options = options.dup
      options[:ssl_ca_cert] = Dir.glob(File.expand_path("../lib/rubygems/ssl_certs/**/*.pem", File.dirname(__FILE__)))
      if Gem::Version.new(name[/-\K[^-]*(?=\.gem\z)/]).prerelease?
        options[:ignore_http_client_errors] = true
      end
      super("https://rubygems.org/downloads/#{name}", name, dir, since, options)
    end
  end

  Gems = RubyGems

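  # Fetches Unicode data files from unicode.org. During a beta period
  # (--unicode-beta YES), file names on the server carry a beta suffix,
  # so the directory index is downloaded first and used to resolve them.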
  class Unicode < self
    INDEX = {} # cache index file information across files in the same directory
    UNICODE_PUBLIC = "https://www.unicode.org/Public/"

    def self.get_option(argv, options)
      case argv[0]
      when '--unicode-beta'
        options[:unicode_beta] = argv[1]
        argv.shift(2)
        true
      when /\A--unicode-beta=(.*)/m
        options[:unicode_beta] = $1
        argv.shift
        true
      else
        super
      end
    end

    def self.download(name, dir = nil, since = true, options = {})
      options = options.dup
      unicode_beta = options.delete(:unicode_beta)
      name_dir_part = name.sub(/[^\/]+$/, '')
      if unicode_beta == 'YES'
        if INDEX.size == 0
          index_options = options.dup
          index_options[:cache_save] = false # TODO: make sure the index file really isn't cached
          index_data = File.read(under(dir, "index.html")) rescue nil
          index_file = super(UNICODE_PUBLIC+name_dir_part, "#{name_dir_part}index.html", dir, true, index_options)
          INDEX[:index] = File.read(index_file)
          since = true unless INDEX[:index] == index_data
        end
        file_base = File.basename(name, '.txt')
        return if file_base == '.' # Use pre-generated headers and tables
        beta_name = INDEX[:index][/#{Regexp.quote(file_base)}(-[0-9.]+d\d+)?\.txt/]
        # Always check for new versions of files, because they can easily
        # change during the beta period.
        super(UNICODE_PUBLIC+name_dir_part+beta_name, name, dir, since, options)
      else
        index_file = Pathname.new(under(dir, name_dir_part+'index.html'))
        if index_file.exist? and name_dir_part !~ /^(12\.1\.0|emoji\/12\.0)/
          raise "Although Unicode is not in beta, file #{index_file} exists. " +
                "Remove all files in this directory and in .downloaded-cache/ " +
                "because they may be leftovers from the beta period."
        end
        super(UNICODE_PUBLIC+name, name, dir, since, options)
      end
    end
  end

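  # Scripts (anything starting with a shebang line) are saved executable;
  # everything else gets mode 0644.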
  def self.mode_for(data)
    /\A#!/ =~ data ? 0755 : 0644
  end

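  # Build the open-uri request options: an If-Modified-Since header derived
  # from +since+ (true means the mtime of the existing file), and
  # Accept-Encoding: identity to avoid transparent decompression.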
  def self.http_options(file, since)
    options = {}
    if since
      case since
      when true
        since = (File.mtime(file).httpdate rescue nil)
      when Time
        since = since.httpdate
      end
      if since
        options['If-Modified-Since'] = since
      end
    end
    options['Accept-Encoding'] = 'identity' # to disable Net::HTTP::GenericRequest#decode_content
    options
  end

  def self.httpdate(date)
    Time.httpdate(date)
  rescue ArgumentError => e
    # Some hosts (e.g., zlib.net) sometimes return a date that resembles
    # RFC 850 but with a 4-digit year.
    /\A\s*
     (?:Mon|Tues|Wednes|Thurs|Fri|Satur|Sun)day,\x20
     (\d\d)-(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec)-(\d{4})\x20
     (\d\d):(\d\d):(\d\d)\x20
     GMT
     \s*\z/ix =~ date or raise
    warn e.message
    Time.utc($3, $2, $1, $4, $5, $6)
  end

  # Downloader.download(url, name, [dir, [since]])
  #
  # Update a file from url if a newer version is available.
  # Creates the file if the file doesn't yet exist; however, the
  # directory where the file is being created has to exist already.
  # The +since+ parameter can take the following values, with associated meanings:
  #  true ::
  #    Take the last-modified time of the current file on disk, and only download
  #    if the server has a file that was modified later. Download unconditionally
  #    if we don't have the file yet. Default.
  #  +some time value+ ::
  #    Use this time value instead of the time of modification of the file on disk.
  #  nil ::
  #    Only download the file if it doesn't exist yet.
  #  false ::
  #    Always download url regardless of whether we already have a file,
  #    and regardless of modification times. (This is essentially just a waste of
  #    network resources, except in the case that the file we have is somehow damaged.
  #    Please note that using this repeatedly might create or be seen as a
  #    denial of service attack.)
  #
  # Example usage:
  #   download 'http://www.unicode.org/Public/UCD/latest/ucd/UnicodeData.txt',
  #            'UnicodeData.txt', 'enc/unicode/data'
  def self.download(url, name, dir = nil, since = true, options = {})
    options = options.dup
    url = URI(url)
    dryrun = options.delete(:dryrun)

    if name
      file = Pathname.new(under(dir, name))
    else
      name = File.basename(url.path)
    end
    cache_save = options.delete(:cache_save) {
      ENV["CACHE_SAVE"] != "no"
    }
    cache = cache_file(url, name, options.delete(:cache_dir))
    file ||= cache
    if since.nil? and file.exist?
      if $VERBOSE
        $stdout.puts "#{file} already exists"
        $stdout.flush
      end
      if cache_save
        save_cache(cache, file, name)
      end
      return file.to_path
    end
    if dryrun
      puts "Download #{url} into #{file}"
      return
    end
    if link_cache(cache, file, name, $VERBOSE)
      return file.to_path
    end
    if !https? and URI::HTTPS === url
      warn "*** using http instead of https ***"
      url.scheme = 'http'
      url = URI(url.to_s)
    end
    if $VERBOSE
      $stdout.print "downloading #{name} ... "
      $stdout.flush
    end
    mtime = nil
    ignore_http_client_errors = options.delete(:ignore_http_client_errors)
    options = options.merge(http_options(file, since.nil? ? true : since))
    begin
      data = with_retry(10) do
        data = url.read(options)
        if mtime = data.meta["last-modified"]
          mtime = Time.httpdate(mtime)
        end
        data
      end
    rescue OpenURI::HTTPError => http_error
      case http_error.message
      when /^304 / # 304 Not Modified
        if $VERBOSE
          $stdout.puts "#{name} not modified"
          $stdout.flush
        end
        return file.to_path
      when /^40/ # Net::HTTPClientError: 403 Forbidden, 404 Not Found
        if ignore_http_client_errors
          puts "Ignore #{url}: #{http_error.message}"
          return file.to_path
        end
      end
      raise
    rescue Timeout::Error
      if since.nil? and file.exist?
        puts "Request for #{url} timed out, using old version."
        return file.to_path
      end
      raise
    rescue SocketError
      if since.nil? and file.exist?
        puts "No network connection, unable to download #{url}, using old version."
        return file.to_path
      end
      raise
    end
    dest = (cache_save && cache && !cache.exist? ? cache : file)
    dest.parent.mkpath
    dest.unlink if dest.symlink? && !dest.exist?
    dest.open("wb", 0600) do |f|
      f.write(data)
      f.chmod(mode_for(data))
    end
    if mtime
      dest.utime(mtime, mtime)
    end
    if $VERBOSE
      $stdout.puts "done"
      $stdout.flush
    end
    if dest.eql?(cache)
      link_cache(cache, file, name)
    elsif cache_save
      save_cache(cache, file, name)
    end
    return file.to_path
  rescue => e
    raise "failed to download #{name}\n#{e.class}: #{e.message}: #{url}"
  end

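  # Place +name+ (basename only) under +dir+; without a directory, +name+
  # is returned as-is.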
  def self.under(dir, name)
    dir ? File.join(dir, File.basename(name)) : name
  end

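  # Resolve the cache path for a download. For example, with the defaults
  # (CACHE_DIR unset; hypothetical URL):
  #   cache_file("https://example.com/pub/foo.txt", "foo.txt")
  #   # => Pathname(".downloaded-cache/foo.txt")
  # A +cache_dir+ of false disables caching entirely.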
  def self.cache_file(url, name, cache_dir = nil)
    case cache_dir
    when false
      return nil
    when nil
      cache_dir = ENV['CACHE_DIR']
      if !cache_dir or cache_dir.empty?
        cache_dir = ".downloaded-cache"
      end
    end
    Pathname.new(cache_dir) + (name || File.basename(URI(url).path))
  end

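  # Try to make +file+ point at the cached copy, preferring a relative
  # symlink and falling back to a hard link. Returns true on success,
  # false (or nil) if there is no usable cache entry.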
  def self.link_cache(cache, file, name, verbose = false)
    return false unless cache and cache.exist?
    return true if cache.eql?(file)
    if /cygwin/ !~ RUBY_PLATFORM or /winsymlink:nativestrict/ =~ ENV['CYGWIN']
      begin
        link = cache.relative_path_from(file.parent)
      rescue ArgumentError
        abs = cache.expand_path
        link = abs.relative_path_from(file.parent.expand_path)
        if link.to_s.count("/") > abs.to_s.count("/")
          link = abs
        end
      end
      begin
        file.make_symlink(link)
      rescue SystemCallError
      else
        if verbose
          $stdout.puts "made symlink #{name} to #{cache}"
          $stdout.flush
        end
        return true
      end
    end
    begin
      file.make_link(cache)
    rescue SystemCallError
    else
      if verbose
        $stdout.puts "made link #{name} to #{cache}"
        $stdout.flush
      end
      return true
    end
  end

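  # Promote a freshly downloaded +file+ into the cache: move it there when
  # the cache entry is missing, replace +file+ when the cache is newer,
  # then re-link +file+ to the cache.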
  def self.save_cache(cache, file, name)
    return unless cache and !cache.eql?(file)
    begin
      st = cache.stat
    rescue
      begin
        file.rename(cache)
      rescue
        return
      end
    else
      return unless st.mtime > file.lstat.mtime
      file.unlink
    end
    link_cache(cache, file, name)
  end

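  # Run the block, retrying transient network failures (and HTTP 500/502/503)
  # up to +max_times+ with quadratic backoff: 1, 4, 9, ... seconds.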
  def self.with_retry(max_times, &block)
    times = 0
    begin
      block.call
    rescue Errno::ETIMEDOUT, SocketError, OpenURI::HTTPError, Net::ReadTimeout, Net::OpenTimeout, ArgumentError => e
      raise if e.is_a?(OpenURI::HTTPError) && e.message !~ /^50[023] / # retry only 500, 502, 503 for http error
      times += 1
      if times <= max_times
        $stderr.puts "retrying #{e.class} (#{e.message}) after #{times ** 2} seconds..."
        sleep(times ** 2)
        retry
      else
        raise
      end
    end
  end
  private_class_method :with_retry
end

Downloader.https = https.freeze
if $0 == __FILE__
  since = true
  options = {}
  dl = nil
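  # args.downloader? lazily picks the Downloader subclass: the first
  # non-option argument that names one (e.g. "gnu", "rubygems", "unicode")
  # selects it instead of being collected into args.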
  (args = []).singleton_class.__send__(:define_method, :downloader?) do |arg|
    !dl and args.empty? and (dl = Downloader.find(arg))
  end
  until ARGV.empty?
    if ARGV[0] == '--'
      ARGV.shift
      break if ARGV.empty?
      ARGV.shift if args.downloader? ARGV[0]
      args.concat(ARGV)
      break
    end

    if dl and dl.get_option(ARGV, options)
      # the downloader consumed and removed these arguments from ARGV
      next
    end

    case ARGV[0]
    when '-d', '--destdir'
      ## -d, --destdir DIRECTORY  Download into the directory.
      destdir = ARGV[1]
      ARGV.shift
    when '-p', '--prefix'
      ## -p, --prefix PREFIX  Strip directory names from the name to download,
      ## and add the prefix instead.
      prefix = ARGV[1]
      ARGV.shift
    when '-e', '--exist', '--non-existent-only'
      ## -e, --exist, --non-existent-only  Skip already existing files.
      since = nil
    when '-a', '--always'
      ## -a, --always  Download all files.
      since = false
    when '-u', '--update', '--if-modified'
      ## -u, --update, --if-modified  Download newer files only.
      since = true
    when '-n', '--dry-run', '--dryrun'
      ## -n, --dry-run  Do not actually download.
      options[:dryrun] = true
    when '--cache-dir'
      ## --cache-dir DIRECTORY  Cache downloaded files in the directory.
      options[:cache_dir] = ARGV[1]
      ARGV.shift
    when /\A--cache-dir=(.*)/m
      options[:cache_dir] = $1
    when /\A--help\z/
      ## --help  Print this message.
      puts "Usage: #$0 [options] relative-url..."
      File.foreach(__FILE__) do |line|
        line.sub!(/^ *## /, "") or next
        break if line.chomp!.empty?
        opt, desc = line.split(/ {2,}/, 2)
        printf " %-28s %s\n", opt, desc
      end
      exit
    when /\A-/
      abort "#{$0}: unknown option #{ARGV[0]}"
    else
      args << ARGV[0] unless args.downloader? ARGV[0]
    end
    ARGV.shift
  end

  $VERBOSE = true
  if dl
    args.each do |name|
      dir = destdir
      if prefix
        name = name.sub(/\A\.\//, '')
        destdir2 = destdir.sub(/\A\.\//, '')
        if name.start_with?(destdir2+"/")
          name = name[(destdir2.size+1)..-1]
          if (dir = File.dirname(name)) == '.'
            dir = destdir
          else
            dir = File.join(destdir, dir)
          end
        else
          name = File.basename(name)
        end
        name = "#{prefix}/#{name}"
      end
      dl.download(name, dir, since, options)
    end
  else
    abort "usage: #{$0} url name" unless args.size == 2
    Downloader.download(args[0], args[1], destdir, since, options)
  end
end