# encoding: UTF-8

require 'net/http'
require 'open-uri'
require 'fileutils'
require 'cgi'
require 'json'
require_relative 'wayback_machine_downloader/tidy_bytes'
require_relative 'wayback_machine_downloader/to_regex'
require_relative 'wayback_machine_downloader/archive_api'
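
# Downloads a website from the Internet Archive Wayback Machine, keeping the
# most recent archived snapshot of each file that passes the configured
# filters. A minimal usage sketch (the URL is a hypothetical example):
#
#   wmd = WaybackMachineDownloader.new base_url: 'http://example.com'
#   wmd.download_files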
class WaybackMachineDownloader

  include ArchiveAPI

  VERSION = "1.0.0"

  attr_accessor :base_url, :directory, :from_timestamp, :to_timestamp,
                :only_filter, :exclude_filter, :all, :list, :threads_count
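
  # Options come in as a single params hash (e.g. from an option parser);
  # numeric options are coerced with to_i, so missing values default to 0.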
  def initialize params
    @base_url = params[:base_url]
    @directory = params[:directory]
    @from_timestamp = params[:from_timestamp].to_i
    @to_timestamp = params[:to_timestamp].to_i
    @only_filter = params[:only_filter]
    @exclude_filter = params[:exclude_filter]
    @all = params[:all]
    @list = params[:list]
    @threads_count = params[:threads_count].to_i
  end
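
  # Directory name for the backup: the hostname when the base URL contains
  # a scheme separator ('//'), otherwise the base URL as given.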
  def backup_name
    if @base_url.include? '//'
      @base_url.split('/')[2]
    else
      @base_url
    end
  end
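
  # Root path downloads are written to: the user-supplied directory with a
  # trailing slash ensured, or 'websites/<backup_name>/' by default.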
  def backup_path
    if @directory
      if @directory[-1] == '/'
        @directory
      else
        @directory + '/'
      end
    else
      'websites/' + backup_name + '/'
    end
  end
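
  # Truthy when file_url passes the only filter: treated as a regex when it
  # parses as one (via to_regex), otherwise as a case-insensitive substring.
  # Always true when no filter is set.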
  def match_only_filter file_url
    if @only_filter
      only_filter_regex = @only_filter.to_regex
      if only_filter_regex
        only_filter_regex =~ file_url
      else
        file_url.downcase.include? @only_filter.downcase
      end
    else
      true
    end
  end
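
  # Truthy when file_url matches the exclude filter, using the same
  # regex-or-substring semantics as match_only_filter; false when no
  # filter is set.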
  def match_exclude_filter file_url
    if @exclude_filter
      exclude_filter_regex = @exclude_filter.to_regex
      if exclude_filter_regex
        exclude_filter_regex =~ file_url
      else
        file_url.downcase.include? @exclude_filter.downcase
      end
    else
      false
    end
  end
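
  # Fetches the raw snapshot listings for the site index and for everything
  # under the base URL, then curates them into a hash keyed by file id,
  # keeping only the newest snapshot of each file that passes the filters.
  # Each listing line is assumed to hold a 14-digit timestamp, a separator
  # character, and the original file URL.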
  def get_file_list_curated
    index_file_list_raw = get_raw_list_from_api(@base_url)
    all_file_list_raw = get_raw_list_from_api(@base_url + '/*')
    file_list_curated = Hash.new
    [index_file_list_raw, all_file_list_raw].each do |file|
      file.each_line do |line|
        next unless line.include?('/')
        file_timestamp = line[0..13].to_i
        file_url = line[15..-2]
        file_id = file_url.split('/')[3..-1]
        if file_id.nil?
          puts "Malformed file url, ignoring: #{file_url}"
        else
          file_id = CGI::unescape(file_id.join('/'))
          file_id = file_id.tidy_bytes unless file_id == ""
          if match_exclude_filter(file_url)
            puts "File url matches exclude filter, ignoring: #{file_url}"
          elsif not match_only_filter(file_url)
            puts "File url doesn't match only filter, ignoring: #{file_url}"
          elsif file_list_curated[file_id]
            unless file_list_curated[file_id][:timestamp] > file_timestamp
              file_list_curated[file_id] = {file_url: file_url, timestamp: file_timestamp}
            end
          else
            file_list_curated[file_id] = {file_url: file_url, timestamp: file_timestamp}
          end
        end
      end
    end
    file_list_curated
  end
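
  # The curated list flattened into an array of file info hashes (with the
  # file id folded in), sorted newest first.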
  def get_file_list_by_timestamp
    file_list_curated = get_file_list_curated
    file_list_curated = file_list_curated.sort_by { |k, v| v[:timestamp] }.reverse
    file_list_curated.map do |file_remote_info|
      file_remote_info[1][:file_id] = file_remote_info[0]
      file_remote_info[1]
    end
  end
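
  # Prints the file list as a JSON array, one file per line; presumably the
  # handler for the list option.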
  def list_files
    files = get_file_list_by_timestamp
    puts "["
    files.each_with_index do |file, index|
      separator = index == files.size - 1 ? "" : ","
      puts file.to_json + separator
    end
    puts "]"
  end
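
  # Main entry point: reports why nothing matched when the file list is
  # empty, otherwise drains the shared file queue with @threads_count
  # worker threads (at least one) and reports timing when done.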
  def download_files
    start_time = Time.now
    puts "Downloading #{@base_url} to #{backup_path} from Wayback Machine..."
    puts

    if file_list_by_timestamp.count == 0
      puts "No files to download."
      puts "Possible reasons:"
      puts "\t* Site is not in Wayback Machine Archive."
      puts "\t* From timestamp too far in the future." if @from_timestamp and @from_timestamp != 0
      puts "\t* To timestamp too far in the past." if @to_timestamp and @to_timestamp != 0
      puts "\t* Only filter too restrictive (#{@only_filter})" if @only_filter
      puts "\t* Exclude filter too broad (#{@exclude_filter})" if @exclude_filter
      return
    end

    threads = []
    @processed_file_count = 0
    @threads_count = 1 if @threads_count == 0
    @threads_count.times do
      threads << Thread.new do
        until file_queue.empty?
          file_remote_info = file_queue.pop(true) rescue nil
          download_file(file_remote_info) if file_remote_info
        end
      end
    end

    threads.each(&:join)
    end_time = Time.now
    puts
    puts "Download completed in #{(end_time - start_time).round(2)}s, saved in #{backup_path} (#{file_list_by_timestamp.size} files)"
  end
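
  # Ensures dir_path exists. When a regular file already occupies a needed
  # directory name (Errno::EEXIST), the file is parked under a .temp name,
  # the directory is created, the file becomes <dir>/index.html, and the
  # mkdir is retried recursively.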
  def structure_dir_path dir_path
    begin
      FileUtils.mkdir_p dir_path unless File.exist? dir_path
    rescue Errno::EEXIST => e
      error_to_string = e.to_s
      puts "# #{error_to_string}"
      if error_to_string.include? "File exists @ dir_s_mkdir - "
        file_already_existing = error_to_string.split("File exists @ dir_s_mkdir - ")[-1]
      elsif error_to_string.include? "File exists - "
        file_already_existing = error_to_string.split("File exists - ")[-1]
      else
        raise "Unhandled directory restructure error # #{error_to_string}"
      end
      file_already_existing_temporary = file_already_existing + '.temp'
      file_already_existing_permanent = file_already_existing + '/index.html'
      FileUtils.mv file_already_existing, file_already_existing_temporary
      FileUtils.mkdir_p file_already_existing
      FileUtils.mv file_already_existing_temporary, file_already_existing_permanent
      puts "#{file_already_existing} -> #{file_already_existing_permanent}"
      structure_dir_path dir_path
    end
  end
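
  # Downloads one snapshot into backup_path: directory-like URLs map to an
  # index.html, characters illegal in Windows paths are percent-encoded,
  # and already-downloaded files are skipped. The id_ flag in the archive
  # URL requests the unmodified original file. With @all set, HTTP error
  # bodies are kept; otherwise zero-byte results are deleted.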
  def download_file file_remote_info
    file_url = file_remote_info[:file_url]
    file_id = file_remote_info[:file_id]
    file_timestamp = file_remote_info[:timestamp]
    file_path_elements = file_id.split('/')

    if file_id == ""
      dir_path = backup_path
      file_path = backup_path + 'index.html'
    elsif file_url[-1] == '/' or not file_path_elements[-1].include? '.'
      dir_path = backup_path + file_path_elements[0..-1].join('/')
      file_path = backup_path + file_path_elements[0..-1].join('/') + '/index.html'
    else
      dir_path = backup_path + file_path_elements[0..-2].join('/')
      file_path = backup_path + file_path_elements[0..-1].join('/')
    end

    if Gem.win_platform?
      file_path = file_path.gsub(/[:*?&=<>\\|]/) { |s| '%' + s.ord.to_s(16) }
    end

    unless File.exist? file_path
      begin
        structure_dir_path dir_path
        File.open(file_path, "wb") do |file|
          begin
            open("http://web.archive.org/web/#{file_timestamp}id_/#{file_url}", "Accept-Encoding" => "plain") do |uri|
              file.write(uri.read)
            end
          rescue OpenURI::HTTPError => e
            puts "#{file_url} # #{e}"
            if @all
              file.write(e.io.read)
              puts "#{file_path} saved anyway."
            end
          rescue StandardError => e
            puts "#{file_url} # #{e}"
          end
        end
      rescue StandardError => e
        puts "#{file_url} # #{e}"
      ensure
        if not @all and File.exist?(file_path) and File.size(file_path) == 0
          File.delete(file_path)
          puts "#{file_path} was empty and was removed."
        end
      end
      semaphore.synchronize do
        @processed_file_count += 1
        puts "#{file_url} -> #{file_path} (#{@processed_file_count}/#{file_list_by_timestamp.size})"
      end
    else
      semaphore.synchronize do
        @processed_file_count += 1
        puts "#{file_url} # #{file_path} already exists. (#{@processed_file_count}/#{file_list_by_timestamp.size})"
      end
    end
  end
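
  # Lazily-initialized state shared by the download worker threads.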
  def file_queue
    @file_queue ||= file_list_by_timestamp.each_with_object(Queue.new) { |file_info, q| q << file_info }
  end

  def file_list_by_timestamp
    @file_list_by_timestamp ||= get_file_list_by_timestamp
  end

  def semaphore
    @semaphore ||= Mutex.new
  end

end