Diffstat (limited to 'lib/bundler/compact_index_client')
-rw-r--r--  lib/bundler/compact_index_client/cache.rb        120
-rw-r--r--  lib/bundler/compact_index_client/cache_file.rb   148
-rw-r--r--  lib/bundler/compact_index_client/gem_parser.rb    10
-rw-r--r--  lib/bundler/compact_index_client/parser.rb        84
-rw-r--r--  lib/bundler/compact_index_client/updater.rb      150
5 files changed, 365 insertions, 147 deletions
diff --git a/lib/bundler/compact_index_client/cache.rb b/lib/bundler/compact_index_client/cache.rb
index c2cd069ec1..bedd7f8028 100644
--- a/lib/bundler/compact_index_client/cache.rb
+++ b/lib/bundler/compact_index_client/cache.rb
@@ -7,103 +7,89 @@ module Bundler
class Cache
attr_reader :directory
- def initialize(directory)
+ def initialize(directory, fetcher = nil)
@directory = Pathname.new(directory).expand_path
- info_roots.each do |dir|
- SharedHelpers.filesystem_access(dir) do
- FileUtils.mkdir_p(dir)
- end
- end
- end
+ @updater = Updater.new(fetcher) if fetcher
+ @mutex = Thread::Mutex.new
+ @endpoints = Set.new
- def names
- lines(names_path)
+ @info_root = mkdir("info")
+ @special_characters_info_root = mkdir("info-special-characters")
+ @info_etag_root = mkdir("info-etags")
end
- def names_path
- directory.join("names")
+ def names
+ fetch("names", names_path, names_etag_path)
end
def versions
- versions_by_name = Hash.new {|hash, key| hash[key] = [] }
- info_checksums_by_name = {}
-
- lines(versions_path).each do |line|
- name, versions_string, info_checksum = line.split(" ", 3)
- info_checksums_by_name[name] = info_checksum || ""
- versions_string.split(",").each do |version|
- if version.start_with?("-")
- version = version[1..-1].split("-", 2).unshift(name)
- versions_by_name[name].delete(version)
- else
- version = version.split("-", 2).unshift(name)
- versions_by_name[name] << version
- end
- end
- end
-
- [versions_by_name, info_checksums_by_name]
+ fetch("versions", versions_path, versions_etag_path)
end
- def versions_path
- directory.join("versions")
- end
+ def info(name, remote_checksum = nil)
+ path = info_path(name)
- def checksums
- checksums = {}
-
- lines(versions_path).each do |line|
- name, _, checksum = line.split(" ", 3)
- checksums[name] = checksum
+ if remote_checksum && remote_checksum != SharedHelpers.checksum_for_file(path, :MD5)
+ fetch("info/#{name}", path, info_etag_path(name))
+ else
+ Bundler::CompactIndexClient.debug { "update skipped info/#{name} (#{remote_checksum ? "versions index checksum matches local" : "versions index checksum is nil"})" }
+ read(path)
end
-
- checksums
end
- def dependencies(name)
- lines(info_path(name)).map do |line|
- parse_gem(line)
- end
+ def reset!
+ @mutex.synchronize { @endpoints.clear }
end
+ private
+
+ def names_path = directory.join("names")
+ def names_etag_path = directory.join("names.etag")
+ def versions_path = directory.join("versions")
+ def versions_etag_path = directory.join("versions.etag")
+
def info_path(name)
name = name.to_s
- if name =~ /[^a-z0-9_-]/
+ # TODO: converge this into the info_root by hashing all filenames like info_etag_path
+ if /[^a-z0-9_-]/.match?(name)
name += "-#{SharedHelpers.digest(:MD5).hexdigest(name).downcase}"
- info_roots.last.join(name)
+ @special_characters_info_root.join(name)
else
- info_roots.first.join(name)
+ @info_root.join(name)
end
end
- def specific_dependency(name, version, platform)
- pattern = [version, platform].compact.join("-")
- return nil if pattern.empty?
+ def info_etag_path(name)
+ name = name.to_s
+ @info_etag_root.join("#{name}-#{SharedHelpers.digest(:MD5).hexdigest(name).downcase}")
+ end
- gem_lines = info_path(name).read
- gem_line = gem_lines[/^#{Regexp.escape(pattern)}\b.*/, 0]
- gem_line ? parse_gem(gem_line) : nil
+ def mkdir(name)
+ directory.join(name).tap do |dir|
+ SharedHelpers.filesystem_access(dir) do
+ FileUtils.mkdir_p(dir)
+ end
+ end
end
- private
+ def fetch(remote_path, path, etag_path)
+ if already_fetched?(remote_path)
+ Bundler::CompactIndexClient.debug { "already fetched #{remote_path}" }
+ else
+ Bundler::CompactIndexClient.debug { "fetching #{remote_path}" }
+ @updater&.update(remote_path, path, etag_path)
+ end
- def lines(path)
- return [] unless path.file?
- lines = SharedHelpers.filesystem_access(path, :read, &:read).split("\n")
- header = lines.index("---")
- header ? lines[header + 1..-1] : lines
+ read(path)
end
- def parse_gem(line)
- @dependency_parser ||= GemParser.new
- @dependency_parser.parse(line)
+ def already_fetched?(remote_path)
+ @mutex.synchronize { !@endpoints.add?(remote_path) }
end
- def info_roots
- [
- directory.join("info"),
- directory.join("info-special-characters"),
- ]
+ def read(path)
+ return unless path.file?
+ SharedHelpers.filesystem_access(path, :read, &:read)
end
end
end
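
A minimal usage sketch of the reworked Cache (illustrative only, not part of the patch): with no fetcher the @updater is never created, so fetch only reads whatever is already cached on disk.

    require "bundler"
    require "bundler/compact_index_client"
    require "tmpdir"

    # With a nil fetcher, Cache#names / #versions / #info skip the network and
    # return the local file contents, or nil when nothing is cached yet.
    cache = Bundler::CompactIndexClient::Cache.new(Dir.mktmpdir("compact-index"))
    cache.names     # => nil ("names" has not been cached)
    cache.versions  # => nil

    # Endpoints are deduplicated in @endpoints under @mutex; reset! clears the
    # set so the next call fetches (or re-reads) again.
    cache.reset!
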
diff --git a/lib/bundler/compact_index_client/cache_file.rb b/lib/bundler/compact_index_client/cache_file.rb
new file mode 100644
index 0000000000..299d683438
--- /dev/null
+++ b/lib/bundler/compact_index_client/cache_file.rb
@@ -0,0 +1,148 @@
+# frozen_string_literal: true
+
+require_relative "../vendored_fileutils"
+require "rubygems/package"
+
+module Bundler
+ class CompactIndexClient
+ # write cache files in a way that is robust to concurrent modifications
+ # if digests are given, the checksums will be verified
+ class CacheFile
+ DEFAULT_FILE_MODE = 0o644
+ private_constant :DEFAULT_FILE_MODE
+
+ class Error < RuntimeError; end
+ class ClosedError < Error; end
+
+ class DigestMismatchError < Error
+ def initialize(digests, expected_digests)
+ super "Calculated checksums #{digests.inspect} did not match expected #{expected_digests.inspect}."
+ end
+ end
+
+ # Initialize with a copy of the original file, then yield the instance.
+ def self.copy(path, &block)
+ new(path) do |file|
+ file.initialize_digests
+
+ SharedHelpers.filesystem_access(path, :read) do
+ path.open("rb") do |s|
+ file.open {|f| IO.copy_stream(s, f) }
+ end
+ end
+
+ yield file
+ end
+ end
+
+ # Write data to a temp file, then replace the original file with it verifying the digests if given.
+ def self.write(path, data, digests = nil)
+ return unless data
+ new(path) do |file|
+ file.digests = digests
+ file.write(data)
+ end
+ end
+
+ attr_reader :original_path, :path
+
+ def initialize(original_path, &block)
+ @original_path = original_path
+ @perm = original_path.file? ? original_path.stat.mode : DEFAULT_FILE_MODE
+ @path = original_path.sub(/$/, ".#{$$}.tmp")
+ return unless block_given?
+ begin
+ yield self
+ ensure
+ close
+ end
+ end
+
+ def size
+ path.size
+ end
+
+ # initialize the digests using CompactIndexClient::SUPPORTED_DIGESTS, or a subset based on keys.
+ def initialize_digests(keys = nil)
+ @digests = keys ? SUPPORTED_DIGESTS.slice(*keys) : SUPPORTED_DIGESTS.dup
+ @digests.transform_values! {|algo_class| SharedHelpers.digest(algo_class).new }
+ end
+
+ # reset the digests so they don't contain any previously read data
+ def reset_digests
+ @digests&.each_value(&:reset)
+ end
+
+ # set the digests that will be verified at the end
+ def digests=(expected_digests)
+ @expected_digests = expected_digests
+
+ if @expected_digests.nil?
+ @digests = nil
+ elsif @digests
+ @digests = @digests.slice(*@expected_digests.keys)
+ else
+ initialize_digests(@expected_digests.keys)
+ end
+ end
+
+ def digests?
+ @digests&.any?
+ end
+
+ # Open the temp file for writing, reusing original permissions, yielding the IO object.
+ def open(write_mode = "wb", perm = @perm, &block)
+ raise ClosedError, "Cannot reopen closed file" if @closed
+ SharedHelpers.filesystem_access(path, :write) do
+ path.open(write_mode, perm) do |f|
+ yield digests? ? Gem::Package::DigestIO.new(f, @digests) : f
+ end
+ end
+ end
+
+ # Returns false without appending when no digests since appending is too error prone to do without digests.
+ def append(data)
+ return false unless digests?
+ open("a") {|f| f.write data }
+ verify && commit
+ end
+
+ def write(data)
+ reset_digests
+ open {|f| f.write data }
+ commit!
+ end
+
+ def commit!
+ verify || raise(DigestMismatchError.new(@base64digests, @expected_digests))
+ commit
+ end
+
+ # Verify the digests, returning true on match, false on mismatch.
+ def verify
+ return true unless @expected_digests && digests?
+ @base64digests = @digests.transform_values!(&:base64digest)
+ @digests = nil
+ @base64digests.all? {|algo, digest| @expected_digests[algo] == digest }
+ end
+
+ # Replace the original file with the temp file without verifying digests.
+ # The file is permanently closed.
+ def commit
+ raise ClosedError, "Cannot commit closed file" if @closed
+ SharedHelpers.filesystem_access(original_path, :write) do
+ FileUtils.mv(path, original_path)
+ end
+ @closed = true
+ end
+
+ # Remove the temp file without replacing the original file.
+ # The file is permanently closed.
+ def close
+ return if @closed
+ FileUtils.remove_file(path) if @path&.file?
+ @closed = true
+ end
+ end
+ end
+end
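
A sketch of the write/verify/rename flow CacheFile provides (illustrative only; the path and data are made up, and the digest is keyed "sha-256" as SUPPORTED_DIGESTS expects):

    require "bundler"
    require "bundler/compact_index_client"
    require "bundler/compact_index_client/cache_file"
    require "digest"
    require "tmpdir"

    path = Pathname.new(Dir.mktmpdir("compact-index")).join("versions")
    data = "created_at: 2024-01-01\n---\nrack 3.1.0 abcd1234\n"

    # Writes to "versions.<pid>.tmp", verifies the digest of the written bytes
    # against the expected base64 value, then renames the temp file into place.
    Bundler::CompactIndexClient::CacheFile.write(
      path, data, { "sha-256" => Digest::SHA256.base64digest(data) }
    )

    # On a mismatch, commit! raises CacheFile::DigestMismatchError, the temp
    # file is removed by close, and the original file is left untouched.
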
diff --git a/lib/bundler/compact_index_client/gem_parser.rb b/lib/bundler/compact_index_client/gem_parser.rb
index e7bf4c6001..60a1817607 100644
--- a/lib/bundler/compact_index_client/gem_parser.rb
+++ b/lib/bundler/compact_index_client/gem_parser.rb
@@ -6,12 +6,15 @@ module Bundler
GemParser = Gem::Resolver::APISet::GemParser
else
class GemParser
+ EMPTY_ARRAY = [].freeze
+ private_constant :EMPTY_ARRAY
+
def parse(line)
version_and_platform, rest = line.split(" ", 2)
version, platform = version_and_platform.split("-", 2)
- dependencies, requirements = rest.split("|", 2).map {|s| s.split(",") } if rest
- dependencies = dependencies ? dependencies.map {|d| parse_dependency(d) } : []
- requirements = requirements ? requirements.map {|d| parse_dependency(d) } : []
+ dependencies, requirements = rest.split("|", 2).map! {|s| s.split(",") } if rest
+ dependencies = dependencies ? dependencies.map! {|d| parse_dependency(d) } : EMPTY_ARRAY
+ requirements = requirements ? requirements.map! {|d| parse_dependency(d) } : EMPTY_ARRAY
[version, platform, dependencies, requirements]
end
@@ -20,6 +23,7 @@ module Bundler
def parse_dependency(string)
dependency = string.split(":")
dependency[-1] = dependency[-1].split("&") if dependency.size > 1
+ dependency[0] = -dependency[0]
dependency
end
end
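
For reference, a sketch of the info-file line format GemParser consumes and the structure it returns (the line is made up; on newer RubyGems the constant points at Gem::Resolver::APISet::GemParser, which parses the same format):

    require "bundler"
    require "bundler/compact_index_client"
    require "bundler/compact_index_client/gem_parser"

    # "VERSION[-PLATFORM] DEP,DEP|REQ,REQ" where each DEP/REQ is
    # "name:requirement&requirement".
    line = "1.2.0-java rack:>= 2.0|checksum:abc123,ruby:>= 3.0"

    Bundler::CompactIndexClient::GemParser.new.parse(line)
    # => ["1.2.0", "java",
    #     [["rack", [">= 2.0"]]],
    #     [["checksum", ["abc123"]], ["ruby", [">= 3.0"]]]]
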
diff --git a/lib/bundler/compact_index_client/parser.rb b/lib/bundler/compact_index_client/parser.rb
new file mode 100644
index 0000000000..a227bc2cfd
--- /dev/null
+++ b/lib/bundler/compact_index_client/parser.rb
@@ -0,0 +1,84 @@
+# frozen_string_literal: true
+
+module Bundler
+ class CompactIndexClient
+ class Parser
+ # `compact_index` - an object responding to #names, #versions, #info(name, checksum),
+ # returning the file contents as a string
+ def initialize(compact_index)
+ @compact_index = compact_index
+ @info_checksums = nil
+ @versions_by_name = nil
+ @available = nil
+ end
+
+ def names
+ lines(@compact_index.names)
+ end
+
+ def versions
+ @versions_by_name ||= Hash.new {|hash, key| hash[key] = [] }
+ @info_checksums = {}
+
+ lines(@compact_index.versions).each do |line|
+ name, versions_string, checksum = line.split(" ", 3)
+ @info_checksums[name] = checksum || ""
+ versions_string.split(",") do |version|
+ delete = version.delete_prefix!("-")
+ version = version.split("-", 2).unshift(name)
+ if delete
+ @versions_by_name[name].delete(version)
+ else
+ @versions_by_name[name] << version
+ end
+ end
+ end
+
+ @versions_by_name
+ end
+
+ def info(name)
+ data = @compact_index.info(name, info_checksums[name])
+ lines(data).map {|line| gem_parser.parse(line).unshift(name) }
+ end
+
+ def available?
+ return @available unless @available.nil?
+ @available = !info_checksums.empty?
+ end
+
+ private
+
+ def info_checksums
+ @info_checksums ||= lines(@compact_index.versions).each_with_object({}) do |line, checksums|
+ parse_version_checksum(line, checksums)
+ end
+ end
+
+ def lines(data)
+ return [] if data.nil? || data.empty?
+ lines = data.split("\n")
+ header = lines.index("---")
+ header ? lines[header + 1..-1] : lines
+ end
+
+ def gem_parser
+ @gem_parser ||= GemParser.new
+ end
+
+ # This is mostly the same as `split(" ", 3)` but it avoids allocating extra objects.
+ # This method gets called at least once for every gem when parsing versions.
+ def parse_version_checksum(line, checksums)
+ line.freeze # allows slicing into the string to not allocate a copy of the line
+ name_end = line.index(" ")
+ checksum_start = line.index(" ", name_end + 1) + 1
+ checksum_end = line.size - checksum_start
+ # freeze name since it is used as a hash key
+ # pre-freezing means a frozen copy isn't created
+ name = line[0, name_end].freeze
+ checksum = line[checksum_start, checksum_end]
+ checksums[name] = checksum
+ end
+ end
+ end
+end
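
A sketch of Parser driven by a minimal stand-in for the compact_index collaborator (Cache is the real one); the stub and its data are made up for illustration:

    require "bundler"
    require "bundler/compact_index_client"
    require "bundler/compact_index_client/parser"

    # Anything that responds to #names, #versions and #info(name, checksum)
    # with raw file contents will do.
    stub = Struct.new(:names_data, :versions_data, :info_data) do
      def names = names_data
      def versions = versions_data
      def info(_name, _checksum) = info_data
    end.new(
      "---\nrack\n",
      "created_at: 2024-01-01\n---\nrack 3.0.0,3.1.0 abcd1234\n",
      "3.1.0 |checksum:ffff,ruby:>= 2.7\n"
    )

    parser = Bundler::CompactIndexClient::Parser.new(stub)
    parser.names       # => ["rack"]
    parser.versions    # => {"rack"=>[["rack", "3.0.0"], ["rack", "3.1.0"]]}
    parser.available?  # => true (the versions data listed a checksum)
    parser.info("rack")
    # => [["rack", "3.1.0", nil, [], [["checksum", ["ffff"]], ["ruby", [">= 2.7"]]]]]
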
diff --git a/lib/bundler/compact_index_client/updater.rb b/lib/bundler/compact_index_client/updater.rb
index 06486f98cb..88c7146900 100644
--- a/lib/bundler/compact_index_client/updater.rb
+++ b/lib/bundler/compact_index_client/updater.rb
@@ -1,107 +1,103 @@
# frozen_string_literal: true
-require_relative "../vendored_fileutils"
-
module Bundler
class CompactIndexClient
class Updater
- class MisMatchedChecksumError < Error
- def initialize(path, server_checksum, local_checksum)
- @path = path
- @server_checksum = server_checksum
- @local_checksum = local_checksum
- end
-
- def message
- "The checksum of /#{@path} does not match the checksum provided by the server! Something is wrong " \
- "(local checksum is #{@local_checksum.inspect}, was expecting #{@server_checksum.inspect})."
+ class MismatchedChecksumError < Error
+ def initialize(path, message)
+ super "The checksum of /#{path} does not match the checksum provided by the server! Something is wrong. #{message}"
end
end
def initialize(fetcher)
@fetcher = fetcher
- require_relative "../vendored_tmpdir"
end
- def update(local_path, remote_path, retrying = nil)
- headers = {}
-
- Bundler::Dir.mktmpdir("bundler-compact-index-") do |local_temp_dir|
- local_temp_path = Pathname.new(local_temp_dir).join(local_path.basename)
-
- # first try to fetch any new bytes on the existing file
- if retrying.nil? && local_path.file?
- SharedHelpers.filesystem_access(local_temp_path) do
- FileUtils.cp local_path, local_temp_path
- end
- headers["If-None-Match"] = etag_for(local_temp_path)
- headers["Range"] =
- if local_temp_path.size.nonzero?
- # Subtract a byte to ensure the range won't be empty.
- # Avoids 416 (Range Not Satisfiable) responses.
- "bytes=#{local_temp_path.size - 1}-"
- else
- "bytes=#{local_temp_path.size}-"
- end
- end
-
- response = @fetcher.call(remote_path, headers)
- return nil if response.is_a?(Net::HTTPNotModified)
+ def update(remote_path, local_path, etag_path)
+ append(remote_path, local_path, etag_path) || replace(remote_path, local_path, etag_path)
+ rescue CacheFile::DigestMismatchError => e
+ raise MismatchedChecksumError.new(remote_path, e.message)
+ rescue Zlib::GzipFile::Error
+ raise Bundler::HTTPError
+ end
- content = response.body
+ private
- etag = (response["ETag"] || "").gsub(%r{\AW/}, "")
- correct_response = SharedHelpers.filesystem_access(local_temp_path) do
- if response.is_a?(Net::HTTPPartialContent) && local_temp_path.size.nonzero?
- local_temp_path.open("a") {|f| f << slice_body(content, 1..-1) }
+ def append(remote_path, local_path, etag_path)
+ return false unless local_path.file? && local_path.size.nonzero?
- etag_for(local_temp_path) == etag
- else
- local_temp_path.open("wb") {|f| f << content }
+ CacheFile.copy(local_path) do |file|
+ etag = etag_path.read.tap(&:chomp!) if etag_path.file?
- etag.length.zero? || etag_for(local_temp_path) == etag
- end
- end
+ # Subtract a byte to ensure the range won't be empty.
+ # Avoids 416 (Range Not Satisfiable) responses.
+ response = @fetcher.call(remote_path, request_headers(etag, file.size - 1))
+ break true if response.is_a?(Gem::Net::HTTPNotModified)
- if correct_response
- SharedHelpers.filesystem_access(local_path) do
- FileUtils.mv(local_temp_path, local_path)
- end
- return nil
+ file.digests = parse_digests(response)
+ # server may ignore Range and return the full response
+ if response.is_a?(Gem::Net::HTTPPartialContent)
+ break false unless file.append(response.body.byteslice(1..-1))
+ else
+ file.write(response.body)
end
+ CacheFile.write(etag_path, etag_from_response(response))
+ true
+ end
+ end
- if retrying
- raise MisMatchedChecksumError.new(remote_path, etag, etag_for(local_temp_path))
- end
+ # request without range header to get the full file or a 304 Not Modified
+ def replace(remote_path, local_path, etag_path)
+ etag = etag_path.read.tap(&:chomp!) if etag_path.file?
+ response = @fetcher.call(remote_path, request_headers(etag))
+ return true if response.is_a?(Gem::Net::HTTPNotModified)
+ CacheFile.write(local_path, response.body, parse_digests(response))
+ CacheFile.write(etag_path, etag_from_response(response))
+ end
- update(local_path, remote_path, :retrying)
- end
- rescue Errno::EACCES
- raise Bundler::PermissionError,
- "Bundler does not have write access to create a temp directory " \
- "within #{Dir.tmpdir}. Bundler must have write access to your " \
- "systems temp directory to function properly. "
- rescue Zlib::GzipFile::Error
- raise Bundler::HTTPError
+ def request_headers(etag, range_start = nil)
+ headers = {}
+ headers["Range"] = "bytes=#{range_start}-" if range_start
+ headers["If-None-Match"] = %("#{etag}") if etag
+ headers
end
- def etag_for(path)
- sum = checksum_for_file(path)
- sum ? %("#{sum}") : nil
+ def etag_for_request(etag_path)
+ etag_path.read.tap(&:chomp!) if etag_path.file?
end
- def slice_body(body, range)
- body.byteslice(range)
+ def etag_from_response(response)
+ return unless response["ETag"]
+ etag = response["ETag"].delete_prefix("W/")
+ return if etag.delete_prefix!('"') && !etag.delete_suffix!('"')
+ etag
end
- def checksum_for_file(path)
- return nil unless path.file?
- # This must use File.read instead of Digest.file().hexdigest
- # because we need to preserve \n line endings on windows when calculating
- # the checksum
- SharedHelpers.filesystem_access(path, :read) do
- SharedHelpers.digest(:MD5).hexdigest(File.read(path))
+ # Unwraps and returns a Hash of digest algorithms and base64 values
+ # according to RFC 8941 Structured Field Values for HTTP.
+ # https://www.rfc-editor.org/rfc/rfc8941#name-parsing-a-byte-sequence
+ # Ignores unsupported algorithms.
+ def parse_digests(response)
+ return unless header = response["Repr-Digest"] || response["Digest"]
+ digests = {}
+ header.split(",") do |param|
+ algorithm, value = param.split("=", 2)
+ algorithm.strip!
+ algorithm.downcase!
+ next unless SUPPORTED_DIGESTS.key?(algorithm)
+ next unless value = byte_sequence(value)
+ digests[algorithm] = value
end
+ digests.empty? ? nil : digests
+ end
+
+ # Unwrap surrounding colons (byte sequence)
+ # The wrapping characters must be matched or we return nil.
+ # Also handles quotes because right now rubygems.org sends them.
+ def byte_sequence(value)
+ return if value.delete_prefix!(":") && !value.delete_suffix!(":")
+ return if value.delete_prefix!('"') && !value.delete_suffix!('"')
+ value
end
end
end
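
A sketch of the contract Updater#update expects from its fetcher — an object responding to #call(remote_path, headers) and returning a Net::HTTP-style response (illustrative only; FakeResponse and FakeFetcher are stand-ins, and bundler/vendored_net_http is assumed to provide the Gem::Net constants used by the is_a? checks):

    require "bundler"
    require "bundler/vendored_net_http"
    require "bundler/compact_index_client"
    require "bundler/compact_index_client/cache_file"
    require "bundler/compact_index_client/updater"
    require "digest"
    require "tmpdir"

    class FakeResponse
      def initialize(body, headers)
        @body = body
        @headers = headers
      end

      attr_reader :body

      def [](key)
        @headers[key]
      end
    end

    class FakeFetcher
      BODY = "created_at: 2024-01-01\n---\nrack 3.1.0 abcd1234\n"

      def call(remote_path, headers)
        # headers carry "If-None-Match" (read from the .etag file) and, when a
        # partial local file can be appended to, a "Range" starting a byte early.
        FakeResponse.new(BODY,
          "ETag" => '"deadbeef"',
          "Repr-Digest" => "sha-256=:#{Digest::SHA256.base64digest(BODY)}:")
      end
    end

    dir = Pathname.new(Dir.mktmpdir("compact-index"))
    updater = Bundler::CompactIndexClient::Updater.new(FakeFetcher.new)
    updater.update("versions", dir.join("versions"), dir.join("versions.etag"))

    dir.join("versions").read      # => FakeFetcher::BODY, written via CacheFile
    dir.join("versions.etag").read # => "deadbeef"
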