Skip to content

Commit af877dc

Browse files
authored
S3 TM Directory support (#3358)
1 parent 770d193 commit af877dc

11 files changed

Lines changed: 1242 additions & 65 deletions

gems/aws-sdk-s3/CHANGELOG.md

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,8 @@
11
Unreleased Changes
22
------------------
33

4+
* Feature - Added `#upload_directory` and `#download_directory` to `Aws::S3::TransferManager` for bulk directory transfers.
5+
46
1.213.0 (2026-01-28)
57
------------------
68

gems/aws-sdk-s3/lib/aws-sdk-s3/customizations.rb

Lines changed: 9 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -7,15 +7,22 @@ module S3
77
autoload :Encryption, 'aws-sdk-s3/encryption'
88
autoload :EncryptionV2, 'aws-sdk-s3/encryption_v2'
99
autoload :EncryptionV3, 'aws-sdk-s3/encryption_v3'
10-
autoload :FilePart, 'aws-sdk-s3/file_part'
10+
autoload :LegacySigner, 'aws-sdk-s3/legacy_signer'
11+
12+
# transfer manager + multipart upload/download utilities
1113
autoload :DefaultExecutor, 'aws-sdk-s3/default_executor'
14+
autoload :FilePart, 'aws-sdk-s3/file_part'
1215
autoload :FileUploader, 'aws-sdk-s3/file_uploader'
1316
autoload :FileDownloader, 'aws-sdk-s3/file_downloader'
14-
autoload :LegacySigner, 'aws-sdk-s3/legacy_signer'
1517
autoload :MultipartDownloadError, 'aws-sdk-s3/multipart_download_error'
1618
autoload :MultipartFileUploader, 'aws-sdk-s3/multipart_file_uploader'
1719
autoload :MultipartStreamUploader, 'aws-sdk-s3/multipart_stream_uploader'
1820
autoload :MultipartUploadError, 'aws-sdk-s3/multipart_upload_error'
21+
autoload :DirectoryProgress, 'aws-sdk-s3/directory_progress'
22+
autoload :DirectoryDownloadError, 'aws-sdk-s3/directory_download_error'
23+
autoload :DirectoryDownloader, 'aws-sdk-s3/directory_downloader'
24+
autoload :DirectoryUploadError, 'aws-sdk-s3/directory_upload_error'
25+
autoload :DirectoryUploader, 'aws-sdk-s3/directory_uploader'
1926
autoload :ObjectCopier, 'aws-sdk-s3/object_copier'
2027
autoload :ObjectMultipartCopier, 'aws-sdk-s3/object_multipart_copier'
2128
autoload :PresignedPost, 'aws-sdk-s3/presigned_post'
Lines changed: 16 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,16 @@
1+
# frozen_string_literal: true

module Aws
  module S3
    # Raised when DirectoryDownloader fails to download objects from S3 bucket
    class DirectoryDownloadError < StandardError
      # @return [Array<StandardError>] The list of errors encountered when downloading objects
      attr_reader :errors

      # @param message [String] description of the overall directory download failure
      # @param errors [Array<StandardError>] the individual per-object failures (defaults to none)
      def initialize(message, errors = [])
        @errors = errors
        super(message)
      end
    end
  end
end
Lines changed: 230 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,230 @@
1+
# frozen_string_literal: true

module Aws
  module S3
    # @api private
    # This is a one-shot class that downloads objects from a bucket to a local directory.
    # This works as follows:
    # * ObjectProducer runs in a background thread, calling `list_objects_v2` and
    #   pushing entries into a SizedQueue (max: 100).
    # * An internal executor pulls from that queue and posts work. Each task uses
    #   FileDownloader to download objects then signals completion via `completion_queue`.
    #
    # We track how many tasks we posted, then pop that many times from `completion_queue`
    # to wait for everything to finish.
    #
    # Errors are collected in a mutex-protected array. On failure (unless ignore_failure is set),
    # we call abort which closes the queue - the producer catches ClosedQueueError and exits cleanly.
    class DirectoryDownloader
      # @param options [Hash]
      # @option options [Client] :client S3 client; a default one is built when omitted
      # @option options [Object] :executor executor handed to FileDownloader for per-object
      #   multipart work (distinct from the internal 2-thread queue executor below)
      # @option options [Logger] :logger optional logger for per-object debug/warn messages
      def initialize(options = {})
        @client = options[:client] || Client.new
        @executor = options[:executor] || DefaultExecutor.new
        @logger = options[:logger]
        @producer = nil
        @mutex = Mutex.new
      end

      attr_reader :client, :executor

      # Requests early termination: closing the producer's queue makes both the
      # listing thread and the consumer loop wind down via ClosedQueueError.
      def abort
        @producer&.close
      end

      # Downloads the bucket's objects (optionally under :s3_prefix) into +destination+.
      #
      # @param destination [String] local directory; created if absent
      # @param bucket [String] source bucket name
      # @option options [String] :s3_prefix only list keys under this prefix
      # @option options [Proc] :filter_callback called per listed object; falsy return skips it
      # @option options [Proc] :request_callback may return a Hash merged into get params
      # @option options [Boolean] :ignore_failure when true, individual failures do not abort
      # @return [Hash] :completed_downloads, :failed_downloads and (when present) :errors
      # @raise [ArgumentError] when destination exists but is not a directory
      # @raise [DirectoryDownloadError] when the run was aborted due to failures
      def download(destination, bucket:, **options)
        if File.exist?(destination)
          raise ArgumentError, 'invalid destination, expected a directory' unless File.directory?(destination)
        else
          FileUtils.mkdir_p(destination)
        end

        download_opts = build_download_opts(destination, options)
        @producer = ObjectProducer.new(build_producer_opts(destination, bucket, options))
        downloader = FileDownloader.new(client: @client, executor: @executor)
        downloads, errors = process_download_queue(downloader, download_opts)
        build_result(downloads, errors)
      end

      private

      # Options consumed by the download loop itself (not by the producer).
      def build_download_opts(destination, opts)
        {
          destination: destination,
          ignore_failure: opts[:ignore_failure] || false
        }
      end

      # Options forwarded to ObjectProducer (listing thread).
      def build_producer_opts(destination, bucket, opts)
        {
          client: @client,
          directory_downloader: self,
          destination: destination,
          bucket: bucket,
          s3_prefix: opts[:s3_prefix],
          filter_callback: opts[:filter_callback],
          request_callback: opts[:request_callback]
        }
      end

      # A closed producer queue means abort was called, i.e. at least one
      # non-ignored failure occurred: surface all collected errors.
      def build_result(download_count, errors)
        if @producer&.closed?
          msg = "directory download failed: #{errors.map(&:message).join('; ')}"
          raise DirectoryDownloadError.new(msg, errors)
        else
          {
            # clamp at 0: errors raised before posting could exceed the posted count
            completed_downloads: [download_count - errors.count, 0].max,
            failed_downloads: errors.count,
            errors: errors.any? ? errors : nil
          }.compact
        end
      end

      # Runs inside an executor task. Entries carrying a validation error
      # (bad '.'/'..' key) re-raise it here so it is recorded like any failure.
      def download_object(entry, downloader, errors, opts)
        raise entry.error if entry.error

        FileUtils.mkdir_p(File.dirname(entry.path)) unless Dir.exist?(File.dirname(entry.path))
        downloader.download(entry.path, entry.params)
        @logger&.debug("Downloaded #{entry.params[:key]} from #{entry.params[:bucket]} to #{entry.path}")
      rescue StandardError => e
        @logger&.warn("Failed to download #{entry.params[:key]} from #{entry.params[:bucket]}: #{e.message}")
        @mutex.synchronize { errors << e }
        abort unless opts[:ignore_failure]
      end

      # Consumes entries from the producer, posting one executor task per entry.
      # Waits for completion by popping :done exactly posted_count times.
      # NOTE(review): assumes DefaultExecutor#post(object) yields the object to the
      # block on a worker thread — confirm against DefaultExecutor's implementation.
      # @return [Array(Integer, Array<StandardError>)] posted task count and errors
      def process_download_queue(downloader, opts)
        queue_executor = DefaultExecutor.new(max_threads: 2)
        completion_queue = Queue.new
        posted_count = 0
        errors = []
        begin
          @producer.each do |object|
            queue_executor.post(object) do |o|
              download_object(o, downloader, errors, opts)
            ensure
              # always signal, even on failure, so the pop-loop below terminates
              completion_queue << :done
            end
            posted_count += 1
          end
        rescue ClosedQueueError
          # abort already requested
        rescue StandardError => e
          @mutex.synchronize { errors << e }
          abort
        end
        posted_count.times { completion_queue.pop }
        [posted_count, errors]
      ensure
        queue_executor&.shutdown
      end

      # @api private
      # Lists bucket objects on a background thread and exposes them via #each.
      # Backpressure comes from the SizedQueue: listing blocks once 100 entries
      # are buffered and the consumer has not caught up.
      class ObjectProducer
        include Enumerable

        DEFAULT_QUEUE_SIZE = 100
        DONE_MARKER = :done

        def initialize(opts = {})
          @directory_downloader = opts[:directory_downloader]
          @destination_dir = opts[:destination]
          @bucket = opts[:bucket]
          @client = opts[:client]
          @s3_prefix = opts[:s3_prefix]
          @filter_callback = opts[:filter_callback]
          @request_callback = opts[:request_callback]
          @object_queue = SizedQueue.new(DEFAULT_QUEUE_SIZE)
        end

        def closed?
          @object_queue.closed?
        end

        # Close stops both sides: the listing thread's next push raises
        # ClosedQueueError; clearing unblocks a push currently waiting on a full queue.
        def close
          @object_queue.close
          @object_queue.clear
        end

        # Yields DownloadEntry items until DONE_MARKER (or queue close, which
        # makes #shift return nil). The ensure joins the thread via Thread#value,
        # re-raising any listing error in the caller.
        def each
          producer_thread = Thread.new do
            stream_objects
            @object_queue << DONE_MARKER
          rescue ClosedQueueError
            # abort requested
          rescue StandardError => e
            close
            raise e
          end

          while (object = @object_queue.shift) && object != DONE_MARKER
            yield object
          end
        ensure
          producer_thread.value
        end

        private

        # Merges the callback's returned Hash into the get_object params;
        # non-Hash or empty returns leave params untouched.
        def apply_request_callback(key, params)
          callback_params = @request_callback.call(key, params.dup)
          return params unless callback_params.is_a?(Hash) && callback_params.any?

          params.merge(callback_params)
        end

        # Builds a DownloadEntry; keys failing validation get an empty path and
        # carry the error so the consumer records it as a failed download.
        def build_object_entry(key)
          params = { bucket: @bucket, key: key }
          params = apply_request_callback(key, params) if @request_callback
          error = validate_key(key)
          return DownloadEntry.new(path: '', params: params, error: error) if error

          full_path = normalize_path(File.join(@destination_dir, key))
          DownloadEntry.new(path: full_path, params: params, error: error)
        end

        def include_object?(obj)
          return true unless @filter_callback

          @filter_callback.call(obj)
        end

        # Zero-byte keys ending in '/' are folder placeholders, not real objects.
        def directory_marker?(obj)
          obj.key.end_with?('/') && obj.size.zero?
        end

        # Converts S3's '/' separators to the platform separator (Windows).
        def normalize_path(path)
          return path if File::SEPARATOR == '/'

          path.tr('/', File::SEPARATOR)
        end

        # Pages through list_objects_v2 recursively via continuation tokens.
        # NOTE(review): recursion depth grows with page count; buckets with very
        # many pages could deepen the stack — consider an iterative loop.
        def stream_objects(continuation_token: nil)
          resp = @client.list_objects_v2(bucket: @bucket, prefix: @s3_prefix, continuation_token: continuation_token)
          resp.contents&.each do |o|
            next if directory_marker?(o)
            next unless include_object?(o)

            @object_queue << build_object_entry(o.key)
          end
          stream_objects(continuation_token: resp.next_continuation_token) if resp.next_continuation_token
        end

        # Rejects keys with '.'/'..' segments to prevent path traversal outside
        # the destination directory.
        def validate_key(key)
          segments = key.split('/')
          return unless segments.any? { |s| %w[. ..].include?(s) }

          DirectoryDownloadError.new("invalid key '#{key}': contains '.' or '..' path segments")
        end

        # @api private
        # Immutable-ish record of one pending download: local path, get_object
        # params, and an optional pre-flight validation error.
        class DownloadEntry
          def initialize(opts = {})
            @path = opts[:path]
            @params = opts[:params]
            @error = opts[:error]
          end

          attr_reader :path, :params, :error
        end
      end
    end
  end
end
Lines changed: 16 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,16 @@
1+
# frozen_string_literal: true

module Aws
  module S3
    # Raised when DirectoryUploader fails to upload files to S3 bucket
    class DirectoryUploadError < StandardError
      # @return [Array<StandardError>] The list of errors encountered when uploading files
      attr_reader :errors

      # @param message [String] description of the overall directory upload failure
      # @param errors [Array<StandardError>] the individual per-file failures (defaults to none)
      def initialize(message, errors = [])
        @errors = errors
        super(message)
      end
    end
  end
end

0 commit comments

Comments
 (0)