From 0e44b9ddb4531d2d0f8d8fabf7747e045833824a Mon Sep 17 00:00:00 2001 From: Katharina Przybill <30441792+kathap@users.noreply.github.com> Date: Thu, 8 Jan 2026 14:35:25 +0100 Subject: [PATCH 1/7] Compare fog vs storage-cli benchmarks --- lib/cloud_controller/benchmark/blobstore.rb | 103 +++++++++++++------- lib/cloud_controller/dependency_locator.rb | 15 +-- 2 files changed, 79 insertions(+), 39 deletions(-) diff --git a/lib/cloud_controller/benchmark/blobstore.rb b/lib/cloud_controller/benchmark/blobstore.rb index 141026af084..4fb11699d15 100644 --- a/lib/cloud_controller/benchmark/blobstore.rb +++ b/lib/cloud_controller/benchmark/blobstore.rb @@ -14,35 +14,60 @@ def perform zip_output_dir = Dir.mktmpdir zip_file = zip_resources(resource_dir, zip_output_dir) - package_guid, resource_timing = upload_package(zip_file) - puts("package upload timing: #{resource_timing * 1000}ms") - - resource_timing = download_package(package_guid, resource_dir) - puts("package download timing: #{resource_timing * 1000}ms") - - bytes_read, resource_timing = download_buildpacks(resource_dir) - puts("downloaded #{Buildpack.count} buildpacks, total #{bytes_read} bytes read") - puts("buildpack download timing: #{resource_timing * 1000}ms") - - droplet_guid, resource_timing = upload_droplet(zip_file) - puts("droplet upload timing: #{resource_timing * 1000}ms") - - resource_timing = download_droplet(droplet_guid, resource_dir) - puts("droplet download timing: #{resource_timing * 1000}ms") + package_guid, resource_timing = upload_package(zip_file, package_blobstore_client) + puts("package upload timing fog: #{resource_timing * 1000}ms") + package_guid_cli, resource_timing_cli = upload_package(zip_file, package_blobstore_client_storage_cli) + puts("package upload timing storage-cli: #{resource_timing_cli * 1000}ms") + + resource_timing = download_package(package_guid, resource_dir, package_blobstore_client) + puts("package download timing fog: #{resource_timing * 1000}ms") + 
resource_timing_cli = download_package(package_guid_cli, resource_dir, package_blobstore_client_storage_cli) + puts("package download timing storage-cli: #{resource_timing_cli * 1000}ms") + + bytes_read, resource_timing = download_buildpacks(resource_dir, buildpack_blobstore_client) + bytes_read_cli, resource_timing_cli = download_buildpacks(resource_dir, buildpack_blobstore_client_storage_cli) + puts("downloaded #{Buildpack.count} buildpacks, total fog #{bytes_read} bytes read") + puts("downloaded #{Buildpack.count} buildpacks, total storage-cli #{bytes_read_cli} bytes read") + puts("buildpack download timing fog: #{resource_timing * 1000}ms") + puts("buildpack download timing storage-cli: #{resource_timing_cli * 1000}ms") + + droplet_guid, resource_timing = upload_droplet(zip_file, droplet_blobstore_client) + puts("droplet upload timing fog: #{resource_timing * 1000}ms") + droplet_guid_cli, resource_timing_cli = upload_droplet(zip_file, droplet_blobstore_client_storage_cli) + puts("droplet upload timing storage-cli: #{resource_timing_cli * 1000}ms") + + resource_timing = download_droplet(droplet_guid, resource_dir, droplet_blobstore_client) + puts("droplet download timing fog: #{resource_timing * 1000}ms") + resource_timing_cli = download_droplet(droplet_guid_cli, resource_dir, droplet_blobstore_client_storage_cli) + puts("droplet download timing storage-cli: #{resource_timing_cli * 1000}ms") big_droplet_file = Tempfile.new('big-droplet', resource_dir) big_droplet_file.write('abc' * 1024 * 1024 * 100) - big_droplet_guid, resource_timing = upload_droplet(big_droplet_file.path) - puts("big droplet upload timing: #{resource_timing * 1000}ms") - - resource_timing = download_droplet(big_droplet_guid, resource_dir) - puts("big droplet download timing: #{resource_timing * 1000}ms") + big_droplet_file.flush + big_droplet_file.rewind + big_droplet_guid, resource_timing = upload_droplet(big_droplet_file.path, droplet_blobstore_client) + big_droplet_file_cli = 
Tempfile.new('big-droplet', resource_dir) + big_droplet_file_cli.write('abc' * 1024 * 1024 * 100) + big_droplet_file_cli.flush + big_droplet_file_cli.rewind + big_droplet_guid_cli, resource_timing_cli = upload_droplet(big_droplet_file_cli.path, droplet_blobstore_client_storage_cli) + puts("big droplet upload timing fog: #{resource_timing * 1000}ms") + puts("big droplet upload timing storage-cli: #{resource_timing_cli * 1000}ms") + resource_timing = download_droplet(big_droplet_guid, resource_dir, droplet_blobstore_client) + puts("big droplet download timing fog: #{resource_timing * 1000}ms") + resource_timing_cli = download_droplet(big_droplet_guid_cli, resource_dir, droplet_blobstore_client_storage_cli) + puts("big droplet download timing storage-cli: #{resource_timing_cli * 1000}ms") ensure FileUtils.remove_dir(resource_dir, true) FileUtils.remove_dir(zip_output_dir, true) + big_droplet_file.close + big_droplet_file_cli.close package_blobstore_client.delete(package_guid) if package_guid droplet_blobstore_client.delete(droplet_guid) if droplet_guid droplet_blobstore_client.delete(big_droplet_guid) if big_droplet_guid + package_blobstore_client_storage_cli.delete(package_guid_cli) if package_guid_cli + droplet_blobstore_client_storage_cli.delete(droplet_guid_cli) if droplet_guid_cli + droplet_blobstore_client_storage_cli.delete(big_droplet_guid_cli) if big_droplet_guid_cli end def resource_match(dir_path) @@ -55,24 +80,24 @@ def resource_match(dir_path) end end - def upload_package(package_path) - copy_to_blobstore(package_path, package_blobstore_client) + def upload_package(package_path, client = package_blobstore_client) + copy_to_blobstore(package_path, client) end - def download_package(package_guid, tmp_dir) + def download_package(package_guid, tmp_dir, client = package_blobstore_client) tempfile = Tempfile.new('package-download-benchmark', tmp_dir) ::Benchmark.realtime do - package_blobstore_client.download_from_blobstore(package_guid, tempfile.path) + 
client.download_from_blobstore(package_guid, tempfile.path) end end - def download_buildpacks(tmp_dir) + def download_buildpacks(tmp_dir, client = buildpack_blobstore_client) tempfile = Tempfile.new('buildpack-download-benchmark', tmp_dir) bytes_read = 0 timing = ::Benchmark.realtime do bytes_read = Buildpack.map do |buildpack| - buildpack_blobstore_client.download_from_blobstore(buildpack.key, tempfile.path) + client.download_from_blobstore(buildpack.key, tempfile.path) File.stat(tempfile.path).size end.sum end @@ -80,15 +105,15 @@ def download_buildpacks(tmp_dir) [bytes_read, timing] end - def upload_droplet(droplet_path) - copy_to_blobstore(droplet_path, droplet_blobstore_client) + def upload_droplet(droplet_path, client = droplet_blobstore_client) + copy_to_blobstore(droplet_path, client) end - def download_droplet(droplet_guid, tmp_dir) + def download_droplet(droplet_guid, tmp_dir, client = droplet_blobstore_client) tempfile = Tempfile.new('droplet-download-benchmark', tmp_dir) ::Benchmark.realtime do - droplet_blobstore_client.download_from_blobstore(droplet_guid, tempfile.path) + client.download_from_blobstore(droplet_guid, tempfile.path) end end @@ -128,15 +153,27 @@ def copy_to_blobstore(path, client) end def buildpack_blobstore_client - @buildpack_blobstore_client ||= CloudController::DependencyLocator.instance.buildpack_blobstore + @buildpack_blobstore_client_fog ||= CloudController::DependencyLocator.instance.buildpack_blobstore(blobstore_type: 'fog') + end + + def buildpack_blobstore_client_storage_cli + @buildpack_blobstore_client_storage_cli ||= CloudController::DependencyLocator.instance.buildpack_blobstore(blobstore_type: 'storage-cli') end def droplet_blobstore_client - @droplet_blobstore_client ||= CloudController::DependencyLocator.instance.droplet_blobstore + @droplet_blobstore_client_fog ||= CloudController::DependencyLocator.instance.droplet_blobstore(blobstore_type: 'fog') + end + + def droplet_blobstore_client_storage_cli + 
@droplet_blobstore_client_storage_cli ||= CloudController::DependencyLocator.instance.droplet_blobstore(blobstore_type: 'storage-cli') end def package_blobstore_client - @package_blobstore_client ||= CloudController::DependencyLocator.instance.package_blobstore + @package_blobstore_client_fog ||= CloudController::DependencyLocator.instance.package_blobstore(blobstore_type: 'fog') + end + + def package_blobstore_client_storage_cli + @package_blobstore_client_storage_cli ||= CloudController::DependencyLocator.instance.package_blobstore(blobstore_type: 'storage-cli') end def resource_pool diff --git a/lib/cloud_controller/dependency_locator.rb b/lib/cloud_controller/dependency_locator.rb index 6d7aabc4fb8..88ec34a8358 100644 --- a/lib/cloud_controller/dependency_locator.rb +++ b/lib/cloud_controller/dependency_locator.rb @@ -131,8 +131,9 @@ def index_stopper @dependencies[:index_stopper] || register(:index_stopper, IndexStopper.new(runners)) end - def droplet_blobstore - options = config.get(:droplets) + def droplet_blobstore(blobstore_type: nil) + options = config.get(:droplets).dup + options[:blobstore_type] = blobstore_type if blobstore_type Blobstore::ClientProvider.provide( options: options, @@ -152,8 +153,9 @@ def buildpack_cache_blobstore ) end - def package_blobstore - options = config.get(:packages) + def package_blobstore(blobstore_type: nil) + options = config.get(:packages).dup + options[:blobstore_type] = blobstore_type if blobstore_type Blobstore::ClientProvider.provide( options: options, @@ -183,8 +185,9 @@ def global_app_bits_cache ) end - def buildpack_blobstore - options = config.get(:buildpacks) + def buildpack_blobstore(blobstore_type: nil) + options = config.get(:buildpacks).dup + options[:blobstore_type] = blobstore_type if blobstore_type Blobstore::ClientProvider.provide( options: options, From 3fcfcae0f9292a48eb14e3814d6f31b3ef9ef91c Mon Sep 17 00:00:00 2001 From: Katharina Przybill <30441792+kathap@users.noreply.github.com> Date: Thu, 8 Jan 
2026 16:32:53 +0100 Subject: [PATCH 2/7] restructure perform method --- lib/cloud_controller/benchmark/blobstore.rb | 156 ++++++++++++-------- 1 file changed, 95 insertions(+), 61 deletions(-) diff --git a/lib/cloud_controller/benchmark/blobstore.rb b/lib/cloud_controller/benchmark/blobstore.rb index 4fb11699d15..7cda7609297 100644 --- a/lib/cloud_controller/benchmark/blobstore.rb +++ b/lib/cloud_controller/benchmark/blobstore.rb @@ -14,60 +14,12 @@ def perform zip_output_dir = Dir.mktmpdir zip_file = zip_resources(resource_dir, zip_output_dir) - package_guid, resource_timing = upload_package(zip_file, package_blobstore_client) - puts("package upload timing fog: #{resource_timing * 1000}ms") - package_guid_cli, resource_timing_cli = upload_package(zip_file, package_blobstore_client_storage_cli) - puts("package upload timing storage-cli: #{resource_timing_cli * 1000}ms") - - resource_timing = download_package(package_guid, resource_dir, package_blobstore_client) - puts("package download timing fog: #{resource_timing * 1000}ms") - resource_timing_cli = download_package(package_guid_cli, resource_dir, package_blobstore_client_storage_cli) - puts("package download timing storage-cli: #{resource_timing_cli * 1000}ms") - - bytes_read, resource_timing = download_buildpacks(resource_dir, buildpack_blobstore_client) - bytes_read_cli, resource_timing_cli = download_buildpacks(resource_dir, buildpack_blobstore_client_storage_cli) - puts("downloaded #{Buildpack.count} buildpacks, total fog #{bytes_read} bytes read") - puts("downloaded #{Buildpack.count} buildpacks, total storage-cli #{bytes_read_cli} bytes read") - puts("buildpack download timing fog: #{resource_timing * 1000}ms") - puts("buildpack download timing storage-cli: #{resource_timing_cli * 1000}ms") - - droplet_guid, resource_timing = upload_droplet(zip_file, droplet_blobstore_client) - puts("droplet upload timing fog: #{resource_timing * 1000}ms") - droplet_guid_cli, resource_timing_cli = upload_droplet(zip_file, 
droplet_blobstore_client_storage_cli) - puts("droplet upload timing storage-cli: #{resource_timing_cli * 1000}ms") - - resource_timing = download_droplet(droplet_guid, resource_dir, droplet_blobstore_client) - puts("droplet download timing fog: #{resource_timing * 1000}ms") - resource_timing_cli = download_droplet(droplet_guid_cli, resource_dir, droplet_blobstore_client_storage_cli) - puts("droplet download timing storage-cli: #{resource_timing_cli * 1000}ms") - - big_droplet_file = Tempfile.new('big-droplet', resource_dir) - big_droplet_file.write('abc' * 1024 * 1024 * 100) - big_droplet_file.flush - big_droplet_file.rewind - big_droplet_guid, resource_timing = upload_droplet(big_droplet_file.path, droplet_blobstore_client) - big_droplet_file_cli = Tempfile.new('big-droplet', resource_dir) - big_droplet_file_cli.write('abc' * 1024 * 1024 * 100) - big_droplet_file_cli.flush - big_droplet_file_cli.rewind - big_droplet_guid_cli, resource_timing_cli = upload_droplet(big_droplet_file_cli.path, droplet_blobstore_client_storage_cli) - puts("big droplet upload timing fog: #{resource_timing * 1000}ms") - puts("big droplet upload timing storage-cli: #{resource_timing_cli * 1000}ms") - resource_timing = download_droplet(big_droplet_guid, resource_dir, droplet_blobstore_client) - puts("big droplet download timing fog: #{resource_timing * 1000}ms") - resource_timing_cli = download_droplet(big_droplet_guid_cli, resource_dir, droplet_blobstore_client_storage_cli) - puts("big droplet download timing storage-cli: #{resource_timing_cli * 1000}ms") + benchmark_packages(zip_file, resource_dir) + benchmark_buildpacks(resource_dir) + benchmark_droplets(zip_file, resource_dir) + benchmark_big_droplets(resource_dir) ensure - FileUtils.remove_dir(resource_dir, true) - FileUtils.remove_dir(zip_output_dir, true) - big_droplet_file.close - big_droplet_file_cli.close - package_blobstore_client.delete(package_guid) if package_guid - droplet_blobstore_client.delete(droplet_guid) if droplet_guid 
- droplet_blobstore_client.delete(big_droplet_guid) if big_droplet_guid - package_blobstore_client_storage_cli.delete(package_guid_cli) if package_guid_cli - droplet_blobstore_client_storage_cli.delete(droplet_guid_cli) if droplet_guid_cli - droplet_blobstore_client_storage_cli.delete(big_droplet_guid_cli) if big_droplet_guid_cli + cleanup(resource_dir, zip_output_dir) end def resource_match(dir_path) @@ -80,18 +32,18 @@ def resource_match(dir_path) end end - def upload_package(package_path, client = package_blobstore_client) + def upload_package(package_path, client=package_blobstore_client) copy_to_blobstore(package_path, client) end - def download_package(package_guid, tmp_dir, client = package_blobstore_client) + def download_package(package_guid, tmp_dir, client=package_blobstore_client) tempfile = Tempfile.new('package-download-benchmark', tmp_dir) ::Benchmark.realtime do client.download_from_blobstore(package_guid, tempfile.path) end end - def download_buildpacks(tmp_dir, client = buildpack_blobstore_client) + def download_buildpacks(tmp_dir, client=buildpack_blobstore_client) tempfile = Tempfile.new('buildpack-download-benchmark', tmp_dir) bytes_read = 0 @@ -105,11 +57,11 @@ def download_buildpacks(tmp_dir, client = buildpack_blobstore_client) [bytes_read, timing] end - def upload_droplet(droplet_path, client = droplet_blobstore_client) + def upload_droplet(droplet_path, client=droplet_blobstore_client) copy_to_blobstore(droplet_path, client) end - def download_droplet(droplet_guid, tmp_dir, client = droplet_blobstore_client) + def download_droplet(droplet_guid, tmp_dir, client=droplet_blobstore_client) tempfile = Tempfile.new('droplet-download-benchmark', tmp_dir) ::Benchmark.realtime do @@ -117,6 +69,88 @@ def download_droplet(droplet_guid, tmp_dir, client = droplet_blobstore_client) end end + def benchmark_packages(zip_file, resource_dir) + fog_guid, fog_time = upload_package(zip_file, package_blobstore_client) + cli_guid, cli_time = 
upload_package(zip_file, package_blobstore_client_storage_cli) + + puts("package upload timing fog: #{fog_time * 1000}ms") + puts("package upload timing storage-cli: #{cli_time * 1000}ms") + + fog_dl = download_package(fog_guid, resource_dir, package_blobstore_client) + cli_dl = download_package(cli_guid, resource_dir, package_blobstore_client_storage_cli) + + puts("package download timing fog: #{fog_dl * 1000}ms") + puts("package download timing storage-cli: #{cli_dl * 1000}ms") + + remember_cleanup(:package, fog_guid, cli_guid) + end + + def benchmark_buildpacks(resource_dir) + fog_bytes, fog_time = download_buildpacks(resource_dir, buildpack_blobstore_client) + cli_bytes, cli_time = download_buildpacks(resource_dir, buildpack_blobstore_client_storage_cli) + + puts("downloaded #{Buildpack.count} buildpacks, total fog #{fog_bytes} bytes read") + puts("downloaded #{Buildpack.count} buildpacks, total storage-cli #{cli_bytes} bytes read") + puts("buildpack download timing fog: #{fog_time * 1000}ms") + puts("buildpack download timing storage-cli: #{cli_time * 1000}ms") + end + + def benchmark_droplets(zip_file, resource_dir) + fog_guid, fog_time = upload_droplet(zip_file, droplet_blobstore_client) + cli_guid, cli_time = upload_droplet(zip_file, droplet_blobstore_client_storage_cli) + + puts("droplet upload timing fog: #{fog_time * 1000}ms") + puts("droplet upload timing storage-cli: #{cli_time * 1000}ms") + + fog_dl = download_droplet(fog_guid, resource_dir, droplet_blobstore_client) + cli_dl = download_droplet(cli_guid, resource_dir, droplet_blobstore_client_storage_cli) + + puts("droplet download timing fog: #{fog_dl * 1000}ms") + puts("droplet download timing storage-cli: #{cli_dl * 1000}ms") + + remember_cleanup(:droplet, fog_guid, cli_guid) + end + + def benchmark_big_droplets(resource_dir) + fog_guid, fog_time = upload_big_droplet(resource_dir, droplet_blobstore_client) + cli_guid, cli_time = upload_big_droplet(resource_dir, droplet_blobstore_client_storage_cli) 
+ + puts("big droplet upload timing fog: #{fog_time * 1000}ms") + puts("big droplet upload timing storage-cli: #{cli_time * 1000}ms") + + fog_dl = download_droplet(fog_guid, resource_dir, droplet_blobstore_client) + cli_dl = download_droplet(cli_guid, resource_dir, droplet_blobstore_client_storage_cli) + + puts("big droplet download timing fog: #{fog_dl * 1000}ms") + puts("big droplet download timing storage-cli: #{cli_dl * 1000}ms") + + remember_cleanup(:droplet, fog_guid, cli_guid) + end + + def remember_cleanup(type, fog_guid, cli_guid) + cleanup_items << [type, fog_guid, cli_guid] + end + + def cleanup(resource_dir, zip_output_dir) + FileUtils.remove_dir(resource_dir, true) + FileUtils.remove_dir(zip_output_dir, true) + + cleanup_items.each do |type, fog_guid, cli_guid| + client_fog, client_cli = + case type + when :package then [package_blobstore_client, package_blobstore_client_storage_cli] + when :droplet then [droplet_blobstore_client, droplet_blobstore_client_storage_cli] + end + + client_fog.delete(fog_guid) if fog_guid + client_cli.delete(cli_guid) if cli_guid + end + end + + def cleanup_items + @cleanup_items ||= [] + end + private def generate_resources @@ -153,7 +187,7 @@ def copy_to_blobstore(path, client) end def buildpack_blobstore_client - @buildpack_blobstore_client_fog ||= CloudController::DependencyLocator.instance.buildpack_blobstore(blobstore_type: 'fog') + @buildpack_blobstore_client ||= CloudController::DependencyLocator.instance.buildpack_blobstore(blobstore_type: 'fog') end def buildpack_blobstore_client_storage_cli @@ -161,7 +195,7 @@ def buildpack_blobstore_client_storage_cli end def droplet_blobstore_client - @droplet_blobstore_client_fog ||= CloudController::DependencyLocator.instance.droplet_blobstore(blobstore_type: 'fog') + @droplet_blobstore_client ||= CloudController::DependencyLocator.instance.droplet_blobstore(blobstore_type: 'fog') end def droplet_blobstore_client_storage_cli @@ -169,7 +203,7 @@ def 
droplet_blobstore_client_storage_cli end def package_blobstore_client - @package_blobstore_client_fog ||= CloudController::DependencyLocator.instance.package_blobstore(blobstore_type: 'fog') + @package_blobstore_client ||= CloudController::DependencyLocator.instance.package_blobstore(blobstore_type: 'fog') end def package_blobstore_client_storage_cli From 6740b342805bcc935e4801d6975fd2b48b34b857 Mon Sep 17 00:00:00 2001 From: Katharina Przybill <30441792+kathap@users.noreply.github.com> Date: Thu, 15 Jan 2026 12:44:06 +0100 Subject: [PATCH 3/7] prepare ccng for blobstore benchmark errand --- lib/cloud_controller/benchmark/blobstore.rb | 153 +++++------------- lib/cloud_controller/config.rb | 1 + .../blobstore_benchmarks_schema.rb | 82 ++++++++++ lib/cloud_controller/dependency_locator.rb | 15 +- lib/tasks/blobstore_benchmarks.rake | 3 +- 5 files changed, 132 insertions(+), 122 deletions(-) create mode 100644 lib/cloud_controller/config_schemas/blobstore_benchmarks_schema.rb diff --git a/lib/cloud_controller/benchmark/blobstore.rb b/lib/cloud_controller/benchmark/blobstore.rb index 7cda7609297..141026af084 100644 --- a/lib/cloud_controller/benchmark/blobstore.rb +++ b/lib/cloud_controller/benchmark/blobstore.rb @@ -14,12 +14,35 @@ def perform zip_output_dir = Dir.mktmpdir zip_file = zip_resources(resource_dir, zip_output_dir) - benchmark_packages(zip_file, resource_dir) - benchmark_buildpacks(resource_dir) - benchmark_droplets(zip_file, resource_dir) - benchmark_big_droplets(resource_dir) + package_guid, resource_timing = upload_package(zip_file) + puts("package upload timing: #{resource_timing * 1000}ms") + + resource_timing = download_package(package_guid, resource_dir) + puts("package download timing: #{resource_timing * 1000}ms") + + bytes_read, resource_timing = download_buildpacks(resource_dir) + puts("downloaded #{Buildpack.count} buildpacks, total #{bytes_read} bytes read") + puts("buildpack download timing: #{resource_timing * 1000}ms") + + droplet_guid, 
resource_timing = upload_droplet(zip_file) + puts("droplet upload timing: #{resource_timing * 1000}ms") + + resource_timing = download_droplet(droplet_guid, resource_dir) + puts("droplet download timing: #{resource_timing * 1000}ms") + + big_droplet_file = Tempfile.new('big-droplet', resource_dir) + big_droplet_file.write('abc' * 1024 * 1024 * 100) + big_droplet_guid, resource_timing = upload_droplet(big_droplet_file.path) + puts("big droplet upload timing: #{resource_timing * 1000}ms") + + resource_timing = download_droplet(big_droplet_guid, resource_dir) + puts("big droplet download timing: #{resource_timing * 1000}ms") ensure - cleanup(resource_dir, zip_output_dir) + FileUtils.remove_dir(resource_dir, true) + FileUtils.remove_dir(zip_output_dir, true) + package_blobstore_client.delete(package_guid) if package_guid + droplet_blobstore_client.delete(droplet_guid) if droplet_guid + droplet_blobstore_client.delete(big_droplet_guid) if big_droplet_guid end def resource_match(dir_path) @@ -32,24 +55,24 @@ def resource_match(dir_path) end end - def upload_package(package_path, client=package_blobstore_client) - copy_to_blobstore(package_path, client) + def upload_package(package_path) + copy_to_blobstore(package_path, package_blobstore_client) end - def download_package(package_guid, tmp_dir, client=package_blobstore_client) + def download_package(package_guid, tmp_dir) tempfile = Tempfile.new('package-download-benchmark', tmp_dir) ::Benchmark.realtime do - client.download_from_blobstore(package_guid, tempfile.path) + package_blobstore_client.download_from_blobstore(package_guid, tempfile.path) end end - def download_buildpacks(tmp_dir, client=buildpack_blobstore_client) + def download_buildpacks(tmp_dir) tempfile = Tempfile.new('buildpack-download-benchmark', tmp_dir) bytes_read = 0 timing = ::Benchmark.realtime do bytes_read = Buildpack.map do |buildpack| - client.download_from_blobstore(buildpack.key, tempfile.path) + 
buildpack_blobstore_client.download_from_blobstore(buildpack.key, tempfile.path) File.stat(tempfile.path).size end.sum end @@ -57,100 +80,18 @@ def download_buildpacks(tmp_dir, client=buildpack_blobstore_client) [bytes_read, timing] end - def upload_droplet(droplet_path, client=droplet_blobstore_client) - copy_to_blobstore(droplet_path, client) + def upload_droplet(droplet_path) + copy_to_blobstore(droplet_path, droplet_blobstore_client) end - def download_droplet(droplet_guid, tmp_dir, client=droplet_blobstore_client) + def download_droplet(droplet_guid, tmp_dir) tempfile = Tempfile.new('droplet-download-benchmark', tmp_dir) ::Benchmark.realtime do - client.download_from_blobstore(droplet_guid, tempfile.path) - end - end - - def benchmark_packages(zip_file, resource_dir) - fog_guid, fog_time = upload_package(zip_file, package_blobstore_client) - cli_guid, cli_time = upload_package(zip_file, package_blobstore_client_storage_cli) - - puts("package upload timing fog: #{fog_time * 1000}ms") - puts("package upload timing storage-cli: #{cli_time * 1000}ms") - - fog_dl = download_package(fog_guid, resource_dir, package_blobstore_client) - cli_dl = download_package(cli_guid, resource_dir, package_blobstore_client_storage_cli) - - puts("package download timing fog: #{fog_dl * 1000}ms") - puts("package download timing storage-cli: #{cli_dl * 1000}ms") - - remember_cleanup(:package, fog_guid, cli_guid) - end - - def benchmark_buildpacks(resource_dir) - fog_bytes, fog_time = download_buildpacks(resource_dir, buildpack_blobstore_client) - cli_bytes, cli_time = download_buildpacks(resource_dir, buildpack_blobstore_client_storage_cli) - - puts("downloaded #{Buildpack.count} buildpacks, total fog #{fog_bytes} bytes read") - puts("downloaded #{Buildpack.count} buildpacks, total storage-cli #{cli_bytes} bytes read") - puts("buildpack download timing fog: #{fog_time * 1000}ms") - puts("buildpack download timing storage-cli: #{cli_time * 1000}ms") - end - - def 
benchmark_droplets(zip_file, resource_dir) - fog_guid, fog_time = upload_droplet(zip_file, droplet_blobstore_client) - cli_guid, cli_time = upload_droplet(zip_file, droplet_blobstore_client_storage_cli) - - puts("droplet upload timing fog: #{fog_time * 1000}ms") - puts("droplet upload timing storage-cli: #{cli_time * 1000}ms") - - fog_dl = download_droplet(fog_guid, resource_dir, droplet_blobstore_client) - cli_dl = download_droplet(cli_guid, resource_dir, droplet_blobstore_client_storage_cli) - - puts("droplet download timing fog: #{fog_dl * 1000}ms") - puts("droplet download timing storage-cli: #{cli_dl * 1000}ms") - - remember_cleanup(:droplet, fog_guid, cli_guid) - end - - def benchmark_big_droplets(resource_dir) - fog_guid, fog_time = upload_big_droplet(resource_dir, droplet_blobstore_client) - cli_guid, cli_time = upload_big_droplet(resource_dir, droplet_blobstore_client_storage_cli) - - puts("big droplet upload timing fog: #{fog_time * 1000}ms") - puts("big droplet upload timing storage-cli: #{cli_time * 1000}ms") - - fog_dl = download_droplet(fog_guid, resource_dir, droplet_blobstore_client) - cli_dl = download_droplet(cli_guid, resource_dir, droplet_blobstore_client_storage_cli) - - puts("big droplet download timing fog: #{fog_dl * 1000}ms") - puts("big droplet download timing storage-cli: #{cli_dl * 1000}ms") - - remember_cleanup(:droplet, fog_guid, cli_guid) - end - - def remember_cleanup(type, fog_guid, cli_guid) - cleanup_items << [type, fog_guid, cli_guid] - end - - def cleanup(resource_dir, zip_output_dir) - FileUtils.remove_dir(resource_dir, true) - FileUtils.remove_dir(zip_output_dir, true) - - cleanup_items.each do |type, fog_guid, cli_guid| - client_fog, client_cli = - case type - when :package then [package_blobstore_client, package_blobstore_client_storage_cli] - when :droplet then [droplet_blobstore_client, droplet_blobstore_client_storage_cli] - end - - client_fog.delete(fog_guid) if fog_guid - client_cli.delete(cli_guid) if cli_guid + 
droplet_blobstore_client.download_from_blobstore(droplet_guid, tempfile.path) end end - def cleanup_items - @cleanup_items ||= [] - end - private def generate_resources @@ -187,27 +128,15 @@ def copy_to_blobstore(path, client) end def buildpack_blobstore_client - @buildpack_blobstore_client ||= CloudController::DependencyLocator.instance.buildpack_blobstore(blobstore_type: 'fog') - end - - def buildpack_blobstore_client_storage_cli - @buildpack_blobstore_client_storage_cli ||= CloudController::DependencyLocator.instance.buildpack_blobstore(blobstore_type: 'storage-cli') + @buildpack_blobstore_client ||= CloudController::DependencyLocator.instance.buildpack_blobstore end def droplet_blobstore_client - @droplet_blobstore_client ||= CloudController::DependencyLocator.instance.droplet_blobstore(blobstore_type: 'fog') - end - - def droplet_blobstore_client_storage_cli - @droplet_blobstore_client_storage_cli ||= CloudController::DependencyLocator.instance.droplet_blobstore(blobstore_type: 'storage-cli') + @droplet_blobstore_client ||= CloudController::DependencyLocator.instance.droplet_blobstore end def package_blobstore_client - @package_blobstore_client ||= CloudController::DependencyLocator.instance.package_blobstore(blobstore_type: 'fog') - end - - def package_blobstore_client_storage_cli - @package_blobstore_client_storage_cli ||= CloudController::DependencyLocator.instance.package_blobstore(blobstore_type: 'storage-cli') + @package_blobstore_client ||= CloudController::DependencyLocator.instance.package_blobstore end def resource_pool diff --git a/lib/cloud_controller/config.rb b/lib/cloud_controller/config.rb index cd46300044f..78f0d6679cb 100644 --- a/lib/cloud_controller/config.rb +++ b/lib/cloud_controller/config.rb @@ -12,6 +12,7 @@ require 'cloud_controller/config_schemas/worker_schema' require 'cloud_controller/config_schemas/deployment_updater_schema' require 'cloud_controller/config_schemas/rotate_database_key_schema' +require 
'cloud_controller/config_schemas/blobstore_benchmarks_schema' require 'utils/hash_utils' module VCAP::CloudController diff --git a/lib/cloud_controller/config_schemas/blobstore_benchmarks_schema.rb b/lib/cloud_controller/config_schemas/blobstore_benchmarks_schema.rb new file mode 100644 index 00000000000..f2e50bbf262 --- /dev/null +++ b/lib/cloud_controller/config_schemas/blobstore_benchmarks_schema.rb @@ -0,0 +1,82 @@ +require 'vcap/config' + +module VCAP::CloudController + module ConfigSchemas + class BlobstoreBenchmarksSchema < VCAP::Config + # rubocop:disable Metrics/BlockLength + define_schema do + blobstore_section = { + optional(:blobstore_type) => String, + optional(:blobstore_provider) => String, + + optional(:fog_connection) => Hash, + optional(:connection_config) => Hash, + optional(:webdav_config) => Hash, + + optional(:fog_aws_storage_options) => Hash, + optional(:fog_gcp_storage_options) => Hash, + + optional(:resource_directory_key) => String, + optional(:buildpack_directory_key) => String, + optional(:app_package_directory_key) => String, + optional(:droplet_directory_key) => String, + + optional(:maximum_size) => Integer, + optional(:minimum_size) => Integer, + optional(:max_package_size) => Integer, + optional(:max_valid_packages_stored) => Integer, + optional(:max_staged_droplets_stored) => Integer + } + + { + optional(:logging) => { + optional(:level) => String, + optional(:file) => String, + optional(:syslog) => String, + optional(:stdout_sink_enabled) => bool + }, + + db: { + optional(:database) => Hash, + optional(:db_connection_string) => String, + optional(:max_connections) => Integer, + optional(:pool_timeout) => Integer, + optional(:log_level) => String, + optional(:log_db_queries) => bool, + optional(:ssl_verify_hostname) => bool, + optional(:connection_validation_timeout) => Integer, + optional(:ca_cert_path) => String + }, + optional(:storage_cli_config_file_resource_pool) => String, + optional(:storage_cli_config_file_buildpacks) => 
String, + optional(:storage_cli_config_file_packages) => String, + optional(:storage_cli_config_file_droplets) => String, + + optional(:db_encryption_key) => enum(String, NilClass), + optional(:database_encryption) => { + keys: Hash, + current_key_label: String, + optional(:pbkdf2_hmac_iterations) => Integer + }, + + resource_pool: blobstore_section, + buildpacks: blobstore_section, + packages: blobstore_section, + droplets: blobstore_section, + + optional(:pid_filename) => String, + optional(:index) => Integer, + optional(:name) => String, + optional(:default_app_ssh_access) => bool + } + end + # rubocop:enable Metrics/BlockLength + + class << self + def configure_components(config) + ResourcePool.instance = ResourcePool.new(config) + end + end + end + end +end diff --git a/lib/cloud_controller/dependency_locator.rb b/lib/cloud_controller/dependency_locator.rb index 88ec34a8358..6d7aabc4fb8 100644 --- a/lib/cloud_controller/dependency_locator.rb +++ b/lib/cloud_controller/dependency_locator.rb @@ -131,9 +131,8 @@ def index_stopper @dependencies[:index_stopper] || register(:index_stopper, IndexStopper.new(runners)) end - def droplet_blobstore(blobstore_type: nil) - options = config.get(:droplets).dup - options[:blobstore_type] = blobstore_type if blobstore_type + def droplet_blobstore + options = config.get(:droplets) Blobstore::ClientProvider.provide( options: options, @@ -153,9 +152,8 @@ def buildpack_cache_blobstore ) end - def package_blobstore(blobstore_type: nil) - options = config.get(:packages).dup - options[:blobstore_type] = blobstore_type if blobstore_type + def package_blobstore + options = config.get(:packages) Blobstore::ClientProvider.provide( options: options, @@ -185,9 +183,8 @@ def global_app_bits_cache ) end - def buildpack_blobstore(blobstore_type: nil) - options = config.get(:buildpacks).dup - options[:blobstore_type] = blobstore_type if blobstore_type + def buildpack_blobstore + options = config.get(:buildpacks) 
Blobstore::ClientProvider.provide( options: options, diff --git a/lib/tasks/blobstore_benchmarks.rake b/lib/tasks/blobstore_benchmarks.rake index 1969bec1d12..906df9ee620 100644 --- a/lib/tasks/blobstore_benchmarks.rake +++ b/lib/tasks/blobstore_benchmarks.rake @@ -3,7 +3,8 @@ require 'cloud_controller/benchmark/blobstore' namespace :benchmarks do desc 'Perform blobstore benchmark' task perform_blobstore_benchmark: :environment do - BackgroundJobEnvironment.new(RakeConfig.config).setup_environment do + RakeConfig.context = :blobstore_benchmarks + BoshErrandEnvironment.new(RakeConfig.config).setup_environment do VCAP::CloudController::Benchmark::Blobstore.new.perform end end From c2b5e4b21622d615c353108b64c3f6db0bd74043 Mon Sep 17 00:00:00 2001 From: Katharina Przybill <30441792+kathap@users.noreply.github.com> Date: Fri, 16 Jan 2026 14:14:02 +0100 Subject: [PATCH 4/7] finalize benchmark schema --- .../blobstore_benchmarks_schema.rb | 45 +++++++++---------- 1 file changed, 22 insertions(+), 23 deletions(-) diff --git a/lib/cloud_controller/config_schemas/blobstore_benchmarks_schema.rb b/lib/cloud_controller/config_schemas/blobstore_benchmarks_schema.rb index f2e50bbf262..c19f8b34bdf 100644 --- a/lib/cloud_controller/config_schemas/blobstore_benchmarks_schema.rb +++ b/lib/cloud_controller/config_schemas/blobstore_benchmarks_schema.rb @@ -6,15 +6,14 @@ class BlobstoreBenchmarksSchema < VCAP::Config # rubocop:disable Metrics/BlockLength define_schema do blobstore_section = { - optional(:blobstore_type) => String, - optional(:blobstore_provider) => String, + blobstore_type: String, + blobstore_provider: String, - optional(:fog_connection) => Hash, optional(:connection_config) => Hash, - optional(:webdav_config) => Hash, + optional(:fog_connection) => Hash, - optional(:fog_aws_storage_options) => Hash, - optional(:fog_gcp_storage_options) => Hash, + fog_aws_storage_options: Hash, + fog_gcp_storage_options: Hash, optional(:resource_directory_key) => String, 
optional(:buildpack_directory_key) => String, @@ -37,22 +36,22 @@ class BlobstoreBenchmarksSchema < VCAP::Config }, db: { - optional(:database) => Hash, - optional(:db_connection_string) => String, - optional(:max_connections) => Integer, - optional(:pool_timeout) => Integer, - optional(:log_level) => String, - optional(:log_db_queries) => bool, - optional(:ssl_verify_hostname) => bool, - optional(:connection_validation_timeout) => Integer, + optional(:database) => Hash, # db connection hash for sequel + max_connections: Integer, # max connections in the connection pool + pool_timeout: Integer, # timeout before raising an error when connection can't be established to the db + log_level: String, # debug, info, etc. + log_db_queries: bool, + ssl_verify_hostname: bool, + connection_validation_timeout: Integer, optional(:ca_cert_path) => String }, - optional(:storage_cli_config_file_resource_pool) => String, - optional(:storage_cli_config_file_buildpacks) => String, - optional(:storage_cli_config_file_packages) => String, - optional(:storage_cli_config_file_droplets) => String, + storage_cli_config_file_resource_pool: String, + storage_cli_config_file_buildpacks: String, + storage_cli_config_file_packages: String, + storage_cli_config_file_droplets: String, + + db_encryption_key: enum(String, NilClass), - optional(:db_encryption_key) => enum(String, NilClass), optional(:database_encryption) => { keys: Hash, current_key_label: String, @@ -64,10 +63,10 @@ class BlobstoreBenchmarksSchema < VCAP::Config packages: blobstore_section, droplets: blobstore_section, - optional(:pid_filename) => String, - optional(:index) => Integer, - optional(:name) => String, - optional(:default_app_ssh_access) => bool + pid_filename: String, + index: Integer, # Component index (cc-0, cc-1, etc) + name: String, # Component name (api_z1, api_z2) + default_app_ssh_access: bool } end # rubocop:enable Metrics/BlockLength From 4ca4df7c8bb4adffd6232a750741af695de94b7f Mon Sep 17 00:00:00 2001 From: 
Katharina Przybill <30441792+kathap@users.noreply.github.com> Date: Sun, 18 Jan 2026 11:37:43 +0100 Subject: [PATCH 5/7] add more upload file sizes --- lib/cloud_controller/benchmark/blobstore.rb | 23 +++++++++++++++++++++ 1 file changed, 23 insertions(+) diff --git a/lib/cloud_controller/benchmark/blobstore.rb b/lib/cloud_controller/benchmark/blobstore.rb index 141026af084..c8e734be396 100644 --- a/lib/cloud_controller/benchmark/blobstore.rb +++ b/lib/cloud_controller/benchmark/blobstore.rb @@ -35,6 +35,29 @@ def perform big_droplet_guid, resource_timing = upload_droplet(big_droplet_file.path) puts("big droplet upload timing: #{resource_timing * 1000}ms") + [ + ['0.005MB', (0.005 * 1024 * 1024).to_i], + ['10MB', 10 * 1024 * 1024], + ['200MB', 200 * 1024 * 1024], + ['500MB', 500 * 1024 * 1024] + ].each do |label, size| + tempfile = Tempfile.new("big-droplet-#{label}", resource_dir) + File.open(tempfile.path, 'wb') do |f| + chunk = '0' * (1024 * 1024) # 1MB chunk + written = 0 + while written < size + to_write = [chunk.bytesize, size - written].min + f.write(chunk.byteslice(0, to_write)) + written += to_write + end + end + + big_droplet_guid, resource_timing = upload_droplet(tempfile.path) + puts("big droplet #{label} upload timing: #{resource_timing * 1000}ms") + + tempfile.close! 
+ end + resource_timing = download_droplet(big_droplet_guid, resource_dir) puts("big droplet download timing: #{resource_timing * 1000}ms") ensure From 31414e46eb4f567bb891977a087aa824b949a9db Mon Sep 17 00:00:00 2001 From: Katharina Przybill <30441792+kathap@users.noreply.github.com> Date: Mon, 19 Jan 2026 10:09:25 +0100 Subject: [PATCH 6/7] add more bigger files for upload time measuring --- lib/cloud_controller/benchmark/blobstore.rb | 176 +++++++++++--------- 1 file changed, 99 insertions(+), 77 deletions(-) diff --git a/lib/cloud_controller/benchmark/blobstore.rb b/lib/cloud_controller/benchmark/blobstore.rb index c8e734be396..79027ca615e 100644 --- a/lib/cloud_controller/benchmark/blobstore.rb +++ b/lib/cloud_controller/benchmark/blobstore.rb @@ -1,77 +1,78 @@ +# frozen_string_literal: true + require 'benchmark' require 'find' require 'zip' +require 'tempfile' +require 'fileutils' +require 'securerandom' module VCAP::CloudController module Benchmark class Blobstore + SIZES = [ + ['0.005MB', (0.005 * 1024 * 1024).to_i], + ['0.01MB', (0.01 * 1024 * 1024).to_i], + ['0.1MB', (0.1 * 1024 * 1024).to_i], + ['1MB', 1 * 1024 * 1024], + ['10MB', 10 * 1024 * 1024], + ['100MB', 100 * 1024 * 1024], + ['200MB', 200 * 1024 * 1024], + ['300MB', 300 * 1024 * 1024], + ['400MB', 400 * 1024 * 1024], + ['500MB', 500 * 1024 * 1024], + ['600MB', 600 * 1024 * 1024], + ['700MB', 700 * 1024 * 1024], + ['800MB', 800 * 1024 * 1024], + ['900MB', 900 * 1024 * 1024], + ['1000MB', 1000 * 1024 * 1024] + ].freeze + + CHUNK_1MB = '0'.b * (1024 * 1024) + def perform + big_droplet_guids = [] resource_dir = generate_resources - - resource_timing = resource_match(resource_dir) - puts("resource match timing: #{resource_timing * 1000}ms") + log_timing('resource match timing', resource_match(resource_dir)) zip_output_dir = Dir.mktmpdir zip_file = zip_resources(resource_dir, zip_output_dir) - package_guid, resource_timing = upload_package(zip_file) - puts("package upload timing: #{resource_timing * 
1000}ms") - - resource_timing = download_package(package_guid, resource_dir) - puts("package download timing: #{resource_timing * 1000}ms") + package_guid, timing = upload_package(zip_file) + log_timing('package upload timing', timing) + log_timing('package download timing', download_package(package_guid, resource_dir)) - bytes_read, resource_timing = download_buildpacks(resource_dir) + bytes_read, timing = download_buildpacks(resource_dir) puts("downloaded #{Buildpack.count} buildpacks, total #{bytes_read} bytes read") - puts("buildpack download timing: #{resource_timing * 1000}ms") - - droplet_guid, resource_timing = upload_droplet(zip_file) - puts("droplet upload timing: #{resource_timing * 1000}ms") - - resource_timing = download_droplet(droplet_guid, resource_dir) - puts("droplet download timing: #{resource_timing * 1000}ms") - - big_droplet_file = Tempfile.new('big-droplet', resource_dir) - big_droplet_file.write('abc' * 1024 * 1024 * 100) - big_droplet_guid, resource_timing = upload_droplet(big_droplet_file.path) - puts("big droplet upload timing: #{resource_timing * 1000}ms") - - [ - ['0.005MB', (0.005 * 1024 * 1024).to_i], - ['10MB', 10 * 1024 * 1024], - ['200MB', 200 * 1024 * 1024], - ['500MB', 500 * 1024 * 1024] - ].each do |label, size| - tempfile = Tempfile.new("big-droplet-#{label}", resource_dir) - File.open(tempfile.path, 'wb') do |f| - chunk = '0' * (1024 * 1024) # 1MB chunk - written = 0 - while written < size - to_write = [chunk.bytesize, size - written].min - f.write(chunk.byteslice(0, to_write)) - written += to_write - end - end + log_timing('buildpack download timing', timing) - big_droplet_guid, resource_timing = upload_droplet(tempfile.path) - puts("big droplet #{label} upload timing: #{resource_timing * 1000}ms") + droplet_guid, timing = upload_droplet(zip_file) + log_timing('droplet upload timing', timing) + log_timing('droplet download timing', download_droplet(droplet_guid, resource_dir)) - tempfile.close! 
- end + SIZES.each do |label, bytes| + Tempfile.create(["big-droplet-#{label}", '.bin'], resource_dir) do |tempfile| + write_file_of_size(tempfile.path, bytes) - resource_timing = download_droplet(big_droplet_guid, resource_dir) - puts("big droplet download timing: #{resource_timing * 1000}ms") + guid, upload_timing = upload_droplet(tempfile.path) + big_droplet_guids << guid + log_timing("big droplet #{label} upload timing", upload_timing) + log_timing("big droplet #{label} download timing", download_droplet(guid, resource_dir)) + end + end ensure - FileUtils.remove_dir(resource_dir, true) - FileUtils.remove_dir(zip_output_dir, true) - package_blobstore_client.delete(package_guid) if package_guid - droplet_blobstore_client.delete(droplet_guid) if droplet_guid - droplet_blobstore_client.delete(big_droplet_guid) if big_droplet_guid + FileUtils.remove_dir(resource_dir, true) if resource_dir + FileUtils.remove_dir(zip_output_dir, true) if zip_output_dir + + safe_delete(package_blobstore_client, package_guid) + safe_delete(droplet_blobstore_client, droplet_guid) + Array(big_droplet_guids).each { |g| safe_delete(droplet_blobstore_client, g) } end def resource_match(dir_path) - resources = Find.find(dir_path). - select { |f| File.file?(f) }. 
- map { |f| { 'size' => File.stat(f).size, 'sha1' => Digester.new.digest_path(f) } } + resources = Find.find(dir_path) + .select { |f| File.file?(f) } + .map { |f| { 'size' => File.stat(f).size, 'sha1' => Digester.new.digest_path(f) } } ::Benchmark.realtime do resource_pool.match_resources(resources) @@ -83,24 +84,24 @@ def upload_package(package_path) end def download_package(package_guid, tmp_dir) - tempfile = Tempfile.new('package-download-benchmark', tmp_dir) - ::Benchmark.realtime do - package_blobstore_client.download_from_blobstore(package_guid, tempfile.path) + Tempfile.create('package-download-benchmark', tmp_dir) do |tempfile| + ::Benchmark.realtime do + package_blobstore_client.download_from_blobstore(package_guid, tempfile.path) + end end end def download_buildpacks(tmp_dir) - tempfile = Tempfile.new('buildpack-download-benchmark', tmp_dir) - bytes_read = 0 - - timing = ::Benchmark.realtime do - bytes_read = Buildpack.map do |buildpack| - buildpack_blobstore_client.download_from_blobstore(buildpack.key, tempfile.path) - File.stat(tempfile.path).size - end.sum + Tempfile.create('buildpack-download-benchmark', tmp_dir) do |tempfile| + bytes_read = 0 + timing = ::Benchmark.realtime do + bytes_read = Buildpack.map do |buildpack| + buildpack_blobstore_client.download_from_blobstore(buildpack.key, tempfile.path) + File.stat(tempfile.path).size + end.sum + end + [bytes_read, timing] end - - [bytes_read, timing] end def upload_droplet(droplet_path) @@ -108,21 +109,44 @@ def upload_droplet(droplet_path) end def download_droplet(droplet_guid, tmp_dir) - tempfile = Tempfile.new('droplet-download-benchmark', tmp_dir) - - ::Benchmark.realtime do - droplet_blobstore_client.download_from_blobstore(droplet_guid, tempfile.path) + Tempfile.create('droplet-download-benchmark', tmp_dir) do |tempfile| + ::Benchmark.realtime do + droplet_blobstore_client.download_from_blobstore(droplet_guid, tempfile.path) + end end end private + def log_timing(label, seconds) + 
puts("#{label}: #{(seconds * 1000).round(3)}ms") + end + + def safe_delete(client, guid) + return if guid.nil? + + client.delete(guid) + rescue StandardError => e + # don't fail the benchmark run if cleanup fails + warn("cleanup failed for guid=#{guid}: #{e.class}: #{e.message}") + end + + def write_file_of_size(path, bytes) + File.open(path, 'wb') do |f| + remaining = bytes + while remaining > 0 + to_write = [CHUNK_1MB.bytesize, remaining].min + f.write(CHUNK_1MB.byteslice(0, to_write)) + remaining -= to_write + end + end + end + def generate_resources dir = Dir.mktmpdir - 100.times.each do |i| - f = File.open(File.join(dir, i.to_s), 'w') - f.write('foo' * (65_536 + i)) + 100.times do |i| + File.write(File.join(dir, i.to_s), 'foo' * (65_536 + i)) end dir @@ -131,11 +155,9 @@ def generate_resources def zip_resources(resource_dir, output_dir) zip_file = File.join(output_dir, 'zipped_package') Zip::File.open(zip_file, create: true) do |zipfile| - Find.find(resource_dir). - select { |f| File.file?(f) }. 
- each do |file| - zipfile.add(File.basename(file), file) - end + Find.find(resource_dir) + .select { |f| File.file?(f) } + .each { |file| zipfile.add(File.basename(file), file) } end zip_file end From 91dc3248777643712ea7545a6d890fa900d4b1ae Mon Sep 17 00:00:00 2001 From: Katharina Przybill <30441792+kathap@users.noreply.github.com> Date: Mon, 19 Jan 2026 12:36:23 +0100 Subject: [PATCH 7/7] enhance benchmark output --- lib/cloud_controller/benchmark/blobstore.rb | 24 +++++++++++---------- 1 file changed, 13 insertions(+), 11 deletions(-) diff --git a/lib/cloud_controller/benchmark/blobstore.rb b/lib/cloud_controller/benchmark/blobstore.rb index 79027ca615e..632209a9dd6 100644 --- a/lib/cloud_controller/benchmark/blobstore.rb +++ b/lib/cloud_controller/benchmark/blobstore.rb @@ -46,9 +46,7 @@ def perform puts("downloaded #{Buildpack.count} buildpacks, total #{bytes_read} bytes read") log_timing('buildpack download timing', timing) - droplet_guid, timing = upload_droplet(zip_file) - log_timing('droplet upload timing', timing) - log_timing('droplet download timing', download_droplet(droplet_guid, resource_dir)) + droplet_results = [] SIZES.each do |label, bytes| Tempfile.create(["big-droplet-#{label}", '.bin'], resource_dir) do |tempfile| @@ -56,10 +54,14 @@ def perform guid, upload_timing = upload_droplet(tempfile.path) big_droplet_guids << guid - log_timing("big droplet #{label} upload timing", upload_timing) - log_timing("big droplet #{label} download timing", download_droplet(guid, resource_dir)) + droplet_results << { label: "droplet #{label}", guid: guid, upload_timing: upload_timing } end end + + droplet_results.each do |r| + log_timing("#{r[:label]} upload timing", r[:upload_timing]) + log_timing("#{r[:label]} download timing", download_droplet(r[:guid], resource_dir)) + end ensure FileUtils.remove_dir(resource_dir, true) if resource_dir FileUtils.remove_dir(zip_output_dir, true) if zip_output_dir @@ -70,9 +72,9 @@ def perform end def resource_match(dir_path) 
- resources = Find.find(dir_path) - .select { |f| File.file?(f) } - .map { |f| { 'size' => File.stat(f).size, 'sha1' => Digester.new.digest_path(f) } } + resources = Find.find(dir_path). + select { |f| File.file?(f) }. + map { |f| { 'size' => File.stat(f).size, 'sha1' => Digester.new.digest_path(f) } } ::Benchmark.realtime do resource_pool.match_resources(resources) @@ -155,9 +157,9 @@ def generate_resources def zip_resources(resource_dir, output_dir) zip_file = File.join(output_dir, 'zipped_package') Zip::File.open(zip_file, create: true) do |zipfile| - Find.find(resource_dir) - .select { |f| File.file?(f) } - .each { |file| zipfile.add(File.basename(file), file) } + Find.find(resource_dir). + select { |f| File.file?(f) }. + each { |file| zipfile.add(File.basename(file), file) } end zip_file end