From e96a9f85e8c44750317fe86ff888d44203cfa3c6 Mon Sep 17 00:00:00 2001 From: Stephen Hodgson Date: Sun, 14 Dec 2025 16:15:13 -0500 Subject: [PATCH 1/4] feat(toolkit/cache): add compression level support - Added compression level support end-to-end: option plumbed through options/cache/cacheHttpClient/tar so tar creation sets gzip/zstd env and upload options carry the level. - Expanded tests: updated saveCache/saveCacheV2 expectations for new parameter, added size-driven compression tests in tar suite, adjusted options tests for clamping/overrides, and kept tar tests prettier-compliant. - Docs: refreshed cache README to describe compression-level behavior and defaults. --- packages/cache/README.md | 14 +- packages/cache/__tests__/options.test.ts | 36 ++++- packages/cache/__tests__/saveCache.test.ts | 22 ++- packages/cache/__tests__/saveCacheV2.test.ts | 36 +++-- packages/cache/__tests__/tar.test.ts | 131 ++++++++++++++++-- packages/cache/src/cache.ts | 29 ++-- .../cache/src/internal/cacheHttpClient.ts | 4 +- packages/cache/src/internal/tar.ts | 86 ++++++++++-- packages/cache/src/options.ts | 27 +++- 9 files changed, 326 insertions(+), 59 deletions(-) diff --git a/packages/cache/README.md b/packages/cache/README.md index 0f743848f6..5102dec2a8 100644 --- a/packages/cache/README.md +++ b/packages/cache/README.md @@ -24,9 +24,9 @@ Read more about the change & access the migration guide: [reference to the annou This package is used by the v2+ versions of our first party cache action. You can find an example implementation in the cache repo [here](https://github.com/actions/cache). -#### Save Cache +### Save Cache -Saves a cache containing the files in `paths` using the `key` provided. The files would be compressed using zstandard compression algorithm if zstd is installed, otherwise gzip is used. Function returns the cache id if the cache was saved succesfully and throws an error if cache upload fails. +Saves a cache containing the files in `paths` using the `key` provided. The files would be compressed using zstandard compression algorithm if zstd is installed, otherwise gzip is used. Function returns the cache id if the cache was saved successfully and throws an error if cache upload fails. ```js const cache = require('@actions/cache'); @@ -38,7 +38,13 @@ const key = 'npm-foobar-d5ea0750' const cacheId = await cache.saveCache(paths, key) ``` -#### Restore Cache +You can control archive compression when saving. Provide `compressionLevel` in `UploadOptions` (0 = no compression, 9 = maximum, default = 6) or set the `CACHE_COMPRESSION_LEVEL` environment variable: + +```js +const cacheId = await cache.saveCache(paths, key, {compressionLevel: 3}) +``` + +### Restore Cache Restores a cache based on `key` and `restoreKeys` to the `paths` provided. Function returns the cache key for cache hit and returns undefined if cache not found. @@ -56,7 +62,7 @@ const restoreKeys = [ const cacheKey = await cache.restoreCache(paths, key, restoreKeys) ``` -##### Cache segment restore timeout +### Cache segment restore timeout A cache gets downloaded in multiple segments of fixed sizes (now `128MB` to fail-fast, previously `1GB` for a `32-bit` runner and `2GB` for a `64-bit` runner were used). Sometimes, a segment download gets stuck which causes the workflow job to be stuck forever and fail. Version `v3.0.4` of cache package introduces a segment download timeout. The segment download timeout will allow the segment download to get aborted and hence allow the job to proceed with a cache miss. 
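The README example above maps directly onto the normalization the rest of this patch implements. As a self-contained sketch (the function name here is hypothetical and mirrors the behavior added to tar.ts and options.ts below, not an exported API):

```ts
// Illustrative sketch (not part of the package API): the clamping rule this
// patch applies wherever a compression level is read -- floor the value,
// clamp to 0-9, default to 6 when missing or not a finite number.
function clampCompressionLevel(level?: number): number {
  if (typeof level !== 'number' || !isFinite(level)) {
    return 6 // mirrors DEFAULT_COMPRESSION_LEVEL in tar.ts
  }
  return Math.min(9, Math.max(0, Math.floor(level)))
}

// The clamped level then reaches the compressors through the environment:
// gzip gets GZIP='-<level>' (gzip accepts -0 for "store"), while zstd gets
// ZSTD_CLEVEL bumped to at least 1, since the patch clamps zstd to 1+.
console.log(clampCompressionLevel(-1.7)) // 0
console.log(clampCompressionLevel(12)) // 9
console.log(clampCompressionLevel(undefined)) // 6
```
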
diff --git a/packages/cache/__tests__/options.test.ts b/packages/cache/__tests__/options.test.ts index b4c5a1f170..a1ff61a7fa 100644 --- a/packages/cache/__tests__/options.test.ts +++ b/packages/cache/__tests__/options.test.ts @@ -11,6 +11,14 @@ const downloadConcurrency = 8 const timeoutInMs = 30000 const segmentTimeoutInMs = 600000 const lookupOnly = false +const compressionLevel = 6 + +afterEach(() => { + delete process.env.CACHE_UPLOAD_CONCURRENCY + delete process.env.CACHE_UPLOAD_CHUNK_SIZE + delete process.env.CACHE_COMPRESSION_LEVEL + delete process.env.SEGMENT_DOWNLOAD_TIMEOUT_MINS +}) test('getDownloadOptions sets defaults', async () => { const actualOptions = getDownloadOptions() @@ -44,7 +52,8 @@ test('getUploadOptions sets defaults', async () => { const expectedOptions: UploadOptions = { uploadConcurrency: 4, uploadChunkSize: 32 * 1024 * 1024, - useAzureSdk: false + useAzureSdk: false, + compressionLevel } const actualOptions = getUploadOptions() @@ -55,7 +64,8 @@ test('getUploadOptions overrides all settings', async () => { const expectedOptions: UploadOptions = { uploadConcurrency: 2, uploadChunkSize: 16 * 1024 * 1024, - useAzureSdk: true + useAzureSdk: true, + compressionLevel: 3 } const actualOptions = getUploadOptions(expectedOptions) @@ -67,11 +77,13 @@ test('env variables override all getUploadOptions settings', async () => { const expectedOptions: UploadOptions = { uploadConcurrency: 16, uploadChunkSize: 64 * 1024 * 1024, - useAzureSdk: true + useAzureSdk: true, + compressionLevel: 8 } process.env.CACHE_UPLOAD_CONCURRENCY = '16' process.env.CACHE_UPLOAD_CHUNK_SIZE = '64' + process.env.CACHE_COMPRESSION_LEVEL = '8' const actualOptions = getUploadOptions(expectedOptions) expect(actualOptions).toEqual(expectedOptions) @@ -81,16 +93,32 @@ test('env variables override all getUploadOptions settings but do not exceed cap const expectedOptions: UploadOptions = { uploadConcurrency: 32, uploadChunkSize: 128 * 1024 * 1024, - useAzureSdk: true + useAzureSdk: true, + compressionLevel: 9 } process.env.CACHE_UPLOAD_CONCURRENCY = '64' process.env.CACHE_UPLOAD_CHUNK_SIZE = '256' + process.env.CACHE_COMPRESSION_LEVEL = '12' const actualOptions = getUploadOptions(expectedOptions) expect(actualOptions).toEqual(expectedOptions) }) +test('compression level clamps and floors values', async () => { + const expectedOptions: UploadOptions = { + uploadConcurrency: 4, + uploadChunkSize: 32 * 1024 * 1024, + useAzureSdk: false, + compressionLevel: 0 + } + + process.env.CACHE_COMPRESSION_LEVEL = '-1.7' + + const actualOptions = getUploadOptions() + expect(actualOptions).toEqual(expectedOptions) +}) + test('getDownloadOptions overrides download timeout minutes', async () => { const expectedOptions: DownloadOptions = { useAzureSdk: false, diff --git a/packages/cache/__tests__/saveCache.test.ts b/packages/cache/__tests__/saveCache.test.ts index e0fd7f201b..7dd213580e 100644 --- a/packages/cache/__tests__/saveCache.test.ts +++ b/packages/cache/__tests__/saveCache.test.ts @@ -75,7 +75,8 @@ test('save with large cache outputs should fail', async () => { expect(createTarMock).toHaveBeenCalledWith( archiveFolder, cachePaths, - compression + compression, + 6 ) expect(getCompressionMock).toHaveBeenCalledTimes(1) }) @@ -127,7 +128,8 @@ test('save with large cache outputs should fail in GHES with error message', asy expect(createTarMock).toHaveBeenCalledWith( archiveFolder, cachePaths, - compression + compression, + 6 ) expect(getCompressionMock).toHaveBeenCalledTimes(1) }) @@ -175,7 +177,8 @@ test('save with 
large cache outputs should fail in GHES without error message', expect(createTarMock).toHaveBeenCalledWith( archiveFolder, cachePaths, - compression + compression, + 6 ) expect(getCompressionMock).toHaveBeenCalledTimes(1) }) @@ -277,7 +280,8 @@ test('save with server error should fail', async () => { expect(createTarMock).toHaveBeenCalledWith( archiveFolder, cachePaths, - compression + compression, + 6 ) expect(saveCacheMock).toHaveBeenCalledTimes(1) expect(getCompressionMock).toHaveBeenCalledTimes(1) @@ -324,14 +328,20 @@ test('save with valid inputs uploads a cache', async () => { expect(createTarMock).toHaveBeenCalledWith( archiveFolder, cachePaths, - compression + compression, + 6 ) expect(saveCacheMock).toHaveBeenCalledTimes(1) expect(saveCacheMock).toHaveBeenCalledWith( cacheId, archiveFile, '', - undefined + expect.objectContaining({ + useAzureSdk: false, + uploadConcurrency: 4, + uploadChunkSize: 32 * 1024 * 1024, + compressionLevel: 6 + }) ) expect(getCompressionMock).toHaveBeenCalledTimes(1) }) diff --git a/packages/cache/__tests__/saveCacheV2.test.ts b/packages/cache/__tests__/saveCacheV2.test.ts index 1916e4f81c..ac353cff1f 100644 --- a/packages/cache/__tests__/saveCacheV2.test.ts +++ b/packages/cache/__tests__/saveCacheV2.test.ts @@ -143,7 +143,8 @@ test('save cache fails if a signedUploadURL was not passed', async () => { archiveSizeBytes: archiveFileSize, // These should always match useAzureSdk: true, uploadChunkSize: 64 * 1024 * 1024, - uploadConcurrency: 8 + uploadConcurrency: 8, + compressionLevel: 6 } const createCacheEntryMock = jest @@ -178,7 +179,8 @@ test('save cache fails if a signedUploadURL was not passed', async () => { expect(createTarMock).toHaveBeenCalledWith( archiveFolder, cachePaths, - compression + compression, + 6 ) expect(saveCacheMock).toHaveBeenCalledWith( @@ -201,7 +203,8 @@ test('finalize save cache failure', async () => { archiveSizeBytes: archiveFileSize, // These should always match useAzureSdk: true, uploadChunkSize: 64 * 1024 * 1024, - uploadConcurrency: 8 + uploadConcurrency: 8, + compressionLevel: 6 } const createCacheEntryMock = jest @@ -241,7 +244,8 @@ test('finalize save cache failure', async () => { expect(createTarMock).toHaveBeenCalledWith( archiveFolder, cachePaths, - compression + compression, + 6 ) expect(saveCacheMock).toHaveBeenCalledWith( @@ -275,7 +279,8 @@ test('save with valid inputs uploads a cache', async () => { archiveSizeBytes: archiveFileSize, // These should always match useAzureSdk: true, uploadChunkSize: 64 * 1024 * 1024, - uploadConcurrency: 8 + uploadConcurrency: 8, + compressionLevel: 6 } jest @@ -316,7 +321,8 @@ test('save with valid inputs uploads a cache', async () => { expect(createTarMock).toHaveBeenCalledWith( archiveFolder, cachePaths, - compression + compression, + 6 ) expect(finalizeCacheEntryMock).toHaveBeenCalledWith({ @@ -341,7 +347,8 @@ test('save with extremely large cache should succeed in v2 (no size limit)', asy archiveSizeBytes: archiveFileSize, useAzureSdk: true, uploadChunkSize: 64 * 1024 * 1024, - uploadConcurrency: 8 + uploadConcurrency: 8, + compressionLevel: 6 } jest @@ -382,7 +389,8 @@ test('save with extremely large cache should succeed in v2 (no size limit)', asy expect(createTarMock).toHaveBeenCalledWith( archiveFolder, cachePaths, - compression + compression, + 6 ) expect(finalizeCacheEntryMock).toHaveBeenCalledWith({ @@ -446,7 +454,8 @@ test('save with finalize cache entry failure and specific error message', async archiveSizeBytes: archiveFileSize, useAzureSdk: true, uploadChunkSize: 
64 * 1024 * 1024, - uploadConcurrency: 8 + uploadConcurrency: 8, + compressionLevel: 6 } const createCacheEntryMock = jest @@ -488,7 +497,8 @@ test('save with finalize cache entry failure and specific error message', async expect(createTarMock).toHaveBeenCalledWith( archiveFolder, cachePaths, - compression + compression, + 6 ) expect(saveCacheMock).toHaveBeenCalledWith( @@ -521,7 +531,8 @@ test('save with multiple large caches should succeed in v2 (testing 50GB)', asyn archiveSizeBytes: archiveFileSize, useAzureSdk: true, uploadChunkSize: 64 * 1024 * 1024, - uploadConcurrency: 8 + uploadConcurrency: 8, + compressionLevel: 6 } jest @@ -562,7 +573,8 @@ test('save with multiple large caches should succeed in v2 (testing 50GB)', asyn expect(createTarMock).toHaveBeenCalledWith( archiveFolder, cachePaths, - compression + compression, + 6 ) expect(finalizeCacheEntryMock).toHaveBeenCalledWith({ diff --git a/packages/cache/__tests__/tar.test.ts b/packages/cache/__tests__/tar.test.ts index 4145d9a946..0fe85090e3 100644 --- a/packages/cache/__tests__/tar.test.ts +++ b/packages/cache/__tests__/tar.test.ts @@ -11,8 +11,7 @@ import { } from '../src/internal/constants' import * as tar from '../src/internal/tar' import * as utils from '../src/internal/cacheUtils' -// eslint-disable-next-line @typescript-eslint/no-require-imports -import fs = require('fs') +import fs from 'fs' jest.mock('@actions/exec') jest.mock('@actions/io') @@ -233,13 +232,16 @@ test('zstd create tar', async () => { .concat(IS_MAC ? ['--delay-directory-restore'] : []) .concat([ '--use-compress-program', - IS_WINDOWS ? '"zstd -T0 --long=30"' : 'zstdmt --long=30' + IS_WINDOWS ? '"zstd -T0 --long=30 -6"' : 'zstdmt --long=30 -6' ]) .join(' '), undefined, // args { cwd: archiveFolder, - env: expect.objectContaining(defaultEnv) + env: expect.objectContaining({ + ...defaultEnv, + ZSTD_CLEVEL: '6' + }) } ) }) @@ -285,21 +287,27 @@ test('zstd create tar with windows BSDtar', async () => { undefined, // args { cwd: archiveFolder, - env: expect.objectContaining(defaultEnv) + env: expect.objectContaining({ + ...defaultEnv, + ZSTD_CLEVEL: '6' + }) } ) expect(execMock).toHaveBeenNthCalledWith( 2, [ - 'zstd -T0 --long=30 --force -o', + 'zstd -T0 --long=30 --force -6 -o', CacheFilename.Zstd.replace(/\\/g, '/'), TarFilename.replace(/\\/g, '/') ].join(' '), undefined, // args { cwd: archiveFolder, - env: expect.objectContaining(defaultEnv) + env: expect.objectContaining({ + ...defaultEnv, + ZSTD_CLEVEL: '6' + }) } ) } @@ -340,11 +348,118 @@ test('gzip create tar', async () => { undefined, // args { cwd: archiveFolder, - env: expect.objectContaining(defaultEnv) + env: expect.objectContaining({ + ...defaultEnv, + GZIP: '-6' + }) } ) }) +test('compression level controls zstd archive size', async () => { + const sourceDirectories = ['a'] + + const runAndGetSize = async ( + level: number, + folder: string + ): Promise<{size: number; env?: NodeJS.ProcessEnv}> => { + await fs.promises.mkdir(folder, {recursive: true}) + + let capturedEnv: NodeJS.ProcessEnv | undefined + const execMock = jest + .spyOn(exec, 'exec') + .mockImplementation(async (_cmd, _args, options) => { + capturedEnv = options?.env + const target = path.join( + options?.cwd ?? 
'',
+          utils.getCacheFileName(CompressionMethod.Zstd)
+        )
+        const size = Math.max(1, 2000 - level * 100)
+        await fs.promises.writeFile(target, Buffer.alloc(size, 1))
+        return 0
+      })
+
+    await tar.createTar(
+      folder,
+      sourceDirectories,
+      CompressionMethod.Zstd,
+      level
+    )
+    execMock.mockRestore()
+    const {size} = await fs.promises.stat(
+      path.join(folder, utils.getCacheFileName(CompressionMethod.Zstd))
+    )
+
+    return {size, env: capturedEnv}
+  }
+
+  const {size: size0, env: env0} = await runAndGetSize(
+    0,
+    path.join(getTempDir(), 'zstd-0')
+  )
+  const {size: size9, env: env9} = await runAndGetSize(
+    9,
+    path.join(getTempDir(), 'zstd-9')
+  )
+
+  expect(size0).toBe(2000)
+  expect(size9).toBe(1100)
+  expect(env0?.ZSTD_CLEVEL).toBe('1')
+  expect(env9?.ZSTD_CLEVEL).toBe('9')
+})
+
+test('compression level controls gzip archive size', async () => {
+  const sourceDirectories = ['a']
+
+  const runAndGetSize = async (
+    level: number,
+    folder: string
+  ): Promise<{size: number; env?: NodeJS.ProcessEnv}> => {
+    await fs.promises.mkdir(folder, {recursive: true})
+
+    let capturedEnv: NodeJS.ProcessEnv | undefined
+    const execMock = jest
+      .spyOn(exec, 'exec')
+      .mockImplementation(async (_cmd, _args, options) => {
+        capturedEnv = options?.env
+        const target = path.join(
+          options?.cwd ?? '',
+          utils.getCacheFileName(CompressionMethod.Gzip)
+        )
+        const size = Math.max(1, 2000 - level * 100)
+        await fs.promises.writeFile(target, Buffer.alloc(size, 1))
+        return 0
+      })
+
+    await tar.createTar(
+      folder,
+      sourceDirectories,
+      CompressionMethod.Gzip,
+      level
+    )
+    execMock.mockRestore()
+    const {size} = await fs.promises.stat(
+      path.join(folder, utils.getCacheFileName(CompressionMethod.Gzip))
+    )
+
+    return {size, env: capturedEnv}
+  }
+
+  const {size: size0, env: env0} = await runAndGetSize(
+    0,
+    path.join(getTempDir(), 'gzip-0')
+  )
+  const {size: size9, env: env9} = await runAndGetSize(
+    9,
+    path.join(getTempDir(), 'gzip-9')
+  )
+
+  expect(size0).toBe(2000)
+  expect(size9).toBe(1100)
+  expect(env0?.GZIP).toBe('-0')
+  expect(env9?.GZIP).toBe('-9')
+})
+
 test('zstd list tar', async () => {
   const execMock = jest.spyOn(exec, 'exec')
 
diff --git a/packages/cache/src/cache.ts b/packages/cache/src/cache.ts
index 1cc910f0a9..e4c7269693 100644
--- a/packages/cache/src/cache.ts
+++ b/packages/cache/src/cache.ts
@@ -4,7 +4,7 @@ import * as utils from './internal/cacheUtils'
 import * as cacheHttpClient from './internal/cacheHttpClient'
 import * as cacheTwirpClient from './internal/shared/cacheTwirpClient'
 import {getCacheServiceVersion, isGhes} from './internal/config'
-import {DownloadOptions, UploadOptions} from './options'
+import {DownloadOptions, UploadOptions, getUploadOptions} from './options'
 import {createTar, extractTar, listTar} from './internal/tar'
 import {
   CreateCacheEntryRequest,
@@ -337,7 +337,7 @@ async function restoreCacheV2(
     if (typedError.name === ValidationError.name) {
       throw error
     } else {
-      // Supress all non-validation cache related errors because caching should be optional
+      // Suppress all non-validation cache related errors because caching should be optional
       // Log server errors (5xx) as errors, all other errors as warnings
       if (
         typedError instanceof HttpClientError &&
@@ -406,6 +406,7 @@ async function saveCacheV1(
   enableCrossOsArchive = false
 ): Promise<number> {
   const compressionMethod = await utils.getCompressionMethod()
+  const uploadOptions = getUploadOptions(options)
   let cacheId = -1
 
   const cachePaths = await utils.resolvePaths(paths)
@@ -427,7 +428,12 @@ async function 
saveCacheV1( core.debug(`Archive Path: ${archivePath}`) try { - await createTar(archiveFolder, cachePaths, compressionMethod) + await createTar( + archiveFolder, + cachePaths, + compressionMethod, + uploadOptions.compressionLevel + ) if (core.isDebug()) { await listTar(archivePath, compressionMethod) } @@ -471,7 +477,7 @@ async function saveCacheV1( } core.debug(`Saving Cache (ID: ${cacheId})`) - await cacheHttpClient.saveCache(cacheId, archivePath, '', options) + await cacheHttpClient.saveCache(cacheId, archivePath, '', uploadOptions) } catch (error) { const typedError = error as Error if (typedError.name === ValidationError.name) { @@ -520,12 +526,12 @@ async function saveCacheV2( // Override UploadOptions to force the use of Azure // ...options goes first because we want to override the default values // set in UploadOptions with these specific figures - options = { + const uploadOptions = getUploadOptions({ ...options, uploadChunkSize: 64 * 1024 * 1024, // 64 MiB uploadConcurrency: 8, // 8 workers for parallel upload useAzureSdk: true - } + }) const compressionMethod = await utils.getCompressionMethod() const twirpClient = cacheTwirpClient.internalCacheTwirpClient() let cacheId = -1 @@ -549,7 +555,12 @@ async function saveCacheV2( core.debug(`Archive Path: ${archivePath}`) try { - await createTar(archiveFolder, cachePaths, compressionMethod) + await createTar( + archiveFolder, + cachePaths, + compressionMethod, + uploadOptions.compressionLevel + ) if (core.isDebug()) { await listTar(archivePath, compressionMethod) } @@ -558,7 +569,7 @@ async function saveCacheV2( core.debug(`File Size: ${archiveFileSize}`) // Set the archive size in the options, will be used to display the upload progress - options.archiveSizeBytes = archiveFileSize + uploadOptions.archiveSizeBytes = archiveFileSize core.debug('Reserving Cache') const version = utils.getCacheVersion( @@ -594,7 +605,7 @@ async function saveCacheV2( cacheId, archivePath, signedUploadUrl, - options + uploadOptions ) const finalizeRequest: FinalizeCacheEntryUploadRequest = { diff --git a/packages/cache/src/internal/cacheHttpClient.ts b/packages/cache/src/internal/cacheHttpClient.ts index 2470555bb1..910f0cd76e 100644 --- a/packages/cache/src/internal/cacheHttpClient.ts +++ b/packages/cache/src/internal/cacheHttpClient.ts @@ -107,7 +107,7 @@ export async function getCacheEntry( const cacheResult = response.result const cacheDownloadUrl = cacheResult?.archiveLocation if (!cacheDownloadUrl) { - // Cache achiveLocation not found. This should never happen, and hence bail out. + // Cache archiveLocation not found. This should never happen, and hence bail out. 
throw new Error('Cache not found.')
     }
     core.setSecret(cacheDownloadUrl)
@@ -347,7 +347,7 @@ export async function saveCache(
   await uploadFile(httpClient, cacheId, archivePath, options)
 
   // Commit Cache
-  core.debug('Commiting cache')
+  core.debug('Committing cache')
   const cacheSize = utils.getArchiveFileSizeInBytes(archivePath)
   core.info(
     `Cache Size: ~${Math.round(
diff --git a/packages/cache/src/internal/tar.ts b/packages/cache/src/internal/tar.ts
index adf610694f..6c4da656d3 100644
--- a/packages/cache/src/internal/tar.ts
+++ b/packages/cache/src/internal/tar.ts
@@ -13,6 +13,15 @@ import {
 } from './constants'
 
 const IS_WINDOWS = process.platform === 'win32'
+const DEFAULT_COMPRESSION_LEVEL = 6
+
+function normalizeCompressionLevel(level?: number): number {
+  if (typeof level !== 'number' || !isFinite(level)) {
+    return DEFAULT_COMPRESSION_LEVEL
+  }
+
+  return Math.min(9, Math.max(0, Math.floor(level)))
+}
 
 // Returns tar path and type: BSD or GNU
 async function getTarPath(): Promise<ArchiveTool> {
@@ -61,7 +70,7 @@ async function getTarArgs(
   const cacheFileName = utils.getCacheFileName(compressionMethod)
   const tarFile = 'cache.tar'
   const workingDirectory = getWorkingDirectory()
-  // Speficic args for BSD tar on windows for workaround
+  // Specific args for BSD tar on windows for workaround
   const BSD_TAR_ZSTD =
     tarPath.type === ArchiveToolType.BSD &&
     compressionMethod !== CompressionMethod.Gzip &&
@@ -128,10 +137,13 @@ async function getCommands(
   compressionMethod: CompressionMethod,
   type: string,
-  archivePath = ''
+  archivePath = '',
+  compressionLevel = DEFAULT_COMPRESSION_LEVEL
 ): Promise<string[]> {
   let args
 
+  const normalizedCompressionLevel = normalizeCompressionLevel(compressionLevel)
+
   const tarPath = await getTarPath()
   const tarArgs = await getTarArgs(
     tarPath,
@@ -142,7 +154,11 @@ async function getCommands(
   const compressionArgs =
     type !== 'create'
       ? await getDecompressionProgram(tarPath, compressionMethod, archivePath)
-      : await getCompressionProgram(tarPath, compressionMethod)
+      : await getCompressionProgram(
+          tarPath,
+          compressionMethod,
+          normalizedCompressionLevel
+        )
   const BSD_TAR_ZSTD =
     tarPath.type === ArchiveToolType.BSD &&
     compressionMethod !== CompressionMethod.Gzip &&
@@ -212,9 +228,12 @@ async function getDecompressionProgram(
 // Long range mode is added to zstd in v1.3.2 release, so we will not use --long in older version of zstd.
 async function getCompressionProgram(
   tarPath: ArchiveTool,
-  compressionMethod: CompressionMethod
+  compressionMethod: CompressionMethod,
+  compressionLevel = DEFAULT_COMPRESSION_LEVEL
 ): Promise<string[]> {
   const cacheFileName = utils.getCacheFileName(compressionMethod)
+  const normalizedCompressionLevel = normalizeCompressionLevel(compressionLevel)
+  const zstdCompressionLevel = Math.max(1, normalizedCompressionLevel)
   const BSD_TAR_ZSTD =
     tarPath.type === ArchiveToolType.BSD &&
     compressionMethod !== CompressionMethod.Gzip &&
@@ -223,34 +242,49 @@ async function getCompressionProgram(
     case CompressionMethod.Zstd:
       return BSD_TAR_ZSTD
         ? [
-            'zstd -T0 --long=30 --force -o',
+            `zstd -T0 --long=30 --force -${zstdCompressionLevel} -o`,
             cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
             TarFilename
           ]
         : [
             '--use-compress-program',
-            IS_WINDOWS ? '"zstd -T0 --long=30"' : 'zstdmt --long=30'
+            IS_WINDOWS
+              ? `"zstd -T0 --long=30 -${zstdCompressionLevel}"`
+              : `zstdmt --long=30 -${zstdCompressionLevel}`
           ]
     case CompressionMethod.ZstdWithoutLong:
       return BSD_TAR_ZSTD
         ? 
[
-            'zstd -T0 --force -o',
+            `zstd -T0 --force -${zstdCompressionLevel} -o`,
             cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
             TarFilename
           ]
-        : ['--use-compress-program', IS_WINDOWS ? '"zstd -T0"' : 'zstdmt']
+        : [
+            '--use-compress-program',
+            IS_WINDOWS
+              ? `"zstd -T0 -${zstdCompressionLevel}"`
+              : `zstdmt -${zstdCompressionLevel}`
+          ]
     default:
       return ['-z']
   }
 }
 
 // Executes all commands as separate processes
-async function execCommands(commands: string[], cwd?: string): Promise<void> {
+async function execCommands(
+  commands: string[],
+  cwd?: string,
+  extraEnv?: NodeJS.ProcessEnv
+): Promise<void> {
   for (const command of commands) {
     try {
       await exec(command, undefined, {
         cwd,
-        env: {...(process.env as object), MSYS: 'winsymlinks:nativestrict'}
+        env: {
+          ...(process.env as object),
+          MSYS: 'winsymlinks:nativestrict',
+          ...extraEnv
+        }
       })
     } catch (error) {
       throw new Error(
@@ -285,13 +319,39 @@ export async function extractTar(
 export async function createTar(
   archiveFolder: string,
   sourceDirectories: string[],
-  compressionMethod: CompressionMethod
+  compressionMethod: CompressionMethod,
+  compressionLevel = DEFAULT_COMPRESSION_LEVEL
 ): Promise<void> {
+  const normalizedCompressionLevel = normalizeCompressionLevel(compressionLevel)
   // Write source directories to manifest.txt to avoid command length limits
   writeFileSync(
     path.join(archiveFolder, ManifestFilename),
     sourceDirectories.join('\n')
   )
-  const commands = await getCommands(compressionMethod, 'create')
-  await execCommands(commands, archiveFolder)
+  const commands = await getCommands(
+    compressionMethod,
+    'create',
+    '',
+    normalizedCompressionLevel
+  )
+  const compressionEnv = getCompressionEnv(
+    compressionMethod,
+    normalizedCompressionLevel
+  )
+  await execCommands(commands, archiveFolder, compressionEnv)
+}
+
+function getCompressionEnv(
+  compressionMethod: CompressionMethod,
+  compressionLevel: number
+): NodeJS.ProcessEnv | undefined {
+  switch (compressionMethod) {
+    case CompressionMethod.Gzip:
+      return {GZIP: `-${compressionLevel}`}
+    case CompressionMethod.Zstd:
+    case CompressionMethod.ZstdWithoutLong:
+      return {ZSTD_CLEVEL: `${Math.max(1, compressionLevel)}`}
+    default:
+      return undefined
+  }
 }
diff --git a/packages/cache/src/options.ts b/packages/cache/src/options.ts
index 3e4063f279..58b647ab18 100644
--- a/packages/cache/src/options.ts
+++ b/packages/cache/src/options.ts
@@ -24,6 +24,14 @@ export interface UploadOptions {
    * @default 32MB
    */
   uploadChunkSize?: number
+  /**
+   * Compression level to use when creating the cache archive
+   *
+   * Range: 0 (no compression) to 9 (maximum compression)
+   *
+   * @default 6
+   */
+  compressionLevel?: number
   /**
    * Archive size in bytes
    */
@@ -92,7 +100,8 @@ export function getUploadOptions(copy?: UploadOptions): UploadOptions {
   const result: UploadOptions = {
     useAzureSdk: false,
     uploadConcurrency: 4,
-    uploadChunkSize: 32 * 1024 * 1024
+    uploadChunkSize: 32 * 1024 * 1024,
+    compressionLevel: 6
   }
 
   if (copy) {
@@ -107,6 +116,10 @@ export function getUploadOptions(copy?: UploadOptions): UploadOptions {
     if (typeof copy.uploadChunkSize === 'number') {
       result.uploadChunkSize = copy.uploadChunkSize
     }
+
+    if (typeof copy.compressionLevel === 'number') {
+      result.compressionLevel = copy.compressionLevel
+    }
   }
 
   /**
@@ -128,9 +141,21 @@ export function getUploadOptions(copy?: UploadOptions): UploadOptions {
       )
     : result.uploadChunkSize
 
+  // Clamp the compression level between 0 and 9
+  const envCompressionLevel = Number(process.env['CACHE_COMPRESSION_LEVEL'])
+  if (!isNaN(envCompressionLevel)) 
{ + result.compressionLevel = envCompressionLevel + } + const normalizedCompressionLevel = Math.min( + 9, + Math.max(0, Math.floor(result.compressionLevel ?? 6)) + ) + result.compressionLevel = normalizedCompressionLevel + core.debug(`Use Azure SDK: ${result.useAzureSdk}`) core.debug(`Upload concurrency: ${result.uploadConcurrency}`) core.debug(`Upload chunk size: ${result.uploadChunkSize}`) + core.debug(`Compression level: ${result.compressionLevel}`) return result } From 897cd99e447303452e8bbae568bad9021daa347c Mon Sep 17 00:00:00 2001 From: Stephen Hodgson Date: Sun, 14 Dec 2025 16:36:55 -0500 Subject: [PATCH 2/4] no compression as plain tar --- packages/artifact/package-lock.json | 4 +-- packages/cache/README.md | 2 +- packages/cache/__tests__/tar.test.ts | 44 +++++++++++++++++++++++ packages/cache/package-lock.json | 4 +-- packages/cache/src/cache.ts | 8 +++-- packages/cache/src/internal/cacheUtils.ts | 39 ++++++++++++++++---- packages/cache/src/internal/constants.ts | 6 ++-- packages/cache/src/internal/tar.ts | 38 ++++++++++++-------- 8 files changed, 115 insertions(+), 30 deletions(-) diff --git a/packages/artifact/package-lock.json b/packages/artifact/package-lock.json index 18a29ce111..d46ce5a562 100644 --- a/packages/artifact/package-lock.json +++ b/packages/artifact/package-lock.json @@ -1,12 +1,12 @@ { "name": "@actions/artifact", - "version": "5.0.0", + "version": "5.0.1", "lockfileVersion": 3, "requires": true, "packages": { "": { "name": "@actions/artifact", - "version": "5.0.0", + "version": "5.0.1", "license": "MIT", "dependencies": { "@actions/core": "^2.0.0", diff --git a/packages/cache/README.md b/packages/cache/README.md index 5102dec2a8..09847321e8 100644 --- a/packages/cache/README.md +++ b/packages/cache/README.md @@ -38,7 +38,7 @@ const key = 'npm-foobar-d5ea0750' const cacheId = await cache.saveCache(paths, key) ``` -You can control archive compression when saving. Provide `compressionLevel` in `UploadOptions` (0 = no compression, 9 = maximum, default = 6) or set the `CACHE_COMPRESSION_LEVEL` environment variable: +You can control archive compression when saving. Provide `compressionLevel` in `UploadOptions` (0 = no compression/plain tar, 9 = maximum, default = 6) or set the `CACHE_COMPRESSION_LEVEL` environment variable: ```js const cacheId = await cache.saveCache(paths, key, {compressionLevel: 3}) diff --git a/packages/cache/__tests__/tar.test.ts b/packages/cache/__tests__/tar.test.ts index 0fe85090e3..daa651df20 100644 --- a/packages/cache/__tests__/tar.test.ts +++ b/packages/cache/__tests__/tar.test.ts @@ -356,6 +356,50 @@ test('gzip create tar', async () => { ) }) +test('tar create without compression', async () => { + const execMock = jest.spyOn(exec, 'exec') + + const archiveFolder = getTempDir() + const workspace = process.env['GITHUB_WORKSPACE'] + const sourceDirectories = ['~/.npm/cache', `${workspace}/dist`] + + await fs.promises.mkdir(archiveFolder, {recursive: true}) + + await tar.createTar( + archiveFolder, + sourceDirectories, + CompressionMethod.Tar, + 0 + ) + + const tarPath = IS_WINDOWS ? GnuTarPathOnWindows : defaultTarPath + + expect(execMock).toHaveBeenCalledTimes(1) + expect(execMock).toHaveBeenCalledWith( + [ + `"${tarPath}"`, + '--posix', + '-cf', + TarFilename.replace(/\\/g, '/'), + '--exclude', + TarFilename.replace(/\\/g, '/'), + '-P', + '-C', + IS_WINDOWS ? workspace?.replace(/\\/g, '/') : workspace, + '--files-from', + ManifestFilename + ] + .concat(IS_WINDOWS ? ['--force-local'] : []) + .concat(IS_MAC ? 
['--delay-directory-restore'] : [])
+      .join(' '),
+    undefined,
+    {
+      cwd: archiveFolder,
+      env: expect.objectContaining(defaultEnv)
+    }
+  )
+})
+
 test('compression level controls zstd archive size', async () => {
   const sourceDirectories = ['a']
 
diff --git a/packages/cache/package-lock.json b/packages/cache/package-lock.json
index 2fc455fcfa..5b6bc5fcfc 100644
--- a/packages/cache/package-lock.json
+++ b/packages/cache/package-lock.json
@@ -1,12 +1,12 @@
 {
   "name": "@actions/cache",
-  "version": "5.0.0",
+  "version": "5.0.1",
   "lockfileVersion": 3,
   "requires": true,
   "packages": {
     "": {
       "name": "@actions/cache",
-      "version": "5.0.0",
+      "version": "5.0.1",
       "license": "MIT",
       "dependencies": {
         "@actions/core": "^2.0.0",
diff --git a/packages/cache/src/cache.ts b/packages/cache/src/cache.ts
index e4c7269693..85ae86e6c6 100644
--- a/packages/cache/src/cache.ts
+++ b/packages/cache/src/cache.ts
@@ -405,8 +405,10 @@ async function saveCacheV1(
   options?: UploadOptions,
   enableCrossOsArchive = false
 ): Promise<number> {
-  const compressionMethod = await utils.getCompressionMethod()
   const uploadOptions = getUploadOptions(options)
+  const compressionMethod = await utils.getCompressionMethod(
+    uploadOptions.compressionLevel
+  )
   let cacheId = -1
 
   const cachePaths = await utils.resolvePaths(paths)
@@ -532,7 +534,9 @@ async function saveCacheV2(
     uploadConcurrency: 8, // 8 workers for parallel upload
     useAzureSdk: true
   })
-  const compressionMethod = await utils.getCompressionMethod()
+  const compressionMethod = await utils.getCompressionMethod(
+    uploadOptions.compressionLevel
+  )
   const twirpClient = cacheTwirpClient.internalCacheTwirpClient()
   let cacheId = -1
 
diff --git a/packages/cache/src/internal/cacheUtils.ts b/packages/cache/src/internal/cacheUtils.ts
index de9053eae0..c813561e64 100644
--- a/packages/cache/src/internal/cacheUtils.ts
+++ b/packages/cache/src/internal/cacheUtils.ts
@@ -10,7 +10,8 @@ import * as util from 'util'
 import {
   CacheFilename,
   CompressionMethod,
-  GnuTarPathOnWindows
+  GnuTarPathOnWindows,
+  TarFilename
 } from './constants'
 
 const versionSalt = '1.0'
@@ -98,8 +99,30 @@ async function getVersion(
   return versionOutput
 }
 
-// Use zstandard if possible to maximize cache performance
-export async function getCompressionMethod(): Promise<CompressionMethod> {
+function normalizeCompressionLevel(level?: number): number {
+  if (typeof level === 'number' && isFinite(level)) {
+    return Math.min(9, Math.max(0, Math.floor(level)))
+  }
+
+  const envCompressionLevel = Number(process.env['CACHE_COMPRESSION_LEVEL'])
+  if (!isNaN(envCompressionLevel)) {
+    return Math.min(9, Math.max(0, Math.floor(envCompressionLevel)))
+  }
+
+  return 6
+}
+
+// Use zstandard if possible to maximize cache performance. When compression
+// level is explicitly 0, skip compression and create a plain tar archive.
+export async function getCompressionMethod(
+  compressionLevel?: number
+): Promise<CompressionMethod> {
+  const normalizedCompressionLevel = normalizeCompressionLevel(compressionLevel)
+  if (normalizedCompressionLevel === 0) {
+    core.debug('Compression level 0 detected; using uncompressed tar')
+    return CompressionMethod.Tar
+  }
+
   const versionOutput = await getVersion('zstd', ['--quiet'])
   const version = semver.clean(versionOutput)
   core.debug(`zstd version: ${version}`)
@@ -112,9 +135,13 @@ export async function getCompressionMethod(
 }
 
 export function getCacheFileName(compressionMethod: CompressionMethod): string {
-  return compressionMethod === CompressionMethod.Gzip
-    ? 
CacheFilename.Gzip
-    : CacheFilename.Zstd
+  if (compressionMethod === CompressionMethod.Gzip) {
+    return CacheFilename.Gzip
+  }
+  if (compressionMethod === CompressionMethod.Tar) {
+    return TarFilename
+  }
+  return CacheFilename.Zstd
 }
 
 export async function getGnuTarPathOnWindows(): Promise<string> {
diff --git a/packages/cache/src/internal/constants.ts b/packages/cache/src/internal/constants.ts
index 8c5d1ee440..7660610e84 100644
--- a/packages/cache/src/internal/constants.ts
+++ b/packages/cache/src/internal/constants.ts
@@ -1,6 +1,7 @@
 export enum CacheFilename {
   Gzip = 'cache.tgz',
-  Zstd = 'cache.tzst'
+  Zstd = 'cache.tzst',
+  Tar = 'cache.tar'
 }
 
 export enum CompressionMethod {
@@ -8,7 +9,8 @@ export enum CompressionMethod {
   // Long range mode was added to zstd in v1.3.2.
   // This enum is for earlier version of zstd that does not have --long support
   ZstdWithoutLong = 'zstd-without-long',
-  Zstd = 'zstd'
+  Zstd = 'zstd',
+  Tar = 'tar'
 }
 
 export enum ArchiveToolType {
diff --git a/packages/cache/src/internal/tar.ts b/packages/cache/src/internal/tar.ts
index 483a9d25f8..a1ea70e301 100644
--- a/packages/cache/src/internal/tar.ts
+++ b/packages/cache/src/internal/tar.ts
@@ -73,8 +73,9 @@ async function getTarArgs(
   // Specific args for BSD tar on windows for workaround
   const BSD_TAR_ZSTD =
     tarPath.type === ArchiveToolType.BSD &&
-    compressionMethod !== CompressionMethod.Gzip &&
-    IS_WINDOWS
+    IS_WINDOWS &&
+    (compressionMethod === CompressionMethod.Zstd ||
+      compressionMethod === CompressionMethod.ZstdWithoutLong)
 
   // Method specific args
   switch (type) {
@@ -140,8 +141,6 @@ async function getCommands(
   archivePath = '',
   compressionLevel = DEFAULT_COMPRESSION_LEVEL
 ): Promise<string[]> {
-  let args
-
   const normalizedCompressionLevel = normalizeCompressionLevel(compressionLevel)
 
   const tarPath = await getTarPath()
@@ -161,20 +160,22 @@ async function getCommands(
   const BSD_TAR_ZSTD =
     tarPath.type === ArchiveToolType.BSD &&
-    compressionMethod !== CompressionMethod.Gzip &&
-    IS_WINDOWS
+    IS_WINDOWS &&
+    (compressionMethod === CompressionMethod.Zstd ||
+      compressionMethod === CompressionMethod.ZstdWithoutLong)
 
-  if (BSD_TAR_ZSTD && type !== 'create') {
-    args = [[...compressionArgs].join(' '), [...tarArgs].join(' ')]
-  } else {
-    args = [[...tarArgs].join(' '), [...compressionArgs].join(' ')]
-  }
+  const commandParts =
+    BSD_TAR_ZSTD && type !== 'create'
+      ? [[...compressionArgs].join(' '), [...tarArgs].join(' ')]
+      : [[...tarArgs].join(' '), [...compressionArgs].join(' ')]
+
+  const commands = commandParts.filter(part => part.trim().length > 0)
 
   if (BSD_TAR_ZSTD) {
-    return args
+    return commands
   }
 
-  return [args.join(' ')]
+  return [commands.join(' ')]
 }
 
 function getWorkingDirectory(): string {
@@ -193,8 +194,9 @@ async function getDecompressionProgram(
   // Using 30 here because we also support 32-bit self-hosted runners.
   const BSD_TAR_ZSTD =
     tarPath.type === ArchiveToolType.BSD &&
-    compressionMethod !== CompressionMethod.Gzip &&
-    IS_WINDOWS
+    IS_WINDOWS &&
+    (compressionMethod === CompressionMethod.Zstd ||
+      compressionMethod === CompressionMethod.ZstdWithoutLong)
   switch (compressionMethod) {
     case CompressionMethod.Zstd:
       return BSD_TAR_ZSTD
@@ -215,6 +217,8 @@ async function getDecompressionProgram(
           archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/')
         ]
       : ['--use-compress-program', IS_WINDOWS ? '"zstd -d"' : 'unzstd']
+    case CompressionMethod.Tar:
+      return []
     default:
       return ['-z']
   }
@@ -265,6 +269,8 @@ async function getCompressionProgram(
             ? 
`"zstd -T0 -${zstdCompressionLevel}"` : `zstdmt -${zstdCompressionLevel}` ] + case CompressionMethod.Tar: + return [] default: return ['-z'] } @@ -351,6 +357,8 @@ function getCompressionEnv( case CompressionMethod.Zstd: case CompressionMethod.ZstdWithoutLong: return {ZSTD_CLEVEL: `${Math.max(1, compressionLevel)}`} + case CompressionMethod.Tar: + return undefined default: return undefined } From e82e5d7762c449f05b7a8ca9f8db6c2486a1dfe0 Mon Sep 17 00:00:00 2001 From: Stephen Hodgson Date: Sun, 14 Dec 2025 16:44:46 -0500 Subject: [PATCH 3/4] PR feedback --- packages/cache/src/options.ts | 18 ++++++++++-------- 1 file changed, 10 insertions(+), 8 deletions(-) diff --git a/packages/cache/src/options.ts b/packages/cache/src/options.ts index 58b647ab18..b45a9624da 100644 --- a/packages/cache/src/options.ts +++ b/packages/cache/src/options.ts @@ -142,15 +142,17 @@ export function getUploadOptions(copy?: UploadOptions): UploadOptions { : result.uploadChunkSize // Clamp the compression level between 0 and 9 - const envCompressionLevel = Number(process.env['CACHE_COMPRESSION_LEVEL']) - if (!isNaN(envCompressionLevel)) { - result.compressionLevel = envCompressionLevel - } - const normalizedCompressionLevel = Math.min( - 9, - Math.max(0, Math.floor(result.compressionLevel ?? 6)) + result.compressionLevel = !isNaN( + Number(process.env['CACHE_COMPRESSION_LEVEL']) ) - result.compressionLevel = normalizedCompressionLevel + ? Math.min( + 9, + Math.max( + 0, + Math.floor(Number(process.env['CACHE_COMPRESSION_LEVEL'])) + ) + ) + : Math.min(9, Math.max(0, Math.floor(result.compressionLevel ?? 6))) core.debug(`Use Azure SDK: ${result.useAzureSdk}`) core.debug(`Upload concurrency: ${result.uploadConcurrency}`) From eda4d47bb50172af991fa745e73ee6576e5de0bf Mon Sep 17 00:00:00 2001 From: Stephen Hodgson Date: Sun, 14 Dec 2025 16:48:43 -0500 Subject: [PATCH 4/4] update clamp --- packages/cache/src/internal/tar.ts | 2 ++ packages/cache/src/options.ts | 20 ++++++++++---------- 2 files changed, 12 insertions(+), 10 deletions(-) diff --git a/packages/cache/src/internal/tar.ts b/packages/cache/src/internal/tar.ts index 483a9d25f8..a1ea70e301 100644 --- a/packages/cache/src/internal/tar.ts +++ b/packages/cache/src/internal/tar.ts @@ -237,6 +237,8 @@ async function getCompressionProgram( ): Promise { const cacheFileName = utils.getCacheFileName(compressionMethod) const normalizedCompressionLevel = normalizeCompressionLevel(compressionLevel) + // zstd treats level 0 as invalid (gzip supports it), so clamp zstd to 1+ to avoid errors + // See: https://github.com/facebook/zstd/issues/1680 const zstdCompressionLevel = Math.max(1, normalizedCompressionLevel) const BSD_TAR_ZSTD = tarPath.type === ArchiveToolType.BSD && diff --git a/packages/cache/src/options.ts b/packages/cache/src/options.ts index b45a9624da..3f2edec349 100644 --- a/packages/cache/src/options.ts +++ b/packages/cache/src/options.ts @@ -142,17 +142,17 @@ export function getUploadOptions(copy?: UploadOptions): UploadOptions { : result.uploadChunkSize // Clamp the compression level between 0 and 9 - result.compressionLevel = !isNaN( - Number(process.env['CACHE_COMPRESSION_LEVEL']) - ) - ? Math.min( - 9, - Math.max( - 0, - Math.floor(Number(process.env['CACHE_COMPRESSION_LEVEL'])) - ) + result.compressionLevel = Math.min( + 9, + Math.max( + 0, + Math.floor( + !isNaN(Number(process.env['CACHE_COMPRESSION_LEVEL'])) + ? Number(process.env['CACHE_COMPRESSION_LEVEL']) + : result.compressionLevel ?? 6 ) - : Math.min(9, Math.max(0, Math.floor(result.compressionLevel ?? 
6))) + ) + ) core.debug(`Use Azure SDK: ${result.useAzureSdk}`) core.debug(`Upload concurrency: ${result.uploadConcurrency}`)
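
The final form of the clamp reads the environment first and only then falls back to the caller's option, flooring and bounding either source to 0-9. A standalone check of that expression's behavior (the helper below is hypothetical and only mirrors the expression's shape, not the package API):

```ts
// Illustrative check of the final clamp in getUploadOptions (PATCH 4).
function clampUploadLevel(env?: string, option?: number): number {
  return Math.min(
    9,
    Math.max(0, Math.floor(!isNaN(Number(env)) ? Number(env) : option ?? 6))
  )
}

console.log(clampUploadLevel('12')) // 9: env value above the cap clamps down
console.log(clampUploadLevel('-1.7')) // 0: floors to -2, then clamps up to 0
console.log(clampUploadLevel(undefined, 3)) // 3: no env, caller's option wins
console.log(clampUploadLevel()) // 6: nothing set, default applies
```

One corner case worth noting: `Number('') === 0`, so a `CACHE_COMPRESSION_LEVEL` that is set but empty clamps to 0 (and, after the second patch, selects a plain tar) rather than falling back to the default.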