diff --git a/lib/init-action-post.js b/lib/init-action-post.js index 6b62621a18..8c223911c6 100644 --- a/lib/init-action-post.js +++ b/lib/init-action-post.js @@ -1337,14 +1337,14 @@ var require_util = __commonJS({ } const port = url2.port != null ? url2.port : url2.protocol === "https:" ? 443 : 80; let origin = url2.origin != null ? url2.origin : `${url2.protocol || ""}//${url2.hostname || ""}:${port}`; - let path18 = url2.path != null ? url2.path : `${url2.pathname || ""}${url2.search || ""}`; + let path19 = url2.path != null ? url2.path : `${url2.pathname || ""}${url2.search || ""}`; if (origin[origin.length - 1] === "/") { origin = origin.slice(0, origin.length - 1); } - if (path18 && path18[0] !== "/") { - path18 = `/${path18}`; + if (path19 && path19[0] !== "/") { + path19 = `/${path19}`; } - return new URL(`${origin}${path18}`); + return new URL(`${origin}${path19}`); } if (!isHttpOrHttpsPrefixed(url2.origin || url2.protocol)) { throw new InvalidArgumentError("Invalid URL protocol: the URL must start with `http:` or `https:`."); @@ -1795,39 +1795,39 @@ var require_diagnostics = __commonJS({ }); diagnosticsChannel.channel("undici:client:sendHeaders").subscribe((evt) => { const { - request: { method, path: path18, origin } + request: { method, path: path19, origin } } = evt; - debuglog("sending request to %s %s/%s", method, origin, path18); + debuglog("sending request to %s %s/%s", method, origin, path19); }); diagnosticsChannel.channel("undici:request:headers").subscribe((evt) => { const { - request: { method, path: path18, origin }, + request: { method, path: path19, origin }, response: { statusCode } } = evt; debuglog( "received response to %s %s/%s - HTTP %d", method, origin, - path18, + path19, statusCode ); }); diagnosticsChannel.channel("undici:request:trailers").subscribe((evt) => { const { - request: { method, path: path18, origin } + request: { method, path: path19, origin } } = evt; - debuglog("trailers received from %s %s/%s", method, origin, 
path18); + debuglog("trailers received from %s %s/%s", method, origin, path19); }); diagnosticsChannel.channel("undici:request:error").subscribe((evt) => { const { - request: { method, path: path18, origin }, + request: { method, path: path19, origin }, error: error3 } = evt; debuglog( "request to %s %s/%s errored - %s", method, origin, - path18, + path19, error3.message ); }); @@ -1876,9 +1876,9 @@ var require_diagnostics = __commonJS({ }); diagnosticsChannel.channel("undici:client:sendHeaders").subscribe((evt) => { const { - request: { method, path: path18, origin } + request: { method, path: path19, origin } } = evt; - debuglog("sending request to %s %s/%s", method, origin, path18); + debuglog("sending request to %s %s/%s", method, origin, path19); }); } diagnosticsChannel.channel("undici:websocket:open").subscribe((evt) => { @@ -1941,7 +1941,7 @@ var require_request = __commonJS({ var kHandler = /* @__PURE__ */ Symbol("handler"); var Request = class { constructor(origin, { - path: path18, + path: path19, method, body, headers, @@ -1956,11 +1956,11 @@ var require_request = __commonJS({ expectContinue, servername }, handler2) { - if (typeof path18 !== "string") { + if (typeof path19 !== "string") { throw new InvalidArgumentError("path must be a string"); - } else if (path18[0] !== "/" && !(path18.startsWith("http://") || path18.startsWith("https://")) && method !== "CONNECT") { + } else if (path19[0] !== "/" && !(path19.startsWith("http://") || path19.startsWith("https://")) && method !== "CONNECT") { throw new InvalidArgumentError("path must be an absolute URL or start with a slash"); - } else if (invalidPathRegex.test(path18)) { + } else if (invalidPathRegex.test(path19)) { throw new InvalidArgumentError("invalid request path"); } if (typeof method !== "string") { @@ -2023,7 +2023,7 @@ var require_request = __commonJS({ this.completed = false; this.aborted = false; this.upgrade = upgrade || null; - this.path = query ? 
buildURL(path18, query) : path18; + this.path = query ? buildURL(path19, query) : path19; this.origin = origin; this.idempotent = idempotent == null ? method === "HEAD" || method === "GET" : idempotent; this.blocking = blocking == null ? false : blocking; @@ -6536,7 +6536,7 @@ var require_client_h1 = __commonJS({ return method !== "GET" && method !== "HEAD" && method !== "OPTIONS" && method !== "TRACE" && method !== "CONNECT"; } function writeH1(client, request2) { - const { method, path: path18, host, upgrade, blocking, reset } = request2; + const { method, path: path19, host, upgrade, blocking, reset } = request2; let { body, headers, contentLength } = request2; const expectsPayload = method === "PUT" || method === "POST" || method === "PATCH" || method === "QUERY" || method === "PROPFIND" || method === "PROPPATCH"; if (util.isFormDataLike(body)) { @@ -6602,7 +6602,7 @@ var require_client_h1 = __commonJS({ if (blocking) { socket[kBlocking] = true; } - let header = `${method} ${path18} HTTP/1.1\r + let header = `${method} ${path19} HTTP/1.1\r `; if (typeof host === "string") { header += `host: ${host}\r @@ -7128,7 +7128,7 @@ var require_client_h2 = __commonJS({ } function writeH2(client, request2) { const session = client[kHTTP2Session]; - const { method, path: path18, host, upgrade, expectContinue, signal, headers: reqHeaders } = request2; + const { method, path: path19, host, upgrade, expectContinue, signal, headers: reqHeaders } = request2; let { body } = request2; if (upgrade) { util.errorRequest(client, request2, new Error("Upgrade not supported for H2")); @@ -7195,7 +7195,7 @@ var require_client_h2 = __commonJS({ }); return true; } - headers[HTTP2_HEADER_PATH] = path18; + headers[HTTP2_HEADER_PATH] = path19; headers[HTTP2_HEADER_SCHEME] = "https"; const expectsPayload = method === "PUT" || method === "POST" || method === "PATCH"; if (body && typeof body.read === "function") { @@ -7548,9 +7548,9 @@ var require_redirect_handler = __commonJS({ return 
this.handler.onHeaders(statusCode, headers, resume, statusText); } const { origin, pathname, search } = util.parseURL(new URL(this.location, this.opts.origin && new URL(this.opts.path, this.opts.origin))); - const path18 = search ? `${pathname}${search}` : pathname; + const path19 = search ? `${pathname}${search}` : pathname; this.opts.headers = cleanRequestHeaders(this.opts.headers, statusCode === 303, this.opts.origin !== origin); - this.opts.path = path18; + this.opts.path = path19; this.opts.origin = origin; this.opts.maxRedirections = 0; this.opts.query = null; @@ -8784,10 +8784,10 @@ var require_proxy_agent = __commonJS({ }; const { origin, - path: path18 = "/", + path: path19 = "/", headers = {} } = opts; - opts.path = origin + path18; + opts.path = origin + path19; if (!("host" in headers) && !("Host" in headers)) { const { host } = new URL2(origin); headers.host = host; @@ -10708,20 +10708,20 @@ var require_mock_utils = __commonJS({ } return true; } - function safeUrl(path18) { - if (typeof path18 !== "string") { - return path18; + function safeUrl(path19) { + if (typeof path19 !== "string") { + return path19; } - const pathSegments = path18.split("?"); + const pathSegments = path19.split("?"); if (pathSegments.length !== 2) { - return path18; + return path19; } const qp = new URLSearchParams(pathSegments.pop()); qp.sort(); return [...pathSegments, qp.toString()].join("?"); } - function matchKey(mockDispatch2, { path: path18, method, body, headers }) { - const pathMatch = matchValue(mockDispatch2.path, path18); + function matchKey(mockDispatch2, { path: path19, method, body, headers }) { + const pathMatch = matchValue(mockDispatch2.path, path19); const methodMatch = matchValue(mockDispatch2.method, method); const bodyMatch = typeof mockDispatch2.body !== "undefined" ? 
matchValue(mockDispatch2.body, body) : true; const headersMatch = matchHeaders(mockDispatch2, headers); @@ -10743,7 +10743,7 @@ var require_mock_utils = __commonJS({ function getMockDispatch(mockDispatches, key) { const basePath = key.query ? buildURL(key.path, key.query) : key.path; const resolvedPath = typeof basePath === "string" ? safeUrl(basePath) : basePath; - let matchedMockDispatches = mockDispatches.filter(({ consumed }) => !consumed).filter(({ path: path18 }) => matchValue(safeUrl(path18), resolvedPath)); + let matchedMockDispatches = mockDispatches.filter(({ consumed }) => !consumed).filter(({ path: path19 }) => matchValue(safeUrl(path19), resolvedPath)); if (matchedMockDispatches.length === 0) { throw new MockNotMatchedError(`Mock dispatch not matched for path '${resolvedPath}'`); } @@ -10781,9 +10781,9 @@ var require_mock_utils = __commonJS({ } } function buildKey(opts) { - const { path: path18, method, body, headers, query } = opts; + const { path: path19, method, body, headers, query } = opts; return { - path: path18, + path: path19, method, body, headers, @@ -11246,10 +11246,10 @@ var require_pending_interceptors_formatter = __commonJS({ } format(pendingInterceptors) { const withPrettyHeaders = pendingInterceptors.map( - ({ method, path: path18, data: { statusCode }, persist, times, timesInvoked, origin }) => ({ + ({ method, path: path19, data: { statusCode }, persist, times, timesInvoked, origin }) => ({ Method: method, Origin: origin, - Path: path18, + Path: path19, "Status code": statusCode, Persistent: persist ? 
PERSISTENT : NOT_PERSISTENT, Invocations: timesInvoked, @@ -16130,9 +16130,9 @@ var require_util6 = __commonJS({ } } } - function validateCookiePath(path18) { - for (let i = 0; i < path18.length; ++i) { - const code = path18.charCodeAt(i); + function validateCookiePath(path19) { + for (let i = 0; i < path19.length; ++i) { + const code = path19.charCodeAt(i); if (code < 32 || // exclude CTLs (0-31) code === 127 || // DEL code === 59) { @@ -18726,11 +18726,11 @@ var require_undici = __commonJS({ if (typeof opts.path !== "string") { throw new InvalidArgumentError("invalid opts.path"); } - let path18 = opts.path; + let path19 = opts.path; if (!opts.path.startsWith("/")) { - path18 = `/${path18}`; + path19 = `/${path19}`; } - url2 = new URL(util.parseOrigin(url2).origin + path18); + url2 = new URL(util.parseOrigin(url2).origin + path19); } else { if (!opts) { opts = typeof url2 === "object" ? url2 : {}; @@ -20033,7 +20033,7 @@ var require_path_utils = __commonJS({ exports2.toPosixPath = toPosixPath; exports2.toWin32Path = toWin32Path; exports2.toPlatformPath = toPlatformPath; - var path18 = __importStar2(require("path")); + var path19 = __importStar2(require("path")); function toPosixPath(pth) { return pth.replace(/[\\]/g, "/"); } @@ -20041,7 +20041,7 @@ var require_path_utils = __commonJS({ return pth.replace(/[/]/g, "\\"); } function toPlatformPath(pth) { - return pth.replace(/[/\\]/g, path18.sep); + return pth.replace(/[/\\]/g, path19.sep); } } }); @@ -20124,7 +20124,7 @@ var require_io_util = __commonJS({ exports2.tryGetExecutablePath = tryGetExecutablePath; exports2.getCmdPath = getCmdPath; var fs19 = __importStar2(require("fs")); - var path18 = __importStar2(require("path")); + var path19 = __importStar2(require("path")); _a = fs19.promises, exports2.chmod = _a.chmod, exports2.copyFile = _a.copyFile, exports2.lstat = _a.lstat, exports2.mkdir = _a.mkdir, exports2.open = _a.open, exports2.readdir = _a.readdir, exports2.rename = _a.rename, exports2.rm = _a.rm, 
exports2.rmdir = _a.rmdir, exports2.stat = _a.stat, exports2.symlink = _a.symlink, exports2.unlink = _a.unlink; exports2.IS_WINDOWS = process.platform === "win32"; function readlink(fsPath) { @@ -20179,7 +20179,7 @@ var require_io_util = __commonJS({ } if (stats && stats.isFile()) { if (exports2.IS_WINDOWS) { - const upperExt = path18.extname(filePath).toUpperCase(); + const upperExt = path19.extname(filePath).toUpperCase(); if (extensions.some((validExt) => validExt.toUpperCase() === upperExt)) { return filePath; } @@ -20203,11 +20203,11 @@ var require_io_util = __commonJS({ if (stats && stats.isFile()) { if (exports2.IS_WINDOWS) { try { - const directory = path18.dirname(filePath); - const upperName = path18.basename(filePath).toUpperCase(); + const directory = path19.dirname(filePath); + const upperName = path19.basename(filePath).toUpperCase(); for (const actualName of yield (0, exports2.readdir)(directory)) { if (upperName === actualName.toUpperCase()) { - filePath = path18.join(directory, actualName); + filePath = path19.join(directory, actualName); break; } } @@ -20319,7 +20319,7 @@ var require_io = __commonJS({ exports2.which = which7; exports2.findInPath = findInPath; var assert_1 = require("assert"); - var path18 = __importStar2(require("path")); + var path19 = __importStar2(require("path")); var ioUtil = __importStar2(require_io_util()); function cp(source_1, dest_1) { return __awaiter2(this, arguments, void 0, function* (source, dest, options = {}) { @@ -20328,7 +20328,7 @@ var require_io = __commonJS({ if (destStat && destStat.isFile() && !force) { return; } - const newDest = destStat && destStat.isDirectory() && copySourceDirectory ? path18.join(dest, path18.basename(source)) : dest; + const newDest = destStat && destStat.isDirectory() && copySourceDirectory ? 
path19.join(dest, path19.basename(source)) : dest; if (!(yield ioUtil.exists(source))) { throw new Error(`no such file or directory: ${source}`); } @@ -20340,7 +20340,7 @@ var require_io = __commonJS({ yield cpDirRecursive(source, newDest, 0, force); } } else { - if (path18.relative(source, newDest) === "") { + if (path19.relative(source, newDest) === "") { throw new Error(`'${newDest}' and '${source}' are the same file`); } yield copyFile2(source, newDest, force); @@ -20352,7 +20352,7 @@ var require_io = __commonJS({ if (yield ioUtil.exists(dest)) { let destExists = true; if (yield ioUtil.isDirectory(dest)) { - dest = path18.join(dest, path18.basename(source)); + dest = path19.join(dest, path19.basename(source)); destExists = yield ioUtil.exists(dest); } if (destExists) { @@ -20363,7 +20363,7 @@ var require_io = __commonJS({ } } } - yield mkdirP(path18.dirname(dest)); + yield mkdirP(path19.dirname(dest)); yield ioUtil.rename(source, dest); }); } @@ -20422,7 +20422,7 @@ var require_io = __commonJS({ } const extensions = []; if (ioUtil.IS_WINDOWS && process.env["PATHEXT"]) { - for (const extension of process.env["PATHEXT"].split(path18.delimiter)) { + for (const extension of process.env["PATHEXT"].split(path19.delimiter)) { if (extension) { extensions.push(extension); } @@ -20435,12 +20435,12 @@ var require_io = __commonJS({ } return []; } - if (tool.includes(path18.sep)) { + if (tool.includes(path19.sep)) { return []; } const directories = []; if (process.env.PATH) { - for (const p of process.env.PATH.split(path18.delimiter)) { + for (const p of process.env.PATH.split(path19.delimiter)) { if (p) { directories.push(p); } @@ -20448,7 +20448,7 @@ var require_io = __commonJS({ } const matches = []; for (const directory of directories) { - const filePath = yield ioUtil.tryGetExecutablePath(path18.join(directory, tool), extensions); + const filePath = yield ioUtil.tryGetExecutablePath(path19.join(directory, tool), extensions); if (filePath) { matches.push(filePath); } @@ 
-20578,7 +20578,7 @@ var require_toolrunner = __commonJS({ var os4 = __importStar2(require("os")); var events = __importStar2(require("events")); var child = __importStar2(require("child_process")); - var path18 = __importStar2(require("path")); + var path19 = __importStar2(require("path")); var io7 = __importStar2(require_io()); var ioUtil = __importStar2(require_io_util()); var timers_1 = require("timers"); @@ -20793,7 +20793,7 @@ var require_toolrunner = __commonJS({ exec() { return __awaiter2(this, void 0, void 0, function* () { if (!ioUtil.isRooted(this.toolPath) && (this.toolPath.includes("/") || IS_WINDOWS && this.toolPath.includes("\\"))) { - this.toolPath = path18.resolve(process.cwd(), this.options.cwd || process.cwd(), this.toolPath); + this.toolPath = path19.resolve(process.cwd(), this.options.cwd || process.cwd(), this.toolPath); } this.toolPath = yield io7.which(this.toolPath, true); return new Promise((resolve8, reject) => __awaiter2(this, void 0, void 0, function* () { @@ -21346,7 +21346,7 @@ var require_core = __commonJS({ var file_command_1 = require_file_command(); var utils_1 = require_utils(); var os4 = __importStar2(require("os")); - var path18 = __importStar2(require("path")); + var path19 = __importStar2(require("path")); var oidc_utils_1 = require_oidc_utils(); var ExitCode; (function(ExitCode2) { @@ -21372,7 +21372,7 @@ var require_core = __commonJS({ } else { (0, command_1.issueCommand)("add-path", {}, inputPath); } - process.env["PATH"] = `${inputPath}${path18.delimiter}${process.env["PATH"]}`; + process.env["PATH"] = `${inputPath}${path19.delimiter}${process.env["PATH"]}`; } function getInput2(name, options) { const val = process.env[`INPUT_${name.replace(/ /g, "_").toUpperCase()}`] || ""; @@ -21509,8 +21509,8 @@ var require_context = __commonJS({ if ((0, fs_1.existsSync)(process.env.GITHUB_EVENT_PATH)) { this.payload = JSON.parse((0, fs_1.readFileSync)(process.env.GITHUB_EVENT_PATH, { encoding: "utf8" })); } else { - const path18 = 
process.env.GITHUB_EVENT_PATH; - process.stdout.write(`GITHUB_EVENT_PATH ${path18} does not exist${os_1.EOL}`); + const path19 = process.env.GITHUB_EVENT_PATH; + process.stdout.write(`GITHUB_EVENT_PATH ${path19} does not exist${os_1.EOL}`); } } this.eventName = process.env.GITHUB_EVENT_NAME; @@ -22335,14 +22335,14 @@ var require_util9 = __commonJS({ } const port = url2.port != null ? url2.port : url2.protocol === "https:" ? 443 : 80; let origin = url2.origin != null ? url2.origin : `${url2.protocol || ""}//${url2.hostname || ""}:${port}`; - let path18 = url2.path != null ? url2.path : `${url2.pathname || ""}${url2.search || ""}`; + let path19 = url2.path != null ? url2.path : `${url2.pathname || ""}${url2.search || ""}`; if (origin[origin.length - 1] === "/") { origin = origin.slice(0, origin.length - 1); } - if (path18 && path18[0] !== "/") { - path18 = `/${path18}`; + if (path19 && path19[0] !== "/") { + path19 = `/${path19}`; } - return new URL(`${origin}${path18}`); + return new URL(`${origin}${path19}`); } if (!isHttpOrHttpsPrefixed(url2.origin || url2.protocol)) { throw new InvalidArgumentError("Invalid URL protocol: the URL must start with `http:` or `https:`."); @@ -22793,39 +22793,39 @@ var require_diagnostics2 = __commonJS({ }); diagnosticsChannel.channel("undici:client:sendHeaders").subscribe((evt) => { const { - request: { method, path: path18, origin } + request: { method, path: path19, origin } } = evt; - debuglog("sending request to %s %s/%s", method, origin, path18); + debuglog("sending request to %s %s/%s", method, origin, path19); }); diagnosticsChannel.channel("undici:request:headers").subscribe((evt) => { const { - request: { method, path: path18, origin }, + request: { method, path: path19, origin }, response: { statusCode } } = evt; debuglog( "received response to %s %s/%s - HTTP %d", method, origin, - path18, + path19, statusCode ); }); diagnosticsChannel.channel("undici:request:trailers").subscribe((evt) => { const { - request: { method, 
path: path18, origin } + request: { method, path: path19, origin } } = evt; - debuglog("trailers received from %s %s/%s", method, origin, path18); + debuglog("trailers received from %s %s/%s", method, origin, path19); }); diagnosticsChannel.channel("undici:request:error").subscribe((evt) => { const { - request: { method, path: path18, origin }, + request: { method, path: path19, origin }, error: error3 } = evt; debuglog( "request to %s %s/%s errored - %s", method, origin, - path18, + path19, error3.message ); }); @@ -22874,9 +22874,9 @@ var require_diagnostics2 = __commonJS({ }); diagnosticsChannel.channel("undici:client:sendHeaders").subscribe((evt) => { const { - request: { method, path: path18, origin } + request: { method, path: path19, origin } } = evt; - debuglog("sending request to %s %s/%s", method, origin, path18); + debuglog("sending request to %s %s/%s", method, origin, path19); }); } diagnosticsChannel.channel("undici:websocket:open").subscribe((evt) => { @@ -22939,7 +22939,7 @@ var require_request3 = __commonJS({ var kHandler = /* @__PURE__ */ Symbol("handler"); var Request = class { constructor(origin, { - path: path18, + path: path19, method, body, headers, @@ -22954,11 +22954,11 @@ var require_request3 = __commonJS({ expectContinue, servername }, handler2) { - if (typeof path18 !== "string") { + if (typeof path19 !== "string") { throw new InvalidArgumentError("path must be a string"); - } else if (path18[0] !== "/" && !(path18.startsWith("http://") || path18.startsWith("https://")) && method !== "CONNECT") { + } else if (path19[0] !== "/" && !(path19.startsWith("http://") || path19.startsWith("https://")) && method !== "CONNECT") { throw new InvalidArgumentError("path must be an absolute URL or start with a slash"); - } else if (invalidPathRegex.test(path18)) { + } else if (invalidPathRegex.test(path19)) { throw new InvalidArgumentError("invalid request path"); } if (typeof method !== "string") { @@ -23021,7 +23021,7 @@ var require_request3 = 
__commonJS({ this.completed = false; this.aborted = false; this.upgrade = upgrade || null; - this.path = query ? buildURL(path18, query) : path18; + this.path = query ? buildURL(path19, query) : path19; this.origin = origin; this.idempotent = idempotent == null ? method === "HEAD" || method === "GET" : idempotent; this.blocking = blocking == null ? false : blocking; @@ -27534,7 +27534,7 @@ var require_client_h12 = __commonJS({ return method !== "GET" && method !== "HEAD" && method !== "OPTIONS" && method !== "TRACE" && method !== "CONNECT"; } function writeH1(client, request2) { - const { method, path: path18, host, upgrade, blocking, reset } = request2; + const { method, path: path19, host, upgrade, blocking, reset } = request2; let { body, headers, contentLength } = request2; const expectsPayload = method === "PUT" || method === "POST" || method === "PATCH" || method === "QUERY" || method === "PROPFIND" || method === "PROPPATCH"; if (util.isFormDataLike(body)) { @@ -27600,7 +27600,7 @@ var require_client_h12 = __commonJS({ if (blocking) { socket[kBlocking] = true; } - let header = `${method} ${path18} HTTP/1.1\r + let header = `${method} ${path19} HTTP/1.1\r `; if (typeof host === "string") { header += `host: ${host}\r @@ -28126,7 +28126,7 @@ var require_client_h22 = __commonJS({ } function writeH2(client, request2) { const session = client[kHTTP2Session]; - const { method, path: path18, host, upgrade, expectContinue, signal, headers: reqHeaders } = request2; + const { method, path: path19, host, upgrade, expectContinue, signal, headers: reqHeaders } = request2; let { body } = request2; if (upgrade) { util.errorRequest(client, request2, new Error("Upgrade not supported for H2")); @@ -28193,7 +28193,7 @@ var require_client_h22 = __commonJS({ }); return true; } - headers[HTTP2_HEADER_PATH] = path18; + headers[HTTP2_HEADER_PATH] = path19; headers[HTTP2_HEADER_SCHEME] = "https"; const expectsPayload = method === "PUT" || method === "POST" || method === "PATCH"; if 
(body && typeof body.read === "function") { @@ -28546,9 +28546,9 @@ var require_redirect_handler2 = __commonJS({ return this.handler.onHeaders(statusCode, headers, resume, statusText); } const { origin, pathname, search } = util.parseURL(new URL(this.location, this.opts.origin && new URL(this.opts.path, this.opts.origin))); - const path18 = search ? `${pathname}${search}` : pathname; + const path19 = search ? `${pathname}${search}` : pathname; this.opts.headers = cleanRequestHeaders(this.opts.headers, statusCode === 303, this.opts.origin !== origin); - this.opts.path = path18; + this.opts.path = path19; this.opts.origin = origin; this.opts.maxRedirections = 0; this.opts.query = null; @@ -29782,10 +29782,10 @@ var require_proxy_agent2 = __commonJS({ }; const { origin, - path: path18 = "/", + path: path19 = "/", headers = {} } = opts; - opts.path = origin + path18; + opts.path = origin + path19; if (!("host" in headers) && !("Host" in headers)) { const { host } = new URL2(origin); headers.host = host; @@ -31706,20 +31706,20 @@ var require_mock_utils2 = __commonJS({ } return true; } - function safeUrl(path18) { - if (typeof path18 !== "string") { - return path18; + function safeUrl(path19) { + if (typeof path19 !== "string") { + return path19; } - const pathSegments = path18.split("?"); + const pathSegments = path19.split("?"); if (pathSegments.length !== 2) { - return path18; + return path19; } const qp = new URLSearchParams(pathSegments.pop()); qp.sort(); return [...pathSegments, qp.toString()].join("?"); } - function matchKey(mockDispatch2, { path: path18, method, body, headers }) { - const pathMatch = matchValue(mockDispatch2.path, path18); + function matchKey(mockDispatch2, { path: path19, method, body, headers }) { + const pathMatch = matchValue(mockDispatch2.path, path19); const methodMatch = matchValue(mockDispatch2.method, method); const bodyMatch = typeof mockDispatch2.body !== "undefined" ? 
matchValue(mockDispatch2.body, body) : true; const headersMatch = matchHeaders(mockDispatch2, headers); @@ -31741,7 +31741,7 @@ var require_mock_utils2 = __commonJS({ function getMockDispatch(mockDispatches, key) { const basePath = key.query ? buildURL(key.path, key.query) : key.path; const resolvedPath = typeof basePath === "string" ? safeUrl(basePath) : basePath; - let matchedMockDispatches = mockDispatches.filter(({ consumed }) => !consumed).filter(({ path: path18 }) => matchValue(safeUrl(path18), resolvedPath)); + let matchedMockDispatches = mockDispatches.filter(({ consumed }) => !consumed).filter(({ path: path19 }) => matchValue(safeUrl(path19), resolvedPath)); if (matchedMockDispatches.length === 0) { throw new MockNotMatchedError(`Mock dispatch not matched for path '${resolvedPath}'`); } @@ -31779,9 +31779,9 @@ var require_mock_utils2 = __commonJS({ } } function buildKey(opts) { - const { path: path18, method, body, headers, query } = opts; + const { path: path19, method, body, headers, query } = opts; return { - path: path18, + path: path19, method, body, headers, @@ -32244,10 +32244,10 @@ var require_pending_interceptors_formatter2 = __commonJS({ } format(pendingInterceptors) { const withPrettyHeaders = pendingInterceptors.map( - ({ method, path: path18, data: { statusCode }, persist, times, timesInvoked, origin }) => ({ + ({ method, path: path19, data: { statusCode }, persist, times, timesInvoked, origin }) => ({ Method: method, Origin: origin, - Path: path18, + Path: path19, "Status code": statusCode, Persistent: persist ? 
PERSISTENT : NOT_PERSISTENT, Invocations: timesInvoked, @@ -37128,9 +37128,9 @@ var require_util14 = __commonJS({ } } } - function validateCookiePath(path18) { - for (let i = 0; i < path18.length; ++i) { - const code = path18.charCodeAt(i); + function validateCookiePath(path19) { + for (let i = 0; i < path19.length; ++i) { + const code = path19.charCodeAt(i); if (code < 32 || // exclude CTLs (0-31) code === 127 || // DEL code === 59) { @@ -39724,11 +39724,11 @@ var require_undici2 = __commonJS({ if (typeof opts.path !== "string") { throw new InvalidArgumentError("invalid opts.path"); } - let path18 = opts.path; + let path19 = opts.path; if (!opts.path.startsWith("/")) { - path18 = `/${path18}`; + path19 = `/${path19}`; } - url2 = new URL(util.parseOrigin(url2).origin + path18); + url2 = new URL(util.parseOrigin(url2).origin + path19); } else { if (!opts) { opts = typeof url2 === "object" ? url2 : {}; @@ -46427,7 +46427,7 @@ var require_light = __commonJS({ } return this.Events.trigger("scheduled", { args: this.args, options: this.options }); } - async doExecute(chained, clearGlobalState, run3, free) { + async doExecute(chained, clearGlobalState, run2, free) { var error3, eventInfo, passed; if (this.retryCount === 0) { this._assertStatus("RUNNING"); @@ -46447,10 +46447,10 @@ var require_light = __commonJS({ } } catch (error1) { error3 = error1; - return this._onFailure(error3, eventInfo, clearGlobalState, run3, free); + return this._onFailure(error3, eventInfo, clearGlobalState, run2, free); } } - doExpire(clearGlobalState, run3, free) { + doExpire(clearGlobalState, run2, free) { var error3, eventInfo; if (this._states.jobStatus(this.options.id === "RUNNING")) { this._states.next(this.options.id); @@ -46458,9 +46458,9 @@ var require_light = __commonJS({ this._assertStatus("EXECUTING"); eventInfo = { args: this.args, options: this.options, retryCount: this.retryCount }; error3 = new BottleneckError$1(`This job timed out after ${this.options.expiration} ms.`); - 
return this._onFailure(error3, eventInfo, clearGlobalState, run3, free); + return this._onFailure(error3, eventInfo, clearGlobalState, run2, free); } - async _onFailure(error3, eventInfo, clearGlobalState, run3, free) { + async _onFailure(error3, eventInfo, clearGlobalState, run2, free) { var retry2, retryAfter; if (clearGlobalState()) { retry2 = await this.Events.trigger("failed", error3, eventInfo); @@ -46468,7 +46468,7 @@ var require_light = __commonJS({ retryAfter = ~~retry2; this.Events.trigger("retry", `Retrying ${this.options.id} after ${retryAfter} ms`, eventInfo); this.retryCount++; - return run3(retryAfter); + return run2(retryAfter); } else { this.doDone(eventInfo); await free(this.options, eventInfo); @@ -47106,17 +47106,17 @@ var require_light = __commonJS({ } } _run(index, job, wait) { - var clearGlobalState, free, run3; + var clearGlobalState, free, run2; job.doRun(); clearGlobalState = this._clearGlobalState.bind(this, index); - run3 = this._run.bind(this, index, job); + run2 = this._run.bind(this, index, job); free = this._free.bind(this, index, job); return this._scheduled[index] = { timeout: setTimeout(() => { - return job.doExecute(this._limiter, clearGlobalState, run3, free); + return job.doExecute(this._limiter, clearGlobalState, run2, free); }, wait), expiration: job.options.expiration != null ? 
setTimeout(function() { - return job.doExpire(clearGlobalState, run3, free); + return job.doExpire(clearGlobalState, run2, free); }, wait + job.options.expiration) : void 0, job }; @@ -47411,14 +47411,14 @@ var require_helpers = __commonJS({ "node_modules/jsonschema/lib/helpers.js"(exports2, module2) { "use strict"; var uri = require("url"); - var ValidationError = exports2.ValidationError = function ValidationError2(message, instance, schema2, path18, name, argument) { - if (Array.isArray(path18)) { - this.path = path18; - this.property = path18.reduce(function(sum, item) { + var ValidationError = exports2.ValidationError = function ValidationError2(message, instance, schema2, path19, name, argument) { + if (Array.isArray(path19)) { + this.path = path19; + this.property = path19.reduce(function(sum, item) { return sum + makeSuffix(item); }, "instance"); - } else if (path18 !== void 0) { - this.property = path18; + } else if (path19 !== void 0) { + this.property = path19; } if (message) { this.message = message; @@ -47509,16 +47509,16 @@ var require_helpers = __commonJS({ name: { value: "SchemaError", enumerable: false } } ); - var SchemaContext = exports2.SchemaContext = function SchemaContext2(schema2, options, path18, base, schemas) { + var SchemaContext = exports2.SchemaContext = function SchemaContext2(schema2, options, path19, base, schemas) { this.schema = schema2; this.options = options; - if (Array.isArray(path18)) { - this.path = path18; - this.propertyPath = path18.reduce(function(sum, item) { + if (Array.isArray(path19)) { + this.path = path19; + this.propertyPath = path19.reduce(function(sum, item) { return sum + makeSuffix(item); }, "instance"); } else { - this.propertyPath = path18; + this.propertyPath = path19; } this.base = base; this.schemas = schemas; @@ -47527,10 +47527,10 @@ var require_helpers = __commonJS({ return uri.resolve(this.base, target); }; SchemaContext.prototype.makeChild = function makeChild(schema2, propertyName) { - var path18 = 
propertyName === void 0 ? this.path : this.path.concat([propertyName]); + var path19 = propertyName === void 0 ? this.path : this.path.concat([propertyName]); var id = schema2.$id || schema2.id; var base = uri.resolve(this.base, id || ""); - var ctx = new SchemaContext(schema2, this.options, path18, base, Object.create(this.schemas)); + var ctx = new SchemaContext(schema2, this.options, path19, base, Object.create(this.schemas)); if (id && !ctx.schemas[base]) { ctx.schemas[base] = schema2; } @@ -48833,7 +48833,7 @@ var require_internal_path_helper = __commonJS({ exports2.hasRoot = hasRoot; exports2.normalizeSeparators = normalizeSeparators; exports2.safeTrimTrailingSeparator = safeTrimTrailingSeparator; - var path18 = __importStar2(require("path")); + var path19 = __importStar2(require("path")); var assert_1 = __importDefault2(require("assert")); var IS_WINDOWS = process.platform === "win32"; function dirname5(p) { @@ -48841,7 +48841,7 @@ var require_internal_path_helper = __commonJS({ if (IS_WINDOWS && /^\\\\[^\\]+(\\[^\\]+)?$/.test(p)) { return p; } - let result = path18.dirname(p); + let result = path19.dirname(p); if (IS_WINDOWS && /^\\\\[^\\]+\\[^\\]+\\$/.test(result)) { result = safeTrimTrailingSeparator(result); } @@ -48878,7 +48878,7 @@ var require_internal_path_helper = __commonJS({ (0, assert_1.default)(hasAbsoluteRoot(root), `ensureAbsoluteRoot parameter 'root' must have an absolute root`); if (root.endsWith("/") || IS_WINDOWS && root.endsWith("\\")) { } else { - root += path18.sep; + root += path19.sep; } return root + itemPath; } @@ -48912,10 +48912,10 @@ var require_internal_path_helper = __commonJS({ return ""; } p = normalizeSeparators(p); - if (!p.endsWith(path18.sep)) { + if (!p.endsWith(path19.sep)) { return p; } - if (p === path18.sep) { + if (p === path19.sep) { return p; } if (IS_WINDOWS && /^[A-Z]:\\$/i.test(p)) { @@ -49260,7 +49260,7 @@ var require_minimatch = __commonJS({ "node_modules/minimatch/minimatch.js"(exports2, module2) { 
module2.exports = minimatch; minimatch.Minimatch = Minimatch; - var path18 = (function() { + var path19 = (function() { try { return require("path"); } catch (e) { @@ -49268,7 +49268,7 @@ var require_minimatch = __commonJS({ })() || { sep: "/" }; - minimatch.sep = path18.sep; + minimatch.sep = path19.sep; var GLOBSTAR = minimatch.GLOBSTAR = Minimatch.GLOBSTAR = {}; var expand2 = require_brace_expansion(); var plTypes = { @@ -49357,8 +49357,8 @@ var require_minimatch = __commonJS({ assertValidPattern(pattern); if (!options) options = {}; pattern = pattern.trim(); - if (!options.allowWindowsEscape && path18.sep !== "/") { - pattern = pattern.split(path18.sep).join("/"); + if (!options.allowWindowsEscape && path19.sep !== "/") { + pattern = pattern.split(path19.sep).join("/"); } this.options = options; this.set = []; @@ -49728,8 +49728,8 @@ var require_minimatch = __commonJS({ if (this.empty) return f === ""; if (f === "/" && partial) return true; var options = this.options; - if (path18.sep !== "/") { - f = f.split(path18.sep).join("/"); + if (path19.sep !== "/") { + f = f.split(path19.sep).join("/"); } f = f.split(slashSplit); this.debug(this.pattern, "split", f); @@ -49875,7 +49875,7 @@ var require_internal_path = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.Path = void 0; - var path18 = __importStar2(require("path")); + var path19 = __importStar2(require("path")); var pathHelper = __importStar2(require_internal_path_helper()); var assert_1 = __importDefault2(require("assert")); var IS_WINDOWS = process.platform === "win32"; @@ -49890,12 +49890,12 @@ var require_internal_path = __commonJS({ (0, assert_1.default)(itemPath, `Parameter 'itemPath' must not be empty`); itemPath = pathHelper.safeTrimTrailingSeparator(itemPath); if (!pathHelper.hasRoot(itemPath)) { - this.segments = itemPath.split(path18.sep); + this.segments = itemPath.split(path19.sep); } else { let remaining = itemPath; let dir = 
pathHelper.dirname(remaining); while (dir !== remaining) { - const basename2 = path18.basename(remaining); + const basename2 = path19.basename(remaining); this.segments.unshift(basename2); remaining = dir; dir = pathHelper.dirname(remaining); @@ -49913,7 +49913,7 @@ var require_internal_path = __commonJS({ (0, assert_1.default)(segment === pathHelper.dirname(segment), `Parameter 'itemPath' root segment contains information for multiple segments`); this.segments.push(segment); } else { - (0, assert_1.default)(!segment.includes(path18.sep), `Parameter 'itemPath' contains unexpected path separators`); + (0, assert_1.default)(!segment.includes(path19.sep), `Parameter 'itemPath' contains unexpected path separators`); this.segments.push(segment); } } @@ -49924,12 +49924,12 @@ var require_internal_path = __commonJS({ */ toString() { let result = this.segments[0]; - let skipSlash = result.endsWith(path18.sep) || IS_WINDOWS && /^[A-Z]:$/i.test(result); + let skipSlash = result.endsWith(path19.sep) || IS_WINDOWS && /^[A-Z]:$/i.test(result); for (let i = 1; i < this.segments.length; i++) { if (skipSlash) { skipSlash = false; } else { - result += path18.sep; + result += path19.sep; } result += this.segments[i]; } @@ -49987,7 +49987,7 @@ var require_internal_pattern = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.Pattern = void 0; var os4 = __importStar2(require("os")); - var path18 = __importStar2(require("path")); + var path19 = __importStar2(require("path")); var pathHelper = __importStar2(require_internal_path_helper()); var assert_1 = __importDefault2(require("assert")); var minimatch_1 = require_minimatch(); @@ -50016,7 +50016,7 @@ var require_internal_pattern = __commonJS({ } pattern = _Pattern.fixupPattern(pattern, homedir); this.segments = new internal_path_1.Path(pattern).segments; - this.trailingSeparator = pathHelper.normalizeSeparators(pattern).endsWith(path18.sep); + this.trailingSeparator = 
pathHelper.normalizeSeparators(pattern).endsWith(path19.sep); pattern = pathHelper.safeTrimTrailingSeparator(pattern); let foundGlob = false; const searchSegments = this.segments.map((x) => _Pattern.getLiteral(x)).filter((x) => !foundGlob && !(foundGlob = x === "")); @@ -50040,8 +50040,8 @@ var require_internal_pattern = __commonJS({ match(itemPath) { if (this.segments[this.segments.length - 1] === "**") { itemPath = pathHelper.normalizeSeparators(itemPath); - if (!itemPath.endsWith(path18.sep) && this.isImplicitPattern === false) { - itemPath = `${itemPath}${path18.sep}`; + if (!itemPath.endsWith(path19.sep) && this.isImplicitPattern === false) { + itemPath = `${itemPath}${path19.sep}`; } } else { itemPath = pathHelper.safeTrimTrailingSeparator(itemPath); @@ -50076,9 +50076,9 @@ var require_internal_pattern = __commonJS({ (0, assert_1.default)(literalSegments.every((x, i) => (x !== "." || i === 0) && x !== ".."), `Invalid pattern '${pattern}'. Relative pathing '.' and '..' is not allowed.`); (0, assert_1.default)(!pathHelper.hasRoot(pattern) || literalSegments[0], `Invalid pattern '${pattern}'. Root segment must not contain globs.`); pattern = pathHelper.normalizeSeparators(pattern); - if (pattern === "." || pattern.startsWith(`.${path18.sep}`)) { + if (pattern === "." || pattern.startsWith(`.${path19.sep}`)) { pattern = _Pattern.globEscape(process.cwd()) + pattern.substr(1); - } else if (pattern === "~" || pattern.startsWith(`~${path18.sep}`)) { + } else if (pattern === "~" || pattern.startsWith(`~${path19.sep}`)) { homedir = homedir || os4.homedir(); (0, assert_1.default)(homedir, "Unable to determine HOME directory"); (0, assert_1.default)(pathHelper.hasAbsoluteRoot(homedir), `Expected HOME directory to be a rooted path. 
Actual '${homedir}'`); @@ -50162,8 +50162,8 @@ var require_internal_search_state = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.SearchState = void 0; var SearchState = class { - constructor(path18, level) { - this.path = path18; + constructor(path19, level) { + this.path = path19; this.level = level; } }; @@ -50307,7 +50307,7 @@ var require_internal_globber = __commonJS({ var core17 = __importStar2(require_core()); var fs19 = __importStar2(require("fs")); var globOptionsHelper = __importStar2(require_internal_glob_options_helper()); - var path18 = __importStar2(require("path")); + var path19 = __importStar2(require("path")); var patternHelper = __importStar2(require_internal_pattern_helper()); var internal_match_kind_1 = require_internal_match_kind(); var internal_pattern_1 = require_internal_pattern(); @@ -50383,7 +50383,7 @@ var require_internal_globber = __commonJS({ if (!stats) { continue; } - if (options.excludeHiddenFiles && path18.basename(item.path).match(/^\./)) { + if (options.excludeHiddenFiles && path19.basename(item.path).match(/^\./)) { continue; } if (stats.isDirectory()) { @@ -50393,7 +50393,7 @@ var require_internal_globber = __commonJS({ continue; } const childLevel = item.level + 1; - const childItems = (yield __await2(fs19.promises.readdir(item.path))).map((x) => new internal_search_state_1.SearchState(path18.join(item.path, x), childLevel)); + const childItems = (yield __await2(fs19.promises.readdir(item.path))).map((x) => new internal_search_state_1.SearchState(path19.join(item.path, x), childLevel)); stack.push(...childItems.reverse()); } else if (match & internal_match_kind_1.MatchKind.File) { yield yield __await2(item.path); @@ -50555,7 +50555,7 @@ var require_internal_hash_files = __commonJS({ var fs19 = __importStar2(require("fs")); var stream2 = __importStar2(require("stream")); var util = __importStar2(require("util")); - var path18 = __importStar2(require("path")); + var path19 = 
__importStar2(require("path")); function hashFiles2(globber_1, currentWorkspace_1) { return __awaiter2(this, arguments, void 0, function* (globber, currentWorkspace, verbose = false) { var _a, e_1, _b, _c; @@ -50571,7 +50571,7 @@ var require_internal_hash_files = __commonJS({ _e = false; const file = _c; writeDelegate(file); - if (!file.startsWith(`${githubWorkspace}${path18.sep}`)) { + if (!file.startsWith(`${githubWorkspace}${path19.sep}`)) { writeDelegate(`Ignore '${file}' since it is not under GITHUB_WORKSPACE.`); continue; } @@ -51957,7 +51957,7 @@ var require_cacheUtils = __commonJS({ var io7 = __importStar2(require_io()); var crypto2 = __importStar2(require("crypto")); var fs19 = __importStar2(require("fs")); - var path18 = __importStar2(require("path")); + var path19 = __importStar2(require("path")); var semver9 = __importStar2(require_semver3()); var util = __importStar2(require("util")); var constants_1 = require_constants12(); @@ -51977,9 +51977,9 @@ var require_cacheUtils = __commonJS({ baseLocation = "/home"; } } - tempDirectory = path18.join(baseLocation, "actions", "temp"); + tempDirectory = path19.join(baseLocation, "actions", "temp"); } - const dest = path18.join(tempDirectory, crypto2.randomUUID()); + const dest = path19.join(tempDirectory, crypto2.randomUUID()); yield io7.mkdirP(dest); return dest; }); @@ -52001,7 +52001,7 @@ var require_cacheUtils = __commonJS({ _c = _g.value; _e = false; const file = _c; - const relativeFile = path18.relative(workspace, file).replace(new RegExp(`\\${path18.sep}`, "g"), "/"); + const relativeFile = path19.relative(workspace, file).replace(new RegExp(`\\${path19.sep}`, "g"), "/"); core17.debug(`Matched: ${relativeFile}`); if (relativeFile === "") { paths.push("."); @@ -52528,13 +52528,13 @@ function __disposeResources(env) { } return next(); } -function __rewriteRelativeImportExtension(path18, preserveJsx) { - if (typeof path18 === "string" && /^\.\.?\//.test(path18)) { - return 
path18.replace(/\.(tsx)$|((?:\.d)?)((?:\.[^./]+?)?)\.([cm]?)ts$/i, function(m, tsx, d, ext, cm) { +function __rewriteRelativeImportExtension(path19, preserveJsx) { + if (typeof path19 === "string" && /^\.\.?\//.test(path19)) { + return path19.replace(/\.(tsx)$|((?:\.d)?)((?:\.[^./]+?)?)\.([cm]?)ts$/i, function(m, tsx, d, ext, cm) { return tsx ? preserveJsx ? ".jsx" : ".js" : d && (!ext || !cm) ? m : d + ext + "." + cm.toLowerCase() + "js"; }); } - return path18; + return path19; } var extendStatics, __assign, __createBinding, __setModuleDefault, ownKeys, _SuppressedError, tslib_es6_default; var init_tslib_es6 = __esm({ @@ -56948,8 +56948,8 @@ var require_getClient = __commonJS({ } const { allowInsecureConnection, httpClient } = clientOptions; const endpointUrl = clientOptions.endpoint ?? endpoint2; - const client = (path18, ...args) => { - const getUrl = (requestOptions) => (0, urlHelpers_js_1.buildRequestUrl)(endpointUrl, path18, args, { allowInsecureConnection, ...requestOptions }); + const client = (path19, ...args) => { + const getUrl = (requestOptions) => (0, urlHelpers_js_1.buildRequestUrl)(endpointUrl, path19, args, { allowInsecureConnection, ...requestOptions }); return { get: (requestOptions = {}) => { return buildOperation("GET", getUrl(requestOptions), pipeline, requestOptions, allowInsecureConnection, httpClient); @@ -60820,15 +60820,15 @@ var require_urlHelpers2 = __commonJS({ let isAbsolutePath = false; let requestUrl = replaceAll(baseUri, urlReplacements); if (operationSpec.path) { - let path18 = replaceAll(operationSpec.path, urlReplacements); - if (operationSpec.path === "/{nextLink}" && path18.startsWith("/")) { - path18 = path18.substring(1); + let path19 = replaceAll(operationSpec.path, urlReplacements); + if (operationSpec.path === "/{nextLink}" && path19.startsWith("/")) { + path19 = path19.substring(1); } - if (isAbsoluteUrl(path18)) { - requestUrl = path18; + if (isAbsoluteUrl(path19)) { + requestUrl = path19; isAbsolutePath = true; } else { 
- requestUrl = appendPath(requestUrl, path18); + requestUrl = appendPath(requestUrl, path19); } } const { queryParams, sequenceParams } = calculateQueryParameters(operationSpec, operationArguments, fallbackObject); @@ -60874,9 +60874,9 @@ var require_urlHelpers2 = __commonJS({ } const searchStart = pathToAppend.indexOf("?"); if (searchStart !== -1) { - const path18 = pathToAppend.substring(0, searchStart); + const path19 = pathToAppend.substring(0, searchStart); const search = pathToAppend.substring(searchStart + 1); - newPath = newPath + path18; + newPath = newPath + path19; if (search) { parsedUrl.search = parsedUrl.search ? `${parsedUrl.search}&${search}` : search; } @@ -63127,10 +63127,10 @@ var require_utils_common = __commonJS({ var constants_js_1 = require_constants15(); function escapeURLPath(url2) { const urlParsed = new URL(url2); - let path18 = urlParsed.pathname; - path18 = path18 || "/"; - path18 = escape(path18); - urlParsed.pathname = path18; + let path19 = urlParsed.pathname; + path19 = path19 || "/"; + path19 = escape(path19); + urlParsed.pathname = path19; return urlParsed.toString(); } function getProxyUriFromDevConnString(connectionString) { @@ -63215,9 +63215,9 @@ var require_utils_common = __commonJS({ } function appendToURLPath(url2, name) { const urlParsed = new URL(url2); - let path18 = urlParsed.pathname; - path18 = path18 ? path18.endsWith("/") ? `${path18}${name}` : `${path18}/${name}` : name; - urlParsed.pathname = path18; + let path19 = urlParsed.pathname; + path19 = path19 ? path19.endsWith("/") ? 
`${path19}${name}` : `${path19}/${name}` : name; + urlParsed.pathname = path19; return urlParsed.toString(); } function setURLParameter(url2, name, value) { @@ -64444,9 +64444,9 @@ var require_StorageSharedKeyCredentialPolicy = __commonJS({ * @param request - */ getCanonicalizedResourceString(request2) { - const path18 = (0, utils_common_js_1.getURLPath)(request2.url) || "/"; + const path19 = (0, utils_common_js_1.getURLPath)(request2.url) || "/"; let canonicalizedResourceString = ""; - canonicalizedResourceString += `/${this.factory.accountName}${path18}`; + canonicalizedResourceString += `/${this.factory.accountName}${path19}`; const queries = (0, utils_common_js_1.getURLQueries)(request2.url); const lowercaseQueries = {}; if (queries) { @@ -65185,10 +65185,10 @@ var require_utils_common2 = __commonJS({ var constants_js_1 = require_constants16(); function escapeURLPath(url2) { const urlParsed = new URL(url2); - let path18 = urlParsed.pathname; - path18 = path18 || "/"; - path18 = escape(path18); - urlParsed.pathname = path18; + let path19 = urlParsed.pathname; + path19 = path19 || "/"; + path19 = escape(path19); + urlParsed.pathname = path19; return urlParsed.toString(); } function getProxyUriFromDevConnString(connectionString) { @@ -65273,9 +65273,9 @@ var require_utils_common2 = __commonJS({ } function appendToURLPath(url2, name) { const urlParsed = new URL(url2); - let path18 = urlParsed.pathname; - path18 = path18 ? path18.endsWith("/") ? `${path18}${name}` : `${path18}/${name}` : name; - urlParsed.pathname = path18; + let path19 = urlParsed.pathname; + path19 = path19 ? path19.endsWith("/") ? 
`${path19}${name}` : `${path19}/${name}` : name; + urlParsed.pathname = path19; return urlParsed.toString(); } function setURLParameter(url2, name, value) { @@ -66196,9 +66196,9 @@ var require_StorageSharedKeyCredentialPolicy2 = __commonJS({ * @param request - */ getCanonicalizedResourceString(request2) { - const path18 = (0, utils_common_js_1.getURLPath)(request2.url) || "/"; + const path19 = (0, utils_common_js_1.getURLPath)(request2.url) || "/"; let canonicalizedResourceString = ""; - canonicalizedResourceString += `/${this.factory.accountName}${path18}`; + canonicalizedResourceString += `/${this.factory.accountName}${path19}`; const queries = (0, utils_common_js_1.getURLQueries)(request2.url); const lowercaseQueries = {}; if (queries) { @@ -66828,9 +66828,9 @@ var require_StorageSharedKeyCredentialPolicyV2 = __commonJS({ return canonicalizedHeadersStringToSign; } function getCanonicalizedResourceString(request2) { - const path18 = (0, utils_common_js_1.getURLPath)(request2.url) || "/"; + const path19 = (0, utils_common_js_1.getURLPath)(request2.url) || "/"; let canonicalizedResourceString = ""; - canonicalizedResourceString += `/${options.accountName}${path18}`; + canonicalizedResourceString += `/${options.accountName}${path19}`; const queries = (0, utils_common_js_1.getURLQueries)(request2.url); const lowercaseQueries = {}; if (queries) { @@ -67175,9 +67175,9 @@ var require_StorageSharedKeyCredentialPolicyV22 = __commonJS({ return canonicalizedHeadersStringToSign; } function getCanonicalizedResourceString(request2) { - const path18 = (0, utils_common_js_1.getURLPath)(request2.url) || "/"; + const path19 = (0, utils_common_js_1.getURLPath)(request2.url) || "/"; let canonicalizedResourceString = ""; - canonicalizedResourceString += `/${options.accountName}${path18}`; + canonicalizedResourceString += `/${options.accountName}${path19}`; const queries = (0, utils_common_js_1.getURLQueries)(request2.url); const lowercaseQueries = {}; if (queries) { @@ -88832,8 
+88832,8 @@ var require_BlobBatch = __commonJS({ if (this.operationCount >= constants_js_1.BATCH_MAX_REQUEST) { throw new RangeError(`Cannot exceed ${constants_js_1.BATCH_MAX_REQUEST} sub requests in a single batch`); } - const path18 = (0, utils_common_js_1.getURLPath)(subRequest.url); - if (!path18 || path18 === "") { + const path19 = (0, utils_common_js_1.getURLPath)(subRequest.url); + if (!path19 || path19 === "") { throw new RangeError(`Invalid url for sub request: '${subRequest.url}'`); } } @@ -88911,8 +88911,8 @@ var require_BlobBatchClient = __commonJS({ pipeline = (0, Pipeline_js_1.newPipeline)(credentialOrPipeline, options); } const storageClientContext = new StorageContextClient_js_1.StorageContextClient(url2, (0, Pipeline_js_1.getCoreClientOptions)(pipeline)); - const path18 = (0, utils_common_js_1.getURLPath)(url2); - if (path18 && path18 !== "/") { + const path19 = (0, utils_common_js_1.getURLPath)(url2); + if (path19 && path19 !== "/") { this.serviceOrContainerContext = storageClientContext.container; } else { this.serviceOrContainerContext = storageClientContext.service; @@ -98221,7 +98221,7 @@ var require_tar = __commonJS({ var exec_1 = require_exec(); var io7 = __importStar2(require_io()); var fs_1 = require("fs"); - var path18 = __importStar2(require("path")); + var path19 = __importStar2(require("path")); var utils = __importStar2(require_cacheUtils()); var constants_1 = require_constants12(); var IS_WINDOWS = process.platform === "win32"; @@ -98267,13 +98267,13 @@ var require_tar = __commonJS({ const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD && compressionMethod !== constants_1.CompressionMethod.Gzip && IS_WINDOWS; switch (type2) { case "create": - args.push("--posix", "-cf", BSD_TAR_ZSTD ? tarFile : cacheFileName.replace(new RegExp(`\\${path18.sep}`, "g"), "/"), "--exclude", BSD_TAR_ZSTD ? 
tarFile : cacheFileName.replace(new RegExp(`\\${path18.sep}`, "g"), "/"), "-P", "-C", workingDirectory.replace(new RegExp(`\\${path18.sep}`, "g"), "/"), "--files-from", constants_1.ManifestFilename); + args.push("--posix", "-cf", BSD_TAR_ZSTD ? tarFile : cacheFileName.replace(new RegExp(`\\${path19.sep}`, "g"), "/"), "--exclude", BSD_TAR_ZSTD ? tarFile : cacheFileName.replace(new RegExp(`\\${path19.sep}`, "g"), "/"), "-P", "-C", workingDirectory.replace(new RegExp(`\\${path19.sep}`, "g"), "/"), "--files-from", constants_1.ManifestFilename); break; case "extract": - args.push("-xf", BSD_TAR_ZSTD ? tarFile : archivePath.replace(new RegExp(`\\${path18.sep}`, "g"), "/"), "-P", "-C", workingDirectory.replace(new RegExp(`\\${path18.sep}`, "g"), "/")); + args.push("-xf", BSD_TAR_ZSTD ? tarFile : archivePath.replace(new RegExp(`\\${path19.sep}`, "g"), "/"), "-P", "-C", workingDirectory.replace(new RegExp(`\\${path19.sep}`, "g"), "/")); break; case "list": - args.push("-tf", BSD_TAR_ZSTD ? tarFile : archivePath.replace(new RegExp(`\\${path18.sep}`, "g"), "/"), "-P"); + args.push("-tf", BSD_TAR_ZSTD ? tarFile : archivePath.replace(new RegExp(`\\${path19.sep}`, "g"), "/"), "-P"); break; } if (tarPath.type === constants_1.ArchiveToolType.GNU) { @@ -98319,7 +98319,7 @@ var require_tar = __commonJS({ return BSD_TAR_ZSTD ? [ "zstd -d --long=30 --force -o", constants_1.TarFilename, - archivePath.replace(new RegExp(`\\${path18.sep}`, "g"), "/") + archivePath.replace(new RegExp(`\\${path19.sep}`, "g"), "/") ] : [ "--use-compress-program", IS_WINDOWS ? '"zstd -d --long=30"' : "unzstd --long=30" @@ -98328,7 +98328,7 @@ var require_tar = __commonJS({ return BSD_TAR_ZSTD ? [ "zstd -d --force -o", constants_1.TarFilename, - archivePath.replace(new RegExp(`\\${path18.sep}`, "g"), "/") + archivePath.replace(new RegExp(`\\${path19.sep}`, "g"), "/") ] : ["--use-compress-program", IS_WINDOWS ? 
'"zstd -d"' : "unzstd"]; default: return ["-z"]; @@ -98343,7 +98343,7 @@ var require_tar = __commonJS({ case constants_1.CompressionMethod.Zstd: return BSD_TAR_ZSTD ? [ "zstd -T0 --long=30 --force -o", - cacheFileName.replace(new RegExp(`\\${path18.sep}`, "g"), "/"), + cacheFileName.replace(new RegExp(`\\${path19.sep}`, "g"), "/"), constants_1.TarFilename ] : [ "--use-compress-program", @@ -98352,7 +98352,7 @@ var require_tar = __commonJS({ case constants_1.CompressionMethod.ZstdWithoutLong: return BSD_TAR_ZSTD ? [ "zstd -T0 --force -o", - cacheFileName.replace(new RegExp(`\\${path18.sep}`, "g"), "/"), + cacheFileName.replace(new RegExp(`\\${path19.sep}`, "g"), "/"), constants_1.TarFilename ] : ["--use-compress-program", IS_WINDOWS ? '"zstd -T0"' : "zstdmt"]; default: @@ -98390,7 +98390,7 @@ var require_tar = __commonJS({ } function createTar(archiveFolder, sourceDirectories, compressionMethod) { return __awaiter2(this, void 0, void 0, function* () { - (0, fs_1.writeFileSync)(path18.join(archiveFolder, constants_1.ManifestFilename), sourceDirectories.join("\n")); + (0, fs_1.writeFileSync)(path19.join(archiveFolder, constants_1.ManifestFilename), sourceDirectories.join("\n")); const commands = yield getCommands(compressionMethod, "create"); yield execCommands(commands, archiveFolder); }); @@ -98472,7 +98472,7 @@ var require_cache5 = __commonJS({ exports2.restoreCache = restoreCache5; exports2.saveCache = saveCache5; var core17 = __importStar2(require_core()); - var path18 = __importStar2(require("path")); + var path19 = __importStar2(require("path")); var utils = __importStar2(require_cacheUtils()); var cacheHttpClient = __importStar2(require_cacheHttpClient()); var cacheTwirpClient = __importStar2(require_cacheTwirpClient()); @@ -98567,7 +98567,7 @@ var require_cache5 = __commonJS({ core17.info("Lookup only - skipping download"); return cacheEntry.cacheKey; } - archivePath = path18.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); 
+ archivePath = path19.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); core17.debug(`Archive Path: ${archivePath}`); yield cacheHttpClient.downloadCache(cacheEntry.archiveLocation, archivePath, options); if (core17.isDebug()) { @@ -98636,7 +98636,7 @@ var require_cache5 = __commonJS({ core17.info("Lookup only - skipping download"); return response.matchedKey; } - archivePath = path18.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); + archivePath = path19.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); core17.debug(`Archive path: ${archivePath}`); core17.debug(`Starting download of archive to: ${archivePath}`); yield cacheHttpClient.downloadCache(response.signedDownloadUrl, archivePath, options); @@ -98698,7 +98698,7 @@ var require_cache5 = __commonJS({ throw new Error(`Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`); } const archiveFolder = yield utils.createTempDirectory(); - const archivePath = path18.join(archiveFolder, utils.getCacheFileName(compressionMethod)); + const archivePath = path19.join(archiveFolder, utils.getCacheFileName(compressionMethod)); core17.debug(`Archive Path: ${archivePath}`); try { yield (0, tar_1.createTar)(archiveFolder, cachePaths, compressionMethod); @@ -98762,7 +98762,7 @@ var require_cache5 = __commonJS({ throw new Error(`Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`); } const archiveFolder = yield utils.createTempDirectory(); - const archivePath = path18.join(archiveFolder, utils.getCacheFileName(compressionMethod)); + const archivePath = path19.join(archiveFolder, utils.getCacheFileName(compressionMethod)); core17.debug(`Archive Path: ${archivePath}`); try { yield (0, tar_1.createTar)(archiveFolder, cachePaths, compressionMethod); @@ -99189,7 +99189,7 @@ var require_tool_cache = 
__commonJS({ var fs19 = __importStar2(require("fs")); var mm = __importStar2(require_manifest()); var os4 = __importStar2(require("os")); - var path18 = __importStar2(require("path")); + var path19 = __importStar2(require("path")); var httpm = __importStar2(require_lib()); var semver9 = __importStar2(require_semver2()); var stream2 = __importStar2(require("stream")); @@ -99210,8 +99210,8 @@ var require_tool_cache = __commonJS({ var userAgent2 = "actions/tool-cache"; function downloadTool2(url2, dest, auth2, headers) { return __awaiter2(this, void 0, void 0, function* () { - dest = dest || path18.join(_getTempDirectory(), crypto2.randomUUID()); - yield io7.mkdirP(path18.dirname(dest)); + dest = dest || path19.join(_getTempDirectory(), crypto2.randomUUID()); + yield io7.mkdirP(path19.dirname(dest)); core17.debug(`Downloading ${url2}`); core17.debug(`Destination ${dest}`); const maxAttempts = 3; @@ -99301,7 +99301,7 @@ var require_tool_cache = __commonJS({ process.chdir(originalCwd); } } else { - const escapedScript = path18.join(__dirname, "..", "scripts", "Invoke-7zdec.ps1").replace(/'/g, "''").replace(/"|\n|\r/g, ""); + const escapedScript = path19.join(__dirname, "..", "scripts", "Invoke-7zdec.ps1").replace(/'/g, "''").replace(/"|\n|\r/g, ""); const escapedFile = file.replace(/'/g, "''").replace(/"|\n|\r/g, ""); const escapedTarget = dest.replace(/'/g, "''").replace(/"|\n|\r/g, ""); const command = `& '${escapedScript}' -Source '${escapedFile}' -Target '${escapedTarget}'`; @@ -99473,7 +99473,7 @@ var require_tool_cache = __commonJS({ } const destPath = yield _createToolPath(tool, version, arch2); for (const itemName of fs19.readdirSync(sourceDir)) { - const s = path18.join(sourceDir, itemName); + const s = path19.join(sourceDir, itemName); yield io7.cp(s, destPath, { recursive: true }); } _completeToolPath(tool, version, arch2); @@ -99490,7 +99490,7 @@ var require_tool_cache = __commonJS({ throw new Error("sourceFile is not a file"); } const destFolder = yield 
_createToolPath(tool, version, arch2); - const destPath = path18.join(destFolder, targetFile); + const destPath = path19.join(destFolder, targetFile); core17.debug(`destination file ${destPath}`); yield io7.cp(sourceFile, destPath); _completeToolPath(tool, version, arch2); @@ -99513,7 +99513,7 @@ var require_tool_cache = __commonJS({ let toolPath = ""; if (versionSpec) { versionSpec = semver9.clean(versionSpec) || ""; - const cachePath = path18.join(_getCacheDirectory(), toolName, versionSpec, arch2); + const cachePath = path19.join(_getCacheDirectory(), toolName, versionSpec, arch2); core17.debug(`checking cache: ${cachePath}`); if (fs19.existsSync(cachePath) && fs19.existsSync(`${cachePath}.complete`)) { core17.debug(`Found tool in cache ${toolName} ${versionSpec} ${arch2}`); @@ -99527,12 +99527,12 @@ var require_tool_cache = __commonJS({ function findAllVersions2(toolName, arch2) { const versions = []; arch2 = arch2 || os4.arch(); - const toolPath = path18.join(_getCacheDirectory(), toolName); + const toolPath = path19.join(_getCacheDirectory(), toolName); if (fs19.existsSync(toolPath)) { const children = fs19.readdirSync(toolPath); for (const child of children) { if (isExplicitVersion(child)) { - const fullPath = path18.join(toolPath, child, arch2 || ""); + const fullPath = path19.join(toolPath, child, arch2 || ""); if (fs19.existsSync(fullPath) && fs19.existsSync(`${fullPath}.complete`)) { versions.push(child); } @@ -99584,7 +99584,7 @@ var require_tool_cache = __commonJS({ function _createExtractFolder(dest) { return __awaiter2(this, void 0, void 0, function* () { if (!dest) { - dest = path18.join(_getTempDirectory(), crypto2.randomUUID()); + dest = path19.join(_getTempDirectory(), crypto2.randomUUID()); } yield io7.mkdirP(dest); return dest; @@ -99592,7 +99592,7 @@ var require_tool_cache = __commonJS({ } function _createToolPath(tool, version, arch2) { return __awaiter2(this, void 0, void 0, function* () { - const folderPath = 
path18.join(_getCacheDirectory(), tool, semver9.clean(version) || version, arch2 || ""); + const folderPath = path19.join(_getCacheDirectory(), tool, semver9.clean(version) || version, arch2 || ""); core17.debug(`destination ${folderPath}`); const markerPath = `${folderPath}.complete`; yield io7.rmRF(folderPath); @@ -99602,7 +99602,7 @@ var require_tool_cache = __commonJS({ }); } function _completeToolPath(tool, version, arch2) { - const folderPath = path18.join(_getCacheDirectory(), tool, semver9.clean(version) || version, arch2 || ""); + const folderPath = path19.join(_getCacheDirectory(), tool, semver9.clean(version) || version, arch2 || ""); const markerPath = `${folderPath}.complete`; fs19.writeFileSync(markerPath, ""); core17.debug("finished caching tool"); @@ -102384,13 +102384,13 @@ These characters are not allowed in the artifact name due to limitations with ce (0, core_1.info)(`Artifact name is valid!`); } exports2.validateArtifactName = validateArtifactName; - function validateFilePath(path18) { - if (!path18) { + function validateFilePath(path19) { + if (!path19) { throw new Error(`Provided file path input during validation is empty`); } for (const [invalidCharacterKey, errorMessageForCharacter] of invalidArtifactFilePathCharacters) { - if (path18.includes(invalidCharacterKey)) { - throw new Error(`The path for one of the files in artifact is not valid: ${path18}. Contains the following character: ${errorMessageForCharacter} + if (path19.includes(invalidCharacterKey)) { + throw new Error(`The path for one of the files in artifact is not valid: ${path19}. 
Contains the following character: ${errorMessageForCharacter} Invalid characters include: ${Array.from(invalidArtifactFilePathCharacters.values()).toString()} @@ -103291,8 +103291,8 @@ var require_minimatch2 = __commonJS({ return new Minimatch(pattern, options).match(p); }; module2.exports = minimatch; - var path18 = require_path(); - minimatch.sep = path18.sep; + var path19 = require_path(); + minimatch.sep = path19.sep; var GLOBSTAR = /* @__PURE__ */ Symbol("globstar **"); minimatch.GLOBSTAR = GLOBSTAR; var expand2 = require_brace_expansion2(); @@ -103802,8 +103802,8 @@ var require_minimatch2 = __commonJS({ if (this.empty) return f === ""; if (f === "/" && partial) return true; const options = this.options; - if (path18.sep !== "/") { - f = f.split(path18.sep).join("/"); + if (path19.sep !== "/") { + f = f.split(path19.sep).join("/"); } f = f.split(slashSplit); this.debug(this.pattern, "split", f); @@ -103901,8 +103901,8 @@ var require_readdir_glob = __commonJS({ }); }); } - async function* exploreWalkAsync(dir, path18, followSymlinks, useStat, shouldSkip, strict) { - let files = await readdir(path18 + dir, strict); + async function* exploreWalkAsync(dir, path19, followSymlinks, useStat, shouldSkip, strict) { + let files = await readdir(path19 + dir, strict); for (const file of files) { let name = file.name; if (name === void 0) { @@ -103911,7 +103911,7 @@ var require_readdir_glob = __commonJS({ } const filename = dir + "/" + name; const relative2 = filename.slice(1); - const absolute = path18 + "/" + relative2; + const absolute = path19 + "/" + relative2; let stats = null; if (useStat || followSymlinks) { stats = await stat(absolute, followSymlinks); @@ -103925,15 +103925,15 @@ var require_readdir_glob = __commonJS({ if (stats.isDirectory()) { if (!shouldSkip(relative2)) { yield { relative: relative2, absolute, stats }; - yield* exploreWalkAsync(filename, path18, followSymlinks, useStat, shouldSkip, false); + yield* exploreWalkAsync(filename, path19, 
followSymlinks, useStat, shouldSkip, false); } } else { yield { relative: relative2, absolute, stats }; } } } - async function* explore(path18, followSymlinks, useStat, shouldSkip) { - yield* exploreWalkAsync("", path18, followSymlinks, useStat, shouldSkip, true); + async function* explore(path19, followSymlinks, useStat, shouldSkip) { + yield* exploreWalkAsync("", path19, followSymlinks, useStat, shouldSkip, true); } function readOptions(options) { return { @@ -104471,8 +104471,8 @@ var require_async = __commonJS({ return callback(null, results); } while (readyTasks.length && runningTasks < concurrency) { - var run3 = readyTasks.shift(); - run3(); + var run2 = readyTasks.shift(); + run2(); } } function addListener(taskName, fn) { @@ -105971,14 +105971,14 @@ var require_polyfills = __commonJS({ fs19.fstatSync = statFixSync(fs19.fstatSync); fs19.lstatSync = statFixSync(fs19.lstatSync); if (fs19.chmod && !fs19.lchmod) { - fs19.lchmod = function(path18, mode, cb) { + fs19.lchmod = function(path19, mode, cb) { if (cb) process.nextTick(cb); }; fs19.lchmodSync = function() { }; } if (fs19.chown && !fs19.lchown) { - fs19.lchown = function(path18, uid, gid, cb) { + fs19.lchown = function(path19, uid, gid, cb) { if (cb) process.nextTick(cb); }; fs19.lchownSync = function() { @@ -106045,9 +106045,9 @@ var require_polyfills = __commonJS({ }; })(fs19.readSync); function patchLchmod(fs20) { - fs20.lchmod = function(path18, mode, callback) { + fs20.lchmod = function(path19, mode, callback) { fs20.open( - path18, + path19, constants.O_WRONLY | constants.O_SYMLINK, mode, function(err, fd) { @@ -106063,8 +106063,8 @@ var require_polyfills = __commonJS({ } ); }; - fs20.lchmodSync = function(path18, mode) { - var fd = fs20.openSync(path18, constants.O_WRONLY | constants.O_SYMLINK, mode); + fs20.lchmodSync = function(path19, mode) { + var fd = fs20.openSync(path19, constants.O_WRONLY | constants.O_SYMLINK, mode); var threw = true; var ret; try { @@ -106085,8 +106085,8 @@ var 
require_polyfills = __commonJS({ } function patchLutimes(fs20) { if (constants.hasOwnProperty("O_SYMLINK") && fs20.futimes) { - fs20.lutimes = function(path18, at, mt, cb) { - fs20.open(path18, constants.O_SYMLINK, function(er, fd) { + fs20.lutimes = function(path19, at, mt, cb) { + fs20.open(path19, constants.O_SYMLINK, function(er, fd) { if (er) { if (cb) cb(er); return; @@ -106098,8 +106098,8 @@ var require_polyfills = __commonJS({ }); }); }; - fs20.lutimesSync = function(path18, at, mt) { - var fd = fs20.openSync(path18, constants.O_SYMLINK); + fs20.lutimesSync = function(path19, at, mt) { + var fd = fs20.openSync(path19, constants.O_SYMLINK); var ret; var threw = true; try { @@ -106217,11 +106217,11 @@ var require_legacy_streams = __commonJS({ ReadStream, WriteStream }; - function ReadStream(path18, options) { - if (!(this instanceof ReadStream)) return new ReadStream(path18, options); + function ReadStream(path19, options) { + if (!(this instanceof ReadStream)) return new ReadStream(path19, options); Stream.call(this); var self2 = this; - this.path = path18; + this.path = path19; this.fd = null; this.readable = true; this.paused = false; @@ -106266,10 +106266,10 @@ var require_legacy_streams = __commonJS({ self2._read(); }); } - function WriteStream(path18, options) { - if (!(this instanceof WriteStream)) return new WriteStream(path18, options); + function WriteStream(path19, options) { + if (!(this instanceof WriteStream)) return new WriteStream(path19, options); Stream.call(this); - this.path = path18; + this.path = path19; this.fd = null; this.writable = true; this.flags = "w"; @@ -106412,14 +106412,14 @@ var require_graceful_fs = __commonJS({ fs20.createWriteStream = createWriteStream3; var fs$readFile = fs20.readFile; fs20.readFile = readFile; - function readFile(path18, options, cb) { + function readFile(path19, options, cb) { if (typeof options === "function") cb = options, options = null; - return go$readFile(path18, options, cb); - function 
go$readFile(path19, options2, cb2, startTime) { - return fs$readFile(path19, options2, function(err) { + return go$readFile(path19, options, cb); + function go$readFile(path20, options2, cb2, startTime) { + return fs$readFile(path20, options2, function(err) { if (err && (err.code === "EMFILE" || err.code === "ENFILE")) - enqueue([go$readFile, [path19, options2, cb2], err, startTime || Date.now(), Date.now()]); + enqueue([go$readFile, [path20, options2, cb2], err, startTime || Date.now(), Date.now()]); else { if (typeof cb2 === "function") cb2.apply(this, arguments); @@ -106429,14 +106429,14 @@ var require_graceful_fs = __commonJS({ } var fs$writeFile = fs20.writeFile; fs20.writeFile = writeFile; - function writeFile(path18, data, options, cb) { + function writeFile(path19, data, options, cb) { if (typeof options === "function") cb = options, options = null; - return go$writeFile(path18, data, options, cb); - function go$writeFile(path19, data2, options2, cb2, startTime) { - return fs$writeFile(path19, data2, options2, function(err) { + return go$writeFile(path19, data, options, cb); + function go$writeFile(path20, data2, options2, cb2, startTime) { + return fs$writeFile(path20, data2, options2, function(err) { if (err && (err.code === "EMFILE" || err.code === "ENFILE")) - enqueue([go$writeFile, [path19, data2, options2, cb2], err, startTime || Date.now(), Date.now()]); + enqueue([go$writeFile, [path20, data2, options2, cb2], err, startTime || Date.now(), Date.now()]); else { if (typeof cb2 === "function") cb2.apply(this, arguments); @@ -106447,14 +106447,14 @@ var require_graceful_fs = __commonJS({ var fs$appendFile = fs20.appendFile; if (fs$appendFile) fs20.appendFile = appendFile; - function appendFile(path18, data, options, cb) { + function appendFile(path19, data, options, cb) { if (typeof options === "function") cb = options, options = null; - return go$appendFile(path18, data, options, cb); - function go$appendFile(path19, data2, options2, cb2, startTime) { - 
return fs$appendFile(path19, data2, options2, function(err) { + return go$appendFile(path19, data, options, cb); + function go$appendFile(path20, data2, options2, cb2, startTime) { + return fs$appendFile(path20, data2, options2, function(err) { if (err && (err.code === "EMFILE" || err.code === "ENFILE")) - enqueue([go$appendFile, [path19, data2, options2, cb2], err, startTime || Date.now(), Date.now()]); + enqueue([go$appendFile, [path20, data2, options2, cb2], err, startTime || Date.now(), Date.now()]); else { if (typeof cb2 === "function") cb2.apply(this, arguments); @@ -106485,31 +106485,31 @@ var require_graceful_fs = __commonJS({ var fs$readdir = fs20.readdir; fs20.readdir = readdir; var noReaddirOptionVersions = /^v[0-5]\./; - function readdir(path18, options, cb) { + function readdir(path19, options, cb) { if (typeof options === "function") cb = options, options = null; - var go$readdir = noReaddirOptionVersions.test(process.version) ? function go$readdir2(path19, options2, cb2, startTime) { - return fs$readdir(path19, fs$readdirCallback( - path19, + var go$readdir = noReaddirOptionVersions.test(process.version) ? 
function go$readdir2(path20, options2, cb2, startTime) { + return fs$readdir(path20, fs$readdirCallback( + path20, options2, cb2, startTime )); - } : function go$readdir2(path19, options2, cb2, startTime) { - return fs$readdir(path19, options2, fs$readdirCallback( - path19, + } : function go$readdir2(path20, options2, cb2, startTime) { + return fs$readdir(path20, options2, fs$readdirCallback( + path20, options2, cb2, startTime )); }; - return go$readdir(path18, options, cb); - function fs$readdirCallback(path19, options2, cb2, startTime) { + return go$readdir(path19, options, cb); + function fs$readdirCallback(path20, options2, cb2, startTime) { return function(err, files) { if (err && (err.code === "EMFILE" || err.code === "ENFILE")) enqueue([ go$readdir, - [path19, options2, cb2], + [path20, options2, cb2], err, startTime || Date.now(), Date.now() @@ -106580,7 +106580,7 @@ var require_graceful_fs = __commonJS({ enumerable: true, configurable: true }); - function ReadStream(path18, options) { + function ReadStream(path19, options) { if (this instanceof ReadStream) return fs$ReadStream.apply(this, arguments), this; else @@ -106600,7 +106600,7 @@ var require_graceful_fs = __commonJS({ } }); } - function WriteStream(path18, options) { + function WriteStream(path19, options) { if (this instanceof WriteStream) return fs$WriteStream.apply(this, arguments), this; else @@ -106618,22 +106618,22 @@ var require_graceful_fs = __commonJS({ } }); } - function createReadStream2(path18, options) { - return new fs20.ReadStream(path18, options); + function createReadStream2(path19, options) { + return new fs20.ReadStream(path19, options); } - function createWriteStream3(path18, options) { - return new fs20.WriteStream(path18, options); + function createWriteStream3(path19, options) { + return new fs20.WriteStream(path19, options); } var fs$open = fs20.open; fs20.open = open; - function open(path18, flags, mode, cb) { + function open(path19, flags, mode, cb) { if (typeof mode === 
"function") cb = mode, mode = null; - return go$open(path18, flags, mode, cb); - function go$open(path19, flags2, mode2, cb2, startTime) { - return fs$open(path19, flags2, mode2, function(err, fd) { + return go$open(path19, flags, mode, cb); + function go$open(path20, flags2, mode2, cb2, startTime) { + return fs$open(path20, flags2, mode2, function(err, fd) { if (err && (err.code === "EMFILE" || err.code === "ENFILE")) - enqueue([go$open, [path19, flags2, mode2, cb2], err, startTime || Date.now(), Date.now()]); + enqueue([go$open, [path20, flags2, mode2, cb2], err, startTime || Date.now(), Date.now()]); else { if (typeof cb2 === "function") cb2.apply(this, arguments); @@ -108734,22 +108734,22 @@ var require_lazystream = __commonJS({ // node_modules/normalize-path/index.js var require_normalize_path = __commonJS({ "node_modules/normalize-path/index.js"(exports2, module2) { - module2.exports = function(path18, stripTrailing) { - if (typeof path18 !== "string") { + module2.exports = function(path19, stripTrailing) { + if (typeof path19 !== "string") { throw new TypeError("expected path to be a string"); } - if (path18 === "\\" || path18 === "/") return "/"; - var len = path18.length; - if (len <= 1) return path18; + if (path19 === "\\" || path19 === "/") return "/"; + var len = path19.length; + if (len <= 1) return path19; var prefix = ""; - if (len > 4 && path18[3] === "\\") { - var ch = path18[2]; - if ((ch === "?" || ch === ".") && path18.slice(0, 2) === "\\\\") { - path18 = path18.slice(2); + if (len > 4 && path19[3] === "\\") { + var ch = path19[2]; + if ((ch === "?" || ch === ".") && path19.slice(0, 2) === "\\\\") { + path19 = path19.slice(2); prefix = "//"; } } - var segs = path18.split(/[/\\]+/); + var segs = path19.split(/[/\\]+/); if (stripTrailing !== false && segs[segs.length - 1] === "") { segs.pop(); } @@ -117371,11 +117371,11 @@ var require_commonjs20 = __commonJS({ return (f) => f.length === len && f !== "." 
&& f !== ".."; }; var defaultPlatform = typeof process === "object" && process ? typeof process.env === "object" && process.env && process.env.__MINIMATCH_TESTING_PLATFORM__ || process.platform : "posix"; - var path18 = { + var path19 = { win32: { sep: "\\" }, posix: { sep: "/" } }; - exports2.sep = defaultPlatform === "win32" ? path18.win32.sep : path18.posix.sep; + exports2.sep = defaultPlatform === "win32" ? path19.win32.sep : path19.posix.sep; exports2.minimatch.sep = exports2.sep; exports2.GLOBSTAR = /* @__PURE__ */ Symbol("globstar **"); exports2.minimatch.GLOBSTAR = exports2.GLOBSTAR; @@ -120660,12 +120660,12 @@ var require_commonjs23 = __commonJS({ /** * Get the Path object referenced by the string path, resolved from this Path */ - resolve(path18) { - if (!path18) { + resolve(path19) { + if (!path19) { return this; } - const rootPath = this.getRootString(path18); - const dir = path18.substring(rootPath.length); + const rootPath = this.getRootString(path19); + const dir = path19.substring(rootPath.length); const dirParts = dir.split(this.splitSep); const result = rootPath ? this.getRoot(rootPath).#resolveParts(dirParts) : this.#resolveParts(dirParts); return result; @@ -121418,8 +121418,8 @@ var require_commonjs23 = __commonJS({ /** * @internal */ - getRootString(path18) { - return node_path_1.win32.parse(path18).root; + getRootString(path19) { + return node_path_1.win32.parse(path19).root; } /** * @internal @@ -121466,8 +121466,8 @@ var require_commonjs23 = __commonJS({ /** * @internal */ - getRootString(path18) { - return path18.startsWith("/") ? "/" : ""; + getRootString(path19) { + return path19.startsWith("/") ? 
"/" : ""; } /** * @internal @@ -121557,11 +121557,11 @@ var require_commonjs23 = __commonJS({ /** * Get the depth of a provided path, string, or the cwd */ - depth(path18 = this.cwd) { - if (typeof path18 === "string") { - path18 = this.cwd.resolve(path18); + depth(path19 = this.cwd) { + if (typeof path19 === "string") { + path19 = this.cwd.resolve(path19); } - return path18.depth(); + return path19.depth(); } /** * Return the cache of child entries. Exposed so subclasses can create @@ -122048,9 +122048,9 @@ var require_commonjs23 = __commonJS({ process2(); return results; } - chdir(path18 = this.cwd) { + chdir(path19 = this.cwd) { const oldCwd = this.cwd; - this.cwd = typeof path18 === "string" ? this.cwd.resolve(path18) : path18; + this.cwd = typeof path19 === "string" ? this.cwd.resolve(path19) : path19; this.cwd[setAsCwd](oldCwd); } }; @@ -122438,8 +122438,8 @@ var require_processor = __commonJS({ } // match, absolute, ifdir entries() { - return [...this.store.entries()].map(([path18, n]) => [ - path18, + return [...this.store.entries()].map(([path19, n]) => [ + path19, !!(n & 2), !!(n & 1) ]); @@ -122657,9 +122657,9 @@ var require_walker = __commonJS({ signal; maxDepth; includeChildMatches; - constructor(patterns, path18, opts) { + constructor(patterns, path19, opts) { this.patterns = patterns; - this.path = path18; + this.path = path19; this.opts = opts; this.#sep = !opts.posix && opts.platform === "win32" ? 
"\\" : "/"; this.includeChildMatches = opts.includeChildMatches !== false; @@ -122678,11 +122678,11 @@ var require_walker = __commonJS({ }); } } - #ignored(path18) { - return this.seen.has(path18) || !!this.#ignore?.ignored?.(path18); + #ignored(path19) { + return this.seen.has(path19) || !!this.#ignore?.ignored?.(path19); } - #childrenIgnored(path18) { - return !!this.#ignore?.childrenIgnored?.(path18); + #childrenIgnored(path19) { + return !!this.#ignore?.childrenIgnored?.(path19); } // backpressure mechanism pause() { @@ -122898,8 +122898,8 @@ var require_walker = __commonJS({ exports2.GlobUtil = GlobUtil; var GlobWalker = class extends GlobUtil { matches = /* @__PURE__ */ new Set(); - constructor(patterns, path18, opts) { - super(patterns, path18, opts); + constructor(patterns, path19, opts) { + super(patterns, path19, opts); } matchEmit(e) { this.matches.add(e); @@ -122937,8 +122937,8 @@ var require_walker = __commonJS({ exports2.GlobWalker = GlobWalker; var GlobStream = class extends GlobUtil { results; - constructor(patterns, path18, opts) { - super(patterns, path18, opts); + constructor(patterns, path19, opts) { + super(patterns, path19, opts); this.results = new minipass_1.Minipass({ signal: this.signal, objectMode: true @@ -123294,7 +123294,7 @@ var require_commonjs24 = __commonJS({ var require_file4 = __commonJS({ "node_modules/archiver-utils/file.js"(exports2, module2) { var fs19 = require_graceful_fs(); - var path18 = require("path"); + var path19 = require("path"); var flatten = require_flatten(); var difference = require_difference(); var union = require_union(); @@ -123319,7 +123319,7 @@ var require_file4 = __commonJS({ return result; }; file.exists = function() { - var filepath = path18.join.apply(path18, arguments); + var filepath = path19.join.apply(path19, arguments); return fs19.existsSync(filepath); }; file.expand = function(...args) { @@ -123333,7 +123333,7 @@ var require_file4 = __commonJS({ }); if (options.filter) { matches = 
matches.filter(function(filepath) { - filepath = path18.join(options.cwd || "", filepath); + filepath = path19.join(options.cwd || "", filepath); try { if (typeof options.filter === "function") { return options.filter(filepath); @@ -123350,7 +123350,7 @@ var require_file4 = __commonJS({ file.expandMapping = function(patterns, destBase, options) { options = Object.assign({ rename: function(destBase2, destPath) { - return path18.join(destBase2 || "", destPath); + return path19.join(destBase2 || "", destPath); } }, options); var files = []; @@ -123358,14 +123358,14 @@ var require_file4 = __commonJS({ file.expand(options, patterns).forEach(function(src) { var destPath = src; if (options.flatten) { - destPath = path18.basename(destPath); + destPath = path19.basename(destPath); } if (options.ext) { destPath = destPath.replace(/(\.[^\/]*)?$/, options.ext); } var dest = options.rename(destBase, destPath, options); if (options.cwd) { - src = path18.join(options.cwd, src); + src = path19.join(options.cwd, src); } dest = dest.replace(pathSeparatorRe, "/"); src = src.replace(pathSeparatorRe, "/"); @@ -123447,7 +123447,7 @@ var require_file4 = __commonJS({ var require_archiver_utils = __commonJS({ "node_modules/archiver-utils/index.js"(exports2, module2) { var fs19 = require_graceful_fs(); - var path18 = require("path"); + var path19 = require("path"); var isStream = require_is_stream(); var lazystream = require_lazystream(); var normalizePath = require_normalize_path(); @@ -123535,11 +123535,11 @@ var require_archiver_utils = __commonJS({ if (!file) { return callback(null, results); } - filepath = path18.join(dirpath, file); + filepath = path19.join(dirpath, file); fs19.stat(filepath, function(err2, stats) { results.push({ path: filepath, - relative: path18.relative(base, filepath).replace(/\\/g, "/"), + relative: path19.relative(base, filepath).replace(/\\/g, "/"), stats }); if (stats && stats.isDirectory()) { @@ -123601,7 +123601,7 @@ var require_core2 = __commonJS({ var 
fs19 = require("fs"); var glob2 = require_readdir_glob(); var async = require_async(); - var path18 = require("path"); + var path19 = require("path"); var util = require_archiver_utils(); var inherits = require("util").inherits; var ArchiverError = require_error3(); @@ -123877,9 +123877,9 @@ var require_core2 = __commonJS({ task.source = Buffer.concat([]); } else if (stats.isSymbolicLink() && this._moduleSupports("symlink")) { var linkPath = fs19.readlinkSync(task.filepath); - var dirName = path18.dirname(task.filepath); + var dirName = path19.dirname(task.filepath); task.data.type = "symlink"; - task.data.linkname = path18.relative(dirName, path18.resolve(dirName, linkPath)); + task.data.linkname = path19.relative(dirName, path19.resolve(dirName, linkPath)); task.data.sourceType = "buffer"; task.source = Buffer.concat([]); } else { @@ -128329,8 +128329,8 @@ var require_context2 = __commonJS({ if ((0, fs_1.existsSync)(process.env.GITHUB_EVENT_PATH)) { this.payload = JSON.parse((0, fs_1.readFileSync)(process.env.GITHUB_EVENT_PATH, { encoding: "utf8" })); } else { - const path18 = process.env.GITHUB_EVENT_PATH; - process.stdout.write(`GITHUB_EVENT_PATH ${path18} does not exist${os_1.EOL}`); + const path19 = process.env.GITHUB_EVENT_PATH; + process.stdout.write(`GITHUB_EVENT_PATH ${path19} does not exist${os_1.EOL}`); } } this.eventName = process.env.GITHUB_EVENT_NAME; @@ -128915,14 +128915,14 @@ var require_util24 = __commonJS({ } const port = url2.port != null ? url2.port : url2.protocol === "https:" ? 443 : 80; let origin = url2.origin != null ? url2.origin : `${url2.protocol}//${url2.hostname}:${port}`; - let path18 = url2.path != null ? url2.path : `${url2.pathname || ""}${url2.search || ""}`; + let path19 = url2.path != null ? 
url2.path : `${url2.pathname || ""}${url2.search || ""}`; if (origin.endsWith("/")) { origin = origin.substring(0, origin.length - 1); } - if (path18 && !path18.startsWith("/")) { - path18 = `/${path18}`; + if (path19 && !path19.startsWith("/")) { + path19 = `/${path19}`; } - url2 = new URL(origin + path18); + url2 = new URL(origin + path19); } return url2; } @@ -130536,20 +130536,20 @@ var require_parseParams = __commonJS({ var require_basename = __commonJS({ "node_modules/@fastify/busboy/lib/utils/basename.js"(exports2, module2) { "use strict"; - module2.exports = function basename2(path18) { - if (typeof path18 !== "string") { + module2.exports = function basename2(path19) { + if (typeof path19 !== "string") { return ""; } - for (var i = path18.length - 1; i >= 0; --i) { - switch (path18.charCodeAt(i)) { + for (var i = path19.length - 1; i >= 0; --i) { + switch (path19.charCodeAt(i)) { case 47: // '/' case 92: - path18 = path18.slice(i + 1); - return path18 === ".." || path18 === "." ? "" : path18; + path19 = path19.slice(i + 1); + return path19 === ".." || path19 === "." ? "" : path19; } } - return path18 === ".." || path18 === "." ? "" : path18; + return path19 === ".." || path19 === "." ? 
"" : path19; }; } }); @@ -133579,7 +133579,7 @@ var require_request5 = __commonJS({ } var Request = class _Request { constructor(origin, { - path: path18, + path: path19, method, body, headers, @@ -133593,11 +133593,11 @@ var require_request5 = __commonJS({ throwOnError, expectContinue }, handler2) { - if (typeof path18 !== "string") { + if (typeof path19 !== "string") { throw new InvalidArgumentError("path must be a string"); - } else if (path18[0] !== "/" && !(path18.startsWith("http://") || path18.startsWith("https://")) && method !== "CONNECT") { + } else if (path19[0] !== "/" && !(path19.startsWith("http://") || path19.startsWith("https://")) && method !== "CONNECT") { throw new InvalidArgumentError("path must be an absolute URL or start with a slash"); - } else if (invalidPathRegex.exec(path18) !== null) { + } else if (invalidPathRegex.exec(path19) !== null) { throw new InvalidArgumentError("invalid request path"); } if (typeof method !== "string") { @@ -133660,7 +133660,7 @@ var require_request5 = __commonJS({ this.completed = false; this.aborted = false; this.upgrade = upgrade || null; - this.path = query ? util.buildURL(path18, query) : path18; + this.path = query ? util.buildURL(path19, query) : path19; this.origin = origin; this.idempotent = idempotent == null ? method === "HEAD" || method === "GET" : idempotent; this.blocking = blocking == null ? false : blocking; @@ -134668,9 +134668,9 @@ var require_RedirectHandler = __commonJS({ return this.handler.onHeaders(statusCode, headers, resume, statusText); } const { origin, pathname, search } = util.parseURL(new URL(this.location, this.opts.origin && new URL(this.opts.path, this.opts.origin))); - const path18 = search ? `${pathname}${search}` : pathname; + const path19 = search ? 
`${pathname}${search}` : pathname; this.opts.headers = cleanRequestHeaders(this.opts.headers, statusCode === 303, this.opts.origin !== origin); - this.opts.path = path18; + this.opts.path = path19; this.opts.origin = origin; this.opts.maxRedirections = 0; this.opts.query = null; @@ -135910,7 +135910,7 @@ var require_client3 = __commonJS({ writeH2(client, client[kHTTP2Session], request2); return; } - const { body, method, path: path18, host, upgrade, headers, blocking, reset } = request2; + const { body, method, path: path19, host, upgrade, headers, blocking, reset } = request2; const expectsPayload = method === "PUT" || method === "POST" || method === "PATCH"; if (body && typeof body.read === "function") { body.read(0); @@ -135960,7 +135960,7 @@ var require_client3 = __commonJS({ if (blocking) { socket[kBlocking] = true; } - let header = `${method} ${path18} HTTP/1.1\r + let header = `${method} ${path19} HTTP/1.1\r `; if (typeof host === "string") { header += `host: ${host}\r @@ -136023,7 +136023,7 @@ upgrade: ${upgrade}\r return true; } function writeH2(client, session, request2) { - const { body, method, path: path18, host, upgrade, expectContinue, signal, headers: reqHeaders } = request2; + const { body, method, path: path19, host, upgrade, expectContinue, signal, headers: reqHeaders } = request2; let headers; if (typeof reqHeaders === "string") headers = Request[kHTTP2CopyHeaders](reqHeaders.trim()); else headers = reqHeaders; @@ -136066,7 +136066,7 @@ upgrade: ${upgrade}\r }); return true; } - headers[HTTP2_HEADER_PATH] = path18; + headers[HTTP2_HEADER_PATH] = path19; headers[HTTP2_HEADER_SCHEME] = "https"; const expectsPayload = method === "PUT" || method === "POST" || method === "PATCH"; if (body && typeof body.read === "function") { @@ -138306,20 +138306,20 @@ var require_mock_utils3 = __commonJS({ } return true; } - function safeUrl(path18) { - if (typeof path18 !== "string") { - return path18; + function safeUrl(path19) { + if (typeof path19 !== "string") 
{ + return path19; } - const pathSegments = path18.split("?"); + const pathSegments = path19.split("?"); if (pathSegments.length !== 2) { - return path18; + return path19; } const qp = new URLSearchParams(pathSegments.pop()); qp.sort(); return [...pathSegments, qp.toString()].join("?"); } - function matchKey(mockDispatch2, { path: path18, method, body, headers }) { - const pathMatch = matchValue(mockDispatch2.path, path18); + function matchKey(mockDispatch2, { path: path19, method, body, headers }) { + const pathMatch = matchValue(mockDispatch2.path, path19); const methodMatch = matchValue(mockDispatch2.method, method); const bodyMatch = typeof mockDispatch2.body !== "undefined" ? matchValue(mockDispatch2.body, body) : true; const headersMatch = matchHeaders(mockDispatch2, headers); @@ -138337,7 +138337,7 @@ var require_mock_utils3 = __commonJS({ function getMockDispatch(mockDispatches, key) { const basePath = key.query ? buildURL(key.path, key.query) : key.path; const resolvedPath = typeof basePath === "string" ? 
safeUrl(basePath) : basePath; - let matchedMockDispatches = mockDispatches.filter(({ consumed }) => !consumed).filter(({ path: path18 }) => matchValue(safeUrl(path18), resolvedPath)); + let matchedMockDispatches = mockDispatches.filter(({ consumed }) => !consumed).filter(({ path: path19 }) => matchValue(safeUrl(path19), resolvedPath)); if (matchedMockDispatches.length === 0) { throw new MockNotMatchedError(`Mock dispatch not matched for path '${resolvedPath}'`); } @@ -138374,9 +138374,9 @@ var require_mock_utils3 = __commonJS({ } } function buildKey(opts) { - const { path: path18, method, body, headers, query } = opts; + const { path: path19, method, body, headers, query } = opts; return { - path: path18, + path: path19, method, body, headers, @@ -138825,10 +138825,10 @@ var require_pending_interceptors_formatter3 = __commonJS({ } format(pendingInterceptors) { const withPrettyHeaders = pendingInterceptors.map( - ({ method, path: path18, data: { statusCode }, persist, times, timesInvoked, origin }) => ({ + ({ method, path: path19, data: { statusCode }, persist, times, timesInvoked, origin }) => ({ Method: method, Origin: origin, - Path: path18, + Path: path19, "Status code": statusCode, Persistent: persist ? 
"\u2705" : "\u274C", Invocations: timesInvoked, @@ -143448,8 +143448,8 @@ var require_util29 = __commonJS({ } } } - function validateCookiePath(path18) { - for (const char of path18) { + function validateCookiePath(path19) { + for (const char of path19) { const code = char.charCodeAt(0); if (code < 33 || char === ";") { throw new Error("Invalid cookie path"); @@ -145129,11 +145129,11 @@ var require_undici3 = __commonJS({ if (typeof opts.path !== "string") { throw new InvalidArgumentError("invalid opts.path"); } - let path18 = opts.path; + let path19 = opts.path; if (!opts.path.startsWith("/")) { - path18 = `/${path18}`; + path19 = `/${path19}`; } - url2 = new URL(util.parseOrigin(url2).origin + path18); + url2 = new URL(util.parseOrigin(url2).origin + path19); } else { if (!opts) { opts = typeof url2 === "object" ? url2 : {}; @@ -149983,7 +149983,7 @@ var require_traverse = __commonJS({ })(this.value); }; function walk(root, cb, immutable) { - var path18 = []; + var path19 = []; var parents = []; var alive = true; return (function walker(node_) { @@ -149992,11 +149992,11 @@ var require_traverse = __commonJS({ var state = { node, node_, - path: [].concat(path18), + path: [].concat(path19), parent: parents.slice(-1)[0], - key: path18.slice(-1)[0], - isRoot: path18.length === 0, - level: path18.length, + key: path19.slice(-1)[0], + isRoot: path19.length === 0, + level: path19.length, circular: null, update: function(x) { if (!state.isRoot) { @@ -150051,7 +150051,7 @@ var require_traverse = __commonJS({ parents.push(state); var keys = Object.keys(state.node); keys.forEach(function(key, i2) { - path18.push(key); + path19.push(key); if (modifiers.pre) modifiers.pre.call(state, state.node[key], key); var child = walker(state.node[key]); if (immutable && Object.hasOwnProperty.call(state.node, key)) { @@ -150060,7 +150060,7 @@ var require_traverse = __commonJS({ child.isLast = i2 == keys.length - 1; child.isFirst = i2 == 0; if (modifiers.post) modifiers.post.call(state, 
child); - path18.pop(); + path19.pop(); }); parents.pop(); } @@ -151081,11 +151081,11 @@ var require_unzip_stream = __commonJS({ return requiredLength; case states.CENTRAL_DIRECTORY_FILE_HEADER_SUFFIX: var isUtf8 = (this.parsedEntity.flags & 2048) !== 0; - var path18 = this._decodeString(chunk.slice(0, this.parsedEntity.fileNameLength), isUtf8); + var path19 = this._decodeString(chunk.slice(0, this.parsedEntity.fileNameLength), isUtf8); var extraDataBuffer = chunk.slice(this.parsedEntity.fileNameLength, this.parsedEntity.fileNameLength + this.parsedEntity.extraFieldLength); var extra = this._readExtraFields(extraDataBuffer); if (extra && extra.parsed && extra.parsed.path && !isUtf8) { - path18 = extra.parsed.path; + path19 = extra.parsed.path; } this.parsedEntity.extra = extra.parsed; var isUnix = (this.parsedEntity.versionMadeBy & 65280) >> 8 === 3; @@ -151097,7 +151097,7 @@ var require_unzip_stream = __commonJS({ } if (this.options.debug) { const debugObj = Object.assign({}, this.parsedEntity, { - path: path18, + path: path19, flags: "0x" + this.parsedEntity.flags.toString(16), unixAttrs: unixAttrs && "0" + unixAttrs.toString(8), isSymlink, @@ -151534,7 +151534,7 @@ var require_parser_stream = __commonJS({ // node_modules/mkdirp/index.js var require_mkdirp = __commonJS({ "node_modules/mkdirp/index.js"(exports2, module2) { - var path18 = require("path"); + var path19 = require("path"); var fs19 = require("fs"); var _0777 = parseInt("0777", 8); module2.exports = mkdirP.mkdirp = mkdirP.mkdirP = mkdirP; @@ -151554,7 +151554,7 @@ var require_mkdirp = __commonJS({ var cb = f || /* istanbul ignore next */ function() { }; - p = path18.resolve(p); + p = path19.resolve(p); xfs.mkdir(p, mode, function(er) { if (!er) { made = made || p; @@ -151562,8 +151562,8 @@ var require_mkdirp = __commonJS({ } switch (er.code) { case "ENOENT": - if (path18.dirname(p) === p) return cb(er); - mkdirP(path18.dirname(p), opts, function(er2, made2) { + if (path19.dirname(p) === p) return 
cb(er); + mkdirP(path19.dirname(p), opts, function(er2, made2) { if (er2) cb(er2, made2); else mkdirP(p, opts, cb, made2); }); @@ -151590,14 +151590,14 @@ var require_mkdirp = __commonJS({ mode = _0777; } if (!made) made = null; - p = path18.resolve(p); + p = path19.resolve(p); try { xfs.mkdirSync(p, mode); made = made || p; } catch (err0) { switch (err0.code) { case "ENOENT": - made = sync(path18.dirname(p), opts, made); + made = sync(path19.dirname(p), opts, made); sync(p, opts, made); break; // In the case of any other error, just see if there's a dir @@ -151623,7 +151623,7 @@ var require_mkdirp = __commonJS({ var require_extract2 = __commonJS({ "node_modules/unzip-stream/lib/extract.js"(exports2, module2) { var fs19 = require("fs"); - var path18 = require("path"); + var path19 = require("path"); var util = require("util"); var mkdirp = require_mkdirp(); var Transform = require("stream").Transform; @@ -151665,8 +151665,8 @@ var require_extract2 = __commonJS({ }; Extract.prototype._processEntry = function(entry) { var self2 = this; - var destPath = path18.join(this.opts.path, entry.path); - var directory = entry.isDirectory ? destPath : path18.dirname(destPath); + var destPath = path19.join(this.opts.path, entry.path); + var directory = entry.isDirectory ? 
destPath : path19.dirname(destPath); this.unfinishedEntries++; var writeFileFn = function() { var pipedStream = fs19.createWriteStream(destPath); @@ -151793,10 +151793,10 @@ var require_download_artifact = __commonJS({ parsed.search = ""; return parsed.toString(); }; - function exists(path18) { + function exists(path19) { return __awaiter2(this, void 0, void 0, function* () { try { - yield promises_1.default.access(path18); + yield promises_1.default.access(path19); return true; } catch (error3) { if (error3.code === "ENOENT") { @@ -152028,12 +152028,12 @@ var require_dist_node11 = __commonJS({ octokit.log.debug("request", options); const start = Date.now(); const requestOptions = octokit.request.endpoint.parse(options); - const path18 = requestOptions.url.replace(options.baseUrl, ""); + const path19 = requestOptions.url.replace(options.baseUrl, ""); return request2(options).then((response) => { - octokit.log.info(`${requestOptions.method} ${path18} - ${response.status} in ${Date.now() - start}ms`); + octokit.log.info(`${requestOptions.method} ${path19} - ${response.status} in ${Date.now() - start}ms`); return response; }).catch((error3) => { - octokit.log.info(`${requestOptions.method} ${path18} - ${error3.status} in ${Date.now() - start}ms`); + octokit.log.info(`${requestOptions.method} ${path19} - ${error3.status} in ${Date.now() - start}ms`); throw error3; }); }); @@ -154128,7 +154128,7 @@ var require_path_utils2 = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.toPlatformPath = exports2.toWin32Path = exports2.toPosixPath = void 0; - var path18 = __importStar2(require("path")); + var path19 = __importStar2(require("path")); function toPosixPath(pth) { return pth.replace(/[\\]/g, "/"); } @@ -154138,7 +154138,7 @@ var require_path_utils2 = __commonJS({ } exports2.toWin32Path = toWin32Path; function toPlatformPath(pth) { - return pth.replace(/[/\\]/g, path18.sep); + return pth.replace(/[/\\]/g, path19.sep); } 
exports2.toPlatformPath = toPlatformPath; } @@ -154202,7 +154202,7 @@ var require_io_util2 = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getCmdPath = exports2.tryGetExecutablePath = exports2.isRooted = exports2.isDirectory = exports2.exists = exports2.READONLY = exports2.UV_FS_O_EXLOCK = exports2.IS_WINDOWS = exports2.unlink = exports2.symlink = exports2.stat = exports2.rmdir = exports2.rm = exports2.rename = exports2.readlink = exports2.readdir = exports2.open = exports2.mkdir = exports2.lstat = exports2.copyFile = exports2.chmod = void 0; var fs19 = __importStar2(require("fs")); - var path18 = __importStar2(require("path")); + var path19 = __importStar2(require("path")); _a = fs19.promises, exports2.chmod = _a.chmod, exports2.copyFile = _a.copyFile, exports2.lstat = _a.lstat, exports2.mkdir = _a.mkdir, exports2.open = _a.open, exports2.readdir = _a.readdir, exports2.readlink = _a.readlink, exports2.rename = _a.rename, exports2.rm = _a.rm, exports2.rmdir = _a.rmdir, exports2.stat = _a.stat, exports2.symlink = _a.symlink, exports2.unlink = _a.unlink; exports2.IS_WINDOWS = process.platform === "win32"; exports2.UV_FS_O_EXLOCK = 268435456; @@ -154251,7 +154251,7 @@ var require_io_util2 = __commonJS({ } if (stats && stats.isFile()) { if (exports2.IS_WINDOWS) { - const upperExt = path18.extname(filePath).toUpperCase(); + const upperExt = path19.extname(filePath).toUpperCase(); if (extensions.some((validExt) => validExt.toUpperCase() === upperExt)) { return filePath; } @@ -154275,11 +154275,11 @@ var require_io_util2 = __commonJS({ if (stats && stats.isFile()) { if (exports2.IS_WINDOWS) { try { - const directory = path18.dirname(filePath); - const upperName = path18.basename(filePath).toUpperCase(); + const directory = path19.dirname(filePath); + const upperName = path19.basename(filePath).toUpperCase(); for (const actualName of yield exports2.readdir(directory)) { if (upperName === actualName.toUpperCase()) { - filePath = 
path18.join(directory, actualName); + filePath = path19.join(directory, actualName); break; } } @@ -154374,7 +154374,7 @@ var require_io2 = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.findInPath = exports2.which = exports2.mkdirP = exports2.rmRF = exports2.mv = exports2.cp = void 0; var assert_1 = require("assert"); - var path18 = __importStar2(require("path")); + var path19 = __importStar2(require("path")); var ioUtil = __importStar2(require_io_util2()); function cp(source, dest, options = {}) { return __awaiter2(this, void 0, void 0, function* () { @@ -154383,7 +154383,7 @@ var require_io2 = __commonJS({ if (destStat && destStat.isFile() && !force) { return; } - const newDest = destStat && destStat.isDirectory() && copySourceDirectory ? path18.join(dest, path18.basename(source)) : dest; + const newDest = destStat && destStat.isDirectory() && copySourceDirectory ? path19.join(dest, path19.basename(source)) : dest; if (!(yield ioUtil.exists(source))) { throw new Error(`no such file or directory: ${source}`); } @@ -154395,7 +154395,7 @@ var require_io2 = __commonJS({ yield cpDirRecursive(source, newDest, 0, force); } } else { - if (path18.relative(source, newDest) === "") { + if (path19.relative(source, newDest) === "") { throw new Error(`'${newDest}' and '${source}' are the same file`); } yield copyFile2(source, newDest, force); @@ -154408,7 +154408,7 @@ var require_io2 = __commonJS({ if (yield ioUtil.exists(dest)) { let destExists = true; if (yield ioUtil.isDirectory(dest)) { - dest = path18.join(dest, path18.basename(source)); + dest = path19.join(dest, path19.basename(source)); destExists = yield ioUtil.exists(dest); } if (destExists) { @@ -154419,7 +154419,7 @@ var require_io2 = __commonJS({ } } } - yield mkdirP(path18.dirname(dest)); + yield mkdirP(path19.dirname(dest)); yield ioUtil.rename(source, dest); }); } @@ -154482,7 +154482,7 @@ var require_io2 = __commonJS({ } const extensions = []; if (ioUtil.IS_WINDOWS && 
process.env["PATHEXT"]) { - for (const extension of process.env["PATHEXT"].split(path18.delimiter)) { + for (const extension of process.env["PATHEXT"].split(path19.delimiter)) { if (extension) { extensions.push(extension); } @@ -154495,12 +154495,12 @@ var require_io2 = __commonJS({ } return []; } - if (tool.includes(path18.sep)) { + if (tool.includes(path19.sep)) { return []; } const directories = []; if (process.env.PATH) { - for (const p of process.env.PATH.split(path18.delimiter)) { + for (const p of process.env.PATH.split(path19.delimiter)) { if (p) { directories.push(p); } @@ -154508,7 +154508,7 @@ var require_io2 = __commonJS({ } const matches = []; for (const directory of directories) { - const filePath = yield ioUtil.tryGetExecutablePath(path18.join(directory, tool), extensions); + const filePath = yield ioUtil.tryGetExecutablePath(path19.join(directory, tool), extensions); if (filePath) { matches.push(filePath); } @@ -154624,7 +154624,7 @@ var require_toolrunner2 = __commonJS({ var os4 = __importStar2(require("os")); var events = __importStar2(require("events")); var child = __importStar2(require("child_process")); - var path18 = __importStar2(require("path")); + var path19 = __importStar2(require("path")); var io7 = __importStar2(require_io2()); var ioUtil = __importStar2(require_io_util2()); var timers_1 = require("timers"); @@ -154839,7 +154839,7 @@ var require_toolrunner2 = __commonJS({ exec() { return __awaiter2(this, void 0, void 0, function* () { if (!ioUtil.isRooted(this.toolPath) && (this.toolPath.includes("/") || IS_WINDOWS && this.toolPath.includes("\\"))) { - this.toolPath = path18.resolve(process.cwd(), this.options.cwd || process.cwd(), this.toolPath); + this.toolPath = path19.resolve(process.cwd(), this.options.cwd || process.cwd(), this.toolPath); } this.toolPath = yield io7.which(this.toolPath, true); return new Promise((resolve8, reject) => __awaiter2(this, void 0, void 0, function* () { @@ -155339,7 +155339,7 @@ var require_core3 = 
__commonJS({ var file_command_1 = require_file_command2(); var utils_1 = require_utils12(); var os4 = __importStar2(require("os")); - var path18 = __importStar2(require("path")); + var path19 = __importStar2(require("path")); var oidc_utils_1 = require_oidc_utils2(); var ExitCode; (function(ExitCode2) { @@ -155367,7 +155367,7 @@ var require_core3 = __commonJS({ } else { (0, command_1.issueCommand)("add-path", {}, inputPath); } - process.env["PATH"] = `${inputPath}${path18.delimiter}${process.env["PATH"]}`; + process.env["PATH"] = `${inputPath}${path19.delimiter}${process.env["PATH"]}`; } exports2.addPath = addPath; function getInput2(name, options) { @@ -155543,13 +155543,13 @@ These characters are not allowed in the artifact name due to limitations with ce (0, core_1.info)(`Artifact name is valid!`); } exports2.checkArtifactName = checkArtifactName; - function checkArtifactFilePath(path18) { - if (!path18) { - throw new Error(`Artifact path: ${path18}, is incorrectly provided`); + function checkArtifactFilePath(path19) { + if (!path19) { + throw new Error(`Artifact path: ${path19}, is incorrectly provided`); } for (const [invalidCharacterKey, errorMessageForCharacter] of invalidArtifactFilePathCharacters) { - if (path18.includes(invalidCharacterKey)) { - throw new Error(`Artifact path is not valid: ${path18}. Contains the following character: ${errorMessageForCharacter} + if (path19.includes(invalidCharacterKey)) { + throw new Error(`Artifact path is not valid: ${path19}. 
Contains the following character: ${errorMessageForCharacter} Invalid characters include: ${Array.from(invalidArtifactFilePathCharacters.values()).toString()} @@ -155640,7 +155640,7 @@ var require_tmp = __commonJS({ "node_modules/tmp/lib/tmp.js"(exports2, module2) { var fs19 = require("fs"); var os4 = require("os"); - var path18 = require("path"); + var path19 = require("path"); var crypto2 = require("crypto"); var _c = { fs: fs19.constants, os: os4.constants }; var RANDOM_CHARS = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz"; @@ -155847,35 +155847,35 @@ var require_tmp = __commonJS({ return [actualOptions, callback]; } function _resolvePath(name, tmpDir, cb) { - const pathToResolve = path18.isAbsolute(name) ? name : path18.join(tmpDir, name); + const pathToResolve = path19.isAbsolute(name) ? name : path19.join(tmpDir, name); fs19.stat(pathToResolve, function(err) { if (err) { - fs19.realpath(path18.dirname(pathToResolve), function(err2, parentDir) { + fs19.realpath(path19.dirname(pathToResolve), function(err2, parentDir) { if (err2) return cb(err2); - cb(null, path18.join(parentDir, path18.basename(pathToResolve))); + cb(null, path19.join(parentDir, path19.basename(pathToResolve))); }); } else { - fs19.realpath(path18, cb); + fs19.realpath(path19, cb); } }); } function _resolvePathSync(name, tmpDir) { - const pathToResolve = path18.isAbsolute(name) ? name : path18.join(tmpDir, name); + const pathToResolve = path19.isAbsolute(name) ? 
name : path19.join(tmpDir, name); try { fs19.statSync(pathToResolve); return fs19.realpathSync(pathToResolve); } catch (_err) { - const parentDir = fs19.realpathSync(path18.dirname(pathToResolve)); - return path18.join(parentDir, path18.basename(pathToResolve)); + const parentDir = fs19.realpathSync(path19.dirname(pathToResolve)); + return path19.join(parentDir, path19.basename(pathToResolve)); } } function _generateTmpName(opts) { const tmpDir = opts.tmpdir; if (!_isUndefined(opts.name)) { - return path18.join(tmpDir, opts.dir, opts.name); + return path19.join(tmpDir, opts.dir, opts.name); } if (!_isUndefined(opts.template)) { - return path18.join(tmpDir, opts.dir, opts.template).replace(TEMPLATE_PATTERN, _randomChars(6)); + return path19.join(tmpDir, opts.dir, opts.template).replace(TEMPLATE_PATTERN, _randomChars(6)); } const name = [ opts.prefix ? opts.prefix : "tmp", @@ -155885,13 +155885,13 @@ var require_tmp = __commonJS({ _randomChars(12), opts.postfix ? "-" + opts.postfix : "" ].join(""); - return path18.join(tmpDir, opts.dir, name); + return path19.join(tmpDir, opts.dir, name); } function _assertOptionsBase(options) { if (!_isUndefined(options.name)) { const name = options.name; - if (path18.isAbsolute(name)) throw new Error(`name option must not contain an absolute path, found "${name}".`); - const basename2 = path18.basename(name); + if (path19.isAbsolute(name)) throw new Error(`name option must not contain an absolute path, found "${name}".`); + const basename2 = path19.basename(name); if (basename2 === ".." || basename2 === "." 
|| basename2 !== name) throw new Error(`name option must not contain a path, found "${name}".`); } @@ -155913,7 +155913,7 @@ var require_tmp = __commonJS({ if (_isUndefined(name)) return cb(null); _resolvePath(name, tmpDir, function(err, resolvedPath) { if (err) return cb(err); - const relativePath = path18.relative(tmpDir, resolvedPath); + const relativePath = path19.relative(tmpDir, resolvedPath); if (!resolvedPath.startsWith(tmpDir)) { return cb(new Error(`${option} option must be relative to "${tmpDir}", found "${relativePath}".`)); } @@ -155923,7 +155923,7 @@ var require_tmp = __commonJS({ function _getRelativePathSync(option, name, tmpDir) { if (_isUndefined(name)) return; const resolvedPath = _resolvePathSync(name, tmpDir); - const relativePath = path18.relative(tmpDir, resolvedPath); + const relativePath = path19.relative(tmpDir, resolvedPath); if (!resolvedPath.startsWith(tmpDir)) { throw new Error(`${option} option must be relative to "${tmpDir}", found "${relativePath}".`); } @@ -156003,14 +156003,14 @@ var require_tmp_promise = __commonJS({ var fileWithOptions = promisify( (options, cb) => tmp.file( options, - (err, path18, fd, cleanup) => err ? cb(err) : cb(void 0, { path: path18, fd, cleanup: promisify(cleanup) }) + (err, path19, fd, cleanup) => err ? cb(err) : cb(void 0, { path: path19, fd, cleanup: promisify(cleanup) }) ) ); module2.exports.file = async (options) => fileWithOptions(options); module2.exports.withFile = async function withFile(fn, options) { - const { path: path18, fd, cleanup } = await module2.exports.file(options); + const { path: path19, fd, cleanup } = await module2.exports.file(options); try { - return await fn({ path: path18, fd }); + return await fn({ path: path19, fd }); } finally { await cleanup(); } @@ -156019,14 +156019,14 @@ var require_tmp_promise = __commonJS({ var dirWithOptions = promisify( (options, cb) => tmp.dir( options, - (err, path18, cleanup) => err ? 
cb(err) : cb(void 0, { path: path18, cleanup: promisify(cleanup) }) + (err, path19, cleanup) => err ? cb(err) : cb(void 0, { path: path19, cleanup: promisify(cleanup) }) ) ); module2.exports.dir = async (options) => dirWithOptions(options); module2.exports.withDir = async function withDir(fn, options) { - const { path: path18, cleanup } = await module2.exports.dir(options); + const { path: path19, cleanup } = await module2.exports.dir(options); try { - return await fn({ path: path18 }); + return await fn({ path: path19 }); } finally { await cleanup(); } @@ -157734,21 +157734,21 @@ var require_download_specification = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getDownloadSpecification = void 0; - var path18 = __importStar2(require("path")); + var path19 = __importStar2(require("path")); function getDownloadSpecification(artifactName, artifactEntries, downloadPath, includeRootDirectory) { const directories = /* @__PURE__ */ new Set(); const specifications = { - rootDownloadLocation: includeRootDirectory ? path18.join(downloadPath, artifactName) : downloadPath, + rootDownloadLocation: includeRootDirectory ? path19.join(downloadPath, artifactName) : downloadPath, directoryStructure: [], emptyFilesToCreate: [], filesToDownload: [] }; for (const entry of artifactEntries) { if (entry.path.startsWith(`${artifactName}/`) || entry.path.startsWith(`${artifactName}\\`)) { - const normalizedPathEntry = path18.normalize(entry.path); - const filePath = path18.join(downloadPath, includeRootDirectory ? normalizedPathEntry : normalizedPathEntry.replace(artifactName, "")); + const normalizedPathEntry = path19.normalize(entry.path); + const filePath = path19.join(downloadPath, includeRootDirectory ? 
normalizedPathEntry : normalizedPathEntry.replace(artifactName, "")); if (entry.itemType === "file") { - directories.add(path18.dirname(filePath)); + directories.add(path19.dirname(filePath)); if (entry.fileLength === 0) { specifications.emptyFilesToCreate.push(filePath); } else { @@ -157890,7 +157890,7 @@ Note: The size of downloaded zips can differ significantly from the reported siz return uploadResponse; }); } - downloadArtifact(name, path18, options) { + downloadArtifact(name, path19, options) { return __awaiter2(this, void 0, void 0, function* () { const downloadHttpClient = new download_http_client_1.DownloadHttpClient(); const artifacts = yield downloadHttpClient.listArtifacts(); @@ -157904,12 +157904,12 @@ Note: The size of downloaded zips can differ significantly from the reported siz throw new Error(`Unable to find an artifact with the name: ${name}`); } const items = yield downloadHttpClient.getContainerItems(artifactToDownload.name, artifactToDownload.fileContainerResourceUrl); - if (!path18) { - path18 = (0, config_variables_1.getWorkSpaceDirectory)(); + if (!path19) { + path19 = (0, config_variables_1.getWorkSpaceDirectory)(); } - path18 = (0, path_1.normalize)(path18); - path18 = (0, path_1.resolve)(path18); - const downloadSpecification = (0, download_specification_1.getDownloadSpecification)(name, items.value, path18, (options === null || options === void 0 ? void 0 : options.createArtifactFolder) || false); + path19 = (0, path_1.normalize)(path19); + path19 = (0, path_1.resolve)(path19); + const downloadSpecification = (0, download_specification_1.getDownloadSpecification)(name, items.value, path19, (options === null || options === void 0 ? 
void 0 : options.createArtifactFolder) || false); if (downloadSpecification.filesToDownload.length === 0) { core17.info(`No downloadable files were found for the artifact: ${artifactToDownload.name}`); } else { @@ -157924,7 +157924,7 @@ Note: The size of downloaded zips can differ significantly from the reported siz }; }); } - downloadAllArtifacts(path18) { + downloadAllArtifacts(path19) { return __awaiter2(this, void 0, void 0, function* () { const downloadHttpClient = new download_http_client_1.DownloadHttpClient(); const response = []; @@ -157933,18 +157933,18 @@ Note: The size of downloaded zips can differ significantly from the reported siz core17.info("Unable to find any artifacts for the associated workflow"); return response; } - if (!path18) { - path18 = (0, config_variables_1.getWorkSpaceDirectory)(); + if (!path19) { + path19 = (0, config_variables_1.getWorkSpaceDirectory)(); } - path18 = (0, path_1.normalize)(path18); - path18 = (0, path_1.resolve)(path18); + path19 = (0, path_1.normalize)(path19); + path19 = (0, path_1.resolve)(path19); let downloadedArtifacts = 0; while (downloadedArtifacts < artifacts.count) { const currentArtifactToDownload = artifacts.value[downloadedArtifacts]; downloadedArtifacts += 1; core17.info(`starting download of artifact ${currentArtifactToDownload.name} : ${downloadedArtifacts}/${artifacts.count}`); const items = yield downloadHttpClient.getContainerItems(currentArtifactToDownload.name, currentArtifactToDownload.fileContainerResourceUrl); - const downloadSpecification = (0, download_specification_1.getDownloadSpecification)(currentArtifactToDownload.name, items.value, path18, true); + const downloadSpecification = (0, download_specification_1.getDownloadSpecification)(currentArtifactToDownload.name, items.value, path19, true); if (downloadSpecification.filesToDownload.length === 0) { core17.info(`No downloadable files were found for any artifact ${currentArtifactToDownload.name}`); } else { @@ -163563,8 +163563,8 @@ 
function getExtraOptionsEnvParam() { } function getToolNames(sarif) { const toolNames = {}; - for (const run3 of sarif.runs || []) { - const tool = run3.tool || {}; + for (const run2 of sarif.runs || []) { + const tool = run2.tool || {}; const driver = tool.driver || {}; if (typeof driver.name === "string" && driver.name.length > 0) { toolNames[driver.name] = true; @@ -163855,6 +163855,34 @@ async function asyncSome(array, predicate) { const results = await Promise.all(array.map(predicate)); return results.some((result) => result); } +var Success = class { + constructor(value) { + this.value = value; + } + isSuccess() { + return true; + } + isFailure() { + return false; + } + orElse(_defaultValue) { + return this.value; + } +}; +var Failure = class { + constructor(value) { + this.value = value; + } + isSuccess() { + return false; + } + isFailure() { + return true; + } + orElse(defaultValue) { + return defaultValue; + } +}; // src/actions-util.ts var pkg = require_package(); @@ -164902,8 +164930,8 @@ var getFileOidsUnderPath = async function(basePath) { const match = line.match(regex); if (match) { const oid = match[1]; - const path18 = decodeGitFilePath(match[2]); - fileOidMap[path18] = oid; + const path19 = decodeGitFilePath(match[2]); + fileOidMap[path19] = oid; } else { throw new Error(`Unexpected "git ls-files" output: ${line}`); } @@ -165750,6 +165778,9 @@ function appendExtraQueryExclusions(extraQueryExclusions, cliConfig) { function isCodeScanningEnabled(config) { return config.analysisKinds.includes("code-scanning" /* CodeScanning */); } +function isRiskAssessmentEnabled(config) { + return config.analysisKinds.includes("risk-assessment" /* RiskAssessment */); +} // src/setup-codeql.ts var fs10 = __toESM(require("fs")); @@ -167851,6 +167882,7 @@ async function createDatabaseBundleCli(codeql, config, language) { // src/init-action-post-helper.ts var fs18 = __toESM(require("fs")); +var import_path3 = __toESM(require("path")); var github2 = 
__toESM(require_github()); // src/upload-lib.ts @@ -168940,9 +168972,9 @@ async function addFingerprints(sarif, sourceRoot, logger) { `Adding fingerprints to SARIF file. See ${"https://docs.github.com/en/code-security/reference/code-scanning/sarif-support-for-code-scanning#data-for-preventing-duplicated-alerts" /* TRACK_CODE_SCANNING_ALERTS_ACROSS_RUNS */} for more information.` ); const callbacksByFile = {}; - for (const run3 of sarif.runs || []) { - const artifacts = run3.artifacts || []; - for (const result of run3.results || []) { + for (const run2 of sarif.runs || []) { + const artifacts = run2.artifacts || []; + for (const result of run2.results || []) { const primaryLocation = (result.locations || [])[0]; if (!primaryLocation?.physicalLocation?.artifactLocation) { logger.debug( @@ -169043,25 +169075,25 @@ function combineSarifFiles(sarifFiles, logger) { function areAllRunsProducedByCodeQL(sarifObjects) { return sarifObjects.every((sarifObject) => { return sarifObject.runs?.every( - (run3) => run3.tool?.driver?.name === "CodeQL" + (run2) => run2.tool?.driver?.name === "CodeQL" ); }); } -function createRunKey(run3) { +function createRunKey(run2) { return { - name: run3.tool?.driver?.name, - fullName: run3.tool?.driver?.fullName, - version: run3.tool?.driver?.version, - semanticVersion: run3.tool?.driver?.semanticVersion, - guid: run3.tool?.driver?.guid, - automationId: run3.automationDetails?.id + name: run2.tool?.driver?.name, + fullName: run2.tool?.driver?.fullName, + version: run2.tool?.driver?.version, + semanticVersion: run2.tool?.driver?.semanticVersion, + guid: run2.tool?.driver?.guid, + automationId: run2.automationDetails?.id }; } function areAllRunsUnique(sarifObjects) { const keys = /* @__PURE__ */ new Set(); for (const sarifObject of sarifObjects) { - for (const run3 of sarifObject.runs) { - const key = JSON.stringify(createRunKey(run3)); + for (const run2 of sarifObject.runs) { + const key = JSON.stringify(createRunKey(run2)); if (keys.has(key)) { 
return false; } @@ -169164,9 +169196,9 @@ async function combineSarifFilesUsingCLI(sarifFiles, gitHubVersion, features, lo function populateRunAutomationDetails(sarif, category, analysis_key, environment) { const automationID = getAutomationID2(category, analysis_key, environment); if (automationID !== void 0) { - for (const run3 of sarif.runs || []) { - if (run3.automationDetails === void 0) { - run3.automationDetails = { + for (const run2 of sarif.runs || []) { + if (run2.automationDetails === void 0) { + run2.automationDetails = { id: automationID }; } @@ -169265,13 +169297,13 @@ function countResultsInSarif(sarif) { if (!Array.isArray(parsedSarif.runs)) { throw new InvalidSarifUploadError("Invalid SARIF. Missing 'runs' array."); } - for (const run3 of parsedSarif.runs) { - if (!Array.isArray(run3.results)) { + for (const run2 of parsedSarif.runs) { + if (!Array.isArray(run2.results)) { throw new InvalidSarifUploadError( "Invalid SARIF. Missing 'results' array in run." ); } - numResults += run3.results.length; + numResults += run2.results.length; } return numResults; } @@ -169564,9 +169596,9 @@ function handleProcessingResultForUnsuccessfulExecution(response, status, logger } function validateUniqueCategory(sarif, sentinelPrefix) { const categories = {}; - for (const run3 of sarif.runs) { - const id = run3?.automationDetails?.id; - const tool = run3.tool?.driver?.name; + for (const run2 of sarif.runs) { + const id = run2?.automationDetails?.id; + const tool = run2.tool?.driver?.name; const category = `${sanitize(id)}_${sanitize(tool)}`; categories[category] = { id, tool }; } @@ -169591,9 +169623,9 @@ function filterAlertsByDiffRange(logger, sarif) { return sarif; } const checkoutPath = getRequiredInput("checkout_path"); - for (const run3 of sarif.runs) { - if (run3.results) { - run3.results = run3.results.filter((result) => { + for (const run2 of sarif.runs) { + if (run2.results) { + run2.results = run2.results.filter((result) => { const locations = [ 
...(result.locations || []).map((loc) => loc.physicalLocation), ...(result.relatedLocations || []).map((loc) => loc.physicalLocation) @@ -169755,34 +169787,81 @@ function createFailedUploadFailedSarifResult(error3) { upload_failed_run_stack_trace: wrappedError.stack }; } -async function maybeUploadFailedSarif(config, repositoryNwo, features, logger) { +async function prepareFailedSarif(logger, features, config) { if (!config.codeQLCmd) { - return { upload_failed_run_skipped_because: "CodeQL command not found" }; + return new Failure({ + upload_failed_run_skipped_because: "CodeQL command not found" + }); } - const workflow = await getWorkflow(logger); const jobName = getRequiredEnvParam("GITHUB_JOB"); const matrix = parseMatrixInput(getRequiredInput("matrix")); - const shouldUpload = getUploadInputOrThrow(workflow, jobName, matrix); - if (!["always", "failure-only"].includes( - getUploadValue(shouldUpload) - ) || shouldSkipSarifUpload()) { - return { upload_failed_run_skipped_because: "SARIF upload is disabled" }; - } - const category = getCategoryInputOrThrow(workflow, jobName, matrix); - const checkoutPath = getCheckoutPathInputOrThrow(workflow, jobName, matrix); + if (shouldSkipSarifUpload()) { + return new Failure({ + upload_failed_run_skipped_because: "SARIF upload is disabled" + }); + } + if (isRiskAssessmentEnabled(config)) { + if (config.languages.length !== 1) { + return new Failure({ + upload_failed_run_skipped_because: "Unexpectedly, the configuration is not for a single language." 
+ }); + } + const language = config.languages[0]; + const category = `/language:${language}`; + const checkoutPath = "."; + const result = await generateFailedSarif( + features, + config, + category, + checkoutPath, + `../codeql-failed-sarif-${language}${RiskAssessment.sarifExtension}` + ); + return new Success(result); + } else { + const workflow = await getWorkflow(logger); + const shouldUpload = getUploadInputOrThrow(workflow, jobName, matrix); + if (!["always", "failure-only"].includes( + getUploadValue(shouldUpload) + )) { + return new Failure({ + upload_failed_run_skipped_because: "SARIF upload is disabled" + }); + } + const category = getCategoryInputOrThrow(workflow, jobName, matrix); + const checkoutPath = getCheckoutPathInputOrThrow(workflow, jobName, matrix); + const result = await generateFailedSarif( + features, + config, + category, + checkoutPath + ); + return new Success(result); + } +} +async function generateFailedSarif(features, config, category, checkoutPath, sarifFile) { const databasePath = config.dbLocation; const codeql = await getCodeQL(config.codeQLCmd); - const sarifFile = "../codeql-failed-run.sarif"; + if (sarifFile === void 0) { + sarifFile = "../codeql-failed-run.sarif"; + } if (databasePath === void 0 || !await features.getValue("export_diagnostics_enabled" /* ExportDiagnosticsEnabled */, codeql)) { await codeql.diagnosticsExport(sarifFile, category, config); } else { await codeql.databaseExportDiagnostics(databasePath, sarifFile, category); } - logger.info(`Uploading failed SARIF file ${sarifFile}`); + return { sarifFile, category, checkoutPath }; +} +async function maybeUploadFailedSarif(config, repositoryNwo, features, logger) { + const failedSarifResult = await prepareFailedSarif(logger, features, config); + if (failedSarifResult.isFailure()) { + return failedSarifResult.value; + } + const failedSarif = failedSarifResult.value; + logger.info(`Uploading failed SARIF file ${failedSarif.sarifFile}`); const uploadResult = await 
uploadFiles( - sarifFile, - checkoutPath, - category, + failedSarif.sarifFile, + failedSarif.checkoutPath, + failedSarif.category, features, logger, CodeScanning @@ -169795,24 +169874,47 @@ async function maybeUploadFailedSarif(config, repositoryNwo, features, logger) { ); return uploadResult ? { ...uploadResult.statusReport, sarifID: uploadResult.sarifID } : {}; } -async function tryUploadSarifIfRunFailed(config, repositoryNwo, features, logger) { - if (!isCodeScanningEnabled(config)) { - return { - upload_failed_run_skipped_because: "Code Scanning is not enabled." - }; +async function maybeUploadFailedSarifArtifact(config, features, logger) { + const failedSarifResult = await prepareFailedSarif(logger, features, config); + if (failedSarifResult.isFailure()) { + return failedSarifResult.value; } + const failedSarif = failedSarifResult.value; + logger.info( + `Uploading failed SARIF file ${failedSarif.sarifFile} as artifact` + ); + const gitHubVersion = await getGitHubVersion(); + const client = await getArtifactUploaderClient(logger, gitHubVersion.type); + const suffix = getArtifactSuffix(getOptionalInput("matrix")); + const name = sanitizeArtifactName(`sarif-artifact-${suffix}`); + await client.uploadArtifact( + name, + [import_path3.default.normalize(failedSarif.sarifFile)], + import_path3.default.normalize("..") + ); + return { sarifID: name }; +} +async function tryUploadSarifIfRunFailed(config, repositoryNwo, features, logger) { if (process.env["CODEQL_ACTION_ANALYZE_DID_COMPLETE_SUCCESSFULLY" /* ANALYZE_DID_COMPLETE_SUCCESSFULLY */] === "true") { return { upload_failed_run_skipped_because: "Analyze Action completed successfully" }; } try { - return await maybeUploadFailedSarif( - config, - repositoryNwo, - features, - logger - ); + if (isCodeScanningEnabled(config)) { + return await maybeUploadFailedSarif( + config, + repositoryNwo, + features, + logger + ); + } else if (isRiskAssessmentEnabled(config)) { + return await maybeUploadFailedSarifArtifact(config, 
features, logger); + } else { + return { + upload_failed_run_skipped_because: "No analysis kind that supports failed SARIF uploads is enabled." + }; + } } catch (e) { logger.debug( `Failed to upload a SARIF file for this failed CodeQL code scanning run. ${e}` @@ -169820,7 +169922,7 @@ async function tryUploadSarifIfRunFailed(config, repositoryNwo, features, logger return createFailedUploadFailedSarifResult(e); } } -async function run(uploadAllAvailableDebugArtifacts, printDebugLogs2, codeql, config, repositoryNwo, features, logger) { +async function uploadFailureInfo(uploadAllAvailableDebugArtifacts, printDebugLogs2, codeql, config, repositoryNwo, features, logger) { await recordOverlayStatus(codeql, config, features, logger); const uploadFailedSarifResult = await tryUploadSarifIfRunFailed( config, @@ -169830,7 +169932,7 @@ async function run(uploadAllAvailableDebugArtifacts, printDebugLogs2, codeql, co ); if (uploadFailedSarifResult.upload_failed_run_skipped_because) { logger.debug( - `Won't upload a failed SARIF file for this CodeQL code scanning run because: ${uploadFailedSarifResult.upload_failed_run_skipped_because}.` + `Won't upload a failed SARIF file for this CodeQL analysis because: ${uploadFailedSarifResult.upload_failed_run_skipped_because}.` ); } if (process.env["CODEQL_ACTION_EXPECT_UPLOAD_FAILED_SARIF"] === "true" && !uploadFailedSarifResult.raw_upload_size_bytes) { @@ -170205,7 +170307,7 @@ async function sendUnhandledErrorStatusReport(actionName, actionStartedAt, error } // src/init-action-post.ts -async function run2(startedAt) { +async function run(startedAt) { const logger = getActionsLogger(); let config; let uploadFailedSarifResult; @@ -170228,7 +170330,7 @@ async function run2(startedAt) { ); } else { const codeql = await getCodeQL(config.codeQLCmd); - uploadFailedSarifResult = await run( + uploadFailedSarifResult = await uploadFailureInfo( tryUploadAllAvailableDebugArtifacts, printDebugLogs, codeql, @@ -170312,7 +170414,7 @@ async function 
runWrapper() { const startedAt = /* @__PURE__ */ new Date(); const logger = getActionsLogger(); try { - await run2(startedAt); + await run(startedAt); } catch (error3) { core16.setFailed(`init post action failed: ${wrapError(error3).message}`); await sendUnhandledErrorStatusReport( diff --git a/src/config-utils.ts b/src/config-utils.ts index 3b23a12bd7..86f784e3a3 100644 --- a/src/config-utils.ts +++ b/src/config-utils.ts @@ -1470,6 +1470,13 @@ export function isCodeQualityEnabled(config: Config): boolean { return config.analysisKinds.includes(AnalysisKind.CodeQuality); } +/** + * Returns `true` if Code Scanning Risk Assessment analysis is enabled, or `false` if not. + */ +export function isRiskAssessmentEnabled(config: Config): boolean { + return config.analysisKinds.includes(AnalysisKind.RiskAssessment); +} + /** * Returns the primary analysis kind that the Action is initialised with. If there is only * one analysis kind, then that is returned. diff --git a/src/init-action-post-helper.test.ts b/src/init-action-post-helper.test.ts index 1ddb702872..e5f1a3831c 100644 --- a/src/init-action-post-helper.test.ts +++ b/src/init-action-post-helper.test.ts @@ -1,10 +1,13 @@ +import * as core from "@actions/core"; import test, { ExecutionContext } from "ava"; import * as sinon from "sinon"; import * as actionsUtil from "./actions-util"; import { AnalysisKind } from "./analyses"; +import * as apiClient from "./api-client"; import * as codeql from "./codeql"; import * as configUtils from "./config-utils"; +import * as debugArtifacts from "./debug-artifacts"; import { EnvVar } from "./environment"; import { Feature } from "./feature-flags"; import * as initActionPostHelper from "./init-action-post-helper"; @@ -16,6 +19,7 @@ import { createFeatures, createTestConfig, makeVersionInfo, + RecordingLogger, setupTests, } from "./testing-utils"; import * as uploadLib from "./upload-lib"; @@ -45,7 +49,7 @@ test("init-post action with debug mode off", async (t) => { const 
uploadAllAvailableDebugArtifactsSpy = sinon.spy(); const printDebugLogsSpy = sinon.spy(); - await initActionPostHelper.run( + await initActionPostHelper.uploadFailureInfo( uploadAllAvailableDebugArtifactsSpy, printDebugLogsSpy, codeql.createStubCodeQL({}), @@ -68,7 +72,7 @@ test("init-post action with debug mode on", async (t) => { const uploadAllAvailableDebugArtifactsSpy = sinon.spy(); const printDebugLogsSpy = sinon.spy(); - await initActionPostHelper.run( + await initActionPostHelper.uploadFailureInfo( uploadAllAvailableDebugArtifactsSpy, printDebugLogsSpy, codeql.createStubCodeQL({}), @@ -309,7 +313,7 @@ test("not uploading failed SARIF when `code-scanning` is not an enabled analysis }); t.is( result.upload_failed_run_skipped_because, - "Code Scanning is not enabled.", + "No analysis kind that supports failed SARIF uploads is enabled.", ); }); @@ -332,7 +336,7 @@ test("saves overlay status when overlay-base analysis did not complete successfu const stubCodeQL = codeql.createStubCodeQL({}); - await initActionPostHelper.run( + await initActionPostHelper.uploadFailureInfo( sinon.spy(), sinon.spy(), stubCodeQL, @@ -392,7 +396,7 @@ test("does not save overlay status when OverlayAnalysisStatusSave feature flag i .stub(overlayStatus, "saveOverlayStatus") .resolves(true); - await initActionPostHelper.run( + await initActionPostHelper.uploadFailureInfo( sinon.spy(), sinon.spy(), codeql.createStubCodeQL({}), @@ -429,7 +433,7 @@ test("does not save overlay status when build successful", async (t) => { .stub(overlayStatus, "saveOverlayStatus") .resolves(true); - await initActionPostHelper.run( + await initActionPostHelper.uploadFailureInfo( sinon.spy(), sinon.spy(), codeql.createStubCodeQL({}), @@ -465,7 +469,7 @@ test("does not save overlay status when overlay not enabled", async (t) => { .stub(overlayStatus, "saveOverlayStatus") .resolves(true); - await initActionPostHelper.run( + await initActionPostHelper.uploadFailureInfo( sinon.spy(), sinon.spy(), 
codeql.createStubCodeQL({}), @@ -622,3 +626,191 @@ async function testFailedSarifUpload( } return result; } + +const singleLanguageMatrix = JSON.stringify({ + language: "javascript", + category: "/language:javascript", + "build-mode": "none", + runner: "ubuntu-latest", +}); + +async function mockRiskAssessmentEnv(matrix: string) { + process.env[EnvVar.ANALYZE_DID_COMPLETE_SUCCESSFULLY] = "false"; + process.env["GITHUB_JOB"] = "analyze"; + process.env["GITHUB_REPOSITORY"] = "github/codeql-action-fake-repository"; + process.env["GITHUB_WORKSPACE"] = + "/home/runner/work/codeql-action-fake-repository/codeql-action-fake-repository"; + + sinon + .stub(apiClient, "getGitHubVersion") + .resolves({ type: util.GitHubVariant.GHES, version: "3.0.0" }); + + const codeqlObject = await codeql.getCodeQLForTesting(); + const databaseExportDiagnostics = sinon + .stub(codeqlObject, "databaseExportDiagnostics") + .resolves(); + const diagnosticsExport = sinon + .stub(codeqlObject, "diagnosticsExport") + .resolves(); + + sinon.stub(codeql, "getCodeQL").resolves(codeqlObject); + + sinon.stub(core, "getInput").withArgs("matrix").returns(matrix); + + const uploadArtifact = sinon.stub().resolves(); + const artifactClient = { uploadArtifact }; + sinon + .stub(debugArtifacts, "getArtifactUploaderClient") + .value(() => artifactClient); + + return { uploadArtifact, databaseExportDiagnostics, diagnosticsExport }; +} + +test("tryUploadSarifIfRunFailed - uploads as artifact for risk assessments (diagnosticsExport)", async (t) => { + const logger = new RecordingLogger(); + const { uploadArtifact, databaseExportDiagnostics, diagnosticsExport } = + await mockRiskAssessmentEnv(singleLanguageMatrix); + + const config = createTestConfig({ + analysisKinds: [AnalysisKind.RiskAssessment], + codeQLCmd: "codeql-for-testing", + languages: ["javascript"], + }); + const features = createFeatures([]); + + const result = await initActionPostHelper.tryUploadSarifIfRunFailed( + config, + 
parseRepositoryNwo("github/codeql-action-fake-repository"), + features, + logger, + ); + + const expectedName = debugArtifacts.sanitizeArtifactName( + `sarif-artifact-${debugArtifacts.getArtifactSuffix(singleLanguageMatrix)}`, + ); + const expectedFilePattern = /codeql-failed-sarif-javascript\.csra\.sarif$/; + t.is(result.upload_failed_run_skipped_because, undefined); + t.is(result.upload_failed_run_error, undefined); + t.is(result.sarifID, expectedName); + t.assert( + uploadArtifact.calledOnceWith( + expectedName, + [sinon.match(expectedFilePattern)], + sinon.match.string, + ), + ); + t.assert(databaseExportDiagnostics.notCalled); + t.assert( + diagnosticsExport.calledOnceWith( + sinon.match(expectedFilePattern), + "/language:javascript", + config, + ), + ); +}); + +test("tryUploadSarifIfRunFailed - uploads as artifact for risk assessments (databaseExportDiagnostics)", async (t) => { + const logger = new RecordingLogger(); + const { uploadArtifact, databaseExportDiagnostics, diagnosticsExport } = + await mockRiskAssessmentEnv(singleLanguageMatrix); + + const dbLocation = "/some/path"; + const config = createTestConfig({ + analysisKinds: [AnalysisKind.RiskAssessment], + codeQLCmd: "codeql-for-testing", + languages: ["javascript"], + dbLocation: "/some/path", + }); + const features = createFeatures([Feature.ExportDiagnosticsEnabled]); + + const result = await initActionPostHelper.tryUploadSarifIfRunFailed( + config, + parseRepositoryNwo("github/codeql-action-fake-repository"), + features, + logger, + ); + + const expectedName = debugArtifacts.sanitizeArtifactName( + `sarif-artifact-${debugArtifacts.getArtifactSuffix(singleLanguageMatrix)}`, + ); + const expectedFilePattern = /codeql-failed-sarif-javascript\.csra\.sarif$/; + t.is(result.upload_failed_run_skipped_because, undefined); + t.is(result.upload_failed_run_error, undefined); + t.is(result.sarifID, expectedName); + t.assert( + uploadArtifact.calledOnceWith( + expectedName, + [sinon.match(expectedFilePattern)], 
+ sinon.match.string, + ), + ); + t.assert(diagnosticsExport.notCalled); + t.assert( + databaseExportDiagnostics.calledOnceWith( + dbLocation, + sinon.match(expectedFilePattern), + "/language:javascript", + ), + ); +}); + +const skippedUploadTest = test.macro({ + exec: async ( + t: ExecutionContext, + config: Partial, + expectedSkippedReason: string, + ) => { + const logger = new RecordingLogger(); + const { uploadArtifact, diagnosticsExport } = + await mockRiskAssessmentEnv(singleLanguageMatrix); + const features = createFeatures([]); + + const result = await initActionPostHelper.tryUploadSarifIfRunFailed( + createTestConfig(config), + parseRepositoryNwo("github/codeql-action-fake-repository"), + features, + logger, + ); + + t.is(result.upload_failed_run_skipped_because, expectedSkippedReason); + t.assert(uploadArtifact.notCalled); + t.assert(diagnosticsExport.notCalled); + }, + + title: (providedTitle: string = "") => + `tryUploadSarifIfRunFailed - skips upload ${providedTitle}`, +}); + +test( + "without CodeQL command", + skippedUploadTest, + // No codeQLCmd + { + analysisKinds: [AnalysisKind.RiskAssessment], + languages: ["javascript"], + } satisfies Partial, + "CodeQL command not found", +); + +test( + "if no language is configured", + skippedUploadTest, + // No explicit language configuration + { + analysisKinds: [AnalysisKind.RiskAssessment], + codeQLCmd: "codeql-for-testing", + } satisfies Partial, + "Unexpectedly, the configuration is not for a single language.", +); + +test( + "if multiple languages is configured", + skippedUploadTest, + // Multiple explicit languages configured + { + analysisKinds: [AnalysisKind.RiskAssessment], + codeQLCmd: "codeql-for-testing", + languages: ["javascript", "python"], + } satisfies Partial, + "Unexpectedly, the configuration is not for a single language.", +); diff --git a/src/init-action-post-helper.ts b/src/init-action-post-helper.ts index 7cbf20d5db..6fe38f3bb8 100644 --- a/src/init-action-post-helper.ts +++ 
b/src/init-action-post-helper.ts @@ -1,12 +1,22 @@ import * as fs from "fs"; +import path from "path"; import * as github from "@actions/github"; import * as actionsUtil from "./actions-util"; -import { CodeScanning } from "./analyses"; -import { getApiClient } from "./api-client"; +import { CodeScanning, RiskAssessment } from "./analyses"; +import { getApiClient, getGitHubVersion } from "./api-client"; import { CodeQL, getCodeQL } from "./codeql"; -import { Config, isCodeScanningEnabled } from "./config-utils"; +import { + Config, + isCodeScanningEnabled, + isRiskAssessmentEnabled, +} from "./config-utils"; +import { + getArtifactSuffix, + getArtifactUploaderClient, + sanitizeArtifactName, +} from "./debug-artifacts"; import * as dependencyCaching from "./dependency-caching"; import { EnvVar } from "./environment"; import { Feature, FeatureEnablement } from "./feature-flags"; @@ -19,10 +29,13 @@ import * as uploadLib from "./upload-lib"; import { checkDiskUsage, delay, + Failure, getErrorMessage, getRequiredEnvParam, parseMatrixInput, + Result, shouldSkipSarifUpload, + Success, wrapError, } from "./util"; import { @@ -62,37 +75,96 @@ function createFailedUploadFailedSarifResult( }; } +/** Records details about a SARIF file that can contain information about a failed analysis. */ +interface FailedSarifInfo { + sarifFile: string; + category: string | undefined; + checkoutPath: string; +} + /** - * Upload a failed SARIF file if we can verify that SARIF upload is enabled and determine the SARIF - * category for the workflow. + * Tries to prepare a SARIF file that can contain information about a failed analysis. + * + * @returns Either information about the SARIF file that was produced, or a reason why it couldn't be produced.
*/ -async function maybeUploadFailedSarif( - config: Config, - repositoryNwo: RepositoryNwo, - features: FeatureEnablement, +async function prepareFailedSarif( logger: Logger, -): Promise { + features: FeatureEnablement, + config: Config, +): Promise> { if (!config.codeQLCmd) { - return { upload_failed_run_skipped_because: "CodeQL command not found" }; + return new Failure({ + upload_failed_run_skipped_because: "CodeQL command not found", + }); } - const workflow = await getWorkflow(logger); const jobName = getRequiredEnvParam("GITHUB_JOB"); const matrix = parseMatrixInput(actionsUtil.getRequiredInput("matrix")); - const shouldUpload = getUploadInputOrThrow(workflow, jobName, matrix); - if ( - !["always", "failure-only"].includes( - actionsUtil.getUploadValue(shouldUpload), - ) || - shouldSkipSarifUpload() - ) { - return { upload_failed_run_skipped_because: "SARIF upload is disabled" }; + + if (shouldSkipSarifUpload()) { + return new Failure({ + upload_failed_run_skipped_because: "SARIF upload is disabled", + }); } - const category = getCategoryInputOrThrow(workflow, jobName, matrix); - const checkoutPath = getCheckoutPathInputOrThrow(workflow, jobName, matrix); - const databasePath = config.dbLocation; + if (isRiskAssessmentEnabled(config)) { + if (config.languages.length !== 1) { + return new Failure({ + upload_failed_run_skipped_because: + "Unexpectedly, the configuration is not for a single language.", + }); + } + + // We can make these assumptions for risk assessments. 
+ const language = config.languages[0]; + const category = `/language:${language}`; + const checkoutPath = "."; + const result = await generateFailedSarif( + features, + config, + category, + checkoutPath, + `../codeql-failed-sarif-${language}${RiskAssessment.sarifExtension}`, + ); + return new Success(result); + } else { + const workflow = await getWorkflow(logger); + const shouldUpload = getUploadInputOrThrow(workflow, jobName, matrix); + if ( + !["always", "failure-only"].includes( + actionsUtil.getUploadValue(shouldUpload), + ) + ) { + return new Failure({ + upload_failed_run_skipped_because: "SARIF upload is disabled", + }); + } + const category = getCategoryInputOrThrow(workflow, jobName, matrix); + const checkoutPath = getCheckoutPathInputOrThrow(workflow, jobName, matrix); + + const result = await generateFailedSarif( + features, + config, + category, + checkoutPath, + ); + return new Success(result); + } +} + +async function generateFailedSarif( + features: FeatureEnablement, + config: Config, + category: string | undefined, + checkoutPath: string, + sarifFile?: string, +) { + const databasePath = config.dbLocation; const codeql = await getCodeQL(config.codeQLCmd); - const sarifFile = "../codeql-failed-run.sarif"; + + // Set the filename for the SARIF file if not already set. + if (sarifFile === undefined) { + sarifFile = "../codeql-failed-run.sarif"; + } // If there is no database or the feature flag is off, we run 'export diagnostics' if ( @@ -105,11 +177,32 @@ async function maybeUploadFailedSarif( await codeql.databaseExportDiagnostics(databasePath, sarifFile, category); } - logger.info(`Uploading failed SARIF file ${sarifFile}`); + return { sarifFile, category, checkoutPath }; +} + +/** + * Upload a failed SARIF file if we can verify that SARIF upload is enabled and determine the SARIF + * category for the workflow. 
+ */ +async function maybeUploadFailedSarif( + config: Config, + repositoryNwo: RepositoryNwo, + features: FeatureEnablement, + logger: Logger, +): Promise { + const failedSarifResult = await prepareFailedSarif(logger, features, config); + + if (failedSarifResult.isFailure()) { + return failedSarifResult.value; + } + + const failedSarif = failedSarifResult.value; + + logger.info(`Uploading failed SARIF file ${failedSarif.sarifFile}`); const uploadResult = await uploadLib.uploadFiles( - sarifFile, - checkoutPath, - category, + failedSarif.sarifFile, + failedSarif.checkoutPath, + failedSarif.category, features, logger, CodeScanning, @@ -125,31 +218,78 @@ async function maybeUploadFailedSarif( : {}; } +/** Uploads a failed SARIF file as workflow artifact, if it can be generated. */ +async function maybeUploadFailedSarifArtifact( + config: Config, + features: FeatureEnablement, + logger: Logger, +): Promise { + const failedSarifResult = await prepareFailedSarif(logger, features, config); + + if (failedSarifResult.isFailure()) { + return failedSarifResult.value; + } + + const failedSarif = failedSarifResult.value; + + logger.info( + `Uploading failed SARIF file ${failedSarif.sarifFile} as artifact`, + ); + + const gitHubVersion = await getGitHubVersion(); + const client = await getArtifactUploaderClient(logger, gitHubVersion.type); + + const suffix = getArtifactSuffix(actionsUtil.getOptionalInput("matrix")); + const name = sanitizeArtifactName(`sarif-artifact-${suffix}`); + await client.uploadArtifact( + name, + [path.normalize(failedSarif.sarifFile)], + path.normalize(".."), + ); + + return { sarifID: name }; +} + +/** + * Tries to upload a SARIF file with information about the run, if it failed. + * + * @param config The CodeQL Action configuration. + * @param repositoryNwo The name and owner of the repository. + * @param features Information about enabled features. + * @param logger The logger to use. 
+ * @returns The results of uploading the SARIF file for the failure. + */ export async function tryUploadSarifIfRunFailed( config: Config, repositoryNwo: RepositoryNwo, features: FeatureEnablement, logger: Logger, ): Promise<UploadFailedSarifResult> { - // Only upload the failed SARIF to Code scanning if Code scanning is enabled. - if (!isCodeScanningEnabled(config)) { - return { - upload_failed_run_skipped_because: "Code Scanning is not enabled.", - }; - } + // There's nothing to do here if the analysis succeeded. if (process.env[EnvVar.ANALYZE_DID_COMPLETE_SUCCESSFULLY] === "true") { return { upload_failed_run_skipped_because: "Analyze Action completed successfully", }; } + try { - return await maybeUploadFailedSarif( - config, - repositoryNwo, - features, - logger, - ); + // Only upload the failed SARIF to Code scanning if Code scanning is enabled. + if (isCodeScanningEnabled(config)) { + return await maybeUploadFailedSarif( + config, + repositoryNwo, + features, + logger, + ); + } else if (isRiskAssessmentEnabled(config)) { + return await maybeUploadFailedSarifArtifact(config, features, logger); + } else { + return { + upload_failed_run_skipped_because: + "No analysis kind that supports failed SARIF uploads is enabled.", + }; + } } catch (e) { logger.debug( `Failed to upload a SARIF file for this failed CodeQL code scanning run. ${e}`, @@ -158,7 +298,21 @@ export async function tryUploadSarifIfRunFailed( } } -export async function run( +/** + * Handles the majority of the `post-init` step logic which, depending on the configuration, + * mainly involves uploading a SARIF file with information about the failed run, debug + * artifacts, and performing clean-up operations. + * + * @param uploadAllAvailableDebugArtifacts A function with which to upload debug artifacts. + * @param printDebugLogs A function with which to print debug logs. + * @param codeql The CodeQL CLI instance. + * @param config The CodeQL Action configuration.
+ * @param repositoryNwo The name and owner of the repository. + * @param features Information about enabled features. + * @param logger The logger to use. + * @returns The results of uploading the SARIF file for the failure. + */ +export async function uploadFailureInfo( uploadAllAvailableDebugArtifacts: ( codeql: CodeQL, config: Config, @@ -171,7 +325,7 @@ export async function run( repositoryNwo: RepositoryNwo, features: FeatureEnablement, logger: Logger, -) { +): Promise { await recordOverlayStatus(codeql, config, features, logger); const uploadFailedSarifResult = await tryUploadSarifIfRunFailed( @@ -183,7 +337,7 @@ export async function run( if (uploadFailedSarifResult.upload_failed_run_skipped_because) { logger.debug( - "Won't upload a failed SARIF file for this CodeQL code scanning run because: " + + "Won't upload a failed SARIF file for this CodeQL analysis because: " + `${uploadFailedSarifResult.upload_failed_run_skipped_because}.`, ); } diff --git a/src/init-action-post.ts b/src/init-action-post.ts index d5aab32f33..ed99701707 100644 --- a/src/init-action-post.ts +++ b/src/init-action-post.ts @@ -77,7 +77,7 @@ async function run(startedAt: Date) { } else { const codeql = await getCodeQL(config.codeQLCmd); - uploadFailedSarifResult = await initActionPostHelper.run( + uploadFailedSarifResult = await initActionPostHelper.uploadFailureInfo( debugArtifacts.tryUploadAllAvailableDebugArtifacts, printDebugLogs, codeql,