From 7cbb19ece7be845e75fd781baaaa53c9520b01c6 Mon Sep 17 00:00:00 2001 From: "Michael B. Gale" Date: Tue, 24 Feb 2026 13:52:41 +0000 Subject: [PATCH 1/6] Refactor `minimalInitCodeQL` out of `combineSarifFilesUsingCLI` --- lib/analyze-action.js | 51 +++++++++++++++-------------- lib/init-action-post.js | 51 +++++++++++++++-------------- lib/upload-lib.js | 53 ++++++++++++++++-------------- lib/upload-sarif-action.js | 51 +++++++++++++++-------------- src/upload-lib.ts | 67 +++++++++++++++++++++----------------- 5 files changed, 144 insertions(+), 129 deletions(-) diff --git a/lib/analyze-action.js b/lib/analyze-action.js index d97d19c515..39ba407e75 100644 --- a/lib/analyze-action.js +++ b/lib/analyze-action.js @@ -112327,6 +112327,31 @@ async function shouldDisableCombineSarifFiles(sarifObjects, githubVersion) { } return true; } +async function minimalInitCodeQL(logger, gitHubVersion, features) { + logger.info( + "Initializing CodeQL since the 'init' Action was not called before this step." + ); + const apiDetails = { + auth: getRequiredInput("token"), + externalRepoAuth: getOptionalInput("external-repository-token"), + url: getRequiredEnvParam("GITHUB_SERVER_URL"), + apiURL: getRequiredEnvParam("GITHUB_API_URL") + }; + const codeQLDefaultVersionInfo = await features.getDefaultCliVersion( + gitHubVersion.type + ); + const initCodeQLResult = await initCodeQL( + void 0, + // There is no tools input on the upload action + apiDetails, + getTemporaryDirectory(), + gitHubVersion.type, + codeQLDefaultVersionInfo, + features, + logger + ); + return initCodeQLResult.codeql; +} async function combineSarifFilesUsingCLI(sarifFiles, gitHubVersion, features, logger) { logger.info("Combining SARIF files using the CodeQL CLI"); const sarifObjects = sarifFiles.map((sarifFile) => { @@ -112357,31 +112382,7 @@ async function combineSarifFilesUsingCLI(sarifFiles, gitHubVersion, features, lo codeQL = await getCodeQL(config.codeQLCmd); tempDir = config.tempDir; } else { - logger.info( - "Initializing CodeQL since the 'init' Action was not called before this step." - ); - const apiDetails = { - auth: getRequiredInput("token"), - externalRepoAuth: getOptionalInput( - "external-repository-token" - ), - url: getRequiredEnvParam("GITHUB_SERVER_URL"), - apiURL: getRequiredEnvParam("GITHUB_API_URL") - }; - const codeQLDefaultVersionInfo = await features.getDefaultCliVersion( - gitHubVersion.type - ); - const initCodeQLResult = await initCodeQL( - void 0, - // There is no tools input on the upload action - apiDetails, - tempDir, - gitHubVersion.type, - codeQLDefaultVersionInfo, - features, - logger - ); - codeQL = initCodeQLResult.codeql; + codeQL = await minimalInitCodeQL(logger, gitHubVersion, features); } const baseTempDir = path14.resolve(tempDir, "combined-sarif"); fs15.mkdirSync(baseTempDir, { recursive: true }); diff --git a/lib/init-action-post.js b/lib/init-action-post.js index f4bc19ecf9..c5e72a6c58 100644 --- a/lib/init-action-post.js +++ b/lib/init-action-post.js @@ -168995,6 +168995,31 @@ async function shouldDisableCombineSarifFiles(sarifObjects, githubVersion) { } return true; } +async function minimalInitCodeQL(logger, gitHubVersion, features) { + logger.info( + "Initializing CodeQL since the 'init' Action was not called before this step." 
+ ); + const apiDetails = { + auth: getRequiredInput("token"), + externalRepoAuth: getOptionalInput("external-repository-token"), + url: getRequiredEnvParam("GITHUB_SERVER_URL"), + apiURL: getRequiredEnvParam("GITHUB_API_URL") + }; + const codeQLDefaultVersionInfo = await features.getDefaultCliVersion( + gitHubVersion.type + ); + const initCodeQLResult = await initCodeQL( + void 0, + // There is no tools input on the upload action + apiDetails, + getTemporaryDirectory(), + gitHubVersion.type, + codeQLDefaultVersionInfo, + features, + logger + ); + return initCodeQLResult.codeql; +} async function combineSarifFilesUsingCLI(sarifFiles, gitHubVersion, features, logger) { logger.info("Combining SARIF files using the CodeQL CLI"); const sarifObjects = sarifFiles.map((sarifFile) => { @@ -169025,31 +169050,7 @@ async function combineSarifFilesUsingCLI(sarifFiles, gitHubVersion, features, lo codeQL = await getCodeQL(config.codeQLCmd); tempDir = config.tempDir; } else { - logger.info( - "Initializing CodeQL since the 'init' Action was not called before this step." - ); - const apiDetails = { - auth: getRequiredInput("token"), - externalRepoAuth: getOptionalInput( - "external-repository-token" - ), - url: getRequiredEnvParam("GITHUB_SERVER_URL"), - apiURL: getRequiredEnvParam("GITHUB_API_URL") - }; - const codeQLDefaultVersionInfo = await features.getDefaultCliVersion( - gitHubVersion.type - ); - const initCodeQLResult = await initCodeQL( - void 0, - // There is no tools input on the upload action - apiDetails, - tempDir, - gitHubVersion.type, - codeQLDefaultVersionInfo, - features, - logger - ); - codeQL = initCodeQLResult.codeql; + codeQL = await minimalInitCodeQL(logger, gitHubVersion, features); } const baseTempDir = path15.resolve(tempDir, "combined-sarif"); fs15.mkdirSync(baseTempDir, { recursive: true }); diff --git a/lib/upload-lib.js b/lib/upload-lib.js index fcec315ba3..984ecc5716 100644 --- a/lib/upload-lib.js +++ b/lib/upload-lib.js @@ -103114,6 +103114,7 @@ __export(upload_lib_exports, { buildPayload: () => buildPayload, findSarifFilesInDir: () => findSarifFilesInDir, getGroupedSarifFilePaths: () => getGroupedSarifFilePaths, + minimalInitCodeQL: () => minimalInitCodeQL, populateRunAutomationDetails: () => populateRunAutomationDetails, postProcessSarifFiles: () => postProcessSarifFiles, readSarifFile: () => readSarifFile, @@ -110218,6 +110219,31 @@ async function shouldDisableCombineSarifFiles(sarifObjects, githubVersion) { } return true; } +async function minimalInitCodeQL(logger, gitHubVersion, features) { + logger.info( + "Initializing CodeQL since the 'init' Action was not called before this step." 
+ ); + const apiDetails = { + auth: getRequiredInput("token"), + externalRepoAuth: getOptionalInput("external-repository-token"), + url: getRequiredEnvParam("GITHUB_SERVER_URL"), + apiURL: getRequiredEnvParam("GITHUB_API_URL") + }; + const codeQLDefaultVersionInfo = await features.getDefaultCliVersion( + gitHubVersion.type + ); + const initCodeQLResult = await initCodeQL( + void 0, + // There is no tools input on the upload action + apiDetails, + getTemporaryDirectory(), + gitHubVersion.type, + codeQLDefaultVersionInfo, + features, + logger + ); + return initCodeQLResult.codeql; +} async function combineSarifFilesUsingCLI(sarifFiles, gitHubVersion, features, logger) { logger.info("Combining SARIF files using the CodeQL CLI"); const sarifObjects = sarifFiles.map((sarifFile) => { @@ -110248,31 +110274,7 @@ async function combineSarifFilesUsingCLI(sarifFiles, gitHubVersion, features, lo codeQL = await getCodeQL(config.codeQLCmd); tempDir = config.tempDir; } else { - logger.info( - "Initializing CodeQL since the 'init' Action was not called before this step." - ); - const apiDetails = { - auth: getRequiredInput("token"), - externalRepoAuth: getOptionalInput( - "external-repository-token" - ), - url: getRequiredEnvParam("GITHUB_SERVER_URL"), - apiURL: getRequiredEnvParam("GITHUB_API_URL") - }; - const codeQLDefaultVersionInfo = await features.getDefaultCliVersion( - gitHubVersion.type - ); - const initCodeQLResult = await initCodeQL( - void 0, - // There is no tools input on the upload action - apiDetails, - tempDir, - gitHubVersion.type, - codeQLDefaultVersionInfo, - features, - logger - ); - codeQL = initCodeQLResult.codeql; + codeQL = await minimalInitCodeQL(logger, gitHubVersion, features); } const baseTempDir = path11.resolve(tempDir, "combined-sarif"); fs11.mkdirSync(baseTempDir, { recursive: true }); @@ -110821,6 +110823,7 @@ function filterAlertsByDiffRange(logger, sarif) { buildPayload, findSarifFilesInDir, getGroupedSarifFilePaths, + minimalInitCodeQL, populateRunAutomationDetails, postProcessSarifFiles, readSarifFile, diff --git a/lib/upload-sarif-action.js b/lib/upload-sarif-action.js index 071fe3b0ca..f4635e27da 100644 --- a/lib/upload-sarif-action.js +++ b/lib/upload-sarif-action.js @@ -110809,6 +110809,31 @@ async function shouldDisableCombineSarifFiles(sarifObjects, githubVersion) { } return true; } +async function minimalInitCodeQL(logger, gitHubVersion, features) { + logger.info( + "Initializing CodeQL since the 'init' Action was not called before this step." 
+ ); + const apiDetails = { + auth: getRequiredInput("token"), + externalRepoAuth: getOptionalInput("external-repository-token"), + url: getRequiredEnvParam("GITHUB_SERVER_URL"), + apiURL: getRequiredEnvParam("GITHUB_API_URL") + }; + const codeQLDefaultVersionInfo = await features.getDefaultCliVersion( + gitHubVersion.type + ); + const initCodeQLResult = await initCodeQL( + void 0, + // There is no tools input on the upload action + apiDetails, + getTemporaryDirectory(), + gitHubVersion.type, + codeQLDefaultVersionInfo, + features, + logger + ); + return initCodeQLResult.codeql; +} async function combineSarifFilesUsingCLI(sarifFiles, gitHubVersion, features, logger) { logger.info("Combining SARIF files using the CodeQL CLI"); const sarifObjects = sarifFiles.map((sarifFile) => { @@ -110839,31 +110864,7 @@ async function combineSarifFilesUsingCLI(sarifFiles, gitHubVersion, features, lo codeQL = await getCodeQL(config.codeQLCmd); tempDir = config.tempDir; } else { - logger.info( - "Initializing CodeQL since the 'init' Action was not called before this step." - ); - const apiDetails = { - auth: getRequiredInput("token"), - externalRepoAuth: getOptionalInput( - "external-repository-token" - ), - url: getRequiredEnvParam("GITHUB_SERVER_URL"), - apiURL: getRequiredEnvParam("GITHUB_API_URL") - }; - const codeQLDefaultVersionInfo = await features.getDefaultCliVersion( - gitHubVersion.type - ); - const initCodeQLResult = await initCodeQL( - void 0, - // There is no tools input on the upload action - apiDetails, - tempDir, - gitHubVersion.type, - codeQLDefaultVersionInfo, - features, - logger - ); - codeQL = initCodeQLResult.codeql; + codeQL = await minimalInitCodeQL(logger, gitHubVersion, features); } const baseTempDir = path12.resolve(tempDir, "combined-sarif"); fs12.mkdirSync(baseTempDir, { recursive: true }); diff --git a/src/upload-lib.ts b/src/upload-lib.ts index 43039c596f..2c58beb2b7 100644 --- a/src/upload-lib.ts +++ b/src/upload-lib.ts @@ -11,7 +11,7 @@ import * as actionsUtil from "./actions-util"; import * as analyses from "./analyses"; import * as api from "./api-client"; import { getGitHubVersion, wrapApiConfigurationError } from "./api-client"; -import { CodeQL, getCodeQL } from "./codeql"; +import { getCodeQL, type CodeQL } from "./codeql"; import { getConfig } from "./config-utils"; import { readDiffRangesJsonFile } from "./diff-informed-analysis-utils"; import { EnvVar } from "./environment"; @@ -183,6 +183,42 @@ async function shouldDisableCombineSarifFiles( return true; } +/** + * Initialises a `CodeQL` instance that we can use to combine SARIF files. 
+ */ +export async function minimalInitCodeQL( + logger: Logger, + gitHubVersion: GitHubVersion, + features: FeatureEnablement, +): Promise { + logger.info( + "Initializing CodeQL since the 'init' Action was not called before this step.", + ); + + const apiDetails = { + auth: actionsUtil.getRequiredInput("token"), + externalRepoAuth: actionsUtil.getOptionalInput("external-repository-token"), + url: getRequiredEnvParam("GITHUB_SERVER_URL"), + apiURL: getRequiredEnvParam("GITHUB_API_URL"), + }; + + const codeQLDefaultVersionInfo = await features.getDefaultCliVersion( + gitHubVersion.type, + ); + + const initCodeQLResult = await initCodeQL( + undefined, // There is no tools input on the upload action + apiDetails, + actionsUtil.getTemporaryDirectory(), + gitHubVersion.type, + codeQLDefaultVersionInfo, + features, + logger, + ); + + return initCodeQLResult.codeql; +} + // Takes a list of paths to sarif files and combines them together using the // CLI `github merge-results` command when all SARIF files are produced by // CodeQL. Otherwise, it will fall back to combining the files in the action. @@ -239,34 +275,7 @@ async function combineSarifFilesUsingCLI( codeQL = await getCodeQL(config.codeQLCmd); tempDir = config.tempDir; } else { - logger.info( - "Initializing CodeQL since the 'init' Action was not called before this step.", - ); - - const apiDetails = { - auth: actionsUtil.getRequiredInput("token"), - externalRepoAuth: actionsUtil.getOptionalInput( - "external-repository-token", - ), - url: getRequiredEnvParam("GITHUB_SERVER_URL"), - apiURL: getRequiredEnvParam("GITHUB_API_URL"), - }; - - const codeQLDefaultVersionInfo = await features.getDefaultCliVersion( - gitHubVersion.type, - ); - - const initCodeQLResult = await initCodeQL( - undefined, // There is no tools input on the upload action - apiDetails, - tempDir, - gitHubVersion.type, - codeQLDefaultVersionInfo, - features, - logger, - ); - - codeQL = initCodeQLResult.codeql; + codeQL = await minimalInitCodeQL(logger, gitHubVersion, features); } const baseTempDir = path.resolve(tempDir, "combined-sarif"); From c59e24e20a62c525fdf86eb1c6102cf919c11090 Mon Sep 17 00:00:00 2001 From: "Michael B. 
Gale" Date: Tue, 24 Feb 2026 14:36:30 +0000 Subject: [PATCH 2/6] Prevent `combineSarifFilesUsingCLI` initialising `CodeQL` instance more than once --- lib/analyze-action.js | 3150 +- lib/init-action-post.js | 5162 +- lib/upload-lib.js | 6500 +- lib/upload-sarif-action.js | 92375 +++++++++++++------------- src/analyze-action.ts | 2 + src/init-action-post-helper.test.ts | 2 + src/init-action-post-helper.ts | 2 + src/upload-lib.ts | 39 +- src/upload-sarif-action.ts | 45 +- src/upload-sarif.test.ts | 11 + src/upload-sarif.ts | 6 + 11 files changed, 52556 insertions(+), 54738 deletions(-) diff --git a/lib/analyze-action.js b/lib/analyze-action.js index 39ba407e75..9294a0560e 100644 --- a/lib/analyze-action.js +++ b/lib/analyze-action.js @@ -108,11 +108,11 @@ var require_command = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.issueCommand = issueCommand; exports2.issue = issue; - var os5 = __importStar2(require("os")); + var os4 = __importStar2(require("os")); var utils_1 = require_utils(); function issueCommand(command, properties, message) { const cmd = new Command(command, properties, message); - process.stdout.write(cmd.toString() + os5.EOL); + process.stdout.write(cmd.toString() + os4.EOL); } function issue(name, message = "") { issueCommand(name, {}, message); @@ -204,18 +204,18 @@ var require_file_command = __commonJS({ exports2.issueFileCommand = issueFileCommand; exports2.prepareKeyValueMessage = prepareKeyValueMessage; var crypto3 = __importStar2(require("crypto")); - var fs17 = __importStar2(require("fs")); - var os5 = __importStar2(require("os")); + var fs14 = __importStar2(require("fs")); + var os4 = __importStar2(require("os")); var utils_1 = require_utils(); function issueFileCommand(command, message) { const filePath = process.env[`GITHUB_${command}`]; if (!filePath) { throw new Error(`Unable to find environment variable for file command ${command}`); } - if (!fs17.existsSync(filePath)) { + if (!fs14.existsSync(filePath)) { throw new Error(`Missing file at path: ${filePath}`); } - fs17.appendFileSync(filePath, `${(0, utils_1.toCommandValue)(message)}${os5.EOL}`, { + fs14.appendFileSync(filePath, `${(0, utils_1.toCommandValue)(message)}${os4.EOL}`, { encoding: "utf8" }); } @@ -228,7 +228,7 @@ var require_file_command = __commonJS({ if (convertedValue.includes(delimiter)) { throw new Error(`Unexpected input: value should not contain the delimiter "${delimiter}"`); } - return `${key}<<${delimiter}${os5.EOL}${convertedValue}${os5.EOL}${delimiter}`; + return `${key}<<${delimiter}${os4.EOL}${convertedValue}${os4.EOL}${delimiter}`; } } }); @@ -1228,7 +1228,7 @@ var require_util = __commonJS({ var assert = require("node:assert"); var { kDestroyed, kBodyUsed, kListeners, kBody } = require_symbols(); var { IncomingMessage } = require("node:http"); - var stream2 = require("node:stream"); + var stream = require("node:stream"); var net = require("node:net"); var { Blob: Blob2 } = require("node:buffer"); var nodeUtil = require("node:util"); @@ -1337,14 +1337,14 @@ var require_util = __commonJS({ } const port = url2.port != null ? url2.port : url2.protocol === "https:" ? 443 : 80; let origin = url2.origin != null ? url2.origin : `${url2.protocol || ""}//${url2.hostname || ""}:${port}`; - let path16 = url2.path != null ? url2.path : `${url2.pathname || ""}${url2.search || ""}`; + let path13 = url2.path != null ? 
url2.path : `${url2.pathname || ""}${url2.search || ""}`; if (origin[origin.length - 1] === "/") { origin = origin.slice(0, origin.length - 1); } - if (path16 && path16[0] !== "/") { - path16 = `/${path16}`; + if (path13 && path13[0] !== "/") { + path13 = `/${path13}`; } - return new URL(`${origin}${path16}`); + return new URL(`${origin}${path13}`); } if (!isHttpOrHttpsPrefixed(url2.origin || url2.protocol)) { throw new InvalidArgumentError("Invalid URL protocol: the URL must start with `http:` or `https:`."); @@ -1402,24 +1402,24 @@ var require_util = __commonJS({ return null; } function isDestroyed(body) { - return body && !!(body.destroyed || body[kDestroyed] || stream2.isDestroyed?.(body)); + return body && !!(body.destroyed || body[kDestroyed] || stream.isDestroyed?.(body)); } - function destroy(stream3, err) { - if (stream3 == null || !isStream(stream3) || isDestroyed(stream3)) { + function destroy(stream2, err) { + if (stream2 == null || !isStream(stream2) || isDestroyed(stream2)) { return; } - if (typeof stream3.destroy === "function") { - if (Object.getPrototypeOf(stream3).constructor === IncomingMessage) { - stream3.socket = null; + if (typeof stream2.destroy === "function") { + if (Object.getPrototypeOf(stream2).constructor === IncomingMessage) { + stream2.socket = null; } - stream3.destroy(err); + stream2.destroy(err); } else if (err) { queueMicrotask(() => { - stream3.emit("error", err); + stream2.emit("error", err); }); } - if (stream3.destroyed !== true) { - stream3[kDestroyed] = true; + if (stream2.destroyed !== true) { + stream2[kDestroyed] = true; } } var KEEPALIVE_TIMEOUT_EXPR = /timeout=(\d+)/; @@ -1518,13 +1518,13 @@ var require_util = __commonJS({ } } function isDisturbed(body) { - return !!(body && (stream2.isDisturbed(body) || body[kBodyUsed])); + return !!(body && (stream.isDisturbed(body) || body[kBodyUsed])); } function isErrored(body) { - return !!(body && stream2.isErrored(body)); + return !!(body && stream.isErrored(body)); } function isReadable(body) { - return !!(body && stream2.isReadable(body)); + return !!(body && stream.isReadable(body)); } function getSocketInfo(socket) { return { @@ -1795,39 +1795,39 @@ var require_diagnostics = __commonJS({ }); diagnosticsChannel.channel("undici:client:sendHeaders").subscribe((evt) => { const { - request: { method, path: path16, origin } + request: { method, path: path13, origin } } = evt; - debuglog("sending request to %s %s/%s", method, origin, path16); + debuglog("sending request to %s %s/%s", method, origin, path13); }); diagnosticsChannel.channel("undici:request:headers").subscribe((evt) => { const { - request: { method, path: path16, origin }, + request: { method, path: path13, origin }, response: { statusCode } } = evt; debuglog( "received response to %s %s/%s - HTTP %d", method, origin, - path16, + path13, statusCode ); }); diagnosticsChannel.channel("undici:request:trailers").subscribe((evt) => { const { - request: { method, path: path16, origin } + request: { method, path: path13, origin } } = evt; - debuglog("trailers received from %s %s/%s", method, origin, path16); + debuglog("trailers received from %s %s/%s", method, origin, path13); }); diagnosticsChannel.channel("undici:request:error").subscribe((evt) => { const { - request: { method, path: path16, origin }, + request: { method, path: path13, origin }, error: error3 } = evt; debuglog( "request to %s %s/%s errored - %s", method, origin, - path16, + path13, error3.message ); }); @@ -1876,9 +1876,9 @@ var require_diagnostics = __commonJS({ }); 
diagnosticsChannel.channel("undici:client:sendHeaders").subscribe((evt) => { const { - request: { method, path: path16, origin } + request: { method, path: path13, origin } } = evt; - debuglog("sending request to %s %s/%s", method, origin, path16); + debuglog("sending request to %s %s/%s", method, origin, path13); }); } diagnosticsChannel.channel("undici:websocket:open").subscribe((evt) => { @@ -1941,7 +1941,7 @@ var require_request = __commonJS({ var kHandler = /* @__PURE__ */ Symbol("handler"); var Request = class { constructor(origin, { - path: path16, + path: path13, method, body, headers, @@ -1956,11 +1956,11 @@ var require_request = __commonJS({ expectContinue, servername }, handler2) { - if (typeof path16 !== "string") { + if (typeof path13 !== "string") { throw new InvalidArgumentError("path must be a string"); - } else if (path16[0] !== "/" && !(path16.startsWith("http://") || path16.startsWith("https://")) && method !== "CONNECT") { + } else if (path13[0] !== "/" && !(path13.startsWith("http://") || path13.startsWith("https://")) && method !== "CONNECT") { throw new InvalidArgumentError("path must be an absolute URL or start with a slash"); - } else if (invalidPathRegex.test(path16)) { + } else if (invalidPathRegex.test(path13)) { throw new InvalidArgumentError("invalid request path"); } if (typeof method !== "string") { @@ -2023,7 +2023,7 @@ var require_request = __commonJS({ this.completed = false; this.aborted = false; this.upgrade = upgrade || null; - this.path = query ? buildURL(path16, query) : path16; + this.path = query ? buildURL(path13, query) : path13; this.origin = origin; this.idempotent = idempotent == null ? method === "HEAD" || method === "GET" : idempotent; this.blocking = blocking == null ? false : blocking; @@ -2335,9 +2335,9 @@ var require_dispatcher_base = __commonJS({ } close(callback) { if (callback === void 0) { - return new Promise((resolve8, reject) => { + return new Promise((resolve7, reject) => { this.close((err, data) => { - return err ? reject(err) : resolve8(data); + return err ? reject(err) : resolve7(data); }); }); } @@ -2375,12 +2375,12 @@ var require_dispatcher_base = __commonJS({ err = null; } if (callback === void 0) { - return new Promise((resolve8, reject) => { + return new Promise((resolve7, reject) => { this.destroy(err, (err2, data) => { return err2 ? 
( /* istanbul ignore next: should never error */ reject(err2) - ) : resolve8(data); + ) : resolve7(data); }); }); } @@ -4256,7 +4256,7 @@ var require_util2 = __commonJS({ var { redirectStatusSet, referrerPolicySet: referrerPolicyTokens, badPortsSet } = require_constants3(); var { getGlobalOrigin } = require_global(); var { collectASequenceOfCodePoints, collectAnHTTPQuotedString, removeChars, parseMIMEType } = require_data_url(); - var { performance: performance5 } = require("node:perf_hooks"); + var { performance: performance4 } = require("node:perf_hooks"); var { isBlobLike, ReadableStreamFrom, isValidHTTPToken, normalizedMethodRecordsBase } = require_util(); var assert = require("node:assert"); var { isUint8Array } = require("node:util/types"); @@ -4415,7 +4415,7 @@ var require_util2 = __commonJS({ }; } function coarsenedSharedCurrentTime(crossOriginIsolatedCapability) { - return coarsenTime(performance5.now(), crossOriginIsolatedCapability); + return coarsenTime(performance4.now(), crossOriginIsolatedCapability); } function createOpaqueTimingInfo(timingInfo) { return { @@ -4647,8 +4647,8 @@ var require_util2 = __commonJS({ function createDeferredPromise() { let res; let rej; - const promise = new Promise((resolve8, reject) => { - res = resolve8; + const promise = new Promise((resolve7, reject) => { + res = resolve7; rej = reject; }); return { promise, resolve: res, reject: rej }; @@ -4813,8 +4813,8 @@ var require_util2 = __commonJS({ errorSteps(e); } } - function isReadableStreamLike(stream2) { - return stream2 instanceof ReadableStream || stream2[Symbol.toStringTag] === "ReadableStream" && typeof stream2.tee === "function"; + function isReadableStreamLike(stream) { + return stream instanceof ReadableStream || stream[Symbol.toStringTag] === "ReadableStream" && typeof stream.tee === "function"; } function readableStreamClose(controller) { try { @@ -5630,20 +5630,20 @@ var require_body = __commonJS({ var streamRegistry; if (hasFinalizationRegistry) { streamRegistry = new FinalizationRegistry((weakRef) => { - const stream2 = weakRef.deref(); - if (stream2 && !stream2.locked && !isDisturbed(stream2) && !isErrored(stream2)) { - stream2.cancel("Response object has been garbage collected").catch(noop3); + const stream = weakRef.deref(); + if (stream && !stream.locked && !isDisturbed(stream) && !isErrored(stream)) { + stream.cancel("Response object has been garbage collected").catch(noop3); } }); } function extractBody(object, keepalive = false) { - let stream2 = null; + let stream = null; if (object instanceof ReadableStream) { - stream2 = object; + stream = object; } else if (isBlobLike(object)) { - stream2 = object.stream(); + stream = object.stream(); } else { - stream2 = new ReadableStream({ + stream = new ReadableStream({ async pull(controller) { const buffer = typeof source === "string" ? textEncoder.encode(source) : source; if (buffer.byteLength) { @@ -5656,7 +5656,7 @@ var require_body = __commonJS({ type: "bytes" }); } - assert(isReadableStreamLike(stream2)); + assert(isReadableStreamLike(stream)); let action = null; let source = null; let length = null; @@ -5735,14 +5735,14 @@ Content-Type: ${value.type || "application/octet-stream"}\r "Response body object should not be disturbed or locked" ); } - stream2 = object instanceof ReadableStream ? object : ReadableStreamFrom(object); + stream = object instanceof ReadableStream ? 
object : ReadableStreamFrom(object); } if (typeof source === "string" || util.isBuffer(source)) { length = Buffer.byteLength(source); } if (action != null) { let iterator2; - stream2 = new ReadableStream({ + stream = new ReadableStream({ async start() { iterator2 = action(object)[Symbol.asyncIterator](); }, @@ -5754,7 +5754,7 @@ Content-Type: ${value.type || "application/octet-stream"}\r controller.byobRequest?.respond(0); }); } else { - if (!isErrored(stream2)) { + if (!isErrored(stream)) { const buffer = new Uint8Array(value); if (buffer.byteLength) { controller.enqueue(buffer); @@ -5769,7 +5769,7 @@ Content-Type: ${value.type || "application/octet-stream"}\r type: "bytes" }); } - const body = { stream: stream2, source, length }; + const body = { stream, source, length }; return [body, type2]; } function safelyExtractBody(object, keepalive = false) { @@ -6536,7 +6536,7 @@ var require_client_h1 = __commonJS({ return method !== "GET" && method !== "HEAD" && method !== "OPTIONS" && method !== "TRACE" && method !== "CONNECT"; } function writeH1(client, request2) { - const { method, path: path16, host, upgrade, blocking, reset } = request2; + const { method, path: path13, host, upgrade, blocking, reset } = request2; let { body, headers, contentLength } = request2; const expectsPayload = method === "PUT" || method === "POST" || method === "PATCH" || method === "QUERY" || method === "PROPFIND" || method === "PROPPATCH"; if (util.isFormDataLike(body)) { @@ -6602,7 +6602,7 @@ var require_client_h1 = __commonJS({ if (blocking) { socket[kBlocking] = true; } - let header = `${method} ${path16} HTTP/1.1\r + let header = `${method} ${path13} HTTP/1.1\r `; if (typeof host === "string") { header += `host: ${host}\r @@ -6789,12 +6789,12 @@ upgrade: ${upgrade}\r cb(); } } - const waitForDrain = () => new Promise((resolve8, reject) => { + const waitForDrain = () => new Promise((resolve7, reject) => { assert(callback === null); if (socket[kError]) { reject(socket[kError]); } else { - callback = resolve8; + callback = resolve7; } }); socket.on("close", onDrain).on("drain", onDrain); @@ -7128,7 +7128,7 @@ var require_client_h2 = __commonJS({ } function writeH2(client, request2) { const session = client[kHTTP2Session]; - const { method, path: path16, host, upgrade, expectContinue, signal, headers: reqHeaders } = request2; + const { method, path: path13, host, upgrade, expectContinue, signal, headers: reqHeaders } = request2; let { body } = request2; if (upgrade) { util.errorRequest(client, request2, new Error("Upgrade not supported for H2")); @@ -7150,7 +7150,7 @@ var require_client_h2 = __commonJS({ headers[key] = val; } } - let stream2; + let stream; const { hostname, port } = client[kUrl]; headers[HTTP2_HEADER_AUTHORITY] = host || `${hostname}${port ? 
`:${port}` : ""}`; headers[HTTP2_HEADER_METHOD] = method; @@ -7160,8 +7160,8 @@ var require_client_h2 = __commonJS({ } err = err || new RequestAbortedError(); util.errorRequest(client, request2, err); - if (stream2 != null) { - util.destroy(stream2, err); + if (stream != null) { + util.destroy(stream, err); } util.destroy(body, err); client[kQueue][client[kRunningIdx]++] = null; @@ -7177,25 +7177,25 @@ var require_client_h2 = __commonJS({ } if (method === "CONNECT") { session.ref(); - stream2 = session.request(headers, { endStream: false, signal }); - if (stream2.id && !stream2.pending) { - request2.onUpgrade(null, null, stream2); + stream = session.request(headers, { endStream: false, signal }); + if (stream.id && !stream.pending) { + request2.onUpgrade(null, null, stream); ++session[kOpenStreams]; client[kQueue][client[kRunningIdx]++] = null; } else { - stream2.once("ready", () => { - request2.onUpgrade(null, null, stream2); + stream.once("ready", () => { + request2.onUpgrade(null, null, stream); ++session[kOpenStreams]; client[kQueue][client[kRunningIdx]++] = null; }); } - stream2.once("close", () => { + stream.once("close", () => { session[kOpenStreams] -= 1; if (session[kOpenStreams] === 0) session.unref(); }); return true; } - headers[HTTP2_HEADER_PATH] = path16; + headers[HTTP2_HEADER_PATH] = path13; headers[HTTP2_HEADER_SCHEME] = "https"; const expectsPayload = method === "PUT" || method === "POST" || method === "PATCH"; if (body && typeof body.read === "function") { @@ -7230,36 +7230,36 @@ var require_client_h2 = __commonJS({ const shouldEndStream = method === "GET" || method === "HEAD" || body === null; if (expectContinue) { headers[HTTP2_HEADER_EXPECT] = "100-continue"; - stream2 = session.request(headers, { endStream: shouldEndStream, signal }); - stream2.once("continue", writeBodyH2); + stream = session.request(headers, { endStream: shouldEndStream, signal }); + stream.once("continue", writeBodyH2); } else { - stream2 = session.request(headers, { + stream = session.request(headers, { endStream: shouldEndStream, signal }); writeBodyH2(); } ++session[kOpenStreams]; - stream2.once("response", (headers2) => { + stream.once("response", (headers2) => { const { [HTTP2_HEADER_STATUS]: statusCode, ...realHeaders } = headers2; request2.onResponseStarted(); if (request2.aborted) { const err = new RequestAbortedError(); util.errorRequest(client, request2, err); - util.destroy(stream2, err); + util.destroy(stream, err); return; } - if (request2.onHeaders(Number(statusCode), parseH2Headers(realHeaders), stream2.resume.bind(stream2), "") === false) { - stream2.pause(); + if (request2.onHeaders(Number(statusCode), parseH2Headers(realHeaders), stream.resume.bind(stream), "") === false) { + stream.pause(); } - stream2.on("data", (chunk) => { + stream.on("data", (chunk) => { if (request2.onData(chunk) === false) { - stream2.pause(); + stream.pause(); } }); }); - stream2.once("end", () => { - if (stream2.state?.state == null || stream2.state.state < 6) { + stream.once("end", () => { + if (stream.state?.state == null || stream.state.state < 6) { request2.onComplete([]); } if (session[kOpenStreams] === 0) { @@ -7270,16 +7270,16 @@ var require_client_h2 = __commonJS({ client[kPendingIdx] = client[kRunningIdx]; client[kResume](); }); - stream2.once("close", () => { + stream.once("close", () => { session[kOpenStreams] -= 1; if (session[kOpenStreams] === 0) { session.unref(); } }); - stream2.once("error", function(err) { + stream.once("error", function(err) { abort(err); }); - 
stream2.once("frameError", (type2, code) => { + stream.once("frameError", (type2, code) => { abort(new InformationalError(`HTTP/2: "frameError" received - type ${type2}, code ${code}`)); }); return true; @@ -7287,7 +7287,7 @@ var require_client_h2 = __commonJS({ if (!body || contentLength === 0) { writeBuffer( abort, - stream2, + stream, null, client, request2, @@ -7298,7 +7298,7 @@ var require_client_h2 = __commonJS({ } else if (util.isBuffer(body)) { writeBuffer( abort, - stream2, + stream, body, client, request2, @@ -7310,7 +7310,7 @@ var require_client_h2 = __commonJS({ if (typeof body.stream === "function") { writeIterable( abort, - stream2, + stream, body.stream(), client, request2, @@ -7321,7 +7321,7 @@ var require_client_h2 = __commonJS({ } else { writeBlob( abort, - stream2, + stream, body, client, request2, @@ -7335,7 +7335,7 @@ var require_client_h2 = __commonJS({ abort, client[kSocket], expectsPayload, - stream2, + stream, body, client, request2, @@ -7344,7 +7344,7 @@ var require_client_h2 = __commonJS({ } else if (util.isIterable(body)) { writeIterable( abort, - stream2, + stream, body, client, request2, @@ -7431,12 +7431,12 @@ var require_client_h2 = __commonJS({ cb(); } } - const waitForDrain = () => new Promise((resolve8, reject) => { + const waitForDrain = () => new Promise((resolve7, reject) => { assert(callback === null); if (socket[kError]) { reject(socket[kError]); } else { - callback = resolve8; + callback = resolve7; } }); h2stream.on("close", onDrain).on("drain", onDrain); @@ -7548,9 +7548,9 @@ var require_redirect_handler = __commonJS({ return this.handler.onHeaders(statusCode, headers, resume, statusText); } const { origin, pathname, search } = util.parseURL(new URL(this.location, this.opts.origin && new URL(this.opts.path, this.opts.origin))); - const path16 = search ? `${pathname}${search}` : pathname; + const path13 = search ? 
`${pathname}${search}` : pathname; this.opts.headers = cleanRequestHeaders(this.opts.headers, statusCode === 303, this.opts.origin !== origin); - this.opts.path = path16; + this.opts.path = path13; this.opts.origin = origin; this.opts.maxRedirections = 0; this.opts.query = null; @@ -7913,16 +7913,16 @@ var require_client = __commonJS({ return this[kNeedDrain] < 2; } async [kClose]() { - return new Promise((resolve8) => { + return new Promise((resolve7) => { if (this[kSize]) { - this[kClosedResolve] = resolve8; + this[kClosedResolve] = resolve7; } else { - resolve8(null); + resolve7(null); } }); } async [kDestroy](err) { - return new Promise((resolve8) => { + return new Promise((resolve7) => { const requests = this[kQueue].splice(this[kPendingIdx]); for (let i = 0; i < requests.length; i++) { const request2 = requests[i]; @@ -7933,7 +7933,7 @@ var require_client = __commonJS({ this[kClosedResolve](); this[kClosedResolve] = null; } - resolve8(null); + resolve7(null); }; if (this[kHTTPContext]) { this[kHTTPContext].destroy(err, callback); @@ -7984,7 +7984,7 @@ var require_client = __commonJS({ }); } try { - const socket = await new Promise((resolve8, reject) => { + const socket = await new Promise((resolve7, reject) => { client[kConnector]({ host, hostname, @@ -7996,7 +7996,7 @@ var require_client = __commonJS({ if (err) { reject(err); } else { - resolve8(socket2); + resolve7(socket2); } }); }); @@ -8332,8 +8332,8 @@ var require_pool_base = __commonJS({ if (this[kQueue].isEmpty()) { await Promise.all(this[kClients].map((c) => c.close())); } else { - await new Promise((resolve8) => { - this[kClosedResolve] = resolve8; + await new Promise((resolve7) => { + this[kClosedResolve] = resolve7; }); } } @@ -8784,10 +8784,10 @@ var require_proxy_agent = __commonJS({ }; const { origin, - path: path16 = "/", + path: path13 = "/", headers = {} } = opts; - opts.path = origin + path16; + opts.path = origin + path13; if (!("host" in headers) && !("Host" in headers)) { const { host } = new URL2(origin); headers.host = host; @@ -9421,7 +9421,7 @@ var require_readable = __commonJS({ "node_modules/@actions/http-client/node_modules/undici/lib/api/readable.js"(exports2, module2) { "use strict"; var assert = require("node:assert"); - var { Readable: Readable2 } = require("node:stream"); + var { Readable } = require("node:stream"); var { RequestAbortedError, NotSupportedError, InvalidArgumentError, AbortError } = require_errors(); var util = require_util(); var { ReadableStreamFrom } = require_util(); @@ -9433,7 +9433,7 @@ var require_readable = __commonJS({ var kContentLength = /* @__PURE__ */ Symbol("kContentLength"); var noop3 = () => { }; - var BodyReadable = class extends Readable2 { + var BodyReadable = class extends Readable { constructor({ resume, abort, @@ -9548,7 +9548,7 @@ var require_readable = __commonJS({ if (this._readableState.closeEmitted) { return null; } - return await new Promise((resolve8, reject) => { + return await new Promise((resolve7, reject) => { if (this[kContentLength] > limit) { this.destroy(new AbortError()); } @@ -9561,7 +9561,7 @@ var require_readable = __commonJS({ if (signal?.aborted) { reject(signal.reason ?? 
new AbortError()); } else { - resolve8(null); + resolve7(null); } }).on("error", noop3).on("data", function(chunk) { limit -= chunk.length; @@ -9578,13 +9578,13 @@ var require_readable = __commonJS({ function isUnusable(self2) { return util.isDisturbed(self2) || isLocked(self2); } - async function consume(stream2, type2) { - assert(!stream2[kConsume]); - return new Promise((resolve8, reject) => { - if (isUnusable(stream2)) { - const rState = stream2._readableState; + async function consume(stream, type2) { + assert(!stream[kConsume]); + return new Promise((resolve7, reject) => { + if (isUnusable(stream)) { + const rState = stream._readableState; if (rState.destroyed && rState.closeEmitted === false) { - stream2.on("error", (err) => { + stream.on("error", (err) => { reject(err); }).on("close", () => { reject(new TypeError("unusable")); @@ -9594,22 +9594,22 @@ var require_readable = __commonJS({ } } else { queueMicrotask(() => { - stream2[kConsume] = { + stream[kConsume] = { type: type2, - stream: stream2, - resolve: resolve8, + stream, + resolve: resolve7, reject, length: 0, body: [] }; - stream2.on("error", function(err) { + stream.on("error", function(err) { consumeFinish(this[kConsume], err); }).on("close", function() { if (this[kConsume].body !== null) { consumeFinish(this[kConsume], new RequestAbortedError()); } }); - consumeStart(stream2[kConsume]); + consumeStart(stream[kConsume]); }); } }); @@ -9667,22 +9667,22 @@ var require_readable = __commonJS({ return buffer; } function consumeEnd(consume2) { - const { type: type2, body, resolve: resolve8, stream: stream2, length } = consume2; + const { type: type2, body, resolve: resolve7, stream, length } = consume2; try { if (type2 === "text") { - resolve8(chunksDecode(body, length)); + resolve7(chunksDecode(body, length)); } else if (type2 === "json") { - resolve8(JSON.parse(chunksDecode(body, length))); + resolve7(JSON.parse(chunksDecode(body, length))); } else if (type2 === "arrayBuffer") { - resolve8(chunksConcat(body, length).buffer); + resolve7(chunksConcat(body, length).buffer); } else if (type2 === "blob") { - resolve8(new Blob(body, { type: stream2[kContentType] })); + resolve7(new Blob(body, { type: stream[kContentType] })); } else if (type2 === "bytes") { - resolve8(chunksConcat(body, length)); + resolve7(chunksConcat(body, length)); } consumeFinish(consume2); } catch (err) { - stream2.destroy(err); + stream.destroy(err); } } function consumePush(consume2, chunk) { @@ -9775,7 +9775,7 @@ var require_api_request = __commonJS({ "node_modules/@actions/http-client/node_modules/undici/lib/api/api-request.js"(exports2, module2) { "use strict"; var assert = require("node:assert"); - var { Readable: Readable2 } = require_readable(); + var { Readable } = require_readable(); var { InvalidArgumentError, RequestAbortedError } = require_errors(); var util = require_util(); var { getResolveErrorBodyCallback } = require_util3(); @@ -9870,7 +9870,7 @@ var require_api_request = __commonJS({ const parsedHeaders = responseHeaders === "raw" ? util.parseHeaders(rawHeaders) : headers; const contentType = parsedHeaders["content-type"]; const contentLength = parsedHeaders["content-length"]; - const res = new Readable2({ + const res = new Readable({ resume, abort, contentType, @@ -9935,9 +9935,9 @@ var require_api_request = __commonJS({ }; function request2(opts, callback) { if (callback === void 0) { - return new Promise((resolve8, reject) => { + return new Promise((resolve7, reject) => { request2.call(this, opts, (err, data) => { - return err ? 
reject(err) : resolve8(data); + return err ? reject(err) : resolve7(data); }); }); } @@ -10158,11 +10158,11 @@ var require_api_stream = __commonJS({ } } }; - function stream2(opts, factory, callback) { + function stream(opts, factory, callback) { if (callback === void 0) { - return new Promise((resolve8, reject) => { - stream2.call(this, opts, factory, (err, data) => { - return err ? reject(err) : resolve8(data); + return new Promise((resolve7, reject) => { + stream.call(this, opts, factory, (err, data) => { + return err ? reject(err) : resolve7(data); }); }); } @@ -10176,7 +10176,7 @@ var require_api_stream = __commonJS({ queueMicrotask(() => callback(err, { opaque })); } } - module2.exports = stream2; + module2.exports = stream; } }); @@ -10185,7 +10185,7 @@ var require_api_pipeline = __commonJS({ "node_modules/@actions/http-client/node_modules/undici/lib/api/api-pipeline.js"(exports2, module2) { "use strict"; var { - Readable: Readable2, + Readable, Duplex, PassThrough } = require("node:stream"); @@ -10199,7 +10199,7 @@ var require_api_pipeline = __commonJS({ var { addSignal, removeSignal } = require_abort_signal(); var assert = require("node:assert"); var kResume = /* @__PURE__ */ Symbol("resume"); - var PipelineRequest = class extends Readable2 { + var PipelineRequest = class extends Readable { constructor() { super({ autoDestroy: true }); this[kResume] = null; @@ -10216,7 +10216,7 @@ var require_api_pipeline = __commonJS({ callback(err); } }; - var PipelineResponse = class extends Readable2 { + var PipelineResponse = class extends Readable { constructor(resume) { super({ autoDestroy: true }); this[kResume] = resume; @@ -10447,9 +10447,9 @@ var require_api_upgrade = __commonJS({ }; function upgrade(opts, callback) { if (callback === void 0) { - return new Promise((resolve8, reject) => { + return new Promise((resolve7, reject) => { upgrade.call(this, opts, (err, data) => { - return err ? reject(err) : resolve8(data); + return err ? reject(err) : resolve7(data); }); }); } @@ -10541,9 +10541,9 @@ var require_api_connect = __commonJS({ }; function connect(opts, callback) { if (callback === void 0) { - return new Promise((resolve8, reject) => { + return new Promise((resolve7, reject) => { connect.call(this, opts, (err, data) => { - return err ? reject(err) : resolve8(data); + return err ? reject(err) : resolve7(data); }); }); } @@ -10708,20 +10708,20 @@ var require_mock_utils = __commonJS({ } return true; } - function safeUrl(path16) { - if (typeof path16 !== "string") { - return path16; + function safeUrl(path13) { + if (typeof path13 !== "string") { + return path13; } - const pathSegments = path16.split("?"); + const pathSegments = path13.split("?"); if (pathSegments.length !== 2) { - return path16; + return path13; } const qp = new URLSearchParams(pathSegments.pop()); qp.sort(); return [...pathSegments, qp.toString()].join("?"); } - function matchKey(mockDispatch2, { path: path16, method, body, headers }) { - const pathMatch = matchValue(mockDispatch2.path, path16); + function matchKey(mockDispatch2, { path: path13, method, body, headers }) { + const pathMatch = matchValue(mockDispatch2.path, path13); const methodMatch = matchValue(mockDispatch2.method, method); const bodyMatch = typeof mockDispatch2.body !== "undefined" ? matchValue(mockDispatch2.body, body) : true; const headersMatch = matchHeaders(mockDispatch2, headers); @@ -10743,7 +10743,7 @@ var require_mock_utils = __commonJS({ function getMockDispatch(mockDispatches, key) { const basePath = key.query ? 
buildURL(key.path, key.query) : key.path; const resolvedPath = typeof basePath === "string" ? safeUrl(basePath) : basePath; - let matchedMockDispatches = mockDispatches.filter(({ consumed }) => !consumed).filter(({ path: path16 }) => matchValue(safeUrl(path16), resolvedPath)); + let matchedMockDispatches = mockDispatches.filter(({ consumed }) => !consumed).filter(({ path: path13 }) => matchValue(safeUrl(path13), resolvedPath)); if (matchedMockDispatches.length === 0) { throw new MockNotMatchedError(`Mock dispatch not matched for path '${resolvedPath}'`); } @@ -10781,9 +10781,9 @@ var require_mock_utils = __commonJS({ } } function buildKey(opts) { - const { path: path16, method, body, headers, query } = opts; + const { path: path13, method, body, headers, query } = opts; return { - path: path16, + path: path13, method, body, headers, @@ -11246,10 +11246,10 @@ var require_pending_interceptors_formatter = __commonJS({ } format(pendingInterceptors) { const withPrettyHeaders = pendingInterceptors.map( - ({ method, path: path16, data: { statusCode }, persist, times, timesInvoked, origin }) => ({ + ({ method, path: path13, data: { statusCode }, persist, times, timesInvoked, origin }) => ({ Method: method, Origin: origin, - Path: path16, + Path: path13, "Status code": statusCode, Persistent: persist ? PERSISTENT : NOT_PERSISTENT, Invocations: timesInvoked, @@ -13541,7 +13541,7 @@ var require_fetch = __commonJS({ subresourceSet } = require_constants3(); var EE = require("node:events"); - var { Readable: Readable2, pipeline, finished } = require("node:stream"); + var { Readable, pipeline, finished } = require("node:stream"); var { addAbortListener, isErrored, isReadable, bufferToLowerCasedHeaderName } = require_util(); var { dataURLProcessor, serializeAMimeType, minimizeSupportedMimeType } = require_data_url(); var { getGlobalDispatcher } = require_global2(); @@ -14326,7 +14326,7 @@ var require_fetch = __commonJS({ fetchParams.controller.abort(reason); } }; - const stream2 = new ReadableStream( + const stream = new ReadableStream( { async start(controller) { fetchParams.controller.controller = controller; @@ -14340,7 +14340,7 @@ var require_fetch = __commonJS({ type: "bytes" } ); - response.body = { stream: stream2, source: null, length: null }; + response.body = { stream, source: null, length: null }; fetchParams.controller.onAborted = onAborted; fetchParams.controller.on("terminated", onAborted); fetchParams.controller.resume = async () => { @@ -14375,7 +14375,7 @@ var require_fetch = __commonJS({ if (buffer.byteLength) { fetchParams.controller.controller.enqueue(buffer); } - if (isErrored(stream2)) { + if (isErrored(stream)) { fetchParams.controller.terminate(); return; } @@ -14387,13 +14387,13 @@ var require_fetch = __commonJS({ function onAborted(reason) { if (isAborted(fetchParams)) { response.aborted = true; - if (isReadable(stream2)) { + if (isReadable(stream)) { fetchParams.controller.controller.error( fetchParams.controller.serializedAbortReason ); } } else { - if (isReadable(stream2)) { + if (isReadable(stream)) { fetchParams.controller.controller.error(new TypeError("terminated", { cause: isErrorLike(reason) ? 
reason : void 0 })); @@ -14405,7 +14405,7 @@ var require_fetch = __commonJS({ function dispatch({ body }) { const url2 = requestCurrentURL(request2); const agent = fetchParams.controller.dispatcher; - return new Promise((resolve8, reject) => agent.dispatch( + return new Promise((resolve7, reject) => agent.dispatch( { path: url2.pathname + url2.search, origin: url2.origin, @@ -14442,7 +14442,7 @@ var require_fetch = __commonJS({ headersList.append(bufferToLowerCasedHeaderName(rawHeaders[i]), rawHeaders[i + 1].toString("latin1"), true); } location = headersList.get("location", true); - this.body = new Readable2({ read: resume }); + this.body = new Readable({ read: resume }); const decoders = []; const willFollow = location && request2.redirect === "follow" && redirectStatusSet.has(status); if (request2.method !== "HEAD" && request2.method !== "CONNECT" && !nullBodyStatus.includes(status) && !willFollow) { @@ -14481,7 +14481,7 @@ var require_fetch = __commonJS({ } } const onError = this.onError.bind(this); - resolve8({ + resolve7({ status, statusText, headersList, @@ -14527,7 +14527,7 @@ var require_fetch = __commonJS({ for (let i = 0; i < rawHeaders.length; i += 2) { headersList.append(bufferToLowerCasedHeaderName(rawHeaders[i]), rawHeaders[i + 1].toString("latin1"), true); } - resolve8({ + resolve7({ status, statusText: STATUS_CODES[status], headersList, @@ -14946,8 +14946,8 @@ var require_util4 = __commonJS({ fr[kState] = "loading"; fr[kResult] = null; fr[kError] = null; - const stream2 = blob.stream(); - const reader = stream2.getReader(); + const stream = blob.stream(); + const reader = stream.getReader(); const bytes = []; let chunkPromise = reader.read(); let isFirstChunk = true; @@ -15606,8 +15606,8 @@ var require_cache = __commonJS({ const clonedResponse = cloneResponse(innerResponse); const bodyReadPromise = createDeferredPromise(); if (innerResponse.body != null) { - const stream2 = innerResponse.body.stream; - const reader = stream2.getReader(); + const stream = innerResponse.body.stream; + const reader = stream.getReader(); readAllBytes(reader).then(bodyReadPromise.resolve, bodyReadPromise.reject); } else { bodyReadPromise.resolve(void 0); @@ -16130,9 +16130,9 @@ var require_util6 = __commonJS({ } } } - function validateCookiePath(path16) { - for (let i = 0; i < path16.length; ++i) { - const code = path16.charCodeAt(i); + function validateCookiePath(path13) { + for (let i = 0; i < path13.length; ++i) { + const code = path13.charCodeAt(i); if (code < 32 || // exclude CTLs (0-31) code === 127 || // DEL code === 59) { @@ -17031,7 +17031,7 @@ var require_frame = __commonJS({ } catch { crypto3 = { // not full compatibility, but minimum. 
- randomFillSync: function randomFillSync2(buffer2, _offset, _size) { + randomFillSync: function randomFillSync(buffer2, _offset, _size) { for (let i = 0; i < buffer2.length; ++i) { buffer2[i] = Math.random() * 255 | 0; } @@ -18120,8 +18120,8 @@ var require_util8 = __commonJS({ return true; } function delay2(ms) { - return new Promise((resolve8) => { - setTimeout(resolve8, ms).unref(); + return new Promise((resolve7) => { + setTimeout(resolve7, ms).unref(); }); } module2.exports = { @@ -18726,11 +18726,11 @@ var require_undici = __commonJS({ if (typeof opts.path !== "string") { throw new InvalidArgumentError("invalid opts.path"); } - let path16 = opts.path; + let path13 = opts.path; if (!opts.path.startsWith("/")) { - path16 = `/${path16}`; + path13 = `/${path13}`; } - url2 = new URL(util.parseOrigin(url2).origin + path16); + url2 = new URL(util.parseOrigin(url2).origin + path13); } else { if (!opts) { opts = typeof url2 === "object" ? url2 : {}; @@ -18844,11 +18844,11 @@ var require_lib = __commonJS({ })(); var __awaiter2 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { function adopt(value) { - return value instanceof P ? value : new P(function(resolve8) { - resolve8(value); + return value instanceof P ? value : new P(function(resolve7) { + resolve7(value); }); } - return new (P || (P = Promise))(function(resolve8, reject) { + return new (P || (P = Promise))(function(resolve7, reject) { function fulfilled(value) { try { step(generator.next(value)); @@ -18864,7 +18864,7 @@ var require_lib = __commonJS({ } } function step(result) { - result.done ? resolve8(result.value) : adopt(result.value).then(fulfilled, rejected); + result.done ? resolve7(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); @@ -18951,26 +18951,26 @@ var require_lib = __commonJS({ } readBody() { return __awaiter2(this, void 0, void 0, function* () { - return new Promise((resolve8) => __awaiter2(this, void 0, void 0, function* () { + return new Promise((resolve7) => __awaiter2(this, void 0, void 0, function* () { let output = Buffer.alloc(0); this.message.on("data", (chunk) => { output = Buffer.concat([output, chunk]); }); this.message.on("end", () => { - resolve8(output.toString()); + resolve7(output.toString()); }); })); }); } readBodyBuffer() { return __awaiter2(this, void 0, void 0, function* () { - return new Promise((resolve8) => __awaiter2(this, void 0, void 0, function* () { + return new Promise((resolve7) => __awaiter2(this, void 0, void 0, function* () { const chunks = []; this.message.on("data", (chunk) => { chunks.push(chunk); }); this.message.on("end", () => { - resolve8(Buffer.concat(chunks)); + resolve7(Buffer.concat(chunks)); }); })); }); @@ -19054,9 +19054,9 @@ var require_lib = __commonJS({ return this.request("HEAD", requestUrl, null, additionalHeaders || {}); }); } - sendStream(verb, requestUrl, stream2, additionalHeaders) { + sendStream(verb, requestUrl, stream, additionalHeaders) { return __awaiter2(this, void 0, void 0, function* () { - return this.request(verb, requestUrl, stream2, additionalHeaders); + return this.request(verb, requestUrl, stream, additionalHeaders); }); } /** @@ -19178,14 +19178,14 @@ var require_lib = __commonJS({ */ requestRaw(info6, data) { return __awaiter2(this, void 0, void 0, function* () { - return new Promise((resolve8, reject) => { + return new Promise((resolve7, reject) => { function callbackForResult(err, res) { if (err) { reject(err); } else if 
(!res) { reject(new Error("Unknown error")); } else { - resolve8(res); + resolve7(res); } } this.requestRawWithCallback(info6, data, callbackForResult); @@ -19429,12 +19429,12 @@ var require_lib = __commonJS({ return __awaiter2(this, void 0, void 0, function* () { retryNumber = Math.min(ExponentialBackoffCeiling, retryNumber); const ms = ExponentialBackoffTimeSlice * Math.pow(2, retryNumber); - return new Promise((resolve8) => setTimeout(() => resolve8(), ms)); + return new Promise((resolve7) => setTimeout(() => resolve7(), ms)); }); } _processResponse(res, options) { return __awaiter2(this, void 0, void 0, function* () { - return new Promise((resolve8, reject) => __awaiter2(this, void 0, void 0, function* () { + return new Promise((resolve7, reject) => __awaiter2(this, void 0, void 0, function* () { const statusCode = res.message.statusCode || 0; const response = { statusCode, @@ -19442,7 +19442,7 @@ var require_lib = __commonJS({ headers: {} }; if (statusCode === HttpCodes.NotFound) { - resolve8(response); + resolve7(response); } function dateTimeDeserializer(key, value) { if (typeof value === "string") { @@ -19481,7 +19481,7 @@ var require_lib = __commonJS({ err.result = response.result; reject(err); } else { - resolve8(response); + resolve7(response); } })); }); @@ -19498,11 +19498,11 @@ var require_auth = __commonJS({ "use strict"; var __awaiter2 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { function adopt(value) { - return value instanceof P ? value : new P(function(resolve8) { - resolve8(value); + return value instanceof P ? value : new P(function(resolve7) { + resolve7(value); }); } - return new (P || (P = Promise))(function(resolve8, reject) { + return new (P || (P = Promise))(function(resolve7, reject) { function fulfilled(value) { try { step(generator.next(value)); @@ -19518,7 +19518,7 @@ var require_auth = __commonJS({ } } function step(result) { - result.done ? resolve8(result.value) : adopt(result.value).then(fulfilled, rejected); + result.done ? resolve7(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); @@ -19602,11 +19602,11 @@ var require_oidc_utils = __commonJS({ "use strict"; var __awaiter2 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { function adopt(value) { - return value instanceof P ? value : new P(function(resolve8) { - resolve8(value); + return value instanceof P ? value : new P(function(resolve7) { + resolve7(value); }); } - return new (P || (P = Promise))(function(resolve8, reject) { + return new (P || (P = Promise))(function(resolve7, reject) { function fulfilled(value) { try { step(generator.next(value)); @@ -19622,7 +19622,7 @@ var require_oidc_utils = __commonJS({ } } function step(result) { - result.done ? resolve8(result.value) : adopt(result.value).then(fulfilled, rejected); + result.done ? resolve7(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); @@ -19700,11 +19700,11 @@ var require_summary = __commonJS({ "use strict"; var __awaiter2 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { function adopt(value) { - return value instanceof P ? value : new P(function(resolve8) { - resolve8(value); + return value instanceof P ? 
value : new P(function(resolve7) { + resolve7(value); }); } - return new (P || (P = Promise))(function(resolve8, reject) { + return new (P || (P = Promise))(function(resolve7, reject) { function fulfilled(value) { try { step(generator.next(value)); @@ -19720,7 +19720,7 @@ var require_summary = __commonJS({ } } function step(result) { - result.done ? resolve8(result.value) : adopt(result.value).then(fulfilled, rejected); + result.done ? resolve7(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); @@ -20033,7 +20033,7 @@ var require_path_utils = __commonJS({ exports2.toPosixPath = toPosixPath; exports2.toWin32Path = toWin32Path; exports2.toPlatformPath = toPlatformPath; - var path16 = __importStar2(require("path")); + var path13 = __importStar2(require("path")); function toPosixPath(pth) { return pth.replace(/[\\]/g, "/"); } @@ -20041,7 +20041,7 @@ var require_path_utils = __commonJS({ return pth.replace(/[/]/g, "\\"); } function toPlatformPath(pth) { - return pth.replace(/[/\\]/g, path16.sep); + return pth.replace(/[/\\]/g, path13.sep); } } }); @@ -20089,11 +20089,11 @@ var require_io_util = __commonJS({ })(); var __awaiter2 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { function adopt(value) { - return value instanceof P ? value : new P(function(resolve8) { - resolve8(value); + return value instanceof P ? value : new P(function(resolve7) { + resolve7(value); }); } - return new (P || (P = Promise))(function(resolve8, reject) { + return new (P || (P = Promise))(function(resolve7, reject) { function fulfilled(value) { try { step(generator.next(value)); @@ -20109,7 +20109,7 @@ var require_io_util = __commonJS({ } } function step(result) { - result.done ? resolve8(result.value) : adopt(result.value).then(fulfilled, rejected); + result.done ? 
resolve7(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); @@ -20123,13 +20123,13 @@ var require_io_util = __commonJS({ exports2.isRooted = isRooted; exports2.tryGetExecutablePath = tryGetExecutablePath; exports2.getCmdPath = getCmdPath; - var fs17 = __importStar2(require("fs")); - var path16 = __importStar2(require("path")); - _a = fs17.promises, exports2.chmod = _a.chmod, exports2.copyFile = _a.copyFile, exports2.lstat = _a.lstat, exports2.mkdir = _a.mkdir, exports2.open = _a.open, exports2.readdir = _a.readdir, exports2.rename = _a.rename, exports2.rm = _a.rm, exports2.rmdir = _a.rmdir, exports2.stat = _a.stat, exports2.symlink = _a.symlink, exports2.unlink = _a.unlink; + var fs14 = __importStar2(require("fs")); + var path13 = __importStar2(require("path")); + _a = fs14.promises, exports2.chmod = _a.chmod, exports2.copyFile = _a.copyFile, exports2.lstat = _a.lstat, exports2.mkdir = _a.mkdir, exports2.open = _a.open, exports2.readdir = _a.readdir, exports2.rename = _a.rename, exports2.rm = _a.rm, exports2.rmdir = _a.rmdir, exports2.stat = _a.stat, exports2.symlink = _a.symlink, exports2.unlink = _a.unlink; exports2.IS_WINDOWS = process.platform === "win32"; function readlink(fsPath) { return __awaiter2(this, void 0, void 0, function* () { - const result = yield fs17.promises.readlink(fsPath); + const result = yield fs14.promises.readlink(fsPath); if (exports2.IS_WINDOWS && !result.endsWith("\\")) { return `${result}\\`; } @@ -20137,7 +20137,7 @@ var require_io_util = __commonJS({ }); } exports2.UV_FS_O_EXLOCK = 268435456; - exports2.READONLY = fs17.constants.O_RDONLY; + exports2.READONLY = fs14.constants.O_RDONLY; function exists(fsPath) { return __awaiter2(this, void 0, void 0, function* () { try { @@ -20179,7 +20179,7 @@ var require_io_util = __commonJS({ } if (stats && stats.isFile()) { if (exports2.IS_WINDOWS) { - const upperExt = path16.extname(filePath).toUpperCase(); + const upperExt = path13.extname(filePath).toUpperCase(); if (extensions.some((validExt) => validExt.toUpperCase() === upperExt)) { return filePath; } @@ -20203,11 +20203,11 @@ var require_io_util = __commonJS({ if (stats && stats.isFile()) { if (exports2.IS_WINDOWS) { try { - const directory = path16.dirname(filePath); - const upperName = path16.basename(filePath).toUpperCase(); + const directory = path13.dirname(filePath); + const upperName = path13.basename(filePath).toUpperCase(); for (const actualName of yield (0, exports2.readdir)(directory)) { if (upperName === actualName.toUpperCase()) { - filePath = path16.join(directory, actualName); + filePath = path13.join(directory, actualName); break; } } @@ -20286,11 +20286,11 @@ var require_io = __commonJS({ })(); var __awaiter2 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { function adopt(value) { - return value instanceof P ? value : new P(function(resolve8) { - resolve8(value); + return value instanceof P ? value : new P(function(resolve7) { + resolve7(value); }); } - return new (P || (P = Promise))(function(resolve8, reject) { + return new (P || (P = Promise))(function(resolve7, reject) { function fulfilled(value) { try { step(generator.next(value)); @@ -20306,7 +20306,7 @@ var require_io = __commonJS({ } } function step(result) { - result.done ? resolve8(result.value) : adopt(result.value).then(fulfilled, rejected); + result.done ? 
resolve7(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); @@ -20319,7 +20319,7 @@ var require_io = __commonJS({ exports2.which = which7; exports2.findInPath = findInPath; var assert_1 = require("assert"); - var path16 = __importStar2(require("path")); + var path13 = __importStar2(require("path")); var ioUtil = __importStar2(require_io_util()); function cp(source_1, dest_1) { return __awaiter2(this, arguments, void 0, function* (source, dest, options = {}) { @@ -20328,7 +20328,7 @@ var require_io = __commonJS({ if (destStat && destStat.isFile() && !force) { return; } - const newDest = destStat && destStat.isDirectory() && copySourceDirectory ? path16.join(dest, path16.basename(source)) : dest; + const newDest = destStat && destStat.isDirectory() && copySourceDirectory ? path13.join(dest, path13.basename(source)) : dest; if (!(yield ioUtil.exists(source))) { throw new Error(`no such file or directory: ${source}`); } @@ -20340,7 +20340,7 @@ var require_io = __commonJS({ yield cpDirRecursive(source, newDest, 0, force); } } else { - if (path16.relative(source, newDest) === "") { + if (path13.relative(source, newDest) === "") { throw new Error(`'${newDest}' and '${source}' are the same file`); } yield copyFile2(source, newDest, force); @@ -20352,7 +20352,7 @@ var require_io = __commonJS({ if (yield ioUtil.exists(dest)) { let destExists = true; if (yield ioUtil.isDirectory(dest)) { - dest = path16.join(dest, path16.basename(source)); + dest = path13.join(dest, path13.basename(source)); destExists = yield ioUtil.exists(dest); } if (destExists) { @@ -20363,7 +20363,7 @@ var require_io = __commonJS({ } } } - yield mkdirP(path16.dirname(dest)); + yield mkdirP(path13.dirname(dest)); yield ioUtil.rename(source, dest); }); } @@ -20422,7 +20422,7 @@ var require_io = __commonJS({ } const extensions = []; if (ioUtil.IS_WINDOWS && process.env["PATHEXT"]) { - for (const extension of process.env["PATHEXT"].split(path16.delimiter)) { + for (const extension of process.env["PATHEXT"].split(path13.delimiter)) { if (extension) { extensions.push(extension); } @@ -20435,12 +20435,12 @@ var require_io = __commonJS({ } return []; } - if (tool.includes(path16.sep)) { + if (tool.includes(path13.sep)) { return []; } const directories = []; if (process.env.PATH) { - for (const p of process.env.PATH.split(path16.delimiter)) { + for (const p of process.env.PATH.split(path13.delimiter)) { if (p) { directories.push(p); } @@ -20448,7 +20448,7 @@ var require_io = __commonJS({ } const matches = []; for (const directory of directories) { - const filePath = yield ioUtil.tryGetExecutablePath(path16.join(directory, tool), extensions); + const filePath = yield ioUtil.tryGetExecutablePath(path13.join(directory, tool), extensions); if (filePath) { matches.push(filePath); } @@ -20547,11 +20547,11 @@ var require_toolrunner = __commonJS({ })(); var __awaiter2 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { function adopt(value) { - return value instanceof P ? value : new P(function(resolve8) { - resolve8(value); + return value instanceof P ? value : new P(function(resolve7) { + resolve7(value); }); } - return new (P || (P = Promise))(function(resolve8, reject) { + return new (P || (P = Promise))(function(resolve7, reject) { function fulfilled(value) { try { step(generator.next(value)); @@ -20567,7 +20567,7 @@ var require_toolrunner = __commonJS({ } } function step(result) { - result.done ? 
resolve8(result.value) : adopt(result.value).then(fulfilled, rejected); + result.done ? resolve7(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); @@ -20575,10 +20575,10 @@ var require_toolrunner = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.ToolRunner = void 0; exports2.argStringToArray = argStringToArray; - var os5 = __importStar2(require("os")); + var os4 = __importStar2(require("os")); var events = __importStar2(require("events")); var child = __importStar2(require("child_process")); - var path16 = __importStar2(require("path")); + var path13 = __importStar2(require("path")); var io7 = __importStar2(require_io()); var ioUtil = __importStar2(require_io_util()); var timers_1 = require("timers"); @@ -20630,12 +20630,12 @@ var require_toolrunner = __commonJS({ _processLineBuffer(data, strBuffer, onLine) { try { let s = strBuffer + data.toString(); - let n = s.indexOf(os5.EOL); + let n = s.indexOf(os4.EOL); while (n > -1) { const line = s.substring(0, n); onLine(line); - s = s.substring(n + os5.EOL.length); - n = s.indexOf(os5.EOL); + s = s.substring(n + os4.EOL.length); + n = s.indexOf(os4.EOL); } return s; } catch (err) { @@ -20793,10 +20793,10 @@ var require_toolrunner = __commonJS({ exec() { return __awaiter2(this, void 0, void 0, function* () { if (!ioUtil.isRooted(this.toolPath) && (this.toolPath.includes("/") || IS_WINDOWS && this.toolPath.includes("\\"))) { - this.toolPath = path16.resolve(process.cwd(), this.options.cwd || process.cwd(), this.toolPath); + this.toolPath = path13.resolve(process.cwd(), this.options.cwd || process.cwd(), this.toolPath); } this.toolPath = yield io7.which(this.toolPath, true); - return new Promise((resolve8, reject) => __awaiter2(this, void 0, void 0, function* () { + return new Promise((resolve7, reject) => __awaiter2(this, void 0, void 0, function* () { this._debug(`exec tool: ${this.toolPath}`); this._debug("arguments:"); for (const arg of this.args) { @@ -20804,7 +20804,7 @@ var require_toolrunner = __commonJS({ } const optionsNonNull = this._cloneExecOptions(this.options); if (!optionsNonNull.silent && optionsNonNull.outStream) { - optionsNonNull.outStream.write(this._getCommandString(optionsNonNull) + os5.EOL); + optionsNonNull.outStream.write(this._getCommandString(optionsNonNull) + os4.EOL); } const state = new ExecState(optionsNonNull, this.toolPath); state.on("debug", (message) => { @@ -20879,7 +20879,7 @@ var require_toolrunner = __commonJS({ if (error3) { reject(error3); } else { - resolve8(exitCode); + resolve7(exitCode); } }); if (this.options.input) { @@ -21045,11 +21045,11 @@ var require_exec = __commonJS({ })(); var __awaiter2 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { function adopt(value) { - return value instanceof P ? value : new P(function(resolve8) { - resolve8(value); + return value instanceof P ? value : new P(function(resolve7) { + resolve7(value); }); } - return new (P || (P = Promise))(function(resolve8, reject) { + return new (P || (P = Promise))(function(resolve7, reject) { function fulfilled(value) { try { step(generator.next(value)); @@ -21065,7 +21065,7 @@ var require_exec = __commonJS({ } } function step(result) { - result.done ? resolve8(result.value) : adopt(result.value).then(fulfilled, rejected); + result.done ? 
resolve7(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); @@ -21165,11 +21165,11 @@ var require_platform = __commonJS({ })(); var __awaiter2 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { function adopt(value) { - return value instanceof P ? value : new P(function(resolve8) { - resolve8(value); + return value instanceof P ? value : new P(function(resolve7) { + resolve7(value); }); } - return new (P || (P = Promise))(function(resolve8, reject) { + return new (P || (P = Promise))(function(resolve7, reject) { function fulfilled(value) { try { step(generator.next(value)); @@ -21185,7 +21185,7 @@ var require_platform = __commonJS({ } } function step(result) { - result.done ? resolve8(result.value) : adopt(result.value).then(fulfilled, rejected); + result.done ? resolve7(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); @@ -21294,11 +21294,11 @@ var require_core = __commonJS({ })(); var __awaiter2 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { function adopt(value) { - return value instanceof P ? value : new P(function(resolve8) { - resolve8(value); + return value instanceof P ? value : new P(function(resolve7) { + resolve7(value); }); } - return new (P || (P = Promise))(function(resolve8, reject) { + return new (P || (P = Promise))(function(resolve7, reject) { function fulfilled(value) { try { step(generator.next(value)); @@ -21314,7 +21314,7 @@ var require_core = __commonJS({ } } function step(result) { - result.done ? resolve8(result.value) : adopt(result.value).then(fulfilled, rejected); + result.done ? resolve7(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); @@ -21345,8 +21345,8 @@ var require_core = __commonJS({ var command_1 = require_command(); var file_command_1 = require_file_command(); var utils_1 = require_utils(); - var os5 = __importStar2(require("os")); - var path16 = __importStar2(require("path")); + var os4 = __importStar2(require("os")); + var path13 = __importStar2(require("path")); var oidc_utils_1 = require_oidc_utils(); var ExitCode; (function(ExitCode2) { @@ -21372,7 +21372,7 @@ var require_core = __commonJS({ } else { (0, command_1.issueCommand)("add-path", {}, inputPath); } - process.env["PATH"] = `${inputPath}${path16.delimiter}${process.env["PATH"]}`; + process.env["PATH"] = `${inputPath}${path13.delimiter}${process.env["PATH"]}`; } function getInput2(name, options) { const val = process.env[`INPUT_${name.replace(/ /g, "_").toUpperCase()}`] || ""; @@ -21407,7 +21407,7 @@ Support boolean input list: \`true | True | TRUE | false | False | FALSE\``); if (filePath) { return (0, file_command_1.issueFileCommand)("OUTPUT", (0, file_command_1.prepareKeyValueMessage)(name, value)); } - process.stdout.write(os5.EOL); + process.stdout.write(os4.EOL); (0, command_1.issueCommand)("set-output", { name }, (0, utils_1.toCommandValue)(value)); } function setCommandEcho(enabled) { @@ -21433,7 +21433,7 @@ Support boolean input list: \`true | True | TRUE | false | False | FALSE\``); (0, command_1.issueCommand)("notice", (0, utils_1.toCommandProperties)(properties), message instanceof Error ? 
message.toString() : message); } function info6(message) { - process.stdout.write(message + os5.EOL); + process.stdout.write(message + os4.EOL); } function startGroup3(name) { (0, command_1.issue)("group", name); @@ -21509,8 +21509,8 @@ var require_context = __commonJS({ if ((0, fs_1.existsSync)(process.env.GITHUB_EVENT_PATH)) { this.payload = JSON.parse((0, fs_1.readFileSync)(process.env.GITHUB_EVENT_PATH, { encoding: "utf8" })); } else { - const path16 = process.env.GITHUB_EVENT_PATH; - process.stdout.write(`GITHUB_EVENT_PATH ${path16} does not exist${os_1.EOL}`); + const path13 = process.env.GITHUB_EVENT_PATH; + process.stdout.write(`GITHUB_EVENT_PATH ${path13} does not exist${os_1.EOL}`); } } this.eventName = process.env.GITHUB_EVENT_NAME; @@ -22226,7 +22226,7 @@ var require_util9 = __commonJS({ var assert = require("node:assert"); var { kDestroyed, kBodyUsed, kListeners, kBody } = require_symbols6(); var { IncomingMessage } = require("node:http"); - var stream2 = require("node:stream"); + var stream = require("node:stream"); var net = require("node:net"); var { Blob: Blob2 } = require("node:buffer"); var nodeUtil = require("node:util"); @@ -22335,14 +22335,14 @@ var require_util9 = __commonJS({ } const port = url2.port != null ? url2.port : url2.protocol === "https:" ? 443 : 80; let origin = url2.origin != null ? url2.origin : `${url2.protocol || ""}//${url2.hostname || ""}:${port}`; - let path16 = url2.path != null ? url2.path : `${url2.pathname || ""}${url2.search || ""}`; + let path13 = url2.path != null ? url2.path : `${url2.pathname || ""}${url2.search || ""}`; if (origin[origin.length - 1] === "/") { origin = origin.slice(0, origin.length - 1); } - if (path16 && path16[0] !== "/") { - path16 = `/${path16}`; + if (path13 && path13[0] !== "/") { + path13 = `/${path13}`; } - return new URL(`${origin}${path16}`); + return new URL(`${origin}${path13}`); } if (!isHttpOrHttpsPrefixed(url2.origin || url2.protocol)) { throw new InvalidArgumentError("Invalid URL protocol: the URL must start with `http:` or `https:`."); @@ -22400,24 +22400,24 @@ var require_util9 = __commonJS({ return null; } function isDestroyed(body) { - return body && !!(body.destroyed || body[kDestroyed] || stream2.isDestroyed?.(body)); + return body && !!(body.destroyed || body[kDestroyed] || stream.isDestroyed?.(body)); } - function destroy(stream3, err) { - if (stream3 == null || !isStream(stream3) || isDestroyed(stream3)) { + function destroy(stream2, err) { + if (stream2 == null || !isStream(stream2) || isDestroyed(stream2)) { return; } - if (typeof stream3.destroy === "function") { - if (Object.getPrototypeOf(stream3).constructor === IncomingMessage) { - stream3.socket = null; + if (typeof stream2.destroy === "function") { + if (Object.getPrototypeOf(stream2).constructor === IncomingMessage) { + stream2.socket = null; } - stream3.destroy(err); + stream2.destroy(err); } else if (err) { queueMicrotask(() => { - stream3.emit("error", err); + stream2.emit("error", err); }); } - if (stream3.destroyed !== true) { - stream3[kDestroyed] = true; + if (stream2.destroyed !== true) { + stream2[kDestroyed] = true; } } var KEEPALIVE_TIMEOUT_EXPR = /timeout=(\d+)/; @@ -22516,13 +22516,13 @@ var require_util9 = __commonJS({ } } function isDisturbed(body) { - return !!(body && (stream2.isDisturbed(body) || body[kBodyUsed])); + return !!(body && (stream.isDisturbed(body) || body[kBodyUsed])); } function isErrored(body) { - return !!(body && stream2.isErrored(body)); + return !!(body && stream.isErrored(body)); } function 
isReadable(body) { - return !!(body && stream2.isReadable(body)); + return !!(body && stream.isReadable(body)); } function getSocketInfo(socket) { return { @@ -22793,39 +22793,39 @@ var require_diagnostics2 = __commonJS({ }); diagnosticsChannel.channel("undici:client:sendHeaders").subscribe((evt) => { const { - request: { method, path: path16, origin } + request: { method, path: path13, origin } } = evt; - debuglog("sending request to %s %s/%s", method, origin, path16); + debuglog("sending request to %s %s/%s", method, origin, path13); }); diagnosticsChannel.channel("undici:request:headers").subscribe((evt) => { const { - request: { method, path: path16, origin }, + request: { method, path: path13, origin }, response: { statusCode } } = evt; debuglog( "received response to %s %s/%s - HTTP %d", method, origin, - path16, + path13, statusCode ); }); diagnosticsChannel.channel("undici:request:trailers").subscribe((evt) => { const { - request: { method, path: path16, origin } + request: { method, path: path13, origin } } = evt; - debuglog("trailers received from %s %s/%s", method, origin, path16); + debuglog("trailers received from %s %s/%s", method, origin, path13); }); diagnosticsChannel.channel("undici:request:error").subscribe((evt) => { const { - request: { method, path: path16, origin }, + request: { method, path: path13, origin }, error: error3 } = evt; debuglog( "request to %s %s/%s errored - %s", method, origin, - path16, + path13, error3.message ); }); @@ -22874,9 +22874,9 @@ var require_diagnostics2 = __commonJS({ }); diagnosticsChannel.channel("undici:client:sendHeaders").subscribe((evt) => { const { - request: { method, path: path16, origin } + request: { method, path: path13, origin } } = evt; - debuglog("sending request to %s %s/%s", method, origin, path16); + debuglog("sending request to %s %s/%s", method, origin, path13); }); } diagnosticsChannel.channel("undici:websocket:open").subscribe((evt) => { @@ -22939,7 +22939,7 @@ var require_request3 = __commonJS({ var kHandler = /* @__PURE__ */ Symbol("handler"); var Request = class { constructor(origin, { - path: path16, + path: path13, method, body, headers, @@ -22954,11 +22954,11 @@ var require_request3 = __commonJS({ expectContinue, servername }, handler2) { - if (typeof path16 !== "string") { + if (typeof path13 !== "string") { throw new InvalidArgumentError("path must be a string"); - } else if (path16[0] !== "/" && !(path16.startsWith("http://") || path16.startsWith("https://")) && method !== "CONNECT") { + } else if (path13[0] !== "/" && !(path13.startsWith("http://") || path13.startsWith("https://")) && method !== "CONNECT") { throw new InvalidArgumentError("path must be an absolute URL or start with a slash"); - } else if (invalidPathRegex.test(path16)) { + } else if (invalidPathRegex.test(path13)) { throw new InvalidArgumentError("invalid request path"); } if (typeof method !== "string") { @@ -23021,7 +23021,7 @@ var require_request3 = __commonJS({ this.completed = false; this.aborted = false; this.upgrade = upgrade || null; - this.path = query ? buildURL(path16, query) : path16; + this.path = query ? buildURL(path13, query) : path13; this.origin = origin; this.idempotent = idempotent == null ? method === "HEAD" || method === "GET" : idempotent; this.blocking = blocking == null ? 
false : blocking; @@ -23333,9 +23333,9 @@ var require_dispatcher_base2 = __commonJS({ } close(callback) { if (callback === void 0) { - return new Promise((resolve8, reject) => { + return new Promise((resolve7, reject) => { this.close((err, data) => { - return err ? reject(err) : resolve8(data); + return err ? reject(err) : resolve7(data); }); }); } @@ -23373,12 +23373,12 @@ var require_dispatcher_base2 = __commonJS({ err = null; } if (callback === void 0) { - return new Promise((resolve8, reject) => { + return new Promise((resolve7, reject) => { this.destroy(err, (err2, data) => { return err2 ? ( /* istanbul ignore next: should never error */ reject(err2) - ) : resolve8(data); + ) : resolve7(data); }); }); } @@ -25254,7 +25254,7 @@ var require_util10 = __commonJS({ var { redirectStatusSet, referrerPolicySet: referrerPolicyTokens, badPortsSet } = require_constants8(); var { getGlobalOrigin } = require_global3(); var { collectASequenceOfCodePoints, collectAnHTTPQuotedString, removeChars, parseMIMEType } = require_data_url2(); - var { performance: performance5 } = require("node:perf_hooks"); + var { performance: performance4 } = require("node:perf_hooks"); var { isBlobLike, ReadableStreamFrom, isValidHTTPToken, normalizedMethodRecordsBase } = require_util9(); var assert = require("node:assert"); var { isUint8Array } = require("node:util/types"); @@ -25413,7 +25413,7 @@ var require_util10 = __commonJS({ }; } function coarsenedSharedCurrentTime(crossOriginIsolatedCapability) { - return coarsenTime(performance5.now(), crossOriginIsolatedCapability); + return coarsenTime(performance4.now(), crossOriginIsolatedCapability); } function createOpaqueTimingInfo(timingInfo) { return { @@ -25645,8 +25645,8 @@ var require_util10 = __commonJS({ function createDeferredPromise() { let res; let rej; - const promise = new Promise((resolve8, reject) => { - res = resolve8; + const promise = new Promise((resolve7, reject) => { + res = resolve7; rej = reject; }); return { promise, resolve: res, reject: rej }; @@ -25811,8 +25811,8 @@ var require_util10 = __commonJS({ errorSteps(e); } } - function isReadableStreamLike(stream2) { - return stream2 instanceof ReadableStream || stream2[Symbol.toStringTag] === "ReadableStream" && typeof stream2.tee === "function"; + function isReadableStreamLike(stream) { + return stream instanceof ReadableStream || stream[Symbol.toStringTag] === "ReadableStream" && typeof stream.tee === "function"; } function readableStreamClose(controller) { try { @@ -26628,20 +26628,20 @@ var require_body2 = __commonJS({ var streamRegistry; if (hasFinalizationRegistry) { streamRegistry = new FinalizationRegistry((weakRef) => { - const stream2 = weakRef.deref(); - if (stream2 && !stream2.locked && !isDisturbed(stream2) && !isErrored(stream2)) { - stream2.cancel("Response object has been garbage collected").catch(noop3); + const stream = weakRef.deref(); + if (stream && !stream.locked && !isDisturbed(stream) && !isErrored(stream)) { + stream.cancel("Response object has been garbage collected").catch(noop3); } }); } function extractBody(object, keepalive = false) { - let stream2 = null; + let stream = null; if (object instanceof ReadableStream) { - stream2 = object; + stream = object; } else if (isBlobLike(object)) { - stream2 = object.stream(); + stream = object.stream(); } else { - stream2 = new ReadableStream({ + stream = new ReadableStream({ async pull(controller) { const buffer = typeof source === "string" ? 
textEncoder.encode(source) : source; if (buffer.byteLength) { @@ -26654,7 +26654,7 @@ var require_body2 = __commonJS({ type: "bytes" }); } - assert(isReadableStreamLike(stream2)); + assert(isReadableStreamLike(stream)); let action = null; let source = null; let length = null; @@ -26733,14 +26733,14 @@ Content-Type: ${value.type || "application/octet-stream"}\r "Response body object should not be disturbed or locked" ); } - stream2 = object instanceof ReadableStream ? object : ReadableStreamFrom(object); + stream = object instanceof ReadableStream ? object : ReadableStreamFrom(object); } if (typeof source === "string" || util.isBuffer(source)) { length = Buffer.byteLength(source); } if (action != null) { let iterator2; - stream2 = new ReadableStream({ + stream = new ReadableStream({ async start() { iterator2 = action(object)[Symbol.asyncIterator](); }, @@ -26752,7 +26752,7 @@ Content-Type: ${value.type || "application/octet-stream"}\r controller.byobRequest?.respond(0); }); } else { - if (!isErrored(stream2)) { + if (!isErrored(stream)) { const buffer = new Uint8Array(value); if (buffer.byteLength) { controller.enqueue(buffer); @@ -26767,7 +26767,7 @@ Content-Type: ${value.type || "application/octet-stream"}\r type: "bytes" }); } - const body = { stream: stream2, source, length }; + const body = { stream, source, length }; return [body, type2]; } function safelyExtractBody(object, keepalive = false) { @@ -27534,7 +27534,7 @@ var require_client_h12 = __commonJS({ return method !== "GET" && method !== "HEAD" && method !== "OPTIONS" && method !== "TRACE" && method !== "CONNECT"; } function writeH1(client, request2) { - const { method, path: path16, host, upgrade, blocking, reset } = request2; + const { method, path: path13, host, upgrade, blocking, reset } = request2; let { body, headers, contentLength } = request2; const expectsPayload = method === "PUT" || method === "POST" || method === "PATCH" || method === "QUERY" || method === "PROPFIND" || method === "PROPPATCH"; if (util.isFormDataLike(body)) { @@ -27600,7 +27600,7 @@ var require_client_h12 = __commonJS({ if (blocking) { socket[kBlocking] = true; } - let header = `${method} ${path16} HTTP/1.1\r + let header = `${method} ${path13} HTTP/1.1\r `; if (typeof host === "string") { header += `host: ${host}\r @@ -27787,12 +27787,12 @@ upgrade: ${upgrade}\r cb(); } } - const waitForDrain = () => new Promise((resolve8, reject) => { + const waitForDrain = () => new Promise((resolve7, reject) => { assert(callback === null); if (socket[kError]) { reject(socket[kError]); } else { - callback = resolve8; + callback = resolve7; } }); socket.on("close", onDrain).on("drain", onDrain); @@ -28126,7 +28126,7 @@ var require_client_h22 = __commonJS({ } function writeH2(client, request2) { const session = client[kHTTP2Session]; - const { method, path: path16, host, upgrade, expectContinue, signal, headers: reqHeaders } = request2; + const { method, path: path13, host, upgrade, expectContinue, signal, headers: reqHeaders } = request2; let { body } = request2; if (upgrade) { util.errorRequest(client, request2, new Error("Upgrade not supported for H2")); @@ -28148,7 +28148,7 @@ var require_client_h22 = __commonJS({ headers[key] = val; } } - let stream2; + let stream; const { hostname, port } = client[kUrl]; headers[HTTP2_HEADER_AUTHORITY] = host || `${hostname}${port ? 
`:${port}` : ""}`; headers[HTTP2_HEADER_METHOD] = method; @@ -28158,8 +28158,8 @@ var require_client_h22 = __commonJS({ } err = err || new RequestAbortedError(); util.errorRequest(client, request2, err); - if (stream2 != null) { - util.destroy(stream2, err); + if (stream != null) { + util.destroy(stream, err); } util.destroy(body, err); client[kQueue][client[kRunningIdx]++] = null; @@ -28175,25 +28175,25 @@ var require_client_h22 = __commonJS({ } if (method === "CONNECT") { session.ref(); - stream2 = session.request(headers, { endStream: false, signal }); - if (stream2.id && !stream2.pending) { - request2.onUpgrade(null, null, stream2); + stream = session.request(headers, { endStream: false, signal }); + if (stream.id && !stream.pending) { + request2.onUpgrade(null, null, stream); ++session[kOpenStreams]; client[kQueue][client[kRunningIdx]++] = null; } else { - stream2.once("ready", () => { - request2.onUpgrade(null, null, stream2); + stream.once("ready", () => { + request2.onUpgrade(null, null, stream); ++session[kOpenStreams]; client[kQueue][client[kRunningIdx]++] = null; }); } - stream2.once("close", () => { + stream.once("close", () => { session[kOpenStreams] -= 1; if (session[kOpenStreams] === 0) session.unref(); }); return true; } - headers[HTTP2_HEADER_PATH] = path16; + headers[HTTP2_HEADER_PATH] = path13; headers[HTTP2_HEADER_SCHEME] = "https"; const expectsPayload = method === "PUT" || method === "POST" || method === "PATCH"; if (body && typeof body.read === "function") { @@ -28228,36 +28228,36 @@ var require_client_h22 = __commonJS({ const shouldEndStream = method === "GET" || method === "HEAD" || body === null; if (expectContinue) { headers[HTTP2_HEADER_EXPECT] = "100-continue"; - stream2 = session.request(headers, { endStream: shouldEndStream, signal }); - stream2.once("continue", writeBodyH2); + stream = session.request(headers, { endStream: shouldEndStream, signal }); + stream.once("continue", writeBodyH2); } else { - stream2 = session.request(headers, { + stream = session.request(headers, { endStream: shouldEndStream, signal }); writeBodyH2(); } ++session[kOpenStreams]; - stream2.once("response", (headers2) => { + stream.once("response", (headers2) => { const { [HTTP2_HEADER_STATUS]: statusCode, ...realHeaders } = headers2; request2.onResponseStarted(); if (request2.aborted) { const err = new RequestAbortedError(); util.errorRequest(client, request2, err); - util.destroy(stream2, err); + util.destroy(stream, err); return; } - if (request2.onHeaders(Number(statusCode), parseH2Headers(realHeaders), stream2.resume.bind(stream2), "") === false) { - stream2.pause(); + if (request2.onHeaders(Number(statusCode), parseH2Headers(realHeaders), stream.resume.bind(stream), "") === false) { + stream.pause(); } - stream2.on("data", (chunk) => { + stream.on("data", (chunk) => { if (request2.onData(chunk) === false) { - stream2.pause(); + stream.pause(); } }); }); - stream2.once("end", () => { - if (stream2.state?.state == null || stream2.state.state < 6) { + stream.once("end", () => { + if (stream.state?.state == null || stream.state.state < 6) { request2.onComplete([]); } if (session[kOpenStreams] === 0) { @@ -28268,16 +28268,16 @@ var require_client_h22 = __commonJS({ client[kPendingIdx] = client[kRunningIdx]; client[kResume](); }); - stream2.once("close", () => { + stream.once("close", () => { session[kOpenStreams] -= 1; if (session[kOpenStreams] === 0) { session.unref(); } }); - stream2.once("error", function(err) { + stream.once("error", function(err) { abort(err); }); - 
stream2.once("frameError", (type2, code) => { + stream.once("frameError", (type2, code) => { abort(new InformationalError(`HTTP/2: "frameError" received - type ${type2}, code ${code}`)); }); return true; @@ -28285,7 +28285,7 @@ var require_client_h22 = __commonJS({ if (!body || contentLength === 0) { writeBuffer( abort, - stream2, + stream, null, client, request2, @@ -28296,7 +28296,7 @@ var require_client_h22 = __commonJS({ } else if (util.isBuffer(body)) { writeBuffer( abort, - stream2, + stream, body, client, request2, @@ -28308,7 +28308,7 @@ var require_client_h22 = __commonJS({ if (typeof body.stream === "function") { writeIterable( abort, - stream2, + stream, body.stream(), client, request2, @@ -28319,7 +28319,7 @@ var require_client_h22 = __commonJS({ } else { writeBlob( abort, - stream2, + stream, body, client, request2, @@ -28333,7 +28333,7 @@ var require_client_h22 = __commonJS({ abort, client[kSocket], expectsPayload, - stream2, + stream, body, client, request2, @@ -28342,7 +28342,7 @@ var require_client_h22 = __commonJS({ } else if (util.isIterable(body)) { writeIterable( abort, - stream2, + stream, body, client, request2, @@ -28429,12 +28429,12 @@ var require_client_h22 = __commonJS({ cb(); } } - const waitForDrain = () => new Promise((resolve8, reject) => { + const waitForDrain = () => new Promise((resolve7, reject) => { assert(callback === null); if (socket[kError]) { reject(socket[kError]); } else { - callback = resolve8; + callback = resolve7; } }); h2stream.on("close", onDrain).on("drain", onDrain); @@ -28546,9 +28546,9 @@ var require_redirect_handler2 = __commonJS({ return this.handler.onHeaders(statusCode, headers, resume, statusText); } const { origin, pathname, search } = util.parseURL(new URL(this.location, this.opts.origin && new URL(this.opts.path, this.opts.origin))); - const path16 = search ? `${pathname}${search}` : pathname; + const path13 = search ? 
`${pathname}${search}` : pathname; this.opts.headers = cleanRequestHeaders(this.opts.headers, statusCode === 303, this.opts.origin !== origin); - this.opts.path = path16; + this.opts.path = path13; this.opts.origin = origin; this.opts.maxRedirections = 0; this.opts.query = null; @@ -28911,16 +28911,16 @@ var require_client2 = __commonJS({ return this[kNeedDrain] < 2; } async [kClose]() { - return new Promise((resolve8) => { + return new Promise((resolve7) => { if (this[kSize]) { - this[kClosedResolve] = resolve8; + this[kClosedResolve] = resolve7; } else { - resolve8(null); + resolve7(null); } }); } async [kDestroy](err) { - return new Promise((resolve8) => { + return new Promise((resolve7) => { const requests = this[kQueue].splice(this[kPendingIdx]); for (let i = 0; i < requests.length; i++) { const request2 = requests[i]; @@ -28931,7 +28931,7 @@ var require_client2 = __commonJS({ this[kClosedResolve](); this[kClosedResolve] = null; } - resolve8(null); + resolve7(null); }; if (this[kHTTPContext]) { this[kHTTPContext].destroy(err, callback); @@ -28982,7 +28982,7 @@ var require_client2 = __commonJS({ }); } try { - const socket = await new Promise((resolve8, reject) => { + const socket = await new Promise((resolve7, reject) => { client[kConnector]({ host, hostname, @@ -28994,7 +28994,7 @@ var require_client2 = __commonJS({ if (err) { reject(err); } else { - resolve8(socket2); + resolve7(socket2); } }); }); @@ -29330,8 +29330,8 @@ var require_pool_base2 = __commonJS({ if (this[kQueue].isEmpty()) { await Promise.all(this[kClients].map((c) => c.close())); } else { - await new Promise((resolve8) => { - this[kClosedResolve] = resolve8; + await new Promise((resolve7) => { + this[kClosedResolve] = resolve7; }); } } @@ -29782,10 +29782,10 @@ var require_proxy_agent2 = __commonJS({ }; const { origin, - path: path16 = "/", + path: path13 = "/", headers = {} } = opts; - opts.path = origin + path16; + opts.path = origin + path13; if (!("host" in headers) && !("Host" in headers)) { const { host } = new URL2(origin); headers.host = host; @@ -30419,7 +30419,7 @@ var require_readable2 = __commonJS({ "node_modules/@actions/github/node_modules/undici/lib/api/readable.js"(exports2, module2) { "use strict"; var assert = require("node:assert"); - var { Readable: Readable2 } = require("node:stream"); + var { Readable } = require("node:stream"); var { RequestAbortedError, NotSupportedError, InvalidArgumentError, AbortError } = require_errors2(); var util = require_util9(); var { ReadableStreamFrom } = require_util9(); @@ -30431,7 +30431,7 @@ var require_readable2 = __commonJS({ var kContentLength = /* @__PURE__ */ Symbol("kContentLength"); var noop3 = () => { }; - var BodyReadable = class extends Readable2 { + var BodyReadable = class extends Readable { constructor({ resume, abort, @@ -30546,7 +30546,7 @@ var require_readable2 = __commonJS({ if (this._readableState.closeEmitted) { return null; } - return await new Promise((resolve8, reject) => { + return await new Promise((resolve7, reject) => { if (this[kContentLength] > limit) { this.destroy(new AbortError()); } @@ -30559,7 +30559,7 @@ var require_readable2 = __commonJS({ if (signal?.aborted) { reject(signal.reason ?? 
new AbortError()); } else { - resolve8(null); + resolve7(null); } }).on("error", noop3).on("data", function(chunk) { limit -= chunk.length; @@ -30576,13 +30576,13 @@ var require_readable2 = __commonJS({ function isUnusable(self2) { return util.isDisturbed(self2) || isLocked(self2); } - async function consume(stream2, type2) { - assert(!stream2[kConsume]); - return new Promise((resolve8, reject) => { - if (isUnusable(stream2)) { - const rState = stream2._readableState; + async function consume(stream, type2) { + assert(!stream[kConsume]); + return new Promise((resolve7, reject) => { + if (isUnusable(stream)) { + const rState = stream._readableState; if (rState.destroyed && rState.closeEmitted === false) { - stream2.on("error", (err) => { + stream.on("error", (err) => { reject(err); }).on("close", () => { reject(new TypeError("unusable")); @@ -30592,22 +30592,22 @@ var require_readable2 = __commonJS({ } } else { queueMicrotask(() => { - stream2[kConsume] = { + stream[kConsume] = { type: type2, - stream: stream2, - resolve: resolve8, + stream, + resolve: resolve7, reject, length: 0, body: [] }; - stream2.on("error", function(err) { + stream.on("error", function(err) { consumeFinish(this[kConsume], err); }).on("close", function() { if (this[kConsume].body !== null) { consumeFinish(this[kConsume], new RequestAbortedError()); } }); - consumeStart(stream2[kConsume]); + consumeStart(stream[kConsume]); }); } }); @@ -30665,22 +30665,22 @@ var require_readable2 = __commonJS({ return buffer; } function consumeEnd(consume2) { - const { type: type2, body, resolve: resolve8, stream: stream2, length } = consume2; + const { type: type2, body, resolve: resolve7, stream, length } = consume2; try { if (type2 === "text") { - resolve8(chunksDecode(body, length)); + resolve7(chunksDecode(body, length)); } else if (type2 === "json") { - resolve8(JSON.parse(chunksDecode(body, length))); + resolve7(JSON.parse(chunksDecode(body, length))); } else if (type2 === "arrayBuffer") { - resolve8(chunksConcat(body, length).buffer); + resolve7(chunksConcat(body, length).buffer); } else if (type2 === "blob") { - resolve8(new Blob(body, { type: stream2[kContentType] })); + resolve7(new Blob(body, { type: stream[kContentType] })); } else if (type2 === "bytes") { - resolve8(chunksConcat(body, length)); + resolve7(chunksConcat(body, length)); } consumeFinish(consume2); } catch (err) { - stream2.destroy(err); + stream.destroy(err); } } function consumePush(consume2, chunk) { @@ -30773,7 +30773,7 @@ var require_api_request2 = __commonJS({ "node_modules/@actions/github/node_modules/undici/lib/api/api-request.js"(exports2, module2) { "use strict"; var assert = require("node:assert"); - var { Readable: Readable2 } = require_readable2(); + var { Readable } = require_readable2(); var { InvalidArgumentError, RequestAbortedError } = require_errors2(); var util = require_util9(); var { getResolveErrorBodyCallback } = require_util11(); @@ -30868,7 +30868,7 @@ var require_api_request2 = __commonJS({ const parsedHeaders = responseHeaders === "raw" ? 
util.parseHeaders(rawHeaders) : headers; const contentType = parsedHeaders["content-type"]; const contentLength = parsedHeaders["content-length"]; - const res = new Readable2({ + const res = new Readable({ resume, abort, contentType, @@ -30933,9 +30933,9 @@ var require_api_request2 = __commonJS({ }; function request2(opts, callback) { if (callback === void 0) { - return new Promise((resolve8, reject) => { + return new Promise((resolve7, reject) => { request2.call(this, opts, (err, data) => { - return err ? reject(err) : resolve8(data); + return err ? reject(err) : resolve7(data); }); }); } @@ -31156,11 +31156,11 @@ var require_api_stream2 = __commonJS({ } } }; - function stream2(opts, factory, callback) { + function stream(opts, factory, callback) { if (callback === void 0) { - return new Promise((resolve8, reject) => { - stream2.call(this, opts, factory, (err, data) => { - return err ? reject(err) : resolve8(data); + return new Promise((resolve7, reject) => { + stream.call(this, opts, factory, (err, data) => { + return err ? reject(err) : resolve7(data); }); }); } @@ -31174,7 +31174,7 @@ var require_api_stream2 = __commonJS({ queueMicrotask(() => callback(err, { opaque })); } } - module2.exports = stream2; + module2.exports = stream; } }); @@ -31183,7 +31183,7 @@ var require_api_pipeline2 = __commonJS({ "node_modules/@actions/github/node_modules/undici/lib/api/api-pipeline.js"(exports2, module2) { "use strict"; var { - Readable: Readable2, + Readable, Duplex, PassThrough } = require("node:stream"); @@ -31197,7 +31197,7 @@ var require_api_pipeline2 = __commonJS({ var { addSignal, removeSignal } = require_abort_signal2(); var assert = require("node:assert"); var kResume = /* @__PURE__ */ Symbol("resume"); - var PipelineRequest = class extends Readable2 { + var PipelineRequest = class extends Readable { constructor() { super({ autoDestroy: true }); this[kResume] = null; @@ -31214,7 +31214,7 @@ var require_api_pipeline2 = __commonJS({ callback(err); } }; - var PipelineResponse = class extends Readable2 { + var PipelineResponse = class extends Readable { constructor(resume) { super({ autoDestroy: true }); this[kResume] = resume; @@ -31445,9 +31445,9 @@ var require_api_upgrade2 = __commonJS({ }; function upgrade(opts, callback) { if (callback === void 0) { - return new Promise((resolve8, reject) => { + return new Promise((resolve7, reject) => { upgrade.call(this, opts, (err, data) => { - return err ? reject(err) : resolve8(data); + return err ? reject(err) : resolve7(data); }); }); } @@ -31539,9 +31539,9 @@ var require_api_connect2 = __commonJS({ }; function connect(opts, callback) { if (callback === void 0) { - return new Promise((resolve8, reject) => { + return new Promise((resolve7, reject) => { connect.call(this, opts, (err, data) => { - return err ? reject(err) : resolve8(data); + return err ? 
reject(err) : resolve7(data); }); }); } @@ -31706,20 +31706,20 @@ var require_mock_utils2 = __commonJS({ } return true; } - function safeUrl(path16) { - if (typeof path16 !== "string") { - return path16; + function safeUrl(path13) { + if (typeof path13 !== "string") { + return path13; } - const pathSegments = path16.split("?"); + const pathSegments = path13.split("?"); if (pathSegments.length !== 2) { - return path16; + return path13; } const qp = new URLSearchParams(pathSegments.pop()); qp.sort(); return [...pathSegments, qp.toString()].join("?"); } - function matchKey(mockDispatch2, { path: path16, method, body, headers }) { - const pathMatch = matchValue(mockDispatch2.path, path16); + function matchKey(mockDispatch2, { path: path13, method, body, headers }) { + const pathMatch = matchValue(mockDispatch2.path, path13); const methodMatch = matchValue(mockDispatch2.method, method); const bodyMatch = typeof mockDispatch2.body !== "undefined" ? matchValue(mockDispatch2.body, body) : true; const headersMatch = matchHeaders(mockDispatch2, headers); @@ -31741,7 +31741,7 @@ var require_mock_utils2 = __commonJS({ function getMockDispatch(mockDispatches, key) { const basePath = key.query ? buildURL(key.path, key.query) : key.path; const resolvedPath = typeof basePath === "string" ? safeUrl(basePath) : basePath; - let matchedMockDispatches = mockDispatches.filter(({ consumed }) => !consumed).filter(({ path: path16 }) => matchValue(safeUrl(path16), resolvedPath)); + let matchedMockDispatches = mockDispatches.filter(({ consumed }) => !consumed).filter(({ path: path13 }) => matchValue(safeUrl(path13), resolvedPath)); if (matchedMockDispatches.length === 0) { throw new MockNotMatchedError(`Mock dispatch not matched for path '${resolvedPath}'`); } @@ -31779,9 +31779,9 @@ var require_mock_utils2 = __commonJS({ } } function buildKey(opts) { - const { path: path16, method, body, headers, query } = opts; + const { path: path13, method, body, headers, query } = opts; return { - path: path16, + path: path13, method, body, headers, @@ -32244,10 +32244,10 @@ var require_pending_interceptors_formatter2 = __commonJS({ } format(pendingInterceptors) { const withPrettyHeaders = pendingInterceptors.map( - ({ method, path: path16, data: { statusCode }, persist, times, timesInvoked, origin }) => ({ + ({ method, path: path13, data: { statusCode }, persist, times, timesInvoked, origin }) => ({ Method: method, Origin: origin, - Path: path16, + Path: path13, "Status code": statusCode, Persistent: persist ? 
PERSISTENT : NOT_PERSISTENT, Invocations: timesInvoked, @@ -34539,7 +34539,7 @@ var require_fetch2 = __commonJS({ subresourceSet } = require_constants8(); var EE = require("node:events"); - var { Readable: Readable2, pipeline, finished } = require("node:stream"); + var { Readable, pipeline, finished } = require("node:stream"); var { addAbortListener, isErrored, isReadable, bufferToLowerCasedHeaderName } = require_util9(); var { dataURLProcessor, serializeAMimeType, minimizeSupportedMimeType } = require_data_url2(); var { getGlobalDispatcher } = require_global4(); @@ -35324,7 +35324,7 @@ var require_fetch2 = __commonJS({ fetchParams.controller.abort(reason); } }; - const stream2 = new ReadableStream( + const stream = new ReadableStream( { async start(controller) { fetchParams.controller.controller = controller; @@ -35338,7 +35338,7 @@ var require_fetch2 = __commonJS({ type: "bytes" } ); - response.body = { stream: stream2, source: null, length: null }; + response.body = { stream, source: null, length: null }; fetchParams.controller.onAborted = onAborted; fetchParams.controller.on("terminated", onAborted); fetchParams.controller.resume = async () => { @@ -35373,7 +35373,7 @@ var require_fetch2 = __commonJS({ if (buffer.byteLength) { fetchParams.controller.controller.enqueue(buffer); } - if (isErrored(stream2)) { + if (isErrored(stream)) { fetchParams.controller.terminate(); return; } @@ -35385,13 +35385,13 @@ var require_fetch2 = __commonJS({ function onAborted(reason) { if (isAborted(fetchParams)) { response.aborted = true; - if (isReadable(stream2)) { + if (isReadable(stream)) { fetchParams.controller.controller.error( fetchParams.controller.serializedAbortReason ); } } else { - if (isReadable(stream2)) { + if (isReadable(stream)) { fetchParams.controller.controller.error(new TypeError("terminated", { cause: isErrorLike(reason) ? 
reason : void 0 })); @@ -35403,7 +35403,7 @@ var require_fetch2 = __commonJS({ function dispatch({ body }) { const url2 = requestCurrentURL(request2); const agent = fetchParams.controller.dispatcher; - return new Promise((resolve8, reject) => agent.dispatch( + return new Promise((resolve7, reject) => agent.dispatch( { path: url2.pathname + url2.search, origin: url2.origin, @@ -35440,7 +35440,7 @@ var require_fetch2 = __commonJS({ headersList.append(bufferToLowerCasedHeaderName(rawHeaders[i]), rawHeaders[i + 1].toString("latin1"), true); } location = headersList.get("location", true); - this.body = new Readable2({ read: resume }); + this.body = new Readable({ read: resume }); const decoders = []; const willFollow = location && request2.redirect === "follow" && redirectStatusSet.has(status); if (request2.method !== "HEAD" && request2.method !== "CONNECT" && !nullBodyStatus.includes(status) && !willFollow) { @@ -35479,7 +35479,7 @@ var require_fetch2 = __commonJS({ } } const onError = this.onError.bind(this); - resolve8({ + resolve7({ status, statusText, headersList, @@ -35525,7 +35525,7 @@ var require_fetch2 = __commonJS({ for (let i = 0; i < rawHeaders.length; i += 2) { headersList.append(bufferToLowerCasedHeaderName(rawHeaders[i]), rawHeaders[i + 1].toString("latin1"), true); } - resolve8({ + resolve7({ status, statusText: STATUS_CODES[status], headersList, @@ -35944,8 +35944,8 @@ var require_util12 = __commonJS({ fr[kState] = "loading"; fr[kResult] = null; fr[kError] = null; - const stream2 = blob.stream(); - const reader = stream2.getReader(); + const stream = blob.stream(); + const reader = stream.getReader(); const bytes = []; let chunkPromise = reader.read(); let isFirstChunk = true; @@ -36604,8 +36604,8 @@ var require_cache2 = __commonJS({ const clonedResponse = cloneResponse(innerResponse); const bodyReadPromise = createDeferredPromise(); if (innerResponse.body != null) { - const stream2 = innerResponse.body.stream; - const reader = stream2.getReader(); + const stream = innerResponse.body.stream; + const reader = stream.getReader(); readAllBytes(reader).then(bodyReadPromise.resolve, bodyReadPromise.reject); } else { bodyReadPromise.resolve(void 0); @@ -37128,9 +37128,9 @@ var require_util14 = __commonJS({ } } } - function validateCookiePath(path16) { - for (let i = 0; i < path16.length; ++i) { - const code = path16.charCodeAt(i); + function validateCookiePath(path13) { + for (let i = 0; i < path13.length; ++i) { + const code = path13.charCodeAt(i); if (code < 32 || // exclude CTLs (0-31) code === 127 || // DEL code === 59) { @@ -38029,7 +38029,7 @@ var require_frame2 = __commonJS({ } catch { crypto3 = { // not full compatibility, but minimum. 
- randomFillSync: function randomFillSync2(buffer2, _offset, _size) { + randomFillSync: function randomFillSync(buffer2, _offset, _size) { for (let i = 0; i < buffer2.length; ++i) { buffer2[i] = Math.random() * 255 | 0; } @@ -39118,8 +39118,8 @@ var require_util16 = __commonJS({ return true; } function delay2(ms) { - return new Promise((resolve8) => { - setTimeout(resolve8, ms).unref(); + return new Promise((resolve7) => { + setTimeout(resolve7, ms).unref(); }); } module2.exports = { @@ -39724,11 +39724,11 @@ var require_undici2 = __commonJS({ if (typeof opts.path !== "string") { throw new InvalidArgumentError("invalid opts.path"); } - let path16 = opts.path; + let path13 = opts.path; if (!opts.path.startsWith("/")) { - path16 = `/${path16}`; + path13 = `/${path13}`; } - url2 = new URL(util.parseOrigin(url2).origin + path16); + url2 = new URL(util.parseOrigin(url2).origin + path13); } else { if (!opts) { opts = typeof url2 === "object" ? url2 : {}; @@ -39842,11 +39842,11 @@ var require_utils4 = __commonJS({ })(); var __awaiter2 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { function adopt(value) { - return value instanceof P ? value : new P(function(resolve8) { - resolve8(value); + return value instanceof P ? value : new P(function(resolve7) { + resolve7(value); }); } - return new (P || (P = Promise))(function(resolve8, reject) { + return new (P || (P = Promise))(function(resolve7, reject) { function fulfilled(value) { try { step(generator.next(value)); @@ -39862,7 +39862,7 @@ var require_utils4 = __commonJS({ } } function step(result) { - result.done ? resolve8(result.value) : adopt(result.value).then(fulfilled, rejected); + result.done ? resolve7(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); @@ -46543,8 +46543,8 @@ var require_light = __commonJS({ return this.Promise.resolve(); } yieldLoop(t = 0) { - return new this.Promise(function(resolve8, reject) { - return setTimeout(resolve8, t); + return new this.Promise(function(resolve7, reject) { + return setTimeout(resolve7, t); }); } computePenalty() { @@ -46755,15 +46755,15 @@ var require_light = __commonJS({ return this._queue.length === 0; } async _tryToRun() { - var args, cb, error3, reject, resolve8, returned, task; + var args, cb, error3, reject, resolve7, returned, task; if (this._running < 1 && this._queue.length > 0) { this._running++; - ({ task, args, resolve: resolve8, reject } = this._queue.shift()); + ({ task, args, resolve: resolve7, reject } = this._queue.shift()); cb = await (async function() { try { returned = await task(...args); return function() { - return resolve8(returned); + return resolve7(returned); }; } catch (error1) { error3 = error1; @@ -46778,13 +46778,13 @@ var require_light = __commonJS({ } } schedule(task, ...args) { - var promise, reject, resolve8; - resolve8 = reject = null; + var promise, reject, resolve7; + resolve7 = reject = null; promise = new this.Promise(function(_resolve, _reject) { - resolve8 = _resolve; + resolve7 = _resolve; return reject = _reject; }); - this._queue.push({ task, args, resolve: resolve8, reject }); + this._queue.push({ task, args, resolve: resolve7, reject }); this._tryToRun(); return promise; } @@ -47185,14 +47185,14 @@ var require_light = __commonJS({ counts = this._states.counts; return counts[0] + counts[1] + counts[2] + counts[3] === at; }; - return new this.Promise((resolve8, reject) => { + return new this.Promise((resolve7, reject) => { if 
(finished()) { - return resolve8(); + return resolve7(); } else { return this.on("done", () => { if (finished()) { this.removeAllListeners("done"); - return resolve8(); + return resolve7(); } }); } @@ -47285,9 +47285,9 @@ var require_light = __commonJS({ options = parser$5.load(options, this.jobDefaults); } task = (...args2) => { - return new this.Promise(function(resolve8, reject) { + return new this.Promise(function(resolve7, reject) { return fn(...args2, function(...args3) { - return (args3[0] != null ? reject : resolve8)(args3); + return (args3[0] != null ? reject : resolve7)(args3); }); }); }; @@ -47414,14 +47414,14 @@ var require_helpers = __commonJS({ "node_modules/jsonschema/lib/helpers.js"(exports2, module2) { "use strict"; var uri = require("url"); - var ValidationError = exports2.ValidationError = function ValidationError2(message, instance, schema2, path16, name, argument) { - if (Array.isArray(path16)) { - this.path = path16; - this.property = path16.reduce(function(sum, item) { + var ValidationError = exports2.ValidationError = function ValidationError2(message, instance, schema2, path13, name, argument) { + if (Array.isArray(path13)) { + this.path = path13; + this.property = path13.reduce(function(sum, item) { return sum + makeSuffix(item); }, "instance"); - } else if (path16 !== void 0) { - this.property = path16; + } else if (path13 !== void 0) { + this.property = path13; } if (message) { this.message = message; @@ -47512,28 +47512,28 @@ var require_helpers = __commonJS({ name: { value: "SchemaError", enumerable: false } } ); - var SchemaContext = exports2.SchemaContext = function SchemaContext2(schema2, options, path16, base, schemas) { + var SchemaContext = exports2.SchemaContext = function SchemaContext2(schema2, options, path13, base, schemas) { this.schema = schema2; this.options = options; - if (Array.isArray(path16)) { - this.path = path16; - this.propertyPath = path16.reduce(function(sum, item) { + if (Array.isArray(path13)) { + this.path = path13; + this.propertyPath = path13.reduce(function(sum, item) { return sum + makeSuffix(item); }, "instance"); } else { - this.propertyPath = path16; + this.propertyPath = path13; } this.base = base; this.schemas = schemas; }; - SchemaContext.prototype.resolve = function resolve8(target) { + SchemaContext.prototype.resolve = function resolve7(target) { return uri.resolve(this.base, target); }; SchemaContext.prototype.makeChild = function makeChild(schema2, propertyName) { - var path16 = propertyName === void 0 ? this.path : this.path.concat([propertyName]); + var path13 = propertyName === void 0 ? this.path : this.path.concat([propertyName]); var id = schema2.$id || schema2.id; var base = uri.resolve(this.base, id || ""); - var ctx = new SchemaContext(schema2, this.options, path16, base, Object.create(this.schemas)); + var ctx = new SchemaContext(schema2, this.options, path13, base, Object.create(this.schemas)); if (id && !ctx.schemas[base]) { ctx.schemas[base] = schema2; } @@ -48624,7 +48624,7 @@ var require_validator = __commonJS({ } return schema2; }; - Validator3.prototype.resolve = function resolve8(schema2, switchSchema, ctx) { + Validator3.prototype.resolve = function resolve7(schema2, switchSchema, ctx) { switchSchema = ctx.resolve(switchSchema); if (ctx.schemas[switchSchema]) { return { subschema: ctx.schemas[switchSchema], switchSchema }; @@ -48830,21 +48830,21 @@ var require_internal_path_helper = __commonJS({ return mod && mod.__esModule ? 
mod : { "default": mod }; }; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.dirname = dirname3; + exports2.dirname = dirname2; exports2.ensureAbsoluteRoot = ensureAbsoluteRoot; exports2.hasAbsoluteRoot = hasAbsoluteRoot; exports2.hasRoot = hasRoot; exports2.normalizeSeparators = normalizeSeparators; exports2.safeTrimTrailingSeparator = safeTrimTrailingSeparator; - var path16 = __importStar2(require("path")); + var path13 = __importStar2(require("path")); var assert_1 = __importDefault2(require("assert")); var IS_WINDOWS = process.platform === "win32"; - function dirname3(p) { + function dirname2(p) { p = safeTrimTrailingSeparator(p); if (IS_WINDOWS && /^\\\\[^\\]+(\\[^\\]+)?$/.test(p)) { return p; } - let result = path16.dirname(p); + let result = path13.dirname(p); if (IS_WINDOWS && /^\\\\[^\\]+\\[^\\]+\\$/.test(result)) { result = safeTrimTrailingSeparator(result); } @@ -48881,7 +48881,7 @@ var require_internal_path_helper = __commonJS({ (0, assert_1.default)(hasAbsoluteRoot(root), `ensureAbsoluteRoot parameter 'root' must have an absolute root`); if (root.endsWith("/") || IS_WINDOWS && root.endsWith("\\")) { } else { - root += path16.sep; + root += path13.sep; } return root + itemPath; } @@ -48915,10 +48915,10 @@ var require_internal_path_helper = __commonJS({ return ""; } p = normalizeSeparators(p); - if (!p.endsWith(path16.sep)) { + if (!p.endsWith(path13.sep)) { return p; } - if (p === path16.sep) { + if (p === path13.sep) { return p; } if (IS_WINDOWS && /^[A-Z]:\\$/i.test(p)) { @@ -49263,7 +49263,7 @@ var require_minimatch = __commonJS({ "node_modules/minimatch/minimatch.js"(exports2, module2) { module2.exports = minimatch; minimatch.Minimatch = Minimatch; - var path16 = (function() { + var path13 = (function() { try { return require("path"); } catch (e) { @@ -49271,7 +49271,7 @@ var require_minimatch = __commonJS({ })() || { sep: "/" }; - minimatch.sep = path16.sep; + minimatch.sep = path13.sep; var GLOBSTAR = minimatch.GLOBSTAR = Minimatch.GLOBSTAR = {}; var expand2 = require_brace_expansion(); var plTypes = { @@ -49360,8 +49360,8 @@ var require_minimatch = __commonJS({ assertValidPattern(pattern); if (!options) options = {}; pattern = pattern.trim(); - if (!options.allowWindowsEscape && path16.sep !== "/") { - pattern = pattern.split(path16.sep).join("/"); + if (!options.allowWindowsEscape && path13.sep !== "/") { + pattern = pattern.split(path13.sep).join("/"); } this.options = options; this.set = []; @@ -49730,8 +49730,8 @@ var require_minimatch = __commonJS({ if (this.empty) return f === ""; if (f === "/" && partial) return true; var options = this.options; - if (path16.sep !== "/") { - f = f.split(path16.sep).join("/"); + if (path13.sep !== "/") { + f = f.split(path13.sep).join("/"); } f = f.split(slashSplit); this.debug(this.pattern, "split", f); @@ -49877,7 +49877,7 @@ var require_internal_path = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.Path = void 0; - var path16 = __importStar2(require("path")); + var path13 = __importStar2(require("path")); var pathHelper = __importStar2(require_internal_path_helper()); var assert_1 = __importDefault2(require("assert")); var IS_WINDOWS = process.platform === "win32"; @@ -49892,12 +49892,12 @@ var require_internal_path = __commonJS({ (0, assert_1.default)(itemPath, `Parameter 'itemPath' must not be empty`); itemPath = pathHelper.safeTrimTrailingSeparator(itemPath); if (!pathHelper.hasRoot(itemPath)) { - this.segments = itemPath.split(path16.sep); + this.segments = 
itemPath.split(path13.sep); } else { let remaining = itemPath; let dir = pathHelper.dirname(remaining); while (dir !== remaining) { - const basename = path16.basename(remaining); + const basename = path13.basename(remaining); this.segments.unshift(basename); remaining = dir; dir = pathHelper.dirname(remaining); @@ -49915,7 +49915,7 @@ var require_internal_path = __commonJS({ (0, assert_1.default)(segment === pathHelper.dirname(segment), `Parameter 'itemPath' root segment contains information for multiple segments`); this.segments.push(segment); } else { - (0, assert_1.default)(!segment.includes(path16.sep), `Parameter 'itemPath' contains unexpected path separators`); + (0, assert_1.default)(!segment.includes(path13.sep), `Parameter 'itemPath' contains unexpected path separators`); this.segments.push(segment); } } @@ -49926,12 +49926,12 @@ var require_internal_path = __commonJS({ */ toString() { let result = this.segments[0]; - let skipSlash = result.endsWith(path16.sep) || IS_WINDOWS && /^[A-Z]:$/i.test(result); + let skipSlash = result.endsWith(path13.sep) || IS_WINDOWS && /^[A-Z]:$/i.test(result); for (let i = 1; i < this.segments.length; i++) { if (skipSlash) { skipSlash = false; } else { - result += path16.sep; + result += path13.sep; } result += this.segments[i]; } @@ -49988,8 +49988,8 @@ var require_internal_pattern = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.Pattern = void 0; - var os5 = __importStar2(require("os")); - var path16 = __importStar2(require("path")); + var os4 = __importStar2(require("os")); + var path13 = __importStar2(require("path")); var pathHelper = __importStar2(require_internal_path_helper()); var assert_1 = __importDefault2(require("assert")); var minimatch_1 = require_minimatch(); @@ -50018,7 +50018,7 @@ var require_internal_pattern = __commonJS({ } pattern = _Pattern.fixupPattern(pattern, homedir2); this.segments = new internal_path_1.Path(pattern).segments; - this.trailingSeparator = pathHelper.normalizeSeparators(pattern).endsWith(path16.sep); + this.trailingSeparator = pathHelper.normalizeSeparators(pattern).endsWith(path13.sep); pattern = pathHelper.safeTrimTrailingSeparator(pattern); let foundGlob = false; const searchSegments = this.segments.map((x) => _Pattern.getLiteral(x)).filter((x) => !foundGlob && !(foundGlob = x === "")); @@ -50042,8 +50042,8 @@ var require_internal_pattern = __commonJS({ match(itemPath) { if (this.segments[this.segments.length - 1] === "**") { itemPath = pathHelper.normalizeSeparators(itemPath); - if (!itemPath.endsWith(path16.sep) && this.isImplicitPattern === false) { - itemPath = `${itemPath}${path16.sep}`; + if (!itemPath.endsWith(path13.sep) && this.isImplicitPattern === false) { + itemPath = `${itemPath}${path13.sep}`; } } else { itemPath = pathHelper.safeTrimTrailingSeparator(itemPath); @@ -50078,10 +50078,10 @@ var require_internal_pattern = __commonJS({ (0, assert_1.default)(literalSegments.every((x, i) => (x !== "." || i === 0) && x !== ".."), `Invalid pattern '${pattern}'. Relative pathing '.' and '..' is not allowed.`); (0, assert_1.default)(!pathHelper.hasRoot(pattern) || literalSegments[0], `Invalid pattern '${pattern}'. Root segment must not contain globs.`); pattern = pathHelper.normalizeSeparators(pattern); - if (pattern === "." || pattern.startsWith(`.${path16.sep}`)) { + if (pattern === "." 
|| pattern.startsWith(`.${path13.sep}`)) { pattern = _Pattern.globEscape(process.cwd()) + pattern.substr(1); - } else if (pattern === "~" || pattern.startsWith(`~${path16.sep}`)) { - homedir2 = homedir2 || os5.homedir(); + } else if (pattern === "~" || pattern.startsWith(`~${path13.sep}`)) { + homedir2 = homedir2 || os4.homedir(); (0, assert_1.default)(homedir2, "Unable to determine HOME directory"); (0, assert_1.default)(pathHelper.hasAbsoluteRoot(homedir2), `Expected HOME directory to be a rooted path. Actual '${homedir2}'`); pattern = _Pattern.globEscape(homedir2) + pattern.substr(1); @@ -50164,8 +50164,8 @@ var require_internal_search_state = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.SearchState = void 0; var SearchState = class { - constructor(path16, level) { - this.path = path16; + constructor(path13, level) { + this.path = path13; this.level = level; } }; @@ -50216,11 +50216,11 @@ var require_internal_globber = __commonJS({ })(); var __awaiter2 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { function adopt(value) { - return value instanceof P ? value : new P(function(resolve8) { - resolve8(value); + return value instanceof P ? value : new P(function(resolve7) { + resolve7(value); }); } - return new (P || (P = Promise))(function(resolve8, reject) { + return new (P || (P = Promise))(function(resolve7, reject) { function fulfilled(value) { try { step(generator.next(value)); @@ -50236,7 +50236,7 @@ var require_internal_globber = __commonJS({ } } function step(result) { - result.done ? resolve8(result.value) : adopt(result.value).then(fulfilled, rejected); + result.done ? resolve7(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); @@ -50249,14 +50249,14 @@ var require_internal_globber = __commonJS({ }, i); function verb(n) { i[n] = o[n] && function(v) { - return new Promise(function(resolve8, reject) { - v = o[n](v), settle(resolve8, reject, v.done, v.value); + return new Promise(function(resolve7, reject) { + v = o[n](v), settle(resolve7, reject, v.done, v.value); }); }; } - function settle(resolve8, reject, d, v) { + function settle(resolve7, reject, d, v) { Promise.resolve(v).then(function(v2) { - resolve8({ value: v2, done: d }); + resolve7({ value: v2, done: d }); }, reject); } }; @@ -50307,9 +50307,9 @@ var require_internal_globber = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.DefaultGlobber = void 0; var core15 = __importStar2(require_core()); - var fs17 = __importStar2(require("fs")); + var fs14 = __importStar2(require("fs")); var globOptionsHelper = __importStar2(require_internal_glob_options_helper()); - var path16 = __importStar2(require("path")); + var path13 = __importStar2(require("path")); var patternHelper = __importStar2(require_internal_pattern_helper()); var internal_match_kind_1 = require_internal_match_kind(); var internal_pattern_1 = require_internal_pattern(); @@ -50361,7 +50361,7 @@ var require_internal_globber = __commonJS({ for (const searchPath of patternHelper.getSearchPaths(patterns)) { core15.debug(`Search path '${searchPath}'`); try { - yield __await2(fs17.promises.lstat(searchPath)); + yield __await2(fs14.promises.lstat(searchPath)); } catch (err) { if (err.code === "ENOENT") { continue; @@ -50385,7 +50385,7 @@ var require_internal_globber = __commonJS({ if (!stats) { continue; } - if (options.excludeHiddenFiles && 
path16.basename(item.path).match(/^\./)) { + if (options.excludeHiddenFiles && path13.basename(item.path).match(/^\./)) { continue; } if (stats.isDirectory()) { @@ -50395,7 +50395,7 @@ var require_internal_globber = __commonJS({ continue; } const childLevel = item.level + 1; - const childItems = (yield __await2(fs17.promises.readdir(item.path))).map((x) => new internal_search_state_1.SearchState(path16.join(item.path, x), childLevel)); + const childItems = (yield __await2(fs14.promises.readdir(item.path))).map((x) => new internal_search_state_1.SearchState(path13.join(item.path, x), childLevel)); stack.push(...childItems.reverse()); } else if (match & internal_match_kind_1.MatchKind.File) { yield yield __await2(item.path); @@ -50430,7 +50430,7 @@ var require_internal_globber = __commonJS({ let stats; if (options.followSymbolicLinks) { try { - stats = yield fs17.promises.stat(item.path); + stats = yield fs14.promises.stat(item.path); } catch (err) { if (err.code === "ENOENT") { if (options.omitBrokenSymbolicLinks) { @@ -50442,10 +50442,10 @@ var require_internal_globber = __commonJS({ throw err; } } else { - stats = yield fs17.promises.lstat(item.path); + stats = yield fs14.promises.lstat(item.path); } if (stats.isDirectory() && options.followSymbolicLinks) { - const realPath = yield fs17.promises.realpath(item.path); + const realPath = yield fs14.promises.realpath(item.path); while (traversalChain.length >= item.level) { traversalChain.pop(); } @@ -50506,11 +50506,11 @@ var require_internal_hash_files = __commonJS({ })(); var __awaiter2 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { function adopt(value) { - return value instanceof P ? value : new P(function(resolve8) { - resolve8(value); + return value instanceof P ? value : new P(function(resolve7) { + resolve7(value); }); } - return new (P || (P = Promise))(function(resolve8, reject) { + return new (P || (P = Promise))(function(resolve7, reject) { function fulfilled(value) { try { step(generator.next(value)); @@ -50526,7 +50526,7 @@ var require_internal_hash_files = __commonJS({ } } function step(result) { - result.done ? resolve8(result.value) : adopt(result.value).then(fulfilled, rejected); + result.done ? 
resolve7(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); @@ -50539,14 +50539,14 @@ var require_internal_hash_files = __commonJS({ }, i); function verb(n) { i[n] = o[n] && function(v) { - return new Promise(function(resolve8, reject) { - v = o[n](v), settle(resolve8, reject, v.done, v.value); + return new Promise(function(resolve7, reject) { + v = o[n](v), settle(resolve7, reject, v.done, v.value); }); }; } - function settle(resolve8, reject, d, v) { + function settle(resolve7, reject, d, v) { Promise.resolve(v).then(function(v2) { - resolve8({ value: v2, done: d }); + resolve7({ value: v2, done: d }); }, reject); } }; @@ -50554,10 +50554,10 @@ var require_internal_hash_files = __commonJS({ exports2.hashFiles = hashFiles2; var crypto3 = __importStar2(require("crypto")); var core15 = __importStar2(require_core()); - var fs17 = __importStar2(require("fs")); - var stream2 = __importStar2(require("stream")); + var fs14 = __importStar2(require("fs")); + var stream = __importStar2(require("stream")); var util = __importStar2(require("util")); - var path16 = __importStar2(require("path")); + var path13 = __importStar2(require("path")); function hashFiles2(globber_1, currentWorkspace_1) { return __awaiter2(this, arguments, void 0, function* (globber, currentWorkspace, verbose = false) { var _a, e_1, _b, _c; @@ -50573,17 +50573,17 @@ var require_internal_hash_files = __commonJS({ _e = false; const file = _c; writeDelegate(file); - if (!file.startsWith(`${githubWorkspace}${path16.sep}`)) { + if (!file.startsWith(`${githubWorkspace}${path13.sep}`)) { writeDelegate(`Ignore '${file}' since it is not under GITHUB_WORKSPACE.`); continue; } - if (fs17.statSync(file).isDirectory()) { + if (fs14.statSync(file).isDirectory()) { writeDelegate(`Skip directory '${file}'.`); continue; } const hash2 = crypto3.createHash("sha256"); - const pipeline = util.promisify(stream2.pipeline); - yield pipeline(fs17.createReadStream(file), hash2); + const pipeline = util.promisify(stream.pipeline); + yield pipeline(fs14.createReadStream(file), hash2); result.write(hash2.digest()); count++; if (!hasMatch) { @@ -50618,11 +50618,11 @@ var require_glob = __commonJS({ "use strict"; var __awaiter2 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { function adopt(value) { - return value instanceof P ? value : new P(function(resolve8) { - resolve8(value); + return value instanceof P ? value : new P(function(resolve7) { + resolve7(value); }); } - return new (P || (P = Promise))(function(resolve8, reject) { + return new (P || (P = Promise))(function(resolve7, reject) { function fulfilled(value) { try { step(generator.next(value)); @@ -50638,7 +50638,7 @@ var require_glob = __commonJS({ } } function step(result) { - result.done ? resolve8(result.value) : adopt(result.value).then(fulfilled, rejected); + result.done ? resolve7(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); @@ -51898,11 +51898,11 @@ var require_cacheUtils = __commonJS({ })(); var __awaiter2 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { function adopt(value) { - return value instanceof P ? value : new P(function(resolve8) { - resolve8(value); + return value instanceof P ? 
value : new P(function(resolve7) { + resolve7(value); }); } - return new (P || (P = Promise))(function(resolve8, reject) { + return new (P || (P = Promise))(function(resolve7, reject) { function fulfilled(value) { try { step(generator.next(value)); @@ -51918,7 +51918,7 @@ var require_cacheUtils = __commonJS({ } } function step(result) { - result.done ? resolve8(result.value) : adopt(result.value).then(fulfilled, rejected); + result.done ? resolve7(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); @@ -51931,14 +51931,14 @@ var require_cacheUtils = __commonJS({ }, i); function verb(n) { i[n] = o[n] && function(v) { - return new Promise(function(resolve8, reject) { - v = o[n](v), settle(resolve8, reject, v.done, v.value); + return new Promise(function(resolve7, reject) { + v = o[n](v), settle(resolve7, reject, v.done, v.value); }); }; } - function settle(resolve8, reject, d, v) { + function settle(resolve7, reject, d, v) { Promise.resolve(v).then(function(v2) { - resolve8({ value: v2, done: d }); + resolve7({ value: v2, done: d }); }, reject); } }; @@ -51958,8 +51958,8 @@ var require_cacheUtils = __commonJS({ var glob2 = __importStar2(require_glob()); var io7 = __importStar2(require_io()); var crypto3 = __importStar2(require("crypto")); - var fs17 = __importStar2(require("fs")); - var path16 = __importStar2(require("path")); + var fs14 = __importStar2(require("fs")); + var path13 = __importStar2(require("path")); var semver9 = __importStar2(require_semver3()); var util = __importStar2(require("util")); var constants_1 = require_constants12(); @@ -51979,15 +51979,15 @@ var require_cacheUtils = __commonJS({ baseLocation = "/home"; } } - tempDirectory = path16.join(baseLocation, "actions", "temp"); + tempDirectory = path13.join(baseLocation, "actions", "temp"); } - const dest = path16.join(tempDirectory, crypto3.randomUUID()); + const dest = path13.join(tempDirectory, crypto3.randomUUID()); yield io7.mkdirP(dest); return dest; }); } function getArchiveFileSizeInBytes(filePath) { - return fs17.statSync(filePath).size; + return fs14.statSync(filePath).size; } function resolvePaths(patterns) { return __awaiter2(this, void 0, void 0, function* () { @@ -52003,7 +52003,7 @@ var require_cacheUtils = __commonJS({ _c = _g.value; _e = false; const file = _c; - const relativeFile = path16.relative(workspace, file).replace(new RegExp(`\\${path16.sep}`, "g"), "/"); + const relativeFile = path13.relative(workspace, file).replace(new RegExp(`\\${path13.sep}`, "g"), "/"); core15.debug(`Matched: ${relativeFile}`); if (relativeFile === "") { paths.push("."); @@ -52025,7 +52025,7 @@ var require_cacheUtils = __commonJS({ } function unlinkFile(filePath) { return __awaiter2(this, void 0, void 0, function* () { - return util.promisify(fs17.unlink)(filePath); + return util.promisify(fs14.unlink)(filePath); }); } function getVersion(app_1) { @@ -52067,7 +52067,7 @@ var require_cacheUtils = __commonJS({ } function getGnuTarPathOnWindows() { return __awaiter2(this, void 0, void 0, function* () { - if (fs17.existsSync(constants_1.GnuTarPathOnWindows)) { + if (fs14.existsSync(constants_1.GnuTarPathOnWindows)) { return constants_1.GnuTarPathOnWindows; } const versionOutput = yield getVersion("tar"); @@ -52220,11 +52220,11 @@ function __metadata(metadataKey, metadataValue) { } function __awaiter(thisArg, _arguments, P, generator) { function adopt(value) { - return value instanceof P ? 
value : new P(function(resolve8) { - resolve8(value); + return value instanceof P ? value : new P(function(resolve7) { + resolve7(value); }); } - return new (P || (P = Promise))(function(resolve8, reject) { + return new (P || (P = Promise))(function(resolve7, reject) { function fulfilled(value) { try { step(generator.next(value)); @@ -52240,7 +52240,7 @@ function __awaiter(thisArg, _arguments, P, generator) { } } function step(result) { - result.done ? resolve8(result.value) : adopt(result.value).then(fulfilled, rejected); + result.done ? resolve7(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); @@ -52431,14 +52431,14 @@ function __asyncValues(o) { }, i); function verb(n) { i[n] = o[n] && function(v) { - return new Promise(function(resolve8, reject) { - v = o[n](v), settle(resolve8, reject, v.done, v.value); + return new Promise(function(resolve7, reject) { + v = o[n](v), settle(resolve7, reject, v.done, v.value); }); }; } - function settle(resolve8, reject, d, v) { + function settle(resolve7, reject, d, v) { Promise.resolve(v).then(function(v2) { - resolve8({ value: v2, done: d }); + resolve7({ value: v2, done: d }); }, reject); } } @@ -52530,13 +52530,13 @@ function __disposeResources(env) { } return next(); } -function __rewriteRelativeImportExtension(path16, preserveJsx) { - if (typeof path16 === "string" && /^\.\.?\//.test(path16)) { - return path16.replace(/\.(tsx)$|((?:\.d)?)((?:\.[^./]+?)?)\.([cm]?)ts$/i, function(m, tsx, d, ext, cm) { +function __rewriteRelativeImportExtension(path13, preserveJsx) { + if (typeof path13 === "string" && /^\.\.?\//.test(path13)) { + return path13.replace(/\.(tsx)$|((?:\.d)?)((?:\.[^./]+?)?)\.([cm]?)ts$/i, function(m, tsx, d, ext, cm) { return tsx ? preserveJsx ? ".jsx" : ".js" : d && (!ext || !cm) ? m : d + ext + "." 
+ cm.toLowerCase() + "js"; }); } - return path16; + return path13; } var extendStatics, __assign, __createBinding, __setModuleDefault, ownKeys, _SuppressedError, tslib_es6_default; var init_tslib_es6 = __esm({ @@ -53041,8 +53041,8 @@ var require_uuidUtils = __commonJS({ "node_modules/@typespec/ts-http-runtime/dist/commonjs/util/uuidUtils.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.randomUUID = randomUUID2; - function randomUUID2() { + exports2.randomUUID = randomUUID; + function randomUUID() { return crypto.randomUUID(); } } @@ -53606,20 +53606,20 @@ var require_nodeHttpClient = __commonJS({ function isReadableStream(body) { return body && typeof body.pipe === "function"; } - function isStreamComplete(stream2) { - if (stream2.readable === false) { + function isStreamComplete(stream) { + if (stream.readable === false) { return Promise.resolve(); } - return new Promise((resolve8) => { + return new Promise((resolve7) => { const handler2 = () => { - resolve8(); - stream2.removeListener("close", handler2); - stream2.removeListener("end", handler2); - stream2.removeListener("error", handler2); + resolve7(); + stream.removeListener("close", handler2); + stream.removeListener("end", handler2); + stream.removeListener("error", handler2); }; - stream2.on("close", handler2); - stream2.on("end", handler2); - stream2.on("error", handler2); + stream.on("close", handler2); + stream.on("end", handler2); + stream.on("error", handler2); }); } function isArrayBuffer(body) { @@ -53767,8 +53767,8 @@ var require_nodeHttpClient = __commonJS({ headers: request2.headers.toJSON({ preserveCase: true }), ...request2.requestOverrides }; - return new Promise((resolve8, reject) => { - const req = isInsecure ? node_http_1.default.request(options, resolve8) : node_https_1.default.request(options, resolve8); + return new Promise((resolve7, reject) => { + const req = isInsecure ? node_http_1.default.request(options, resolve7) : node_https_1.default.request(options, resolve7); req.once("error", (err) => { reject(new restError_js_1.RestError(err.message, { code: err.code ?? 
restError_js_1.RestError.REQUEST_SEND_ERROR, request: request2 })); }); @@ -53838,33 +53838,33 @@ var require_nodeHttpClient = __commonJS({ } return headers; } - function getDecodedResponseStream(stream2, headers) { + function getDecodedResponseStream(stream, headers) { const contentEncoding = headers.get("Content-Encoding"); if (contentEncoding === "gzip") { const unzip = node_zlib_1.default.createGunzip(); - stream2.pipe(unzip); + stream.pipe(unzip); return unzip; } else if (contentEncoding === "deflate") { const inflate = node_zlib_1.default.createInflate(); - stream2.pipe(inflate); + stream.pipe(inflate); return inflate; } - return stream2; + return stream; } - function streamToText(stream2) { - return new Promise((resolve8, reject) => { + function streamToText(stream) { + return new Promise((resolve7, reject) => { const buffer = []; - stream2.on("data", (chunk) => { + stream.on("data", (chunk) => { if (Buffer.isBuffer(chunk)) { buffer.push(chunk); } else { buffer.push(Buffer.from(chunk)); } }); - stream2.on("end", () => { - resolve8(Buffer.concat(buffer).toString("utf8")); + stream.on("end", () => { + resolve7(Buffer.concat(buffer).toString("utf8")); }); - stream2.on("error", (e) => { + stream.on("error", (e) => { if (e && e?.name === "AbortError") { reject(e); } else { @@ -54140,7 +54140,7 @@ var require_helpers2 = __commonJS({ var AbortError_js_1 = require_AbortError(); var StandardAbortMessage = "The operation was aborted."; function delay2(delayInMs, value, options) { - return new Promise((resolve8, reject) => { + return new Promise((resolve7, reject) => { let timer = void 0; let onAborted = void 0; const rejectOnAbort = () => { @@ -54163,7 +54163,7 @@ var require_helpers2 = __commonJS({ } timer = setTimeout(() => { removeListeners(); - resolve8(value); + resolve7(value); }, delayInMs); if (options?.abortSignal) { options.abortSignal.addEventListener("abort", onAborted); @@ -54983,7 +54983,7 @@ var require_has_flag = __commonJS({ var require_supports_color = __commonJS({ "node_modules/supports-color/index.js"(exports2, module2) { "use strict"; - var os5 = require("os"); + var os4 = require("os"); var tty = require("tty"); var hasFlag = require_has_flag(); var { env } = process; @@ -55031,7 +55031,7 @@ var require_supports_color = __commonJS({ return min; } if (process.platform === "win32") { - const osRelease = os5.release().split("."); + const osRelease = os4.release().split("."); if (Number(osRelease[0]) >= 10 && Number(osRelease[2]) >= 10586) { return Number(osRelease[2]) >= 14931 ? 
3 : 2; } @@ -55069,8 +55069,8 @@ var require_supports_color = __commonJS({ } return min; } - function getSupportLevel(stream2) { - const level = supportsColor(stream2, stream2 && stream2.isTTY); + function getSupportLevel(stream) { + const level = supportsColor(stream, stream && stream.isTTY); return translateLevel(level); } module2.exports = { @@ -55301,18 +55301,18 @@ var require_helpers3 = __commonJS({ exports2.req = exports2.json = exports2.toBuffer = void 0; var http = __importStar2(require("http")); var https2 = __importStar2(require("https")); - async function toBuffer(stream2) { + async function toBuffer(stream) { let length = 0; const chunks = []; - for await (const chunk of stream2) { + for await (const chunk of stream) { length += chunk.length; chunks.push(chunk); } return Buffer.concat(chunks, length); } exports2.toBuffer = toBuffer; - async function json2(stream2) { - const buf = await toBuffer(stream2); + async function json2(stream) { + const buf = await toBuffer(stream); const str2 = buf.toString("utf8"); try { return JSON.parse(str2); @@ -55326,8 +55326,8 @@ var require_helpers3 = __commonJS({ function req(url2, opts = {}) { const href = typeof url2 === "string" ? url2 : url2.href; const req2 = (href.startsWith("https:") ? https2 : http).request(url2, opts); - const promise = new Promise((resolve8, reject) => { - req2.once("response", resolve8).once("error", reject).end(); + const promise = new Promise((resolve7, reject) => { + req2.once("response", resolve7).once("error", reject).end(); }); req2.then = promise.then.bind(promise); return req2; @@ -55504,7 +55504,7 @@ var require_parse_proxy_response = __commonJS({ var debug_1 = __importDefault2(require_src()); var debug5 = (0, debug_1.default)("https-proxy-agent:parse-proxy-response"); function parseProxyResponse(socket) { - return new Promise((resolve8, reject) => { + return new Promise((resolve7, reject) => { let buffersLength = 0; const buffers = []; function read() { @@ -55570,7 +55570,7 @@ var require_parse_proxy_response = __commonJS({ } debug5("got proxy server response: %o %o", firstLine, headers); cleanup(); - resolve8({ + resolve7({ connect: { statusCode, statusText, @@ -56121,12 +56121,12 @@ var require_concat = __commonJS({ webStream.values = streamAsyncIterator.bind(webStream); } } - function ensureNodeStream(stream2) { - if (stream2 instanceof ReadableStream) { - makeAsyncIterable(stream2); - return stream_1.Readable.fromWeb(stream2); + function ensureNodeStream(stream) { + if (stream instanceof ReadableStream) { + makeAsyncIterable(stream); + return stream_1.Readable.fromWeb(stream); } else { - return stream2; + return stream; } } function toStream(source) { @@ -56142,8 +56142,8 @@ var require_concat = __commonJS({ return function() { const streams = sources.map((x) => typeof x === "function" ? x() : x).map(toStream); return stream_1.Readable.from((async function* () { - for (const stream2 of streams) { - for await (const chunk of stream2) { + for (const stream of streams) { + for await (const chunk of stream) { yield chunk; } } @@ -56678,9 +56678,9 @@ var require_sendRequest = __commonJS({ try { const response = await pipeline.sendRequest(httpClient, request2); const headers = response.headers.toJSON(); - const stream2 = response.readableStreamBody ?? response.browserStreamBody; - const parsedBody = options.responseAsStream || stream2 !== void 0 ? void 0 : getResponseBody(response); - const body = stream2 ?? parsedBody; + const stream = response.readableStreamBody ?? 
response.browserStreamBody; + const parsedBody = options.responseAsStream || stream !== void 0 ? void 0 : getResponseBody(response); + const body = stream ?? parsedBody; if (options?.onResponse) { options.onResponse({ ...response, request: request2, rawHeaders: headers, parsedBody }); } @@ -56950,8 +56950,8 @@ var require_getClient = __commonJS({ } const { allowInsecureConnection, httpClient } = clientOptions; const endpointUrl = clientOptions.endpoint ?? endpoint2; - const client = (path16, ...args) => { - const getUrl = (requestOptions) => (0, urlHelpers_js_1.buildRequestUrl)(endpointUrl, path16, args, { allowInsecureConnection, ...requestOptions }); + const client = (path13, ...args) => { + const getUrl = (requestOptions) => (0, urlHelpers_js_1.buildRequestUrl)(endpointUrl, path13, args, { allowInsecureConnection, ...requestOptions }); return { get: (requestOptions = {}) => { return buildOperation("GET", getUrl(requestOptions), pipeline, requestOptions, allowInsecureConnection, httpClient); @@ -57656,7 +57656,7 @@ var require_createAbortablePromise = __commonJS({ var abort_controller_1 = require_commonjs3(); function createAbortablePromise(buildPromise, options) { const { cleanupBeforeAbort, abortSignal, abortErrorMsg } = options ?? {}; - return new Promise((resolve8, reject) => { + return new Promise((resolve7, reject) => { function rejectOnAbort() { reject(new abort_controller_1.AbortError(abortErrorMsg ?? "The operation was aborted.")); } @@ -57674,7 +57674,7 @@ var require_createAbortablePromise = __commonJS({ try { buildPromise((x) => { removeListeners(); - resolve8(x); + resolve7(x); }, (x) => { removeListeners(); reject(x); @@ -57701,8 +57701,8 @@ var require_delay2 = __commonJS({ function delay2(timeInMs, options) { let token; const { abortSignal, abortErrorMsg } = options ?? {}; - return (0, createAbortablePromise_js_1.createAbortablePromise)((resolve8) => { - token = setTimeout(resolve8, timeInMs); + return (0, createAbortablePromise_js_1.createAbortablePromise)((resolve7) => { + token = setTimeout(resolve7, timeInMs); }, { cleanupBeforeAbort: () => clearTimeout(token), abortSignal, @@ -57785,7 +57785,7 @@ var require_commonjs4 = __commonJS({ exports2.getRandomIntegerInclusive = getRandomIntegerInclusive; exports2.isError = isError; exports2.isObject = isObject2; - exports2.randomUUID = randomUUID2; + exports2.randomUUID = randomUUID; exports2.uint8ArrayToString = uint8ArrayToString; exports2.stringToUint8Array = stringToUint8Array; var tslib_1 = (init_tslib_es6(), __toCommonJS(tslib_es6_exports)); @@ -57834,7 +57834,7 @@ var require_commonjs4 = __commonJS({ function isObject2(input) { return tspRuntime.isObject(input); } - function randomUUID2() { + function randomUUID() { return tspRuntime.randomUUID(); } exports2.isBrowser = tspRuntime.isBrowser; @@ -57892,7 +57892,7 @@ var require_file3 = __commonJS({ return blob; } } - function createFileFromStream(stream2, name, options = {}) { + function createFileFromStream(stream, name, options = {}) { return { ...unimplementedMethods, type: options.type ?? "", @@ -57901,13 +57901,13 @@ var require_file3 = __commonJS({ size: options.size ?? 
-1, name, stream: () => { - const s = stream2(); + const s = stream(); if (isNodeReadableStream(s)) { throw new Error("Not supported: a Node stream was provided as input to createFileFromStream."); } return s; }, - [rawContent]: stream2 + [rawContent]: stream }; } function createFile(content, name, options = {}) { @@ -60822,15 +60822,15 @@ var require_urlHelpers2 = __commonJS({ let isAbsolutePath = false; let requestUrl = replaceAll(baseUri, urlReplacements); if (operationSpec.path) { - let path16 = replaceAll(operationSpec.path, urlReplacements); - if (operationSpec.path === "/{nextLink}" && path16.startsWith("/")) { - path16 = path16.substring(1); + let path13 = replaceAll(operationSpec.path, urlReplacements); + if (operationSpec.path === "/{nextLink}" && path13.startsWith("/")) { + path13 = path13.substring(1); } - if (isAbsoluteUrl(path16)) { - requestUrl = path16; + if (isAbsoluteUrl(path13)) { + requestUrl = path13; isAbsolutePath = true; } else { - requestUrl = appendPath(requestUrl, path16); + requestUrl = appendPath(requestUrl, path13); } } const { queryParams, sequenceParams } = calculateQueryParameters(operationSpec, operationArguments, fallbackObject); @@ -60876,9 +60876,9 @@ var require_urlHelpers2 = __commonJS({ } const searchStart = pathToAppend.indexOf("?"); if (searchStart !== -1) { - const path16 = pathToAppend.substring(0, searchStart); + const path13 = pathToAppend.substring(0, searchStart); const search = pathToAppend.substring(searchStart + 1); - newPath = newPath + path16; + newPath = newPath + path13; if (search) { parsedUrl.search = parsedUrl.search ? `${parsedUrl.search}&${search}` : search; } @@ -63129,10 +63129,10 @@ var require_utils_common = __commonJS({ var constants_js_1 = require_constants15(); function escapeURLPath(url2) { const urlParsed = new URL(url2); - let path16 = urlParsed.pathname; - path16 = path16 || "/"; - path16 = escape(path16); - urlParsed.pathname = path16; + let path13 = urlParsed.pathname; + path13 = path13 || "/"; + path13 = escape(path13); + urlParsed.pathname = path13; return urlParsed.toString(); } function getProxyUriFromDevConnString(connectionString) { @@ -63217,9 +63217,9 @@ var require_utils_common = __commonJS({ } function appendToURLPath(url2, name) { const urlParsed = new URL(url2); - let path16 = urlParsed.pathname; - path16 = path16 ? path16.endsWith("/") ? `${path16}${name}` : `${path16}/${name}` : name; - urlParsed.pathname = path16; + let path13 = urlParsed.pathname; + path13 = path13 ? path13.endsWith("/") ? 
`${path13}${name}` : `${path13}/${name}` : name; + urlParsed.pathname = path13; return urlParsed.toString(); } function setURLParameter(url2, name, value) { @@ -63334,7 +63334,7 @@ var require_utils_common = __commonJS({ return base64encode(res); } async function delay2(timeInMs, aborter, abortError) { - return new Promise((resolve8, reject) => { + return new Promise((resolve7, reject) => { let timeout; const abortHandler = () => { if (timeout !== void 0) { @@ -63346,7 +63346,7 @@ var require_utils_common = __commonJS({ if (aborter !== void 0) { aborter.removeEventListener("abort", abortHandler); } - resolve8(); + resolve7(); }; timeout = setTimeout(resolveHandler, timeInMs); if (aborter !== void 0) { @@ -64446,9 +64446,9 @@ var require_StorageSharedKeyCredentialPolicy = __commonJS({ * @param request - */ getCanonicalizedResourceString(request2) { - const path16 = (0, utils_common_js_1.getURLPath)(request2.url) || "/"; + const path13 = (0, utils_common_js_1.getURLPath)(request2.url) || "/"; let canonicalizedResourceString = ""; - canonicalizedResourceString += `/${this.factory.accountName}${path16}`; + canonicalizedResourceString += `/${this.factory.accountName}${path13}`; const queries = (0, utils_common_js_1.getURLQueries)(request2.url); const lowercaseQueries = {}; if (queries) { @@ -64887,7 +64887,7 @@ var require_BufferScheduler = __commonJS({ * */ async do() { - return new Promise((resolve8, reject) => { + return new Promise((resolve7, reject) => { this.readable.on("data", (data) => { data = typeof data === "string" ? Buffer.from(data, this.encoding) : data; this.appendUnresolvedData(data); @@ -64915,11 +64915,11 @@ var require_BufferScheduler = __commonJS({ if (this.isStreamEnd && this.executingOutgoingHandlers === 0) { if (this.unresolvedLength > 0 && this.unresolvedLength < this.bufferSize) { const buffer = this.shiftBufferFromUnresolvedDataArray(); - this.outgoingHandler(() => buffer.getReadableStream(), buffer.size, this.offset).then(resolve8).catch(reject); + this.outgoingHandler(() => buffer.getReadableStream(), buffer.size, this.offset).then(resolve7).catch(reject); } else if (this.unresolvedLength >= this.bufferSize) { return; } else { - resolve8(); + resolve7(); } } }); @@ -65187,10 +65187,10 @@ var require_utils_common2 = __commonJS({ var constants_js_1 = require_constants16(); function escapeURLPath(url2) { const urlParsed = new URL(url2); - let path16 = urlParsed.pathname; - path16 = path16 || "/"; - path16 = escape(path16); - urlParsed.pathname = path16; + let path13 = urlParsed.pathname; + path13 = path13 || "/"; + path13 = escape(path13); + urlParsed.pathname = path13; return urlParsed.toString(); } function getProxyUriFromDevConnString(connectionString) { @@ -65275,9 +65275,9 @@ var require_utils_common2 = __commonJS({ } function appendToURLPath(url2, name) { const urlParsed = new URL(url2); - let path16 = urlParsed.pathname; - path16 = path16 ? path16.endsWith("/") ? `${path16}${name}` : `${path16}/${name}` : name; - urlParsed.pathname = path16; + let path13 = urlParsed.pathname; + path13 = path13 ? path13.endsWith("/") ? 
`${path13}${name}` : `${path13}/${name}` : name; + urlParsed.pathname = path13; return urlParsed.toString(); } function setURLParameter(url2, name, value) { @@ -65392,7 +65392,7 @@ var require_utils_common2 = __commonJS({ return base64encode(res); } async function delay2(timeInMs, aborter, abortError) { - return new Promise((resolve8, reject) => { + return new Promise((resolve7, reject) => { let timeout; const abortHandler = () => { if (timeout !== void 0) { @@ -65404,7 +65404,7 @@ var require_utils_common2 = __commonJS({ if (aborter !== void 0) { aborter.removeEventListener("abort", abortHandler); } - resolve8(); + resolve7(); }; timeout = setTimeout(resolveHandler, timeInMs); if (aborter !== void 0) { @@ -66198,9 +66198,9 @@ var require_StorageSharedKeyCredentialPolicy2 = __commonJS({ * @param request - */ getCanonicalizedResourceString(request2) { - const path16 = (0, utils_common_js_1.getURLPath)(request2.url) || "/"; + const path13 = (0, utils_common_js_1.getURLPath)(request2.url) || "/"; let canonicalizedResourceString = ""; - canonicalizedResourceString += `/${this.factory.accountName}${path16}`; + canonicalizedResourceString += `/${this.factory.accountName}${path13}`; const queries = (0, utils_common_js_1.getURLQueries)(request2.url); const lowercaseQueries = {}; if (queries) { @@ -66830,9 +66830,9 @@ var require_StorageSharedKeyCredentialPolicyV2 = __commonJS({ return canonicalizedHeadersStringToSign; } function getCanonicalizedResourceString(request2) { - const path16 = (0, utils_common_js_1.getURLPath)(request2.url) || "/"; + const path13 = (0, utils_common_js_1.getURLPath)(request2.url) || "/"; let canonicalizedResourceString = ""; - canonicalizedResourceString += `/${options.accountName}${path16}`; + canonicalizedResourceString += `/${options.accountName}${path13}`; const queries = (0, utils_common_js_1.getURLQueries)(request2.url); const lowercaseQueries = {}; if (queries) { @@ -67177,9 +67177,9 @@ var require_StorageSharedKeyCredentialPolicyV22 = __commonJS({ return canonicalizedHeadersStringToSign; } function getCanonicalizedResourceString(request2) { - const path16 = (0, utils_common_js_1.getURLPath)(request2.url) || "/"; + const path13 = (0, utils_common_js_1.getURLPath)(request2.url) || "/"; let canonicalizedResourceString = ""; - canonicalizedResourceString += `/${options.accountName}${path16}`; + canonicalizedResourceString += `/${options.accountName}${path13}`; const queries = (0, utils_common_js_1.getURLQueries)(request2.url); const lowercaseQueries = {}; if (queries) { @@ -83105,8 +83105,8 @@ var require_AvroParser = __commonJS({ * @param length - * @param options - */ - static async readFixedBytes(stream2, length, options = {}) { - const bytes = await stream2.read(length, { abortSignal: options.abortSignal }); + static async readFixedBytes(stream, length, options = {}) { + const bytes = await stream.read(length, { abortSignal: options.abortSignal }); if (bytes.length !== length) { throw new Error("Hit stream end."); } @@ -83118,19 +83118,19 @@ var require_AvroParser = __commonJS({ * @param stream - * @param options - */ - static async readByte(stream2, options = {}) { - const buf = await _AvroParser.readFixedBytes(stream2, 1, options); + static async readByte(stream, options = {}) { + const buf = await _AvroParser.readFixedBytes(stream, 1, options); return buf[0]; } // int and long are stored in variable-length zig-zag coding. 
// variable-length: https://lucene.apache.org/core/3_5_0/fileformats.html#VInt // zig-zag: https://developers.google.com/protocol-buffers/docs/encoding?csw=1#types - static async readZigZagLong(stream2, options = {}) { + static async readZigZagLong(stream, options = {}) { let zigZagEncoded = 0; let significanceInBit = 0; let byte, haveMoreByte, significanceInFloat; do { - byte = await _AvroParser.readByte(stream2, options); + byte = await _AvroParser.readByte(stream, options); haveMoreByte = byte & 128; zigZagEncoded |= (byte & 127) << significanceInBit; significanceInBit += 7; @@ -83139,7 +83139,7 @@ var require_AvroParser = __commonJS({ zigZagEncoded = zigZagEncoded; significanceInFloat = 268435456; do { - byte = await _AvroParser.readByte(stream2, options); + byte = await _AvroParser.readByte(stream, options); zigZagEncoded += (byte & 127) * significanceInFloat; significanceInFloat *= 128; } while (byte & 128); @@ -83151,17 +83151,17 @@ var require_AvroParser = __commonJS({ } return zigZagEncoded >> 1 ^ -(zigZagEncoded & 1); } - static async readLong(stream2, options = {}) { - return _AvroParser.readZigZagLong(stream2, options); + static async readLong(stream, options = {}) { + return _AvroParser.readZigZagLong(stream, options); } - static async readInt(stream2, options = {}) { - return _AvroParser.readZigZagLong(stream2, options); + static async readInt(stream, options = {}) { + return _AvroParser.readZigZagLong(stream, options); } static async readNull() { return null; } - static async readBoolean(stream2, options = {}) { - const b = await _AvroParser.readByte(stream2, options); + static async readBoolean(stream, options = {}) { + const b = await _AvroParser.readByte(stream, options); if (b === 1) { return true; } else if (b === 0) { @@ -83170,53 +83170,53 @@ var require_AvroParser = __commonJS({ throw new Error("Byte was not a boolean."); } } - static async readFloat(stream2, options = {}) { - const u8arr = await _AvroParser.readFixedBytes(stream2, 4, options); + static async readFloat(stream, options = {}) { + const u8arr = await _AvroParser.readFixedBytes(stream, 4, options); const view = new DataView(u8arr.buffer, u8arr.byteOffset, u8arr.byteLength); return view.getFloat32(0, true); } - static async readDouble(stream2, options = {}) { - const u8arr = await _AvroParser.readFixedBytes(stream2, 8, options); + static async readDouble(stream, options = {}) { + const u8arr = await _AvroParser.readFixedBytes(stream, 8, options); const view = new DataView(u8arr.buffer, u8arr.byteOffset, u8arr.byteLength); return view.getFloat64(0, true); } - static async readBytes(stream2, options = {}) { - const size = await _AvroParser.readLong(stream2, options); + static async readBytes(stream, options = {}) { + const size = await _AvroParser.readLong(stream, options); if (size < 0) { throw new Error("Bytes size was negative."); } - return stream2.read(size, { abortSignal: options.abortSignal }); + return stream.read(size, { abortSignal: options.abortSignal }); } - static async readString(stream2, options = {}) { - const u8arr = await _AvroParser.readBytes(stream2, options); + static async readString(stream, options = {}) { + const u8arr = await _AvroParser.readBytes(stream, options); const utf8decoder = new TextDecoder(); return utf8decoder.decode(u8arr); } - static async readMapPair(stream2, readItemMethod, options = {}) { - const key = await _AvroParser.readString(stream2, options); - const value = await readItemMethod(stream2, options); + static async readMapPair(stream, readItemMethod, options = 
{}) { + const key = await _AvroParser.readString(stream, options); + const value = await readItemMethod(stream, options); return { key, value }; } - static async readMap(stream2, readItemMethod, options = {}) { + static async readMap(stream, readItemMethod, options = {}) { const readPairMethod = (s, opts = {}) => { return _AvroParser.readMapPair(s, readItemMethod, opts); }; - const pairs2 = await _AvroParser.readArray(stream2, readPairMethod, options); + const pairs2 = await _AvroParser.readArray(stream, readPairMethod, options); const dict = {}; for (const pair of pairs2) { dict[pair.key] = pair.value; } return dict; } - static async readArray(stream2, readItemMethod, options = {}) { + static async readArray(stream, readItemMethod, options = {}) { const items = []; - for (let count = await _AvroParser.readLong(stream2, options); count !== 0; count = await _AvroParser.readLong(stream2, options)) { + for (let count = await _AvroParser.readLong(stream, options); count !== 0; count = await _AvroParser.readLong(stream, options)) { if (count < 0) { - await _AvroParser.readLong(stream2, options); + await _AvroParser.readLong(stream, options); count = -count; } while (count--) { - const item = await readItemMethod(stream2, options); + const item = await readItemMethod(stream, options); items.push(item); } } @@ -83328,24 +83328,24 @@ var require_AvroParser = __commonJS({ this._primitive = primitive; } // eslint-disable-next-line @typescript-eslint/no-wrapper-object-types - read(stream2, options = {}) { + read(stream, options = {}) { switch (this._primitive) { case AvroPrimitive.NULL: return AvroParser.readNull(); case AvroPrimitive.BOOLEAN: - return AvroParser.readBoolean(stream2, options); + return AvroParser.readBoolean(stream, options); case AvroPrimitive.INT: - return AvroParser.readInt(stream2, options); + return AvroParser.readInt(stream, options); case AvroPrimitive.LONG: - return AvroParser.readLong(stream2, options); + return AvroParser.readLong(stream, options); case AvroPrimitive.FLOAT: - return AvroParser.readFloat(stream2, options); + return AvroParser.readFloat(stream, options); case AvroPrimitive.DOUBLE: - return AvroParser.readDouble(stream2, options); + return AvroParser.readDouble(stream, options); case AvroPrimitive.BYTES: - return AvroParser.readBytes(stream2, options); + return AvroParser.readBytes(stream, options); case AvroPrimitive.STRING: - return AvroParser.readString(stream2, options); + return AvroParser.readString(stream, options); default: throw new Error("Unknown Avro Primitive"); } @@ -83358,8 +83358,8 @@ var require_AvroParser = __commonJS({ this._symbols = symbols; } // eslint-disable-next-line @typescript-eslint/no-wrapper-object-types - async read(stream2, options = {}) { - const value = await AvroParser.readInt(stream2, options); + async read(stream, options = {}) { + const value = await AvroParser.readInt(stream, options); return this._symbols[value]; } }; @@ -83369,9 +83369,9 @@ var require_AvroParser = __commonJS({ super(); this._types = types; } - async read(stream2, options = {}) { - const typeIndex = await AvroParser.readInt(stream2, options); - return this._types[typeIndex].read(stream2, options); + async read(stream, options = {}) { + const typeIndex = await AvroParser.readInt(stream, options); + return this._types[typeIndex].read(stream, options); } }; var AvroMapType = class extends AvroType { @@ -83381,11 +83381,11 @@ var require_AvroParser = __commonJS({ this._itemType = itemType; } // eslint-disable-next-line 
@typescript-eslint/no-wrapper-object-types - read(stream2, options = {}) { + read(stream, options = {}) { const readItemMethod = (s, opts) => { return this._itemType.read(s, opts); }; - return AvroParser.readMap(stream2, readItemMethod, options); + return AvroParser.readMap(stream, readItemMethod, options); } }; var AvroRecordType = class extends AvroType { @@ -83397,12 +83397,12 @@ var require_AvroParser = __commonJS({ this._name = name; } // eslint-disable-next-line @typescript-eslint/no-wrapper-object-types - async read(stream2, options = {}) { + async read(stream, options = {}) { const record = {}; record["$schema"] = this._name; for (const key in this._fields) { if (Object.prototype.hasOwnProperty.call(this._fields, key)) { - record[key] = await this._fields[key].read(stream2, options); + record[key] = await this._fields[key].read(stream, options); } } return record; @@ -83602,7 +83602,7 @@ var require_AvroReadableFromStream = __commonJS({ this._position += chunk.length; return this.toUint8Array(chunk); } else { - return new Promise((resolve8, reject) => { + return new Promise((resolve7, reject) => { const cleanUp = () => { this._readable.removeListener("readable", readableCallback); this._readable.removeListener("error", rejectCallback); @@ -83617,7 +83617,7 @@ var require_AvroReadableFromStream = __commonJS({ if (callbackChunk) { this._position += callbackChunk.length; cleanUp(); - resolve8(this.toUint8Array(callbackChunk)); + resolve7(this.toUint8Array(callbackChunk)); } }; const rejectCallback = () => { @@ -85097,8 +85097,8 @@ var require_poller3 = __commonJS({ this.stopped = true; this.pollProgressCallbacks = []; this.operation = operation; - this.promise = new Promise((resolve8, reject) => { - this.resolve = resolve8; + this.promise = new Promise((resolve7, reject) => { + this.resolve = resolve7; this.reject = reject; }); this.promise.catch(() => { @@ -85351,7 +85351,7 @@ var require_lroEngine = __commonJS({ * The method used by the poller to wait before attempting to update its operation. 
*/ delay() { - return new Promise((resolve8) => setTimeout(() => resolve8(), this.config.intervalInMs)); + return new Promise((resolve7) => setTimeout(() => resolve7(), this.config.intervalInMs)); } }; exports2.LroEngine = LroEngine; @@ -85597,8 +85597,8 @@ var require_Batch = __commonJS({ return Promise.resolve(); } this.parallelExecute(); - return new Promise((resolve8, reject) => { - this.emitter.on("finish", resolve8); + return new Promise((resolve7, reject) => { + this.emitter.on("finish", resolve7); this.emitter.on("error", (error3) => { this.state = BatchStates.Error; reject(error3); @@ -85656,18 +85656,18 @@ var require_utils7 = __commonJS({ var node_fs_1 = tslib_1.__importDefault(require("node:fs")); var node_util_1 = tslib_1.__importDefault(require("node:util")); var constants_js_1 = require_constants15(); - async function streamToBuffer(stream2, buffer, offset, end, encoding) { + async function streamToBuffer(stream, buffer, offset, end, encoding) { let pos = 0; const count = end - offset; - return new Promise((resolve8, reject) => { + return new Promise((resolve7, reject) => { const timeout = setTimeout(() => reject(new Error(`The operation cannot be completed in timeout.`)), constants_js_1.REQUEST_TIMEOUT); - stream2.on("readable", () => { + stream.on("readable", () => { if (pos >= count) { clearTimeout(timeout); - resolve8(); + resolve7(); return; } - let chunk = stream2.read(); + let chunk = stream.read(); if (!chunk) { return; } @@ -85678,25 +85678,25 @@ var require_utils7 = __commonJS({ buffer.fill(chunk.slice(0, chunkLength), offset + pos, offset + pos + chunkLength); pos += chunkLength; }); - stream2.on("end", () => { + stream.on("end", () => { clearTimeout(timeout); if (pos < count) { reject(new Error(`Stream drains before getting enough data needed. Data read: ${pos}, data need: ${count}`)); } - resolve8(); + resolve7(); }); - stream2.on("error", (msg) => { + stream.on("error", (msg) => { clearTimeout(timeout); reject(msg); }); }); } - async function streamToBuffer2(stream2, buffer, encoding) { + async function streamToBuffer2(stream, buffer, encoding) { let pos = 0; const bufferSize = buffer.length; - return new Promise((resolve8, reject) => { - stream2.on("readable", () => { - let chunk = stream2.read(); + return new Promise((resolve7, reject) => { + stream.on("readable", () => { + let chunk = stream.read(); if (!chunk) { return; } @@ -85710,26 +85710,26 @@ var require_utils7 = __commonJS({ buffer.fill(chunk, pos, pos + chunk.length); pos += chunk.length; }); - stream2.on("end", () => { - resolve8(pos); + stream.on("end", () => { + resolve7(pos); }); - stream2.on("error", reject); + stream.on("error", reject); }); } async function streamToBuffer3(readableStream, encoding) { - return new Promise((resolve8, reject) => { + return new Promise((resolve7, reject) => { const chunks = []; readableStream.on("data", (data) => { chunks.push(typeof data === "string" ? 
Buffer.from(data, encoding) : data); }); readableStream.on("end", () => { - resolve8(Buffer.concat(chunks)); + resolve7(Buffer.concat(chunks)); }); readableStream.on("error", reject); }); } async function readStreamToLocalFile(rs, file) { - return new Promise((resolve8, reject) => { + return new Promise((resolve7, reject) => { const ws = node_fs_1.default.createWriteStream(file); rs.on("error", (err) => { reject(err); @@ -85737,7 +85737,7 @@ var require_utils7 = __commonJS({ ws.on("error", (err) => { reject(err); }); - ws.on("close", resolve8); + ws.on("close", resolve7); rs.pipe(ws); }); } @@ -86551,8 +86551,8 @@ var require_Clients = __commonJS({ customerProvidedKey: options.customerProvidedKey, tracingOptions: updatedOptions.tracingOptions }); - const stream2 = response.readableStreamBody; - await (0, utils_js_1.streamToBuffer)(stream2, buffer, off - offset, chunkEnd - offset); + const stream = response.readableStreamBody; + await (0, utils_js_1.streamToBuffer)(stream, buffer, off - offset, chunkEnd - offset); transferProgress += chunkEnd - off; if (options.onProgress) { options.onProgress({ loadedBytes: transferProgress }); @@ -86676,7 +86676,7 @@ var require_Clients = __commonJS({ * @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. */ generateSasUrl(options) { - return new Promise((resolve8) => { + return new Promise((resolve7) => { if (!(this.credential instanceof StorageSharedKeyCredential_js_1.StorageSharedKeyCredential)) { throw new RangeError("Can only generate the SAS when the client is initialized with a shared key credential"); } @@ -86687,7 +86687,7 @@ var require_Clients = __commonJS({ versionId: this._versionId, ...options }, this.credential).toString(); - resolve8((0, utils_common_js_1.appendToURLQuery)(this.url, sas)); + resolve7((0, utils_common_js_1.appendToURLQuery)(this.url, sas)); }); } /** @@ -86726,7 +86726,7 @@ var require_Clients = __commonJS({ * @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. */ generateUserDelegationSasUrl(options, userDelegationKey) { - return new Promise((resolve8) => { + return new Promise((resolve7) => { const sas = (0, BlobSASSignatureValues_js_1.generateBlobSASQueryParameters)({ containerName: this._containerName, blobName: this._name, @@ -86734,7 +86734,7 @@ var require_Clients = __commonJS({ versionId: this._versionId, ...options }, userDelegationKey, this.accountName).toString(); - resolve8((0, utils_common_js_1.appendToURLQuery)(this.url, sas)); + resolve7((0, utils_common_js_1.appendToURLQuery)(this.url, sas)); }); } /** @@ -87655,7 +87655,7 @@ var require_Clients = __commonJS({ * @param options - Options to Upload Stream to Block Blob operation. * @returns Response data for the Blob Upload operation. 
*/ - async uploadStream(stream2, bufferSize = constants_js_1.DEFAULT_BLOCK_BUFFER_SIZE_BYTES, maxConcurrency = 5, options = {}) { + async uploadStream(stream, bufferSize = constants_js_1.DEFAULT_BLOCK_BUFFER_SIZE_BYTES, maxConcurrency = 5, options = {}) { if (!options.blobHTTPHeaders) { options.blobHTTPHeaders = {}; } @@ -87668,7 +87668,7 @@ var require_Clients = __commonJS({ let transferProgress = 0; const blockList = []; const scheduler = new storage_common_1.BufferScheduler( - stream2, + stream, bufferSize, maxConcurrency, async (body, length) => { @@ -88589,14 +88589,14 @@ var require_Mutex = __commonJS({ * @param key - lock key */ static async lock(key) { - return new Promise((resolve8) => { + return new Promise((resolve7) => { if (this.keys[key] === void 0 || this.keys[key] === MutexLockStatus.UNLOCKED) { this.keys[key] = MutexLockStatus.LOCKED; - resolve8(); + resolve7(); } else { this.onUnlockEvent(key, () => { this.keys[key] = MutexLockStatus.LOCKED; - resolve8(); + resolve7(); }); } }); @@ -88607,12 +88607,12 @@ var require_Mutex = __commonJS({ * @param key - */ static async unlock(key) { - return new Promise((resolve8) => { + return new Promise((resolve7) => { if (this.keys[key] === MutexLockStatus.LOCKED) { this.emitUnlockEvent(key); } delete this.keys[key]; - resolve8(); + resolve7(); }); } static keys = {}; @@ -88834,8 +88834,8 @@ var require_BlobBatch = __commonJS({ if (this.operationCount >= constants_js_1.BATCH_MAX_REQUEST) { throw new RangeError(`Cannot exceed ${constants_js_1.BATCH_MAX_REQUEST} sub requests in a single batch`); } - const path16 = (0, utils_common_js_1.getURLPath)(subRequest.url); - if (!path16 || path16 === "") { + const path13 = (0, utils_common_js_1.getURLPath)(subRequest.url); + if (!path13 || path13 === "") { throw new RangeError(`Invalid url for sub request: '${subRequest.url}'`); } } @@ -88913,8 +88913,8 @@ var require_BlobBatchClient = __commonJS({ pipeline = (0, Pipeline_js_1.newPipeline)(credentialOrPipeline, options); } const storageClientContext = new StorageContextClient_js_1.StorageContextClient(url2, (0, Pipeline_js_1.getCoreClientOptions)(pipeline)); - const path16 = (0, utils_common_js_1.getURLPath)(url2); - if (path16 && path16 !== "/") { + const path13 = (0, utils_common_js_1.getURLPath)(url2); + if (path13 && path13 !== "/") { this.serviceOrContainerContext = storageClientContext.container; } else { this.serviceOrContainerContext = storageClientContext.service; @@ -90215,7 +90215,7 @@ var require_ContainerClient = __commonJS({ * @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. */ generateSasUrl(options) { - return new Promise((resolve8) => { + return new Promise((resolve7) => { if (!(this.credential instanceof StorageSharedKeyCredential_js_1.StorageSharedKeyCredential)) { throw new RangeError("Can only generate the SAS when the client is initialized with a shared key credential"); } @@ -90223,7 +90223,7 @@ var require_ContainerClient = __commonJS({ containerName: this._containerName, ...options }, this.credential).toString(); - resolve8((0, utils_common_js_1.appendToURLQuery)(this.url, sas)); + resolve7((0, utils_common_js_1.appendToURLQuery)(this.url, sas)); }); } /** @@ -90258,12 +90258,12 @@ var require_ContainerClient = __commonJS({ * @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. 
*/ generateUserDelegationSasUrl(options, userDelegationKey) { - return new Promise((resolve8) => { + return new Promise((resolve7) => { const sas = (0, BlobSASSignatureValues_js_1.generateBlobSASQueryParameters)({ containerName: this._containerName, ...options }, userDelegationKey, this.accountName).toString(); - resolve8((0, utils_common_js_1.appendToURLQuery)(this.url, sas)); + resolve7((0, utils_common_js_1.appendToURLQuery)(this.url, sas)); }); } /** @@ -91659,11 +91659,11 @@ var require_uploadUtils = __commonJS({ })(); var __awaiter2 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { function adopt(value) { - return value instanceof P ? value : new P(function(resolve8) { - resolve8(value); + return value instanceof P ? value : new P(function(resolve7) { + resolve7(value); }); } - return new (P || (P = Promise))(function(resolve8, reject) { + return new (P || (P = Promise))(function(resolve7, reject) { function fulfilled(value) { try { step(generator.next(value)); @@ -91679,7 +91679,7 @@ var require_uploadUtils = __commonJS({ } } function step(result) { - result.done ? resolve8(result.value) : adopt(result.value).then(fulfilled, rejected); + result.done ? resolve7(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); @@ -91846,11 +91846,11 @@ var require_requestUtils = __commonJS({ })(); var __awaiter2 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { function adopt(value) { - return value instanceof P ? value : new P(function(resolve8) { - resolve8(value); + return value instanceof P ? value : new P(function(resolve7) { + resolve7(value); }); } - return new (P || (P = Promise))(function(resolve8, reject) { + return new (P || (P = Promise))(function(resolve7, reject) { function fulfilled(value) { try { step(generator.next(value)); @@ -91866,7 +91866,7 @@ var require_requestUtils = __commonJS({ } } function step(result) { - result.done ? resolve8(result.value) : adopt(result.value).then(fulfilled, rejected); + result.done ? resolve7(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); @@ -91906,7 +91906,7 @@ var require_requestUtils = __commonJS({ } function sleep(milliseconds) { return __awaiter2(this, void 0, void 0, function* () { - return new Promise((resolve8) => setTimeout(resolve8, milliseconds)); + return new Promise((resolve7) => setTimeout(resolve7, milliseconds)); }); } function retry2(name_1, method_1, getStatusCode_1) { @@ -92167,11 +92167,11 @@ var require_downloadUtils = __commonJS({ })(); var __awaiter2 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { function adopt(value) { - return value instanceof P ? value : new P(function(resolve8) { - resolve8(value); + return value instanceof P ? value : new P(function(resolve7) { + resolve7(value); }); } - return new (P || (P = Promise))(function(resolve8, reject) { + return new (P || (P = Promise))(function(resolve7, reject) { function fulfilled(value) { try { step(generator.next(value)); @@ -92187,7 +92187,7 @@ var require_downloadUtils = __commonJS({ } } function step(result) { - result.done ? resolve8(result.value) : adopt(result.value).then(fulfilled, rejected); + result.done ? 
resolve7(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); @@ -92201,8 +92201,8 @@ var require_downloadUtils = __commonJS({ var http_client_1 = require_lib(); var storage_blob_1 = require_commonjs15(); var buffer = __importStar2(require("buffer")); - var fs17 = __importStar2(require("fs")); - var stream2 = __importStar2(require("stream")); + var fs14 = __importStar2(require("fs")); + var stream = __importStar2(require("stream")); var util = __importStar2(require("util")); var utils = __importStar2(require_cacheUtils()); var constants_1 = require_constants12(); @@ -92210,7 +92210,7 @@ var require_downloadUtils = __commonJS({ var abort_controller_1 = require_dist4(); function pipeResponseToStream(response, output) { return __awaiter2(this, void 0, void 0, function* () { - const pipeline = util.promisify(stream2.pipeline); + const pipeline = util.promisify(stream.pipeline); yield pipeline(response.message, output); }); } @@ -92312,7 +92312,7 @@ var require_downloadUtils = __commonJS({ exports2.DownloadProgress = DownloadProgress; function downloadCacheHttpClient(archiveLocation, archivePath) { return __awaiter2(this, void 0, void 0, function* () { - const writeStream = fs17.createWriteStream(archivePath); + const writeStream = fs14.createWriteStream(archivePath); const httpClient = new http_client_1.HttpClient("actions/cache"); const downloadResponse = yield (0, requestUtils_1.retryHttpClientResponse)("downloadCache", () => __awaiter2(this, void 0, void 0, function* () { return httpClient.get(archiveLocation); @@ -92337,7 +92337,7 @@ var require_downloadUtils = __commonJS({ function downloadCacheHttpClientConcurrent(archiveLocation, archivePath, options) { return __awaiter2(this, void 0, void 0, function* () { var _a; - const archiveDescriptor = yield fs17.promises.open(archivePath, "w"); + const archiveDescriptor = yield fs14.promises.open(archivePath, "w"); const httpClient = new http_client_1.HttpClient("actions/cache", void 0, { socketTimeout: options.timeoutInMs, keepAlive: true @@ -92453,7 +92453,7 @@ var require_downloadUtils = __commonJS({ } else { const maxSegmentSize = Math.min(134217728, buffer.constants.MAX_LENGTH); const downloadProgress = new DownloadProgress(contentLength); - const fd = fs17.openSync(archivePath, "w"); + const fd = fs14.openSync(archivePath, "w"); try { downloadProgress.startDisplayTimer(); const controller = new abort_controller_1.AbortController(); @@ -92471,20 +92471,20 @@ var require_downloadUtils = __commonJS({ controller.abort(); throw new Error("Aborting cache download as the download time exceeded the timeout."); } else if (Buffer.isBuffer(result)) { - fs17.writeFileSync(fd, result); + fs14.writeFileSync(fd, result); } } } finally { downloadProgress.stopDisplayTimer(); - fs17.closeSync(fd); + fs14.closeSync(fd); } } }); } var promiseWithTimeout = (timeoutMs, promise) => __awaiter2(void 0, void 0, void 0, function* () { let timeoutHandle; - const timeoutPromise = new Promise((resolve8) => { - timeoutHandle = setTimeout(() => resolve8("timeout"), timeoutMs); + const timeoutPromise = new Promise((resolve7) => { + timeoutHandle = setTimeout(() => resolve7("timeout"), timeoutMs); }); return Promise.race([promise, timeoutPromise]).then((result) => { clearTimeout(timeoutHandle); @@ -92765,11 +92765,11 @@ var require_cacheHttpClient = __commonJS({ })(); var __awaiter2 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { function adopt(value) { 
- return value instanceof P ? value : new P(function(resolve8) { - resolve8(value); + return value instanceof P ? value : new P(function(resolve7) { + resolve7(value); }); } - return new (P || (P = Promise))(function(resolve8, reject) { + return new (P || (P = Promise))(function(resolve7, reject) { function fulfilled(value) { try { step(generator.next(value)); @@ -92785,7 +92785,7 @@ var require_cacheHttpClient = __commonJS({ } } function step(result) { - result.done ? resolve8(result.value) : adopt(result.value).then(fulfilled, rejected); + result.done ? resolve7(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); @@ -92798,7 +92798,7 @@ var require_cacheHttpClient = __commonJS({ var core15 = __importStar2(require_core()); var http_client_1 = require_lib(); var auth_1 = require_auth(); - var fs17 = __importStar2(require("fs")); + var fs14 = __importStar2(require("fs")); var url_1 = require("url"); var utils = __importStar2(require_cacheUtils()); var uploadUtils_1 = require_uploadUtils(); @@ -92933,7 +92933,7 @@ Other caches with similar key:`); return __awaiter2(this, void 0, void 0, function* () { const fileSize = utils.getArchiveFileSizeInBytes(archivePath); const resourceUrl = getCacheApiUrl(`caches/${cacheId.toString()}`); - const fd = fs17.openSync(archivePath, "r"); + const fd = fs14.openSync(archivePath, "r"); const uploadOptions = (0, options_1.getUploadOptions)(options); const concurrency = utils.assertDefined("uploadConcurrency", uploadOptions.uploadConcurrency); const maxChunkSize = utils.assertDefined("uploadChunkSize", uploadOptions.uploadChunkSize); @@ -92947,7 +92947,7 @@ Other caches with similar key:`); const start = offset; const end = offset + chunkSize - 1; offset += maxChunkSize; - yield uploadChunk(httpClient, resourceUrl, () => fs17.createReadStream(archivePath, { + yield uploadChunk(httpClient, resourceUrl, () => fs14.createReadStream(archivePath, { fd, start, end, @@ -92958,7 +92958,7 @@ Other caches with similar key:`); } }))); } finally { - fs17.closeSync(fd); + fs14.closeSync(fd); } return; }); @@ -96255,8 +96255,8 @@ var require_deferred = __commonJS({ */ constructor(preventUnhandledRejectionWarning = true) { this._state = DeferredState.PENDING; - this._promise = new Promise((resolve8, reject) => { - this._resolve = resolve8; + this._promise = new Promise((resolve7, reject) => { + this._resolve = resolve7; this._reject = reject; }); if (preventUnhandledRejectionWarning) { @@ -96471,11 +96471,11 @@ var require_unary_call = __commonJS({ "use strict"; var __awaiter2 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { function adopt(value) { - return value instanceof P ? value : new P(function(resolve8) { - resolve8(value); + return value instanceof P ? value : new P(function(resolve7) { + resolve7(value); }); } - return new (P || (P = Promise))(function(resolve8, reject) { + return new (P || (P = Promise))(function(resolve7, reject) { function fulfilled(value) { try { step(generator.next(value)); @@ -96491,7 +96491,7 @@ var require_unary_call = __commonJS({ } } function step(result) { - result.done ? resolve8(result.value) : adopt(result.value).then(fulfilled, rejected); + result.done ? 
resolve7(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); @@ -96540,11 +96540,11 @@ var require_server_streaming_call = __commonJS({ "use strict"; var __awaiter2 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { function adopt(value) { - return value instanceof P ? value : new P(function(resolve8) { - resolve8(value); + return value instanceof P ? value : new P(function(resolve7) { + resolve7(value); }); } - return new (P || (P = Promise))(function(resolve8, reject) { + return new (P || (P = Promise))(function(resolve7, reject) { function fulfilled(value) { try { step(generator.next(value)); @@ -96560,7 +96560,7 @@ var require_server_streaming_call = __commonJS({ } } function step(result) { - result.done ? resolve8(result.value) : adopt(result.value).then(fulfilled, rejected); + result.done ? resolve7(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); @@ -96610,11 +96610,11 @@ var require_client_streaming_call = __commonJS({ "use strict"; var __awaiter2 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { function adopt(value) { - return value instanceof P ? value : new P(function(resolve8) { - resolve8(value); + return value instanceof P ? value : new P(function(resolve7) { + resolve7(value); }); } - return new (P || (P = Promise))(function(resolve8, reject) { + return new (P || (P = Promise))(function(resolve7, reject) { function fulfilled(value) { try { step(generator.next(value)); @@ -96630,7 +96630,7 @@ var require_client_streaming_call = __commonJS({ } } function step(result) { - result.done ? resolve8(result.value) : adopt(result.value).then(fulfilled, rejected); + result.done ? resolve7(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); @@ -96679,11 +96679,11 @@ var require_duplex_streaming_call = __commonJS({ "use strict"; var __awaiter2 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { function adopt(value) { - return value instanceof P ? value : new P(function(resolve8) { - resolve8(value); + return value instanceof P ? value : new P(function(resolve7) { + resolve7(value); }); } - return new (P || (P = Promise))(function(resolve8, reject) { + return new (P || (P = Promise))(function(resolve7, reject) { function fulfilled(value) { try { step(generator.next(value)); @@ -96699,7 +96699,7 @@ var require_duplex_streaming_call = __commonJS({ } } function step(result) { - result.done ? resolve8(result.value) : adopt(result.value).then(fulfilled, rejected); + result.done ? resolve7(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); @@ -96747,11 +96747,11 @@ var require_test_transport = __commonJS({ "use strict"; var __awaiter2 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { function adopt(value) { - return value instanceof P ? value : new P(function(resolve8) { - resolve8(value); + return value instanceof P ? 
value : new P(function(resolve7) { + resolve7(value); }); } - return new (P || (P = Promise))(function(resolve8, reject) { + return new (P || (P = Promise))(function(resolve7, reject) { function fulfilled(value) { try { step(generator.next(value)); @@ -96767,7 +96767,7 @@ var require_test_transport = __commonJS({ } } function step(result) { - result.done ? resolve8(result.value) : adopt(result.value).then(fulfilled, rejected); + result.done ? resolve7(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); @@ -96848,7 +96848,7 @@ var require_test_transport = __commonJS({ * The returned promise resolves when the stream is closed. It should * not reject. If it does, code is broken. */ - streamResponses(method, stream2, abort) { + streamResponses(method, stream, abort) { return __awaiter2(this, void 0, void 0, function* () { const messages = []; if (this.data.response === void 0) { @@ -96865,31 +96865,31 @@ var require_test_transport = __commonJS({ try { yield delay2(this.responseDelay, abort)(void 0); } catch (error3) { - stream2.notifyError(error3); + stream.notifyError(error3); return; } if (this.data.response instanceof rpc_error_1.RpcError) { - stream2.notifyError(this.data.response); + stream.notifyError(this.data.response); return; } for (let msg of messages) { - stream2.notifyMessage(msg); + stream.notifyMessage(msg); try { yield delay2(this.betweenResponseDelay, abort)(void 0); } catch (error3) { - stream2.notifyError(error3); + stream.notifyError(error3); return; } } if (this.data.status instanceof rpc_error_1.RpcError) { - stream2.notifyError(this.data.status); + stream.notifyError(this.data.status); return; } if (this.data.trailers instanceof rpc_error_1.RpcError) { - stream2.notifyError(this.data.trailers); + stream.notifyError(this.data.trailers); return; } - stream2.notifyComplete(); + stream.notifyComplete(); }); } // Creates a promise for response status from the mock data. @@ -96964,11 +96964,11 @@ var require_test_transport = __commonJS({ responseTrailer: "test" }; function delay2(ms, abort) { - return (v) => new Promise((resolve8, reject) => { + return (v) => new Promise((resolve7, reject) => { if (abort === null || abort === void 0 ? void 0 : abort.aborted) { reject(new rpc_error_1.RpcError("user cancel", "CANCELLED")); } else { - const id = setTimeout(() => resolve8(v), ms); + const id = setTimeout(() => resolve7(v), ms); if (abort) { abort.addEventListener("abort", (ev) => { clearTimeout(id); @@ -97966,11 +97966,11 @@ var require_cacheTwirpClient = __commonJS({ "use strict"; var __awaiter2 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { function adopt(value) { - return value instanceof P ? value : new P(function(resolve8) { - resolve8(value); + return value instanceof P ? value : new P(function(resolve7) { + resolve7(value); }); } - return new (P || (P = Promise))(function(resolve8, reject) { + return new (P || (P = Promise))(function(resolve7, reject) { function fulfilled(value) { try { step(generator.next(value)); @@ -97986,7 +97986,7 @@ var require_cacheTwirpClient = __commonJS({ } } function step(result) { - result.done ? resolve8(result.value) : adopt(result.value).then(fulfilled, rejected); + result.done ? 
resolve7(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); @@ -98126,7 +98126,7 @@ var require_cacheTwirpClient = __commonJS({ } sleep(milliseconds) { return __awaiter2(this, void 0, void 0, function* () { - return new Promise((resolve8) => setTimeout(resolve8, milliseconds)); + return new Promise((resolve7) => setTimeout(resolve7, milliseconds)); }); } getExponentialRetryTimeMilliseconds(attempt) { @@ -98191,11 +98191,11 @@ var require_tar = __commonJS({ })(); var __awaiter2 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { function adopt(value) { - return value instanceof P ? value : new P(function(resolve8) { - resolve8(value); + return value instanceof P ? value : new P(function(resolve7) { + resolve7(value); }); } - return new (P || (P = Promise))(function(resolve8, reject) { + return new (P || (P = Promise))(function(resolve7, reject) { function fulfilled(value) { try { step(generator.next(value)); @@ -98211,7 +98211,7 @@ var require_tar = __commonJS({ } } function step(result) { - result.done ? resolve8(result.value) : adopt(result.value).then(fulfilled, rejected); + result.done ? resolve7(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); @@ -98223,7 +98223,7 @@ var require_tar = __commonJS({ var exec_1 = require_exec(); var io7 = __importStar2(require_io()); var fs_1 = require("fs"); - var path16 = __importStar2(require("path")); + var path13 = __importStar2(require("path")); var utils = __importStar2(require_cacheUtils()); var constants_1 = require_constants12(); var IS_WINDOWS = process.platform === "win32"; @@ -98269,13 +98269,13 @@ var require_tar = __commonJS({ const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD && compressionMethod !== constants_1.CompressionMethod.Gzip && IS_WINDOWS; switch (type2) { case "create": - args.push("--posix", "-cf", BSD_TAR_ZSTD ? tarFile : cacheFileName.replace(new RegExp(`\\${path16.sep}`, "g"), "/"), "--exclude", BSD_TAR_ZSTD ? tarFile : cacheFileName.replace(new RegExp(`\\${path16.sep}`, "g"), "/"), "-P", "-C", workingDirectory.replace(new RegExp(`\\${path16.sep}`, "g"), "/"), "--files-from", constants_1.ManifestFilename); + args.push("--posix", "-cf", BSD_TAR_ZSTD ? tarFile : cacheFileName.replace(new RegExp(`\\${path13.sep}`, "g"), "/"), "--exclude", BSD_TAR_ZSTD ? tarFile : cacheFileName.replace(new RegExp(`\\${path13.sep}`, "g"), "/"), "-P", "-C", workingDirectory.replace(new RegExp(`\\${path13.sep}`, "g"), "/"), "--files-from", constants_1.ManifestFilename); break; case "extract": - args.push("-xf", BSD_TAR_ZSTD ? tarFile : archivePath.replace(new RegExp(`\\${path16.sep}`, "g"), "/"), "-P", "-C", workingDirectory.replace(new RegExp(`\\${path16.sep}`, "g"), "/")); + args.push("-xf", BSD_TAR_ZSTD ? tarFile : archivePath.replace(new RegExp(`\\${path13.sep}`, "g"), "/"), "-P", "-C", workingDirectory.replace(new RegExp(`\\${path13.sep}`, "g"), "/")); break; case "list": - args.push("-tf", BSD_TAR_ZSTD ? tarFile : archivePath.replace(new RegExp(`\\${path16.sep}`, "g"), "/"), "-P"); + args.push("-tf", BSD_TAR_ZSTD ? tarFile : archivePath.replace(new RegExp(`\\${path13.sep}`, "g"), "/"), "-P"); break; } if (tarPath.type === constants_1.ArchiveToolType.GNU) { @@ -98321,7 +98321,7 @@ var require_tar = __commonJS({ return BSD_TAR_ZSTD ? 
[ "zstd -d --long=30 --force -o", constants_1.TarFilename, - archivePath.replace(new RegExp(`\\${path16.sep}`, "g"), "/") + archivePath.replace(new RegExp(`\\${path13.sep}`, "g"), "/") ] : [ "--use-compress-program", IS_WINDOWS ? '"zstd -d --long=30"' : "unzstd --long=30" @@ -98330,7 +98330,7 @@ var require_tar = __commonJS({ return BSD_TAR_ZSTD ? [ "zstd -d --force -o", constants_1.TarFilename, - archivePath.replace(new RegExp(`\\${path16.sep}`, "g"), "/") + archivePath.replace(new RegExp(`\\${path13.sep}`, "g"), "/") ] : ["--use-compress-program", IS_WINDOWS ? '"zstd -d"' : "unzstd"]; default: return ["-z"]; @@ -98345,7 +98345,7 @@ var require_tar = __commonJS({ case constants_1.CompressionMethod.Zstd: return BSD_TAR_ZSTD ? [ "zstd -T0 --long=30 --force -o", - cacheFileName.replace(new RegExp(`\\${path16.sep}`, "g"), "/"), + cacheFileName.replace(new RegExp(`\\${path13.sep}`, "g"), "/"), constants_1.TarFilename ] : [ "--use-compress-program", @@ -98354,7 +98354,7 @@ var require_tar = __commonJS({ case constants_1.CompressionMethod.ZstdWithoutLong: return BSD_TAR_ZSTD ? [ "zstd -T0 --force -o", - cacheFileName.replace(new RegExp(`\\${path16.sep}`, "g"), "/"), + cacheFileName.replace(new RegExp(`\\${path13.sep}`, "g"), "/"), constants_1.TarFilename ] : ["--use-compress-program", IS_WINDOWS ? '"zstd -T0"' : "zstdmt"]; default: @@ -98392,7 +98392,7 @@ var require_tar = __commonJS({ } function createTar(archiveFolder, sourceDirectories, compressionMethod) { return __awaiter2(this, void 0, void 0, function* () { - (0, fs_1.writeFileSync)(path16.join(archiveFolder, constants_1.ManifestFilename), sourceDirectories.join("\n")); + (0, fs_1.writeFileSync)(path13.join(archiveFolder, constants_1.ManifestFilename), sourceDirectories.join("\n")); const commands = yield getCommands(compressionMethod, "create"); yield execCommands(commands, archiveFolder); }); @@ -98443,11 +98443,11 @@ var require_cache5 = __commonJS({ })(); var __awaiter2 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { function adopt(value) { - return value instanceof P ? value : new P(function(resolve8) { - resolve8(value); + return value instanceof P ? value : new P(function(resolve7) { + resolve7(value); }); } - return new (P || (P = Promise))(function(resolve8, reject) { + return new (P || (P = Promise))(function(resolve7, reject) { function fulfilled(value) { try { step(generator.next(value)); @@ -98463,7 +98463,7 @@ var require_cache5 = __commonJS({ } } function step(result) { - result.done ? resolve8(result.value) : adopt(result.value).then(fulfilled, rejected); + result.done ? 
resolve7(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); @@ -98474,7 +98474,7 @@ var require_cache5 = __commonJS({ exports2.restoreCache = restoreCache4; exports2.saveCache = saveCache4; var core15 = __importStar2(require_core()); - var path16 = __importStar2(require("path")); + var path13 = __importStar2(require("path")); var utils = __importStar2(require_cacheUtils()); var cacheHttpClient = __importStar2(require_cacheHttpClient()); var cacheTwirpClient = __importStar2(require_cacheTwirpClient()); @@ -98569,7 +98569,7 @@ var require_cache5 = __commonJS({ core15.info("Lookup only - skipping download"); return cacheEntry.cacheKey; } - archivePath = path16.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); + archivePath = path13.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); core15.debug(`Archive Path: ${archivePath}`); yield cacheHttpClient.downloadCache(cacheEntry.archiveLocation, archivePath, options); if (core15.isDebug()) { @@ -98638,7 +98638,7 @@ var require_cache5 = __commonJS({ core15.info("Lookup only - skipping download"); return response.matchedKey; } - archivePath = path16.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); + archivePath = path13.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); core15.debug(`Archive path: ${archivePath}`); core15.debug(`Starting download of archive to: ${archivePath}`); yield cacheHttpClient.downloadCache(response.signedDownloadUrl, archivePath, options); @@ -98700,7 +98700,7 @@ var require_cache5 = __commonJS({ throw new Error(`Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`); } const archiveFolder = yield utils.createTempDirectory(); - const archivePath = path16.join(archiveFolder, utils.getCacheFileName(compressionMethod)); + const archivePath = path13.join(archiveFolder, utils.getCacheFileName(compressionMethod)); core15.debug(`Archive Path: ${archivePath}`); try { yield (0, tar_1.createTar)(archiveFolder, cachePaths, compressionMethod); @@ -98764,7 +98764,7 @@ var require_cache5 = __commonJS({ throw new Error(`Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`); } const archiveFolder = yield utils.createTempDirectory(); - const archivePath = path16.join(archiveFolder, utils.getCacheFileName(compressionMethod)); + const archivePath = path13.join(archiveFolder, utils.getCacheFileName(compressionMethod)); core15.debug(`Archive Path: ${archivePath}`); try { yield (0, tar_1.createTar)(archiveFolder, cachePaths, compressionMethod); @@ -98881,11 +98881,11 @@ var require_manifest = __commonJS({ })(); var __awaiter2 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { function adopt(value) { - return value instanceof P ? value : new P(function(resolve8) { - resolve8(value); + return value instanceof P ? value : new P(function(resolve7) { + resolve7(value); }); } - return new (P || (P = Promise))(function(resolve8, reject) { + return new (P || (P = Promise))(function(resolve7, reject) { function fulfilled(value) { try { step(generator.next(value)); @@ -98901,7 +98901,7 @@ var require_manifest = __commonJS({ } } function step(result) { - result.done ? resolve8(result.value) : adopt(result.value).then(fulfilled, rejected); + result.done ? 
resolve7(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); @@ -98912,12 +98912,12 @@ var require_manifest = __commonJS({ exports2._readLinuxVersionFile = _readLinuxVersionFile; var semver9 = __importStar2(require_semver2()); var core_1 = require_core(); - var os5 = require("os"); + var os4 = require("os"); var cp = require("child_process"); - var fs17 = require("fs"); + var fs14 = require("fs"); function _findMatch(versionSpec, stable, candidates, archFilter) { return __awaiter2(this, void 0, void 0, function* () { - const platFilter = os5.platform(); + const platFilter = os4.platform(); let result; let match; let file; @@ -98953,7 +98953,7 @@ var require_manifest = __commonJS({ }); } function _getOsVersion() { - const plat = os5.platform(); + const plat = os4.platform(); let version = ""; if (plat === "darwin") { version = cp.execSync("sw_vers -productVersion").toString(); @@ -98976,10 +98976,10 @@ var require_manifest = __commonJS({ const lsbReleaseFile = "/etc/lsb-release"; const osReleaseFile = "/etc/os-release"; let contents = ""; - if (fs17.existsSync(lsbReleaseFile)) { - contents = fs17.readFileSync(lsbReleaseFile).toString(); - } else if (fs17.existsSync(osReleaseFile)) { - contents = fs17.readFileSync(osReleaseFile).toString(); + if (fs14.existsSync(lsbReleaseFile)) { + contents = fs14.readFileSync(lsbReleaseFile).toString(); + } else if (fs14.existsSync(osReleaseFile)) { + contents = fs14.readFileSync(osReleaseFile).toString(); } return contents; } @@ -99029,11 +99029,11 @@ var require_retry_helper = __commonJS({ })(); var __awaiter2 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { function adopt(value) { - return value instanceof P ? value : new P(function(resolve8) { - resolve8(value); + return value instanceof P ? value : new P(function(resolve7) { + resolve7(value); }); } - return new (P || (P = Promise))(function(resolve8, reject) { + return new (P || (P = Promise))(function(resolve7, reject) { function fulfilled(value) { try { step(generator.next(value)); @@ -99049,7 +99049,7 @@ var require_retry_helper = __commonJS({ } } function step(result) { - result.done ? resolve8(result.value) : adopt(result.value).then(fulfilled, rejected); + result.done ? resolve7(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); @@ -99094,7 +99094,7 @@ var require_retry_helper = __commonJS({ } sleep(seconds) { return __awaiter2(this, void 0, void 0, function* () { - return new Promise((resolve8) => setTimeout(resolve8, seconds * 1e3)); + return new Promise((resolve7) => setTimeout(resolve7, seconds * 1e3)); }); } }; @@ -99145,11 +99145,11 @@ var require_tool_cache = __commonJS({ })(); var __awaiter2 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { function adopt(value) { - return value instanceof P ? value : new P(function(resolve8) { - resolve8(value); + return value instanceof P ? value : new P(function(resolve7) { + resolve7(value); }); } - return new (P || (P = Promise))(function(resolve8, reject) { + return new (P || (P = Promise))(function(resolve7, reject) { function fulfilled(value) { try { step(generator.next(value)); @@ -99165,7 +99165,7 @@ var require_tool_cache = __commonJS({ } } function step(result) { - result.done ? resolve8(result.value) : adopt(result.value).then(fulfilled, rejected); + result.done ? 
resolve7(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); @@ -99188,13 +99188,13 @@ var require_tool_cache = __commonJS({ var core15 = __importStar2(require_core()); var io7 = __importStar2(require_io()); var crypto3 = __importStar2(require("crypto")); - var fs17 = __importStar2(require("fs")); + var fs14 = __importStar2(require("fs")); var mm = __importStar2(require_manifest()); - var os5 = __importStar2(require("os")); - var path16 = __importStar2(require("path")); + var os4 = __importStar2(require("os")); + var path13 = __importStar2(require("path")); var httpm = __importStar2(require_lib()); var semver9 = __importStar2(require_semver2()); - var stream2 = __importStar2(require("stream")); + var stream = __importStar2(require("stream")); var util = __importStar2(require("util")); var assert_1 = require("assert"); var exec_1 = require_exec(); @@ -99212,8 +99212,8 @@ var require_tool_cache = __commonJS({ var userAgent2 = "actions/tool-cache"; function downloadTool2(url2, dest, auth2, headers) { return __awaiter2(this, void 0, void 0, function* () { - dest = dest || path16.join(_getTempDirectory(), crypto3.randomUUID()); - yield io7.mkdirP(path16.dirname(dest)); + dest = dest || path13.join(_getTempDirectory(), crypto3.randomUUID()); + yield io7.mkdirP(path13.dirname(dest)); core15.debug(`Downloading ${url2}`); core15.debug(`Destination ${dest}`); const maxAttempts = 3; @@ -99234,7 +99234,7 @@ var require_tool_cache = __commonJS({ } function downloadToolAttempt(url2, dest, auth2, headers) { return __awaiter2(this, void 0, void 0, function* () { - if (fs17.existsSync(dest)) { + if (fs14.existsSync(dest)) { throw new Error(`Destination file path ${dest} already exists`); } const http = new httpm.HttpClient(userAgent2, [], { @@ -99253,12 +99253,12 @@ var require_tool_cache = __commonJS({ core15.debug(`Failed to download from "${url2}". 
Code(${response.message.statusCode}) Message(${response.message.statusMessage})`); throw err; } - const pipeline = util.promisify(stream2.pipeline); + const pipeline = util.promisify(stream.pipeline); const responseMessageFactory = _getGlobal("TEST_DOWNLOAD_TOOL_RESPONSE_MESSAGE_FACTORY", () => response.message); const readStream = responseMessageFactory(); let succeeded = false; try { - yield pipeline(readStream, fs17.createWriteStream(dest)); + yield pipeline(readStream, fs14.createWriteStream(dest)); core15.debug("download complete"); succeeded = true; return dest; @@ -99303,7 +99303,7 @@ var require_tool_cache = __commonJS({ process.chdir(originalCwd); } } else { - const escapedScript = path16.join(__dirname, "..", "scripts", "Invoke-7zdec.ps1").replace(/'/g, "''").replace(/"|\n|\r/g, ""); + const escapedScript = path13.join(__dirname, "..", "scripts", "Invoke-7zdec.ps1").replace(/'/g, "''").replace(/"|\n|\r/g, ""); const escapedFile = file.replace(/'/g, "''").replace(/"|\n|\r/g, ""); const escapedTarget = dest.replace(/'/g, "''").replace(/"|\n|\r/g, ""); const command = `& '${escapedScript}' -Source '${escapedFile}' -Target '${escapedTarget}'`; @@ -99464,61 +99464,61 @@ var require_tool_cache = __commonJS({ yield (0, exec_1.exec)(`"${unzipPath}"`, args, { cwd: dest }); }); } - function cacheDir(sourceDir, tool, version, arch2) { + function cacheDir(sourceDir, tool, version, arch) { return __awaiter2(this, void 0, void 0, function* () { version = semver9.clean(version) || version; - arch2 = arch2 || os5.arch(); - core15.debug(`Caching tool ${tool} ${version} ${arch2}`); + arch = arch || os4.arch(); + core15.debug(`Caching tool ${tool} ${version} ${arch}`); core15.debug(`source dir: ${sourceDir}`); - if (!fs17.statSync(sourceDir).isDirectory()) { + if (!fs14.statSync(sourceDir).isDirectory()) { throw new Error("sourceDir is not a directory"); } - const destPath = yield _createToolPath(tool, version, arch2); - for (const itemName of fs17.readdirSync(sourceDir)) { - const s = path16.join(sourceDir, itemName); + const destPath = yield _createToolPath(tool, version, arch); + for (const itemName of fs14.readdirSync(sourceDir)) { + const s = path13.join(sourceDir, itemName); yield io7.cp(s, destPath, { recursive: true }); } - _completeToolPath(tool, version, arch2); + _completeToolPath(tool, version, arch); return destPath; }); } - function cacheFile(sourceFile, targetFile, tool, version, arch2) { + function cacheFile(sourceFile, targetFile, tool, version, arch) { return __awaiter2(this, void 0, void 0, function* () { version = semver9.clean(version) || version; - arch2 = arch2 || os5.arch(); - core15.debug(`Caching tool ${tool} ${version} ${arch2}`); + arch = arch || os4.arch(); + core15.debug(`Caching tool ${tool} ${version} ${arch}`); core15.debug(`source file: ${sourceFile}`); - if (!fs17.statSync(sourceFile).isFile()) { + if (!fs14.statSync(sourceFile).isFile()) { throw new Error("sourceFile is not a file"); } - const destFolder = yield _createToolPath(tool, version, arch2); - const destPath = path16.join(destFolder, targetFile); + const destFolder = yield _createToolPath(tool, version, arch); + const destPath = path13.join(destFolder, targetFile); core15.debug(`destination file ${destPath}`); yield io7.cp(sourceFile, destPath); - _completeToolPath(tool, version, arch2); + _completeToolPath(tool, version, arch); return destFolder; }); } - function find2(toolName, versionSpec, arch2) { + function find2(toolName, versionSpec, arch) { if (!toolName) { throw new Error("toolName parameter is 
required"); } if (!versionSpec) { throw new Error("versionSpec parameter is required"); } - arch2 = arch2 || os5.arch(); + arch = arch || os4.arch(); if (!isExplicitVersion(versionSpec)) { - const localVersions = findAllVersions2(toolName, arch2); + const localVersions = findAllVersions2(toolName, arch); const match = evaluateVersions(localVersions, versionSpec); versionSpec = match; } let toolPath = ""; if (versionSpec) { versionSpec = semver9.clean(versionSpec) || ""; - const cachePath = path16.join(_getCacheDirectory(), toolName, versionSpec, arch2); + const cachePath = path13.join(_getCacheDirectory(), toolName, versionSpec, arch); core15.debug(`checking cache: ${cachePath}`); - if (fs17.existsSync(cachePath) && fs17.existsSync(`${cachePath}.complete`)) { - core15.debug(`Found tool in cache ${toolName} ${versionSpec} ${arch2}`); + if (fs14.existsSync(cachePath) && fs14.existsSync(`${cachePath}.complete`)) { + core15.debug(`Found tool in cache ${toolName} ${versionSpec} ${arch}`); toolPath = cachePath; } else { core15.debug("not found"); @@ -99526,16 +99526,16 @@ var require_tool_cache = __commonJS({ } return toolPath; } - function findAllVersions2(toolName, arch2) { + function findAllVersions2(toolName, arch) { const versions = []; - arch2 = arch2 || os5.arch(); - const toolPath = path16.join(_getCacheDirectory(), toolName); - if (fs17.existsSync(toolPath)) { - const children = fs17.readdirSync(toolPath); + arch = arch || os4.arch(); + const toolPath = path13.join(_getCacheDirectory(), toolName); + if (fs14.existsSync(toolPath)) { + const children = fs14.readdirSync(toolPath); for (const child of children) { if (isExplicitVersion(child)) { - const fullPath = path16.join(toolPath, child, arch2 || ""); - if (fs17.existsSync(fullPath) && fs17.existsSync(`${fullPath}.complete`)) { + const fullPath = path13.join(toolPath, child, arch || ""); + if (fs14.existsSync(fullPath) && fs14.existsSync(`${fullPath}.complete`)) { versions.push(child); } } @@ -99578,7 +99578,7 @@ var require_tool_cache = __commonJS({ }); } function findFromManifest(versionSpec_1, stable_1, manifest_1) { - return __awaiter2(this, arguments, void 0, function* (versionSpec, stable, manifest, archFilter = os5.arch()) { + return __awaiter2(this, arguments, void 0, function* (versionSpec, stable, manifest, archFilter = os4.arch()) { const match = yield mm._findMatch(versionSpec, stable, manifest, archFilter); return match; }); @@ -99586,15 +99586,15 @@ var require_tool_cache = __commonJS({ function _createExtractFolder(dest) { return __awaiter2(this, void 0, void 0, function* () { if (!dest) { - dest = path16.join(_getTempDirectory(), crypto3.randomUUID()); + dest = path13.join(_getTempDirectory(), crypto3.randomUUID()); } yield io7.mkdirP(dest); return dest; }); } - function _createToolPath(tool, version, arch2) { + function _createToolPath(tool, version, arch) { return __awaiter2(this, void 0, void 0, function* () { - const folderPath = path16.join(_getCacheDirectory(), tool, semver9.clean(version) || version, arch2 || ""); + const folderPath = path13.join(_getCacheDirectory(), tool, semver9.clean(version) || version, arch || ""); core15.debug(`destination ${folderPath}`); const markerPath = `${folderPath}.complete`; yield io7.rmRF(folderPath); @@ -99603,10 +99603,10 @@ var require_tool_cache = __commonJS({ return folderPath; }); } - function _completeToolPath(tool, version, arch2) { - const folderPath = path16.join(_getCacheDirectory(), tool, semver9.clean(version) || version, arch2 || ""); + function 
_completeToolPath(tool, version, arch) { + const folderPath = path13.join(_getCacheDirectory(), tool, semver9.clean(version) || version, arch || ""); const markerPath = `${folderPath}.complete`; - fs17.writeFileSync(markerPath, ""); + fs14.writeFileSync(markerPath, ""); core15.debug("finished caching tool"); } function isExplicitVersion(versionSpec) { @@ -100124,8 +100124,8 @@ var require_follow_redirects = __commonJS({ } return parsed; } - function resolveUrl(relative2, base) { - return useNativeURL ? new URL2(relative2, base) : parseUrl2(url2.resolve(base, relative2)); + function resolveUrl(relative, base) { + return useNativeURL ? new URL2(relative, base) : parseUrl2(url2.resolve(base, relative)); } function validateUrl(input) { if (/^\[/.test(input.hostname) && !/^\[[:0-9a-f]+\]$/i.test(input.hostname)) { @@ -103113,14 +103113,13 @@ __export(analyze_action_exports, { runPromise: () => runPromise }); module.exports = __toCommonJS(analyze_action_exports); -var fs16 = __toESM(require("fs")); +var fs13 = __toESM(require("fs")); var import_path4 = __toESM(require("path")); -var import_perf_hooks3 = require("perf_hooks"); +var import_perf_hooks2 = require("perf_hooks"); var core14 = __toESM(require_core()); // src/actions-util.ts var fs2 = __toESM(require("fs")); -var path2 = __toESM(require("path")); var core4 = __toESM(require_core()); var toolrunner = __toESM(require_toolrunner()); var github = __toESM(require_github()); @@ -103142,21 +103141,21 @@ async function getFolderSize(itemPath, options) { getFolderSize.loose = async (itemPath, options) => await core(itemPath, options); getFolderSize.strict = async (itemPath, options) => await core(itemPath, options, { strict: true }); async function core(rootItemPath, options = {}, returnType = {}) { - const fs17 = options.fs || await import("node:fs/promises"); + const fs14 = options.fs || await import("node:fs/promises"); let folderSize = 0n; const foundInos = /* @__PURE__ */ new Set(); const errors = []; await processItem(rootItemPath); async function processItem(itemPath) { if (options.ignore?.test(itemPath)) return; - const stats = returnType.strict ? await fs17.lstat(itemPath, { bigint: true }) : await fs17.lstat(itemPath, { bigint: true }).catch((error3) => errors.push(error3)); + const stats = returnType.strict ? await fs14.lstat(itemPath, { bigint: true }) : await fs14.lstat(itemPath, { bigint: true }).catch((error3) => errors.push(error3)); if (typeof stats !== "object") return; if (!foundInos.has(stats.ino)) { foundInos.add(stats.ino); folderSize += stats.size; } if (stats.isDirectory()) { - const directoryItems = returnType.strict ? await fs17.readdir(itemPath) : await fs17.readdir(itemPath).catch((error3) => errors.push(error3)); + const directoryItems = returnType.strict ? 
await fs14.readdir(itemPath) : await fs14.readdir(itemPath).catch((error3) => errors.push(error3)); if (typeof directoryItems !== "object") return; await Promise.all( directoryItems.map( @@ -105778,7 +105777,6 @@ var safeDump = renamed("safeDump", "dump"); // src/util.ts var semver = __toESM(require_semver2()); var BASE_DATABASE_OIDS_FILE_NAME = "base-database-oids.json"; -var BROKEN_VERSIONS = ["0.0.0-20211207"]; var GITHUB_DOTCOM_URL = "https://github.com"; var DEFAULT_RESERVED_RAM_SCALING_FACTOR = 0.05; var MINIMUM_CGROUP_MEMORY_LIMIT_BYTES = 1024 * 1024; @@ -106127,16 +106125,13 @@ async function bundleDb(config, language, codeql, dbName) { } async function delay(milliseconds, opts) { const { allowProcessExit } = opts || {}; - return new Promise((resolve8) => { - const timer = setTimeout(resolve8, milliseconds); + return new Promise((resolve7) => { + const timer = setTimeout(resolve7, milliseconds); if (allowProcessExit) { timer.unref(); } }); } -function isGoodVersion(versionSpec) { - return !BROKEN_VERSIONS.includes(versionSpec); -} function isInTestMode() { return process.env["CODEQL_ACTION_TEST_MODE" /* TEST_MODE */] === "true"; } @@ -106254,27 +106249,6 @@ function satisfiesGHESVersion(ghesVersion, range, defaultIfInvalid) { function cloneObject(obj) { return JSON.parse(JSON.stringify(obj)); } -async function cleanUpPath(file, name, logger) { - logger.debug(`Cleaning up ${name}.`); - try { - await fs.promises.rm(file, { - force: true, - recursive: true - }); - } catch (e) { - logger.warning(`Failed to clean up ${name}: ${e}.`); - } -} -async function isBinaryAccessible(binary2, logger) { - try { - await io.which(binary2, true); - logger.debug(`Found ${binary2}.`); - return true; - } catch (e) { - logger.debug(`Could not find ${binary2}: ${e}`); - return false; - } -} async function asyncSome(array, predicate) { const results = await Promise.all(array.map(predicate)); return results.some((result) => result); @@ -106308,15 +106282,6 @@ function getActionVersion() { function getWorkflowEventName() { return getRequiredEnvParam("GITHUB_EVENT_NAME"); } -function isRunningLocalAction() { - const relativeScriptPath = getRelativeScriptPath(); - return relativeScriptPath.startsWith("..") || path2.isAbsolute(relativeScriptPath); -} -function getRelativeScriptPath() { - const runnerTemp = getRequiredEnvParam("RUNNER_TEMP"); - const actionsDirectory = path2.join(path2.dirname(runnerTemp), "_actions"); - return path2.relative(actionsDirectory, __filename); -} function getWorkflowEvent() { const eventJsonFile = getRequiredEnvParam("GITHUB_EVENT_PATH"); try { @@ -106566,9 +106531,9 @@ var SarifScanOrder = [ ]; // src/analyze.ts -var fs12 = __toESM(require("fs")); -var path12 = __toESM(require("path")); -var import_perf_hooks2 = require("perf_hooks"); +var fs9 = __toESM(require("fs")); +var path9 = __toESM(require("path")); +var import_perf_hooks = require("perf_hooks"); var io5 = __toESM(require_io()); // src/autobuild.ts @@ -106707,14 +106672,6 @@ function getApiDetails() { function getApiClient() { return createApiClientWithDetails(getApiDetails()); } -function getAuthorizationHeaderFor(logger, apiDetails, url2) { - if (url2.startsWith(`${apiDetails.url}/`) || apiDetails.apiURL && url2.startsWith(`${apiDetails.apiURL}/`)) { - logger.debug(`Providing an authorization token.`); - return `token ${apiDetails.auth}`; - } - logger.debug(`Not using an authorization token.`); - return void 0; -} var cachedGitHubVersion = void 0; async function getGitHubVersionFromApi(apiClient, apiDetails) { if 
(parseGitHubUrl(apiDetails.url) === GITHUB_DOTCOM_URL) { @@ -106848,8 +106805,8 @@ function wrapApiConfigurationError(e) { } // src/codeql.ts -var fs11 = __toESM(require("fs")); -var path11 = __toESM(require("path")); +var fs8 = __toESM(require("fs")); +var path8 = __toESM(require("path")); var core10 = __toESM(require_core()); var toolrunner3 = __toESM(require_toolrunner()); @@ -107071,7 +107028,7 @@ function getCliConfigCategoryIfExists(cliError) { } function isUnsupportedPlatform() { return !SUPPORTED_PLATFORMS.some( - ([platform2, arch2]) => platform2 === process.platform && arch2 === process.arch + ([platform2, arch]) => platform2 === process.platform && arch === process.arch ); } function getUnsupportedPlatformError(cliError) { @@ -107097,7 +107054,7 @@ function wrapCliConfigurationError(cliError) { // src/config-utils.ts var fs6 = __toESM(require("fs")); -var path7 = __toESM(require("path")); +var path6 = __toESM(require("path")); // src/caching-utils.ts var crypto2 = __toESM(require("crypto")); @@ -107152,21 +107109,9 @@ async function withGroupAsync(groupName, f) { core7.endGroup(); } } -function formatDuration(durationMs) { - if (durationMs < 1e3) { - return `${durationMs}ms`; - } - if (durationMs < 60 * 1e3) { - return `${(durationMs / 1e3).toFixed(1)}s`; - } - const minutes = Math.floor(durationMs / (60 * 1e3)); - const seconds = Math.floor(durationMs % (60 * 1e3) / 1e3); - return `${minutes}m${seconds}s`; -} // src/diagnostics.ts var unwrittenDiagnostics = []; -var unwrittenDefaultLanguageDiagnostics = []; function makeDiagnostic(id, name, data = void 0) { return { ...data, @@ -107186,19 +107131,6 @@ function addDiagnostic(config, language, diagnostic) { unwrittenDiagnostics.push({ diagnostic, language }); } } -function addNoLanguageDiagnostic(config, diagnostic) { - if (config !== void 0) { - addDiagnostic( - config, - // Arbitrarily choose the first language. We could also choose all languages, but that - // increases the risk of misinterpreting the data. - config.languages[0], - diagnostic - ); - } else { - unwrittenDefaultLanguageDiagnostics.push(diagnostic); - } -} function writeDiagnostic(config, language, diagnostic) { const logger = getActionsLogger(); const databasePath = language ? 
getCodeQLDatabasePath(config, language) : config.dbLocation; @@ -107223,11 +107155,11 @@ function writeDiagnostic(config, language, diagnostic) { // src/diff-informed-analysis-utils.ts var fs5 = __toESM(require("fs")); -var path6 = __toESM(require("path")); +var path5 = __toESM(require("path")); // src/feature-flags.ts var fs4 = __toESM(require("fs")); -var path5 = __toESM(require("path")); +var path4 = __toESM(require("path")); var semver5 = __toESM(require_semver2()); // src/defaults.json @@ -107236,7 +107168,7 @@ var cliVersion = "2.24.2"; // src/overlay-database-utils.ts var fs3 = __toESM(require("fs")); -var path4 = __toESM(require("path")); +var path3 = __toESM(require("path")); var actionsCache = __toESM(require_cache5()); // src/git-utils.ts @@ -107364,8 +107296,8 @@ var getFileOidsUnderPath = async function(basePath) { const match = line.match(regex); if (match) { const oid = match[1]; - const path16 = decodeGitFilePath(match[2]); - fileOidMap[path16] = oid; + const path13 = decodeGitFilePath(match[2]); + fileOidMap[path13] = oid; } else { throw new Error(`Unexpected "git ls-files" output: ${line}`); } @@ -107471,7 +107403,7 @@ async function writeOverlayChangesFile(config, sourceRoot, logger) { `Found ${changedFiles.length} changed file(s) under ${sourceRoot}.` ); const changedFilesJson = JSON.stringify({ changes: changedFiles }); - const overlayChangesFile = path4.join( + const overlayChangesFile = path3.join( getTemporaryDirectory(), "overlay-changes.json" ); @@ -107640,7 +107572,6 @@ function isSupportedToolsFeature(versionInfo, feature) { // src/feature-flags.ts var DEFAULT_VERSION_FEATURE_FLAG_PREFIX = "default_codeql_version_"; var DEFAULT_VERSION_FEATURE_FLAG_SUFFIX = "_enabled"; -var CODEQL_VERSION_ZSTD_BUNDLE = "2.19.0"; var featureConfig = { ["allow_toolcache_input" /* AllowToolcacheInput */]: { defaultValue: false, @@ -107978,7 +107909,7 @@ var Features = class extends OfflineFeatures { super(logger); this.gitHubFeatureFlags = new GitHubFeatureFlags( repositoryNwo, - path5.join(tempDir, FEATURE_FLAGS_FILE_NAME), + path4.join(tempDir, FEATURE_FLAGS_FILE_NAME), logger ); } @@ -108211,7 +108142,7 @@ async function getDiffInformedAnalysisBranches(codeql, features, logger) { return branches; } function getDiffRangesJsonFilePath() { - return path6.join(getTemporaryDirectory(), "pr-diff-range.json"); + return path5.join(getTemporaryDirectory(), "pr-diff-range.json"); } function writeDiffRangesJsonFile(logger, ranges) { const jsonContents = JSON.stringify(ranges, null, 2); @@ -108291,7 +108222,7 @@ Error Response: ${JSON.stringify(error3.response, null, 2)}` } } function getDiffRanges(fileDiff, logger) { - const filename = path6.join(getRequiredInput("checkout_path"), fileDiff.filename).replaceAll(path6.sep, "/"); + const filename = path5.join(getRequiredInput("checkout_path"), fileDiff.filename).replaceAll(path5.sep, "/"); if (fileDiff.patch === void 0) { if (fileDiff.changes === 0) { return []; @@ -108498,7 +108429,7 @@ var OVERLAY_ANALYSIS_CODE_SCANNING_FEATURES = { swift: "overlay_analysis_code_scanning_swift" /* OverlayAnalysisCodeScanningSwift */ }; function getPathToParsedConfigFile(tempDir) { - return path7.join(tempDir, "config"); + return path6.join(tempDir, "config"); } async function getConfig(tempDir, logger) { const configFile = getPathToParsedConfigFile(tempDir); @@ -108556,953 +108487,27 @@ function getPrimaryAnalysisConfig(config) { } // src/setup-codeql.ts -var fs9 = __toESM(require("fs")); -var path9 = __toESM(require("path")); var toolcache3 = 
__toESM(require_tool_cache()); var import_fast_deep_equal = __toESM(require_fast_deep_equal()); var semver8 = __toESM(require_semver2()); -// node_modules/uuid/dist-node/stringify.js -var byteToHex = []; -for (let i = 0; i < 256; ++i) { - byteToHex.push((i + 256).toString(16).slice(1)); -} -function unsafeStringify(arr, offset = 0) { - return (byteToHex[arr[offset + 0]] + byteToHex[arr[offset + 1]] + byteToHex[arr[offset + 2]] + byteToHex[arr[offset + 3]] + "-" + byteToHex[arr[offset + 4]] + byteToHex[arr[offset + 5]] + "-" + byteToHex[arr[offset + 6]] + byteToHex[arr[offset + 7]] + "-" + byteToHex[arr[offset + 8]] + byteToHex[arr[offset + 9]] + "-" + byteToHex[arr[offset + 10]] + byteToHex[arr[offset + 11]] + byteToHex[arr[offset + 12]] + byteToHex[arr[offset + 13]] + byteToHex[arr[offset + 14]] + byteToHex[arr[offset + 15]]).toLowerCase(); -} - -// node_modules/uuid/dist-node/rng.js -var import_node_crypto = require("node:crypto"); -var rnds8Pool = new Uint8Array(256); -var poolPtr = rnds8Pool.length; -function rng() { - if (poolPtr > rnds8Pool.length - 16) { - (0, import_node_crypto.randomFillSync)(rnds8Pool); - poolPtr = 0; - } - return rnds8Pool.slice(poolPtr, poolPtr += 16); -} - -// node_modules/uuid/dist-node/native.js -var import_node_crypto2 = require("node:crypto"); -var native_default = { randomUUID: import_node_crypto2.randomUUID }; - -// node_modules/uuid/dist-node/v4.js -function _v4(options, buf, offset) { - options = options || {}; - const rnds = options.random ?? options.rng?.() ?? rng(); - if (rnds.length < 16) { - throw new Error("Random bytes length must be >= 16"); - } - rnds[6] = rnds[6] & 15 | 64; - rnds[8] = rnds[8] & 63 | 128; - if (buf) { - offset = offset || 0; - if (offset < 0 || offset + 16 > buf.length) { - throw new RangeError(`UUID byte range ${offset}:${offset + 15} is out of buffer bounds`); - } - for (let i = 0; i < 16; ++i) { - buf[offset + i] = rnds[i]; - } - return buf; - } - return unsafeStringify(rnds); -} -function v4(options, buf, offset) { - if (native_default.randomUUID && !buf && !options) { - return native_default.randomUUID(); - } - return _v4(options, buf, offset); -} -var v4_default = v4; - // src/tar.ts -var import_child_process = require("child_process"); -var fs7 = __toESM(require("fs")); -var stream = __toESM(require("stream")); var import_toolrunner = __toESM(require_toolrunner()); var io4 = __toESM(require_io()); var toolcache = __toESM(require_tool_cache()); var semver6 = __toESM(require_semver2()); -var MIN_REQUIRED_BSD_TAR_VERSION = "3.4.3"; -var MIN_REQUIRED_GNU_TAR_VERSION = "1.31"; -async function getTarVersion() { - const tar = await io4.which("tar", true); - let stdout = ""; - const exitCode = await new import_toolrunner.ToolRunner(tar, ["--version"], { - listeners: { - stdout: (data) => { - stdout += data.toString(); - } - } - }).exec(); - if (exitCode !== 0) { - throw new Error("Failed to call tar --version"); - } - if (stdout.includes("GNU tar")) { - const match = stdout.match(/tar \(GNU tar\) ([0-9.]+)/); - if (!match?.[1]) { - throw new Error("Failed to parse output of tar --version."); - } - return { type: "gnu", version: match[1] }; - } else if (stdout.includes("bsdtar")) { - const match = stdout.match(/bsdtar ([0-9.]+)/); - if (!match?.[1]) { - throw new Error("Failed to parse output of tar --version."); - } - return { type: "bsd", version: match[1] }; - } else { - throw new Error("Unknown tar version"); - } -} -async function isZstdAvailable(logger) { - const foundZstdBinary = await isBinaryAccessible("zstd", logger); 
- try { - const tarVersion = await getTarVersion(); - const { type: type2, version } = tarVersion; - logger.info(`Found ${type2} tar version ${version}.`); - switch (type2) { - case "gnu": - return { - available: foundZstdBinary && // GNU tar only uses major and minor version numbers - semver6.gte( - semver6.coerce(version), - semver6.coerce(MIN_REQUIRED_GNU_TAR_VERSION) - ), - foundZstdBinary, - version: tarVersion - }; - case "bsd": - return { - available: foundZstdBinary && // Do a loose comparison since these version numbers don't contain - // a patch version number. - semver6.gte(version, MIN_REQUIRED_BSD_TAR_VERSION), - foundZstdBinary, - version: tarVersion - }; - default: - assertNever(type2); - } - } catch (e) { - logger.warning( - `Failed to determine tar version, therefore will assume zstd is not available. The underlying error was: ${e}` - ); - return { available: false, foundZstdBinary }; - } -} -async function extract(tarPath, dest, compressionMethod, tarVersion, logger) { - fs7.mkdirSync(dest, { recursive: true }); - switch (compressionMethod) { - case "gzip": - return await toolcache.extractTar(tarPath, dest); - case "zstd": { - if (!tarVersion) { - throw new Error( - "Could not determine tar version, which is required to extract a Zstandard archive." - ); - } - await extractTarZst(tarPath, dest, tarVersion, logger); - return dest; - } - } -} -async function extractTarZst(tar, dest, tarVersion, logger) { - logger.debug( - `Extracting to ${dest}.${tar instanceof stream.Readable ? ` Input stream has high water mark ${tar.readableHighWaterMark}.` : ""}` - ); - try { - const args = ["-x", "--zstd", "--ignore-zeros"]; - if (tarVersion.type === "gnu") { - args.push("--warning=no-unknown-keyword"); - args.push("--overwrite"); - } - args.push("-f", tar instanceof stream.Readable ? "-" : tar, "-C", dest); - process.stdout.write(`[command]tar ${args.join(" ")} -`); - await new Promise((resolve8, reject) => { - const tarProcess = (0, import_child_process.spawn)("tar", args, { stdio: "pipe" }); - let stdout = ""; - tarProcess.stdout?.on("data", (data) => { - stdout += data.toString(); - process.stdout.write(data); - }); - let stderr = ""; - tarProcess.stderr?.on("data", (data) => { - stderr += data.toString(); - process.stdout.write(data); - }); - tarProcess.on("error", (err) => { - reject(new Error(`Error while extracting tar: ${err}`)); - }); - if (tar instanceof stream.Readable) { - tar.pipe(tarProcess.stdin).on("error", (err) => { - reject( - new Error(`Error while downloading and extracting tar: ${err}`) - ); - }); - } - tarProcess.on("exit", (code) => { - if (code !== 0) { - reject( - new CommandInvocationError( - "tar", - args, - code ?? 
void 0, - stdout, - stderr - ) - ); - } - resolve8(); - }); - }); - } catch (e) { - await cleanUpPath(dest, "extraction destination directory", logger); - throw e; - } -} -var KNOWN_EXTENSIONS = { - "tar.gz": "gzip", - "tar.zst": "zstd" -}; -function inferCompressionMethod(tarPath) { - for (const [ext, method] of Object.entries(KNOWN_EXTENSIONS)) { - if (tarPath.endsWith(`.${ext}`)) { - return method; - } - } - return void 0; -} // src/tools-download.ts -var fs8 = __toESM(require("fs")); -var os2 = __toESM(require("os")); -var path8 = __toESM(require("path")); -var import_perf_hooks = require("perf_hooks"); var core9 = __toESM(require_core()); var import_http_client = __toESM(require_lib()); var toolcache2 = __toESM(require_tool_cache()); var import_follow_redirects = __toESM(require_follow_redirects()); var semver7 = __toESM(require_semver2()); var STREAMING_HIGH_WATERMARK_BYTES = 4 * 1024 * 1024; -var TOOLCACHE_TOOL_NAME = "CodeQL"; -function makeDownloadFirstToolsDownloadDurations(downloadDurationMs, extractionDurationMs) { - return { - combinedDurationMs: downloadDurationMs + extractionDurationMs, - downloadDurationMs, - extractionDurationMs, - streamExtraction: false - }; -} -function makeStreamedToolsDownloadDurations(combinedDurationMs) { - return { - combinedDurationMs, - downloadDurationMs: void 0, - extractionDurationMs: void 0, - streamExtraction: true - }; -} -async function downloadAndExtract(codeqlURL, compressionMethod, dest, authorization, headers, tarVersion, logger) { - logger.info( - `Downloading CodeQL tools from ${codeqlURL} . This may take a while.` - ); - try { - if (compressionMethod === "zstd" && process.platform === "linux") { - logger.info(`Streaming the extraction of the CodeQL bundle.`); - const toolsInstallStart = import_perf_hooks.performance.now(); - await downloadAndExtractZstdWithStreaming( - codeqlURL, - dest, - authorization, - headers, - tarVersion, - logger - ); - const combinedDurationMs = Math.round( - import_perf_hooks.performance.now() - toolsInstallStart - ); - logger.info( - `Finished downloading and extracting CodeQL bundle to ${dest} (${formatDuration( - combinedDurationMs - )}).` - ); - return { - compressionMethod, - toolsUrl: sanitizeUrlForStatusReport(codeqlURL), - ...makeStreamedToolsDownloadDurations(combinedDurationMs) - }; - } - } catch (e) { - core9.warning( - `Failed to download and extract CodeQL bundle using streaming with error: ${getErrorMessage(e)}` - ); - core9.warning(`Falling back to downloading the bundle before extracting.`); - await cleanUpPath(dest, "CodeQL bundle", logger); - } - const toolsDownloadStart = import_perf_hooks.performance.now(); - const archivedBundlePath = await toolcache2.downloadTool( - codeqlURL, - void 0, - authorization, - headers - ); - const downloadDurationMs = Math.round(import_perf_hooks.performance.now() - toolsDownloadStart); - logger.info( - `Finished downloading CodeQL bundle to ${archivedBundlePath} (${formatDuration( - downloadDurationMs - )}).` - ); - let extractionDurationMs; - try { - logger.info("Extracting CodeQL bundle."); - const extractionStart = import_perf_hooks.performance.now(); - await extract( - archivedBundlePath, - dest, - compressionMethod, - tarVersion, - logger - ); - extractionDurationMs = Math.round(import_perf_hooks.performance.now() - extractionStart); - logger.info( - `Finished extracting CodeQL bundle to ${dest} (${formatDuration( - extractionDurationMs - )}).` - ); - } finally { - await cleanUpPath(archivedBundlePath, "CodeQL bundle archive", logger); - } - return { 
- compressionMethod, - toolsUrl: sanitizeUrlForStatusReport(codeqlURL), - ...makeDownloadFirstToolsDownloadDurations( - downloadDurationMs, - extractionDurationMs - ) - }; -} -async function downloadAndExtractZstdWithStreaming(codeqlURL, dest, authorization, headers, tarVersion, logger) { - fs8.mkdirSync(dest, { recursive: true }); - const agent = new import_http_client.HttpClient().getAgent(codeqlURL); - headers = Object.assign( - { "User-Agent": "CodeQL Action" }, - authorization ? { authorization } : {}, - headers - ); - const response = await new Promise( - (resolve8) => import_follow_redirects.https.get( - codeqlURL, - { - headers, - // Increase the high water mark to improve performance. - highWaterMark: STREAMING_HIGH_WATERMARK_BYTES, - // Use the agent to respect proxy settings. - agent - }, - (r) => resolve8(r) - ) - ); - if (response.statusCode !== 200) { - throw new Error( - `Failed to download CodeQL bundle from ${codeqlURL}. HTTP status code: ${response.statusCode}.` - ); - } - await extractTarZst(response, dest, tarVersion, logger); -} -function getToolcacheDirectory(version) { - return path8.join( - getRequiredEnvParam("RUNNER_TOOL_CACHE"), - TOOLCACHE_TOOL_NAME, - semver7.clean(version) || version, - os2.arch() || "" - ); -} -function writeToolcacheMarkerFile(extractedPath, logger) { - const markerFilePath = `${extractedPath}.complete`; - fs8.writeFileSync(markerFilePath, ""); - logger.info(`Created toolcache marker file ${markerFilePath}`); -} -function sanitizeUrlForStatusReport(url2) { - return ["github/codeql-action", "dsp-testing/codeql-cli-nightlies"].some( - (repo) => url2.startsWith(`https://github.com/${repo}/releases/download/`) - ) ? url2 : "sanitized-value"; -} - -// src/setup-codeql.ts -var CODEQL_DEFAULT_ACTION_REPOSITORY = "github/codeql-action"; -var CODEQL_NIGHTLIES_REPOSITORY_OWNER = "dsp-testing"; -var CODEQL_NIGHTLIES_REPOSITORY_NAME = "codeql-cli-nightlies"; -var CODEQL_BUNDLE_VERSION_ALIAS = ["linked", "latest"]; -var CODEQL_NIGHTLY_TOOLS_INPUTS = ["nightly", "nightly-latest"]; -var CODEQL_TOOLCACHE_INPUT = "toolcache"; -function getCodeQLBundleExtension(compressionMethod) { - switch (compressionMethod) { - case "gzip": - return ".tar.gz"; - case "zstd": - return ".tar.zst"; - default: - assertNever(compressionMethod); - } -} -function getCodeQLBundleName(compressionMethod) { - const extension = getCodeQLBundleExtension(compressionMethod); - let platform2; - if (process.platform === "win32") { - platform2 = "win64"; - } else if (process.platform === "linux") { - platform2 = "linux64"; - } else if (process.platform === "darwin") { - platform2 = "osx64"; - } else { - return `codeql-bundle${extension}`; - } - return `codeql-bundle-${platform2}${extension}`; -} -function getCodeQLActionRepository(logger) { - if (isRunningLocalAction()) { - logger.info( - "The CodeQL Action is checked out locally. Using the default CodeQL Action repository." - ); - return CODEQL_DEFAULT_ACTION_REPOSITORY; - } - return getRequiredEnvParam("GITHUB_ACTION_REPOSITORY"); -} -async function getCodeQLBundleDownloadURL(tagName, apiDetails, compressionMethod, logger) { - const codeQLActionRepository = getCodeQLActionRepository(logger); - const potentialDownloadSources = [ - // This GitHub instance, and this Action. - [apiDetails.url, codeQLActionRepository], - // This GitHub instance, and the canonical Action. - [apiDetails.url, CODEQL_DEFAULT_ACTION_REPOSITORY], - // GitHub.com, and the canonical Action. 
- [GITHUB_DOTCOM_URL, CODEQL_DEFAULT_ACTION_REPOSITORY] - ]; - const uniqueDownloadSources = potentialDownloadSources.filter( - (source, index, self2) => { - return !self2.slice(0, index).some((other) => (0, import_fast_deep_equal.default)(source, other)); - } - ); - const codeQLBundleName = getCodeQLBundleName(compressionMethod); - for (const downloadSource of uniqueDownloadSources) { - const [apiURL, repository] = downloadSource; - if (apiURL === GITHUB_DOTCOM_URL && repository === CODEQL_DEFAULT_ACTION_REPOSITORY) { - break; - } - const [repositoryOwner, repositoryName] = repository.split("/"); - try { - const release2 = await getApiClient().rest.repos.getReleaseByTag({ - owner: repositoryOwner, - repo: repositoryName, - tag: tagName - }); - for (const asset of release2.data.assets) { - if (asset.name === codeQLBundleName) { - logger.info( - `Found CodeQL bundle ${codeQLBundleName} in ${repository} on ${apiURL} with URL ${asset.url}.` - ); - return asset.url; - } - } - } catch (e) { - logger.info( - `Looked for CodeQL bundle ${codeQLBundleName} in ${repository} on ${apiURL} but got error ${e}.` - ); - } - } - return `https://github.com/${CODEQL_DEFAULT_ACTION_REPOSITORY}/releases/download/${tagName}/${codeQLBundleName}`; -} -function tryGetBundleVersionFromTagName(tagName, logger) { - const match = tagName.match(/^codeql-bundle-(.*)$/); - if (match === null || match.length < 2) { - logger.debug(`Could not determine bundle version from tag ${tagName}.`); - return void 0; - } - return match[1]; -} -function tryGetTagNameFromUrl(url2, logger) { - const matches = [...url2.matchAll(/\/(codeql-bundle-[^/]*)\//g)]; - if (matches.length === 0) { - logger.debug(`Could not determine tag name for URL ${url2}.`); - return void 0; - } - const match = matches[matches.length - 1]; - if (match?.length !== 2) { - logger.debug( - `Could not determine tag name for URL ${url2}. Matched ${JSON.stringify( - match - )}.` - ); - return void 0; - } - return match[1]; -} -function convertToSemVer(version, logger) { - if (!semver8.valid(version)) { - logger.debug( - `Bundle version ${version} is not in SemVer format. Will treat it as pre-release 0.0.0-${version}.` - ); - version = `0.0.0-${version}`; - } - const s = semver8.clean(version); - if (!s) { - throw new Error(`Bundle version ${version} is not in SemVer format.`); - } - return s; -} -async function findOverridingToolsInCache(humanReadableVersion, logger) { - const candidates = toolcache3.findAllVersions("CodeQL").filter(isGoodVersion).map((version) => ({ - folder: toolcache3.find("CodeQL", version), - version - })).filter(({ folder }) => fs9.existsSync(path9.join(folder, "pinned-version"))); - if (candidates.length === 1) { - const candidate = candidates[0]; - logger.debug( - `CodeQL tools version ${candidate.version} in toolcache overriding version ${humanReadableVersion}.` - ); - return { - codeqlFolder: candidate.folder, - sourceType: "toolcache", - toolsVersion: candidate.version - }; - } else if (candidates.length === 0) { - logger.debug( - "Did not find any candidate pinned versions of the CodeQL tools in the toolcache." - ); - } else { - logger.debug( - "Could not use CodeQL tools from the toolcache since more than one candidate pinned version was found in the toolcache." 
- ); - } - return void 0; -} -async function getCodeQLSource(toolsInput, defaultCliVersion, apiDetails, variant, tarSupportsZstd, features, logger) { - if (toolsInput && !isReservedToolsValue(toolsInput) && !toolsInput.startsWith("http")) { - logger.info(`Using CodeQL CLI from local path ${toolsInput}`); - const compressionMethod2 = inferCompressionMethod(toolsInput); - if (compressionMethod2 === void 0) { - throw new ConfigurationError( - `Could not infer compression method from path ${toolsInput}. Please specify a path ending in '.tar.gz' or '.tar.zst'.` - ); - } - return { - codeqlTarPath: toolsInput, - compressionMethod: compressionMethod2, - sourceType: "local", - toolsVersion: "local" - }; - } - let cliVersion2; - let tagName; - let url2; - const canForceNightlyWithFF = isDynamicWorkflow() || isInTestMode(); - const forceNightlyValueFF = await features.getValue("force_nightly" /* ForceNightly */); - const forceNightly = forceNightlyValueFF && canForceNightlyWithFF; - const nightlyRequestedByToolsInput = toolsInput !== void 0 && CODEQL_NIGHTLY_TOOLS_INPUTS.includes(toolsInput); - if (forceNightly || nightlyRequestedByToolsInput) { - if (forceNightly) { - logger.info( - `Using the latest CodeQL CLI nightly, as forced by the ${"force_nightly" /* ForceNightly */} feature flag.` - ); - addNoLanguageDiagnostic( - void 0, - makeDiagnostic( - "codeql-action/forced-nightly-cli", - "A nightly release of CodeQL was used", - { - markdownMessage: "GitHub configured this analysis to use a nightly release of CodeQL to allow you to preview changes from an upcoming release.\n\nNightly releases do not undergo the same validation as regular releases and may lead to analysis instability.\n\nIf use of a nightly CodeQL release for this analysis is unexpected, please contact GitHub support.", - visibility: { - cliSummaryTable: true, - statusPage: true, - telemetry: true - }, - severity: "note" - } - ) - ); - } else { - logger.info( - `Using the latest CodeQL CLI nightly, as requested by 'tools: ${toolsInput}'.` - ); - } - toolsInput = await getNightlyToolsUrl(logger); - } - const forceShippedTools = toolsInput && CODEQL_BUNDLE_VERSION_ALIAS.includes(toolsInput); - if (forceShippedTools) { - cliVersion2 = cliVersion; - tagName = bundleVersion; - logger.info( - `'tools: ${toolsInput}' was requested, so using CodeQL version ${cliVersion2}, the version shipped with the Action.` - ); - if (toolsInput === "latest") { - logger.warning( - "`tools: latest` has been renamed to `tools: linked`, but the old name is still supported. No action is required." 
- ); - } - } else if (toolsInput !== void 0 && toolsInput === CODEQL_TOOLCACHE_INPUT) { - let latestToolcacheVersion; - const allowToolcacheValueFF = await features.getValue( - "allow_toolcache_input" /* AllowToolcacheInput */ - ); - const allowToolcacheValue = allowToolcacheValueFF && (isDynamicWorkflow() || isInTestMode()); - if (allowToolcacheValue) { - logger.info( - `Attempting to use the latest CodeQL CLI version in the toolcache, as requested by 'tools: ${toolsInput}'.` - ); - latestToolcacheVersion = getLatestToolcacheVersion(logger); - if (latestToolcacheVersion) { - cliVersion2 = latestToolcacheVersion; - } - } - if (latestToolcacheVersion === void 0) { - if (allowToolcacheValue) { - logger.info( - `Found no CodeQL CLI in the toolcache, ignoring 'tools: ${toolsInput}'...` - ); - } else { - if (allowToolcacheValueFF) { - logger.warning( - `Ignoring 'tools: ${toolsInput}' because the workflow was not triggered dynamically.` - ); - } else { - logger.info( - `Ignoring 'tools: ${toolsInput}' because the feature is not enabled.` - ); - } - } - cliVersion2 = defaultCliVersion.cliVersion; - tagName = defaultCliVersion.tagName; - } - } else if (toolsInput !== void 0) { - tagName = tryGetTagNameFromUrl(toolsInput, logger); - url2 = toolsInput; - if (tagName) { - const bundleVersion3 = tryGetBundleVersionFromTagName(tagName, logger); - if (bundleVersion3 && semver8.valid(bundleVersion3)) { - cliVersion2 = convertToSemVer(bundleVersion3, logger); - } - } - } else { - cliVersion2 = defaultCliVersion.cliVersion; - tagName = defaultCliVersion.tagName; - } - const bundleVersion2 = tagName && tryGetBundleVersionFromTagName(tagName, logger); - const humanReadableVersion = cliVersion2 ?? (bundleVersion2 && convertToSemVer(bundleVersion2, logger)) ?? tagName ?? url2 ?? "unknown"; - logger.debug( - `Attempting to obtain CodeQL tools. CLI version: ${cliVersion2 ?? "unknown"}, bundle tag name: ${tagName ?? "unknown"}, URL: ${url2 ?? "unspecified"}.` - ); - let codeqlFolder; - if (cliVersion2) { - codeqlFolder = toolcache3.find("CodeQL", cliVersion2); - if (!codeqlFolder) { - logger.debug( - `Didn't find a version of the CodeQL tools in the toolcache with a version number exactly matching ${cliVersion2}.` - ); - const allVersions = toolcache3.findAllVersions("CodeQL"); - logger.debug( - `Found the following versions of the CodeQL tools in the toolcache: ${JSON.stringify( - allVersions - )}.` - ); - const candidateVersions = allVersions.filter( - (version) => version.startsWith(`${cliVersion2}-`) - ); - if (candidateVersions.length === 1) { - logger.debug( - `Exactly one version of the CodeQL tools starting with ${cliVersion2} found in the toolcache, using that.` - ); - codeqlFolder = toolcache3.find("CodeQL", candidateVersions[0]); - } else if (candidateVersions.length === 0) { - logger.debug( - `Didn't find any versions of the CodeQL tools starting with ${cliVersion2} in the toolcache. 
Trying next fallback method.` - ); - } else { - logger.warning( - `Found ${candidateVersions.length} versions of the CodeQL tools starting with ${cliVersion2} in the toolcache, but at most one was expected.` - ); - logger.debug("Trying next fallback method."); - } - } - } - if (!codeqlFolder && tagName) { - const fallbackVersion = await tryGetFallbackToolcacheVersion( - cliVersion2, - tagName, - logger - ); - if (fallbackVersion) { - codeqlFolder = toolcache3.find("CodeQL", fallbackVersion); - } else { - logger.debug( - `Could not determine a fallback toolcache version number for CodeQL tools version ${humanReadableVersion}.` - ); - } - } - if (codeqlFolder) { - logger.info( - `Found CodeQL tools version ${humanReadableVersion} in the toolcache.` - ); - } else { - logger.info( - `Did not find CodeQL tools version ${humanReadableVersion} in the toolcache.` - ); - } - if (codeqlFolder) { - if (cliVersion2) { - logger.info( - `Using CodeQL CLI version ${cliVersion2} from toolcache at ${codeqlFolder}` - ); - } else { - logger.info(`Using CodeQL CLI from toolcache at ${codeqlFolder}`); - } - return { - codeqlFolder, - sourceType: "toolcache", - toolsVersion: cliVersion2 ?? humanReadableVersion - }; - } - if (variant === "GitHub Enterprise Server" /* GHES */ && !forceShippedTools && !toolsInput) { - const result = await findOverridingToolsInCache( - humanReadableVersion, - logger - ); - if (result !== void 0) { - return result; - } - } - let compressionMethod; - if (!url2) { - compressionMethod = cliVersion2 !== void 0 && await useZstdBundle(cliVersion2, tarSupportsZstd) ? "zstd" : "gzip"; - url2 = await getCodeQLBundleDownloadURL( - tagName, - apiDetails, - compressionMethod, - logger - ); - } else { - const method = inferCompressionMethod(url2); - if (method === void 0) { - throw new ConfigurationError( - `Could not infer compression method from URL ${url2}. Please specify a URL ending in '.tar.gz' or '.tar.zst'.` - ); - } - compressionMethod = method; - } - if (cliVersion2) { - logger.info(`Using CodeQL CLI version ${cliVersion2} sourced from ${url2} .`); - } else { - logger.info(`Using CodeQL CLI sourced from ${url2} .`); - } - return { - bundleVersion: tagName && tryGetBundleVersionFromTagName(tagName, logger), - cliVersion: cliVersion2, - codeqlURL: url2, - compressionMethod, - sourceType: "download", - toolsVersion: cliVersion2 ?? humanReadableVersion - }; -} -async function tryGetFallbackToolcacheVersion(cliVersion2, tagName, logger) { - const bundleVersion2 = tryGetBundleVersionFromTagName(tagName, logger); - if (!bundleVersion2) { - return void 0; - } - const fallbackVersion = convertToSemVer(bundleVersion2, logger); - logger.debug( - `Computed a fallback toolcache version number of ${fallbackVersion} for CodeQL version ${cliVersion2 ?? tagName}.` - ); - return fallbackVersion; -} -var downloadCodeQL = async function(codeqlURL, compressionMethod, maybeBundleVersion, maybeCliVersion, apiDetails, tarVersion, tempDir, logger) { - const parsedCodeQLURL = new URL(codeqlURL); - const searchParams = new URLSearchParams(parsedCodeQLURL.search); - const headers = { - accept: "application/octet-stream" - }; - let authorization = void 0; - if (searchParams.has("token")) { - logger.debug("CodeQL tools URL contains an authorization token."); - } else { - authorization = getAuthorizationHeaderFor( - logger, - apiDetails, - codeqlURL - ); - } - const toolcacheInfo = getToolcacheDestinationInfo( - maybeBundleVersion, - maybeCliVersion, - logger - ); - const extractedBundlePath = toolcacheInfo?.path ?? 
getTempExtractionDir(tempDir); - const statusReport = await downloadAndExtract( - codeqlURL, - compressionMethod, - extractedBundlePath, - authorization, - { "User-Agent": "CodeQL Action", ...headers }, - tarVersion, - logger - ); - if (!toolcacheInfo) { - logger.debug( - `Could not cache CodeQL tools because we could not determine the bundle version from the URL ${codeqlURL}.` - ); - return { - codeqlFolder: extractedBundlePath, - statusReport, - toolsVersion: maybeCliVersion ?? "unknown" - }; - } - writeToolcacheMarkerFile(toolcacheInfo.path, logger); - return { - codeqlFolder: extractedBundlePath, - statusReport, - toolsVersion: maybeCliVersion ?? toolcacheInfo.version - }; -}; -function getToolcacheDestinationInfo(maybeBundleVersion, maybeCliVersion, logger) { - if (maybeBundleVersion) { - const version = getCanonicalToolcacheVersion( - maybeCliVersion, - maybeBundleVersion, - logger - ); - return { - path: getToolcacheDirectory(version), - version - }; - } - return void 0; -} -function getCanonicalToolcacheVersion(cliVersion2, bundleVersion2, logger) { - if (!cliVersion2?.match(/^[0-9]+\.[0-9]+\.[0-9]+$/)) { - return convertToSemVer(bundleVersion2, logger); - } - return cliVersion2; -} -async function setupCodeQLBundle(toolsInput, apiDetails, tempDir, variant, defaultCliVersion, features, logger) { - if (!await isBinaryAccessible("tar", logger)) { - throw new ConfigurationError( - "Could not find tar in PATH, so unable to extract CodeQL bundle." - ); - } - const zstdAvailability = await isZstdAvailable(logger); - const source = await getCodeQLSource( - toolsInput, - defaultCliVersion, - apiDetails, - variant, - zstdAvailability.available, - features, - logger - ); - let codeqlFolder; - let toolsVersion = source.toolsVersion; - let toolsDownloadStatusReport; - let toolsSource; - switch (source.sourceType) { - case "local": { - codeqlFolder = await extract( - source.codeqlTarPath, - getTempExtractionDir(tempDir), - source.compressionMethod, - zstdAvailability.version, - logger - ); - toolsSource = "LOCAL" /* Local */; - break; - } - case "toolcache": - codeqlFolder = source.codeqlFolder; - logger.debug(`CodeQL found in cache ${codeqlFolder}`); - toolsSource = "TOOLCACHE" /* Toolcache */; - break; - case "download": { - const result = await downloadCodeQL( - source.codeqlURL, - source.compressionMethod, - source.bundleVersion, - source.cliVersion, - apiDetails, - zstdAvailability.version, - tempDir, - logger - ); - toolsVersion = result.toolsVersion; - codeqlFolder = result.codeqlFolder; - toolsDownloadStatusReport = result.statusReport; - toolsSource = "DOWNLOAD" /* Download */; - break; - } - default: - assertNever(source); - } - return { - codeqlFolder, - toolsDownloadStatusReport, - toolsSource, - toolsVersion, - zstdAvailability - }; -} -async function useZstdBundle(cliVersion2, tarSupportsZstd) { - return ( - // In testing, gzip performs better than zstd on Windows. - process.platform !== "win32" && tarSupportsZstd && semver8.gte(cliVersion2, CODEQL_VERSION_ZSTD_BUNDLE) - ); -} -function getTempExtractionDir(tempDir) { - return path9.join(tempDir, v4_default()); -} -async function getNightlyToolsUrl(logger) { - const zstdAvailability = await isZstdAvailable(logger); - const compressionMethod = await useZstdBundle( - CODEQL_VERSION_ZSTD_BUNDLE, - zstdAvailability.available - ) ? 
"zstd" : "gzip"; - try { - const release2 = await getApiClient().rest.repos.listReleases({ - owner: CODEQL_NIGHTLIES_REPOSITORY_OWNER, - repo: CODEQL_NIGHTLIES_REPOSITORY_NAME, - per_page: 1, - page: 1, - prerelease: true - }); - const latestRelease = release2.data[0]; - if (!latestRelease) { - throw new Error("Could not find the latest nightly release."); - } - return `https://github.com/${CODEQL_NIGHTLIES_REPOSITORY_OWNER}/${CODEQL_NIGHTLIES_REPOSITORY_NAME}/releases/download/${latestRelease.tag_name}/${getCodeQLBundleName(compressionMethod)}`; - } catch (e) { - throw new Error( - `Failed to retrieve the latest nightly release: ${wrapError(e)}` - ); - } -} -function getLatestToolcacheVersion(logger) { - const allVersions = toolcache3.findAllVersions("CodeQL").sort((a, b) => semver8.compare(b, a)); - logger.debug( - `Found the following versions of the CodeQL tools in the toolcache: ${JSON.stringify( - allVersions - )}.` - ); - if (allVersions.length > 0) { - const latestToolcacheVersion = allVersions[0]; - logger.info( - `CLI version ${latestToolcacheVersion} is the latest version in the toolcache.` - ); - return latestToolcacheVersion; - } - return void 0; -} -function isReservedToolsValue(tools) { - return CODEQL_BUNDLE_VERSION_ALIAS.includes(tools) || CODEQL_NIGHTLY_TOOLS_INPUTS.includes(tools) || tools === CODEQL_TOOLCACHE_INPUT; -} // src/tracer-config.ts -var fs10 = __toESM(require("fs")); -var path10 = __toESM(require("path")); +var fs7 = __toESM(require("fs")); +var path7 = __toESM(require("path")); async function shouldEnableIndirectTracing(codeql, config) { if (config.buildMode === "none" /* None */) { return false; @@ -109517,18 +108522,18 @@ async function endTracingForCluster(codeql, config, logger) { logger.info( "Unsetting build tracing environment variables. Subsequent steps of this job will not be traced." 
); - const envVariablesFile = path10.resolve( + const envVariablesFile = path7.resolve( config.dbLocation, "temp/tracingEnvironment/end-tracing.json" ); - if (!fs10.existsSync(envVariablesFile)) { + if (!fs7.existsSync(envVariablesFile)) { throw new Error( `Environment file for ending tracing not found: ${envVariablesFile}` ); } try { const endTracingEnvVariables = JSON.parse( - fs10.readFileSync(envVariablesFile, "utf8") + fs7.readFileSync(envVariablesFile, "utf8") ); for (const [key, value] of Object.entries(endTracingEnvVariables)) { if (value !== null) { @@ -109552,54 +108557,6 @@ var GHES_VERSION_MOST_RECENTLY_DEPRECATED = "3.13"; var GHES_MOST_RECENT_DEPRECATION_DATE = "2025-06-19"; var EXTRACTION_DEBUG_MODE_VERBOSITY = "progress++"; var CODEQL_VERSION_CACHE_CLEANUP = "2.17.1"; -async function setupCodeQL(toolsInput, apiDetails, tempDir, variant, defaultCliVersion, features, logger, checkVersion) { - try { - const { - codeqlFolder, - toolsDownloadStatusReport, - toolsSource, - toolsVersion, - zstdAvailability - } = await setupCodeQLBundle( - toolsInput, - apiDetails, - tempDir, - variant, - defaultCliVersion, - features, - logger - ); - logger.debug( - `Bundle download status report: ${JSON.stringify( - toolsDownloadStatusReport - )}` - ); - let codeqlCmd = path11.join(codeqlFolder, "codeql", "codeql"); - if (process.platform === "win32") { - codeqlCmd += ".exe"; - } else if (process.platform !== "linux" && process.platform !== "darwin") { - throw new ConfigurationError( - `Unsupported platform: ${process.platform}` - ); - } - cachedCodeQL = await getCodeQLForCmd(codeqlCmd, checkVersion); - return { - codeql: cachedCodeQL, - toolsDownloadStatusReport, - toolsSource, - toolsVersion, - zstdAvailability - }; - } catch (rawError) { - const e = wrapApiConfigurationError(rawError); - const ErrorClass = e instanceof ConfigurationError || e instanceof Error && e.message.includes("ENOSPC") ? ConfigurationError : Error; - throw new ErrorClass( - `Unable to download and extract CodeQL CLI: ${getErrorMessage(e)}${e instanceof Error && e.stack ? ` - -Details: ${e.stack}` : ""}` - ); - } -} async function getCodeQL(cmd) { if (cachedCodeQL === void 0) { cachedCodeQL = await getCodeQLForCmd(cmd, true); @@ -109636,12 +108593,12 @@ async function getCodeQLForCmd(cmd, checkVersion) { }, async isTracedLanguage(language) { const extractorPath = await this.resolveExtractor(language); - const tracingConfigPath = path11.join( + const tracingConfigPath = path8.join( extractorPath, "tools", "tracing-config.lua" ); - return fs11.existsSync(tracingConfigPath); + return fs8.existsSync(tracingConfigPath); }, async isScannedLanguage(language) { return !await this.isTracedLanguage(language); @@ -109718,7 +108675,7 @@ async function getCodeQLForCmd(cmd, checkVersion) { }, async runAutobuild(config, language) { applyAutobuildAzurePipelinesTimeoutFix(); - const autobuildCmd = path11.join( + const autobuildCmd = path8.join( await this.resolveExtractor(language), "tools", process.platform === "win32" ? 
"autobuild.cmd" : "autobuild.sh" @@ -110117,7 +109074,7 @@ async function writeCodeScanningConfigFile(config, logger) { logger.startGroup("Augmented user configuration file contents"); logger.info(dump(augmentedConfig)); logger.endGroup(); - fs11.writeFileSync(codeScanningConfigFile, dump(augmentedConfig)); + fs8.writeFileSync(codeScanningConfigFile, dump(augmentedConfig)); return codeScanningConfigFile; } var TRAP_CACHE_SIZE_MB = 1024; @@ -110140,7 +109097,7 @@ async function getTrapCachingExtractorConfigArgsForLang(config, language) { ]; } function getGeneratedCodeScanningConfigPath(config) { - return path11.resolve(config.tempDir, "user-config.yaml"); + return path8.resolve(config.tempDir, "user-config.yaml"); } function getExtractionVerbosityArguments(enableDebugLogging) { return enableDebugLogging ? [`--verbosity=${EXTRACTION_DEBUG_MODE_VERBOSITY}`] : []; @@ -110207,7 +109164,7 @@ async function runAutobuild(config, language, logger) { } // src/dependency-caching.ts -var os3 = __toESM(require("os")); +var os2 = __toESM(require("os")); var import_path2 = require("path"); var actionsCache3 = __toESM(require_cache5()); var glob = __toESM(require_glob()); @@ -110219,9 +109176,9 @@ function getJavaTempDependencyDir() { async function getJavaDependencyDirs() { return [ // Maven - (0, import_path2.join)(os3.homedir(), ".m2", "repository"), + (0, import_path2.join)(os2.homedir(), ".m2", "repository"), // Gradle - (0, import_path2.join)(os3.homedir(), ".gradle", "caches"), + (0, import_path2.join)(os2.homedir(), ".gradle", "caches"), // CodeQL Java build-mode: none getJavaTempDependencyDir() ]; @@ -110232,7 +109189,7 @@ function getCsharpTempDependencyDir() { async function getCsharpDependencyDirs(codeql, features) { const dirs = [ // Nuget - (0, import_path2.join)(os3.homedir(), ".nuget", "packages") + (0, import_path2.join)(os2.homedir(), ".nuget", "packages") ]; if (await features.getValue("csharp_cache_bmn" /* CsharpCacheBuildModeNone */, codeql)) { dirs.push(getCsharpTempDependencyDir()); @@ -110287,7 +109244,7 @@ var defaultCacheConfigs = { getHashPatterns: getCsharpHashPatterns }, go: { - getDependencyPaths: async () => [(0, import_path2.join)(os3.homedir(), "go", "pkg", "mod")], + getDependencyPaths: async () => [(0, import_path2.join)(os2.homedir(), "go", "pkg", "mod")], getHashPatterns: async () => internal.makePatternCheck(["**/go.sum"]) } }; @@ -110464,7 +109421,7 @@ function dbIsFinalized(config, language, logger) { const dbPath = getCodeQLDatabasePath(config, language); try { const dbInfo = load( - fs12.readFileSync(path12.resolve(dbPath, "codeql-database.yml"), "utf8") + fs9.readFileSync(path9.resolve(dbPath, "codeql-database.yml"), "utf8") ); return !("inProgress" in dbInfo); } catch { @@ -110475,10 +109432,10 @@ function dbIsFinalized(config, language, logger) { } } async function finalizeDatabaseCreation(codeql, features, config, threadsFlag, memoryFlag, logger) { - const extractionStart = import_perf_hooks2.performance.now(); + const extractionStart = import_perf_hooks.performance.now(); await runExtraction(codeql, features, config, logger); - const extractionTime = import_perf_hooks2.performance.now() - extractionStart; - const trapImportStart = import_perf_hooks2.performance.now(); + const extractionTime = import_perf_hooks.performance.now() - extractionStart; + const trapImportStart = import_perf_hooks.performance.now(); for (const language of config.languages) { if (dbIsFinalized(config, language, logger)) { logger.info( @@ -110495,7 +109452,7 @@ async function 
finalizeDatabaseCreation(codeql, features, config, threadsFlag, m logger.endGroup(); } } - const trapImportTime = import_perf_hooks2.performance.now() - trapImportStart; + const trapImportTime = import_perf_hooks.performance.now() - trapImportStart; return { scanned_language_extraction_duration_ms: Math.round(extractionTime), trap_import_duration_ms: Math.round(trapImportTime) @@ -110530,10 +109487,10 @@ function writeDiffRangeDataExtensionPack(logger, ranges) { if (ranges.length === 0) { ranges = [{ path: "", startLine: 0, endLine: 0 }]; } - const diffRangeDir = path12.join(getTemporaryDirectory(), "pr-diff-range"); - fs12.mkdirSync(diffRangeDir, { recursive: true }); - fs12.writeFileSync( - path12.join(diffRangeDir, "qlpack.yml"), + const diffRangeDir = path9.join(getTemporaryDirectory(), "pr-diff-range"); + fs9.mkdirSync(diffRangeDir, { recursive: true }); + fs9.writeFileSync( + path9.join(diffRangeDir, "qlpack.yml"), ` name: codeql-action/pr-diff-range version: 0.0.0 @@ -110565,8 +109522,8 @@ extensions: data = ' - ["", 0, 0]\n'; } const extensionContents = header + data; - const extensionFilePath = path12.join(diffRangeDir, "pr-diff-range.yml"); - fs12.writeFileSync(extensionFilePath, extensionContents); + const extensionFilePath = path9.join(diffRangeDir, "pr-diff-range.yml"); + fs9.writeFileSync(extensionFilePath, extensionContents); logger.debug( `Wrote pr-diff-range extension pack to ${extensionFilePath}: ${extensionContents}` @@ -110690,7 +109647,7 @@ async function runQueries(sarifFolder, memoryFlag, threadsFlag, diffRangePackDir async function runInterpretResultsFor(analysis, language, queries, enableDebugLogging) { logger.info(`Interpreting ${analysis.name} results for ${language}`); const category = analysis.fixCategory(logger, automationDetailsId); - const sarifFile = path12.join( + const sarifFile = path9.join( sarifFolder, addSarifExtension(analysis, language) ); @@ -110719,7 +109676,7 @@ async function runQueries(sarifFolder, memoryFlag, threadsFlag, diffRangePackDir } function getPerQueryAlertCounts(sarifPath) { const sarifObject = JSON.parse( - fs12.readFileSync(sarifPath, "utf8") + fs9.readFileSync(sarifPath, "utf8") ); const perQueryAlertCounts = {}; for (const sarifRun of sarifObject.runs) { @@ -110737,13 +109694,13 @@ async function runQueries(sarifFolder, memoryFlag, threadsFlag, diffRangePackDir } async function runFinalize(features, outputDir, threadsFlag, memoryFlag, codeql, config, logger) { try { - await fs12.promises.rm(outputDir, { force: true, recursive: true }); + await fs9.promises.rm(outputDir, { force: true, recursive: true }); } catch (error3) { if (error3?.code !== "ENOENT") { throw error3; } } - await fs12.promises.mkdir(outputDir, { recursive: true }); + await fs9.promises.mkdir(outputDir, { recursive: true }); const timings = await finalizeDatabaseCreation( codeql, features, @@ -110787,7 +109744,7 @@ async function warnIfGoInstalledAfterInit(config, logger) { } // src/database-upload.ts -var fs13 = __toESM(require("fs")); +var fs10 = __toESM(require("fs")); async function cleanupAndUploadDatabases(repositoryNwo, codeql, config, apiDetails, features, logger) { if (getRequiredInput("upload-database") !== "true") { logger.debug("Database upload disabled in workflow. 
Skipping upload."); @@ -110828,8 +109785,8 @@ async function cleanupAndUploadDatabases(repositoryNwo, codeql, config, apiDetai let bundledDbSize = void 0; try { const bundledDb = await bundleDb(config, language, codeql, language); - bundledDbSize = fs13.statSync(bundledDb).size; - const bundledDbReadStream = fs13.createReadStream(bundledDb); + bundledDbSize = fs10.statSync(bundledDb).size; + const bundledDbReadStream = fs10.createReadStream(bundledDb); const commitOid = await getCommitOid( getRequiredInput("checkout_path") ); @@ -110878,7 +109835,7 @@ async function cleanupAndUploadDatabases(repositoryNwo, codeql, config, apiDetai } // src/status-report.ts -var os4 = __toESM(require("os")); +var os3 = __toESM(require("os")); var core12 = __toESM(require_core()); function isFirstPartyAnalysis(actionName) { if (actionName !== "upload-sarif" /* UploadSarif */) { @@ -110984,7 +109941,7 @@ async function createStatusReportBase(actionName, status, actionStartedAt, confi statusReport.runner_arch = process.env["RUNNER_ARCH"]; } if (!(runnerOs === "Linux" && isSelfHostedRunner())) { - statusReport.runner_os_release = os4.release(); + statusReport.runner_os_release = os3.release(); } if (codeQlCliVersion !== void 0) { statusReport.codeql_version = codeQlCliVersion.version; @@ -111085,15 +110042,15 @@ async function sendUnhandledErrorStatusReport(actionName, actionStartedAt, error } // src/upload-lib.ts -var fs15 = __toESM(require("fs")); -var path14 = __toESM(require("path")); +var fs12 = __toESM(require("fs")); +var path11 = __toESM(require("path")); var url = __toESM(require("url")); var import_zlib = __toESM(require("zlib")); var core13 = __toESM(require_core()); var jsonschema2 = __toESM(require_lib2()); // src/fingerprints.ts -var fs14 = __toESM(require("fs")); +var fs11 = __toESM(require("fs")); var import_path3 = __toESM(require("path")); // node_modules/long/index.js @@ -112081,7 +111038,7 @@ async function hash(callback, filepath) { } updateHash(current); }; - const readStream = fs14.createReadStream(filepath, "utf8"); + const readStream = fs11.createReadStream(filepath, "utf8"); for await (const data of readStream) { for (let i = 0; i < data.length; ++i) { processCharacter(data.charCodeAt(i)); @@ -112156,11 +111113,11 @@ function resolveUriToFile(location, artifacts, sourceRoot, logger) { if (!import_path3.default.isAbsolute(uri)) { uri = srcRootPrefix + uri; } - if (!fs14.existsSync(uri)) { + if (!fs11.existsSync(uri)) { logger.debug(`Unable to compute fingerprint for non-existent file: ${uri}`); return void 0; } - if (fs14.statSync(uri).isDirectory()) { + if (fs11.statSync(uri).isDirectory()) { logger.debug(`Unable to compute fingerprint for directory: ${uri}`); return void 0; } @@ -112217,34 +111174,6 @@ async function addFingerprints(sarif, sourceRoot, logger) { // src/init.ts var toolrunner4 = __toESM(require_toolrunner()); var io6 = __toESM(require_io()); -async function initCodeQL(toolsInput, apiDetails, tempDir, variant, defaultCliVersion, features, logger) { - logger.startGroup("Setup CodeQL tools"); - const { - codeql, - toolsDownloadStatusReport, - toolsSource, - toolsVersion, - zstdAvailability - } = await setupCodeQL( - toolsInput, - apiDetails, - tempDir, - variant, - defaultCliVersion, - features, - logger, - true - ); - await codeql.printVersion(); - logger.endGroup(); - return { - codeql, - toolsDownloadStatusReport, - toolsSource, - toolsVersion, - zstdAvailability - }; -} // src/upload-lib.ts var GENERIC_403_MSG = "The repo on which this action is running has not 
opted-in to CodeQL code scanning."; @@ -112258,7 +111187,7 @@ function combineSarifFiles(sarifFiles, logger) { for (const sarifFile of sarifFiles) { logger.debug(`Loading SARIF file: ${sarifFile}`); const sarifObject = JSON.parse( - fs15.readFileSync(sarifFile, "utf8") + fs12.readFileSync(sarifFile, "utf8") ); if (combinedSarif.version === null) { combinedSarif.version = sarifObject.version; @@ -112327,35 +111256,10 @@ async function shouldDisableCombineSarifFiles(sarifObjects, githubVersion) { } return true; } -async function minimalInitCodeQL(logger, gitHubVersion, features) { - logger.info( - "Initializing CodeQL since the 'init' Action was not called before this step." - ); - const apiDetails = { - auth: getRequiredInput("token"), - externalRepoAuth: getOptionalInput("external-repository-token"), - url: getRequiredEnvParam("GITHUB_SERVER_URL"), - apiURL: getRequiredEnvParam("GITHUB_API_URL") - }; - const codeQLDefaultVersionInfo = await features.getDefaultCliVersion( - gitHubVersion.type - ); - const initCodeQLResult = await initCodeQL( - void 0, - // There is no tools input on the upload action - apiDetails, - getTemporaryDirectory(), - gitHubVersion.type, - codeQLDefaultVersionInfo, - features, - logger - ); - return initCodeQLResult.codeql; -} -async function combineSarifFilesUsingCLI(sarifFiles, gitHubVersion, features, logger) { +async function combineSarifFilesUsingCLI(sarifFiles, gitHubVersion, _features, logger, getCodeQL2, tempDir) { logger.info("Combining SARIF files using the CodeQL CLI"); const sarifObjects = sarifFiles.map((sarifFile) => { - return JSON.parse(fs15.readFileSync(sarifFile, "utf8")); + return JSON.parse(fs12.readFileSync(sarifFile, "utf8")); }); const deprecationWarningMessage = gitHubVersion.type === "GitHub Enterprise Server" /* GHES */ ? 
"and will be removed in GitHub Enterprise Server 3.18" : "and will be removed in July 2025"; const deprecationMoreInformationMessage = "For more information, see https://github.blog/changelog/2024-05-06-code-scanning-will-stop-combining-runs-from-a-single-upload"; @@ -112375,23 +111279,15 @@ async function combineSarifFilesUsingCLI(sarifFiles, gitHubVersion, features, lo } return combineSarifFiles(sarifFiles, logger); } - let codeQL; - let tempDir = getTemporaryDirectory(); - const config = await getConfig(tempDir, logger); - if (config !== void 0) { - codeQL = await getCodeQL(config.codeQLCmd); - tempDir = config.tempDir; - } else { - codeQL = await minimalInitCodeQL(logger, gitHubVersion, features); - } - const baseTempDir = path14.resolve(tempDir, "combined-sarif"); - fs15.mkdirSync(baseTempDir, { recursive: true }); - const outputDirectory = fs15.mkdtempSync(path14.resolve(baseTempDir, "output-")); - const outputFile = path14.resolve(outputDirectory, "combined-sarif.sarif"); + const codeQL = await getCodeQL2(); + const baseTempDir = path11.resolve(tempDir, "combined-sarif"); + fs12.mkdirSync(baseTempDir, { recursive: true }); + const outputDirectory = fs12.mkdtempSync(path11.resolve(baseTempDir, "output-")); + const outputFile = path11.resolve(outputDirectory, "combined-sarif.sarif"); await codeQL.mergeResults(sarifFiles, outputFile, { mergeRunsFromEqualCategory: true }); - return JSON.parse(fs15.readFileSync(outputFile, "utf8")); + return JSON.parse(fs12.readFileSync(outputFile, "utf8")); } function populateRunAutomationDetails(sarif, category, analysis_key, environment) { const automationID = getAutomationID2(category, analysis_key, environment); @@ -112420,7 +111316,7 @@ function getAutomationID2(category, analysis_key, environment) { async function uploadPayload(payload, repositoryNwo, logger, analysis) { logger.info("Uploading results"); if (shouldSkipSarifUpload()) { - const payloadSaveFile = path14.join( + const payloadSaveFile = path11.join( getTemporaryDirectory(), `payload-${analysis.kind}.json` ); @@ -112428,7 +111324,7 @@ async function uploadPayload(payload, repositoryNwo, logger, analysis) { `SARIF upload disabled by an environment variable. 
Saving to ${payloadSaveFile}` ); logger.info(`Payload: ${JSON.stringify(payload, null, 2)}`); - fs15.writeFileSync(payloadSaveFile, JSON.stringify(payload, null, 2)); + fs12.writeFileSync(payloadSaveFile, JSON.stringify(payload, null, 2)); return "dummy-sarif-id"; } const client = getApiClient(); @@ -112462,12 +111358,12 @@ async function uploadPayload(payload, repositoryNwo, logger, analysis) { function findSarifFilesInDir(sarifPath, isSarif) { const sarifFiles = []; const walkSarifFiles = (dir) => { - const entries = fs15.readdirSync(dir, { withFileTypes: true }); + const entries = fs12.readdirSync(dir, { withFileTypes: true }); for (const entry of entries) { if (entry.isFile() && isSarif(entry.name)) { - sarifFiles.push(path14.resolve(dir, entry.name)); + sarifFiles.push(path11.resolve(dir, entry.name)); } else if (entry.isDirectory()) { - walkSarifFiles(path14.resolve(dir, entry.name)); + walkSarifFiles(path11.resolve(dir, entry.name)); } } }; @@ -112475,7 +111371,7 @@ function findSarifFilesInDir(sarifPath, isSarif) { return sarifFiles; } async function getGroupedSarifFilePaths(logger, sarifPath) { - const stats = fs15.statSync(sarifPath, { throwIfNoEntry: false }); + const stats = fs12.statSync(sarifPath, { throwIfNoEntry: false }); if (stats === void 0) { throw new ConfigurationError(`Path does not exist: ${sarifPath}`); } @@ -112483,7 +111379,7 @@ async function getGroupedSarifFilePaths(logger, sarifPath) { if (stats.isDirectory()) { let unassignedSarifFiles = findSarifFilesInDir( sarifPath, - (name) => path14.extname(name) === ".sarif" + (name) => path11.extname(name) === ".sarif" ); logger.debug( `Found the following .sarif files in ${sarifPath}: ${unassignedSarifFiles.join(", ")}` @@ -112540,7 +111436,7 @@ function countResultsInSarif(sarif) { } function readSarifFile(sarifFilePath) { try { - return JSON.parse(fs15.readFileSync(sarifFilePath, "utf8")); + return JSON.parse(fs12.readFileSync(sarifFilePath, "utf8")); } catch (e) { throw new InvalidSarifUploadError( `Invalid SARIF. 
JSON syntax error: ${getErrorMessage(e)}` @@ -112609,7 +111505,7 @@ function buildPayload(commitOid, ref, analysisKey, analysisName, zippedSarif, wo payloadObj.base_sha = mergeBaseCommitOid; } else if (process.env.GITHUB_EVENT_PATH) { const githubEvent = JSON.parse( - fs15.readFileSync(process.env.GITHUB_EVENT_PATH, "utf8") + fs12.readFileSync(process.env.GITHUB_EVENT_PATH, "utf8") ); payloadObj.base_ref = `refs/heads/${githubEvent.pull_request.base.ref}`; payloadObj.base_sha = githubEvent.pull_request.base.sha; @@ -112617,7 +111513,7 @@ function buildPayload(commitOid, ref, analysisKey, analysisName, zippedSarif, wo } return payloadObj; } -async function postProcessSarifFiles(logger, features, checkoutPath, sarifPaths, category, analysis) { +async function postProcessSarifFiles(logger, features, getCodeQL2, tempPath, checkoutPath, sarifPaths, category, analysis) { logger.info(`Post-processing sarif files: ${JSON.stringify(sarifPaths)}`); const gitHubVersion = await getGitHubVersion(); let sarif; @@ -112631,7 +111527,9 @@ async function postProcessSarifFiles(logger, features, checkoutPath, sarifPaths, sarifPaths, gitHubVersion, features, - logger + logger, + getCodeQL2, + tempPath ); } else { const sarifPath = sarifPaths[0]; @@ -112713,19 +111611,19 @@ async function uploadPostProcessedFiles(logger, checkoutPath, uploadTarget, post }; } function dumpSarifFile(sarifPayload, outputDir, logger, uploadTarget) { - if (!fs15.existsSync(outputDir)) { - fs15.mkdirSync(outputDir, { recursive: true }); - } else if (!fs15.lstatSync(outputDir).isDirectory()) { + if (!fs12.existsSync(outputDir)) { + fs12.mkdirSync(outputDir, { recursive: true }); + } else if (!fs12.lstatSync(outputDir).isDirectory()) { throw new ConfigurationError( `The path that processed SARIF files should be written to exists, but is not a directory: ${outputDir}` ); } - const outputFile = path14.resolve( + const outputFile = path11.resolve( outputDir, `upload${uploadTarget.sarifExtension}` ); logger.info(`Writing processed SARIF file to ${outputFile}`); - fs15.writeFileSync(outputFile, sarifPayload); + fs12.writeFileSync(outputFile, sarifPayload); } var STATUS_CHECK_FREQUENCY_MILLISECONDS = 5 * 1e3; var STATUS_CHECK_TIMEOUT_MILLISECONDS = 2 * 60 * 1e3; @@ -112868,7 +111766,7 @@ function filterAlertsByDiffRange(logger, sarif) { if (!locationUri || locationStartLine === void 0) { return false; } - const locationPath = path14.join(checkoutPath, locationUri).replaceAll(path14.sep, "/"); + const locationPath = path11.join(checkoutPath, locationUri).replaceAll(path11.sep, "/"); return diffRanges.some( (range) => range.path === locationPath && (range.startLine <= locationStartLine && range.endLine >= locationStartLine || range.startLine === 0 && range.endLine === 0) ); @@ -112880,7 +111778,7 @@ function filterAlertsByDiffRange(logger, sarif) { } // src/upload-sarif.ts -async function postProcessAndUploadSarif(logger, features, uploadKind, checkoutPath, sarifPath, category, postProcessedOutputPath) { +async function postProcessAndUploadSarif(logger, tempPath, features, getCodeQL2, uploadKind, checkoutPath, sarifPath, category, postProcessedOutputPath) { const sarifGroups = await getGroupedSarifFilePaths( logger, sarifPath @@ -112893,6 +111791,8 @@ async function postProcessAndUploadSarif(logger, features, uploadKind, checkoutP const postProcessingResults = await postProcessSarifFiles( logger, features, + getCodeQL2, + tempPath, checkoutPath, sarifFiles, category, @@ -112961,7 +111861,7 @@ function doesGoExtractionOutputExist(config) { "go" 
/* go */ ); const trapDirectory = import_path4.default.join(golangDbDirectory, "trap", "go" /* go */); - return fs16.existsSync(trapDirectory) && fs16.readdirSync(trapDirectory).some( + return fs13.existsSync(trapDirectory) && fs13.readdirSync(trapDirectory).some( (fileName) => [ ".trap", ".trap.gz", @@ -113124,7 +112024,9 @@ async function run(startedAt2) { const category = getOptionalInput("category"); uploadResults = await postProcessAndUploadSarif( logger, + config.tempDir, features, + async () => codeql, uploadKind, checkoutPath, outputDir, @@ -113155,9 +112057,9 @@ async function run(startedAt2) { features, logger ); - const trapCacheUploadStartTime = import_perf_hooks3.performance.now(); + const trapCacheUploadStartTime = import_perf_hooks2.performance.now(); didUploadTrapCaches = await uploadTrapCaches(codeql, config, logger); - trapCacheUploadTime = import_perf_hooks3.performance.now() - trapCacheUploadStartTime; + trapCacheUploadTime = import_perf_hooks2.performance.now() - trapCacheUploadStartTime; trapCacheCleanupTelemetry = await cleanupTrapCaches( config, features, diff --git a/lib/init-action-post.js b/lib/init-action-post.js index c5e72a6c58..209eb22fa3 100644 --- a/lib/init-action-post.js +++ b/lib/init-action-post.js @@ -108,11 +108,11 @@ var require_command = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.issueCommand = issueCommand; exports2.issue = issue; - var os4 = __importStar2(require("os")); + var os3 = __importStar2(require("os")); var utils_1 = require_utils(); function issueCommand(command, properties, message) { const cmd = new Command(command, properties, message); - process.stdout.write(cmd.toString() + os4.EOL); + process.stdout.write(cmd.toString() + os3.EOL); } function issue(name, message = "") { issueCommand(name, {}, message); @@ -204,18 +204,18 @@ var require_file_command = __commonJS({ exports2.issueFileCommand = issueFileCommand; exports2.prepareKeyValueMessage = prepareKeyValueMessage; var crypto2 = __importStar2(require("crypto")); - var fs18 = __importStar2(require("fs")); - var os4 = __importStar2(require("os")); + var fs15 = __importStar2(require("fs")); + var os3 = __importStar2(require("os")); var utils_1 = require_utils(); function issueFileCommand(command, message) { const filePath = process.env[`GITHUB_${command}`]; if (!filePath) { throw new Error(`Unable to find environment variable for file command ${command}`); } - if (!fs18.existsSync(filePath)) { + if (!fs15.existsSync(filePath)) { throw new Error(`Missing file at path: ${filePath}`); } - fs18.appendFileSync(filePath, `${(0, utils_1.toCommandValue)(message)}${os4.EOL}`, { + fs15.appendFileSync(filePath, `${(0, utils_1.toCommandValue)(message)}${os3.EOL}`, { encoding: "utf8" }); } @@ -228,7 +228,7 @@ var require_file_command = __commonJS({ if (convertedValue.includes(delimiter)) { throw new Error(`Unexpected input: value should not contain the delimiter "${delimiter}"`); } - return `${key}<<${delimiter}${os4.EOL}${convertedValue}${os4.EOL}${delimiter}`; + return `${key}<<${delimiter}${os3.EOL}${convertedValue}${os3.EOL}${delimiter}`; } } }); @@ -1228,7 +1228,7 @@ var require_util = __commonJS({ var assert = require("node:assert"); var { kDestroyed, kBodyUsed, kListeners, kBody } = require_symbols(); var { IncomingMessage } = require("node:http"); - var stream2 = require("node:stream"); + var stream = require("node:stream"); var net = require("node:net"); var { Blob: Blob2 } = require("node:buffer"); var nodeUtil = require("node:util"); @@ 
-1337,14 +1337,14 @@ var require_util = __commonJS({ } const port = url2.port != null ? url2.port : url2.protocol === "https:" ? 443 : 80; let origin = url2.origin != null ? url2.origin : `${url2.protocol || ""}//${url2.hostname || ""}:${port}`; - let path17 = url2.path != null ? url2.path : `${url2.pathname || ""}${url2.search || ""}`; + let path14 = url2.path != null ? url2.path : `${url2.pathname || ""}${url2.search || ""}`; if (origin[origin.length - 1] === "/") { origin = origin.slice(0, origin.length - 1); } - if (path17 && path17[0] !== "/") { - path17 = `/${path17}`; + if (path14 && path14[0] !== "/") { + path14 = `/${path14}`; } - return new URL(`${origin}${path17}`); + return new URL(`${origin}${path14}`); } if (!isHttpOrHttpsPrefixed(url2.origin || url2.protocol)) { throw new InvalidArgumentError("Invalid URL protocol: the URL must start with `http:` or `https:`."); @@ -1402,24 +1402,24 @@ var require_util = __commonJS({ return null; } function isDestroyed(body) { - return body && !!(body.destroyed || body[kDestroyed] || stream2.isDestroyed?.(body)); + return body && !!(body.destroyed || body[kDestroyed] || stream.isDestroyed?.(body)); } - function destroy(stream3, err) { - if (stream3 == null || !isStream(stream3) || isDestroyed(stream3)) { + function destroy(stream2, err) { + if (stream2 == null || !isStream(stream2) || isDestroyed(stream2)) { return; } - if (typeof stream3.destroy === "function") { - if (Object.getPrototypeOf(stream3).constructor === IncomingMessage) { - stream3.socket = null; + if (typeof stream2.destroy === "function") { + if (Object.getPrototypeOf(stream2).constructor === IncomingMessage) { + stream2.socket = null; } - stream3.destroy(err); + stream2.destroy(err); } else if (err) { queueMicrotask(() => { - stream3.emit("error", err); + stream2.emit("error", err); }); } - if (stream3.destroyed !== true) { - stream3[kDestroyed] = true; + if (stream2.destroyed !== true) { + stream2[kDestroyed] = true; } } var KEEPALIVE_TIMEOUT_EXPR = /timeout=(\d+)/; @@ -1518,13 +1518,13 @@ var require_util = __commonJS({ } } function isDisturbed(body) { - return !!(body && (stream2.isDisturbed(body) || body[kBodyUsed])); + return !!(body && (stream.isDisturbed(body) || body[kBodyUsed])); } function isErrored(body) { - return !!(body && stream2.isErrored(body)); + return !!(body && stream.isErrored(body)); } function isReadable(body) { - return !!(body && stream2.isReadable(body)); + return !!(body && stream.isReadable(body)); } function getSocketInfo(socket) { return { @@ -1795,39 +1795,39 @@ var require_diagnostics = __commonJS({ }); diagnosticsChannel.channel("undici:client:sendHeaders").subscribe((evt) => { const { - request: { method, path: path17, origin } + request: { method, path: path14, origin } } = evt; - debuglog("sending request to %s %s/%s", method, origin, path17); + debuglog("sending request to %s %s/%s", method, origin, path14); }); diagnosticsChannel.channel("undici:request:headers").subscribe((evt) => { const { - request: { method, path: path17, origin }, + request: { method, path: path14, origin }, response: { statusCode } } = evt; debuglog( "received response to %s %s/%s - HTTP %d", method, origin, - path17, + path14, statusCode ); }); diagnosticsChannel.channel("undici:request:trailers").subscribe((evt) => { const { - request: { method, path: path17, origin } + request: { method, path: path14, origin } } = evt; - debuglog("trailers received from %s %s/%s", method, origin, path17); + debuglog("trailers received from %s %s/%s", method, origin, path14); }); 
[Regenerated bundler output in the compiled `lib/*.js` bundles. These hunks carry no behavioural change: rebuilding the bundle reassigns the numeric suffixes the bundler uses to keep flattened top-level names from colliding, so identifiers are renamed wholesale throughout the vendored modules (the two bundled copies of undici under `@actions/http-client` and `@actions/github`, plus `@actions/core`, `@actions/io`, the tool runner, and `jsonschema/lib/helpers.js`). Representative renames visible in the hunks: `path17` becomes `path14`, `stream2` becomes `stream` (and `stream3` becomes `stream2`), `Readable2` becomes `Readable`, `performance3` becomes `performance2`, `os4` becomes `os3`, `fs18` becomes `fs15`, and `randomFillSync2` becomes `randomFillSync`.]
this.path : this.path.concat([propertyName]); var id = schema2.$id || schema2.id; var base = uri.resolve(this.base, id || ""); - var ctx = new SchemaContext(schema2, this.options, path17, base, Object.create(this.schemas)); + var ctx = new SchemaContext(schema2, this.options, path14, base, Object.create(this.schemas)); if (id && !ctx.schemas[base]) { ctx.schemas[base] = schema2; } @@ -48836,7 +48836,7 @@ var require_internal_path_helper = __commonJS({ exports2.hasRoot = hasRoot; exports2.normalizeSeparators = normalizeSeparators; exports2.safeTrimTrailingSeparator = safeTrimTrailingSeparator; - var path17 = __importStar2(require("path")); + var path14 = __importStar2(require("path")); var assert_1 = __importDefault2(require("assert")); var IS_WINDOWS = process.platform === "win32"; function dirname4(p) { @@ -48844,7 +48844,7 @@ var require_internal_path_helper = __commonJS({ if (IS_WINDOWS && /^\\\\[^\\]+(\\[^\\]+)?$/.test(p)) { return p; } - let result = path17.dirname(p); + let result = path14.dirname(p); if (IS_WINDOWS && /^\\\\[^\\]+\\[^\\]+\\$/.test(result)) { result = safeTrimTrailingSeparator(result); } @@ -48881,7 +48881,7 @@ var require_internal_path_helper = __commonJS({ (0, assert_1.default)(hasAbsoluteRoot(root), `ensureAbsoluteRoot parameter 'root' must have an absolute root`); if (root.endsWith("/") || IS_WINDOWS && root.endsWith("\\")) { } else { - root += path17.sep; + root += path14.sep; } return root + itemPath; } @@ -48915,10 +48915,10 @@ var require_internal_path_helper = __commonJS({ return ""; } p = normalizeSeparators(p); - if (!p.endsWith(path17.sep)) { + if (!p.endsWith(path14.sep)) { return p; } - if (p === path17.sep) { + if (p === path14.sep) { return p; } if (IS_WINDOWS && /^[A-Z]:\\$/i.test(p)) { @@ -49263,7 +49263,7 @@ var require_minimatch = __commonJS({ "node_modules/minimatch/minimatch.js"(exports2, module2) { module2.exports = minimatch; minimatch.Minimatch = Minimatch; - var path17 = (function() { + var path14 = (function() { try { return require("path"); } catch (e) { @@ -49271,7 +49271,7 @@ var require_minimatch = __commonJS({ })() || { sep: "/" }; - minimatch.sep = path17.sep; + minimatch.sep = path14.sep; var GLOBSTAR = minimatch.GLOBSTAR = Minimatch.GLOBSTAR = {}; var expand2 = require_brace_expansion(); var plTypes = { @@ -49360,8 +49360,8 @@ var require_minimatch = __commonJS({ assertValidPattern(pattern); if (!options) options = {}; pattern = pattern.trim(); - if (!options.allowWindowsEscape && path17.sep !== "/") { - pattern = pattern.split(path17.sep).join("/"); + if (!options.allowWindowsEscape && path14.sep !== "/") { + pattern = pattern.split(path14.sep).join("/"); } this.options = options; this.set = []; @@ -49730,8 +49730,8 @@ var require_minimatch = __commonJS({ if (this.empty) return f === ""; if (f === "/" && partial) return true; var options = this.options; - if (path17.sep !== "/") { - f = f.split(path17.sep).join("/"); + if (path14.sep !== "/") { + f = f.split(path14.sep).join("/"); } f = f.split(slashSplit); this.debug(this.pattern, "split", f); @@ -49877,7 +49877,7 @@ var require_internal_path = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.Path = void 0; - var path17 = __importStar2(require("path")); + var path14 = __importStar2(require("path")); var pathHelper = __importStar2(require_internal_path_helper()); var assert_1 = __importDefault2(require("assert")); var IS_WINDOWS = process.platform === "win32"; @@ -49892,12 +49892,12 @@ var require_internal_path = __commonJS({ (0, 
assert_1.default)(itemPath, `Parameter 'itemPath' must not be empty`); itemPath = pathHelper.safeTrimTrailingSeparator(itemPath); if (!pathHelper.hasRoot(itemPath)) { - this.segments = itemPath.split(path17.sep); + this.segments = itemPath.split(path14.sep); } else { let remaining = itemPath; let dir = pathHelper.dirname(remaining); while (dir !== remaining) { - const basename2 = path17.basename(remaining); + const basename2 = path14.basename(remaining); this.segments.unshift(basename2); remaining = dir; dir = pathHelper.dirname(remaining); @@ -49915,7 +49915,7 @@ var require_internal_path = __commonJS({ (0, assert_1.default)(segment === pathHelper.dirname(segment), `Parameter 'itemPath' root segment contains information for multiple segments`); this.segments.push(segment); } else { - (0, assert_1.default)(!segment.includes(path17.sep), `Parameter 'itemPath' contains unexpected path separators`); + (0, assert_1.default)(!segment.includes(path14.sep), `Parameter 'itemPath' contains unexpected path separators`); this.segments.push(segment); } } @@ -49926,12 +49926,12 @@ var require_internal_path = __commonJS({ */ toString() { let result = this.segments[0]; - let skipSlash = result.endsWith(path17.sep) || IS_WINDOWS && /^[A-Z]:$/i.test(result); + let skipSlash = result.endsWith(path14.sep) || IS_WINDOWS && /^[A-Z]:$/i.test(result); for (let i = 1; i < this.segments.length; i++) { if (skipSlash) { skipSlash = false; } else { - result += path17.sep; + result += path14.sep; } result += this.segments[i]; } @@ -49988,8 +49988,8 @@ var require_internal_pattern = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.Pattern = void 0; - var os4 = __importStar2(require("os")); - var path17 = __importStar2(require("path")); + var os3 = __importStar2(require("os")); + var path14 = __importStar2(require("path")); var pathHelper = __importStar2(require_internal_path_helper()); var assert_1 = __importDefault2(require("assert")); var minimatch_1 = require_minimatch(); @@ -50018,7 +50018,7 @@ var require_internal_pattern = __commonJS({ } pattern = _Pattern.fixupPattern(pattern, homedir); this.segments = new internal_path_1.Path(pattern).segments; - this.trailingSeparator = pathHelper.normalizeSeparators(pattern).endsWith(path17.sep); + this.trailingSeparator = pathHelper.normalizeSeparators(pattern).endsWith(path14.sep); pattern = pathHelper.safeTrimTrailingSeparator(pattern); let foundGlob = false; const searchSegments = this.segments.map((x) => _Pattern.getLiteral(x)).filter((x) => !foundGlob && !(foundGlob = x === "")); @@ -50042,8 +50042,8 @@ var require_internal_pattern = __commonJS({ match(itemPath) { if (this.segments[this.segments.length - 1] === "**") { itemPath = pathHelper.normalizeSeparators(itemPath); - if (!itemPath.endsWith(path17.sep) && this.isImplicitPattern === false) { - itemPath = `${itemPath}${path17.sep}`; + if (!itemPath.endsWith(path14.sep) && this.isImplicitPattern === false) { + itemPath = `${itemPath}${path14.sep}`; } } else { itemPath = pathHelper.safeTrimTrailingSeparator(itemPath); @@ -50078,10 +50078,10 @@ var require_internal_pattern = __commonJS({ (0, assert_1.default)(literalSegments.every((x, i) => (x !== "." || i === 0) && x !== ".."), `Invalid pattern '${pattern}'. Relative pathing '.' and '..' is not allowed.`); (0, assert_1.default)(!pathHelper.hasRoot(pattern) || literalSegments[0], `Invalid pattern '${pattern}'. Root segment must not contain globs.`); pattern = pathHelper.normalizeSeparators(pattern); - if (pattern === "." 
|| pattern.startsWith(`.${path17.sep}`)) { + if (pattern === "." || pattern.startsWith(`.${path14.sep}`)) { pattern = _Pattern.globEscape(process.cwd()) + pattern.substr(1); - } else if (pattern === "~" || pattern.startsWith(`~${path17.sep}`)) { - homedir = homedir || os4.homedir(); + } else if (pattern === "~" || pattern.startsWith(`~${path14.sep}`)) { + homedir = homedir || os3.homedir(); (0, assert_1.default)(homedir, "Unable to determine HOME directory"); (0, assert_1.default)(pathHelper.hasAbsoluteRoot(homedir), `Expected HOME directory to be a rooted path. Actual '${homedir}'`); pattern = _Pattern.globEscape(homedir) + pattern.substr(1); @@ -50164,8 +50164,8 @@ var require_internal_search_state = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.SearchState = void 0; var SearchState = class { - constructor(path17, level) { - this.path = path17; + constructor(path14, level) { + this.path = path14; this.level = level; } }; @@ -50307,9 +50307,9 @@ var require_internal_globber = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.DefaultGlobber = void 0; var core17 = __importStar2(require_core()); - var fs18 = __importStar2(require("fs")); + var fs15 = __importStar2(require("fs")); var globOptionsHelper = __importStar2(require_internal_glob_options_helper()); - var path17 = __importStar2(require("path")); + var path14 = __importStar2(require("path")); var patternHelper = __importStar2(require_internal_pattern_helper()); var internal_match_kind_1 = require_internal_match_kind(); var internal_pattern_1 = require_internal_pattern(); @@ -50361,7 +50361,7 @@ var require_internal_globber = __commonJS({ for (const searchPath of patternHelper.getSearchPaths(patterns)) { core17.debug(`Search path '${searchPath}'`); try { - yield __await2(fs18.promises.lstat(searchPath)); + yield __await2(fs15.promises.lstat(searchPath)); } catch (err) { if (err.code === "ENOENT") { continue; @@ -50385,7 +50385,7 @@ var require_internal_globber = __commonJS({ if (!stats) { continue; } - if (options.excludeHiddenFiles && path17.basename(item.path).match(/^\./)) { + if (options.excludeHiddenFiles && path14.basename(item.path).match(/^\./)) { continue; } if (stats.isDirectory()) { @@ -50395,7 +50395,7 @@ var require_internal_globber = __commonJS({ continue; } const childLevel = item.level + 1; - const childItems = (yield __await2(fs18.promises.readdir(item.path))).map((x) => new internal_search_state_1.SearchState(path17.join(item.path, x), childLevel)); + const childItems = (yield __await2(fs15.promises.readdir(item.path))).map((x) => new internal_search_state_1.SearchState(path14.join(item.path, x), childLevel)); stack.push(...childItems.reverse()); } else if (match & internal_match_kind_1.MatchKind.File) { yield yield __await2(item.path); @@ -50430,7 +50430,7 @@ var require_internal_globber = __commonJS({ let stats; if (options.followSymbolicLinks) { try { - stats = yield fs18.promises.stat(item.path); + stats = yield fs15.promises.stat(item.path); } catch (err) { if (err.code === "ENOENT") { if (options.omitBrokenSymbolicLinks) { @@ -50442,10 +50442,10 @@ var require_internal_globber = __commonJS({ throw err; } } else { - stats = yield fs18.promises.lstat(item.path); + stats = yield fs15.promises.lstat(item.path); } if (stats.isDirectory() && options.followSymbolicLinks) { - const realPath = yield fs18.promises.realpath(item.path); + const realPath = yield fs15.promises.realpath(item.path); while (traversalChain.length >= item.level) { 
traversalChain.pop(); } @@ -50554,10 +50554,10 @@ var require_internal_hash_files = __commonJS({ exports2.hashFiles = hashFiles2; var crypto2 = __importStar2(require("crypto")); var core17 = __importStar2(require_core()); - var fs18 = __importStar2(require("fs")); - var stream2 = __importStar2(require("stream")); + var fs15 = __importStar2(require("fs")); + var stream = __importStar2(require("stream")); var util = __importStar2(require("util")); - var path17 = __importStar2(require("path")); + var path14 = __importStar2(require("path")); function hashFiles2(globber_1, currentWorkspace_1) { return __awaiter2(this, arguments, void 0, function* (globber, currentWorkspace, verbose = false) { var _a, e_1, _b, _c; @@ -50573,17 +50573,17 @@ var require_internal_hash_files = __commonJS({ _e = false; const file = _c; writeDelegate(file); - if (!file.startsWith(`${githubWorkspace}${path17.sep}`)) { + if (!file.startsWith(`${githubWorkspace}${path14.sep}`)) { writeDelegate(`Ignore '${file}' since it is not under GITHUB_WORKSPACE.`); continue; } - if (fs18.statSync(file).isDirectory()) { + if (fs15.statSync(file).isDirectory()) { writeDelegate(`Skip directory '${file}'.`); continue; } const hash2 = crypto2.createHash("sha256"); - const pipeline = util.promisify(stream2.pipeline); - yield pipeline(fs18.createReadStream(file), hash2); + const pipeline = util.promisify(stream.pipeline); + yield pipeline(fs15.createReadStream(file), hash2); result.write(hash2.digest()); count++; if (!hasMatch) { @@ -51958,8 +51958,8 @@ var require_cacheUtils = __commonJS({ var glob2 = __importStar2(require_glob()); var io7 = __importStar2(require_io()); var crypto2 = __importStar2(require("crypto")); - var fs18 = __importStar2(require("fs")); - var path17 = __importStar2(require("path")); + var fs15 = __importStar2(require("fs")); + var path14 = __importStar2(require("path")); var semver9 = __importStar2(require_semver3()); var util = __importStar2(require("util")); var constants_1 = require_constants12(); @@ -51979,15 +51979,15 @@ var require_cacheUtils = __commonJS({ baseLocation = "/home"; } } - tempDirectory = path17.join(baseLocation, "actions", "temp"); + tempDirectory = path14.join(baseLocation, "actions", "temp"); } - const dest = path17.join(tempDirectory, crypto2.randomUUID()); + const dest = path14.join(tempDirectory, crypto2.randomUUID()); yield io7.mkdirP(dest); return dest; }); } function getArchiveFileSizeInBytes(filePath) { - return fs18.statSync(filePath).size; + return fs15.statSync(filePath).size; } function resolvePaths(patterns) { return __awaiter2(this, void 0, void 0, function* () { @@ -52003,7 +52003,7 @@ var require_cacheUtils = __commonJS({ _c = _g.value; _e = false; const file = _c; - const relativeFile = path17.relative(workspace, file).replace(new RegExp(`\\${path17.sep}`, "g"), "/"); + const relativeFile = path14.relative(workspace, file).replace(new RegExp(`\\${path14.sep}`, "g"), "/"); core17.debug(`Matched: ${relativeFile}`); if (relativeFile === "") { paths.push("."); @@ -52025,7 +52025,7 @@ var require_cacheUtils = __commonJS({ } function unlinkFile(filePath) { return __awaiter2(this, void 0, void 0, function* () { - return util.promisify(fs18.unlink)(filePath); + return util.promisify(fs15.unlink)(filePath); }); } function getVersion(app_1) { @@ -52067,7 +52067,7 @@ var require_cacheUtils = __commonJS({ } function getGnuTarPathOnWindows() { return __awaiter2(this, void 0, void 0, function* () { - if (fs18.existsSync(constants_1.GnuTarPathOnWindows)) { + if 
(fs15.existsSync(constants_1.GnuTarPathOnWindows)) { return constants_1.GnuTarPathOnWindows; } const versionOutput = yield getVersion("tar"); @@ -52530,13 +52530,13 @@ function __disposeResources(env) { } return next(); } -function __rewriteRelativeImportExtension(path17, preserveJsx) { - if (typeof path17 === "string" && /^\.\.?\//.test(path17)) { - return path17.replace(/\.(tsx)$|((?:\.d)?)((?:\.[^./]+?)?)\.([cm]?)ts$/i, function(m, tsx, d, ext, cm) { +function __rewriteRelativeImportExtension(path14, preserveJsx) { + if (typeof path14 === "string" && /^\.\.?\//.test(path14)) { + return path14.replace(/\.(tsx)$|((?:\.d)?)((?:\.[^./]+?)?)\.([cm]?)ts$/i, function(m, tsx, d, ext, cm) { return tsx ? preserveJsx ? ".jsx" : ".js" : d && (!ext || !cm) ? m : d + ext + "." + cm.toLowerCase() + "js"; }); } - return path17; + return path14; } var extendStatics, __assign, __createBinding, __setModuleDefault, ownKeys, _SuppressedError, tslib_es6_default; var init_tslib_es6 = __esm({ @@ -53041,8 +53041,8 @@ var require_uuidUtils = __commonJS({ "node_modules/@typespec/ts-http-runtime/dist/commonjs/util/uuidUtils.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.randomUUID = randomUUID2; - function randomUUID2() { + exports2.randomUUID = randomUUID; + function randomUUID() { return crypto.randomUUID(); } } @@ -53606,20 +53606,20 @@ var require_nodeHttpClient = __commonJS({ function isReadableStream(body) { return body && typeof body.pipe === "function"; } - function isStreamComplete(stream2) { - if (stream2.readable === false) { + function isStreamComplete(stream) { + if (stream.readable === false) { return Promise.resolve(); } return new Promise((resolve8) => { const handler2 = () => { resolve8(); - stream2.removeListener("close", handler2); - stream2.removeListener("end", handler2); - stream2.removeListener("error", handler2); + stream.removeListener("close", handler2); + stream.removeListener("end", handler2); + stream.removeListener("error", handler2); }; - stream2.on("close", handler2); - stream2.on("end", handler2); - stream2.on("error", handler2); + stream.on("close", handler2); + stream.on("end", handler2); + stream.on("error", handler2); }); } function isArrayBuffer(body) { @@ -53838,33 +53838,33 @@ var require_nodeHttpClient = __commonJS({ } return headers; } - function getDecodedResponseStream(stream2, headers) { + function getDecodedResponseStream(stream, headers) { const contentEncoding = headers.get("Content-Encoding"); if (contentEncoding === "gzip") { const unzip = node_zlib_1.default.createGunzip(); - stream2.pipe(unzip); + stream.pipe(unzip); return unzip; } else if (contentEncoding === "deflate") { const inflate = node_zlib_1.default.createInflate(); - stream2.pipe(inflate); + stream.pipe(inflate); return inflate; } - return stream2; + return stream; } - function streamToText(stream2) { + function streamToText(stream) { return new Promise((resolve8, reject) => { const buffer = []; - stream2.on("data", (chunk) => { + stream.on("data", (chunk) => { if (Buffer.isBuffer(chunk)) { buffer.push(chunk); } else { buffer.push(Buffer.from(chunk)); } }); - stream2.on("end", () => { + stream.on("end", () => { resolve8(Buffer.concat(buffer).toString("utf8")); }); - stream2.on("error", (e) => { + stream.on("error", (e) => { if (e && e?.name === "AbortError") { reject(e); } else { @@ -54983,7 +54983,7 @@ var require_has_flag = __commonJS({ var require_supports_color = __commonJS({ "node_modules/supports-color/index.js"(exports2, module2) { "use 
strict"; - var os4 = require("os"); + var os3 = require("os"); var tty = require("tty"); var hasFlag = require_has_flag(); var { env } = process; @@ -55031,7 +55031,7 @@ var require_supports_color = __commonJS({ return min; } if (process.platform === "win32") { - const osRelease = os4.release().split("."); + const osRelease = os3.release().split("."); if (Number(osRelease[0]) >= 10 && Number(osRelease[2]) >= 10586) { return Number(osRelease[2]) >= 14931 ? 3 : 2; } @@ -55069,8 +55069,8 @@ var require_supports_color = __commonJS({ } return min; } - function getSupportLevel(stream2) { - const level = supportsColor(stream2, stream2 && stream2.isTTY); + function getSupportLevel(stream) { + const level = supportsColor(stream, stream && stream.isTTY); return translateLevel(level); } module2.exports = { @@ -55301,18 +55301,18 @@ var require_helpers3 = __commonJS({ exports2.req = exports2.json = exports2.toBuffer = void 0; var http = __importStar2(require("http")); var https2 = __importStar2(require("https")); - async function toBuffer(stream2) { + async function toBuffer(stream) { let length = 0; const chunks = []; - for await (const chunk of stream2) { + for await (const chunk of stream) { length += chunk.length; chunks.push(chunk); } return Buffer.concat(chunks, length); } exports2.toBuffer = toBuffer; - async function json2(stream2) { - const buf = await toBuffer(stream2); + async function json2(stream) { + const buf = await toBuffer(stream); const str2 = buf.toString("utf8"); try { return JSON.parse(str2); @@ -56121,12 +56121,12 @@ var require_concat = __commonJS({ webStream.values = streamAsyncIterator.bind(webStream); } } - function ensureNodeStream(stream2) { - if (stream2 instanceof ReadableStream) { - makeAsyncIterable(stream2); - return stream_1.Readable.fromWeb(stream2); + function ensureNodeStream(stream) { + if (stream instanceof ReadableStream) { + makeAsyncIterable(stream); + return stream_1.Readable.fromWeb(stream); } else { - return stream2; + return stream; } } function toStream(source) { @@ -56142,8 +56142,8 @@ var require_concat = __commonJS({ return function() { const streams = sources.map((x) => typeof x === "function" ? x() : x).map(toStream); return stream_1.Readable.from((async function* () { - for (const stream2 of streams) { - for await (const chunk of stream2) { + for (const stream of streams) { + for await (const chunk of stream) { yield chunk; } } @@ -56678,9 +56678,9 @@ var require_sendRequest = __commonJS({ try { const response = await pipeline.sendRequest(httpClient, request2); const headers = response.headers.toJSON(); - const stream2 = response.readableStreamBody ?? response.browserStreamBody; - const parsedBody = options.responseAsStream || stream2 !== void 0 ? void 0 : getResponseBody(response); - const body = stream2 ?? parsedBody; + const stream = response.readableStreamBody ?? response.browserStreamBody; + const parsedBody = options.responseAsStream || stream !== void 0 ? void 0 : getResponseBody(response); + const body = stream ?? parsedBody; if (options?.onResponse) { options.onResponse({ ...response, request: request2, rawHeaders: headers, parsedBody }); } @@ -56950,8 +56950,8 @@ var require_getClient = __commonJS({ } const { allowInsecureConnection, httpClient } = clientOptions; const endpointUrl = clientOptions.endpoint ?? 
endpoint2; - const client = (path17, ...args) => { - const getUrl = (requestOptions) => (0, urlHelpers_js_1.buildRequestUrl)(endpointUrl, path17, args, { allowInsecureConnection, ...requestOptions }); + const client = (path14, ...args) => { + const getUrl = (requestOptions) => (0, urlHelpers_js_1.buildRequestUrl)(endpointUrl, path14, args, { allowInsecureConnection, ...requestOptions }); return { get: (requestOptions = {}) => { return buildOperation("GET", getUrl(requestOptions), pipeline, requestOptions, allowInsecureConnection, httpClient); @@ -57785,7 +57785,7 @@ var require_commonjs4 = __commonJS({ exports2.getRandomIntegerInclusive = getRandomIntegerInclusive; exports2.isError = isError; exports2.isObject = isObject2; - exports2.randomUUID = randomUUID2; + exports2.randomUUID = randomUUID; exports2.uint8ArrayToString = uint8ArrayToString; exports2.stringToUint8Array = stringToUint8Array; var tslib_1 = (init_tslib_es6(), __toCommonJS(tslib_es6_exports)); @@ -57834,7 +57834,7 @@ var require_commonjs4 = __commonJS({ function isObject2(input) { return tspRuntime.isObject(input); } - function randomUUID2() { + function randomUUID() { return tspRuntime.randomUUID(); } exports2.isBrowser = tspRuntime.isBrowser; @@ -57892,7 +57892,7 @@ var require_file3 = __commonJS({ return blob; } } - function createFileFromStream(stream2, name, options = {}) { + function createFileFromStream(stream, name, options = {}) { return { ...unimplementedMethods, type: options.type ?? "", @@ -57901,13 +57901,13 @@ var require_file3 = __commonJS({ size: options.size ?? -1, name, stream: () => { - const s = stream2(); + const s = stream(); if (isNodeReadableStream(s)) { throw new Error("Not supported: a Node stream was provided as input to createFileFromStream."); } return s; }, - [rawContent]: stream2 + [rawContent]: stream }; } function createFile(content, name, options = {}) { @@ -60822,15 +60822,15 @@ var require_urlHelpers2 = __commonJS({ let isAbsolutePath = false; let requestUrl = replaceAll(baseUri, urlReplacements); if (operationSpec.path) { - let path17 = replaceAll(operationSpec.path, urlReplacements); - if (operationSpec.path === "/{nextLink}" && path17.startsWith("/")) { - path17 = path17.substring(1); + let path14 = replaceAll(operationSpec.path, urlReplacements); + if (operationSpec.path === "/{nextLink}" && path14.startsWith("/")) { + path14 = path14.substring(1); } - if (isAbsoluteUrl(path17)) { - requestUrl = path17; + if (isAbsoluteUrl(path14)) { + requestUrl = path14; isAbsolutePath = true; } else { - requestUrl = appendPath(requestUrl, path17); + requestUrl = appendPath(requestUrl, path14); } } const { queryParams, sequenceParams } = calculateQueryParameters(operationSpec, operationArguments, fallbackObject); @@ -60876,9 +60876,9 @@ var require_urlHelpers2 = __commonJS({ } const searchStart = pathToAppend.indexOf("?"); if (searchStart !== -1) { - const path17 = pathToAppend.substring(0, searchStart); + const path14 = pathToAppend.substring(0, searchStart); const search = pathToAppend.substring(searchStart + 1); - newPath = newPath + path17; + newPath = newPath + path14; if (search) { parsedUrl.search = parsedUrl.search ? 
`${parsedUrl.search}&${search}` : search; } @@ -63129,10 +63129,10 @@ var require_utils_common = __commonJS({ var constants_js_1 = require_constants15(); function escapeURLPath(url2) { const urlParsed = new URL(url2); - let path17 = urlParsed.pathname; - path17 = path17 || "/"; - path17 = escape(path17); - urlParsed.pathname = path17; + let path14 = urlParsed.pathname; + path14 = path14 || "/"; + path14 = escape(path14); + urlParsed.pathname = path14; return urlParsed.toString(); } function getProxyUriFromDevConnString(connectionString) { @@ -63217,9 +63217,9 @@ var require_utils_common = __commonJS({ } function appendToURLPath(url2, name) { const urlParsed = new URL(url2); - let path17 = urlParsed.pathname; - path17 = path17 ? path17.endsWith("/") ? `${path17}${name}` : `${path17}/${name}` : name; - urlParsed.pathname = path17; + let path14 = urlParsed.pathname; + path14 = path14 ? path14.endsWith("/") ? `${path14}${name}` : `${path14}/${name}` : name; + urlParsed.pathname = path14; return urlParsed.toString(); } function setURLParameter(url2, name, value) { @@ -64446,9 +64446,9 @@ var require_StorageSharedKeyCredentialPolicy = __commonJS({ * @param request - */ getCanonicalizedResourceString(request2) { - const path17 = (0, utils_common_js_1.getURLPath)(request2.url) || "/"; + const path14 = (0, utils_common_js_1.getURLPath)(request2.url) || "/"; let canonicalizedResourceString = ""; - canonicalizedResourceString += `/${this.factory.accountName}${path17}`; + canonicalizedResourceString += `/${this.factory.accountName}${path14}`; const queries = (0, utils_common_js_1.getURLQueries)(request2.url); const lowercaseQueries = {}; if (queries) { @@ -65187,10 +65187,10 @@ var require_utils_common2 = __commonJS({ var constants_js_1 = require_constants16(); function escapeURLPath(url2) { const urlParsed = new URL(url2); - let path17 = urlParsed.pathname; - path17 = path17 || "/"; - path17 = escape(path17); - urlParsed.pathname = path17; + let path14 = urlParsed.pathname; + path14 = path14 || "/"; + path14 = escape(path14); + urlParsed.pathname = path14; return urlParsed.toString(); } function getProxyUriFromDevConnString(connectionString) { @@ -65275,9 +65275,9 @@ var require_utils_common2 = __commonJS({ } function appendToURLPath(url2, name) { const urlParsed = new URL(url2); - let path17 = urlParsed.pathname; - path17 = path17 ? path17.endsWith("/") ? `${path17}${name}` : `${path17}/${name}` : name; - urlParsed.pathname = path17; + let path14 = urlParsed.pathname; + path14 = path14 ? path14.endsWith("/") ? 
`${path14}${name}` : `${path14}/${name}` : name; + urlParsed.pathname = path14; return urlParsed.toString(); } function setURLParameter(url2, name, value) { @@ -66198,9 +66198,9 @@ var require_StorageSharedKeyCredentialPolicy2 = __commonJS({ * @param request - */ getCanonicalizedResourceString(request2) { - const path17 = (0, utils_common_js_1.getURLPath)(request2.url) || "/"; + const path14 = (0, utils_common_js_1.getURLPath)(request2.url) || "/"; let canonicalizedResourceString = ""; - canonicalizedResourceString += `/${this.factory.accountName}${path17}`; + canonicalizedResourceString += `/${this.factory.accountName}${path14}`; const queries = (0, utils_common_js_1.getURLQueries)(request2.url); const lowercaseQueries = {}; if (queries) { @@ -66830,9 +66830,9 @@ var require_StorageSharedKeyCredentialPolicyV2 = __commonJS({ return canonicalizedHeadersStringToSign; } function getCanonicalizedResourceString(request2) { - const path17 = (0, utils_common_js_1.getURLPath)(request2.url) || "/"; + const path14 = (0, utils_common_js_1.getURLPath)(request2.url) || "/"; let canonicalizedResourceString = ""; - canonicalizedResourceString += `/${options.accountName}${path17}`; + canonicalizedResourceString += `/${options.accountName}${path14}`; const queries = (0, utils_common_js_1.getURLQueries)(request2.url); const lowercaseQueries = {}; if (queries) { @@ -67177,9 +67177,9 @@ var require_StorageSharedKeyCredentialPolicyV22 = __commonJS({ return canonicalizedHeadersStringToSign; } function getCanonicalizedResourceString(request2) { - const path17 = (0, utils_common_js_1.getURLPath)(request2.url) || "/"; + const path14 = (0, utils_common_js_1.getURLPath)(request2.url) || "/"; let canonicalizedResourceString = ""; - canonicalizedResourceString += `/${options.accountName}${path17}`; + canonicalizedResourceString += `/${options.accountName}${path14}`; const queries = (0, utils_common_js_1.getURLQueries)(request2.url); const lowercaseQueries = {}; if (queries) { @@ -83105,8 +83105,8 @@ var require_AvroParser = __commonJS({ * @param length - * @param options - */ - static async readFixedBytes(stream2, length, options = {}) { - const bytes = await stream2.read(length, { abortSignal: options.abortSignal }); + static async readFixedBytes(stream, length, options = {}) { + const bytes = await stream.read(length, { abortSignal: options.abortSignal }); if (bytes.length !== length) { throw new Error("Hit stream end."); } @@ -83118,19 +83118,19 @@ var require_AvroParser = __commonJS({ * @param stream - * @param options - */ - static async readByte(stream2, options = {}) { - const buf = await _AvroParser.readFixedBytes(stream2, 1, options); + static async readByte(stream, options = {}) { + const buf = await _AvroParser.readFixedBytes(stream, 1, options); return buf[0]; } // int and long are stored in variable-length zig-zag coding. 
// variable-length: https://lucene.apache.org/core/3_5_0/fileformats.html#VInt // zig-zag: https://developers.google.com/protocol-buffers/docs/encoding?csw=1#types - static async readZigZagLong(stream2, options = {}) { + static async readZigZagLong(stream, options = {}) { let zigZagEncoded = 0; let significanceInBit = 0; let byte, haveMoreByte, significanceInFloat; do { - byte = await _AvroParser.readByte(stream2, options); + byte = await _AvroParser.readByte(stream, options); haveMoreByte = byte & 128; zigZagEncoded |= (byte & 127) << significanceInBit; significanceInBit += 7; @@ -83139,7 +83139,7 @@ var require_AvroParser = __commonJS({ zigZagEncoded = zigZagEncoded; significanceInFloat = 268435456; do { - byte = await _AvroParser.readByte(stream2, options); + byte = await _AvroParser.readByte(stream, options); zigZagEncoded += (byte & 127) * significanceInFloat; significanceInFloat *= 128; } while (byte & 128); @@ -83151,17 +83151,17 @@ var require_AvroParser = __commonJS({ } return zigZagEncoded >> 1 ^ -(zigZagEncoded & 1); } - static async readLong(stream2, options = {}) { - return _AvroParser.readZigZagLong(stream2, options); + static async readLong(stream, options = {}) { + return _AvroParser.readZigZagLong(stream, options); } - static async readInt(stream2, options = {}) { - return _AvroParser.readZigZagLong(stream2, options); + static async readInt(stream, options = {}) { + return _AvroParser.readZigZagLong(stream, options); } static async readNull() { return null; } - static async readBoolean(stream2, options = {}) { - const b = await _AvroParser.readByte(stream2, options); + static async readBoolean(stream, options = {}) { + const b = await _AvroParser.readByte(stream, options); if (b === 1) { return true; } else if (b === 0) { @@ -83170,53 +83170,53 @@ var require_AvroParser = __commonJS({ throw new Error("Byte was not a boolean."); } } - static async readFloat(stream2, options = {}) { - const u8arr = await _AvroParser.readFixedBytes(stream2, 4, options); + static async readFloat(stream, options = {}) { + const u8arr = await _AvroParser.readFixedBytes(stream, 4, options); const view = new DataView(u8arr.buffer, u8arr.byteOffset, u8arr.byteLength); return view.getFloat32(0, true); } - static async readDouble(stream2, options = {}) { - const u8arr = await _AvroParser.readFixedBytes(stream2, 8, options); + static async readDouble(stream, options = {}) { + const u8arr = await _AvroParser.readFixedBytes(stream, 8, options); const view = new DataView(u8arr.buffer, u8arr.byteOffset, u8arr.byteLength); return view.getFloat64(0, true); } - static async readBytes(stream2, options = {}) { - const size = await _AvroParser.readLong(stream2, options); + static async readBytes(stream, options = {}) { + const size = await _AvroParser.readLong(stream, options); if (size < 0) { throw new Error("Bytes size was negative."); } - return stream2.read(size, { abortSignal: options.abortSignal }); + return stream.read(size, { abortSignal: options.abortSignal }); } - static async readString(stream2, options = {}) { - const u8arr = await _AvroParser.readBytes(stream2, options); + static async readString(stream, options = {}) { + const u8arr = await _AvroParser.readBytes(stream, options); const utf8decoder = new TextDecoder(); return utf8decoder.decode(u8arr); } - static async readMapPair(stream2, readItemMethod, options = {}) { - const key = await _AvroParser.readString(stream2, options); - const value = await readItemMethod(stream2, options); + static async readMapPair(stream, readItemMethod, options = 
{}) { + const key = await _AvroParser.readString(stream, options); + const value = await readItemMethod(stream, options); return { key, value }; } - static async readMap(stream2, readItemMethod, options = {}) { + static async readMap(stream, readItemMethod, options = {}) { const readPairMethod = (s, opts = {}) => { return _AvroParser.readMapPair(s, readItemMethod, opts); }; - const pairs2 = await _AvroParser.readArray(stream2, readPairMethod, options); + const pairs2 = await _AvroParser.readArray(stream, readPairMethod, options); const dict = {}; for (const pair of pairs2) { dict[pair.key] = pair.value; } return dict; } - static async readArray(stream2, readItemMethod, options = {}) { + static async readArray(stream, readItemMethod, options = {}) { const items = []; - for (let count = await _AvroParser.readLong(stream2, options); count !== 0; count = await _AvroParser.readLong(stream2, options)) { + for (let count = await _AvroParser.readLong(stream, options); count !== 0; count = await _AvroParser.readLong(stream, options)) { if (count < 0) { - await _AvroParser.readLong(stream2, options); + await _AvroParser.readLong(stream, options); count = -count; } while (count--) { - const item = await readItemMethod(stream2, options); + const item = await readItemMethod(stream, options); items.push(item); } } @@ -83328,24 +83328,24 @@ var require_AvroParser = __commonJS({ this._primitive = primitive; } // eslint-disable-next-line @typescript-eslint/no-wrapper-object-types - read(stream2, options = {}) { + read(stream, options = {}) { switch (this._primitive) { case AvroPrimitive.NULL: return AvroParser.readNull(); case AvroPrimitive.BOOLEAN: - return AvroParser.readBoolean(stream2, options); + return AvroParser.readBoolean(stream, options); case AvroPrimitive.INT: - return AvroParser.readInt(stream2, options); + return AvroParser.readInt(stream, options); case AvroPrimitive.LONG: - return AvroParser.readLong(stream2, options); + return AvroParser.readLong(stream, options); case AvroPrimitive.FLOAT: - return AvroParser.readFloat(stream2, options); + return AvroParser.readFloat(stream, options); case AvroPrimitive.DOUBLE: - return AvroParser.readDouble(stream2, options); + return AvroParser.readDouble(stream, options); case AvroPrimitive.BYTES: - return AvroParser.readBytes(stream2, options); + return AvroParser.readBytes(stream, options); case AvroPrimitive.STRING: - return AvroParser.readString(stream2, options); + return AvroParser.readString(stream, options); default: throw new Error("Unknown Avro Primitive"); } @@ -83358,8 +83358,8 @@ var require_AvroParser = __commonJS({ this._symbols = symbols; } // eslint-disable-next-line @typescript-eslint/no-wrapper-object-types - async read(stream2, options = {}) { - const value = await AvroParser.readInt(stream2, options); + async read(stream, options = {}) { + const value = await AvroParser.readInt(stream, options); return this._symbols[value]; } }; @@ -83369,9 +83369,9 @@ var require_AvroParser = __commonJS({ super(); this._types = types; } - async read(stream2, options = {}) { - const typeIndex = await AvroParser.readInt(stream2, options); - return this._types[typeIndex].read(stream2, options); + async read(stream, options = {}) { + const typeIndex = await AvroParser.readInt(stream, options); + return this._types[typeIndex].read(stream, options); } }; var AvroMapType = class extends AvroType { @@ -83381,11 +83381,11 @@ var require_AvroParser = __commonJS({ this._itemType = itemType; } // eslint-disable-next-line 
@typescript-eslint/no-wrapper-object-types - read(stream2, options = {}) { + read(stream, options = {}) { const readItemMethod = (s, opts) => { return this._itemType.read(s, opts); }; - return AvroParser.readMap(stream2, readItemMethod, options); + return AvroParser.readMap(stream, readItemMethod, options); } }; var AvroRecordType = class extends AvroType { @@ -83397,12 +83397,12 @@ var require_AvroParser = __commonJS({ this._name = name; } // eslint-disable-next-line @typescript-eslint/no-wrapper-object-types - async read(stream2, options = {}) { + async read(stream, options = {}) { const record = {}; record["$schema"] = this._name; for (const key in this._fields) { if (Object.prototype.hasOwnProperty.call(this._fields, key)) { - record[key] = await this._fields[key].read(stream2, options); + record[key] = await this._fields[key].read(stream, options); } } return record; @@ -85656,18 +85656,18 @@ var require_utils7 = __commonJS({ var node_fs_1 = tslib_1.__importDefault(require("node:fs")); var node_util_1 = tslib_1.__importDefault(require("node:util")); var constants_js_1 = require_constants15(); - async function streamToBuffer(stream2, buffer, offset, end, encoding) { + async function streamToBuffer(stream, buffer, offset, end, encoding) { let pos = 0; const count = end - offset; return new Promise((resolve8, reject) => { const timeout = setTimeout(() => reject(new Error(`The operation cannot be completed in timeout.`)), constants_js_1.REQUEST_TIMEOUT); - stream2.on("readable", () => { + stream.on("readable", () => { if (pos >= count) { clearTimeout(timeout); resolve8(); return; } - let chunk = stream2.read(); + let chunk = stream.read(); if (!chunk) { return; } @@ -85678,25 +85678,25 @@ var require_utils7 = __commonJS({ buffer.fill(chunk.slice(0, chunkLength), offset + pos, offset + pos + chunkLength); pos += chunkLength; }); - stream2.on("end", () => { + stream.on("end", () => { clearTimeout(timeout); if (pos < count) { reject(new Error(`Stream drains before getting enough data needed. 
Data read: ${pos}, data need: ${count}`)); } resolve8(); }); - stream2.on("error", (msg) => { + stream.on("error", (msg) => { clearTimeout(timeout); reject(msg); }); }); } - async function streamToBuffer2(stream2, buffer, encoding) { + async function streamToBuffer2(stream, buffer, encoding) { let pos = 0; const bufferSize = buffer.length; return new Promise((resolve8, reject) => { - stream2.on("readable", () => { - let chunk = stream2.read(); + stream.on("readable", () => { + let chunk = stream.read(); if (!chunk) { return; } @@ -85710,10 +85710,10 @@ var require_utils7 = __commonJS({ buffer.fill(chunk, pos, pos + chunk.length); pos += chunk.length; }); - stream2.on("end", () => { + stream.on("end", () => { resolve8(pos); }); - stream2.on("error", reject); + stream.on("error", reject); }); } async function streamToBuffer3(readableStream, encoding) { @@ -86551,8 +86551,8 @@ var require_Clients = __commonJS({ customerProvidedKey: options.customerProvidedKey, tracingOptions: updatedOptions.tracingOptions }); - const stream2 = response.readableStreamBody; - await (0, utils_js_1.streamToBuffer)(stream2, buffer, off - offset, chunkEnd - offset); + const stream = response.readableStreamBody; + await (0, utils_js_1.streamToBuffer)(stream, buffer, off - offset, chunkEnd - offset); transferProgress += chunkEnd - off; if (options.onProgress) { options.onProgress({ loadedBytes: transferProgress }); @@ -87655,7 +87655,7 @@ var require_Clients = __commonJS({ * @param options - Options to Upload Stream to Block Blob operation. * @returns Response data for the Blob Upload operation. */ - async uploadStream(stream2, bufferSize = constants_js_1.DEFAULT_BLOCK_BUFFER_SIZE_BYTES, maxConcurrency = 5, options = {}) { + async uploadStream(stream, bufferSize = constants_js_1.DEFAULT_BLOCK_BUFFER_SIZE_BYTES, maxConcurrency = 5, options = {}) { if (!options.blobHTTPHeaders) { options.blobHTTPHeaders = {}; } @@ -87668,7 +87668,7 @@ var require_Clients = __commonJS({ let transferProgress = 0; const blockList = []; const scheduler = new storage_common_1.BufferScheduler( - stream2, + stream, bufferSize, maxConcurrency, async (body, length) => { @@ -88834,8 +88834,8 @@ var require_BlobBatch = __commonJS({ if (this.operationCount >= constants_js_1.BATCH_MAX_REQUEST) { throw new RangeError(`Cannot exceed ${constants_js_1.BATCH_MAX_REQUEST} sub requests in a single batch`); } - const path17 = (0, utils_common_js_1.getURLPath)(subRequest.url); - if (!path17 || path17 === "") { + const path14 = (0, utils_common_js_1.getURLPath)(subRequest.url); + if (!path14 || path14 === "") { throw new RangeError(`Invalid url for sub request: '${subRequest.url}'`); } } @@ -88913,8 +88913,8 @@ var require_BlobBatchClient = __commonJS({ pipeline = (0, Pipeline_js_1.newPipeline)(credentialOrPipeline, options); } const storageClientContext = new StorageContextClient_js_1.StorageContextClient(url2, (0, Pipeline_js_1.getCoreClientOptions)(pipeline)); - const path17 = (0, utils_common_js_1.getURLPath)(url2); - if (path17 && path17 !== "/") { + const path14 = (0, utils_common_js_1.getURLPath)(url2); + if (path14 && path14 !== "/") { this.serviceOrContainerContext = storageClientContext.container; } else { this.serviceOrContainerContext = storageClientContext.service; @@ -92201,8 +92201,8 @@ var require_downloadUtils = __commonJS({ var http_client_1 = require_lib(); var storage_blob_1 = require_commonjs15(); var buffer = __importStar2(require("buffer")); - var fs18 = __importStar2(require("fs")); - var stream2 = __importStar2(require("stream")); + 
var fs15 = __importStar2(require("fs")); + var stream = __importStar2(require("stream")); var util = __importStar2(require("util")); var utils = __importStar2(require_cacheUtils()); var constants_1 = require_constants12(); @@ -92210,7 +92210,7 @@ var require_downloadUtils = __commonJS({ var abort_controller_1 = require_dist4(); function pipeResponseToStream(response, output) { return __awaiter2(this, void 0, void 0, function* () { - const pipeline = util.promisify(stream2.pipeline); + const pipeline = util.promisify(stream.pipeline); yield pipeline(response.message, output); }); } @@ -92312,7 +92312,7 @@ var require_downloadUtils = __commonJS({ exports2.DownloadProgress = DownloadProgress; function downloadCacheHttpClient(archiveLocation, archivePath) { return __awaiter2(this, void 0, void 0, function* () { - const writeStream = fs18.createWriteStream(archivePath); + const writeStream = fs15.createWriteStream(archivePath); const httpClient = new http_client_1.HttpClient("actions/cache"); const downloadResponse = yield (0, requestUtils_1.retryHttpClientResponse)("downloadCache", () => __awaiter2(this, void 0, void 0, function* () { return httpClient.get(archiveLocation); @@ -92337,7 +92337,7 @@ var require_downloadUtils = __commonJS({ function downloadCacheHttpClientConcurrent(archiveLocation, archivePath, options) { return __awaiter2(this, void 0, void 0, function* () { var _a; - const archiveDescriptor = yield fs18.promises.open(archivePath, "w"); + const archiveDescriptor = yield fs15.promises.open(archivePath, "w"); const httpClient = new http_client_1.HttpClient("actions/cache", void 0, { socketTimeout: options.timeoutInMs, keepAlive: true @@ -92453,7 +92453,7 @@ var require_downloadUtils = __commonJS({ } else { const maxSegmentSize = Math.min(134217728, buffer.constants.MAX_LENGTH); const downloadProgress = new DownloadProgress(contentLength); - const fd = fs18.openSync(archivePath, "w"); + const fd = fs15.openSync(archivePath, "w"); try { downloadProgress.startDisplayTimer(); const controller = new abort_controller_1.AbortController(); @@ -92471,12 +92471,12 @@ var require_downloadUtils = __commonJS({ controller.abort(); throw new Error("Aborting cache download as the download time exceeded the timeout."); } else if (Buffer.isBuffer(result)) { - fs18.writeFileSync(fd, result); + fs15.writeFileSync(fd, result); } } } finally { downloadProgress.stopDisplayTimer(); - fs18.closeSync(fd); + fs15.closeSync(fd); } } }); @@ -92798,7 +92798,7 @@ var require_cacheHttpClient = __commonJS({ var core17 = __importStar2(require_core()); var http_client_1 = require_lib(); var auth_1 = require_auth(); - var fs18 = __importStar2(require("fs")); + var fs15 = __importStar2(require("fs")); var url_1 = require("url"); var utils = __importStar2(require_cacheUtils()); var uploadUtils_1 = require_uploadUtils(); @@ -92933,7 +92933,7 @@ Other caches with similar key:`); return __awaiter2(this, void 0, void 0, function* () { const fileSize = utils.getArchiveFileSizeInBytes(archivePath); const resourceUrl = getCacheApiUrl(`caches/${cacheId.toString()}`); - const fd = fs18.openSync(archivePath, "r"); + const fd = fs15.openSync(archivePath, "r"); const uploadOptions = (0, options_1.getUploadOptions)(options); const concurrency = utils.assertDefined("uploadConcurrency", uploadOptions.uploadConcurrency); const maxChunkSize = utils.assertDefined("uploadChunkSize", uploadOptions.uploadChunkSize); @@ -92947,7 +92947,7 @@ Other caches with similar key:`); const start = offset; const end = offset + chunkSize - 1; offset 
+= maxChunkSize; - yield uploadChunk(httpClient, resourceUrl, () => fs18.createReadStream(archivePath, { + yield uploadChunk(httpClient, resourceUrl, () => fs15.createReadStream(archivePath, { fd, start, end, @@ -92958,7 +92958,7 @@ Other caches with similar key:`); } }))); } finally { - fs18.closeSync(fd); + fs15.closeSync(fd); } return; }); @@ -96848,7 +96848,7 @@ var require_test_transport = __commonJS({ * The returned promise resolves when the stream is closed. It should * not reject. If it does, code is broken. */ - streamResponses(method, stream2, abort) { + streamResponses(method, stream, abort) { return __awaiter2(this, void 0, void 0, function* () { const messages = []; if (this.data.response === void 0) { @@ -96865,31 +96865,31 @@ var require_test_transport = __commonJS({ try { yield delay2(this.responseDelay, abort)(void 0); } catch (error3) { - stream2.notifyError(error3); + stream.notifyError(error3); return; } if (this.data.response instanceof rpc_error_1.RpcError) { - stream2.notifyError(this.data.response); + stream.notifyError(this.data.response); return; } for (let msg of messages) { - stream2.notifyMessage(msg); + stream.notifyMessage(msg); try { yield delay2(this.betweenResponseDelay, abort)(void 0); } catch (error3) { - stream2.notifyError(error3); + stream.notifyError(error3); return; } } if (this.data.status instanceof rpc_error_1.RpcError) { - stream2.notifyError(this.data.status); + stream.notifyError(this.data.status); return; } if (this.data.trailers instanceof rpc_error_1.RpcError) { - stream2.notifyError(this.data.trailers); + stream.notifyError(this.data.trailers); return; } - stream2.notifyComplete(); + stream.notifyComplete(); }); } // Creates a promise for response status from the mock data. @@ -98223,7 +98223,7 @@ var require_tar = __commonJS({ var exec_1 = require_exec(); var io7 = __importStar2(require_io()); var fs_1 = require("fs"); - var path17 = __importStar2(require("path")); + var path14 = __importStar2(require("path")); var utils = __importStar2(require_cacheUtils()); var constants_1 = require_constants12(); var IS_WINDOWS = process.platform === "win32"; @@ -98269,13 +98269,13 @@ var require_tar = __commonJS({ const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD && compressionMethod !== constants_1.CompressionMethod.Gzip && IS_WINDOWS; switch (type2) { case "create": - args.push("--posix", "-cf", BSD_TAR_ZSTD ? tarFile : cacheFileName.replace(new RegExp(`\\${path17.sep}`, "g"), "/"), "--exclude", BSD_TAR_ZSTD ? tarFile : cacheFileName.replace(new RegExp(`\\${path17.sep}`, "g"), "/"), "-P", "-C", workingDirectory.replace(new RegExp(`\\${path17.sep}`, "g"), "/"), "--files-from", constants_1.ManifestFilename); + args.push("--posix", "-cf", BSD_TAR_ZSTD ? tarFile : cacheFileName.replace(new RegExp(`\\${path14.sep}`, "g"), "/"), "--exclude", BSD_TAR_ZSTD ? tarFile : cacheFileName.replace(new RegExp(`\\${path14.sep}`, "g"), "/"), "-P", "-C", workingDirectory.replace(new RegExp(`\\${path14.sep}`, "g"), "/"), "--files-from", constants_1.ManifestFilename); break; case "extract": - args.push("-xf", BSD_TAR_ZSTD ? tarFile : archivePath.replace(new RegExp(`\\${path17.sep}`, "g"), "/"), "-P", "-C", workingDirectory.replace(new RegExp(`\\${path17.sep}`, "g"), "/")); + args.push("-xf", BSD_TAR_ZSTD ? tarFile : archivePath.replace(new RegExp(`\\${path14.sep}`, "g"), "/"), "-P", "-C", workingDirectory.replace(new RegExp(`\\${path14.sep}`, "g"), "/")); break; case "list": - args.push("-tf", BSD_TAR_ZSTD ? 
tarFile : archivePath.replace(new RegExp(`\\${path17.sep}`, "g"), "/"), "-P"); + args.push("-tf", BSD_TAR_ZSTD ? tarFile : archivePath.replace(new RegExp(`\\${path14.sep}`, "g"), "/"), "-P"); break; } if (tarPath.type === constants_1.ArchiveToolType.GNU) { @@ -98321,7 +98321,7 @@ var require_tar = __commonJS({ return BSD_TAR_ZSTD ? [ "zstd -d --long=30 --force -o", constants_1.TarFilename, - archivePath.replace(new RegExp(`\\${path17.sep}`, "g"), "/") + archivePath.replace(new RegExp(`\\${path14.sep}`, "g"), "/") ] : [ "--use-compress-program", IS_WINDOWS ? '"zstd -d --long=30"' : "unzstd --long=30" @@ -98330,7 +98330,7 @@ var require_tar = __commonJS({ return BSD_TAR_ZSTD ? [ "zstd -d --force -o", constants_1.TarFilename, - archivePath.replace(new RegExp(`\\${path17.sep}`, "g"), "/") + archivePath.replace(new RegExp(`\\${path14.sep}`, "g"), "/") ] : ["--use-compress-program", IS_WINDOWS ? '"zstd -d"' : "unzstd"]; default: return ["-z"]; @@ -98345,7 +98345,7 @@ var require_tar = __commonJS({ case constants_1.CompressionMethod.Zstd: return BSD_TAR_ZSTD ? [ "zstd -T0 --long=30 --force -o", - cacheFileName.replace(new RegExp(`\\${path17.sep}`, "g"), "/"), + cacheFileName.replace(new RegExp(`\\${path14.sep}`, "g"), "/"), constants_1.TarFilename ] : [ "--use-compress-program", @@ -98354,7 +98354,7 @@ var require_tar = __commonJS({ case constants_1.CompressionMethod.ZstdWithoutLong: return BSD_TAR_ZSTD ? [ "zstd -T0 --force -o", - cacheFileName.replace(new RegExp(`\\${path17.sep}`, "g"), "/"), + cacheFileName.replace(new RegExp(`\\${path14.sep}`, "g"), "/"), constants_1.TarFilename ] : ["--use-compress-program", IS_WINDOWS ? '"zstd -T0"' : "zstdmt"]; default: @@ -98392,7 +98392,7 @@ var require_tar = __commonJS({ } function createTar(archiveFolder, sourceDirectories, compressionMethod) { return __awaiter2(this, void 0, void 0, function* () { - (0, fs_1.writeFileSync)(path17.join(archiveFolder, constants_1.ManifestFilename), sourceDirectories.join("\n")); + (0, fs_1.writeFileSync)(path14.join(archiveFolder, constants_1.ManifestFilename), sourceDirectories.join("\n")); const commands = yield getCommands(compressionMethod, "create"); yield execCommands(commands, archiveFolder); }); @@ -98474,7 +98474,7 @@ var require_cache5 = __commonJS({ exports2.restoreCache = restoreCache4; exports2.saveCache = saveCache4; var core17 = __importStar2(require_core()); - var path17 = __importStar2(require("path")); + var path14 = __importStar2(require("path")); var utils = __importStar2(require_cacheUtils()); var cacheHttpClient = __importStar2(require_cacheHttpClient()); var cacheTwirpClient = __importStar2(require_cacheTwirpClient()); @@ -98569,7 +98569,7 @@ var require_cache5 = __commonJS({ core17.info("Lookup only - skipping download"); return cacheEntry.cacheKey; } - archivePath = path17.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); + archivePath = path14.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); core17.debug(`Archive Path: ${archivePath}`); yield cacheHttpClient.downloadCache(cacheEntry.archiveLocation, archivePath, options); if (core17.isDebug()) { @@ -98638,7 +98638,7 @@ var require_cache5 = __commonJS({ core17.info("Lookup only - skipping download"); return response.matchedKey; } - archivePath = path17.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); + archivePath = path14.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); core17.debug(`Archive path: 
${archivePath}`); core17.debug(`Starting download of archive to: ${archivePath}`); yield cacheHttpClient.downloadCache(response.signedDownloadUrl, archivePath, options); @@ -98700,7 +98700,7 @@ var require_cache5 = __commonJS({ throw new Error(`Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`); } const archiveFolder = yield utils.createTempDirectory(); - const archivePath = path17.join(archiveFolder, utils.getCacheFileName(compressionMethod)); + const archivePath = path14.join(archiveFolder, utils.getCacheFileName(compressionMethod)); core17.debug(`Archive Path: ${archivePath}`); try { yield (0, tar_1.createTar)(archiveFolder, cachePaths, compressionMethod); @@ -98764,7 +98764,7 @@ var require_cache5 = __commonJS({ throw new Error(`Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`); } const archiveFolder = yield utils.createTempDirectory(); - const archivePath = path17.join(archiveFolder, utils.getCacheFileName(compressionMethod)); + const archivePath = path14.join(archiveFolder, utils.getCacheFileName(compressionMethod)); core17.debug(`Archive Path: ${archivePath}`); try { yield (0, tar_1.createTar)(archiveFolder, cachePaths, compressionMethod); @@ -98912,12 +98912,12 @@ var require_manifest = __commonJS({ exports2._readLinuxVersionFile = _readLinuxVersionFile; var semver9 = __importStar2(require_semver2()); var core_1 = require_core(); - var os4 = require("os"); + var os3 = require("os"); var cp = require("child_process"); - var fs18 = require("fs"); + var fs15 = require("fs"); function _findMatch(versionSpec, stable, candidates, archFilter) { return __awaiter2(this, void 0, void 0, function* () { - const platFilter = os4.platform(); + const platFilter = os3.platform(); let result; let match; let file; @@ -98953,7 +98953,7 @@ var require_manifest = __commonJS({ }); } function _getOsVersion() { - const plat = os4.platform(); + const plat = os3.platform(); let version = ""; if (plat === "darwin") { version = cp.execSync("sw_vers -productVersion").toString(); @@ -98976,10 +98976,10 @@ var require_manifest = __commonJS({ const lsbReleaseFile = "/etc/lsb-release"; const osReleaseFile = "/etc/os-release"; let contents = ""; - if (fs18.existsSync(lsbReleaseFile)) { - contents = fs18.readFileSync(lsbReleaseFile).toString(); - } else if (fs18.existsSync(osReleaseFile)) { - contents = fs18.readFileSync(osReleaseFile).toString(); + if (fs15.existsSync(lsbReleaseFile)) { + contents = fs15.readFileSync(lsbReleaseFile).toString(); + } else if (fs15.existsSync(osReleaseFile)) { + contents = fs15.readFileSync(osReleaseFile).toString(); } return contents; } @@ -99188,13 +99188,13 @@ var require_tool_cache = __commonJS({ var core17 = __importStar2(require_core()); var io7 = __importStar2(require_io()); var crypto2 = __importStar2(require("crypto")); - var fs18 = __importStar2(require("fs")); + var fs15 = __importStar2(require("fs")); var mm = __importStar2(require_manifest()); - var os4 = __importStar2(require("os")); - var path17 = __importStar2(require("path")); + var os3 = __importStar2(require("os")); + var path14 = __importStar2(require("path")); var httpm = __importStar2(require_lib()); var semver9 = __importStar2(require_semver2()); - var stream2 = __importStar2(require("stream")); + var stream = __importStar2(require("stream")); var util = __importStar2(require("util")); var assert_1 = require("assert"); var exec_1 = require_exec(); @@ -99212,8 +99212,8 @@ var 
require_tool_cache = __commonJS({ var userAgent2 = "actions/tool-cache"; function downloadTool2(url2, dest, auth2, headers) { return __awaiter2(this, void 0, void 0, function* () { - dest = dest || path17.join(_getTempDirectory(), crypto2.randomUUID()); - yield io7.mkdirP(path17.dirname(dest)); + dest = dest || path14.join(_getTempDirectory(), crypto2.randomUUID()); + yield io7.mkdirP(path14.dirname(dest)); core17.debug(`Downloading ${url2}`); core17.debug(`Destination ${dest}`); const maxAttempts = 3; @@ -99234,7 +99234,7 @@ var require_tool_cache = __commonJS({ } function downloadToolAttempt(url2, dest, auth2, headers) { return __awaiter2(this, void 0, void 0, function* () { - if (fs18.existsSync(dest)) { + if (fs15.existsSync(dest)) { throw new Error(`Destination file path ${dest} already exists`); } const http = new httpm.HttpClient(userAgent2, [], { @@ -99253,12 +99253,12 @@ var require_tool_cache = __commonJS({ core17.debug(`Failed to download from "${url2}". Code(${response.message.statusCode}) Message(${response.message.statusMessage})`); throw err; } - const pipeline = util.promisify(stream2.pipeline); + const pipeline = util.promisify(stream.pipeline); const responseMessageFactory = _getGlobal("TEST_DOWNLOAD_TOOL_RESPONSE_MESSAGE_FACTORY", () => response.message); const readStream = responseMessageFactory(); let succeeded = false; try { - yield pipeline(readStream, fs18.createWriteStream(dest)); + yield pipeline(readStream, fs15.createWriteStream(dest)); core17.debug("download complete"); succeeded = true; return dest; @@ -99303,7 +99303,7 @@ var require_tool_cache = __commonJS({ process.chdir(originalCwd); } } else { - const escapedScript = path17.join(__dirname, "..", "scripts", "Invoke-7zdec.ps1").replace(/'/g, "''").replace(/"|\n|\r/g, ""); + const escapedScript = path14.join(__dirname, "..", "scripts", "Invoke-7zdec.ps1").replace(/'/g, "''").replace(/"|\n|\r/g, ""); const escapedFile = file.replace(/'/g, "''").replace(/"|\n|\r/g, ""); const escapedTarget = dest.replace(/'/g, "''").replace(/"|\n|\r/g, ""); const command = `& '${escapedScript}' -Source '${escapedFile}' -Target '${escapedTarget}'`; @@ -99464,61 +99464,61 @@ var require_tool_cache = __commonJS({ yield (0, exec_1.exec)(`"${unzipPath}"`, args, { cwd: dest }); }); } - function cacheDir(sourceDir, tool, version, arch2) { + function cacheDir(sourceDir, tool, version, arch) { return __awaiter2(this, void 0, void 0, function* () { version = semver9.clean(version) || version; - arch2 = arch2 || os4.arch(); - core17.debug(`Caching tool ${tool} ${version} ${arch2}`); + arch = arch || os3.arch(); + core17.debug(`Caching tool ${tool} ${version} ${arch}`); core17.debug(`source dir: ${sourceDir}`); - if (!fs18.statSync(sourceDir).isDirectory()) { + if (!fs15.statSync(sourceDir).isDirectory()) { throw new Error("sourceDir is not a directory"); } - const destPath = yield _createToolPath(tool, version, arch2); - for (const itemName of fs18.readdirSync(sourceDir)) { - const s = path17.join(sourceDir, itemName); + const destPath = yield _createToolPath(tool, version, arch); + for (const itemName of fs15.readdirSync(sourceDir)) { + const s = path14.join(sourceDir, itemName); yield io7.cp(s, destPath, { recursive: true }); } - _completeToolPath(tool, version, arch2); + _completeToolPath(tool, version, arch); return destPath; }); } - function cacheFile(sourceFile, targetFile, tool, version, arch2) { + function cacheFile(sourceFile, targetFile, tool, version, arch) { return __awaiter2(this, void 0, void 0, function* () { version = 
semver9.clean(version) || version; - arch2 = arch2 || os4.arch(); - core17.debug(`Caching tool ${tool} ${version} ${arch2}`); + arch = arch || os3.arch(); + core17.debug(`Caching tool ${tool} ${version} ${arch}`); core17.debug(`source file: ${sourceFile}`); - if (!fs18.statSync(sourceFile).isFile()) { + if (!fs15.statSync(sourceFile).isFile()) { throw new Error("sourceFile is not a file"); } - const destFolder = yield _createToolPath(tool, version, arch2); - const destPath = path17.join(destFolder, targetFile); + const destFolder = yield _createToolPath(tool, version, arch); + const destPath = path14.join(destFolder, targetFile); core17.debug(`destination file ${destPath}`); yield io7.cp(sourceFile, destPath); - _completeToolPath(tool, version, arch2); + _completeToolPath(tool, version, arch); return destFolder; }); } - function find2(toolName, versionSpec, arch2) { + function find2(toolName, versionSpec, arch) { if (!toolName) { throw new Error("toolName parameter is required"); } if (!versionSpec) { throw new Error("versionSpec parameter is required"); } - arch2 = arch2 || os4.arch(); + arch = arch || os3.arch(); if (!isExplicitVersion(versionSpec)) { - const localVersions = findAllVersions2(toolName, arch2); + const localVersions = findAllVersions2(toolName, arch); const match = evaluateVersions(localVersions, versionSpec); versionSpec = match; } let toolPath = ""; if (versionSpec) { versionSpec = semver9.clean(versionSpec) || ""; - const cachePath = path17.join(_getCacheDirectory(), toolName, versionSpec, arch2); + const cachePath = path14.join(_getCacheDirectory(), toolName, versionSpec, arch); core17.debug(`checking cache: ${cachePath}`); - if (fs18.existsSync(cachePath) && fs18.existsSync(`${cachePath}.complete`)) { - core17.debug(`Found tool in cache ${toolName} ${versionSpec} ${arch2}`); + if (fs15.existsSync(cachePath) && fs15.existsSync(`${cachePath}.complete`)) { + core17.debug(`Found tool in cache ${toolName} ${versionSpec} ${arch}`); toolPath = cachePath; } else { core17.debug("not found"); @@ -99526,16 +99526,16 @@ var require_tool_cache = __commonJS({ } return toolPath; } - function findAllVersions2(toolName, arch2) { + function findAllVersions2(toolName, arch) { const versions = []; - arch2 = arch2 || os4.arch(); - const toolPath = path17.join(_getCacheDirectory(), toolName); - if (fs18.existsSync(toolPath)) { - const children = fs18.readdirSync(toolPath); + arch = arch || os3.arch(); + const toolPath = path14.join(_getCacheDirectory(), toolName); + if (fs15.existsSync(toolPath)) { + const children = fs15.readdirSync(toolPath); for (const child of children) { if (isExplicitVersion(child)) { - const fullPath = path17.join(toolPath, child, arch2 || ""); - if (fs18.existsSync(fullPath) && fs18.existsSync(`${fullPath}.complete`)) { + const fullPath = path14.join(toolPath, child, arch || ""); + if (fs15.existsSync(fullPath) && fs15.existsSync(`${fullPath}.complete`)) { versions.push(child); } } @@ -99578,7 +99578,7 @@ var require_tool_cache = __commonJS({ }); } function findFromManifest(versionSpec_1, stable_1, manifest_1) { - return __awaiter2(this, arguments, void 0, function* (versionSpec, stable, manifest, archFilter = os4.arch()) { + return __awaiter2(this, arguments, void 0, function* (versionSpec, stable, manifest, archFilter = os3.arch()) { const match = yield mm._findMatch(versionSpec, stable, manifest, archFilter); return match; }); @@ -99586,15 +99586,15 @@ var require_tool_cache = __commonJS({ function _createExtractFolder(dest) { return __awaiter2(this, void 0, 
void 0, function* () { if (!dest) { - dest = path17.join(_getTempDirectory(), crypto2.randomUUID()); + dest = path14.join(_getTempDirectory(), crypto2.randomUUID()); } yield io7.mkdirP(dest); return dest; }); } - function _createToolPath(tool, version, arch2) { + function _createToolPath(tool, version, arch) { return __awaiter2(this, void 0, void 0, function* () { - const folderPath = path17.join(_getCacheDirectory(), tool, semver9.clean(version) || version, arch2 || ""); + const folderPath = path14.join(_getCacheDirectory(), tool, semver9.clean(version) || version, arch || ""); core17.debug(`destination ${folderPath}`); const markerPath = `${folderPath}.complete`; yield io7.rmRF(folderPath); @@ -99603,10 +99603,10 @@ var require_tool_cache = __commonJS({ return folderPath; }); } - function _completeToolPath(tool, version, arch2) { - const folderPath = path17.join(_getCacheDirectory(), tool, semver9.clean(version) || version, arch2 || ""); + function _completeToolPath(tool, version, arch) { + const folderPath = path14.join(_getCacheDirectory(), tool, semver9.clean(version) || version, arch || ""); const markerPath = `${folderPath}.complete`; - fs18.writeFileSync(markerPath, ""); + fs15.writeFileSync(markerPath, ""); core17.debug("finished caching tool"); } function isExplicitVersion(versionSpec) { @@ -102386,13 +102386,13 @@ These characters are not allowed in the artifact name due to limitations with ce (0, core_1.info)(`Artifact name is valid!`); } exports2.validateArtifactName = validateArtifactName; - function validateFilePath(path17) { - if (!path17) { + function validateFilePath(path14) { + if (!path14) { throw new Error(`Provided file path input during validation is empty`); } for (const [invalidCharacterKey, errorMessageForCharacter] of invalidArtifactFilePathCharacters) { - if (path17.includes(invalidCharacterKey)) { - throw new Error(`The path for one of the files in artifact is not valid: ${path17}. Contains the following character: ${errorMessageForCharacter} + if (path14.includes(invalidCharacterKey)) { + throw new Error(`The path for one of the files in artifact is not valid: ${path14}. 
Contains the following character: ${errorMessageForCharacter} Invalid characters include: ${Array.from(invalidArtifactFilePathCharacters.values()).toString()} @@ -102937,15 +102937,15 @@ var require_upload_zip_specification = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getUploadZipSpecification = exports2.validateRootDirectory = void 0; - var fs18 = __importStar2(require("fs")); + var fs15 = __importStar2(require("fs")); var core_1 = require_core(); var path_1 = require("path"); var path_and_artifact_name_validation_1 = require_path_and_artifact_name_validation(); function validateRootDirectory(rootDirectory) { - if (!fs18.existsSync(rootDirectory)) { + if (!fs15.existsSync(rootDirectory)) { throw new Error(`The provided rootDirectory ${rootDirectory} does not exist`); } - if (!fs18.statSync(rootDirectory).isDirectory()) { + if (!fs15.statSync(rootDirectory).isDirectory()) { throw new Error(`The provided rootDirectory ${rootDirectory} is not a valid directory`); } (0, core_1.info)(`Root directory input is valid!`); @@ -102956,7 +102956,7 @@ var require_upload_zip_specification = __commonJS({ rootDirectory = (0, path_1.normalize)(rootDirectory); rootDirectory = (0, path_1.resolve)(rootDirectory); for (let file of filesToZip) { - const stats = fs18.lstatSync(file, { throwIfNoEntry: false }); + const stats = fs15.lstatSync(file, { throwIfNoEntry: false }); if (!stats) { throw new Error(`File ${file} does not exist`); } @@ -103053,7 +103053,7 @@ var require_blob_upload = __commonJS({ var config_1 = require_config2(); var core17 = __importStar2(require_core()); var crypto2 = __importStar2(require("crypto")); - var stream2 = __importStar2(require("stream")); + var stream = __importStar2(require("stream")); var errors_1 = require_errors4(); function uploadZipToBlobStorage(authenticatedUploadURL, zipUploadStream) { return __awaiter2(this, void 0, void 0, function* () { @@ -103089,7 +103089,7 @@ var require_blob_upload = __commonJS({ abortSignal: abortController.signal }; let sha256Hash = void 0; - const uploadStream = new stream2.PassThrough(); + const uploadStream = new stream.PassThrough(); const hashStream = crypto2.createHash("sha256"); zipUploadStream.pipe(uploadStream); zipUploadStream.pipe(hashStream).setEncoding("hex"); @@ -103293,8 +103293,8 @@ var require_minimatch2 = __commonJS({ return new Minimatch(pattern, options).match(p); }; module2.exports = minimatch; - var path17 = require_path(); - minimatch.sep = path17.sep; + var path14 = require_path(); + minimatch.sep = path14.sep; var GLOBSTAR = /* @__PURE__ */ Symbol("globstar **"); minimatch.GLOBSTAR = GLOBSTAR; var expand2 = require_brace_expansion2(); @@ -103803,8 +103803,8 @@ var require_minimatch2 = __commonJS({ if (this.empty) return f === ""; if (f === "/" && partial) return true; const options = this.options; - if (path17.sep !== "/") { - f = f.split(path17.sep).join("/"); + if (path14.sep !== "/") { + f = f.split(path14.sep).join("/"); } f = f.split(slashSplit); this.debug(this.pattern, "split", f); @@ -103842,13 +103842,13 @@ var require_minimatch2 = __commonJS({ var require_readdir_glob = __commonJS({ "node_modules/readdir-glob/index.js"(exports2, module2) { module2.exports = readdirGlob; - var fs18 = require("fs"); + var fs15 = require("fs"); var { EventEmitter } = require("events"); var { Minimatch } = require_minimatch2(); var { resolve: resolve8 } = require("path"); function readdir(dir, strict) { return new Promise((resolve9, reject) => { - fs18.readdir(dir, { withFileTypes: 
true }, (err, files) => { + fs15.readdir(dir, { withFileTypes: true }, (err, files) => { if (err) { switch (err.code) { case "ENOTDIR": @@ -103881,7 +103881,7 @@ var require_readdir_glob = __commonJS({ } function stat(file, followSymlinks) { return new Promise((resolve9, reject) => { - const statFunc = followSymlinks ? fs18.stat : fs18.lstat; + const statFunc = followSymlinks ? fs15.stat : fs15.lstat; statFunc(file, (err, stats) => { if (err) { switch (err.code) { @@ -103902,8 +103902,8 @@ var require_readdir_glob = __commonJS({ }); }); } - async function* exploreWalkAsync(dir, path17, followSymlinks, useStat, shouldSkip, strict) { - let files = await readdir(path17 + dir, strict); + async function* exploreWalkAsync(dir, path14, followSymlinks, useStat, shouldSkip, strict) { + let files = await readdir(path14 + dir, strict); for (const file of files) { let name = file.name; if (name === void 0) { @@ -103912,7 +103912,7 @@ var require_readdir_glob = __commonJS({ } const filename = dir + "/" + name; const relative2 = filename.slice(1); - const absolute = path17 + "/" + relative2; + const absolute = path14 + "/" + relative2; let stats = null; if (useStat || followSymlinks) { stats = await stat(absolute, followSymlinks); @@ -103926,15 +103926,15 @@ var require_readdir_glob = __commonJS({ if (stats.isDirectory()) { if (!shouldSkip(relative2)) { yield { relative: relative2, absolute, stats }; - yield* exploreWalkAsync(filename, path17, followSymlinks, useStat, shouldSkip, false); + yield* exploreWalkAsync(filename, path14, followSymlinks, useStat, shouldSkip, false); } } else { yield { relative: relative2, absolute, stats }; } } } - async function* explore(path17, followSymlinks, useStat, shouldSkip) { - yield* exploreWalkAsync("", path17, followSymlinks, useStat, shouldSkip, true); + async function* explore(path14, followSymlinks, useStat, shouldSkip) { + yield* exploreWalkAsync("", path14, followSymlinks, useStat, shouldSkip, true); } function readOptions(options) { return { @@ -105946,54 +105946,54 @@ var require_polyfills = __commonJS({ } var chdir; module2.exports = patch; - function patch(fs18) { + function patch(fs15) { if (constants.hasOwnProperty("O_SYMLINK") && process.version.match(/^v0\.6\.[0-2]|^v0\.5\./)) { - patchLchmod(fs18); - } - if (!fs18.lutimes) { - patchLutimes(fs18); - } - fs18.chown = chownFix(fs18.chown); - fs18.fchown = chownFix(fs18.fchown); - fs18.lchown = chownFix(fs18.lchown); - fs18.chmod = chmodFix(fs18.chmod); - fs18.fchmod = chmodFix(fs18.fchmod); - fs18.lchmod = chmodFix(fs18.lchmod); - fs18.chownSync = chownFixSync(fs18.chownSync); - fs18.fchownSync = chownFixSync(fs18.fchownSync); - fs18.lchownSync = chownFixSync(fs18.lchownSync); - fs18.chmodSync = chmodFixSync(fs18.chmodSync); - fs18.fchmodSync = chmodFixSync(fs18.fchmodSync); - fs18.lchmodSync = chmodFixSync(fs18.lchmodSync); - fs18.stat = statFix(fs18.stat); - fs18.fstat = statFix(fs18.fstat); - fs18.lstat = statFix(fs18.lstat); - fs18.statSync = statFixSync(fs18.statSync); - fs18.fstatSync = statFixSync(fs18.fstatSync); - fs18.lstatSync = statFixSync(fs18.lstatSync); - if (fs18.chmod && !fs18.lchmod) { - fs18.lchmod = function(path17, mode, cb) { + patchLchmod(fs15); + } + if (!fs15.lutimes) { + patchLutimes(fs15); + } + fs15.chown = chownFix(fs15.chown); + fs15.fchown = chownFix(fs15.fchown); + fs15.lchown = chownFix(fs15.lchown); + fs15.chmod = chmodFix(fs15.chmod); + fs15.fchmod = chmodFix(fs15.fchmod); + fs15.lchmod = chmodFix(fs15.lchmod); + fs15.chownSync = chownFixSync(fs15.chownSync); + 
fs15.fchownSync = chownFixSync(fs15.fchownSync); + fs15.lchownSync = chownFixSync(fs15.lchownSync); + fs15.chmodSync = chmodFixSync(fs15.chmodSync); + fs15.fchmodSync = chmodFixSync(fs15.fchmodSync); + fs15.lchmodSync = chmodFixSync(fs15.lchmodSync); + fs15.stat = statFix(fs15.stat); + fs15.fstat = statFix(fs15.fstat); + fs15.lstat = statFix(fs15.lstat); + fs15.statSync = statFixSync(fs15.statSync); + fs15.fstatSync = statFixSync(fs15.fstatSync); + fs15.lstatSync = statFixSync(fs15.lstatSync); + if (fs15.chmod && !fs15.lchmod) { + fs15.lchmod = function(path14, mode, cb) { if (cb) process.nextTick(cb); }; - fs18.lchmodSync = function() { + fs15.lchmodSync = function() { }; } - if (fs18.chown && !fs18.lchown) { - fs18.lchown = function(path17, uid, gid, cb) { + if (fs15.chown && !fs15.lchown) { + fs15.lchown = function(path14, uid, gid, cb) { if (cb) process.nextTick(cb); }; - fs18.lchownSync = function() { + fs15.lchownSync = function() { }; } if (platform === "win32") { - fs18.rename = typeof fs18.rename !== "function" ? fs18.rename : (function(fs$rename) { + fs15.rename = typeof fs15.rename !== "function" ? fs15.rename : (function(fs$rename) { function rename(from, to, cb) { var start = Date.now(); var backoff = 0; fs$rename(from, to, function CB(er) { if (er && (er.code === "EACCES" || er.code === "EPERM") && Date.now() - start < 6e4) { setTimeout(function() { - fs18.stat(to, function(stater, st) { + fs15.stat(to, function(stater, st) { if (stater && stater.code === "ENOENT") fs$rename(from, to, CB); else @@ -106009,9 +106009,9 @@ var require_polyfills = __commonJS({ } if (Object.setPrototypeOf) Object.setPrototypeOf(rename, fs$rename); return rename; - })(fs18.rename); + })(fs15.rename); } - fs18.read = typeof fs18.read !== "function" ? fs18.read : (function(fs$read) { + fs15.read = typeof fs15.read !== "function" ? fs15.read : (function(fs$read) { function read(fd, buffer, offset, length, position, callback_) { var callback; if (callback_ && typeof callback_ === "function") { @@ -106019,22 +106019,22 @@ var require_polyfills = __commonJS({ callback = function(er, _2, __) { if (er && er.code === "EAGAIN" && eagCounter < 10) { eagCounter++; - return fs$read.call(fs18, fd, buffer, offset, length, position, callback); + return fs$read.call(fs15, fd, buffer, offset, length, position, callback); } callback_.apply(this, arguments); }; } - return fs$read.call(fs18, fd, buffer, offset, length, position, callback); + return fs$read.call(fs15, fd, buffer, offset, length, position, callback); } if (Object.setPrototypeOf) Object.setPrototypeOf(read, fs$read); return read; - })(fs18.read); - fs18.readSync = typeof fs18.readSync !== "function" ? fs18.readSync : /* @__PURE__ */ (function(fs$readSync) { + })(fs15.read); + fs15.readSync = typeof fs15.readSync !== "function" ? 
fs15.readSync : /* @__PURE__ */ (function(fs$readSync) { return function(fd, buffer, offset, length, position) { var eagCounter = 0; while (true) { try { - return fs$readSync.call(fs18, fd, buffer, offset, length, position); + return fs$readSync.call(fs15, fd, buffer, offset, length, position); } catch (er) { if (er.code === "EAGAIN" && eagCounter < 10) { eagCounter++; @@ -106044,11 +106044,11 @@ var require_polyfills = __commonJS({ } } }; - })(fs18.readSync); - function patchLchmod(fs19) { - fs19.lchmod = function(path17, mode, callback) { - fs19.open( - path17, + })(fs15.readSync); + function patchLchmod(fs16) { + fs16.lchmod = function(path14, mode, callback) { + fs16.open( + path14, constants.O_WRONLY | constants.O_SYMLINK, mode, function(err, fd) { @@ -106056,80 +106056,80 @@ var require_polyfills = __commonJS({ if (callback) callback(err); return; } - fs19.fchmod(fd, mode, function(err2) { - fs19.close(fd, function(err22) { + fs16.fchmod(fd, mode, function(err2) { + fs16.close(fd, function(err22) { if (callback) callback(err2 || err22); }); }); } ); }; - fs19.lchmodSync = function(path17, mode) { - var fd = fs19.openSync(path17, constants.O_WRONLY | constants.O_SYMLINK, mode); + fs16.lchmodSync = function(path14, mode) { + var fd = fs16.openSync(path14, constants.O_WRONLY | constants.O_SYMLINK, mode); var threw = true; var ret; try { - ret = fs19.fchmodSync(fd, mode); + ret = fs16.fchmodSync(fd, mode); threw = false; } finally { if (threw) { try { - fs19.closeSync(fd); + fs16.closeSync(fd); } catch (er) { } } else { - fs19.closeSync(fd); + fs16.closeSync(fd); } } return ret; }; } - function patchLutimes(fs19) { - if (constants.hasOwnProperty("O_SYMLINK") && fs19.futimes) { - fs19.lutimes = function(path17, at, mt, cb) { - fs19.open(path17, constants.O_SYMLINK, function(er, fd) { + function patchLutimes(fs16) { + if (constants.hasOwnProperty("O_SYMLINK") && fs16.futimes) { + fs16.lutimes = function(path14, at, mt, cb) { + fs16.open(path14, constants.O_SYMLINK, function(er, fd) { if (er) { if (cb) cb(er); return; } - fs19.futimes(fd, at, mt, function(er2) { - fs19.close(fd, function(er22) { + fs16.futimes(fd, at, mt, function(er2) { + fs16.close(fd, function(er22) { if (cb) cb(er2 || er22); }); }); }); }; - fs19.lutimesSync = function(path17, at, mt) { - var fd = fs19.openSync(path17, constants.O_SYMLINK); + fs16.lutimesSync = function(path14, at, mt) { + var fd = fs16.openSync(path14, constants.O_SYMLINK); var ret; var threw = true; try { - ret = fs19.futimesSync(fd, at, mt); + ret = fs16.futimesSync(fd, at, mt); threw = false; } finally { if (threw) { try { - fs19.closeSync(fd); + fs16.closeSync(fd); } catch (er) { } } else { - fs19.closeSync(fd); + fs16.closeSync(fd); } } return ret; }; - } else if (fs19.futimes) { - fs19.lutimes = function(_a, _b, _c, cb) { + } else if (fs16.futimes) { + fs16.lutimes = function(_a, _b, _c, cb) { if (cb) process.nextTick(cb); }; - fs19.lutimesSync = function() { + fs16.lutimesSync = function() { }; } } function chmodFix(orig) { if (!orig) return orig; return function(target, mode, cb) { - return orig.call(fs18, target, mode, function(er) { + return orig.call(fs15, target, mode, function(er) { if (chownErOk(er)) er = null; if (cb) cb.apply(this, arguments); }); @@ -106139,7 +106139,7 @@ var require_polyfills = __commonJS({ if (!orig) return orig; return function(target, mode) { try { - return orig.call(fs18, target, mode); + return orig.call(fs15, target, mode); } catch (er) { if (!chownErOk(er)) throw er; } @@ -106148,7 +106148,7 @@ var 
require_polyfills = __commonJS({ function chownFix(orig) { if (!orig) return orig; return function(target, uid, gid, cb) { - return orig.call(fs18, target, uid, gid, function(er) { + return orig.call(fs15, target, uid, gid, function(er) { if (chownErOk(er)) er = null; if (cb) cb.apply(this, arguments); }); @@ -106158,7 +106158,7 @@ var require_polyfills = __commonJS({ if (!orig) return orig; return function(target, uid, gid) { try { - return orig.call(fs18, target, uid, gid); + return orig.call(fs15, target, uid, gid); } catch (er) { if (!chownErOk(er)) throw er; } @@ -106178,13 +106178,13 @@ var require_polyfills = __commonJS({ } if (cb) cb.apply(this, arguments); } - return options ? orig.call(fs18, target, options, callback) : orig.call(fs18, target, callback); + return options ? orig.call(fs15, target, options, callback) : orig.call(fs15, target, callback); }; } function statFixSync(orig) { if (!orig) return orig; return function(target, options) { - var stats = options ? orig.call(fs18, target, options) : orig.call(fs18, target); + var stats = options ? orig.call(fs15, target, options) : orig.call(fs15, target); if (stats) { if (stats.uid < 0) stats.uid += 4294967296; if (stats.gid < 0) stats.gid += 4294967296; @@ -106213,16 +106213,16 @@ var require_legacy_streams = __commonJS({ "node_modules/graceful-fs/legacy-streams.js"(exports2, module2) { var Stream = require("stream").Stream; module2.exports = legacy; - function legacy(fs18) { + function legacy(fs15) { return { ReadStream, WriteStream }; - function ReadStream(path17, options) { - if (!(this instanceof ReadStream)) return new ReadStream(path17, options); + function ReadStream(path14, options) { + if (!(this instanceof ReadStream)) return new ReadStream(path14, options); Stream.call(this); var self2 = this; - this.path = path17; + this.path = path14; this.fd = null; this.readable = true; this.paused = false; @@ -106256,7 +106256,7 @@ var require_legacy_streams = __commonJS({ }); return; } - fs18.open(this.path, this.flags, this.mode, function(err, fd) { + fs15.open(this.path, this.flags, this.mode, function(err, fd) { if (err) { self2.emit("error", err); self2.readable = false; @@ -106267,10 +106267,10 @@ var require_legacy_streams = __commonJS({ self2._read(); }); } - function WriteStream(path17, options) { - if (!(this instanceof WriteStream)) return new WriteStream(path17, options); + function WriteStream(path14, options) { + if (!(this instanceof WriteStream)) return new WriteStream(path14, options); Stream.call(this); - this.path = path17; + this.path = path14; this.fd = null; this.writable = true; this.flags = "w"; @@ -106295,7 +106295,7 @@ var require_legacy_streams = __commonJS({ this.busy = false; this._queue = []; if (this.fd === null) { - this._open = fs18.open; + this._open = fs15.open; this._queue.push([this._open, this.path, this.flags, this.mode, void 0]); this.flush(); } @@ -106330,7 +106330,7 @@ var require_clone = __commonJS({ // node_modules/graceful-fs/graceful-fs.js var require_graceful_fs = __commonJS({ "node_modules/graceful-fs/graceful-fs.js"(exports2, module2) { - var fs18 = require("fs"); + var fs15 = require("fs"); var polyfills = require_polyfills(); var legacy = require_legacy_streams(); var clone = require_clone(); @@ -106362,12 +106362,12 @@ var require_graceful_fs = __commonJS({ m = "GFS4: " + m.split(/\n/).join("\nGFS4: "); console.error(m); }; - if (!fs18[gracefulQueue]) { + if (!fs15[gracefulQueue]) { queue = global[gracefulQueue] || []; - publishQueue(fs18, queue); - fs18.close = 
(function(fs$close) { + publishQueue(fs15, queue); + fs15.close = (function(fs$close) { function close(fd, cb) { - return fs$close.call(fs18, fd, function(err) { + return fs$close.call(fs15, fd, function(err) { if (!err) { resetQueue(); } @@ -106379,48 +106379,48 @@ var require_graceful_fs = __commonJS({ value: fs$close }); return close; - })(fs18.close); - fs18.closeSync = (function(fs$closeSync) { + })(fs15.close); + fs15.closeSync = (function(fs$closeSync) { function closeSync(fd) { - fs$closeSync.apply(fs18, arguments); + fs$closeSync.apply(fs15, arguments); resetQueue(); } Object.defineProperty(closeSync, previousSymbol, { value: fs$closeSync }); return closeSync; - })(fs18.closeSync); + })(fs15.closeSync); if (/\bgfs4\b/i.test(process.env.NODE_DEBUG || "")) { process.on("exit", function() { - debug5(fs18[gracefulQueue]); - require("assert").equal(fs18[gracefulQueue].length, 0); + debug5(fs15[gracefulQueue]); + require("assert").equal(fs15[gracefulQueue].length, 0); }); } } var queue; if (!global[gracefulQueue]) { - publishQueue(global, fs18[gracefulQueue]); - } - module2.exports = patch(clone(fs18)); - if (process.env.TEST_GRACEFUL_FS_GLOBAL_PATCH && !fs18.__patched) { - module2.exports = patch(fs18); - fs18.__patched = true; - } - function patch(fs19) { - polyfills(fs19); - fs19.gracefulify = patch; - fs19.createReadStream = createReadStream2; - fs19.createWriteStream = createWriteStream3; - var fs$readFile = fs19.readFile; - fs19.readFile = readFile; - function readFile(path17, options, cb) { + publishQueue(global, fs15[gracefulQueue]); + } + module2.exports = patch(clone(fs15)); + if (process.env.TEST_GRACEFUL_FS_GLOBAL_PATCH && !fs15.__patched) { + module2.exports = patch(fs15); + fs15.__patched = true; + } + function patch(fs16) { + polyfills(fs16); + fs16.gracefulify = patch; + fs16.createReadStream = createReadStream2; + fs16.createWriteStream = createWriteStream3; + var fs$readFile = fs16.readFile; + fs16.readFile = readFile; + function readFile(path14, options, cb) { if (typeof options === "function") cb = options, options = null; - return go$readFile(path17, options, cb); - function go$readFile(path18, options2, cb2, startTime) { - return fs$readFile(path18, options2, function(err) { + return go$readFile(path14, options, cb); + function go$readFile(path15, options2, cb2, startTime) { + return fs$readFile(path15, options2, function(err) { if (err && (err.code === "EMFILE" || err.code === "ENFILE")) - enqueue([go$readFile, [path18, options2, cb2], err, startTime || Date.now(), Date.now()]); + enqueue([go$readFile, [path15, options2, cb2], err, startTime || Date.now(), Date.now()]); else { if (typeof cb2 === "function") cb2.apply(this, arguments); @@ -106428,16 +106428,16 @@ var require_graceful_fs = __commonJS({ }); } } - var fs$writeFile = fs19.writeFile; - fs19.writeFile = writeFile; - function writeFile(path17, data, options, cb) { + var fs$writeFile = fs16.writeFile; + fs16.writeFile = writeFile; + function writeFile(path14, data, options, cb) { if (typeof options === "function") cb = options, options = null; - return go$writeFile(path17, data, options, cb); - function go$writeFile(path18, data2, options2, cb2, startTime) { - return fs$writeFile(path18, data2, options2, function(err) { + return go$writeFile(path14, data, options, cb); + function go$writeFile(path15, data2, options2, cb2, startTime) { + return fs$writeFile(path15, data2, options2, function(err) { if (err && (err.code === "EMFILE" || err.code === "ENFILE")) - enqueue([go$writeFile, [path18, data2, options2, 
cb2], err, startTime || Date.now(), Date.now()]); + enqueue([go$writeFile, [path15, data2, options2, cb2], err, startTime || Date.now(), Date.now()]); else { if (typeof cb2 === "function") cb2.apply(this, arguments); @@ -106445,17 +106445,17 @@ var require_graceful_fs = __commonJS({ }); } } - var fs$appendFile = fs19.appendFile; + var fs$appendFile = fs16.appendFile; if (fs$appendFile) - fs19.appendFile = appendFile; - function appendFile(path17, data, options, cb) { + fs16.appendFile = appendFile; + function appendFile(path14, data, options, cb) { if (typeof options === "function") cb = options, options = null; - return go$appendFile(path17, data, options, cb); - function go$appendFile(path18, data2, options2, cb2, startTime) { - return fs$appendFile(path18, data2, options2, function(err) { + return go$appendFile(path14, data, options, cb); + function go$appendFile(path15, data2, options2, cb2, startTime) { + return fs$appendFile(path15, data2, options2, function(err) { if (err && (err.code === "EMFILE" || err.code === "ENFILE")) - enqueue([go$appendFile, [path18, data2, options2, cb2], err, startTime || Date.now(), Date.now()]); + enqueue([go$appendFile, [path15, data2, options2, cb2], err, startTime || Date.now(), Date.now()]); else { if (typeof cb2 === "function") cb2.apply(this, arguments); @@ -106463,9 +106463,9 @@ var require_graceful_fs = __commonJS({ }); } } - var fs$copyFile = fs19.copyFile; + var fs$copyFile = fs16.copyFile; if (fs$copyFile) - fs19.copyFile = copyFile2; + fs16.copyFile = copyFile2; function copyFile2(src, dest, flags, cb) { if (typeof flags === "function") { cb = flags; @@ -106483,34 +106483,34 @@ var require_graceful_fs = __commonJS({ }); } } - var fs$readdir = fs19.readdir; - fs19.readdir = readdir; + var fs$readdir = fs16.readdir; + fs16.readdir = readdir; var noReaddirOptionVersions = /^v[0-5]\./; - function readdir(path17, options, cb) { + function readdir(path14, options, cb) { if (typeof options === "function") cb = options, options = null; - var go$readdir = noReaddirOptionVersions.test(process.version) ? function go$readdir2(path18, options2, cb2, startTime) { - return fs$readdir(path18, fs$readdirCallback( - path18, + var go$readdir = noReaddirOptionVersions.test(process.version) ? 
function go$readdir2(path15, options2, cb2, startTime) { + return fs$readdir(path15, fs$readdirCallback( + path15, options2, cb2, startTime )); - } : function go$readdir2(path18, options2, cb2, startTime) { - return fs$readdir(path18, options2, fs$readdirCallback( - path18, + } : function go$readdir2(path15, options2, cb2, startTime) { + return fs$readdir(path15, options2, fs$readdirCallback( + path15, options2, cb2, startTime )); }; - return go$readdir(path17, options, cb); - function fs$readdirCallback(path18, options2, cb2, startTime) { + return go$readdir(path14, options, cb); + function fs$readdirCallback(path15, options2, cb2, startTime) { return function(err, files) { if (err && (err.code === "EMFILE" || err.code === "ENFILE")) enqueue([ go$readdir, - [path18, options2, cb2], + [path15, options2, cb2], err, startTime || Date.now(), Date.now() @@ -106525,21 +106525,21 @@ var require_graceful_fs = __commonJS({ } } if (process.version.substr(0, 4) === "v0.8") { - var legStreams = legacy(fs19); + var legStreams = legacy(fs16); ReadStream = legStreams.ReadStream; WriteStream = legStreams.WriteStream; } - var fs$ReadStream = fs19.ReadStream; + var fs$ReadStream = fs16.ReadStream; if (fs$ReadStream) { ReadStream.prototype = Object.create(fs$ReadStream.prototype); ReadStream.prototype.open = ReadStream$open; } - var fs$WriteStream = fs19.WriteStream; + var fs$WriteStream = fs16.WriteStream; if (fs$WriteStream) { WriteStream.prototype = Object.create(fs$WriteStream.prototype); WriteStream.prototype.open = WriteStream$open; } - Object.defineProperty(fs19, "ReadStream", { + Object.defineProperty(fs16, "ReadStream", { get: function() { return ReadStream; }, @@ -106549,7 +106549,7 @@ var require_graceful_fs = __commonJS({ enumerable: true, configurable: true }); - Object.defineProperty(fs19, "WriteStream", { + Object.defineProperty(fs16, "WriteStream", { get: function() { return WriteStream; }, @@ -106560,7 +106560,7 @@ var require_graceful_fs = __commonJS({ configurable: true }); var FileReadStream = ReadStream; - Object.defineProperty(fs19, "FileReadStream", { + Object.defineProperty(fs16, "FileReadStream", { get: function() { return FileReadStream; }, @@ -106571,7 +106571,7 @@ var require_graceful_fs = __commonJS({ configurable: true }); var FileWriteStream = WriteStream; - Object.defineProperty(fs19, "FileWriteStream", { + Object.defineProperty(fs16, "FileWriteStream", { get: function() { return FileWriteStream; }, @@ -106581,7 +106581,7 @@ var require_graceful_fs = __commonJS({ enumerable: true, configurable: true }); - function ReadStream(path17, options) { + function ReadStream(path14, options) { if (this instanceof ReadStream) return fs$ReadStream.apply(this, arguments), this; else @@ -106601,7 +106601,7 @@ var require_graceful_fs = __commonJS({ } }); } - function WriteStream(path17, options) { + function WriteStream(path14, options) { if (this instanceof WriteStream) return fs$WriteStream.apply(this, arguments), this; else @@ -106619,22 +106619,22 @@ var require_graceful_fs = __commonJS({ } }); } - function createReadStream2(path17, options) { - return new fs19.ReadStream(path17, options); + function createReadStream2(path14, options) { + return new fs16.ReadStream(path14, options); } - function createWriteStream3(path17, options) { - return new fs19.WriteStream(path17, options); + function createWriteStream3(path14, options) { + return new fs16.WriteStream(path14, options); } - var fs$open = fs19.open; - fs19.open = open; - function open(path17, flags, mode, cb) { + var fs$open = 
fs16.open; + fs16.open = open; + function open(path14, flags, mode, cb) { if (typeof mode === "function") cb = mode, mode = null; - return go$open(path17, flags, mode, cb); - function go$open(path18, flags2, mode2, cb2, startTime) { - return fs$open(path18, flags2, mode2, function(err, fd) { + return go$open(path14, flags, mode, cb); + function go$open(path15, flags2, mode2, cb2, startTime) { + return fs$open(path15, flags2, mode2, function(err, fd) { if (err && (err.code === "EMFILE" || err.code === "ENFILE")) - enqueue([go$open, [path18, flags2, mode2, cb2], err, startTime || Date.now(), Date.now()]); + enqueue([go$open, [path15, flags2, mode2, cb2], err, startTime || Date.now(), Date.now()]); else { if (typeof cb2 === "function") cb2.apply(this, arguments); @@ -106642,20 +106642,20 @@ var require_graceful_fs = __commonJS({ }); } } - return fs19; + return fs16; } function enqueue(elem) { debug5("ENQUEUE", elem[0].name, elem[1]); - fs18[gracefulQueue].push(elem); + fs15[gracefulQueue].push(elem); retry2(); } var retryTimer; function resetQueue() { var now = Date.now(); - for (var i = 0; i < fs18[gracefulQueue].length; ++i) { - if (fs18[gracefulQueue][i].length > 2) { - fs18[gracefulQueue][i][3] = now; - fs18[gracefulQueue][i][4] = now; + for (var i = 0; i < fs15[gracefulQueue].length; ++i) { + if (fs15[gracefulQueue][i].length > 2) { + fs15[gracefulQueue][i][3] = now; + fs15[gracefulQueue][i][4] = now; } } retry2(); @@ -106663,9 +106663,9 @@ var require_graceful_fs = __commonJS({ function retry2() { clearTimeout(retryTimer); retryTimer = void 0; - if (fs18[gracefulQueue].length === 0) + if (fs15[gracefulQueue].length === 0) return; - var elem = fs18[gracefulQueue].shift(); + var elem = fs15[gracefulQueue].shift(); var fn = elem[0]; var args = elem[1]; var err = elem[2]; @@ -106687,7 +106687,7 @@ var require_graceful_fs = __commonJS({ debug5("RETRY", fn.name, args); fn.apply(null, args.concat([startTime])); } else { - fs18[gracefulQueue].push(elem); + fs15[gracefulQueue].push(elem); } } if (retryTimer === void 0) { @@ -106701,11 +106701,11 @@ var require_graceful_fs = __commonJS({ var require_is_stream = __commonJS({ "node_modules/archiver-utils/node_modules/is-stream/index.js"(exports2, module2) { "use strict"; - var isStream = (stream2) => stream2 !== null && typeof stream2 === "object" && typeof stream2.pipe === "function"; - isStream.writable = (stream2) => isStream(stream2) && stream2.writable !== false && typeof stream2._write === "function" && typeof stream2._writableState === "object"; - isStream.readable = (stream2) => isStream(stream2) && stream2.readable !== false && typeof stream2._read === "function" && typeof stream2._readableState === "object"; - isStream.duplex = (stream2) => isStream.writable(stream2) && isStream.readable(stream2); - isStream.transform = (stream2) => isStream.duplex(stream2) && typeof stream2._transform === "function"; + var isStream = (stream) => stream !== null && typeof stream === "object" && typeof stream.pipe === "function"; + isStream.writable = (stream) => isStream(stream) && stream.writable !== false && typeof stream._write === "function" && typeof stream._writableState === "object"; + isStream.readable = (stream) => isStream(stream) && stream.readable !== false && typeof stream._read === "function" && typeof stream._readableState === "object"; + isStream.duplex = (stream) => isStream.writable(stream) && isStream.readable(stream); + isStream.transform = (stream) => isStream.duplex(stream) && typeof stream._transform === "function"; module2.exports 
= isStream; } }); @@ -106987,7 +106987,7 @@ var require_BufferList = __commonJS({ this.head = this.tail = null; this.length = 0; }; - BufferList.prototype.join = function join15(s) { + BufferList.prototype.join = function join13(s) { if (this.length === 0) return ""; var p = this.head; var ret = "" + p.data; @@ -107131,10 +107131,10 @@ var require_stream_writable = __commonJS({ util.inherits(Writable, Stream); function nop() { } - function WritableState(options, stream2) { + function WritableState(options, stream) { Duplex = Duplex || require_stream_duplex(); options = options || {}; - var isDuplex = stream2 instanceof Duplex; + var isDuplex = stream instanceof Duplex; this.objectMode = !!options.objectMode; if (isDuplex) this.objectMode = this.objectMode || !!options.writableObjectMode; var hwm = options.highWaterMark; @@ -107159,7 +107159,7 @@ var require_stream_writable = __commonJS({ this.sync = true; this.bufferProcessing = false; this.onwrite = function(er) { - onwrite(stream2, er); + onwrite(stream, er); }; this.writecb = null; this.writelen = 0; @@ -107223,12 +107223,12 @@ var require_stream_writable = __commonJS({ Writable.prototype.pipe = function() { this.emit("error", new Error("Cannot pipe, not readable")); }; - function writeAfterEnd(stream2, cb) { + function writeAfterEnd(stream, cb) { var er = new Error("write after end"); - stream2.emit("error", er); + stream.emit("error", er); pna.nextTick(cb, er); } - function validChunk(stream2, state, chunk, cb) { + function validChunk(stream, state, chunk, cb) { var valid3 = true; var er = false; if (chunk === null) { @@ -107237,7 +107237,7 @@ var require_stream_writable = __commonJS({ er = new TypeError("Invalid non-string/buffer chunk"); } if (er) { - stream2.emit("error", er); + stream.emit("error", er); pna.nextTick(cb, er); valid3 = false; } @@ -107296,7 +107296,7 @@ var require_stream_writable = __commonJS({ return this._writableState.highWaterMark; } }); - function writeOrBuffer(stream2, state, isBuf, chunk, encoding, cb) { + function writeOrBuffer(stream, state, isBuf, chunk, encoding, cb) { if (!isBuf) { var newChunk = decodeChunk(state, chunk, encoding); if (chunk !== newChunk) { @@ -107325,31 +107325,31 @@ var require_stream_writable = __commonJS({ } state.bufferedRequestCount += 1; } else { - doWrite(stream2, state, false, len, chunk, encoding, cb); + doWrite(stream, state, false, len, chunk, encoding, cb); } return ret; } - function doWrite(stream2, state, writev, len, chunk, encoding, cb) { + function doWrite(stream, state, writev, len, chunk, encoding, cb) { state.writelen = len; state.writecb = cb; state.writing = true; state.sync = true; - if (writev) stream2._writev(chunk, state.onwrite); - else stream2._write(chunk, encoding, state.onwrite); + if (writev) stream._writev(chunk, state.onwrite); + else stream._write(chunk, encoding, state.onwrite); state.sync = false; } - function onwriteError(stream2, state, sync, er, cb) { + function onwriteError(stream, state, sync, er, cb) { --state.pendingcb; if (sync) { pna.nextTick(cb, er); - pna.nextTick(finishMaybe, stream2, state); - stream2._writableState.errorEmitted = true; - stream2.emit("error", er); + pna.nextTick(finishMaybe, stream, state); + stream._writableState.errorEmitted = true; + stream.emit("error", er); } else { cb(er); - stream2._writableState.errorEmitted = true; - stream2.emit("error", er); - finishMaybe(stream2, state); + stream._writableState.errorEmitted = true; + stream.emit("error", er); + finishMaybe(stream, state); } } function 
onwriteStateUpdate(state) { @@ -107358,40 +107358,40 @@ var require_stream_writable = __commonJS({ state.length -= state.writelen; state.writelen = 0; } - function onwrite(stream2, er) { - var state = stream2._writableState; + function onwrite(stream, er) { + var state = stream._writableState; var sync = state.sync; var cb = state.writecb; onwriteStateUpdate(state); - if (er) onwriteError(stream2, state, sync, er, cb); + if (er) onwriteError(stream, state, sync, er, cb); else { var finished = needFinish(state); if (!finished && !state.corked && !state.bufferProcessing && state.bufferedRequest) { - clearBuffer(stream2, state); + clearBuffer(stream, state); } if (sync) { - asyncWrite(afterWrite, stream2, state, finished, cb); + asyncWrite(afterWrite, stream, state, finished, cb); } else { - afterWrite(stream2, state, finished, cb); + afterWrite(stream, state, finished, cb); } } } - function afterWrite(stream2, state, finished, cb) { - if (!finished) onwriteDrain(stream2, state); + function afterWrite(stream, state, finished, cb) { + if (!finished) onwriteDrain(stream, state); state.pendingcb--; cb(); - finishMaybe(stream2, state); + finishMaybe(stream, state); } - function onwriteDrain(stream2, state) { + function onwriteDrain(stream, state) { if (state.length === 0 && state.needDrain) { state.needDrain = false; - stream2.emit("drain"); + stream.emit("drain"); } } - function clearBuffer(stream2, state) { + function clearBuffer(stream, state) { state.bufferProcessing = true; var entry = state.bufferedRequest; - if (stream2._writev && entry && entry.next) { + if (stream._writev && entry && entry.next) { var l = state.bufferedRequestCount; var buffer = new Array(l); var holder = state.corkedRequestsFree; @@ -107405,7 +107405,7 @@ var require_stream_writable = __commonJS({ count += 1; } buffer.allBuffers = allBuffers; - doWrite(stream2, state, true, state.length, buffer, "", holder.finish); + doWrite(stream, state, true, state.length, buffer, "", holder.finish); state.pendingcb++; state.lastBufferedRequest = null; if (holder.next) { @@ -107421,7 +107421,7 @@ var require_stream_writable = __commonJS({ var encoding = entry.encoding; var cb = entry.callback; var len = state.objectMode ? 
1 : chunk.length; - doWrite(stream2, state, false, len, chunk, encoding, cb); + doWrite(stream, state, false, len, chunk, encoding, cb); entry = entry.next; state.bufferedRequestCount--; if (state.writing) { @@ -107457,49 +107457,49 @@ var require_stream_writable = __commonJS({ function needFinish(state) { return state.ending && state.length === 0 && state.bufferedRequest === null && !state.finished && !state.writing; } - function callFinal(stream2, state) { - stream2._final(function(err) { + function callFinal(stream, state) { + stream._final(function(err) { state.pendingcb--; if (err) { - stream2.emit("error", err); + stream.emit("error", err); } state.prefinished = true; - stream2.emit("prefinish"); - finishMaybe(stream2, state); + stream.emit("prefinish"); + finishMaybe(stream, state); }); } - function prefinish(stream2, state) { + function prefinish(stream, state) { if (!state.prefinished && !state.finalCalled) { - if (typeof stream2._final === "function") { + if (typeof stream._final === "function") { state.pendingcb++; state.finalCalled = true; - pna.nextTick(callFinal, stream2, state); + pna.nextTick(callFinal, stream, state); } else { state.prefinished = true; - stream2.emit("prefinish"); + stream.emit("prefinish"); } } } - function finishMaybe(stream2, state) { + function finishMaybe(stream, state) { var need = needFinish(state); if (need) { - prefinish(stream2, state); + prefinish(stream, state); if (state.pendingcb === 0) { state.finished = true; - stream2.emit("finish"); + stream.emit("finish"); } } return need; } - function endWritable(stream2, state, cb) { + function endWritable(stream, state, cb) { state.ending = true; - finishMaybe(stream2, state); + finishMaybe(stream, state); if (cb) { if (state.finished) pna.nextTick(cb); - else stream2.once("finish", cb); + else stream.once("finish", cb); } state.ended = true; - stream2.writable = false; + stream.writable = false; } function onCorkedFinish(corkReq, state, err) { var entry = corkReq.entry; @@ -107550,9 +107550,9 @@ var require_stream_duplex = __commonJS({ module2.exports = Duplex; var util = Object.create(require_util20()); util.inherits = require_inherits(); - var Readable2 = require_stream_readable(); + var Readable = require_stream_readable(); var Writable = require_stream_writable(); - util.inherits(Duplex, Readable2); + util.inherits(Duplex, Readable); { keys = objectKeys(Writable.prototype); for (v = 0; v < keys.length; v++) { @@ -107565,7 +107565,7 @@ var require_stream_duplex = __commonJS({ var v; function Duplex(options) { if (!(this instanceof Duplex)) return new Duplex(options); - Readable2.call(this, options); + Readable.call(this, options); Writable.call(this, options); if (options && options.readable === false) this.readable = false; if (options && options.writable === false) this.writable = false; @@ -107855,10 +107855,10 @@ var require_stream_readable = __commonJS({ "node_modules/lazystream/node_modules/readable-stream/lib/_stream_readable.js"(exports2, module2) { "use strict"; var pna = require_process_nextick_args(); - module2.exports = Readable2; + module2.exports = Readable; var isArray = require_isarray(); var Duplex; - Readable2.ReadableState = ReadableState; + Readable.ReadableState = ReadableState; var EE = require("events").EventEmitter; var EElistenerCount = function(emitter, type2) { return emitter.listeners(type2).length; @@ -107886,7 +107886,7 @@ var require_stream_readable = __commonJS({ var BufferList = require_BufferList(); var destroyImpl = require_destroy(); var StringDecoder; - 
util.inherits(Readable2, Stream); + util.inherits(Readable, Stream); var kProxyEvents = ["error", "close", "destroy", "pause", "resume"]; function prependListener(emitter, event, fn) { if (typeof emitter.prependListener === "function") return emitter.prependListener(event, fn); @@ -107894,10 +107894,10 @@ var require_stream_readable = __commonJS({ else if (isArray(emitter._events[event])) emitter._events[event].unshift(fn); else emitter._events[event] = [fn, emitter._events[event]]; } - function ReadableState(options, stream2) { + function ReadableState(options, stream) { Duplex = Duplex || require_stream_duplex(); options = options || {}; - var isDuplex = stream2 instanceof Duplex; + var isDuplex = stream instanceof Duplex; this.objectMode = !!options.objectMode; if (isDuplex) this.objectMode = this.objectMode || !!options.readableObjectMode; var hwm = options.highWaterMark; @@ -107932,9 +107932,9 @@ var require_stream_readable = __commonJS({ this.encoding = options.encoding; } } - function Readable2(options) { + function Readable(options) { Duplex = Duplex || require_stream_duplex(); - if (!(this instanceof Readable2)) return new Readable2(options); + if (!(this instanceof Readable)) return new Readable(options); this._readableState = new ReadableState(options, this); this.readable = true; if (options) { @@ -107943,7 +107943,7 @@ var require_stream_readable = __commonJS({ } Stream.call(this); } - Object.defineProperty(Readable2.prototype, "destroyed", { + Object.defineProperty(Readable.prototype, "destroyed", { get: function() { if (this._readableState === void 0) { return false; @@ -107957,13 +107957,13 @@ var require_stream_readable = __commonJS({ this._readableState.destroyed = value; } }); - Readable2.prototype.destroy = destroyImpl.destroy; - Readable2.prototype._undestroy = destroyImpl.undestroy; - Readable2.prototype._destroy = function(err, cb) { + Readable.prototype.destroy = destroyImpl.destroy; + Readable.prototype._undestroy = destroyImpl.undestroy; + Readable.prototype._destroy = function(err, cb) { this.push(null); cb(err); }; - Readable2.prototype.push = function(chunk, encoding) { + Readable.prototype.push = function(chunk, encoding) { var state = this._readableState; var skipChunkCheck; if (!state.objectMode) { @@ -107980,36 +107980,36 @@ var require_stream_readable = __commonJS({ } return readableAddChunk(this, chunk, encoding, false, skipChunkCheck); }; - Readable2.prototype.unshift = function(chunk) { + Readable.prototype.unshift = function(chunk) { return readableAddChunk(this, chunk, null, true, false); }; - function readableAddChunk(stream2, chunk, encoding, addToFront, skipChunkCheck) { - var state = stream2._readableState; + function readableAddChunk(stream, chunk, encoding, addToFront, skipChunkCheck) { + var state = stream._readableState; if (chunk === null) { state.reading = false; - onEofChunk(stream2, state); + onEofChunk(stream, state); } else { var er; if (!skipChunkCheck) er = chunkInvalid(state, chunk); if (er) { - stream2.emit("error", er); + stream.emit("error", er); } else if (state.objectMode || chunk && chunk.length > 0) { if (typeof chunk !== "string" && !state.objectMode && Object.getPrototypeOf(chunk) !== Buffer2.prototype) { chunk = _uint8ArrayToBuffer(chunk); } if (addToFront) { - if (state.endEmitted) stream2.emit("error", new Error("stream.unshift() after end event")); - else addChunk(stream2, state, chunk, true); + if (state.endEmitted) stream.emit("error", new Error("stream.unshift() after end event")); + else addChunk(stream, state, chunk, 
true); } else if (state.ended) { - stream2.emit("error", new Error("stream.push() after EOF")); + stream.emit("error", new Error("stream.push() after EOF")); } else { state.reading = false; if (state.decoder && !encoding) { chunk = state.decoder.write(chunk); - if (state.objectMode || chunk.length !== 0) addChunk(stream2, state, chunk, false); - else maybeReadMore(stream2, state); + if (state.objectMode || chunk.length !== 0) addChunk(stream, state, chunk, false); + else maybeReadMore(stream, state); } else { - addChunk(stream2, state, chunk, false); + addChunk(stream, state, chunk, false); } } } else if (!addToFront) { @@ -108018,17 +108018,17 @@ var require_stream_readable = __commonJS({ } return needMoreData(state); } - function addChunk(stream2, state, chunk, addToFront) { + function addChunk(stream, state, chunk, addToFront) { if (state.flowing && state.length === 0 && !state.sync) { - stream2.emit("data", chunk); - stream2.read(0); + stream.emit("data", chunk); + stream.read(0); } else { state.length += state.objectMode ? 1 : chunk.length; if (addToFront) state.buffer.unshift(chunk); else state.buffer.push(chunk); - if (state.needReadable) emitReadable(stream2); + if (state.needReadable) emitReadable(stream); } - maybeReadMore(stream2, state); + maybeReadMore(stream, state); } function chunkInvalid(state, chunk) { var er; @@ -108040,10 +108040,10 @@ var require_stream_readable = __commonJS({ function needMoreData(state) { return !state.ended && (state.needReadable || state.length < state.highWaterMark || state.length === 0); } - Readable2.prototype.isPaused = function() { + Readable.prototype.isPaused = function() { return this._readableState.flowing === false; }; - Readable2.prototype.setEncoding = function(enc) { + Readable.prototype.setEncoding = function(enc) { if (!StringDecoder) StringDecoder = require_string_decoder().StringDecoder; this._readableState.decoder = new StringDecoder(enc); this._readableState.encoding = enc; @@ -108079,7 +108079,7 @@ var require_stream_readable = __commonJS({ } return state.length; } - Readable2.prototype.read = function(n) { + Readable.prototype.read = function(n) { debug5("read", n); n = parseInt(n, 10); var state = this._readableState; @@ -108130,7 +108130,7 @@ var require_stream_readable = __commonJS({ if (ret !== null) this.emit("data", ret); return ret; }; - function onEofChunk(stream2, state) { + function onEofChunk(stream, state) { if (state.ended) return; if (state.decoder) { var chunk = state.decoder.end(); @@ -108140,44 +108140,44 @@ var require_stream_readable = __commonJS({ } } state.ended = true; - emitReadable(stream2); + emitReadable(stream); } - function emitReadable(stream2) { - var state = stream2._readableState; + function emitReadable(stream) { + var state = stream._readableState; state.needReadable = false; if (!state.emittedReadable) { debug5("emitReadable", state.flowing); state.emittedReadable = true; - if (state.sync) pna.nextTick(emitReadable_, stream2); - else emitReadable_(stream2); + if (state.sync) pna.nextTick(emitReadable_, stream); + else emitReadable_(stream); } } - function emitReadable_(stream2) { + function emitReadable_(stream) { debug5("emit readable"); - stream2.emit("readable"); - flow(stream2); + stream.emit("readable"); + flow(stream); } - function maybeReadMore(stream2, state) { + function maybeReadMore(stream, state) { if (!state.readingMore) { state.readingMore = true; - pna.nextTick(maybeReadMore_, stream2, state); + pna.nextTick(maybeReadMore_, stream, state); } } - function maybeReadMore_(stream2, 
state) { + function maybeReadMore_(stream, state) { var len = state.length; while (!state.reading && !state.flowing && !state.ended && state.length < state.highWaterMark) { debug5("maybeReadMore read 0"); - stream2.read(0); + stream.read(0); if (len === state.length) break; else len = state.length; } state.readingMore = false; } - Readable2.prototype._read = function(n) { + Readable.prototype._read = function(n) { this.emit("error", new Error("_read() is not implemented")); }; - Readable2.prototype.pipe = function(dest, pipeOpts) { + Readable.prototype.pipe = function(dest, pipeOpts) { var src = this; var state = this._readableState; switch (state.pipesCount) { @@ -108282,7 +108282,7 @@ var require_stream_readable = __commonJS({ } }; } - Readable2.prototype.unpipe = function(dest) { + Readable.prototype.unpipe = function(dest) { var state = this._readableState; var unpipeInfo = { hasUnpiped: false }; if (state.pipesCount === 0) return this; @@ -108314,7 +108314,7 @@ var require_stream_readable = __commonJS({ dest.emit("unpipe", this, unpipeInfo); return this; }; - Readable2.prototype.on = function(ev, fn) { + Readable.prototype.on = function(ev, fn) { var res = Stream.prototype.on.call(this, ev, fn); if (ev === "data") { if (this._readableState.flowing !== false) this.resume(); @@ -108332,12 +108332,12 @@ var require_stream_readable = __commonJS({ } return res; }; - Readable2.prototype.addListener = Readable2.prototype.on; + Readable.prototype.addListener = Readable.prototype.on; function nReadingNextTick(self2) { debug5("readable nexttick read 0"); self2.read(0); } - Readable2.prototype.resume = function() { + Readable.prototype.resume = function() { var state = this._readableState; if (!state.flowing) { debug5("resume"); @@ -108346,24 +108346,24 @@ var require_stream_readable = __commonJS({ } return this; }; - function resume(stream2, state) { + function resume(stream, state) { if (!state.resumeScheduled) { state.resumeScheduled = true; - pna.nextTick(resume_, stream2, state); + pna.nextTick(resume_, stream, state); } } - function resume_(stream2, state) { + function resume_(stream, state) { if (!state.reading) { debug5("resume read 0"); - stream2.read(0); + stream.read(0); } state.resumeScheduled = false; state.awaitDrain = 0; - stream2.emit("resume"); - flow(stream2); - if (state.flowing && !state.reading) stream2.read(0); + stream.emit("resume"); + flow(stream); + if (state.flowing && !state.reading) stream.read(0); } - Readable2.prototype.pause = function() { + Readable.prototype.pause = function() { debug5("call pause flowing=%j", this._readableState.flowing); if (false !== this._readableState.flowing) { debug5("pause"); @@ -108372,17 +108372,17 @@ var require_stream_readable = __commonJS({ } return this; }; - function flow(stream2) { - var state = stream2._readableState; + function flow(stream) { + var state = stream._readableState; debug5("flow", state.flowing); - while (state.flowing && stream2.read() !== null) { + while (state.flowing && stream.read() !== null) { } } - Readable2.prototype.wrap = function(stream2) { + Readable.prototype.wrap = function(stream) { var _this = this; var state = this._readableState; var paused = false; - stream2.on("end", function() { + stream.on("end", function() { debug5("wrapped end"); if (state.decoder && !state.ended) { var chunk = state.decoder.end(); @@ -108390,7 +108390,7 @@ var require_stream_readable = __commonJS({ } _this.push(null); }); - stream2.on("data", function(chunk) { + stream.on("data", function(chunk) { debug5("wrapped data"); if 
(state.decoder) chunk = state.decoder.write(chunk); if (state.objectMode && (chunk === null || chunk === void 0)) return; @@ -108398,31 +108398,31 @@ var require_stream_readable = __commonJS({ var ret = _this.push(chunk); if (!ret) { paused = true; - stream2.pause(); + stream.pause(); } }); - for (var i in stream2) { - if (this[i] === void 0 && typeof stream2[i] === "function") { + for (var i in stream) { + if (this[i] === void 0 && typeof stream[i] === "function") { this[i] = /* @__PURE__ */ (function(method) { return function() { - return stream2[method].apply(stream2, arguments); + return stream[method].apply(stream, arguments); }; })(i); } } for (var n = 0; n < kProxyEvents.length; n++) { - stream2.on(kProxyEvents[n], this.emit.bind(this, kProxyEvents[n])); + stream.on(kProxyEvents[n], this.emit.bind(this, kProxyEvents[n])); } this._read = function(n2) { debug5("wrapped _read", n2); if (paused) { paused = false; - stream2.resume(); + stream.resume(); } }; return this; }; - Object.defineProperty(Readable2.prototype, "readableHighWaterMark", { + Object.defineProperty(Readable.prototype, "readableHighWaterMark", { // making it explicit this property is not enumerable // because otherwise some prototype manipulation in // userland will fail @@ -108431,7 +108431,7 @@ var require_stream_readable = __commonJS({ return this._readableState.highWaterMark; } }); - Readable2._fromList = fromList; + Readable._fromList = fromList; function fromList(n, state) { if (state.length === 0) return null; var ret; @@ -108512,19 +108512,19 @@ var require_stream_readable = __commonJS({ list.length -= c; return ret; } - function endReadable(stream2) { - var state = stream2._readableState; + function endReadable(stream) { + var state = stream._readableState; if (state.length > 0) throw new Error('"endReadable()" called on non-empty stream'); if (!state.endEmitted) { state.ended = true; - pna.nextTick(endReadableNT, state, stream2); + pna.nextTick(endReadableNT, state, stream); } } - function endReadableNT(state, stream2) { + function endReadableNT(state, stream) { if (!state.endEmitted && state.length === 0) { state.endEmitted = true; - stream2.readable = false; - stream2.emit("end"); + stream.readable = false; + stream.emit("end"); } } function indexOf(xs, x) { @@ -108625,13 +108625,13 @@ var require_stream_transform = __commonJS({ _this2.emit("close"); }); }; - function done(stream2, er, data) { - if (er) return stream2.emit("error", er); + function done(stream, er, data) { + if (er) return stream.emit("error", er); if (data != null) - stream2.push(data); - if (stream2._writableState.length) throw new Error("Calling transform done when ws.length != 0"); - if (stream2._transformState.transforming) throw new Error("Calling transform done when still transforming"); - return stream2.push(null); + stream.push(data); + if (stream._writableState.length) throw new Error("Calling transform done when ws.length != 0"); + if (stream._transformState.transforming) throw new Error("Calling transform done when still transforming"); + return stream.push(null); } } }); @@ -108693,10 +108693,10 @@ var require_lazystream = __commonJS({ var util = require("util"); var PassThrough = require_passthrough(); module2.exports = { - Readable: Readable2, + Readable, Writable }; - util.inherits(Readable2, PassThrough); + util.inherits(Readable, PassThrough); util.inherits(Writable, PassThrough); function beforeFirstCall(instance, method, callback) { instance[method] = function() { @@ -108705,9 +108705,9 @@ var require_lazystream = 
__commonJS({ return this[method].apply(this, arguments); }; } - function Readable2(fn, options) { - if (!(this instanceof Readable2)) - return new Readable2(fn, options); + function Readable(fn, options) { + if (!(this instanceof Readable)) + return new Readable(fn, options); PassThrough.call(this, options); beforeFirstCall(this, "_read", function() { var source = fn.call(this, options); @@ -108735,22 +108735,22 @@ var require_lazystream = __commonJS({ // node_modules/normalize-path/index.js var require_normalize_path = __commonJS({ "node_modules/normalize-path/index.js"(exports2, module2) { - module2.exports = function(path17, stripTrailing) { - if (typeof path17 !== "string") { + module2.exports = function(path14, stripTrailing) { + if (typeof path14 !== "string") { throw new TypeError("expected path to be a string"); } - if (path17 === "\\" || path17 === "/") return "/"; - var len = path17.length; - if (len <= 1) return path17; + if (path14 === "\\" || path14 === "/") return "/"; + var len = path14.length; + if (len <= 1) return path14; var prefix = ""; - if (len > 4 && path17[3] === "\\") { - var ch = path17[2]; - if ((ch === "?" || ch === ".") && path17.slice(0, 2) === "\\\\") { - path17 = path17.slice(2); + if (len > 4 && path14[3] === "\\") { + var ch = path14[2]; + if ((ch === "?" || ch === ".") && path14.slice(0, 2) === "\\\\") { + path14 = path14.slice(2); prefix = "//"; } } - var segs = path17.split(/[/\\]+/); + var segs = path14.split(/[/\\]+/); if (stripTrailing !== false && segs[segs.length - 1] === "") { segs.pop(); } @@ -111085,132 +111085,132 @@ var require_utils8 = __commonJS({ if (isAsync === false) return typeof obj[SymbolIterator] === "function"; return typeof obj[SymbolAsyncIterator] === "function" || typeof obj[SymbolIterator] === "function"; } - function isDestroyed(stream2) { - if (!isNodeStream(stream2)) return null; - const wState = stream2._writableState; - const rState = stream2._readableState; + function isDestroyed(stream) { + if (!isNodeStream(stream)) return null; + const wState = stream._writableState; + const rState = stream._readableState; const state = wState || rState; - return !!(stream2.destroyed || stream2[kIsDestroyed] || state !== null && state !== void 0 && state.destroyed); + return !!(stream.destroyed || stream[kIsDestroyed] || state !== null && state !== void 0 && state.destroyed); } - function isWritableEnded(stream2) { - if (!isWritableNodeStream(stream2)) return null; - if (stream2.writableEnded === true) return true; - const wState = stream2._writableState; + function isWritableEnded(stream) { + if (!isWritableNodeStream(stream)) return null; + if (stream.writableEnded === true) return true; + const wState = stream._writableState; if (wState !== null && wState !== void 0 && wState.errored) return false; if (typeof (wState === null || wState === void 0 ? void 0 : wState.ended) !== "boolean") return null; return wState.ended; } - function isWritableFinished(stream2, strict) { - if (!isWritableNodeStream(stream2)) return null; - if (stream2.writableFinished === true) return true; - const wState = stream2._writableState; + function isWritableFinished(stream, strict) { + if (!isWritableNodeStream(stream)) return null; + if (stream.writableFinished === true) return true; + const wState = stream._writableState; if (wState !== null && wState !== void 0 && wState.errored) return false; if (typeof (wState === null || wState === void 0 ? 
void 0 : wState.finished) !== "boolean") return null; return !!(wState.finished || strict === false && wState.ended === true && wState.length === 0); } - function isReadableEnded(stream2) { - if (!isReadableNodeStream(stream2)) return null; - if (stream2.readableEnded === true) return true; - const rState = stream2._readableState; + function isReadableEnded(stream) { + if (!isReadableNodeStream(stream)) return null; + if (stream.readableEnded === true) return true; + const rState = stream._readableState; if (!rState || rState.errored) return false; if (typeof (rState === null || rState === void 0 ? void 0 : rState.ended) !== "boolean") return null; return rState.ended; } - function isReadableFinished(stream2, strict) { - if (!isReadableNodeStream(stream2)) return null; - const rState = stream2._readableState; + function isReadableFinished(stream, strict) { + if (!isReadableNodeStream(stream)) return null; + const rState = stream._readableState; if (rState !== null && rState !== void 0 && rState.errored) return false; if (typeof (rState === null || rState === void 0 ? void 0 : rState.endEmitted) !== "boolean") return null; return !!(rState.endEmitted || strict === false && rState.ended === true && rState.length === 0); } - function isReadable(stream2) { - if (stream2 && stream2[kIsReadable] != null) return stream2[kIsReadable]; - if (typeof (stream2 === null || stream2 === void 0 ? void 0 : stream2.readable) !== "boolean") return null; - if (isDestroyed(stream2)) return false; - return isReadableNodeStream(stream2) && stream2.readable && !isReadableFinished(stream2); + function isReadable(stream) { + if (stream && stream[kIsReadable] != null) return stream[kIsReadable]; + if (typeof (stream === null || stream === void 0 ? void 0 : stream.readable) !== "boolean") return null; + if (isDestroyed(stream)) return false; + return isReadableNodeStream(stream) && stream.readable && !isReadableFinished(stream); } - function isWritable(stream2) { - if (stream2 && stream2[kIsWritable] != null) return stream2[kIsWritable]; - if (typeof (stream2 === null || stream2 === void 0 ? void 0 : stream2.writable) !== "boolean") return null; - if (isDestroyed(stream2)) return false; - return isWritableNodeStream(stream2) && stream2.writable && !isWritableEnded(stream2); + function isWritable(stream) { + if (stream && stream[kIsWritable] != null) return stream[kIsWritable]; + if (typeof (stream === null || stream === void 0 ? void 0 : stream.writable) !== "boolean") return null; + if (isDestroyed(stream)) return false; + return isWritableNodeStream(stream) && stream.writable && !isWritableEnded(stream); } - function isFinished(stream2, opts) { - if (!isNodeStream(stream2)) { + function isFinished(stream, opts) { + if (!isNodeStream(stream)) { return null; } - if (isDestroyed(stream2)) { + if (isDestroyed(stream)) { return true; } - if ((opts === null || opts === void 0 ? void 0 : opts.readable) !== false && isReadable(stream2)) { + if ((opts === null || opts === void 0 ? void 0 : opts.readable) !== false && isReadable(stream)) { return false; } - if ((opts === null || opts === void 0 ? void 0 : opts.writable) !== false && isWritable(stream2)) { + if ((opts === null || opts === void 0 ? 
void 0 : opts.writable) !== false && isWritable(stream)) { return false; } return true; } - function isWritableErrored(stream2) { + function isWritableErrored(stream) { var _stream$_writableStat, _stream$_writableStat2; - if (!isNodeStream(stream2)) { + if (!isNodeStream(stream)) { return null; } - if (stream2.writableErrored) { - return stream2.writableErrored; + if (stream.writableErrored) { + return stream.writableErrored; } - return (_stream$_writableStat = (_stream$_writableStat2 = stream2._writableState) === null || _stream$_writableStat2 === void 0 ? void 0 : _stream$_writableStat2.errored) !== null && _stream$_writableStat !== void 0 ? _stream$_writableStat : null; + return (_stream$_writableStat = (_stream$_writableStat2 = stream._writableState) === null || _stream$_writableStat2 === void 0 ? void 0 : _stream$_writableStat2.errored) !== null && _stream$_writableStat !== void 0 ? _stream$_writableStat : null; } - function isReadableErrored(stream2) { + function isReadableErrored(stream) { var _stream$_readableStat, _stream$_readableStat2; - if (!isNodeStream(stream2)) { + if (!isNodeStream(stream)) { return null; } - if (stream2.readableErrored) { - return stream2.readableErrored; + if (stream.readableErrored) { + return stream.readableErrored; } - return (_stream$_readableStat = (_stream$_readableStat2 = stream2._readableState) === null || _stream$_readableStat2 === void 0 ? void 0 : _stream$_readableStat2.errored) !== null && _stream$_readableStat !== void 0 ? _stream$_readableStat : null; + return (_stream$_readableStat = (_stream$_readableStat2 = stream._readableState) === null || _stream$_readableStat2 === void 0 ? void 0 : _stream$_readableStat2.errored) !== null && _stream$_readableStat !== void 0 ? _stream$_readableStat : null; } - function isClosed(stream2) { - if (!isNodeStream(stream2)) { + function isClosed(stream) { + if (!isNodeStream(stream)) { return null; } - if (typeof stream2.closed === "boolean") { - return stream2.closed; + if (typeof stream.closed === "boolean") { + return stream.closed; } - const wState = stream2._writableState; - const rState = stream2._readableState; + const wState = stream._writableState; + const rState = stream._readableState; if (typeof (wState === null || wState === void 0 ? void 0 : wState.closed) === "boolean" || typeof (rState === null || rState === void 0 ? void 0 : rState.closed) === "boolean") { return (wState === null || wState === void 0 ? void 0 : wState.closed) || (rState === null || rState === void 0 ? 
void 0 : rState.closed); } - if (typeof stream2._closed === "boolean" && isOutgoingMessage(stream2)) { - return stream2._closed; + if (typeof stream._closed === "boolean" && isOutgoingMessage(stream)) { + return stream._closed; } return null; } - function isOutgoingMessage(stream2) { - return typeof stream2._closed === "boolean" && typeof stream2._defaultKeepAlive === "boolean" && typeof stream2._removedConnection === "boolean" && typeof stream2._removedContLen === "boolean"; + function isOutgoingMessage(stream) { + return typeof stream._closed === "boolean" && typeof stream._defaultKeepAlive === "boolean" && typeof stream._removedConnection === "boolean" && typeof stream._removedContLen === "boolean"; } - function isServerResponse(stream2) { - return typeof stream2._sent100 === "boolean" && isOutgoingMessage(stream2); + function isServerResponse(stream) { + return typeof stream._sent100 === "boolean" && isOutgoingMessage(stream); } - function isServerRequest(stream2) { + function isServerRequest(stream) { var _stream$req; - return typeof stream2._consuming === "boolean" && typeof stream2._dumped === "boolean" && ((_stream$req = stream2.req) === null || _stream$req === void 0 ? void 0 : _stream$req.upgradeOrConnect) === void 0; + return typeof stream._consuming === "boolean" && typeof stream._dumped === "boolean" && ((_stream$req = stream.req) === null || _stream$req === void 0 ? void 0 : _stream$req.upgradeOrConnect) === void 0; } - function willEmitClose(stream2) { - if (!isNodeStream(stream2)) return null; - const wState = stream2._writableState; - const rState = stream2._readableState; + function willEmitClose(stream) { + if (!isNodeStream(stream)) return null; + const wState = stream._writableState; + const rState = stream._readableState; const state = wState || rState; - return !state && isServerResponse(stream2) || !!(state && state.autoDestroy && state.emitClose && state.closed === false); + return !state && isServerResponse(stream) || !!(state && state.autoDestroy && state.emitClose && state.closed === false); } - function isDisturbed(stream2) { + function isDisturbed(stream) { var _stream$kIsDisturbed; - return !!(stream2 && ((_stream$kIsDisturbed = stream2[kIsDisturbed]) !== null && _stream$kIsDisturbed !== void 0 ? _stream$kIsDisturbed : stream2.readableDidRead || stream2.readableAborted)); + return !!(stream && ((_stream$kIsDisturbed = stream[kIsDisturbed]) !== null && _stream$kIsDisturbed !== void 0 ? _stream$kIsDisturbed : stream.readableDidRead || stream.readableAborted)); } - function isErrored(stream2) { + function isErrored(stream) { var _ref, _ref2, _ref3, _ref4, _ref5, _stream$kIsErrored, _stream$_readableStat3, _stream$_writableStat3, _stream$_readableStat4, _stream$_writableStat4; - return !!(stream2 && ((_ref = (_ref2 = (_ref3 = (_ref4 = (_ref5 = (_stream$kIsErrored = stream2[kIsErrored]) !== null && _stream$kIsErrored !== void 0 ? _stream$kIsErrored : stream2.readableErrored) !== null && _ref5 !== void 0 ? _ref5 : stream2.writableErrored) !== null && _ref4 !== void 0 ? _ref4 : (_stream$_readableStat3 = stream2._readableState) === null || _stream$_readableStat3 === void 0 ? void 0 : _stream$_readableStat3.errorEmitted) !== null && _ref3 !== void 0 ? _ref3 : (_stream$_writableStat3 = stream2._writableState) === null || _stream$_writableStat3 === void 0 ? void 0 : _stream$_writableStat3.errorEmitted) !== null && _ref2 !== void 0 ? _ref2 : (_stream$_readableStat4 = stream2._readableState) === null || _stream$_readableStat4 === void 0 ? 
void 0 : _stream$_readableStat4.errored) !== null && _ref !== void 0 ? _ref : (_stream$_writableStat4 = stream2._writableState) === null || _stream$_writableStat4 === void 0 ? void 0 : _stream$_writableStat4.errored)); + return !!(stream && ((_ref = (_ref2 = (_ref3 = (_ref4 = (_ref5 = (_stream$kIsErrored = stream[kIsErrored]) !== null && _stream$kIsErrored !== void 0 ? _stream$kIsErrored : stream.readableErrored) !== null && _ref5 !== void 0 ? _ref5 : stream.writableErrored) !== null && _ref4 !== void 0 ? _ref4 : (_stream$_readableStat3 = stream._readableState) === null || _stream$_readableStat3 === void 0 ? void 0 : _stream$_readableStat3.errorEmitted) !== null && _ref3 !== void 0 ? _ref3 : (_stream$_writableStat3 = stream._writableState) === null || _stream$_writableStat3 === void 0 ? void 0 : _stream$_writableStat3.errorEmitted) !== null && _ref2 !== void 0 ? _ref2 : (_stream$_readableStat4 = stream._readableState) === null || _stream$_readableStat4 === void 0 ? void 0 : _stream$_readableStat4.errored) !== null && _ref !== void 0 ? _ref : (_stream$_writableStat4 = stream._writableState) === null || _stream$_writableStat4 === void 0 ? void 0 : _stream$_writableStat4.errored)); } module2.exports = { isDestroyed, @@ -111275,12 +111275,12 @@ var require_end_of_stream = __commonJS({ kIsClosedPromise } = require_utils8(); var addAbortListener; - function isRequest(stream2) { - return stream2.setHeader && typeof stream2.abort === "function"; + function isRequest(stream) { + return stream.setHeader && typeof stream.abort === "function"; } var nop = () => { }; - function eos(stream2, options, callback) { + function eos(stream, options, callback) { var _options$readable, _options$writable; if (arguments.length === 2) { callback = options; @@ -111293,133 +111293,133 @@ var require_end_of_stream = __commonJS({ validateFunction(callback, "callback"); validateAbortSignal(options.signal, "options.signal"); callback = once(callback); - if (isReadableStream(stream2) || isWritableStream(stream2)) { - return eosWeb(stream2, options, callback); + if (isReadableStream(stream) || isWritableStream(stream)) { + return eosWeb(stream, options, callback); } - if (!isNodeStream(stream2)) { - throw new ERR_INVALID_ARG_TYPE2("stream", ["ReadableStream", "WritableStream", "Stream"], stream2); + if (!isNodeStream(stream)) { + throw new ERR_INVALID_ARG_TYPE2("stream", ["ReadableStream", "WritableStream", "Stream"], stream); } - const readable = (_options$readable = options.readable) !== null && _options$readable !== void 0 ? _options$readable : isReadableNodeStream(stream2); - const writable = (_options$writable = options.writable) !== null && _options$writable !== void 0 ? _options$writable : isWritableNodeStream(stream2); - const wState = stream2._writableState; - const rState = stream2._readableState; + const readable = (_options$readable = options.readable) !== null && _options$readable !== void 0 ? _options$readable : isReadableNodeStream(stream); + const writable = (_options$writable = options.writable) !== null && _options$writable !== void 0 ? 
_options$writable : isWritableNodeStream(stream); + const wState = stream._writableState; + const rState = stream._readableState; const onlegacyfinish = () => { - if (!stream2.writable) { + if (!stream.writable) { onfinish(); } }; - let willEmitClose = _willEmitClose(stream2) && isReadableNodeStream(stream2) === readable && isWritableNodeStream(stream2) === writable; - let writableFinished = isWritableFinished(stream2, false); + let willEmitClose = _willEmitClose(stream) && isReadableNodeStream(stream) === readable && isWritableNodeStream(stream) === writable; + let writableFinished = isWritableFinished(stream, false); const onfinish = () => { writableFinished = true; - if (stream2.destroyed) { + if (stream.destroyed) { willEmitClose = false; } - if (willEmitClose && (!stream2.readable || readable)) { + if (willEmitClose && (!stream.readable || readable)) { return; } if (!readable || readableFinished) { - callback.call(stream2); + callback.call(stream); } }; - let readableFinished = isReadableFinished(stream2, false); + let readableFinished = isReadableFinished(stream, false); const onend = () => { readableFinished = true; - if (stream2.destroyed) { + if (stream.destroyed) { willEmitClose = false; } - if (willEmitClose && (!stream2.writable || writable)) { + if (willEmitClose && (!stream.writable || writable)) { return; } if (!writable || writableFinished) { - callback.call(stream2); + callback.call(stream); } }; const onerror = (err) => { - callback.call(stream2, err); + callback.call(stream, err); }; - let closed = isClosed(stream2); + let closed = isClosed(stream); const onclose = () => { closed = true; - const errored = isWritableErrored(stream2) || isReadableErrored(stream2); + const errored = isWritableErrored(stream) || isReadableErrored(stream); if (errored && typeof errored !== "boolean") { - return callback.call(stream2, errored); + return callback.call(stream, errored); } - if (readable && !readableFinished && isReadableNodeStream(stream2, true)) { - if (!isReadableFinished(stream2, false)) return callback.call(stream2, new ERR_STREAM_PREMATURE_CLOSE()); + if (readable && !readableFinished && isReadableNodeStream(stream, true)) { + if (!isReadableFinished(stream, false)) return callback.call(stream, new ERR_STREAM_PREMATURE_CLOSE()); } if (writable && !writableFinished) { - if (!isWritableFinished(stream2, false)) return callback.call(stream2, new ERR_STREAM_PREMATURE_CLOSE()); + if (!isWritableFinished(stream, false)) return callback.call(stream, new ERR_STREAM_PREMATURE_CLOSE()); } - callback.call(stream2); + callback.call(stream); }; const onclosed = () => { closed = true; - const errored = isWritableErrored(stream2) || isReadableErrored(stream2); + const errored = isWritableErrored(stream) || isReadableErrored(stream); if (errored && typeof errored !== "boolean") { - return callback.call(stream2, errored); + return callback.call(stream, errored); } - callback.call(stream2); + callback.call(stream); }; const onrequest = () => { - stream2.req.on("finish", onfinish); + stream.req.on("finish", onfinish); }; - if (isRequest(stream2)) { - stream2.on("complete", onfinish); + if (isRequest(stream)) { + stream.on("complete", onfinish); if (!willEmitClose) { - stream2.on("abort", onclose); + stream.on("abort", onclose); } - if (stream2.req) { + if (stream.req) { onrequest(); } else { - stream2.on("request", onrequest); + stream.on("request", onrequest); } } else if (writable && !wState) { - stream2.on("end", onlegacyfinish); - stream2.on("close", onlegacyfinish); + stream.on("end", 
onlegacyfinish); + stream.on("close", onlegacyfinish); } - if (!willEmitClose && typeof stream2.aborted === "boolean") { - stream2.on("aborted", onclose); + if (!willEmitClose && typeof stream.aborted === "boolean") { + stream.on("aborted", onclose); } - stream2.on("end", onend); - stream2.on("finish", onfinish); + stream.on("end", onend); + stream.on("finish", onfinish); if (options.error !== false) { - stream2.on("error", onerror); + stream.on("error", onerror); } - stream2.on("close", onclose); + stream.on("close", onclose); if (closed) { process2.nextTick(onclose); } else if (wState !== null && wState !== void 0 && wState.errorEmitted || rState !== null && rState !== void 0 && rState.errorEmitted) { if (!willEmitClose) { process2.nextTick(onclosed); } - } else if (!readable && (!willEmitClose || isReadable(stream2)) && (writableFinished || isWritable(stream2) === false)) { + } else if (!readable && (!willEmitClose || isReadable(stream)) && (writableFinished || isWritable(stream) === false)) { process2.nextTick(onclosed); - } else if (!writable && (!willEmitClose || isWritable(stream2)) && (readableFinished || isReadable(stream2) === false)) { + } else if (!writable && (!willEmitClose || isWritable(stream)) && (readableFinished || isReadable(stream) === false)) { process2.nextTick(onclosed); - } else if (rState && stream2.req && stream2.aborted) { + } else if (rState && stream.req && stream.aborted) { process2.nextTick(onclosed); } const cleanup = () => { callback = nop; - stream2.removeListener("aborted", onclose); - stream2.removeListener("complete", onfinish); - stream2.removeListener("abort", onclose); - stream2.removeListener("request", onrequest); - if (stream2.req) stream2.req.removeListener("finish", onfinish); - stream2.removeListener("end", onlegacyfinish); - stream2.removeListener("close", onlegacyfinish); - stream2.removeListener("finish", onfinish); - stream2.removeListener("end", onend); - stream2.removeListener("error", onerror); - stream2.removeListener("close", onclose); + stream.removeListener("aborted", onclose); + stream.removeListener("complete", onfinish); + stream.removeListener("abort", onclose); + stream.removeListener("request", onrequest); + if (stream.req) stream.req.removeListener("finish", onfinish); + stream.removeListener("end", onlegacyfinish); + stream.removeListener("close", onlegacyfinish); + stream.removeListener("finish", onfinish); + stream.removeListener("end", onend); + stream.removeListener("error", onerror); + stream.removeListener("close", onclose); }; if (options.signal && !closed) { const abort = () => { const endCallback = callback; cleanup(); endCallback.call( - stream2, + stream, new AbortError(void 0, { cause: options.signal.reason }) @@ -111433,20 +111433,20 @@ var require_end_of_stream = __commonJS({ const originalCallback = callback; callback = once((...args) => { disposable[SymbolDispose](); - originalCallback.apply(stream2, args); + originalCallback.apply(stream, args); }); } } return cleanup; } - function eosWeb(stream2, options, callback) { + function eosWeb(stream, options, callback) { let isAborted = false; let abort = nop; if (options.signal) { abort = () => { isAborted = true; callback.call( - stream2, + stream, new AbortError(void 0, { cause: options.signal.reason }) @@ -111460,19 +111460,19 @@ var require_end_of_stream = __commonJS({ const originalCallback = callback; callback = once((...args) => { disposable[SymbolDispose](); - originalCallback.apply(stream2, args); + originalCallback.apply(stream, args); }); } } const 
resolverFn = (...args) => { if (!isAborted) { - process2.nextTick(() => callback.apply(stream2, args)); + process2.nextTick(() => callback.apply(stream, args)); } }; - PromisePrototypeThen(stream2[kIsClosedPromise].promise, resolverFn, resolverFn); + PromisePrototypeThen(stream[kIsClosedPromise].promise, resolverFn, resolverFn); return nop; } - function finished(stream2, opts) { + function finished(stream, opts) { var _opts; let autoCleanup = false; if (opts === null) { @@ -111483,7 +111483,7 @@ var require_end_of_stream = __commonJS({ autoCleanup = opts.cleanup; } return new Promise2((resolve8, reject) => { - const cleanup = eos(stream2, opts, (err) => { + const cleanup = eos(stream, opts, (err) => { if (autoCleanup) { cleanup(); } @@ -111641,14 +111641,14 @@ var require_destroy2 = __commonJS({ w.finished = w.writable === false; } } - function errorOrDestroy(stream2, err, sync) { - const r = stream2._readableState; - const w = stream2._writableState; + function errorOrDestroy(stream, err, sync) { + const r = stream._readableState; + const w = stream._writableState; if (w !== null && w !== void 0 && w.destroyed || r !== null && r !== void 0 && r.destroyed) { return this; } if (r !== null && r !== void 0 && r.autoDestroy || w !== null && w !== void 0 && w.autoDestroy) - stream2.destroy(err); + stream.destroy(err); else if (err) { err.stack; if (w && !w.errored) { @@ -111658,40 +111658,40 @@ var require_destroy2 = __commonJS({ r.errored = err; } if (sync) { - process2.nextTick(emitErrorNT, stream2, err); + process2.nextTick(emitErrorNT, stream, err); } else { - emitErrorNT(stream2, err); + emitErrorNT(stream, err); } } } - function construct(stream2, cb) { - if (typeof stream2._construct !== "function") { + function construct(stream, cb) { + if (typeof stream._construct !== "function") { return; } - const r = stream2._readableState; - const w = stream2._writableState; + const r = stream._readableState; + const w = stream._writableState; if (r) { r.constructed = false; } if (w) { w.constructed = false; } - stream2.once(kConstruct, cb); - if (stream2.listenerCount(kConstruct) > 1) { + stream.once(kConstruct, cb); + if (stream.listenerCount(kConstruct) > 1) { return; } - process2.nextTick(constructNT, stream2); + process2.nextTick(constructNT, stream); } - function constructNT(stream2) { + function constructNT(stream) { let called = false; function onConstruct(err) { if (called) { - errorOrDestroy(stream2, err !== null && err !== void 0 ? err : new ERR_MULTIPLE_CALLBACK()); + errorOrDestroy(stream, err !== null && err !== void 0 ? 
err : new ERR_MULTIPLE_CALLBACK()); return; } called = true; - const r = stream2._readableState; - const w = stream2._writableState; + const r = stream._readableState; + const w = stream._writableState; const s = w || r; if (r) { r.constructed = true; @@ -111700,59 +111700,59 @@ var require_destroy2 = __commonJS({ w.constructed = true; } if (s.destroyed) { - stream2.emit(kDestroy, err); + stream.emit(kDestroy, err); } else if (err) { - errorOrDestroy(stream2, err, true); + errorOrDestroy(stream, err, true); } else { - process2.nextTick(emitConstructNT, stream2); + process2.nextTick(emitConstructNT, stream); } } try { - stream2._construct((err) => { + stream._construct((err) => { process2.nextTick(onConstruct, err); }); } catch (err) { process2.nextTick(onConstruct, err); } } - function emitConstructNT(stream2) { - stream2.emit(kConstruct); + function emitConstructNT(stream) { + stream.emit(kConstruct); } - function isRequest(stream2) { - return (stream2 === null || stream2 === void 0 ? void 0 : stream2.setHeader) && typeof stream2.abort === "function"; + function isRequest(stream) { + return (stream === null || stream === void 0 ? void 0 : stream.setHeader) && typeof stream.abort === "function"; } - function emitCloseLegacy(stream2) { - stream2.emit("close"); + function emitCloseLegacy(stream) { + stream.emit("close"); } - function emitErrorCloseLegacy(stream2, err) { - stream2.emit("error", err); - process2.nextTick(emitCloseLegacy, stream2); + function emitErrorCloseLegacy(stream, err) { + stream.emit("error", err); + process2.nextTick(emitCloseLegacy, stream); } - function destroyer(stream2, err) { - if (!stream2 || isDestroyed(stream2)) { + function destroyer(stream, err) { + if (!stream || isDestroyed(stream)) { return; } - if (!err && !isFinished(stream2)) { + if (!err && !isFinished(stream)) { err = new AbortError(); } - if (isServerRequest(stream2)) { - stream2.socket = null; - stream2.destroy(err); - } else if (isRequest(stream2)) { - stream2.abort(); - } else if (isRequest(stream2.req)) { - stream2.req.abort(); - } else if (typeof stream2.destroy === "function") { - stream2.destroy(err); - } else if (typeof stream2.close === "function") { - stream2.close(); + if (isServerRequest(stream)) { + stream.socket = null; + stream.destroy(err); + } else if (isRequest(stream)) { + stream.abort(); + } else if (isRequest(stream.req)) { + stream.req.abort(); + } else if (typeof stream.destroy === "function") { + stream.destroy(err); + } else if (typeof stream.close === "function") { + stream.close(); } else if (err) { - process2.nextTick(emitErrorCloseLegacy, stream2, err); + process2.nextTick(emitErrorCloseLegacy, stream, err); } else { - process2.nextTick(emitCloseLegacy, stream2); + process2.nextTick(emitCloseLegacy, stream); } - if (!stream2.destroyed) { - stream2[kIsDestroyed] = true; + if (!stream.destroyed) { + stream[kIsDestroyed] = true; } } module2.exports = { @@ -111858,25 +111858,25 @@ var require_add_abort_signal = __commonJS({ throw new ERR_INVALID_ARG_TYPE2(name, "AbortSignal", signal); } }; - module2.exports.addAbortSignal = function addAbortSignal(signal, stream2) { + module2.exports.addAbortSignal = function addAbortSignal(signal, stream) { validateAbortSignal(signal, "signal"); - if (!isNodeStream(stream2) && !isWebStream(stream2)) { - throw new ERR_INVALID_ARG_TYPE2("stream", ["ReadableStream", "WritableStream", "Stream"], stream2); + if (!isNodeStream(stream) && !isWebStream(stream)) { + throw new ERR_INVALID_ARG_TYPE2("stream", ["ReadableStream", "WritableStream", 
"Stream"], stream); } - return module2.exports.addAbortSignalNoValidate(signal, stream2); + return module2.exports.addAbortSignalNoValidate(signal, stream); }; - module2.exports.addAbortSignalNoValidate = function(signal, stream2) { + module2.exports.addAbortSignalNoValidate = function(signal, stream) { if (typeof signal !== "object" || !("aborted" in signal)) { - return stream2; + return stream; } - const onAbort = isNodeStream(stream2) ? () => { - stream2.destroy( + const onAbort = isNodeStream(stream) ? () => { + stream.destroy( new AbortError(void 0, { cause: signal.reason }) ); } : () => { - stream2[kControllerErrorFunction]( + stream[kControllerErrorFunction]( new AbortError(void 0, { cause: signal.reason }) @@ -111887,9 +111887,9 @@ var require_add_abort_signal = __commonJS({ } else { addAbortListener = addAbortListener || require_util21().addAbortListener; const disposable = addAbortListener(signal, onAbort); - eos(stream2, disposable[SymbolDispose]); + eos(stream, disposable[SymbolDispose]); } - return stream2; + return stream; }; } }); @@ -112099,10 +112099,10 @@ var require_from = __commonJS({ var { PromisePrototypeThen, SymbolAsyncIterator, SymbolIterator } = require_primordials(); var { Buffer: Buffer2 } = require("buffer"); var { ERR_INVALID_ARG_TYPE: ERR_INVALID_ARG_TYPE2, ERR_STREAM_NULL_VALUES } = require_errors5().codes; - function from(Readable2, iterable, opts) { + function from(Readable, iterable, opts) { let iterator2; if (typeof iterable === "string" || iterable instanceof Buffer2) { - return new Readable2({ + return new Readable({ objectMode: true, ...opts, read() { @@ -112121,7 +112121,7 @@ var require_from = __commonJS({ } else { throw new ERR_INVALID_ARG_TYPE2("iterable", ["Iterable"], iterable); } - const readable = new Readable2({ + const readable = new Readable({ objectMode: true, highWaterMark: 1, // TODO(ronag): What options should be allowed? 
@@ -112204,8 +112204,8 @@ var require_readable4 = __commonJS({ SymbolAsyncIterator, Symbol: Symbol2 } = require_primordials(); - module2.exports = Readable2; - Readable2.ReadableState = ReadableState; + module2.exports = Readable; + Readable.ReadableState = ReadableState; var { EventEmitter: EE } = require("events"); var { Stream, prependListener } = require_legacy(); var { Buffer: Buffer2 } = require("buffer"); @@ -112232,8 +112232,8 @@ var require_readable4 = __commonJS({ var kPaused = Symbol2("kPaused"); var { StringDecoder } = require("string_decoder"); var from = require_from(); - ObjectSetPrototypeOf(Readable2.prototype, Stream.prototype); - ObjectSetPrototypeOf(Readable2, Stream); + ObjectSetPrototypeOf(Readable.prototype, Stream.prototype); + ObjectSetPrototypeOf(Readable, Stream); var nop = () => { }; var { errorOrDestroy } = destroyImpl; @@ -112305,8 +112305,8 @@ var require_readable4 = __commonJS({ readingMore: makeBitMapDescriptor(kReadingMore), dataEmitted: makeBitMapDescriptor(kDataEmitted) }); - function ReadableState(options, stream2, isDuplex) { - if (typeof isDuplex !== "boolean") isDuplex = stream2 instanceof require_duplex(); + function ReadableState(options, stream, isDuplex) { + if (typeof isDuplex !== "boolean") isDuplex = stream instanceof require_duplex(); this.state = kEmitClose | kAutoDestroy | kConstructed | kSync; if (options && options.objectMode) this.state |= kObjectMode; if (isDuplex && options && options.readableObjectMode) this.state |= kObjectMode; @@ -112328,8 +112328,8 @@ var require_readable4 = __commonJS({ this.encoding = options.encoding; } } - function Readable2(options) { - if (!(this instanceof Readable2)) return new Readable2(options); + function Readable(options) { + if (!(this instanceof Readable)) return new Readable(options); const isDuplex = this instanceof require_duplex(); this._readableState = new ReadableState(options, this, isDuplex); if (options) { @@ -112345,15 +112345,15 @@ var require_readable4 = __commonJS({ } }); } - Readable2.prototype.destroy = destroyImpl.destroy; - Readable2.prototype._undestroy = destroyImpl.undestroy; - Readable2.prototype._destroy = function(err, cb) { + Readable.prototype.destroy = destroyImpl.destroy; + Readable.prototype._undestroy = destroyImpl.undestroy; + Readable.prototype._destroy = function(err, cb) { cb(err); }; - Readable2.prototype[EE.captureRejectionSymbol] = function(err) { + Readable.prototype[EE.captureRejectionSymbol] = function(err) { this.destroy(err); }; - Readable2.prototype[SymbolAsyncDispose] = function() { + Readable.prototype[SymbolAsyncDispose] = function() { let error3; if (!this.destroyed) { error3 = this.readableEnded ? null : new AbortError(); @@ -112361,15 +112361,15 @@ var require_readable4 = __commonJS({ } return new Promise2((resolve8, reject) => eos(this, (err) => err && err !== error3 ? 
reject(err) : resolve8(null))); }; - Readable2.prototype.push = function(chunk, encoding) { + Readable.prototype.push = function(chunk, encoding) { return readableAddChunk(this, chunk, encoding, false); }; - Readable2.prototype.unshift = function(chunk, encoding) { + Readable.prototype.unshift = function(chunk, encoding) { return readableAddChunk(this, chunk, encoding, true); }; - function readableAddChunk(stream2, chunk, encoding, addToFront) { + function readableAddChunk(stream, chunk, encoding, addToFront) { debug5("readableAddChunk", chunk); - const state = stream2._readableState; + const state = stream._readableState; let err; if ((state.state & kObjectMode) === 0) { if (typeof chunk === "string") { @@ -112392,57 +112392,57 @@ var require_readable4 = __commonJS({ } } if (err) { - errorOrDestroy(stream2, err); + errorOrDestroy(stream, err); } else if (chunk === null) { state.state &= ~kReading; - onEofChunk(stream2, state); + onEofChunk(stream, state); } else if ((state.state & kObjectMode) !== 0 || chunk && chunk.length > 0) { if (addToFront) { - if ((state.state & kEndEmitted) !== 0) errorOrDestroy(stream2, new ERR_STREAM_UNSHIFT_AFTER_END_EVENT()); + if ((state.state & kEndEmitted) !== 0) errorOrDestroy(stream, new ERR_STREAM_UNSHIFT_AFTER_END_EVENT()); else if (state.destroyed || state.errored) return false; - else addChunk(stream2, state, chunk, true); + else addChunk(stream, state, chunk, true); } else if (state.ended) { - errorOrDestroy(stream2, new ERR_STREAM_PUSH_AFTER_EOF()); + errorOrDestroy(stream, new ERR_STREAM_PUSH_AFTER_EOF()); } else if (state.destroyed || state.errored) { return false; } else { state.state &= ~kReading; if (state.decoder && !encoding) { chunk = state.decoder.write(chunk); - if (state.objectMode || chunk.length !== 0) addChunk(stream2, state, chunk, false); - else maybeReadMore(stream2, state); + if (state.objectMode || chunk.length !== 0) addChunk(stream, state, chunk, false); + else maybeReadMore(stream, state); } else { - addChunk(stream2, state, chunk, false); + addChunk(stream, state, chunk, false); } } } else if (!addToFront) { state.state &= ~kReading; - maybeReadMore(stream2, state); + maybeReadMore(stream, state); } return !state.ended && (state.length < state.highWaterMark || state.length === 0); } - function addChunk(stream2, state, chunk, addToFront) { - if (state.flowing && state.length === 0 && !state.sync && stream2.listenerCount("data") > 0) { + function addChunk(stream, state, chunk, addToFront) { + if (state.flowing && state.length === 0 && !state.sync && stream.listenerCount("data") > 0) { if ((state.state & kMultiAwaitDrain) !== 0) { state.awaitDrainWriters.clear(); } else { state.awaitDrainWriters = null; } state.dataEmitted = true; - stream2.emit("data", chunk); + stream.emit("data", chunk); } else { state.length += state.objectMode ? 
1 : chunk.length; if (addToFront) state.buffer.unshift(chunk); else state.buffer.push(chunk); - if ((state.state & kNeedReadable) !== 0) emitReadable(stream2); + if ((state.state & kNeedReadable) !== 0) emitReadable(stream); } - maybeReadMore(stream2, state); + maybeReadMore(stream, state); } - Readable2.prototype.isPaused = function() { + Readable.prototype.isPaused = function() { const state = this._readableState; return state[kPaused] === true || state.flowing === false; }; - Readable2.prototype.setEncoding = function(enc) { + Readable.prototype.setEncoding = function(enc) { const decoder = new StringDecoder(enc); this._readableState.decoder = decoder; this._readableState.encoding = this._readableState.decoder.encoding; @@ -112481,7 +112481,7 @@ var require_readable4 = __commonJS({ if (n <= state.length) return n; return state.ended ? state.length : 0; } - Readable2.prototype.read = function(n) { + Readable.prototype.read = function(n) { debug5("read", n); if (n === void 0) { n = NaN; @@ -112548,7 +112548,7 @@ var require_readable4 = __commonJS({ } return ret; }; - function onEofChunk(stream2, state) { + function onEofChunk(stream, state) { debug5("onEofChunk"); if (state.ended) return; if (state.decoder) { @@ -112560,53 +112560,53 @@ var require_readable4 = __commonJS({ } state.ended = true; if (state.sync) { - emitReadable(stream2); + emitReadable(stream); } else { state.needReadable = false; state.emittedReadable = true; - emitReadable_(stream2); + emitReadable_(stream); } } - function emitReadable(stream2) { - const state = stream2._readableState; + function emitReadable(stream) { + const state = stream._readableState; debug5("emitReadable", state.needReadable, state.emittedReadable); state.needReadable = false; if (!state.emittedReadable) { debug5("emitReadable", state.flowing); state.emittedReadable = true; - process2.nextTick(emitReadable_, stream2); + process2.nextTick(emitReadable_, stream); } } - function emitReadable_(stream2) { - const state = stream2._readableState; + function emitReadable_(stream) { + const state = stream._readableState; debug5("emitReadable_", state.destroyed, state.length, state.ended); if (!state.destroyed && !state.errored && (state.length || state.ended)) { - stream2.emit("readable"); + stream.emit("readable"); state.emittedReadable = false; } state.needReadable = !state.flowing && !state.ended && state.length <= state.highWaterMark; - flow(stream2); + flow(stream); } - function maybeReadMore(stream2, state) { + function maybeReadMore(stream, state) { if (!state.readingMore && state.constructed) { state.readingMore = true; - process2.nextTick(maybeReadMore_, stream2, state); + process2.nextTick(maybeReadMore_, stream, state); } } - function maybeReadMore_(stream2, state) { + function maybeReadMore_(stream, state) { while (!state.reading && !state.ended && (state.length < state.highWaterMark || state.flowing && state.length === 0)) { const len = state.length; debug5("maybeReadMore read 0"); - stream2.read(0); + stream.read(0); if (len === state.length) break; } state.readingMore = false; } - Readable2.prototype._read = function(n) { + Readable.prototype._read = function(n) { throw new ERR_METHOD_NOT_IMPLEMENTED("_read()"); }; - Readable2.prototype.pipe = function(dest, pipeOpts) { + Readable.prototype.pipe = function(dest, pipeOpts) { const src = this; const state = this._readableState; if (state.pipes.length === 1) { @@ -112731,7 +112731,7 @@ var require_readable4 = __commonJS({ } }; } - Readable2.prototype.unpipe = function(dest) { + 
Readable.prototype.unpipe = function(dest) { const state = this._readableState; const unpipeInfo = { hasUnpiped: false @@ -112754,7 +112754,7 @@ var require_readable4 = __commonJS({ dest.emit("unpipe", this, unpipeInfo); return this; }; - Readable2.prototype.on = function(ev, fn) { + Readable.prototype.on = function(ev, fn) { const res = Stream.prototype.on.call(this, ev, fn); const state = this._readableState; if (ev === "data") { @@ -112775,16 +112775,16 @@ var require_readable4 = __commonJS({ } return res; }; - Readable2.prototype.addListener = Readable2.prototype.on; - Readable2.prototype.removeListener = function(ev, fn) { + Readable.prototype.addListener = Readable.prototype.on; + Readable.prototype.removeListener = function(ev, fn) { const res = Stream.prototype.removeListener.call(this, ev, fn); if (ev === "readable") { process2.nextTick(updateReadableListening, this); } return res; }; - Readable2.prototype.off = Readable2.prototype.removeListener; - Readable2.prototype.removeAllListeners = function(ev) { + Readable.prototype.off = Readable.prototype.removeListener; + Readable.prototype.removeAllListeners = function(ev) { const res = Stream.prototype.removeAllListeners.apply(this, arguments); if (ev === "readable" || ev === void 0) { process2.nextTick(updateReadableListening, this); @@ -112806,7 +112806,7 @@ var require_readable4 = __commonJS({ debug5("readable nexttick read 0"); self2.read(0); } - Readable2.prototype.resume = function() { + Readable.prototype.resume = function() { const state = this._readableState; if (!state.flowing) { debug5("resume"); @@ -112816,23 +112816,23 @@ var require_readable4 = __commonJS({ state[kPaused] = false; return this; }; - function resume(stream2, state) { + function resume(stream, state) { if (!state.resumeScheduled) { state.resumeScheduled = true; - process2.nextTick(resume_, stream2, state); + process2.nextTick(resume_, stream, state); } } - function resume_(stream2, state) { + function resume_(stream, state) { debug5("resume", state.reading); if (!state.reading) { - stream2.read(0); + stream.read(0); } state.resumeScheduled = false; - stream2.emit("resume"); - flow(stream2); - if (state.flowing && !state.reading) stream2.read(0); + stream.emit("resume"); + flow(stream); + if (state.flowing && !state.reading) stream.read(0); } - Readable2.prototype.pause = function() { + Readable.prototype.pause = function() { debug5("call pause flowing=%j", this._readableState.flowing); if (this._readableState.flowing !== false) { debug5("pause"); @@ -112842,79 +112842,79 @@ var require_readable4 = __commonJS({ this._readableState[kPaused] = true; return this; }; - function flow(stream2) { - const state = stream2._readableState; + function flow(stream) { + const state = stream._readableState; debug5("flow", state.flowing); - while (state.flowing && stream2.read() !== null) ; + while (state.flowing && stream.read() !== null) ; } - Readable2.prototype.wrap = function(stream2) { + Readable.prototype.wrap = function(stream) { let paused = false; - stream2.on("data", (chunk) => { - if (!this.push(chunk) && stream2.pause) { + stream.on("data", (chunk) => { + if (!this.push(chunk) && stream.pause) { paused = true; - stream2.pause(); + stream.pause(); } }); - stream2.on("end", () => { + stream.on("end", () => { this.push(null); }); - stream2.on("error", (err) => { + stream.on("error", (err) => { errorOrDestroy(this, err); }); - stream2.on("close", () => { + stream.on("close", () => { this.destroy(); }); - stream2.on("destroy", () => { + stream.on("destroy", () => { 
this.destroy(); }); this._read = () => { - if (paused && stream2.resume) { + if (paused && stream.resume) { paused = false; - stream2.resume(); + stream.resume(); } }; - const streamKeys = ObjectKeys(stream2); + const streamKeys = ObjectKeys(stream); for (let j = 1; j < streamKeys.length; j++) { const i = streamKeys[j]; - if (this[i] === void 0 && typeof stream2[i] === "function") { - this[i] = stream2[i].bind(stream2); + if (this[i] === void 0 && typeof stream[i] === "function") { + this[i] = stream[i].bind(stream); } } return this; }; - Readable2.prototype[SymbolAsyncIterator] = function() { + Readable.prototype[SymbolAsyncIterator] = function() { return streamToAsyncIterator(this); }; - Readable2.prototype.iterator = function(options) { + Readable.prototype.iterator = function(options) { if (options !== void 0) { validateObject(options, "options"); } return streamToAsyncIterator(this, options); }; - function streamToAsyncIterator(stream2, options) { - if (typeof stream2.read !== "function") { - stream2 = Readable2.wrap(stream2, { + function streamToAsyncIterator(stream, options) { + if (typeof stream.read !== "function") { + stream = Readable.wrap(stream, { objectMode: true }); } - const iter = createAsyncIterator(stream2, options); - iter.stream = stream2; + const iter = createAsyncIterator(stream, options); + iter.stream = stream; return iter; } - async function* createAsyncIterator(stream2, options) { + async function* createAsyncIterator(stream, options) { let callback = nop; function next(resolve8) { - if (this === stream2) { + if (this === stream) { callback(); callback = nop; } else { callback = resolve8; } } - stream2.on("readable", next); + stream.on("readable", next); let error3; const cleanup = eos( - stream2, + stream, { writable: false }, @@ -112926,7 +112926,7 @@ var require_readable4 = __commonJS({ ); try { while (true) { - const chunk = stream2.destroyed ? null : stream2.read(); + const chunk = stream.destroyed ? null : stream.read(); if (chunk !== null) { yield chunk; } else if (error3) { @@ -112941,15 +112941,15 @@ var require_readable4 = __commonJS({ error3 = aggregateTwoErrors(error3, err); throw error3; } finally { - if ((error3 || (options === null || options === void 0 ? void 0 : options.destroyOnReturn) !== false) && (error3 === void 0 || stream2._readableState.autoDestroy)) { - destroyImpl.destroyer(stream2, null); + if ((error3 || (options === null || options === void 0 ? 
[Remaining hunks: mechanical renames in the generated bundle produced by re-running the build after the refactor. The bundler renumbers its generated identifiers throughout the vendored dependencies (node:stream readable/writable/duplex/pipeline internals, minimatch, path-scurry, glob, archiver, tar-stream, streamx, @actions/artifact, undici), e.g. stream2 -> stream, Readable2 -> Readable, path17 -> path14, fs18 -> fs15; no functional edits appear in these hunks.]
(request2.onHeaders(Number(statusCode), realHeaders, stream.resume.bind(stream), "") === false) { + stream.pause(); } }); - stream2.once("end", () => { + stream.once("end", () => { request2.onComplete([]); }); - stream2.on("data", (chunk) => { + stream.on("data", (chunk) => { if (request2.onData(chunk) === false) { - stream2.pause(); + stream.pause(); } }); - stream2.once("close", () => { + stream.once("close", () => { h2State.openStreams -= 1; if (h2State.openStreams === 0) { session.unref(); } }); - stream2.once("error", function(err) { + stream.once("error", function(err) { if (client[kHTTP2Session] && !client[kHTTP2Session].destroyed && !this.closed && !this.destroyed) { h2State.streams -= 1; - util.destroy(stream2, err); + util.destroy(stream, err); } }); - stream2.once("frameError", (type2, code) => { + stream.once("frameError", (type2, code) => { const err = new InformationalError(`HTTP/2: "frameError" received - type ${type2}, code ${code}`); errorRequest2(client, request2, err); if (client[kHTTP2Session] && !client[kHTTP2Session].destroyed && !this.closed && !this.destroyed) { h2State.streams -= 1; - util.destroy(stream2, err); + util.destroy(stream, err); } }); return true; @@ -136132,10 +136132,10 @@ upgrade: ${upgrade}\r request2.onRequestSent(); } else if (util.isBuffer(body)) { assert(contentLength === body.byteLength, "buffer body must have content length"); - stream2.cork(); - stream2.write(body); - stream2.uncork(); - stream2.end(); + stream.cork(); + stream.write(body); + stream.uncork(); + stream.end(); request2.onBodySent(body); request2.onRequestSent(); } else if (util.isBlobLike(body)) { @@ -136144,7 +136144,7 @@ upgrade: ${upgrade}\r client, request: request2, contentLength, - h2stream: stream2, + h2stream: stream, expectsPayload, body: body.stream(), socket: client[kSocket], @@ -136157,7 +136157,7 @@ upgrade: ${upgrade}\r request: request2, contentLength, expectsPayload, - h2stream: stream2, + h2stream: stream, header: "", socket: client[kSocket] }); @@ -136170,7 +136170,7 @@ upgrade: ${upgrade}\r contentLength, expectsPayload, socket: client[kSocket], - h2stream: stream2, + h2stream: stream, header: "" }); } else if (util.isIterable(body)) { @@ -136181,7 +136181,7 @@ upgrade: ${upgrade}\r contentLength, expectsPayload, header: "", - h2stream: stream2, + h2stream: stream, socket: client[kSocket] }); } else { @@ -137111,7 +137111,7 @@ var require_readable5 = __commonJS({ "node_modules/undici/lib/api/readable.js"(exports2, module2) { "use strict"; var assert = require("assert"); - var { Readable: Readable2 } = require("stream"); + var { Readable } = require("stream"); var { RequestAbortedError, NotSupportedError, InvalidArgumentError } = require_errors6(); var util = require_util24(); var { ReadableStreamFrom, toUSVString } = require_util24(); @@ -137123,7 +137123,7 @@ var require_readable5 = __commonJS({ var kContentType = /* @__PURE__ */ Symbol("kContentType"); var noop3 = () => { }; - module2.exports = class BodyReadable extends Readable2 { + module2.exports = class BodyReadable extends Readable { constructor({ resume, abort, @@ -137266,28 +137266,28 @@ var require_readable5 = __commonJS({ function isUnusable(self2) { return util.isDisturbed(self2) || isLocked(self2); } - async function consume(stream2, type2) { - if (isUnusable(stream2)) { + async function consume(stream, type2) { + if (isUnusable(stream)) { throw new TypeError("unusable"); } - assert(!stream2[kConsume]); + assert(!stream[kConsume]); return new Promise((resolve8, reject) => { - stream2[kConsume] = 
{ + stream[kConsume] = { type: type2, - stream: stream2, + stream, resolve: resolve8, reject, length: 0, body: [] }; - stream2.on("error", function(err) { + stream.on("error", function(err) { consumeFinish(this[kConsume], err); }).on("close", function() { if (this[kConsume].body !== null) { consumeFinish(this[kConsume], new RequestAbortedError()); } }); - process.nextTick(consumeStart, stream2[kConsume]); + process.nextTick(consumeStart, stream[kConsume]); }); } function consumeStart(consume2) { @@ -137310,7 +137310,7 @@ var require_readable5 = __commonJS({ } } function consumeEnd(consume2) { - const { type: type2, body, resolve: resolve8, stream: stream2, length } = consume2; + const { type: type2, body, resolve: resolve8, stream, length } = consume2; try { if (type2 === "text") { resolve8(toUSVString(Buffer.concat(body))); @@ -137328,11 +137328,11 @@ var require_readable5 = __commonJS({ if (!Blob2) { Blob2 = require("buffer").Blob; } - resolve8(new Blob2(body, { type: stream2[kContentType] })); + resolve8(new Blob2(body, { type: stream[kContentType] })); } consumeFinish(consume2); } catch (err) { - stream2.destroy(err); + stream.destroy(err); } } function consumePush(consume2, chunk) { @@ -137454,7 +137454,7 @@ var require_abort_signal3 = __commonJS({ var require_api_request3 = __commonJS({ "node_modules/undici/lib/api/api-request.js"(exports2, module2) { "use strict"; - var Readable2 = require_readable5(); + var Readable = require_readable5(); var { InvalidArgumentError, RequestAbortedError @@ -137528,7 +137528,7 @@ var require_api_request3 = __commonJS({ } const parsedHeaders = responseHeaders === "raw" ? util.parseHeaders(rawHeaders) : headers; const contentType = parsedHeaders["content-type"]; - const body = new Readable2({ resume, abort, contentType, highWaterMark }); + const body = new Readable({ resume, abort, contentType, highWaterMark }); this.callback = null; this.res = body; if (callback !== null) { @@ -137756,10 +137756,10 @@ var require_api_stream3 = __commonJS({ } } }; - function stream2(opts, factory, callback) { + function stream(opts, factory, callback) { if (callback === void 0) { return new Promise((resolve8, reject) => { - stream2.call(this, opts, factory, (err, data) => { + stream.call(this, opts, factory, (err, data) => { return err ? 
reject(err) : resolve8(data); }); }); @@ -137774,7 +137774,7 @@ var require_api_stream3 = __commonJS({ queueMicrotask(() => callback(err, { opaque })); } } - module2.exports = stream2; + module2.exports = stream; } }); @@ -137783,7 +137783,7 @@ var require_api_pipeline3 = __commonJS({ "node_modules/undici/lib/api/api-pipeline.js"(exports2, module2) { "use strict"; var { - Readable: Readable2, + Readable, Duplex, PassThrough } = require("stream"); @@ -137797,7 +137797,7 @@ var require_api_pipeline3 = __commonJS({ var { addSignal, removeSignal } = require_abort_signal3(); var assert = require("assert"); var kResume = /* @__PURE__ */ Symbol("resume"); - var PipelineRequest = class extends Readable2 { + var PipelineRequest = class extends Readable { constructor() { super({ autoDestroy: true }); this[kResume] = null; @@ -137814,7 +137814,7 @@ var require_api_pipeline3 = __commonJS({ callback(err); } }; - var PipelineResponse = class extends Readable2 { + var PipelineResponse = class extends Readable { constructor(resume) { super({ autoDestroy: true }); this[kResume] = resume; @@ -138294,20 +138294,20 @@ var require_mock_utils3 = __commonJS({ } return true; } - function safeUrl(path17) { - if (typeof path17 !== "string") { - return path17; + function safeUrl(path14) { + if (typeof path14 !== "string") { + return path14; } - const pathSegments = path17.split("?"); + const pathSegments = path14.split("?"); if (pathSegments.length !== 2) { - return path17; + return path14; } const qp = new URLSearchParams(pathSegments.pop()); qp.sort(); return [...pathSegments, qp.toString()].join("?"); } - function matchKey(mockDispatch2, { path: path17, method, body, headers }) { - const pathMatch = matchValue(mockDispatch2.path, path17); + function matchKey(mockDispatch2, { path: path14, method, body, headers }) { + const pathMatch = matchValue(mockDispatch2.path, path14); const methodMatch = matchValue(mockDispatch2.method, method); const bodyMatch = typeof mockDispatch2.body !== "undefined" ? matchValue(mockDispatch2.body, body) : true; const headersMatch = matchHeaders(mockDispatch2, headers); @@ -138325,7 +138325,7 @@ var require_mock_utils3 = __commonJS({ function getMockDispatch(mockDispatches, key) { const basePath = key.query ? buildURL(key.path, key.query) : key.path; const resolvedPath = typeof basePath === "string" ? 
safeUrl(basePath) : basePath; - let matchedMockDispatches = mockDispatches.filter(({ consumed }) => !consumed).filter(({ path: path17 }) => matchValue(safeUrl(path17), resolvedPath)); + let matchedMockDispatches = mockDispatches.filter(({ consumed }) => !consumed).filter(({ path: path14 }) => matchValue(safeUrl(path14), resolvedPath)); if (matchedMockDispatches.length === 0) { throw new MockNotMatchedError(`Mock dispatch not matched for path '${resolvedPath}'`); } @@ -138362,9 +138362,9 @@ var require_mock_utils3 = __commonJS({ } } function buildKey(opts) { - const { path: path17, method, body, headers, query } = opts; + const { path: path14, method, body, headers, query } = opts; return { - path: path17, + path: path14, method, body, headers, @@ -138813,10 +138813,10 @@ var require_pending_interceptors_formatter3 = __commonJS({ } format(pendingInterceptors) { const withPrettyHeaders = pendingInterceptors.map( - ({ method, path: path17, data: { statusCode }, persist, times, timesInvoked, origin }) => ({ + ({ method, path: path14, data: { statusCode }, persist, times, timesInvoked, origin }) => ({ Method: method, Origin: origin, - Path: path17, + Path: path14, "Status code": statusCode, Persistent: persist ? "\u2705" : "\u274C", Invocations: timesInvoked, @@ -140917,7 +140917,7 @@ var require_fetch3 = __commonJS({ } = require_constants21(); var { kHeadersList } = require_symbols11(); var EE = require("events"); - var { Readable: Readable2, pipeline } = require("stream"); + var { Readable, pipeline } = require("stream"); var { addAbortListener, isErrored, isReadable, nodeMajor, nodeMinor } = require_util24(); var { dataURLProcessor, serializeAMimeType } = require_dataURL(); var { TransformStream: TransformStream2 } = require("stream/web"); @@ -141676,7 +141676,7 @@ var require_fetch3 = __commonJS({ if (!ReadableStream2) { ReadableStream2 = require("stream/web").ReadableStream; } - const stream2 = new ReadableStream2( + const stream = new ReadableStream2( { async start(controller) { fetchParams.controller.controller = controller; @@ -141695,7 +141695,7 @@ var require_fetch3 = __commonJS({ } } ); - response.body = { stream: stream2 }; + response.body = { stream }; fetchParams.controller.on("terminated", onAborted); fetchParams.controller.resume = async () => { while (true) { @@ -141726,7 +141726,7 @@ var require_fetch3 = __commonJS({ return; } fetchParams.controller.controller.enqueue(new Uint8Array(bytes)); - if (isErrored(stream2)) { + if (isErrored(stream)) { fetchParams.controller.terminate(); return; } @@ -141738,13 +141738,13 @@ var require_fetch3 = __commonJS({ function onAborted(reason) { if (isAborted(fetchParams)) { response.aborted = true; - if (isReadable(stream2)) { + if (isReadable(stream)) { fetchParams.controller.controller.error( fetchParams.controller.serializedAbortReason ); } } else { - if (isReadable(stream2)) { + if (isReadable(stream)) { fetchParams.controller.controller.error(new TypeError("terminated", { cause: isErrorLike(reason) ? 
reason : void 0 })); @@ -141808,7 +141808,7 @@ var require_fetch3 = __commonJS({ headers[kHeadersList].append(key, val); } } - this.body = new Readable2({ read: resume }); + this.body = new Readable({ read: resume }); const decoders = []; const willFollow = request2.redirect === "follow" && location && redirectStatusSet.has(status); if (request2.method !== "HEAD" && request2.method !== "CONNECT" && !nullBodyStatus.includes(status) && !willFollow) { @@ -142295,8 +142295,8 @@ var require_util27 = __commonJS({ fr[kState] = "loading"; fr[kResult] = null; fr[kError] = null; - const stream2 = blob.stream(); - const reader = stream2.getReader(); + const stream = blob.stream(); + const reader = stream.getReader(); const bytes = []; let chunkPromise = reader.read(); let isFirstChunk = true; @@ -142980,8 +142980,8 @@ var require_cache6 = __commonJS({ const clonedResponse = cloneResponse(innerResponse); const bodyReadPromise = createDeferredPromise(); if (innerResponse.body != null) { - const stream2 = innerResponse.body.stream; - const reader = stream2.getReader(); + const stream = innerResponse.body.stream; + const reader = stream.getReader(); readAllBytes(reader).then(bodyReadPromise.resolve, bodyReadPromise.reject); } else { bodyReadPromise.resolve(void 0); @@ -143436,8 +143436,8 @@ var require_util29 = __commonJS({ } } } - function validateCookiePath(path17) { - for (const char of path17) { + function validateCookiePath(path14) { + for (const char of path14) { const code = char.charCodeAt(0); if (code < 33 || char === ";") { throw new Error("Invalid cookie path"); @@ -145117,11 +145117,11 @@ var require_undici3 = __commonJS({ if (typeof opts.path !== "string") { throw new InvalidArgumentError("invalid opts.path"); } - let path17 = opts.path; + let path14 = opts.path; if (!opts.path.startsWith("/")) { - path17 = `/${path17}`; + path14 = `/${path14}`; } - url2 = new URL(util.parseOrigin(url2).origin + path17); + url2 = new URL(util.parseOrigin(url2).origin + path14); } else { if (!opts) { opts = typeof url2 === "object" ? 
url2 : {}; @@ -145439,9 +145439,9 @@ var require_lib4 = __commonJS({ return this.request("HEAD", requestUrl, null, additionalHeaders || {}); }); } - sendStream(verb, requestUrl, stream2, additionalHeaders) { + sendStream(verb, requestUrl, stream, additionalHeaders) { return __awaiter2(this, void 0, void 0, function* () { - return this.request(verb, requestUrl, stream2, additionalHeaders); + return this.request(verb, requestUrl, stream, additionalHeaders); }); } /** @@ -149971,7 +149971,7 @@ var require_traverse = __commonJS({ })(this.value); }; function walk(root, cb, immutable) { - var path17 = []; + var path14 = []; var parents = []; var alive = true; return (function walker(node_) { @@ -149980,11 +149980,11 @@ var require_traverse = __commonJS({ var state = { node, node_, - path: [].concat(path17), + path: [].concat(path14), parent: parents.slice(-1)[0], - key: path17.slice(-1)[0], - isRoot: path17.length === 0, - level: path17.length, + key: path14.slice(-1)[0], + isRoot: path14.length === 0, + level: path14.length, circular: null, update: function(x) { if (!state.isRoot) { @@ -150039,7 +150039,7 @@ var require_traverse = __commonJS({ parents.push(state); var keys = Object.keys(state.node); keys.forEach(function(key, i2) { - path17.push(key); + path14.push(key); if (modifiers.pre) modifiers.pre.call(state, state.node[key], key); var child = walker(state.node[key]); if (immutable && Object.hasOwnProperty.call(state.node, key)) { @@ -150048,7 +150048,7 @@ var require_traverse = __commonJS({ child.isLast = i2 == keys.length - 1; child.isFirst = i2 == 0; if (modifiers.post) modifiers.post.call(state, child); - path17.pop(); + path14.pop(); }); parents.pop(); } @@ -150625,23 +150625,23 @@ var require_binary = __commonJS({ return self2; } ; - var stream2 = Chainsaw.light(builder); - stream2.writable = true; + var stream = Chainsaw.light(builder); + stream.writable = true; var buffers = Buffers(); - stream2.write = function(buf) { + stream.write = function(buf) { buffers.push(buf); dispatch(); }; var vars = Vars(); var done = false, caughtEnd = false; - stream2.end = function() { + stream.end = function() { caughtEnd = true; }; - stream2.pipe = Stream.prototype.pipe; + stream.pipe = Stream.prototype.pipe; Object.getOwnPropertyNames(EventEmitter.prototype).forEach(function(name) { - stream2[name] = EventEmitter.prototype[name]; + stream[name] = EventEmitter.prototype[name]; }); - return stream2; + return stream; }; exports2.parse = function parse2(buffer) { var self2 = words(function(bytes, cb) { @@ -150857,20 +150857,20 @@ var require_matcher_stream = __commonJS({ var require_entry = __commonJS({ "node_modules/unzip-stream/lib/entry.js"(exports2, module2) { "use strict"; - var stream2 = require("stream"); + var stream = require("stream"); var inherits = require("util").inherits; function Entry() { if (!(this instanceof Entry)) { return new Entry(); } - stream2.PassThrough.call(this); + stream.PassThrough.call(this); this.path = null; this.type = null; this.isDirectory = false; } - inherits(Entry, stream2.PassThrough); + inherits(Entry, stream.PassThrough); Entry.prototype.autodrain = function() { - return this.pipe(new stream2.Transform({ transform: function(d, e, cb) { + return this.pipe(new stream.Transform({ transform: function(d, e, cb) { cb(); } })); }; @@ -150883,7 +150883,7 @@ var require_unzip_stream = __commonJS({ "node_modules/unzip-stream/lib/unzip-stream.js"(exports2, module2) { "use strict"; var binary2 = require_binary(); - var stream2 = require("stream"); + var stream = 
require("stream"); var util = require("util"); var zlib3 = require("zlib"); var MatcherStream = require_matcher_stream(); @@ -150917,7 +150917,7 @@ var require_unzip_stream = __commonJS({ if (!(this instanceof UnzipStream)) { return new UnzipStream(options); } - stream2.Transform.call(this); + stream.Transform.call(this); this.options = options || {}; this.data = new Buffer(""); this.state = states.STREAM_START; @@ -150925,7 +150925,7 @@ var require_unzip_stream = __commonJS({ this.parsedEntity = null; this.outStreamInfo = {}; } - util.inherits(UnzipStream, stream2.Transform); + util.inherits(UnzipStream, stream.Transform); UnzipStream.prototype.processDataChunk = function(chunk) { var requiredLength; switch (this.state) { @@ -151069,11 +151069,11 @@ var require_unzip_stream = __commonJS({ return requiredLength; case states.CENTRAL_DIRECTORY_FILE_HEADER_SUFFIX: var isUtf8 = (this.parsedEntity.flags & 2048) !== 0; - var path17 = this._decodeString(chunk.slice(0, this.parsedEntity.fileNameLength), isUtf8); + var path14 = this._decodeString(chunk.slice(0, this.parsedEntity.fileNameLength), isUtf8); var extraDataBuffer = chunk.slice(this.parsedEntity.fileNameLength, this.parsedEntity.fileNameLength + this.parsedEntity.extraFieldLength); var extra = this._readExtraFields(extraDataBuffer); if (extra && extra.parsed && extra.parsed.path && !isUtf8) { - path17 = extra.parsed.path; + path14 = extra.parsed.path; } this.parsedEntity.extra = extra.parsed; var isUnix = (this.parsedEntity.versionMadeBy & 65280) >> 8 === 3; @@ -151085,7 +151085,7 @@ var require_unzip_stream = __commonJS({ } if (this.options.debug) { const debugObj = Object.assign({}, this.parsedEntity, { - path: path17, + path: path14, flags: "0x" + this.parsedEntity.flags.toString(16), unixAttrs: unixAttrs && "0" + unixAttrs.toString(8), isSymlink, @@ -151179,7 +151179,7 @@ var require_unzip_stream = __commonJS({ }); this.outStreamInfo.stream = matcherStream; } else { - this.outStreamInfo.stream = new stream2.PassThrough(); + this.outStreamInfo.stream = new stream.PassThrough(); } var isEncrypted = vars.flags & 1 || vars.flags & 64; if (isEncrypted || !isVersionSupported) { @@ -151522,8 +151522,8 @@ var require_parser_stream = __commonJS({ // node_modules/mkdirp/index.js var require_mkdirp = __commonJS({ "node_modules/mkdirp/index.js"(exports2, module2) { - var path17 = require("path"); - var fs18 = require("fs"); + var path14 = require("path"); + var fs15 = require("fs"); var _0777 = parseInt("0777", 8); module2.exports = mkdirP.mkdirp = mkdirP.mkdirP = mkdirP; function mkdirP(p, opts, f, made) { @@ -151534,7 +151534,7 @@ var require_mkdirp = __commonJS({ opts = { mode: opts }; } var mode = opts.mode; - var xfs = opts.fs || fs18; + var xfs = opts.fs || fs15; if (mode === void 0) { mode = _0777; } @@ -151542,7 +151542,7 @@ var require_mkdirp = __commonJS({ var cb = f || /* istanbul ignore next */ function() { }; - p = path17.resolve(p); + p = path14.resolve(p); xfs.mkdir(p, mode, function(er) { if (!er) { made = made || p; @@ -151550,8 +151550,8 @@ var require_mkdirp = __commonJS({ } switch (er.code) { case "ENOENT": - if (path17.dirname(p) === p) return cb(er); - mkdirP(path17.dirname(p), opts, function(er2, made2) { + if (path14.dirname(p) === p) return cb(er); + mkdirP(path14.dirname(p), opts, function(er2, made2) { if (er2) cb(er2, made2); else mkdirP(p, opts, cb, made2); }); @@ -151573,19 +151573,19 @@ var require_mkdirp = __commonJS({ opts = { mode: opts }; } var mode = opts.mode; - var xfs = opts.fs || fs18; + var xfs = opts.fs 
|| fs15; if (mode === void 0) { mode = _0777; } if (!made) made = null; - p = path17.resolve(p); + p = path14.resolve(p); try { xfs.mkdirSync(p, mode); made = made || p; } catch (err0) { switch (err0.code) { case "ENOENT": - made = sync(path17.dirname(p), opts, made); + made = sync(path14.dirname(p), opts, made); sync(p, opts, made); break; // In the case of any other error, just see if there's a dir @@ -151610,8 +151610,8 @@ var require_mkdirp = __commonJS({ // node_modules/unzip-stream/lib/extract.js var require_extract2 = __commonJS({ "node_modules/unzip-stream/lib/extract.js"(exports2, module2) { - var fs18 = require("fs"); - var path17 = require("path"); + var fs15 = require("fs"); + var path14 = require("path"); var util = require("util"); var mkdirp = require_mkdirp(); var Transform = require("stream").Transform; @@ -151653,11 +151653,11 @@ var require_extract2 = __commonJS({ }; Extract.prototype._processEntry = function(entry) { var self2 = this; - var destPath = path17.join(this.opts.path, entry.path); - var directory = entry.isDirectory ? destPath : path17.dirname(destPath); + var destPath = path14.join(this.opts.path, entry.path); + var directory = entry.isDirectory ? destPath : path14.dirname(destPath); this.unfinishedEntries++; var writeFileFn = function() { - var pipedStream = fs18.createWriteStream(destPath); + var pipedStream = fs15.createWriteStream(destPath); pipedStream.on("close", function() { self2.unfinishedEntries--; self2._notifyAwaiter(); @@ -151765,7 +151765,7 @@ var require_download_artifact = __commonJS({ exports2.downloadArtifactInternal = exports2.downloadArtifactPublic = exports2.streamExtractExternal = void 0; var promises_1 = __importDefault2(require("fs/promises")); var crypto2 = __importStar2(require("crypto")); - var stream2 = __importStar2(require("stream")); + var stream = __importStar2(require("stream")); var github3 = __importStar2(require_github2()); var core17 = __importStar2(require_core()); var httpClient = __importStar2(require_lib()); @@ -151781,10 +151781,10 @@ var require_download_artifact = __commonJS({ parsed.search = ""; return parsed.toString(); }; - function exists(path17) { + function exists(path14) { return __awaiter2(this, void 0, void 0, function* () { try { - yield promises_1.default.access(path17); + yield promises_1.default.access(path14); return true; } catch (error3) { if (error3.code === "ENOENT") { @@ -151826,7 +151826,7 @@ var require_download_artifact = __commonJS({ }; const timer = setTimeout(timerFn, opts.timeout); const hashStream = crypto2.createHash("sha256").setEncoding("hex"); - const passThrough = new stream2.PassThrough(); + const passThrough = new stream.PassThrough(); response.message.pipe(passThrough); passThrough.pipe(hashStream); const extractStream = passThrough; @@ -152016,12 +152016,12 @@ var require_dist_node11 = __commonJS({ octokit.log.debug("request", options); const start = Date.now(); const requestOptions = octokit.request.endpoint.parse(options); - const path17 = requestOptions.url.replace(options.baseUrl, ""); + const path14 = requestOptions.url.replace(options.baseUrl, ""); return request2(options).then((response) => { - octokit.log.info(`${requestOptions.method} ${path17} - ${response.status} in ${Date.now() - start}ms`); + octokit.log.info(`${requestOptions.method} ${path14} - ${response.status} in ${Date.now() - start}ms`); return response; }).catch((error3) => { - octokit.log.info(`${requestOptions.method} ${path17} - ${error3.status} in ${Date.now() - start}ms`); + 
octokit.log.info(`${requestOptions.method} ${path14} - ${error3.status} in ${Date.now() - start}ms`); throw error3; }); }); @@ -152768,11 +152768,11 @@ var require_command2 = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.issue = exports2.issueCommand = void 0; - var os4 = __importStar2(require("os")); + var os3 = __importStar2(require("os")); var utils_1 = require_utils12(); function issueCommand(command, properties, message) { const cmd = new Command(command, properties, message); - process.stdout.write(cmd.toString() + os4.EOL); + process.stdout.write(cmd.toString() + os3.EOL); } exports2.issueCommand = issueCommand; function issue(name, message = "") { @@ -152855,18 +152855,18 @@ var require_file_command2 = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.prepareKeyValueMessage = exports2.issueFileCommand = void 0; var crypto2 = __importStar2(require("crypto")); - var fs18 = __importStar2(require("fs")); - var os4 = __importStar2(require("os")); + var fs15 = __importStar2(require("fs")); + var os3 = __importStar2(require("os")); var utils_1 = require_utils12(); function issueFileCommand(command, message) { const filePath = process.env[`GITHUB_${command}`]; if (!filePath) { throw new Error(`Unable to find environment variable for file command ${command}`); } - if (!fs18.existsSync(filePath)) { + if (!fs15.existsSync(filePath)) { throw new Error(`Missing file at path: ${filePath}`); } - fs18.appendFileSync(filePath, `${(0, utils_1.toCommandValue)(message)}${os4.EOL}`, { + fs15.appendFileSync(filePath, `${(0, utils_1.toCommandValue)(message)}${os3.EOL}`, { encoding: "utf8" }); } @@ -152880,7 +152880,7 @@ var require_file_command2 = __commonJS({ if (convertedValue.includes(delimiter)) { throw new Error(`Unexpected input: value should not contain the delimiter "${delimiter}"`); } - return `${key}<<${delimiter}${os4.EOL}${convertedValue}${os4.EOL}${delimiter}`; + return `${key}<<${delimiter}${os3.EOL}${convertedValue}${os3.EOL}${delimiter}`; } exports2.prepareKeyValueMessage = prepareKeyValueMessage; } @@ -153211,9 +153211,9 @@ var require_lib5 = __commonJS({ return this.request("HEAD", requestUrl, null, additionalHeaders || {}); }); } - sendStream(verb, requestUrl, stream2, additionalHeaders) { + sendStream(verb, requestUrl, stream, additionalHeaders) { return __awaiter2(this, void 0, void 0, function* () { - return this.request(verb, requestUrl, stream2, additionalHeaders); + return this.request(verb, requestUrl, stream, additionalHeaders); }); } /** @@ -154116,7 +154116,7 @@ var require_path_utils2 = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.toPlatformPath = exports2.toWin32Path = exports2.toPosixPath = void 0; - var path17 = __importStar2(require("path")); + var path14 = __importStar2(require("path")); function toPosixPath(pth) { return pth.replace(/[\\]/g, "/"); } @@ -154126,7 +154126,7 @@ var require_path_utils2 = __commonJS({ } exports2.toWin32Path = toWin32Path; function toPlatformPath(pth) { - return pth.replace(/[/\\]/g, path17.sep); + return pth.replace(/[/\\]/g, path14.sep); } exports2.toPlatformPath = toPlatformPath; } @@ -154189,12 +154189,12 @@ var require_io_util2 = __commonJS({ var _a; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getCmdPath = exports2.tryGetExecutablePath = exports2.isRooted = exports2.isDirectory = exports2.exists = exports2.READONLY = exports2.UV_FS_O_EXLOCK = exports2.IS_WINDOWS = exports2.unlink = 
exports2.symlink = exports2.stat = exports2.rmdir = exports2.rm = exports2.rename = exports2.readlink = exports2.readdir = exports2.open = exports2.mkdir = exports2.lstat = exports2.copyFile = exports2.chmod = void 0; - var fs18 = __importStar2(require("fs")); - var path17 = __importStar2(require("path")); - _a = fs18.promises, exports2.chmod = _a.chmod, exports2.copyFile = _a.copyFile, exports2.lstat = _a.lstat, exports2.mkdir = _a.mkdir, exports2.open = _a.open, exports2.readdir = _a.readdir, exports2.readlink = _a.readlink, exports2.rename = _a.rename, exports2.rm = _a.rm, exports2.rmdir = _a.rmdir, exports2.stat = _a.stat, exports2.symlink = _a.symlink, exports2.unlink = _a.unlink; + var fs15 = __importStar2(require("fs")); + var path14 = __importStar2(require("path")); + _a = fs15.promises, exports2.chmod = _a.chmod, exports2.copyFile = _a.copyFile, exports2.lstat = _a.lstat, exports2.mkdir = _a.mkdir, exports2.open = _a.open, exports2.readdir = _a.readdir, exports2.readlink = _a.readlink, exports2.rename = _a.rename, exports2.rm = _a.rm, exports2.rmdir = _a.rmdir, exports2.stat = _a.stat, exports2.symlink = _a.symlink, exports2.unlink = _a.unlink; exports2.IS_WINDOWS = process.platform === "win32"; exports2.UV_FS_O_EXLOCK = 268435456; - exports2.READONLY = fs18.constants.O_RDONLY; + exports2.READONLY = fs15.constants.O_RDONLY; function exists(fsPath) { return __awaiter2(this, void 0, void 0, function* () { try { @@ -154239,7 +154239,7 @@ var require_io_util2 = __commonJS({ } if (stats && stats.isFile()) { if (exports2.IS_WINDOWS) { - const upperExt = path17.extname(filePath).toUpperCase(); + const upperExt = path14.extname(filePath).toUpperCase(); if (extensions.some((validExt) => validExt.toUpperCase() === upperExt)) { return filePath; } @@ -154263,11 +154263,11 @@ var require_io_util2 = __commonJS({ if (stats && stats.isFile()) { if (exports2.IS_WINDOWS) { try { - const directory = path17.dirname(filePath); - const upperName = path17.basename(filePath).toUpperCase(); + const directory = path14.dirname(filePath); + const upperName = path14.basename(filePath).toUpperCase(); for (const actualName of yield exports2.readdir(directory)) { if (upperName === actualName.toUpperCase()) { - filePath = path17.join(directory, actualName); + filePath = path14.join(directory, actualName); break; } } @@ -154362,7 +154362,7 @@ var require_io2 = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.findInPath = exports2.which = exports2.mkdirP = exports2.rmRF = exports2.mv = exports2.cp = void 0; var assert_1 = require("assert"); - var path17 = __importStar2(require("path")); + var path14 = __importStar2(require("path")); var ioUtil = __importStar2(require_io_util2()); function cp(source, dest, options = {}) { return __awaiter2(this, void 0, void 0, function* () { @@ -154371,7 +154371,7 @@ var require_io2 = __commonJS({ if (destStat && destStat.isFile() && !force) { return; } - const newDest = destStat && destStat.isDirectory() && copySourceDirectory ? path17.join(dest, path17.basename(source)) : dest; + const newDest = destStat && destStat.isDirectory() && copySourceDirectory ? 
path14.join(dest, path14.basename(source)) : dest; if (!(yield ioUtil.exists(source))) { throw new Error(`no such file or directory: ${source}`); } @@ -154383,7 +154383,7 @@ var require_io2 = __commonJS({ yield cpDirRecursive(source, newDest, 0, force); } } else { - if (path17.relative(source, newDest) === "") { + if (path14.relative(source, newDest) === "") { throw new Error(`'${newDest}' and '${source}' are the same file`); } yield copyFile2(source, newDest, force); @@ -154396,7 +154396,7 @@ var require_io2 = __commonJS({ if (yield ioUtil.exists(dest)) { let destExists = true; if (yield ioUtil.isDirectory(dest)) { - dest = path17.join(dest, path17.basename(source)); + dest = path14.join(dest, path14.basename(source)); destExists = yield ioUtil.exists(dest); } if (destExists) { @@ -154407,7 +154407,7 @@ var require_io2 = __commonJS({ } } } - yield mkdirP(path17.dirname(dest)); + yield mkdirP(path14.dirname(dest)); yield ioUtil.rename(source, dest); }); } @@ -154470,7 +154470,7 @@ var require_io2 = __commonJS({ } const extensions = []; if (ioUtil.IS_WINDOWS && process.env["PATHEXT"]) { - for (const extension of process.env["PATHEXT"].split(path17.delimiter)) { + for (const extension of process.env["PATHEXT"].split(path14.delimiter)) { if (extension) { extensions.push(extension); } @@ -154483,12 +154483,12 @@ var require_io2 = __commonJS({ } return []; } - if (tool.includes(path17.sep)) { + if (tool.includes(path14.sep)) { return []; } const directories = []; if (process.env.PATH) { - for (const p of process.env.PATH.split(path17.delimiter)) { + for (const p of process.env.PATH.split(path14.delimiter)) { if (p) { directories.push(p); } @@ -154496,7 +154496,7 @@ var require_io2 = __commonJS({ } const matches = []; for (const directory of directories) { - const filePath = yield ioUtil.tryGetExecutablePath(path17.join(directory, tool), extensions); + const filePath = yield ioUtil.tryGetExecutablePath(path14.join(directory, tool), extensions); if (filePath) { matches.push(filePath); } @@ -154609,10 +154609,10 @@ var require_toolrunner2 = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.argStringToArray = exports2.ToolRunner = void 0; - var os4 = __importStar2(require("os")); + var os3 = __importStar2(require("os")); var events = __importStar2(require("events")); var child = __importStar2(require("child_process")); - var path17 = __importStar2(require("path")); + var path14 = __importStar2(require("path")); var io7 = __importStar2(require_io2()); var ioUtil = __importStar2(require_io_util2()); var timers_1 = require("timers"); @@ -154664,12 +154664,12 @@ var require_toolrunner2 = __commonJS({ _processLineBuffer(data, strBuffer, onLine) { try { let s = strBuffer + data.toString(); - let n = s.indexOf(os4.EOL); + let n = s.indexOf(os3.EOL); while (n > -1) { const line = s.substring(0, n); onLine(line); - s = s.substring(n + os4.EOL.length); - n = s.indexOf(os4.EOL); + s = s.substring(n + os3.EOL.length); + n = s.indexOf(os3.EOL); } return s; } catch (err) { @@ -154827,7 +154827,7 @@ var require_toolrunner2 = __commonJS({ exec() { return __awaiter2(this, void 0, void 0, function* () { if (!ioUtil.isRooted(this.toolPath) && (this.toolPath.includes("/") || IS_WINDOWS && this.toolPath.includes("\\"))) { - this.toolPath = path17.resolve(process.cwd(), this.options.cwd || process.cwd(), this.toolPath); + this.toolPath = path14.resolve(process.cwd(), this.options.cwd || process.cwd(), this.toolPath); } this.toolPath = yield io7.which(this.toolPath, true); return 
new Promise((resolve8, reject) => __awaiter2(this, void 0, void 0, function* () { @@ -154838,7 +154838,7 @@ var require_toolrunner2 = __commonJS({ } const optionsNonNull = this._cloneExecOptions(this.options); if (!optionsNonNull.silent && optionsNonNull.outStream) { - optionsNonNull.outStream.write(this._getCommandString(optionsNonNull) + os4.EOL); + optionsNonNull.outStream.write(this._getCommandString(optionsNonNull) + os3.EOL); } const state = new ExecState(optionsNonNull, this.toolPath); state.on("debug", (message) => { @@ -155326,8 +155326,8 @@ var require_core3 = __commonJS({ var command_1 = require_command2(); var file_command_1 = require_file_command2(); var utils_1 = require_utils12(); - var os4 = __importStar2(require("os")); - var path17 = __importStar2(require("path")); + var os3 = __importStar2(require("os")); + var path14 = __importStar2(require("path")); var oidc_utils_1 = require_oidc_utils2(); var ExitCode; (function(ExitCode2) { @@ -155355,7 +155355,7 @@ var require_core3 = __commonJS({ } else { (0, command_1.issueCommand)("add-path", {}, inputPath); } - process.env["PATH"] = `${inputPath}${path17.delimiter}${process.env["PATH"]}`; + process.env["PATH"] = `${inputPath}${path14.delimiter}${process.env["PATH"]}`; } exports2.addPath = addPath; function getInput2(name, options) { @@ -155394,7 +155394,7 @@ Support boolean input list: \`true | True | TRUE | false | False | FALSE\``); if (filePath) { return (0, file_command_1.issueFileCommand)("OUTPUT", (0, file_command_1.prepareKeyValueMessage)(name, value)); } - process.stdout.write(os4.EOL); + process.stdout.write(os3.EOL); (0, command_1.issueCommand)("set-output", { name }, (0, utils_1.toCommandValue)(value)); } exports2.setOutput = setOutput; @@ -155428,7 +155428,7 @@ Support boolean input list: \`true | True | TRUE | false | False | FALSE\``); } exports2.notice = notice; function info7(message) { - process.stdout.write(message + os4.EOL); + process.stdout.write(message + os3.EOL); } exports2.info = info7; function startGroup4(name) { @@ -155531,13 +155531,13 @@ These characters are not allowed in the artifact name due to limitations with ce (0, core_1.info)(`Artifact name is valid!`); } exports2.checkArtifactName = checkArtifactName; - function checkArtifactFilePath(path17) { - if (!path17) { - throw new Error(`Artifact path: ${path17}, is incorrectly provided`); + function checkArtifactFilePath(path14) { + if (!path14) { + throw new Error(`Artifact path: ${path14}, is incorrectly provided`); } for (const [invalidCharacterKey, errorMessageForCharacter] of invalidArtifactFilePathCharacters) { - if (path17.includes(invalidCharacterKey)) { - throw new Error(`Artifact path is not valid: ${path17}. Contains the following character: ${errorMessageForCharacter} + if (path14.includes(invalidCharacterKey)) { + throw new Error(`Artifact path is not valid: ${path14}. 
Contains the following character: ${errorMessageForCharacter} Invalid characters include: ${Array.from(invalidArtifactFilePathCharacters.values()).toString()} @@ -155583,25 +155583,25 @@ var require_upload_specification = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getUploadSpecification = void 0; - var fs18 = __importStar2(require("fs")); + var fs15 = __importStar2(require("fs")); var core_1 = require_core3(); var path_1 = require("path"); var path_and_artifact_name_validation_1 = require_path_and_artifact_name_validation2(); function getUploadSpecification(artifactName, rootDirectory, artifactFiles) { const specifications = []; - if (!fs18.existsSync(rootDirectory)) { + if (!fs15.existsSync(rootDirectory)) { throw new Error(`Provided rootDirectory ${rootDirectory} does not exist`); } - if (!fs18.statSync(rootDirectory).isDirectory()) { + if (!fs15.statSync(rootDirectory).isDirectory()) { throw new Error(`Provided rootDirectory ${rootDirectory} is not a valid directory`); } rootDirectory = (0, path_1.normalize)(rootDirectory); rootDirectory = (0, path_1.resolve)(rootDirectory); for (let file of artifactFiles) { - if (!fs18.existsSync(file)) { + if (!fs15.existsSync(file)) { throw new Error(`File ${file} does not exist`); } - if (!fs18.statSync(file).isDirectory()) { + if (!fs15.statSync(file).isDirectory()) { file = (0, path_1.normalize)(file); file = (0, path_1.resolve)(file); if (!file.startsWith(rootDirectory)) { @@ -155626,29 +155626,29 @@ var require_upload_specification = __commonJS({ // node_modules/tmp/lib/tmp.js var require_tmp = __commonJS({ "node_modules/tmp/lib/tmp.js"(exports2, module2) { - var fs18 = require("fs"); - var os4 = require("os"); - var path17 = require("path"); + var fs15 = require("fs"); + var os3 = require("os"); + var path14 = require("path"); var crypto2 = require("crypto"); - var _c = { fs: fs18.constants, os: os4.constants }; + var _c = { fs: fs15.constants, os: os3.constants }; var RANDOM_CHARS = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz"; var TEMPLATE_PATTERN = /XXXXXX/; var DEFAULT_TRIES = 3; var CREATE_FLAGS = (_c.O_CREAT || _c.fs.O_CREAT) | (_c.O_EXCL || _c.fs.O_EXCL) | (_c.O_RDWR || _c.fs.O_RDWR); - var IS_WIN32 = os4.platform() === "win32"; + var IS_WIN32 = os3.platform() === "win32"; var EBADF = _c.EBADF || _c.os.errno.EBADF; var ENOENT = _c.ENOENT || _c.os.errno.ENOENT; var DIR_MODE = 448; var FILE_MODE = 384; var EXIT = "exit"; var _removeObjects = []; - var FN_RMDIR_SYNC = fs18.rmdirSync.bind(fs18); + var FN_RMDIR_SYNC = fs15.rmdirSync.bind(fs15); var _gracefulCleanup = false; function rimraf(dirPath, callback) { - return fs18.rm(dirPath, { recursive: true }, callback); + return fs15.rm(dirPath, { recursive: true }, callback); } function FN_RIMRAF_SYNC(dirPath) { - return fs18.rmSync(dirPath, { recursive: true }); + return fs15.rmSync(dirPath, { recursive: true }); } function tmpName(options, callback) { const args = _parseArguments(options, callback), opts = args[0], cb = args[1]; @@ -155658,7 +155658,7 @@ var require_tmp = __commonJS({ (function _getUniqueName() { try { const name = _generateTmpName(sanitizedOptions); - fs18.stat(name, function(err2) { + fs15.stat(name, function(err2) { if (!err2) { if (tries-- > 0) return _getUniqueName(); return cb(new Error("Could not get a unique tmp filename, max tries reached " + name)); @@ -155678,7 +155678,7 @@ var require_tmp = __commonJS({ do { const name = _generateTmpName(sanitizedOptions); try { - fs18.statSync(name); + 
fs15.statSync(name); } catch (e) { return name; } @@ -155689,10 +155689,10 @@ var require_tmp = __commonJS({ const args = _parseArguments(options, callback), opts = args[0], cb = args[1]; tmpName(opts, function _tmpNameCreated(err, name) { if (err) return cb(err); - fs18.open(name, CREATE_FLAGS, opts.mode || FILE_MODE, function _fileCreated(err2, fd) { + fs15.open(name, CREATE_FLAGS, opts.mode || FILE_MODE, function _fileCreated(err2, fd) { if (err2) return cb(err2); if (opts.discardDescriptor) { - return fs18.close(fd, function _discardCallback(possibleErr) { + return fs15.close(fd, function _discardCallback(possibleErr) { return cb(possibleErr, name, void 0, _prepareTmpFileRemoveCallback(name, -1, opts, false)); }); } else { @@ -155706,9 +155706,9 @@ var require_tmp = __commonJS({ const args = _parseArguments(options), opts = args[0]; const discardOrDetachDescriptor = opts.discardDescriptor || opts.detachDescriptor; const name = tmpNameSync(opts); - let fd = fs18.openSync(name, CREATE_FLAGS, opts.mode || FILE_MODE); + let fd = fs15.openSync(name, CREATE_FLAGS, opts.mode || FILE_MODE); if (opts.discardDescriptor) { - fs18.closeSync(fd); + fs15.closeSync(fd); fd = void 0; } return { @@ -155721,7 +155721,7 @@ var require_tmp = __commonJS({ const args = _parseArguments(options, callback), opts = args[0], cb = args[1]; tmpName(opts, function _tmpNameCreated(err, name) { if (err) return cb(err); - fs18.mkdir(name, opts.mode || DIR_MODE, function _dirCreated(err2) { + fs15.mkdir(name, opts.mode || DIR_MODE, function _dirCreated(err2) { if (err2) return cb(err2); cb(null, name, _prepareTmpDirRemoveCallback(name, opts, false)); }); @@ -155730,7 +155730,7 @@ var require_tmp = __commonJS({ function dirSync(options) { const args = _parseArguments(options), opts = args[0]; const name = tmpNameSync(opts); - fs18.mkdirSync(name, opts.mode || DIR_MODE); + fs15.mkdirSync(name, opts.mode || DIR_MODE); return { name, removeCallback: _prepareTmpDirRemoveCallback(name, opts, true) @@ -155744,20 +155744,20 @@ var require_tmp = __commonJS({ next(); }; if (0 <= fdPath[0]) - fs18.close(fdPath[0], function() { - fs18.unlink(fdPath[1], _handler); + fs15.close(fdPath[0], function() { + fs15.unlink(fdPath[1], _handler); }); - else fs18.unlink(fdPath[1], _handler); + else fs15.unlink(fdPath[1], _handler); } function _removeFileSync(fdPath) { let rethrownException = null; try { - if (0 <= fdPath[0]) fs18.closeSync(fdPath[0]); + if (0 <= fdPath[0]) fs15.closeSync(fdPath[0]); } catch (e) { if (!_isEBADF(e) && !_isENOENT(e)) throw e; } finally { try { - fs18.unlinkSync(fdPath[1]); + fs15.unlinkSync(fdPath[1]); } catch (e) { if (!_isENOENT(e)) rethrownException = e; } @@ -155773,7 +155773,7 @@ var require_tmp = __commonJS({ return sync ? removeCallbackSync : removeCallback; } function _prepareTmpDirRemoveCallback(name, opts, sync) { - const removeFunction = opts.unsafeCleanup ? rimraf : fs18.rmdir.bind(fs18); + const removeFunction = opts.unsafeCleanup ? rimraf : fs15.rmdir.bind(fs15); const removeFunctionSync = opts.unsafeCleanup ? FN_RIMRAF_SYNC : FN_RMDIR_SYNC; const removeCallbackSync = _prepareRemoveCallback(removeFunctionSync, name, sync); const removeCallback = _prepareRemoveCallback(removeFunction, name, sync, removeCallbackSync); @@ -155835,35 +155835,35 @@ var require_tmp = __commonJS({ return [actualOptions, callback]; } function _resolvePath(name, tmpDir, cb) { - const pathToResolve = path17.isAbsolute(name) ? 
name : path17.join(tmpDir, name); - fs18.stat(pathToResolve, function(err) { + const pathToResolve = path14.isAbsolute(name) ? name : path14.join(tmpDir, name); + fs15.stat(pathToResolve, function(err) { if (err) { - fs18.realpath(path17.dirname(pathToResolve), function(err2, parentDir) { + fs15.realpath(path14.dirname(pathToResolve), function(err2, parentDir) { if (err2) return cb(err2); - cb(null, path17.join(parentDir, path17.basename(pathToResolve))); + cb(null, path14.join(parentDir, path14.basename(pathToResolve))); }); } else { - fs18.realpath(path17, cb); + fs15.realpath(path14, cb); } }); } function _resolvePathSync(name, tmpDir) { - const pathToResolve = path17.isAbsolute(name) ? name : path17.join(tmpDir, name); + const pathToResolve = path14.isAbsolute(name) ? name : path14.join(tmpDir, name); try { - fs18.statSync(pathToResolve); - return fs18.realpathSync(pathToResolve); + fs15.statSync(pathToResolve); + return fs15.realpathSync(pathToResolve); } catch (_err) { - const parentDir = fs18.realpathSync(path17.dirname(pathToResolve)); - return path17.join(parentDir, path17.basename(pathToResolve)); + const parentDir = fs15.realpathSync(path14.dirname(pathToResolve)); + return path14.join(parentDir, path14.basename(pathToResolve)); } } function _generateTmpName(opts) { const tmpDir = opts.tmpdir; if (!_isUndefined(opts.name)) { - return path17.join(tmpDir, opts.dir, opts.name); + return path14.join(tmpDir, opts.dir, opts.name); } if (!_isUndefined(opts.template)) { - return path17.join(tmpDir, opts.dir, opts.template).replace(TEMPLATE_PATTERN, _randomChars(6)); + return path14.join(tmpDir, opts.dir, opts.template).replace(TEMPLATE_PATTERN, _randomChars(6)); } const name = [ opts.prefix ? opts.prefix : "tmp", @@ -155873,13 +155873,13 @@ var require_tmp = __commonJS({ _randomChars(12), opts.postfix ? "-" + opts.postfix : "" ].join(""); - return path17.join(tmpDir, opts.dir, name); + return path14.join(tmpDir, opts.dir, name); } function _assertOptionsBase(options) { if (!_isUndefined(options.name)) { const name = options.name; - if (path17.isAbsolute(name)) throw new Error(`name option must not contain an absolute path, found "${name}".`); - const basename2 = path17.basename(name); + if (path14.isAbsolute(name)) throw new Error(`name option must not contain an absolute path, found "${name}".`); + const basename2 = path14.basename(name); if (basename2 === ".." || basename2 === "." 
|| basename2 !== name) throw new Error(`name option must not contain a path, found "${name}".`); } @@ -155901,7 +155901,7 @@ var require_tmp = __commonJS({ if (_isUndefined(name)) return cb(null); _resolvePath(name, tmpDir, function(err, resolvedPath) { if (err) return cb(err); - const relativePath = path17.relative(tmpDir, resolvedPath); + const relativePath = path14.relative(tmpDir, resolvedPath); if (!resolvedPath.startsWith(tmpDir)) { return cb(new Error(`${option} option must be relative to "${tmpDir}", found "${relativePath}".`)); } @@ -155911,7 +155911,7 @@ var require_tmp = __commonJS({ function _getRelativePathSync(option, name, tmpDir) { if (_isUndefined(name)) return; const resolvedPath = _resolvePathSync(name, tmpDir); - const relativePath = path17.relative(tmpDir, resolvedPath); + const relativePath = path14.relative(tmpDir, resolvedPath); if (!resolvedPath.startsWith(tmpDir)) { throw new Error(`${option} option must be relative to "${tmpDir}", found "${relativePath}".`); } @@ -155958,10 +155958,10 @@ var require_tmp = __commonJS({ _gracefulCleanup = true; } function _getTmpDir(options, cb) { - return fs18.realpath(options && options.tmpdir || os4.tmpdir(), cb); + return fs15.realpath(options && options.tmpdir || os3.tmpdir(), cb); } function _getTmpDirSync(options) { - return fs18.realpathSync(options && options.tmpdir || os4.tmpdir()); + return fs15.realpathSync(options && options.tmpdir || os3.tmpdir()); } process.addListener(EXIT, _garbageCollector); Object.defineProperty(module2.exports, "tmpdir", { @@ -155991,14 +155991,14 @@ var require_tmp_promise = __commonJS({ var fileWithOptions = promisify( (options, cb) => tmp.file( options, - (err, path17, fd, cleanup) => err ? cb(err) : cb(void 0, { path: path17, fd, cleanup: promisify(cleanup) }) + (err, path14, fd, cleanup) => err ? cb(err) : cb(void 0, { path: path14, fd, cleanup: promisify(cleanup) }) ) ); module2.exports.file = async (options) => fileWithOptions(options); module2.exports.withFile = async function withFile(fn, options) { - const { path: path17, fd, cleanup } = await module2.exports.file(options); + const { path: path14, fd, cleanup } = await module2.exports.file(options); try { - return await fn({ path: path17, fd }); + return await fn({ path: path14, fd }); } finally { await cleanup(); } @@ -156007,14 +156007,14 @@ var require_tmp_promise = __commonJS({ var dirWithOptions = promisify( (options, cb) => tmp.dir( options, - (err, path17, cleanup) => err ? cb(err) : cb(void 0, { path: path17, cleanup: promisify(cleanup) }) + (err, path14, cleanup) => err ? 
cb(err) : cb(void 0, { path: path14, cleanup: promisify(cleanup) }) ) ); module2.exports.dir = async (options) => dirWithOptions(options); module2.exports.withDir = async function withDir(fn, options) { - const { path: path17, cleanup } = await module2.exports.dir(options); + const { path: path14, cleanup } = await module2.exports.dir(options); try { - return await fn({ path: path17 }); + return await fn({ path: path14 }); } finally { await cleanup(); } @@ -156637,12 +156637,12 @@ Header Information: ${JSON.stringify(response.message.headers, void 0, 2)} }); } exports2.sleep = sleep; - function digestForStream(stream2) { + function digestForStream(stream) { return __awaiter2(this, void 0, void 0, function* () { return new Promise((resolve8, reject) => { const crc64 = new crc64_1.default(); const md5 = crypto_1.default.createHash("md5"); - stream2.on("data", (data) => { + stream.on("data", (data) => { crc64.update(data); md5.update(data); }).on("end", () => resolve8({ @@ -156815,10 +156815,10 @@ var require_upload_gzip = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.createGZipFileInBuffer = exports2.createGZipFileOnDisk = void 0; - var fs18 = __importStar2(require("fs")); + var fs15 = __importStar2(require("fs")); var zlib3 = __importStar2(require("zlib")); var util_1 = require("util"); - var stat = (0, util_1.promisify)(fs18.stat); + var stat = (0, util_1.promisify)(fs15.stat); var gzipExemptFileExtensions = [ ".gz", ".gzip", @@ -156851,9 +156851,9 @@ var require_upload_gzip = __commonJS({ } } return new Promise((resolve8, reject) => { - const inputStream = fs18.createReadStream(originalFilePath); + const inputStream = fs15.createReadStream(originalFilePath); const gzip = zlib3.createGzip(); - const outputStream = fs18.createWriteStream(tempFilePath); + const outputStream = fs15.createWriteStream(tempFilePath); inputStream.pipe(gzip).pipe(outputStream); outputStream.on("finish", () => __awaiter2(this, void 0, void 0, function* () { const size = (yield stat(tempFilePath)).size; @@ -156871,7 +156871,7 @@ var require_upload_gzip = __commonJS({ return __awaiter2(this, void 0, void 0, function* () { return new Promise((resolve8) => __awaiter2(this, void 0, void 0, function* () { var _a, e_1, _b, _c; - const inputStream = fs18.createReadStream(originalFilePath); + const inputStream = fs15.createReadStream(originalFilePath); const gzip = zlib3.createGzip(); inputStream.pipe(gzip); const chunks = []; @@ -157080,10 +157080,10 @@ var require_upload_http_client = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.UploadHttpClient = void 0; - var fs18 = __importStar2(require("fs")); + var fs15 = __importStar2(require("fs")); var core17 = __importStar2(require_core3()); var tmp = __importStar2(require_tmp_promise()); - var stream2 = __importStar2(require("stream")); + var stream = __importStar2(require("stream")); var utils_1 = require_utils13(); var config_variables_1 = require_config_variables(); var util_1 = require("util"); @@ -157094,7 +157094,7 @@ var require_upload_http_client = __commonJS({ var http_manager_1 = require_http_manager(); var upload_gzip_1 = require_upload_gzip(); var requestUtils_1 = require_requestUtils2(); - var stat = (0, util_1.promisify)(fs18.stat); + var stat = (0, util_1.promisify)(fs15.stat); var UploadHttpClient = class { constructor() { this.uploadHttpManager = new http_manager_1.HttpManager((0, config_variables_1.getUploadFileConcurrency)(), "@actions/artifact-upload"); @@ -157231,13 
+157231,13 @@ var require_upload_http_client = __commonJS({ let openUploadStream; if (totalFileSize < buffer.byteLength) { core17.debug(`The gzip file created for ${parameters.file} did not help with reducing the size of the file. The original file will be uploaded as-is`); - openUploadStream = () => fs18.createReadStream(parameters.file); + openUploadStream = () => fs15.createReadStream(parameters.file); isGzip = false; uploadFileSize = totalFileSize; } else { core17.debug(`A gzip file created for ${parameters.file} helped with reducing the size of the original file. The file will be uploaded using gzip.`); openUploadStream = () => { - const passThrough = new stream2.PassThrough(); + const passThrough = new stream.PassThrough(); passThrough.end(buffer); return passThrough; }; @@ -157277,7 +157277,7 @@ var require_upload_http_client = __commonJS({ failedChunkSizes += chunkSize; continue; } - const result = yield this.uploadChunk(httpClientIndex, parameters.resourceUrl, () => fs18.createReadStream(uploadFilePath, { + const result = yield this.uploadChunk(httpClientIndex, parameters.resourceUrl, () => fs15.createReadStream(uploadFilePath, { start: startChunkIndex, end: endChunkIndex, autoClose: false @@ -157472,7 +157472,7 @@ var require_download_http_client = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.DownloadHttpClient = void 0; - var fs18 = __importStar2(require("fs")); + var fs15 = __importStar2(require("fs")); var core17 = __importStar2(require_core3()); var zlib3 = __importStar2(require("zlib")); var utils_1 = require_utils13(); @@ -157563,7 +157563,7 @@ var require_download_http_client = __commonJS({ return __awaiter2(this, void 0, void 0, function* () { let retryCount = 0; const retryLimit = (0, config_variables_1.getRetryLimit)(); - let destinationStream = fs18.createWriteStream(downloadPath); + let destinationStream = fs15.createWriteStream(downloadPath); const headers = (0, utils_1.getDownloadHeaders)("application/json", true, true); const makeDownloadRequest = () => __awaiter2(this, void 0, void 0, function* () { const client = this.downloadHttpManager.getClient(httpClientIndex); @@ -157605,7 +157605,7 @@ var require_download_http_client = __commonJS({ } }); yield (0, utils_1.rmFile)(fileDownloadPath); - destinationStream = fs18.createWriteStream(fileDownloadPath); + destinationStream = fs15.createWriteStream(fileDownloadPath); }); while (retryCount <= retryLimit) { let response; @@ -157722,21 +157722,21 @@ var require_download_specification = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getDownloadSpecification = void 0; - var path17 = __importStar2(require("path")); + var path14 = __importStar2(require("path")); function getDownloadSpecification(artifactName, artifactEntries, downloadPath, includeRootDirectory) { const directories = /* @__PURE__ */ new Set(); const specifications = { - rootDownloadLocation: includeRootDirectory ? path17.join(downloadPath, artifactName) : downloadPath, + rootDownloadLocation: includeRootDirectory ? path14.join(downloadPath, artifactName) : downloadPath, directoryStructure: [], emptyFilesToCreate: [], filesToDownload: [] }; for (const entry of artifactEntries) { if (entry.path.startsWith(`${artifactName}/`) || entry.path.startsWith(`${artifactName}\\`)) { - const normalizedPathEntry = path17.normalize(entry.path); - const filePath = path17.join(downloadPath, includeRootDirectory ? 
normalizedPathEntry : normalizedPathEntry.replace(artifactName, "")); + const normalizedPathEntry = path14.normalize(entry.path); + const filePath = path14.join(downloadPath, includeRootDirectory ? normalizedPathEntry : normalizedPathEntry.replace(artifactName, "")); if (entry.itemType === "file") { - directories.add(path17.dirname(filePath)); + directories.add(path14.dirname(filePath)); if (entry.fileLength === 0) { specifications.emptyFilesToCreate.push(filePath); } else { @@ -157878,7 +157878,7 @@ Note: The size of downloaded zips can differ significantly from the reported siz return uploadResponse; }); } - downloadArtifact(name, path17, options) { + downloadArtifact(name, path14, options) { return __awaiter2(this, void 0, void 0, function* () { const downloadHttpClient = new download_http_client_1.DownloadHttpClient(); const artifacts = yield downloadHttpClient.listArtifacts(); @@ -157892,12 +157892,12 @@ Note: The size of downloaded zips can differ significantly from the reported siz throw new Error(`Unable to find an artifact with the name: ${name}`); } const items = yield downloadHttpClient.getContainerItems(artifactToDownload.name, artifactToDownload.fileContainerResourceUrl); - if (!path17) { - path17 = (0, config_variables_1.getWorkSpaceDirectory)(); + if (!path14) { + path14 = (0, config_variables_1.getWorkSpaceDirectory)(); } - path17 = (0, path_1.normalize)(path17); - path17 = (0, path_1.resolve)(path17); - const downloadSpecification = (0, download_specification_1.getDownloadSpecification)(name, items.value, path17, (options === null || options === void 0 ? void 0 : options.createArtifactFolder) || false); + path14 = (0, path_1.normalize)(path14); + path14 = (0, path_1.resolve)(path14); + const downloadSpecification = (0, download_specification_1.getDownloadSpecification)(name, items.value, path14, (options === null || options === void 0 ? 
void 0 : options.createArtifactFolder) || false); if (downloadSpecification.filesToDownload.length === 0) { core17.info(`No downloadable files were found for the artifact: ${artifactToDownload.name}`); } else { @@ -157912,7 +157912,7 @@ Note: The size of downloaded zips can differ significantly from the reported siz }; }); } - downloadAllArtifacts(path17) { + downloadAllArtifacts(path14) { return __awaiter2(this, void 0, void 0, function* () { const downloadHttpClient = new download_http_client_1.DownloadHttpClient(); const response = []; @@ -157921,18 +157921,18 @@ Note: The size of downloaded zips can differ significantly from the reported siz core17.info("Unable to find any artifacts for the associated workflow"); return response; } - if (!path17) { - path17 = (0, config_variables_1.getWorkSpaceDirectory)(); + if (!path14) { + path14 = (0, config_variables_1.getWorkSpaceDirectory)(); } - path17 = (0, path_1.normalize)(path17); - path17 = (0, path_1.resolve)(path17); + path14 = (0, path_1.normalize)(path14); + path14 = (0, path_1.resolve)(path14); let downloadedArtifacts = 0; while (downloadedArtifacts < artifacts.count) { const currentArtifactToDownload = artifacts.value[downloadedArtifacts]; downloadedArtifacts += 1; core17.info(`starting download of artifact ${currentArtifactToDownload.name} : ${downloadedArtifacts}/${artifacts.count}`); const items = yield downloadHttpClient.getContainerItems(currentArtifactToDownload.name, currentArtifactToDownload.fileContainerResourceUrl); - const downloadSpecification = (0, download_specification_1.getDownloadSpecification)(currentArtifactToDownload.name, items.value, path17, true); + const downloadSpecification = (0, download_specification_1.getDownloadSpecification)(currentArtifactToDownload.name, items.value, path14, true); if (downloadSpecification.filesToDownload.length === 0) { core17.info(`No downloadable files were found for any artifact ${currentArtifactToDownload.name}`); } else { @@ -160889,21 +160889,21 @@ async function getFolderSize(itemPath, options) { getFolderSize.loose = async (itemPath, options) => await core(itemPath, options); getFolderSize.strict = async (itemPath, options) => await core(itemPath, options, { strict: true }); async function core(rootItemPath, options = {}, returnType = {}) { - const fs18 = options.fs || await import("node:fs/promises"); + const fs15 = options.fs || await import("node:fs/promises"); let folderSize = 0n; const foundInos = /* @__PURE__ */ new Set(); const errors = []; await processItem(rootItemPath); async function processItem(itemPath) { if (options.ignore?.test(itemPath)) return; - const stats = returnType.strict ? await fs18.lstat(itemPath, { bigint: true }) : await fs18.lstat(itemPath, { bigint: true }).catch((error3) => errors.push(error3)); + const stats = returnType.strict ? await fs15.lstat(itemPath, { bigint: true }) : await fs15.lstat(itemPath, { bigint: true }).catch((error3) => errors.push(error3)); if (typeof stats !== "object") return; if (!foundInos.has(stats.ino)) { foundInos.add(stats.ino); folderSize += stats.size; } if (stats.isDirectory()) { - const directoryItems = returnType.strict ? await fs18.readdir(itemPath) : await fs18.readdir(itemPath).catch((error3) => errors.push(error3)); + const directoryItems = returnType.strict ? 
await fs15.readdir(itemPath) : await fs15.readdir(itemPath).catch((error3) => errors.push(error3)); if (typeof directoryItems !== "object") return; await Promise.all( directoryItems.map( @@ -163531,7 +163531,6 @@ var minimumVersion = "3.14"; // src/util.ts var BASE_DATABASE_OIDS_FILE_NAME = "base-database-oids.json"; -var BROKEN_VERSIONS = ["0.0.0-20211207"]; var GITHUB_DOTCOM_URL = "https://github.com"; var MINIMUM_CGROUP_MEMORY_LIMIT_BYTES = 1024 * 1024; function getExtraOptionsEnvParam() { @@ -163712,9 +163711,6 @@ async function delay(milliseconds, opts) { } }); } -function isGoodVersion(versionSpec) { - return !BROKEN_VERSIONS.includes(versionSpec); -} function isInTestMode() { return process.env["CODEQL_ACTION_TEST_MODE" /* TEST_MODE */] === "true"; } @@ -163802,27 +163798,6 @@ function satisfiesGHESVersion(ghesVersion, range, defaultIfInvalid) { function cloneObject(obj) { return JSON.parse(JSON.stringify(obj)); } -async function cleanUpPath(file, name, logger) { - logger.debug(`Cleaning up ${name}.`); - try { - await fs.promises.rm(file, { - force: true, - recursive: true - }); - } catch (e) { - logger.warning(`Failed to clean up ${name}: ${e}.`); - } -} -async function isBinaryAccessible(binary2, logger) { - try { - await io.which(binary2, true); - logger.debug(`Found ${binary2}.`); - return true; - } catch (e) { - logger.debug(`Could not find ${binary2}: ${e}`); - return false; - } -} async function asyncSome(array, predicate) { const results = await Promise.all(array.map(predicate)); return results.some((result) => result); @@ -163851,15 +163826,6 @@ function getActionVersion() { function getWorkflowEventName() { return getRequiredEnvParam("GITHUB_EVENT_NAME"); } -function isRunningLocalAction() { - const relativeScriptPath = getRelativeScriptPath(); - return relativeScriptPath.startsWith("..") || path2.isAbsolute(relativeScriptPath); -} -function getRelativeScriptPath() { - const runnerTemp = getRequiredEnvParam("RUNNER_TEMP"); - const actionsDirectory = path2.join(path2.dirname(runnerTemp), "_actions"); - return path2.relative(actionsDirectory, __filename); -} function getWorkflowEvent() { const eventJsonFile = getRequiredEnvParam("GITHUB_EVENT_PATH"); try { @@ -164179,14 +164145,6 @@ function getApiDetails() { function getApiClient() { return createApiClientWithDetails(getApiDetails()); } -function getAuthorizationHeaderFor(logger, apiDetails, url2) { - if (url2.startsWith(`${apiDetails.url}/`) || apiDetails.apiURL && url2.startsWith(`${apiDetails.apiURL}/`)) { - logger.debug(`Providing an authorization token.`); - return `token ${apiDetails.auth}`; - } - logger.debug(`Not using an authorization token.`); - return void 0; -} var cachedGitHubVersion = void 0; async function getGitHubVersionFromApi(apiClient, apiDetails) { if (parseGitHubUrl(apiDetails.url) === GITHUB_DOTCOM_URL) { @@ -164310,8 +164268,8 @@ function wrapApiConfigurationError(e) { var core6 = __toESM(require_core()); // src/codeql.ts -var fs10 = __toESM(require("fs")); -var path10 = __toESM(require("path")); +var fs7 = __toESM(require("fs")); +var path7 = __toESM(require("path")); var core10 = __toESM(require_core()); var toolrunner3 = __toESM(require_toolrunner()); @@ -164533,7 +164491,7 @@ function getCliConfigCategoryIfExists(cliError) { } function isUnsupportedPlatform() { return !SUPPORTED_PLATFORMS.some( - ([platform, arch2]) => platform === process.platform && arch2 === process.arch + ([platform, arch]) => platform === process.platform && arch === process.arch ); } function 
getUnsupportedPlatformError(cliError) { @@ -164559,7 +164517,7 @@ function wrapCliConfigurationError(cliError) { // src/config-utils.ts var fs6 = __toESM(require("fs")); -var path7 = __toESM(require("path")); +var path6 = __toESM(require("path")); // src/analyses.ts var AnalysisKind = /* @__PURE__ */ ((AnalysisKind2) => { @@ -164625,10 +164583,6 @@ var PACK_IDENTIFIER_PATTERN = (function() { return new RegExp(`^${component}/${component}$`); })(); -// src/diagnostics.ts -var import_fs = require("fs"); -var import_path = __toESM(require("path")); - // src/logging.ts var core7 = __toESM(require_core()); function getActionsLogger() { @@ -164650,82 +164604,14 @@ function withGroup(groupName, f) { core7.endGroup(); } } -function formatDuration(durationMs) { - if (durationMs < 1e3) { - return `${durationMs}ms`; - } - if (durationMs < 60 * 1e3) { - return `${(durationMs / 1e3).toFixed(1)}s`; - } - const minutes = Math.floor(durationMs / (60 * 1e3)); - const seconds = Math.floor(durationMs % (60 * 1e3) / 1e3); - return `${minutes}m${seconds}s`; -} - -// src/diagnostics.ts -var unwrittenDiagnostics = []; -var unwrittenDefaultLanguageDiagnostics = []; -function makeDiagnostic(id, name, data = void 0) { - return { - ...data, - timestamp: data?.timestamp ?? (/* @__PURE__ */ new Date()).toISOString(), - source: { ...data?.source, id, name } - }; -} -function addDiagnostic(config, language, diagnostic) { - const logger = getActionsLogger(); - const databasePath = language ? getCodeQLDatabasePath(config, language) : config.dbLocation; - if ((0, import_fs.existsSync)(databasePath)) { - writeDiagnostic(config, language, diagnostic); - } else { - logger.debug( - `Writing a diagnostic for ${language}, but the database at ${databasePath} does not exist yet.` - ); - unwrittenDiagnostics.push({ diagnostic, language }); - } -} -function addNoLanguageDiagnostic(config, diagnostic) { - if (config !== void 0) { - addDiagnostic( - config, - // Arbitrarily choose the first language. We could also choose all languages, but that - // increases the risk of misinterpreting the data. - config.languages[0], - diagnostic - ); - } else { - unwrittenDefaultLanguageDiagnostics.push(diagnostic); - } -} -function writeDiagnostic(config, language, diagnostic) { - const logger = getActionsLogger(); - const databasePath = language ? getCodeQLDatabasePath(config, language) : config.dbLocation; - const diagnosticsPath = import_path.default.resolve( - databasePath, - "diagnostic", - "codeql-action" - ); - try { - (0, import_fs.mkdirSync)(diagnosticsPath, { recursive: true }); - const jsonPath = import_path.default.resolve( - diagnosticsPath, - // Remove colons from the timestamp as these are not allowed in Windows filenames. 
- `codeql-action-${diagnostic.timestamp.replaceAll(":", "")}.json` - ); - (0, import_fs.writeFileSync)(jsonPath, JSON.stringify(diagnostic)); - } catch (err) { - logger.warning(`Unable to write diagnostic message to database: ${err}`); - logger.debug(JSON.stringify(diagnostic)); - } -} // src/diff-informed-analysis-utils.ts var fs5 = __toESM(require("fs")); -var path6 = __toESM(require("path")); +var path5 = __toESM(require("path")); // src/feature-flags.ts var fs4 = __toESM(require("fs")); -var path5 = __toESM(require("path")); +var path4 = __toESM(require("path")); var semver5 = __toESM(require_semver2()); // src/defaults.json @@ -164734,7 +164620,7 @@ var cliVersion = "2.24.2"; // src/overlay-database-utils.ts var fs3 = __toESM(require("fs")); -var path4 = __toESM(require("path")); +var path3 = __toESM(require("path")); var actionsCache = __toESM(require_cache5()); // src/git-utils.ts @@ -164862,8 +164748,8 @@ var getFileOidsUnderPath = async function(basePath) { const match = line.match(regex); if (match) { const oid = match[1]; - const path17 = decodeGitFilePath(match[2]); - fileOidMap[path17] = oid; + const path14 = decodeGitFilePath(match[2]); + fileOidMap[path14] = oid; } else { throw new Error(`Unexpected "git ls-files" output: ${line}`); } @@ -164969,7 +164855,7 @@ async function writeOverlayChangesFile(config, sourceRoot, logger) { `Found ${changedFiles.length} changed file(s) under ${sourceRoot}.` ); const changedFilesJson = JSON.stringify({ changes: changedFiles }); - const overlayChangesFile = path4.join( + const overlayChangesFile = path3.join( getTemporaryDirectory(), "overlay-changes.json" ); @@ -165007,7 +164893,6 @@ function isSafeArtifactUpload(codeQlVersion) { // src/feature-flags.ts var DEFAULT_VERSION_FEATURE_FLAG_PREFIX = "default_codeql_version_"; var DEFAULT_VERSION_FEATURE_FLAG_SUFFIX = "_enabled"; -var CODEQL_VERSION_ZSTD_BUNDLE = "2.19.0"; var featureConfig = { ["allow_toolcache_input" /* AllowToolcacheInput */]: { defaultValue: false, @@ -165345,7 +165230,7 @@ var Features = class extends OfflineFeatures { super(logger); this.gitHubFeatureFlags = new GitHubFeatureFlags( repositoryNwo, - path5.join(tempDir, FEATURE_FLAGS_FILE_NAME), + path4.join(tempDir, FEATURE_FLAGS_FILE_NAME), logger ); } @@ -165562,7 +165447,7 @@ function initFeatures(gitHubVersion, repositoryNwo, tempDir, logger) { // src/diff-informed-analysis-utils.ts function getDiffRangesJsonFilePath() { - return path6.join(getTemporaryDirectory(), "pr-diff-range.json"); + return path5.join(getTemporaryDirectory(), "pr-diff-range.json"); } function readDiffRangesJsonFile(logger) { const jsonFilePath = getDiffRangesJsonFilePath(); @@ -165610,7 +165495,7 @@ var OVERLAY_ANALYSIS_CODE_SCANNING_FEATURES = { swift: "overlay_analysis_code_scanning_swift" /* OverlayAnalysisCodeScanningSwift */ }; function getPathToParsedConfigFile(tempDir) { - return path7.join(tempDir, "config"); + return path6.join(tempDir, "config"); } async function getConfig(tempDir, logger) { const configFile = getPathToParsedConfigFile(tempDir); @@ -165656,949 +165541,23 @@ function isCodeScanningEnabled(config) { } // src/setup-codeql.ts -var fs9 = __toESM(require("fs")); -var path9 = __toESM(require("path")); var toolcache3 = __toESM(require_tool_cache()); var import_fast_deep_equal = __toESM(require_fast_deep_equal()); var semver8 = __toESM(require_semver2()); -// node_modules/uuid/dist-node/stringify.js -var byteToHex = []; -for (let i = 0; i < 256; ++i) { - byteToHex.push((i + 256).toString(16).slice(1)); -} -function 
unsafeStringify(arr, offset = 0) { - return (byteToHex[arr[offset + 0]] + byteToHex[arr[offset + 1]] + byteToHex[arr[offset + 2]] + byteToHex[arr[offset + 3]] + "-" + byteToHex[arr[offset + 4]] + byteToHex[arr[offset + 5]] + "-" + byteToHex[arr[offset + 6]] + byteToHex[arr[offset + 7]] + "-" + byteToHex[arr[offset + 8]] + byteToHex[arr[offset + 9]] + "-" + byteToHex[arr[offset + 10]] + byteToHex[arr[offset + 11]] + byteToHex[arr[offset + 12]] + byteToHex[arr[offset + 13]] + byteToHex[arr[offset + 14]] + byteToHex[arr[offset + 15]]).toLowerCase(); -} - -// node_modules/uuid/dist-node/rng.js -var import_node_crypto = require("node:crypto"); -var rnds8Pool = new Uint8Array(256); -var poolPtr = rnds8Pool.length; -function rng() { - if (poolPtr > rnds8Pool.length - 16) { - (0, import_node_crypto.randomFillSync)(rnds8Pool); - poolPtr = 0; - } - return rnds8Pool.slice(poolPtr, poolPtr += 16); -} - -// node_modules/uuid/dist-node/native.js -var import_node_crypto2 = require("node:crypto"); -var native_default = { randomUUID: import_node_crypto2.randomUUID }; - -// node_modules/uuid/dist-node/v4.js -function _v4(options, buf, offset) { - options = options || {}; - const rnds = options.random ?? options.rng?.() ?? rng(); - if (rnds.length < 16) { - throw new Error("Random bytes length must be >= 16"); - } - rnds[6] = rnds[6] & 15 | 64; - rnds[8] = rnds[8] & 63 | 128; - if (buf) { - offset = offset || 0; - if (offset < 0 || offset + 16 > buf.length) { - throw new RangeError(`UUID byte range ${offset}:${offset + 15} is out of buffer bounds`); - } - for (let i = 0; i < 16; ++i) { - buf[offset + i] = rnds[i]; - } - return buf; - } - return unsafeStringify(rnds); -} -function v4(options, buf, offset) { - if (native_default.randomUUID && !buf && !options) { - return native_default.randomUUID(); - } - return _v4(options, buf, offset); -} -var v4_default = v4; - // src/tar.ts -var import_child_process = require("child_process"); -var fs7 = __toESM(require("fs")); -var stream = __toESM(require("stream")); var import_toolrunner = __toESM(require_toolrunner()); var io4 = __toESM(require_io()); var toolcache = __toESM(require_tool_cache()); var semver6 = __toESM(require_semver2()); -var MIN_REQUIRED_BSD_TAR_VERSION = "3.4.3"; -var MIN_REQUIRED_GNU_TAR_VERSION = "1.31"; -async function getTarVersion() { - const tar = await io4.which("tar", true); - let stdout = ""; - const exitCode = await new import_toolrunner.ToolRunner(tar, ["--version"], { - listeners: { - stdout: (data) => { - stdout += data.toString(); - } - } - }).exec(); - if (exitCode !== 0) { - throw new Error("Failed to call tar --version"); - } - if (stdout.includes("GNU tar")) { - const match = stdout.match(/tar \(GNU tar\) ([0-9.]+)/); - if (!match?.[1]) { - throw new Error("Failed to parse output of tar --version."); - } - return { type: "gnu", version: match[1] }; - } else if (stdout.includes("bsdtar")) { - const match = stdout.match(/bsdtar ([0-9.]+)/); - if (!match?.[1]) { - throw new Error("Failed to parse output of tar --version."); - } - return { type: "bsd", version: match[1] }; - } else { - throw new Error("Unknown tar version"); - } -} -async function isZstdAvailable(logger) { - const foundZstdBinary = await isBinaryAccessible("zstd", logger); - try { - const tarVersion = await getTarVersion(); - const { type: type2, version } = tarVersion; - logger.info(`Found ${type2} tar version ${version}.`); - switch (type2) { - case "gnu": - return { - available: foundZstdBinary && // GNU tar only uses major and minor version numbers - semver6.gte( 
- semver6.coerce(version), - semver6.coerce(MIN_REQUIRED_GNU_TAR_VERSION) - ), - foundZstdBinary, - version: tarVersion - }; - case "bsd": - return { - available: foundZstdBinary && // Do a loose comparison since these version numbers don't contain - // a patch version number. - semver6.gte(version, MIN_REQUIRED_BSD_TAR_VERSION), - foundZstdBinary, - version: tarVersion - }; - default: - assertNever(type2); - } - } catch (e) { - logger.warning( - `Failed to determine tar version, therefore will assume zstd is not available. The underlying error was: ${e}` - ); - return { available: false, foundZstdBinary }; - } -} -async function extract(tarPath, dest, compressionMethod, tarVersion, logger) { - fs7.mkdirSync(dest, { recursive: true }); - switch (compressionMethod) { - case "gzip": - return await toolcache.extractTar(tarPath, dest); - case "zstd": { - if (!tarVersion) { - throw new Error( - "Could not determine tar version, which is required to extract a Zstandard archive." - ); - } - await extractTarZst(tarPath, dest, tarVersion, logger); - return dest; - } - } -} -async function extractTarZst(tar, dest, tarVersion, logger) { - logger.debug( - `Extracting to ${dest}.${tar instanceof stream.Readable ? ` Input stream has high water mark ${tar.readableHighWaterMark}.` : ""}` - ); - try { - const args = ["-x", "--zstd", "--ignore-zeros"]; - if (tarVersion.type === "gnu") { - args.push("--warning=no-unknown-keyword"); - args.push("--overwrite"); - } - args.push("-f", tar instanceof stream.Readable ? "-" : tar, "-C", dest); - process.stdout.write(`[command]tar ${args.join(" ")} -`); - await new Promise((resolve8, reject) => { - const tarProcess = (0, import_child_process.spawn)("tar", args, { stdio: "pipe" }); - let stdout = ""; - tarProcess.stdout?.on("data", (data) => { - stdout += data.toString(); - process.stdout.write(data); - }); - let stderr = ""; - tarProcess.stderr?.on("data", (data) => { - stderr += data.toString(); - process.stdout.write(data); - }); - tarProcess.on("error", (err) => { - reject(new Error(`Error while extracting tar: ${err}`)); - }); - if (tar instanceof stream.Readable) { - tar.pipe(tarProcess.stdin).on("error", (err) => { - reject( - new Error(`Error while downloading and extracting tar: ${err}`) - ); - }); - } - tarProcess.on("exit", (code) => { - if (code !== 0) { - reject( - new CommandInvocationError( - "tar", - args, - code ?? 
void 0, - stdout, - stderr - ) - ); - } - resolve8(); - }); - }); - } catch (e) { - await cleanUpPath(dest, "extraction destination directory", logger); - throw e; - } -} -var KNOWN_EXTENSIONS = { - "tar.gz": "gzip", - "tar.zst": "zstd" -}; -function inferCompressionMethod(tarPath) { - for (const [ext, method] of Object.entries(KNOWN_EXTENSIONS)) { - if (tarPath.endsWith(`.${ext}`)) { - return method; - } - } - return void 0; -} // src/tools-download.ts -var fs8 = __toESM(require("fs")); -var os = __toESM(require("os")); -var path8 = __toESM(require("path")); -var import_perf_hooks = require("perf_hooks"); var core9 = __toESM(require_core()); var import_http_client = __toESM(require_lib()); var toolcache2 = __toESM(require_tool_cache()); var import_follow_redirects = __toESM(require_follow_redirects()); var semver7 = __toESM(require_semver2()); var STREAMING_HIGH_WATERMARK_BYTES = 4 * 1024 * 1024; -var TOOLCACHE_TOOL_NAME = "CodeQL"; -function makeDownloadFirstToolsDownloadDurations(downloadDurationMs, extractionDurationMs) { - return { - combinedDurationMs: downloadDurationMs + extractionDurationMs, - downloadDurationMs, - extractionDurationMs, - streamExtraction: false - }; -} -function makeStreamedToolsDownloadDurations(combinedDurationMs) { - return { - combinedDurationMs, - downloadDurationMs: void 0, - extractionDurationMs: void 0, - streamExtraction: true - }; -} -async function downloadAndExtract(codeqlURL, compressionMethod, dest, authorization, headers, tarVersion, logger) { - logger.info( - `Downloading CodeQL tools from ${codeqlURL} . This may take a while.` - ); - try { - if (compressionMethod === "zstd" && process.platform === "linux") { - logger.info(`Streaming the extraction of the CodeQL bundle.`); - const toolsInstallStart = import_perf_hooks.performance.now(); - await downloadAndExtractZstdWithStreaming( - codeqlURL, - dest, - authorization, - headers, - tarVersion, - logger - ); - const combinedDurationMs = Math.round( - import_perf_hooks.performance.now() - toolsInstallStart - ); - logger.info( - `Finished downloading and extracting CodeQL bundle to ${dest} (${formatDuration( - combinedDurationMs - )}).` - ); - return { - compressionMethod, - toolsUrl: sanitizeUrlForStatusReport(codeqlURL), - ...makeStreamedToolsDownloadDurations(combinedDurationMs) - }; - } - } catch (e) { - core9.warning( - `Failed to download and extract CodeQL bundle using streaming with error: ${getErrorMessage(e)}` - ); - core9.warning(`Falling back to downloading the bundle before extracting.`); - await cleanUpPath(dest, "CodeQL bundle", logger); - } - const toolsDownloadStart = import_perf_hooks.performance.now(); - const archivedBundlePath = await toolcache2.downloadTool( - codeqlURL, - void 0, - authorization, - headers - ); - const downloadDurationMs = Math.round(import_perf_hooks.performance.now() - toolsDownloadStart); - logger.info( - `Finished downloading CodeQL bundle to ${archivedBundlePath} (${formatDuration( - downloadDurationMs - )}).` - ); - let extractionDurationMs; - try { - logger.info("Extracting CodeQL bundle."); - const extractionStart = import_perf_hooks.performance.now(); - await extract( - archivedBundlePath, - dest, - compressionMethod, - tarVersion, - logger - ); - extractionDurationMs = Math.round(import_perf_hooks.performance.now() - extractionStart); - logger.info( - `Finished extracting CodeQL bundle to ${dest} (${formatDuration( - extractionDurationMs - )}).` - ); - } finally { - await cleanUpPath(archivedBundlePath, "CodeQL bundle archive", logger); - } - return { 
- compressionMethod, - toolsUrl: sanitizeUrlForStatusReport(codeqlURL), - ...makeDownloadFirstToolsDownloadDurations( - downloadDurationMs, - extractionDurationMs - ) - }; -} -async function downloadAndExtractZstdWithStreaming(codeqlURL, dest, authorization, headers, tarVersion, logger) { - fs8.mkdirSync(dest, { recursive: true }); - const agent = new import_http_client.HttpClient().getAgent(codeqlURL); - headers = Object.assign( - { "User-Agent": "CodeQL Action" }, - authorization ? { authorization } : {}, - headers - ); - const response = await new Promise( - (resolve8) => import_follow_redirects.https.get( - codeqlURL, - { - headers, - // Increase the high water mark to improve performance. - highWaterMark: STREAMING_HIGH_WATERMARK_BYTES, - // Use the agent to respect proxy settings. - agent - }, - (r) => resolve8(r) - ) - ); - if (response.statusCode !== 200) { - throw new Error( - `Failed to download CodeQL bundle from ${codeqlURL}. HTTP status code: ${response.statusCode}.` - ); - } - await extractTarZst(response, dest, tarVersion, logger); -} -function getToolcacheDirectory(version) { - return path8.join( - getRequiredEnvParam("RUNNER_TOOL_CACHE"), - TOOLCACHE_TOOL_NAME, - semver7.clean(version) || version, - os.arch() || "" - ); -} -function writeToolcacheMarkerFile(extractedPath, logger) { - const markerFilePath = `${extractedPath}.complete`; - fs8.writeFileSync(markerFilePath, ""); - logger.info(`Created toolcache marker file ${markerFilePath}`); -} -function sanitizeUrlForStatusReport(url2) { - return ["github/codeql-action", "dsp-testing/codeql-cli-nightlies"].some( - (repo) => url2.startsWith(`https://github.com/${repo}/releases/download/`) - ) ? url2 : "sanitized-value"; -} - -// src/setup-codeql.ts -var CODEQL_DEFAULT_ACTION_REPOSITORY = "github/codeql-action"; -var CODEQL_NIGHTLIES_REPOSITORY_OWNER = "dsp-testing"; -var CODEQL_NIGHTLIES_REPOSITORY_NAME = "codeql-cli-nightlies"; -var CODEQL_BUNDLE_VERSION_ALIAS = ["linked", "latest"]; -var CODEQL_NIGHTLY_TOOLS_INPUTS = ["nightly", "nightly-latest"]; -var CODEQL_TOOLCACHE_INPUT = "toolcache"; -function getCodeQLBundleExtension(compressionMethod) { - switch (compressionMethod) { - case "gzip": - return ".tar.gz"; - case "zstd": - return ".tar.zst"; - default: - assertNever(compressionMethod); - } -} -function getCodeQLBundleName(compressionMethod) { - const extension = getCodeQLBundleExtension(compressionMethod); - let platform; - if (process.platform === "win32") { - platform = "win64"; - } else if (process.platform === "linux") { - platform = "linux64"; - } else if (process.platform === "darwin") { - platform = "osx64"; - } else { - return `codeql-bundle${extension}`; - } - return `codeql-bundle-${platform}${extension}`; -} -function getCodeQLActionRepository(logger) { - if (isRunningLocalAction()) { - logger.info( - "The CodeQL Action is checked out locally. Using the default CodeQL Action repository." - ); - return CODEQL_DEFAULT_ACTION_REPOSITORY; - } - return getRequiredEnvParam("GITHUB_ACTION_REPOSITORY"); -} -async function getCodeQLBundleDownloadURL(tagName, apiDetails, compressionMethod, logger) { - const codeQLActionRepository = getCodeQLActionRepository(logger); - const potentialDownloadSources = [ - // This GitHub instance, and this Action. - [apiDetails.url, codeQLActionRepository], - // This GitHub instance, and the canonical Action. - [apiDetails.url, CODEQL_DEFAULT_ACTION_REPOSITORY], - // GitHub.com, and the canonical Action. 
- [GITHUB_DOTCOM_URL, CODEQL_DEFAULT_ACTION_REPOSITORY] - ]; - const uniqueDownloadSources = potentialDownloadSources.filter( - (source, index, self2) => { - return !self2.slice(0, index).some((other) => (0, import_fast_deep_equal.default)(source, other)); - } - ); - const codeQLBundleName = getCodeQLBundleName(compressionMethod); - for (const downloadSource of uniqueDownloadSources) { - const [apiURL, repository] = downloadSource; - if (apiURL === GITHUB_DOTCOM_URL && repository === CODEQL_DEFAULT_ACTION_REPOSITORY) { - break; - } - const [repositoryOwner, repositoryName] = repository.split("/"); - try { - const release2 = await getApiClient().rest.repos.getReleaseByTag({ - owner: repositoryOwner, - repo: repositoryName, - tag: tagName - }); - for (const asset of release2.data.assets) { - if (asset.name === codeQLBundleName) { - logger.info( - `Found CodeQL bundle ${codeQLBundleName} in ${repository} on ${apiURL} with URL ${asset.url}.` - ); - return asset.url; - } - } - } catch (e) { - logger.info( - `Looked for CodeQL bundle ${codeQLBundleName} in ${repository} on ${apiURL} but got error ${e}.` - ); - } - } - return `https://github.com/${CODEQL_DEFAULT_ACTION_REPOSITORY}/releases/download/${tagName}/${codeQLBundleName}`; -} -function tryGetBundleVersionFromTagName(tagName, logger) { - const match = tagName.match(/^codeql-bundle-(.*)$/); - if (match === null || match.length < 2) { - logger.debug(`Could not determine bundle version from tag ${tagName}.`); - return void 0; - } - return match[1]; -} -function tryGetTagNameFromUrl(url2, logger) { - const matches = [...url2.matchAll(/\/(codeql-bundle-[^/]*)\//g)]; - if (matches.length === 0) { - logger.debug(`Could not determine tag name for URL ${url2}.`); - return void 0; - } - const match = matches[matches.length - 1]; - if (match?.length !== 2) { - logger.debug( - `Could not determine tag name for URL ${url2}. Matched ${JSON.stringify( - match - )}.` - ); - return void 0; - } - return match[1]; -} -function convertToSemVer(version, logger) { - if (!semver8.valid(version)) { - logger.debug( - `Bundle version ${version} is not in SemVer format. Will treat it as pre-release 0.0.0-${version}.` - ); - version = `0.0.0-${version}`; - } - const s = semver8.clean(version); - if (!s) { - throw new Error(`Bundle version ${version} is not in SemVer format.`); - } - return s; -} -async function findOverridingToolsInCache(humanReadableVersion, logger) { - const candidates = toolcache3.findAllVersions("CodeQL").filter(isGoodVersion).map((version) => ({ - folder: toolcache3.find("CodeQL", version), - version - })).filter(({ folder }) => fs9.existsSync(path9.join(folder, "pinned-version"))); - if (candidates.length === 1) { - const candidate = candidates[0]; - logger.debug( - `CodeQL tools version ${candidate.version} in toolcache overriding version ${humanReadableVersion}.` - ); - return { - codeqlFolder: candidate.folder, - sourceType: "toolcache", - toolsVersion: candidate.version - }; - } else if (candidates.length === 0) { - logger.debug( - "Did not find any candidate pinned versions of the CodeQL tools in the toolcache." - ); - } else { - logger.debug( - "Could not use CodeQL tools from the toolcache since more than one candidate pinned version was found in the toolcache." 
- ); - } - return void 0; -} -async function getCodeQLSource(toolsInput, defaultCliVersion, apiDetails, variant, tarSupportsZstd, features, logger) { - if (toolsInput && !isReservedToolsValue(toolsInput) && !toolsInput.startsWith("http")) { - logger.info(`Using CodeQL CLI from local path ${toolsInput}`); - const compressionMethod2 = inferCompressionMethod(toolsInput); - if (compressionMethod2 === void 0) { - throw new ConfigurationError( - `Could not infer compression method from path ${toolsInput}. Please specify a path ending in '.tar.gz' or '.tar.zst'.` - ); - } - return { - codeqlTarPath: toolsInput, - compressionMethod: compressionMethod2, - sourceType: "local", - toolsVersion: "local" - }; - } - let cliVersion2; - let tagName; - let url2; - const canForceNightlyWithFF = isDynamicWorkflow() || isInTestMode(); - const forceNightlyValueFF = await features.getValue("force_nightly" /* ForceNightly */); - const forceNightly = forceNightlyValueFF && canForceNightlyWithFF; - const nightlyRequestedByToolsInput = toolsInput !== void 0 && CODEQL_NIGHTLY_TOOLS_INPUTS.includes(toolsInput); - if (forceNightly || nightlyRequestedByToolsInput) { - if (forceNightly) { - logger.info( - `Using the latest CodeQL CLI nightly, as forced by the ${"force_nightly" /* ForceNightly */} feature flag.` - ); - addNoLanguageDiagnostic( - void 0, - makeDiagnostic( - "codeql-action/forced-nightly-cli", - "A nightly release of CodeQL was used", - { - markdownMessage: "GitHub configured this analysis to use a nightly release of CodeQL to allow you to preview changes from an upcoming release.\n\nNightly releases do not undergo the same validation as regular releases and may lead to analysis instability.\n\nIf use of a nightly CodeQL release for this analysis is unexpected, please contact GitHub support.", - visibility: { - cliSummaryTable: true, - statusPage: true, - telemetry: true - }, - severity: "note" - } - ) - ); - } else { - logger.info( - `Using the latest CodeQL CLI nightly, as requested by 'tools: ${toolsInput}'.` - ); - } - toolsInput = await getNightlyToolsUrl(logger); - } - const forceShippedTools = toolsInput && CODEQL_BUNDLE_VERSION_ALIAS.includes(toolsInput); - if (forceShippedTools) { - cliVersion2 = cliVersion; - tagName = bundleVersion; - logger.info( - `'tools: ${toolsInput}' was requested, so using CodeQL version ${cliVersion2}, the version shipped with the Action.` - ); - if (toolsInput === "latest") { - logger.warning( - "`tools: latest` has been renamed to `tools: linked`, but the old name is still supported. No action is required." 
- ); - } - } else if (toolsInput !== void 0 && toolsInput === CODEQL_TOOLCACHE_INPUT) { - let latestToolcacheVersion; - const allowToolcacheValueFF = await features.getValue( - "allow_toolcache_input" /* AllowToolcacheInput */ - ); - const allowToolcacheValue = allowToolcacheValueFF && (isDynamicWorkflow() || isInTestMode()); - if (allowToolcacheValue) { - logger.info( - `Attempting to use the latest CodeQL CLI version in the toolcache, as requested by 'tools: ${toolsInput}'.` - ); - latestToolcacheVersion = getLatestToolcacheVersion(logger); - if (latestToolcacheVersion) { - cliVersion2 = latestToolcacheVersion; - } - } - if (latestToolcacheVersion === void 0) { - if (allowToolcacheValue) { - logger.info( - `Found no CodeQL CLI in the toolcache, ignoring 'tools: ${toolsInput}'...` - ); - } else { - if (allowToolcacheValueFF) { - logger.warning( - `Ignoring 'tools: ${toolsInput}' because the workflow was not triggered dynamically.` - ); - } else { - logger.info( - `Ignoring 'tools: ${toolsInput}' because the feature is not enabled.` - ); - } - } - cliVersion2 = defaultCliVersion.cliVersion; - tagName = defaultCliVersion.tagName; - } - } else if (toolsInput !== void 0) { - tagName = tryGetTagNameFromUrl(toolsInput, logger); - url2 = toolsInput; - if (tagName) { - const bundleVersion3 = tryGetBundleVersionFromTagName(tagName, logger); - if (bundleVersion3 && semver8.valid(bundleVersion3)) { - cliVersion2 = convertToSemVer(bundleVersion3, logger); - } - } - } else { - cliVersion2 = defaultCliVersion.cliVersion; - tagName = defaultCliVersion.tagName; - } - const bundleVersion2 = tagName && tryGetBundleVersionFromTagName(tagName, logger); - const humanReadableVersion = cliVersion2 ?? (bundleVersion2 && convertToSemVer(bundleVersion2, logger)) ?? tagName ?? url2 ?? "unknown"; - logger.debug( - `Attempting to obtain CodeQL tools. CLI version: ${cliVersion2 ?? "unknown"}, bundle tag name: ${tagName ?? "unknown"}, URL: ${url2 ?? "unspecified"}.` - ); - let codeqlFolder; - if (cliVersion2) { - codeqlFolder = toolcache3.find("CodeQL", cliVersion2); - if (!codeqlFolder) { - logger.debug( - `Didn't find a version of the CodeQL tools in the toolcache with a version number exactly matching ${cliVersion2}.` - ); - const allVersions = toolcache3.findAllVersions("CodeQL"); - logger.debug( - `Found the following versions of the CodeQL tools in the toolcache: ${JSON.stringify( - allVersions - )}.` - ); - const candidateVersions = allVersions.filter( - (version) => version.startsWith(`${cliVersion2}-`) - ); - if (candidateVersions.length === 1) { - logger.debug( - `Exactly one version of the CodeQL tools starting with ${cliVersion2} found in the toolcache, using that.` - ); - codeqlFolder = toolcache3.find("CodeQL", candidateVersions[0]); - } else if (candidateVersions.length === 0) { - logger.debug( - `Didn't find any versions of the CodeQL tools starting with ${cliVersion2} in the toolcache. 
Trying next fallback method.` - ); - } else { - logger.warning( - `Found ${candidateVersions.length} versions of the CodeQL tools starting with ${cliVersion2} in the toolcache, but at most one was expected.` - ); - logger.debug("Trying next fallback method."); - } - } - } - if (!codeqlFolder && tagName) { - const fallbackVersion = await tryGetFallbackToolcacheVersion( - cliVersion2, - tagName, - logger - ); - if (fallbackVersion) { - codeqlFolder = toolcache3.find("CodeQL", fallbackVersion); - } else { - logger.debug( - `Could not determine a fallback toolcache version number for CodeQL tools version ${humanReadableVersion}.` - ); - } - } - if (codeqlFolder) { - logger.info( - `Found CodeQL tools version ${humanReadableVersion} in the toolcache.` - ); - } else { - logger.info( - `Did not find CodeQL tools version ${humanReadableVersion} in the toolcache.` - ); - } - if (codeqlFolder) { - if (cliVersion2) { - logger.info( - `Using CodeQL CLI version ${cliVersion2} from toolcache at ${codeqlFolder}` - ); - } else { - logger.info(`Using CodeQL CLI from toolcache at ${codeqlFolder}`); - } - return { - codeqlFolder, - sourceType: "toolcache", - toolsVersion: cliVersion2 ?? humanReadableVersion - }; - } - if (variant === "GitHub Enterprise Server" /* GHES */ && !forceShippedTools && !toolsInput) { - const result = await findOverridingToolsInCache( - humanReadableVersion, - logger - ); - if (result !== void 0) { - return result; - } - } - let compressionMethod; - if (!url2) { - compressionMethod = cliVersion2 !== void 0 && await useZstdBundle(cliVersion2, tarSupportsZstd) ? "zstd" : "gzip"; - url2 = await getCodeQLBundleDownloadURL( - tagName, - apiDetails, - compressionMethod, - logger - ); - } else { - const method = inferCompressionMethod(url2); - if (method === void 0) { - throw new ConfigurationError( - `Could not infer compression method from URL ${url2}. Please specify a URL ending in '.tar.gz' or '.tar.zst'.` - ); - } - compressionMethod = method; - } - if (cliVersion2) { - logger.info(`Using CodeQL CLI version ${cliVersion2} sourced from ${url2} .`); - } else { - logger.info(`Using CodeQL CLI sourced from ${url2} .`); - } - return { - bundleVersion: tagName && tryGetBundleVersionFromTagName(tagName, logger), - cliVersion: cliVersion2, - codeqlURL: url2, - compressionMethod, - sourceType: "download", - toolsVersion: cliVersion2 ?? humanReadableVersion - }; -} -async function tryGetFallbackToolcacheVersion(cliVersion2, tagName, logger) { - const bundleVersion2 = tryGetBundleVersionFromTagName(tagName, logger); - if (!bundleVersion2) { - return void 0; - } - const fallbackVersion = convertToSemVer(bundleVersion2, logger); - logger.debug( - `Computed a fallback toolcache version number of ${fallbackVersion} for CodeQL version ${cliVersion2 ?? tagName}.` - ); - return fallbackVersion; -} -var downloadCodeQL = async function(codeqlURL, compressionMethod, maybeBundleVersion, maybeCliVersion, apiDetails, tarVersion, tempDir, logger) { - const parsedCodeQLURL = new URL(codeqlURL); - const searchParams = new URLSearchParams(parsedCodeQLURL.search); - const headers = { - accept: "application/octet-stream" - }; - let authorization = void 0; - if (searchParams.has("token")) { - logger.debug("CodeQL tools URL contains an authorization token."); - } else { - authorization = getAuthorizationHeaderFor( - logger, - apiDetails, - codeqlURL - ); - } - const toolcacheInfo = getToolcacheDestinationInfo( - maybeBundleVersion, - maybeCliVersion, - logger - ); - const extractedBundlePath = toolcacheInfo?.path ?? 
getTempExtractionDir(tempDir); - const statusReport = await downloadAndExtract( - codeqlURL, - compressionMethod, - extractedBundlePath, - authorization, - { "User-Agent": "CodeQL Action", ...headers }, - tarVersion, - logger - ); - if (!toolcacheInfo) { - logger.debug( - `Could not cache CodeQL tools because we could not determine the bundle version from the URL ${codeqlURL}.` - ); - return { - codeqlFolder: extractedBundlePath, - statusReport, - toolsVersion: maybeCliVersion ?? "unknown" - }; - } - writeToolcacheMarkerFile(toolcacheInfo.path, logger); - return { - codeqlFolder: extractedBundlePath, - statusReport, - toolsVersion: maybeCliVersion ?? toolcacheInfo.version - }; -}; -function getToolcacheDestinationInfo(maybeBundleVersion, maybeCliVersion, logger) { - if (maybeBundleVersion) { - const version = getCanonicalToolcacheVersion( - maybeCliVersion, - maybeBundleVersion, - logger - ); - return { - path: getToolcacheDirectory(version), - version - }; - } - return void 0; -} -function getCanonicalToolcacheVersion(cliVersion2, bundleVersion2, logger) { - if (!cliVersion2?.match(/^[0-9]+\.[0-9]+\.[0-9]+$/)) { - return convertToSemVer(bundleVersion2, logger); - } - return cliVersion2; -} -async function setupCodeQLBundle(toolsInput, apiDetails, tempDir, variant, defaultCliVersion, features, logger) { - if (!await isBinaryAccessible("tar", logger)) { - throw new ConfigurationError( - "Could not find tar in PATH, so unable to extract CodeQL bundle." - ); - } - const zstdAvailability = await isZstdAvailable(logger); - const source = await getCodeQLSource( - toolsInput, - defaultCliVersion, - apiDetails, - variant, - zstdAvailability.available, - features, - logger - ); - let codeqlFolder; - let toolsVersion = source.toolsVersion; - let toolsDownloadStatusReport; - let toolsSource; - switch (source.sourceType) { - case "local": { - codeqlFolder = await extract( - source.codeqlTarPath, - getTempExtractionDir(tempDir), - source.compressionMethod, - zstdAvailability.version, - logger - ); - toolsSource = "LOCAL" /* Local */; - break; - } - case "toolcache": - codeqlFolder = source.codeqlFolder; - logger.debug(`CodeQL found in cache ${codeqlFolder}`); - toolsSource = "TOOLCACHE" /* Toolcache */; - break; - case "download": { - const result = await downloadCodeQL( - source.codeqlURL, - source.compressionMethod, - source.bundleVersion, - source.cliVersion, - apiDetails, - zstdAvailability.version, - tempDir, - logger - ); - toolsVersion = result.toolsVersion; - codeqlFolder = result.codeqlFolder; - toolsDownloadStatusReport = result.statusReport; - toolsSource = "DOWNLOAD" /* Download */; - break; - } - default: - assertNever(source); - } - return { - codeqlFolder, - toolsDownloadStatusReport, - toolsSource, - toolsVersion, - zstdAvailability - }; -} -async function useZstdBundle(cliVersion2, tarSupportsZstd) { - return ( - // In testing, gzip performs better than zstd on Windows. - process.platform !== "win32" && tarSupportsZstd && semver8.gte(cliVersion2, CODEQL_VERSION_ZSTD_BUNDLE) - ); -} -function getTempExtractionDir(tempDir) { - return path9.join(tempDir, v4_default()); -} -async function getNightlyToolsUrl(logger) { - const zstdAvailability = await isZstdAvailable(logger); - const compressionMethod = await useZstdBundle( - CODEQL_VERSION_ZSTD_BUNDLE, - zstdAvailability.available - ) ? 
"zstd" : "gzip"; - try { - const release2 = await getApiClient().rest.repos.listReleases({ - owner: CODEQL_NIGHTLIES_REPOSITORY_OWNER, - repo: CODEQL_NIGHTLIES_REPOSITORY_NAME, - per_page: 1, - page: 1, - prerelease: true - }); - const latestRelease = release2.data[0]; - if (!latestRelease) { - throw new Error("Could not find the latest nightly release."); - } - return `https://github.com/${CODEQL_NIGHTLIES_REPOSITORY_OWNER}/${CODEQL_NIGHTLIES_REPOSITORY_NAME}/releases/download/${latestRelease.tag_name}/${getCodeQLBundleName(compressionMethod)}`; - } catch (e) { - throw new Error( - `Failed to retrieve the latest nightly release: ${wrapError(e)}` - ); - } -} -function getLatestToolcacheVersion(logger) { - const allVersions = toolcache3.findAllVersions("CodeQL").sort((a, b) => semver8.compare(b, a)); - logger.debug( - `Found the following versions of the CodeQL tools in the toolcache: ${JSON.stringify( - allVersions - )}.` - ); - if (allVersions.length > 0) { - const latestToolcacheVersion = allVersions[0]; - logger.info( - `CLI version ${latestToolcacheVersion} is the latest version in the toolcache.` - ); - return latestToolcacheVersion; - } - return void 0; -} -function isReservedToolsValue(tools) { - return CODEQL_BUNDLE_VERSION_ALIAS.includes(tools) || CODEQL_NIGHTLY_TOOLS_INPUTS.includes(tools) || tools === CODEQL_TOOLCACHE_INPUT; -} // src/tracer-config.ts async function shouldEnableIndirectTracing(codeql, config) { @@ -166619,54 +165578,6 @@ var GHES_VERSION_MOST_RECENTLY_DEPRECATED = "3.13"; var GHES_MOST_RECENT_DEPRECATION_DATE = "2025-06-19"; var EXTRACTION_DEBUG_MODE_VERBOSITY = "progress++"; var CODEQL_VERSION_CACHE_CLEANUP = "2.17.1"; -async function setupCodeQL(toolsInput, apiDetails, tempDir, variant, defaultCliVersion, features, logger, checkVersion) { - try { - const { - codeqlFolder, - toolsDownloadStatusReport, - toolsSource, - toolsVersion, - zstdAvailability - } = await setupCodeQLBundle( - toolsInput, - apiDetails, - tempDir, - variant, - defaultCliVersion, - features, - logger - ); - logger.debug( - `Bundle download status report: ${JSON.stringify( - toolsDownloadStatusReport - )}` - ); - let codeqlCmd = path10.join(codeqlFolder, "codeql", "codeql"); - if (process.platform === "win32") { - codeqlCmd += ".exe"; - } else if (process.platform !== "linux" && process.platform !== "darwin") { - throw new ConfigurationError( - `Unsupported platform: ${process.platform}` - ); - } - cachedCodeQL = await getCodeQLForCmd(codeqlCmd, checkVersion); - return { - codeql: cachedCodeQL, - toolsDownloadStatusReport, - toolsSource, - toolsVersion, - zstdAvailability - }; - } catch (rawError) { - const e = wrapApiConfigurationError(rawError); - const ErrorClass = e instanceof ConfigurationError || e instanceof Error && e.message.includes("ENOSPC") ? ConfigurationError : Error; - throw new ErrorClass( - `Unable to download and extract CodeQL CLI: ${getErrorMessage(e)}${e instanceof Error && e.stack ? 
` - -Details: ${e.stack}` : ""}` - ); - } -} async function getCodeQL(cmd) { if (cachedCodeQL === void 0) { cachedCodeQL = await getCodeQLForCmd(cmd, true); @@ -166703,12 +165614,12 @@ async function getCodeQLForCmd(cmd, checkVersion) { }, async isTracedLanguage(language) { const extractorPath = await this.resolveExtractor(language); - const tracingConfigPath = path10.join( + const tracingConfigPath = path7.join( extractorPath, "tools", "tracing-config.lua" ); - return fs10.existsSync(tracingConfigPath); + return fs7.existsSync(tracingConfigPath); }, async isScannedLanguage(language) { return !await this.isTracedLanguage(language); @@ -166785,7 +165696,7 @@ async function getCodeQLForCmd(cmd, checkVersion) { }, async runAutobuild(config, language) { applyAutobuildAzurePipelinesTimeoutFix(); - const autobuildCmd = path10.join( + const autobuildCmd = path7.join( await this.resolveExtractor(language), "tools", process.platform === "win32" ? "autobuild.cmd" : "autobuild.sh" @@ -167184,7 +166095,7 @@ async function writeCodeScanningConfigFile(config, logger) { logger.startGroup("Augmented user configuration file contents"); logger.info(dump(augmentedConfig)); logger.endGroup(); - fs10.writeFileSync(codeScanningConfigFile, dump(augmentedConfig)); + fs7.writeFileSync(codeScanningConfigFile, dump(augmentedConfig)); return codeScanningConfigFile; } var TRAP_CACHE_SIZE_MB = 1024; @@ -167207,7 +166118,7 @@ async function getTrapCachingExtractorConfigArgsForLang(config, language) { ]; } function getGeneratedCodeScanningConfigPath(config) { - return path10.resolve(config.tempDir, "user-config.yaml"); + return path7.resolve(config.tempDir, "user-config.yaml"); } function getExtractionVerbosityArguments(enableDebugLogging) { return enableDebugLogging ? [`--verbosity=${EXTRACTION_DEBUG_MODE_VERBOSITY}`] : []; @@ -167228,16 +166139,16 @@ async function getJobRunUuidSarifOptions(codeql) { } // src/debug-artifacts.ts -var fs13 = __toESM(require("fs")); -var path13 = __toESM(require("path")); +var fs10 = __toESM(require("fs")); +var path10 = __toESM(require("path")); var artifact = __toESM(require_artifact2()); var artifactLegacy = __toESM(require_artifact_client2()); var core12 = __toESM(require_core()); var import_archiver = __toESM(require_archiver()); // src/analyze.ts -var fs11 = __toESM(require("fs")); -var path11 = __toESM(require("path")); +var fs8 = __toESM(require("fs")); +var path8 = __toESM(require("path")); var io5 = __toESM(require_io()); // src/autobuild.ts @@ -167268,7 +166179,7 @@ function dbIsFinalized(config, language, logger) { const dbPath = getCodeQLDatabasePath(config, language); try { const dbInfo = load( - fs11.readFileSync(path11.resolve(dbPath, "codeql-database.yml"), "utf8") + fs8.readFileSync(path8.resolve(dbPath, "codeql-database.yml"), "utf8") ); return !("inProgress" in dbInfo); } catch { @@ -167280,9 +166191,9 @@ function dbIsFinalized(config, language, logger) { } // src/artifact-scanner.ts -var fs12 = __toESM(require("fs")); -var os2 = __toESM(require("os")); -var path12 = __toESM(require("path")); +var fs9 = __toESM(require("fs")); +var os = __toESM(require("os")); +var path9 = __toESM(require("path")); var exec = __toESM(require_exec()); var GITHUB_PAT_CLASSIC_PATTERN = { type: "Personal Access Token (Classic)" /* PersonalAccessClassic */, @@ -167319,7 +166230,7 @@ var GITHUB_TOKEN_PATTERNS = [ function scanFileForTokens(filePath, relativePath, logger) { const findings = []; try { - const content = fs12.readFileSync(filePath, "utf8"); + const content = 
fs9.readFileSync(filePath, "utf8"); for (const { type: type2, pattern } of GITHUB_TOKEN_PATTERNS) { const matches = content.match(pattern); if (matches) { @@ -167349,10 +166260,10 @@ async function scanArchiveFile(archivePath, relativeArchivePath, extractDir, log findings: [] }; try { - const tempExtractDir = fs12.mkdtempSync( - path12.join(extractDir, `extract-${depth}-`) + const tempExtractDir = fs9.mkdtempSync( + path9.join(extractDir, `extract-${depth}-`) ); - const fileName = path12.basename(archivePath).toLowerCase(); + const fileName = path9.basename(archivePath).toLowerCase(); if (fileName.endsWith(".tar.gz") || fileName.endsWith(".tgz")) { logger.debug(`Extracting tar.gz file: ${archivePath}`); await exec.exec("tar", ["-xzf", archivePath, "-C", tempExtractDir], { @@ -167369,21 +166280,21 @@ async function scanArchiveFile(archivePath, relativeArchivePath, extractDir, log ); } else if (fileName.endsWith(".zst")) { logger.debug(`Extracting zst file: ${archivePath}`); - const outputFile = path12.join( + const outputFile = path9.join( tempExtractDir, - path12.basename(archivePath, ".zst") + path9.basename(archivePath, ".zst") ); await exec.exec("zstd", ["-d", archivePath, "-o", outputFile], { silent: true }); } else if (fileName.endsWith(".gz")) { logger.debug(`Extracting gz file: ${archivePath}`); - const outputFile = path12.join( + const outputFile = path9.join( tempExtractDir, - path12.basename(archivePath, ".gz") + path9.basename(archivePath, ".gz") ); await exec.exec("gunzip", ["-c", archivePath], { - outStream: fs12.createWriteStream(outputFile), + outStream: fs9.createWriteStream(outputFile), silent: true }); } else if (fileName.endsWith(".zip")) { @@ -167404,7 +166315,7 @@ async function scanArchiveFile(archivePath, relativeArchivePath, extractDir, log ); result.scannedFiles += scanResult.scannedFiles; result.findings.push(...scanResult.findings); - fs12.rmSync(tempExtractDir, { recursive: true, force: true }); + fs9.rmSync(tempExtractDir, { recursive: true, force: true }); } catch (e) { logger.debug( `Could not extract or scan archive file ${archivePath}: ${getErrorMessage(e)}` @@ -167417,7 +166328,7 @@ async function scanFile(fullPath, relativePath, extractDir, logger, depth = 0) { scannedFiles: 1, findings: [] }; - const fileName = path12.basename(fullPath).toLowerCase(); + const fileName = path9.basename(fullPath).toLowerCase(); const isArchive = fileName.endsWith(".zip") || fileName.endsWith(".tar.gz") || fileName.endsWith(".tgz") || fileName.endsWith(".tar.zst") || fileName.endsWith(".zst") || fileName.endsWith(".gz"); if (isArchive) { const archiveResult = await scanArchiveFile( @@ -167439,10 +166350,10 @@ async function scanDirectory(dirPath, baseRelativePath, logger, depth = 0) { scannedFiles: 0, findings: [] }; - const entries = fs12.readdirSync(dirPath, { withFileTypes: true }); + const entries = fs9.readdirSync(dirPath, { withFileTypes: true }); for (const entry of entries) { - const fullPath = path12.join(dirPath, entry.name); - const relativePath = path12.join(baseRelativePath, entry.name); + const fullPath = path9.join(dirPath, entry.name); + const relativePath = path9.join(baseRelativePath, entry.name); if (entry.isDirectory()) { const subResult = await scanDirectory( fullPath, @@ -167456,7 +166367,7 @@ async function scanDirectory(dirPath, baseRelativePath, logger, depth = 0) { const fileResult = await scanFile( fullPath, relativePath, - path12.dirname(fullPath), + path9.dirname(fullPath), logger, depth ); @@ -167474,11 +166385,11 @@ async function 
scanArtifactsForTokens(filesToScan, logger) { scannedFiles: 0, findings: [] }; - const tempScanDir = fs12.mkdtempSync(path12.join(os2.tmpdir(), "artifact-scan-")); + const tempScanDir = fs9.mkdtempSync(path9.join(os.tmpdir(), "artifact-scan-")); try { for (const filePath of filesToScan) { - const stats = fs12.statSync(filePath); - const fileName = path12.basename(filePath); + const stats = fs9.statSync(filePath); + const fileName = path9.basename(filePath); if (stats.isDirectory()) { const dirResult = await scanDirectory(filePath, fileName, logger); result.scannedFiles += dirResult.scannedFiles; @@ -167515,7 +166426,7 @@ async function scanArtifactsForTokens(filesToScan, logger) { } } finally { try { - fs12.rmSync(tempScanDir, { recursive: true, force: true }); + fs9.rmSync(tempScanDir, { recursive: true, force: true }); } catch (e) { logger.debug( `Could not clean up temporary scan directory: ${getErrorMessage(e)}` @@ -167531,17 +166442,17 @@ function sanitizeArtifactName(name) { function tryPrepareSarifDebugArtifact(config, language, logger) { try { const analyzeActionOutputDir = process.env["CODEQL_ACTION_SARIF_RESULTS_OUTPUT_DIR" /* SARIF_RESULTS_OUTPUT_DIR */]; - if (analyzeActionOutputDir !== void 0 && fs13.existsSync(analyzeActionOutputDir) && fs13.lstatSync(analyzeActionOutputDir).isDirectory()) { - const sarifFile = path13.resolve( + if (analyzeActionOutputDir !== void 0 && fs10.existsSync(analyzeActionOutputDir) && fs10.lstatSync(analyzeActionOutputDir).isDirectory()) { + const sarifFile = path10.resolve( analyzeActionOutputDir, `${language}.sarif` ); - if (fs13.existsSync(sarifFile)) { - const sarifInDbLocation = path13.resolve( + if (fs10.existsSync(sarifFile)) { + const sarifInDbLocation = path10.resolve( config.dbLocation, `${language}.sarif` ); - fs13.copyFileSync(sarifFile, sarifInDbLocation); + fs10.copyFileSync(sarifFile, sarifInDbLocation); return sarifInDbLocation; } } @@ -167592,13 +166503,13 @@ async function tryUploadAllAvailableDebugArtifacts(codeql, config, logger, codeQ } logger.info("Preparing database logs debug artifact..."); const databaseDirectory = getCodeQLDatabasePath(config, language); - const logsDirectory = path13.resolve(databaseDirectory, "log"); + const logsDirectory = path10.resolve(databaseDirectory, "log"); if (doesDirectoryExist(logsDirectory)) { filesToUpload.push(...listFolder(logsDirectory)); logger.info("Database logs debug artifact ready for upload."); } logger.info("Preparing database cluster logs debug artifact..."); - const multiLanguageTracingLogsDirectory = path13.resolve( + const multiLanguageTracingLogsDirectory = path10.resolve( config.dbLocation, "log" ); @@ -167685,8 +166596,8 @@ async function uploadArtifacts(logger, toUpload, rootDir, artifactName, ghVarian try { await artifactUploader.uploadArtifact( sanitizeArtifactName(`${artifactName}${suffix}`), - toUpload.map((file) => path13.normalize(file)), - path13.normalize(rootDir), + toUpload.map((file) => path10.normalize(file)), + path10.normalize(rootDir), { // ensure we don't keep the debug artifacts around for too long since they can be large. 
retentionDays: 7 @@ -167713,17 +166624,17 @@ async function getArtifactUploaderClient(logger, ghVariant) { } async function createPartialDatabaseBundle(config, language) { const databasePath = getCodeQLDatabasePath(config, language); - const databaseBundlePath = path13.resolve( + const databaseBundlePath = path10.resolve( config.dbLocation, `${config.debugDatabaseName}-${language}-partial.zip` ); core12.info( `${config.debugDatabaseName}-${language} is not finalized. Uploading partial database bundle at ${databaseBundlePath}...` ); - if (fs13.existsSync(databaseBundlePath)) { - await fs13.promises.rm(databaseBundlePath, { force: true }); + if (fs10.existsSync(databaseBundlePath)) { + await fs10.promises.rm(databaseBundlePath, { force: true }); } - const output = fs13.createWriteStream(databaseBundlePath); + const output = fs10.createWriteStream(databaseBundlePath); const zip = (0, import_archiver.default)("zip"); zip.on("error", (err) => { throw err; @@ -167749,20 +166660,20 @@ async function createDatabaseBundleCli(codeql, config, language) { } // src/init-action-post-helper.ts -var fs17 = __toESM(require("fs")); +var fs14 = __toESM(require("fs")); var github2 = __toESM(require_github()); // src/upload-lib.ts -var fs15 = __toESM(require("fs")); -var path15 = __toESM(require("path")); +var fs12 = __toESM(require("fs")); +var path12 = __toESM(require("path")); var url = __toESM(require("url")); var import_zlib = __toESM(require("zlib")); var core13 = __toESM(require_core()); var jsonschema2 = __toESM(require_lib2()); // src/fingerprints.ts -var fs14 = __toESM(require("fs")); -var import_path2 = __toESM(require("path")); +var fs11 = __toESM(require("fs")); +var import_path = __toESM(require("path")); // node_modules/long/index.js var wasm = null; @@ -168749,7 +167660,7 @@ async function hash(callback, filepath) { } updateHash(current); }; - const readStream = fs14.createReadStream(filepath, "utf8"); + const readStream = fs11.createReadStream(filepath, "utf8"); for await (const data of readStream) { for (let i = 0; i < data.length; ++i) { processCharacter(data.charCodeAt(i)); @@ -168821,14 +167732,14 @@ function resolveUriToFile(location, artifacts, sourceRoot, logger) { ); return void 0; } - if (!import_path2.default.isAbsolute(uri)) { + if (!import_path.default.isAbsolute(uri)) { uri = srcRootPrefix + uri; } - if (!fs14.existsSync(uri)) { + if (!fs11.existsSync(uri)) { logger.debug(`Unable to compute fingerprint for non-existent file: ${uri}`); return void 0; } - if (fs14.statSync(uri).isDirectory()) { + if (fs11.statSync(uri).isDirectory()) { logger.debug(`Unable to compute fingerprint for directory: ${uri}`); return void 0; } @@ -168885,34 +167796,6 @@ async function addFingerprints(sarif, sourceRoot, logger) { // src/init.ts var toolrunner4 = __toESM(require_toolrunner()); var io6 = __toESM(require_io()); -async function initCodeQL(toolsInput, apiDetails, tempDir, variant, defaultCliVersion, features, logger) { - logger.startGroup("Setup CodeQL tools"); - const { - codeql, - toolsDownloadStatusReport, - toolsSource, - toolsVersion, - zstdAvailability - } = await setupCodeQL( - toolsInput, - apiDetails, - tempDir, - variant, - defaultCliVersion, - features, - logger, - true - ); - await codeql.printVersion(); - logger.endGroup(); - return { - codeql, - toolsDownloadStatusReport, - toolsSource, - toolsVersion, - zstdAvailability - }; -} // src/upload-lib.ts var GENERIC_403_MSG = "The repo on which this action is running has not opted-in to CodeQL code scanning."; @@ -168926,7 +167809,7 @@ 
function combineSarifFiles(sarifFiles, logger) { for (const sarifFile of sarifFiles) { logger.debug(`Loading SARIF file: ${sarifFile}`); const sarifObject = JSON.parse( - fs15.readFileSync(sarifFile, "utf8") + fs12.readFileSync(sarifFile, "utf8") ); if (combinedSarif.version === null) { combinedSarif.version = sarifObject.version; @@ -168995,35 +167878,10 @@ async function shouldDisableCombineSarifFiles(sarifObjects, githubVersion) { } return true; } -async function minimalInitCodeQL(logger, gitHubVersion, features) { - logger.info( - "Initializing CodeQL since the 'init' Action was not called before this step." - ); - const apiDetails = { - auth: getRequiredInput("token"), - externalRepoAuth: getOptionalInput("external-repository-token"), - url: getRequiredEnvParam("GITHUB_SERVER_URL"), - apiURL: getRequiredEnvParam("GITHUB_API_URL") - }; - const codeQLDefaultVersionInfo = await features.getDefaultCliVersion( - gitHubVersion.type - ); - const initCodeQLResult = await initCodeQL( - void 0, - // There is no tools input on the upload action - apiDetails, - getTemporaryDirectory(), - gitHubVersion.type, - codeQLDefaultVersionInfo, - features, - logger - ); - return initCodeQLResult.codeql; -} -async function combineSarifFilesUsingCLI(sarifFiles, gitHubVersion, features, logger) { +async function combineSarifFilesUsingCLI(sarifFiles, gitHubVersion, _features, logger, getCodeQL2, tempDir) { logger.info("Combining SARIF files using the CodeQL CLI"); const sarifObjects = sarifFiles.map((sarifFile) => { - return JSON.parse(fs15.readFileSync(sarifFile, "utf8")); + return JSON.parse(fs12.readFileSync(sarifFile, "utf8")); }); const deprecationWarningMessage = gitHubVersion.type === "GitHub Enterprise Server" /* GHES */ ? "and will be removed in GitHub Enterprise Server 3.18" : "and will be removed in July 2025"; const deprecationMoreInformationMessage = "For more information, see https://github.blog/changelog/2024-05-06-code-scanning-will-stop-combining-runs-from-a-single-upload"; @@ -169043,23 +167901,15 @@ async function combineSarifFilesUsingCLI(sarifFiles, gitHubVersion, features, lo } return combineSarifFiles(sarifFiles, logger); } - let codeQL; - let tempDir = getTemporaryDirectory(); - const config = await getConfig(tempDir, logger); - if (config !== void 0) { - codeQL = await getCodeQL(config.codeQLCmd); - tempDir = config.tempDir; - } else { - codeQL = await minimalInitCodeQL(logger, gitHubVersion, features); - } - const baseTempDir = path15.resolve(tempDir, "combined-sarif"); - fs15.mkdirSync(baseTempDir, { recursive: true }); - const outputDirectory = fs15.mkdtempSync(path15.resolve(baseTempDir, "output-")); - const outputFile = path15.resolve(outputDirectory, "combined-sarif.sarif"); + const codeQL = await getCodeQL2(); + const baseTempDir = path12.resolve(tempDir, "combined-sarif"); + fs12.mkdirSync(baseTempDir, { recursive: true }); + const outputDirectory = fs12.mkdtempSync(path12.resolve(baseTempDir, "output-")); + const outputFile = path12.resolve(outputDirectory, "combined-sarif.sarif"); await codeQL.mergeResults(sarifFiles, outputFile, { mergeRunsFromEqualCategory: true }); - return JSON.parse(fs15.readFileSync(outputFile, "utf8")); + return JSON.parse(fs12.readFileSync(outputFile, "utf8")); } function populateRunAutomationDetails(sarif, category, analysis_key, environment) { const automationID = getAutomationID2(category, analysis_key, environment); @@ -169088,7 +167938,7 @@ function getAutomationID2(category, analysis_key, environment) { async function uploadPayload(payload, 
repositoryNwo, logger, analysis) { logger.info("Uploading results"); if (shouldSkipSarifUpload()) { - const payloadSaveFile = path15.join( + const payloadSaveFile = path12.join( getTemporaryDirectory(), `payload-${analysis.kind}.json` ); @@ -169096,7 +167946,7 @@ async function uploadPayload(payload, repositoryNwo, logger, analysis) { `SARIF upload disabled by an environment variable. Saving to ${payloadSaveFile}` ); logger.info(`Payload: ${JSON.stringify(payload, null, 2)}`); - fs15.writeFileSync(payloadSaveFile, JSON.stringify(payload, null, 2)); + fs12.writeFileSync(payloadSaveFile, JSON.stringify(payload, null, 2)); return "dummy-sarif-id"; } const client = getApiClient(); @@ -169130,12 +167980,12 @@ async function uploadPayload(payload, repositoryNwo, logger, analysis) { function findSarifFilesInDir(sarifPath, isSarif) { const sarifFiles = []; const walkSarifFiles = (dir) => { - const entries = fs15.readdirSync(dir, { withFileTypes: true }); + const entries = fs12.readdirSync(dir, { withFileTypes: true }); for (const entry of entries) { if (entry.isFile() && isSarif(entry.name)) { - sarifFiles.push(path15.resolve(dir, entry.name)); + sarifFiles.push(path12.resolve(dir, entry.name)); } else if (entry.isDirectory()) { - walkSarifFiles(path15.resolve(dir, entry.name)); + walkSarifFiles(path12.resolve(dir, entry.name)); } } }; @@ -169143,11 +167993,11 @@ function findSarifFilesInDir(sarifPath, isSarif) { return sarifFiles; } function getSarifFilePaths(sarifPath, isSarif) { - if (!fs15.existsSync(sarifPath)) { + if (!fs12.existsSync(sarifPath)) { throw new ConfigurationError(`Path does not exist: ${sarifPath}`); } let sarifFiles; - if (fs15.lstatSync(sarifPath).isDirectory()) { + if (fs12.lstatSync(sarifPath).isDirectory()) { sarifFiles = findSarifFilesInDir(sarifPath, isSarif); if (sarifFiles.length === 0) { throw new ConfigurationError( @@ -169177,7 +168027,7 @@ function countResultsInSarif(sarif) { } function readSarifFile(sarifFilePath) { try { - return JSON.parse(fs15.readFileSync(sarifFilePath, "utf8")); + return JSON.parse(fs12.readFileSync(sarifFilePath, "utf8")); } catch (e) { throw new InvalidSarifUploadError( `Invalid SARIF. 
JSON syntax error: ${getErrorMessage(e)}` @@ -169246,7 +168096,7 @@ function buildPayload(commitOid, ref, analysisKey, analysisName, zippedSarif, wo payloadObj.base_sha = mergeBaseCommitOid; } else if (process.env.GITHUB_EVENT_PATH) { const githubEvent = JSON.parse( - fs15.readFileSync(process.env.GITHUB_EVENT_PATH, "utf8") + fs12.readFileSync(process.env.GITHUB_EVENT_PATH, "utf8") ); payloadObj.base_ref = `refs/heads/${githubEvent.pull_request.base.ref}`; payloadObj.base_sha = githubEvent.pull_request.base.sha; @@ -169254,7 +168104,7 @@ function buildPayload(commitOid, ref, analysisKey, analysisName, zippedSarif, wo } return payloadObj; } -async function postProcessSarifFiles(logger, features, checkoutPath, sarifPaths, category, analysis) { +async function postProcessSarifFiles(logger, features, getCodeQL2, tempPath, checkoutPath, sarifPaths, category, analysis) { logger.info(`Post-processing sarif files: ${JSON.stringify(sarifPaths)}`); const gitHubVersion = await getGitHubVersion(); let sarif; @@ -169268,7 +168118,9 @@ async function postProcessSarifFiles(logger, features, checkoutPath, sarifPaths, sarifPaths, gitHubVersion, features, - logger + logger, + getCodeQL2, + tempPath ); } else { const sarifPath = sarifPaths[0]; @@ -169288,12 +168140,14 @@ async function postProcessSarifFiles(logger, features, checkoutPath, sarifPaths, ); return { sarif, analysisKey, environment }; } -async function uploadFiles(inputSarifPath, checkoutPath, category, features, logger, uploadTarget) { +async function uploadFiles(tempDir, codeql, inputSarifPath, checkoutPath, category, features, logger, uploadTarget) { const sarifPaths = getSarifFilePaths( inputSarifPath, uploadTarget.sarifPredicate ); return uploadSpecifiedFiles( + tempDir, + codeql, sarifPaths, checkoutPath, category, @@ -169302,10 +168156,12 @@ async function uploadFiles(inputSarifPath, checkoutPath, category, features, log uploadTarget ); } -async function uploadSpecifiedFiles(sarifPaths, checkoutPath, category, features, logger, uploadTarget) { +async function uploadSpecifiedFiles(tempDir, codeql, sarifPaths, checkoutPath, category, features, logger, uploadTarget) { const processingResults = await postProcessSarifFiles( logger, features, + async () => codeql, + tempDir, checkoutPath, sarifPaths, category, @@ -169507,7 +168363,7 @@ function filterAlertsByDiffRange(logger, sarif) { if (!locationUri || locationStartLine === void 0) { return false; } - const locationPath = path15.join(checkoutPath, locationUri).replaceAll(path15.sep, "/"); + const locationPath = path12.join(checkoutPath, locationUri).replaceAll(path12.sep, "/"); return diffRanges.some( (range) => range.path === locationPath && (range.startLine <= locationStartLine && range.endLine >= locationStartLine || range.startLine === 0 && range.endLine === 0) ); @@ -169519,8 +168375,8 @@ function filterAlertsByDiffRange(logger, sarif) { } // src/workflow.ts -var fs16 = __toESM(require("fs")); -var path16 = __toESM(require("path")); +var fs13 = __toESM(require("fs")); +var path13 = __toESM(require("path")); var import_zlib2 = __toESM(require("zlib")); var core14 = __toESM(require_core()); function toCodedErrors(errors) { @@ -169548,15 +168404,15 @@ async function getWorkflow(logger) { ); } const workflowPath = await getWorkflowAbsolutePath(logger); - return load(fs16.readFileSync(workflowPath, "utf-8")); + return load(fs13.readFileSync(workflowPath, "utf-8")); } async function getWorkflowAbsolutePath(logger) { const relativePath = await getWorkflowRelativePath(); - const absolutePath = 
path16.join( + const absolutePath = path13.join( getRequiredEnvParam("GITHUB_WORKSPACE"), relativePath ); - if (fs16.existsSync(absolutePath)) { + if (fs13.existsSync(absolutePath)) { logger.debug( `Derived the following absolute path for the currently executing workflow: ${absolutePath}.` ); @@ -169683,6 +168539,8 @@ async function maybeUploadFailedSarif(config, repositoryNwo, features, logger) { } logger.info(`Uploading failed SARIF file ${sarifFile}`); const uploadResult = await uploadFiles( + config.tempDir, + codeql, sarifFile, checkoutPath, category, @@ -169765,7 +168623,7 @@ async function run(uploadAllAvailableDebugArtifacts, printDebugLogs2, codeql, co } if (isSelfHostedRunner()) { try { - fs17.rmSync(config.dbLocation, { + fs14.rmSync(config.dbLocation, { recursive: true, force: true, maxRetries: 3 @@ -169846,7 +168704,7 @@ async function removeUploadedSarif(uploadFailedSarifResult, logger) { } // src/status-report.ts -var os3 = __toESM(require("os")); +var os2 = __toESM(require("os")); var core15 = __toESM(require_core()); function isFirstPartyAnalysis(actionName) { if (actionName !== "upload-sarif" /* UploadSarif */) { @@ -169973,7 +168831,7 @@ async function createStatusReportBase(actionName, status, actionStartedAt, confi statusReport.runner_arch = process.env["RUNNER_ARCH"]; } if (!(runnerOs === "Linux" && isSelfHostedRunner())) { - statusReport.runner_os_release = os3.release(); + statusReport.runner_os_release = os2.release(); } if (codeQlCliVersion !== void 0) { statusReport.codeql_version = codeQlCliVersion.version; diff --git a/lib/upload-lib.js b/lib/upload-lib.js index 984ecc5716..5d88713a2d 100644 --- a/lib/upload-lib.js +++ b/lib/upload-lib.js @@ -204,7 +204,7 @@ var require_file_command = __commonJS({ exports2.issueFileCommand = issueFileCommand; exports2.prepareKeyValueMessage = prepareKeyValueMessage; var crypto2 = __importStar2(require("crypto")); - var fs12 = __importStar2(require("fs")); + var fs11 = __importStar2(require("fs")); var os2 = __importStar2(require("os")); var utils_1 = require_utils(); function issueFileCommand(command, message) { @@ -212,10 +212,10 @@ var require_file_command = __commonJS({ if (!filePath) { throw new Error(`Unable to find environment variable for file command ${command}`); } - if (!fs12.existsSync(filePath)) { + if (!fs11.existsSync(filePath)) { throw new Error(`Missing file at path: ${filePath}`); } - fs12.appendFileSync(filePath, `${(0, utils_1.toCommandValue)(message)}${os2.EOL}`, { + fs11.appendFileSync(filePath, `${(0, utils_1.toCommandValue)(message)}${os2.EOL}`, { encoding: "utf8" }); } @@ -1337,14 +1337,14 @@ var require_util = __commonJS({ } const port = url2.port != null ? url2.port : url2.protocol === "https:" ? 443 : 80; let origin = url2.origin != null ? url2.origin : `${url2.protocol || ""}//${url2.hostname || ""}:${port}`; - let path12 = url2.path != null ? url2.path : `${url2.pathname || ""}${url2.search || ""}`; + let path11 = url2.path != null ? 
url2.path : `${url2.pathname || ""}${url2.search || ""}`; if (origin[origin.length - 1] === "/") { origin = origin.slice(0, origin.length - 1); } - if (path12 && path12[0] !== "/") { - path12 = `/${path12}`; + if (path11 && path11[0] !== "/") { + path11 = `/${path11}`; } - return new URL(`${origin}${path12}`); + return new URL(`${origin}${path11}`); } if (!isHttpOrHttpsPrefixed(url2.origin || url2.protocol)) { throw new InvalidArgumentError("Invalid URL protocol: the URL must start with `http:` or `https:`."); @@ -1795,39 +1795,39 @@ var require_diagnostics = __commonJS({ }); diagnosticsChannel.channel("undici:client:sendHeaders").subscribe((evt) => { const { - request: { method, path: path12, origin } + request: { method, path: path11, origin } } = evt; - debuglog("sending request to %s %s/%s", method, origin, path12); + debuglog("sending request to %s %s/%s", method, origin, path11); }); diagnosticsChannel.channel("undici:request:headers").subscribe((evt) => { const { - request: { method, path: path12, origin }, + request: { method, path: path11, origin }, response: { statusCode } } = evt; debuglog( "received response to %s %s/%s - HTTP %d", method, origin, - path12, + path11, statusCode ); }); diagnosticsChannel.channel("undici:request:trailers").subscribe((evt) => { const { - request: { method, path: path12, origin } + request: { method, path: path11, origin } } = evt; - debuglog("trailers received from %s %s/%s", method, origin, path12); + debuglog("trailers received from %s %s/%s", method, origin, path11); }); diagnosticsChannel.channel("undici:request:error").subscribe((evt) => { const { - request: { method, path: path12, origin }, + request: { method, path: path11, origin }, error: error3 } = evt; debuglog( "request to %s %s/%s errored - %s", method, origin, - path12, + path11, error3.message ); }); @@ -1876,9 +1876,9 @@ var require_diagnostics = __commonJS({ }); diagnosticsChannel.channel("undici:client:sendHeaders").subscribe((evt) => { const { - request: { method, path: path12, origin } + request: { method, path: path11, origin } } = evt; - debuglog("sending request to %s %s/%s", method, origin, path12); + debuglog("sending request to %s %s/%s", method, origin, path11); }); } diagnosticsChannel.channel("undici:websocket:open").subscribe((evt) => { @@ -1941,7 +1941,7 @@ var require_request = __commonJS({ var kHandler = /* @__PURE__ */ Symbol("handler"); var Request = class { constructor(origin, { - path: path12, + path: path11, method, body, headers, @@ -1956,11 +1956,11 @@ var require_request = __commonJS({ expectContinue, servername }, handler2) { - if (typeof path12 !== "string") { + if (typeof path11 !== "string") { throw new InvalidArgumentError("path must be a string"); - } else if (path12[0] !== "/" && !(path12.startsWith("http://") || path12.startsWith("https://")) && method !== "CONNECT") { + } else if (path11[0] !== "/" && !(path11.startsWith("http://") || path11.startsWith("https://")) && method !== "CONNECT") { throw new InvalidArgumentError("path must be an absolute URL or start with a slash"); - } else if (invalidPathRegex.test(path12)) { + } else if (invalidPathRegex.test(path11)) { throw new InvalidArgumentError("invalid request path"); } if (typeof method !== "string") { @@ -2023,7 +2023,7 @@ var require_request = __commonJS({ this.completed = false; this.aborted = false; this.upgrade = upgrade || null; - this.path = query ? buildURL(path12, query) : path12; + this.path = query ? 
buildURL(path11, query) : path11; this.origin = origin; this.idempotent = idempotent == null ? method === "HEAD" || method === "GET" : idempotent; this.blocking = blocking == null ? false : blocking; @@ -2335,9 +2335,9 @@ var require_dispatcher_base = __commonJS({ } close(callback) { if (callback === void 0) { - return new Promise((resolve6, reject) => { + return new Promise((resolve5, reject) => { this.close((err, data) => { - return err ? reject(err) : resolve6(data); + return err ? reject(err) : resolve5(data); }); }); } @@ -2375,12 +2375,12 @@ var require_dispatcher_base = __commonJS({ err = null; } if (callback === void 0) { - return new Promise((resolve6, reject) => { + return new Promise((resolve5, reject) => { this.destroy(err, (err2, data) => { return err2 ? ( /* istanbul ignore next: should never error */ reject(err2) - ) : resolve6(data); + ) : resolve5(data); }); }); } @@ -4647,8 +4647,8 @@ var require_util2 = __commonJS({ function createDeferredPromise() { let res; let rej; - const promise = new Promise((resolve6, reject) => { - res = resolve6; + const promise = new Promise((resolve5, reject) => { + res = resolve5; rej = reject; }); return { promise, resolve: res, reject: rej }; @@ -6536,7 +6536,7 @@ var require_client_h1 = __commonJS({ return method !== "GET" && method !== "HEAD" && method !== "OPTIONS" && method !== "TRACE" && method !== "CONNECT"; } function writeH1(client, request2) { - const { method, path: path12, host, upgrade, blocking, reset } = request2; + const { method, path: path11, host, upgrade, blocking, reset } = request2; let { body, headers, contentLength } = request2; const expectsPayload = method === "PUT" || method === "POST" || method === "PATCH" || method === "QUERY" || method === "PROPFIND" || method === "PROPPATCH"; if (util.isFormDataLike(body)) { @@ -6602,7 +6602,7 @@ var require_client_h1 = __commonJS({ if (blocking) { socket[kBlocking] = true; } - let header = `${method} ${path12} HTTP/1.1\r + let header = `${method} ${path11} HTTP/1.1\r `; if (typeof host === "string") { header += `host: ${host}\r @@ -6789,12 +6789,12 @@ upgrade: ${upgrade}\r cb(); } } - const waitForDrain = () => new Promise((resolve6, reject) => { + const waitForDrain = () => new Promise((resolve5, reject) => { assert(callback === null); if (socket[kError]) { reject(socket[kError]); } else { - callback = resolve6; + callback = resolve5; } }); socket.on("close", onDrain).on("drain", onDrain); @@ -7128,7 +7128,7 @@ var require_client_h2 = __commonJS({ } function writeH2(client, request2) { const session = client[kHTTP2Session]; - const { method, path: path12, host, upgrade, expectContinue, signal, headers: reqHeaders } = request2; + const { method, path: path11, host, upgrade, expectContinue, signal, headers: reqHeaders } = request2; let { body } = request2; if (upgrade) { util.errorRequest(client, request2, new Error("Upgrade not supported for H2")); @@ -7195,7 +7195,7 @@ var require_client_h2 = __commonJS({ }); return true; } - headers[HTTP2_HEADER_PATH] = path12; + headers[HTTP2_HEADER_PATH] = path11; headers[HTTP2_HEADER_SCHEME] = "https"; const expectsPayload = method === "PUT" || method === "POST" || method === "PATCH"; if (body && typeof body.read === "function") { @@ -7431,12 +7431,12 @@ var require_client_h2 = __commonJS({ cb(); } } - const waitForDrain = () => new Promise((resolve6, reject) => { + const waitForDrain = () => new Promise((resolve5, reject) => { assert(callback === null); if (socket[kError]) { reject(socket[kError]); } else { - callback = resolve6; + 
callback = resolve5; } }); h2stream.on("close", onDrain).on("drain", onDrain); @@ -7548,9 +7548,9 @@ var require_redirect_handler = __commonJS({ return this.handler.onHeaders(statusCode, headers, resume, statusText); } const { origin, pathname, search } = util.parseURL(new URL(this.location, this.opts.origin && new URL(this.opts.path, this.opts.origin))); - const path12 = search ? `${pathname}${search}` : pathname; + const path11 = search ? `${pathname}${search}` : pathname; this.opts.headers = cleanRequestHeaders(this.opts.headers, statusCode === 303, this.opts.origin !== origin); - this.opts.path = path12; + this.opts.path = path11; this.opts.origin = origin; this.opts.maxRedirections = 0; this.opts.query = null; @@ -7913,16 +7913,16 @@ var require_client = __commonJS({ return this[kNeedDrain] < 2; } async [kClose]() { - return new Promise((resolve6) => { + return new Promise((resolve5) => { if (this[kSize]) { - this[kClosedResolve] = resolve6; + this[kClosedResolve] = resolve5; } else { - resolve6(null); + resolve5(null); } }); } async [kDestroy](err) { - return new Promise((resolve6) => { + return new Promise((resolve5) => { const requests = this[kQueue].splice(this[kPendingIdx]); for (let i = 0; i < requests.length; i++) { const request2 = requests[i]; @@ -7933,7 +7933,7 @@ var require_client = __commonJS({ this[kClosedResolve](); this[kClosedResolve] = null; } - resolve6(null); + resolve5(null); }; if (this[kHTTPContext]) { this[kHTTPContext].destroy(err, callback); @@ -7984,7 +7984,7 @@ var require_client = __commonJS({ }); } try { - const socket = await new Promise((resolve6, reject) => { + const socket = await new Promise((resolve5, reject) => { client[kConnector]({ host, hostname, @@ -7996,7 +7996,7 @@ var require_client = __commonJS({ if (err) { reject(err); } else { - resolve6(socket2); + resolve5(socket2); } }); }); @@ -8332,8 +8332,8 @@ var require_pool_base = __commonJS({ if (this[kQueue].isEmpty()) { await Promise.all(this[kClients].map((c) => c.close())); } else { - await new Promise((resolve6) => { - this[kClosedResolve] = resolve6; + await new Promise((resolve5) => { + this[kClosedResolve] = resolve5; }); } } @@ -8784,10 +8784,10 @@ var require_proxy_agent = __commonJS({ }; const { origin, - path: path12 = "/", + path: path11 = "/", headers = {} } = opts; - opts.path = origin + path12; + opts.path = origin + path11; if (!("host" in headers) && !("Host" in headers)) { const { host } = new URL2(origin); headers.host = host; @@ -9548,7 +9548,7 @@ var require_readable = __commonJS({ if (this._readableState.closeEmitted) { return null; } - return await new Promise((resolve6, reject) => { + return await new Promise((resolve5, reject) => { if (this[kContentLength] > limit) { this.destroy(new AbortError()); } @@ -9561,7 +9561,7 @@ var require_readable = __commonJS({ if (signal?.aborted) { reject(signal.reason ?? 
new AbortError()); } else { - resolve6(null); + resolve5(null); } }).on("error", noop3).on("data", function(chunk) { limit -= chunk.length; @@ -9580,7 +9580,7 @@ var require_readable = __commonJS({ } async function consume(stream2, type2) { assert(!stream2[kConsume]); - return new Promise((resolve6, reject) => { + return new Promise((resolve5, reject) => { if (isUnusable(stream2)) { const rState = stream2._readableState; if (rState.destroyed && rState.closeEmitted === false) { @@ -9597,7 +9597,7 @@ var require_readable = __commonJS({ stream2[kConsume] = { type: type2, stream: stream2, - resolve: resolve6, + resolve: resolve5, reject, length: 0, body: [] @@ -9667,18 +9667,18 @@ var require_readable = __commonJS({ return buffer; } function consumeEnd(consume2) { - const { type: type2, body, resolve: resolve6, stream: stream2, length } = consume2; + const { type: type2, body, resolve: resolve5, stream: stream2, length } = consume2; try { if (type2 === "text") { - resolve6(chunksDecode(body, length)); + resolve5(chunksDecode(body, length)); } else if (type2 === "json") { - resolve6(JSON.parse(chunksDecode(body, length))); + resolve5(JSON.parse(chunksDecode(body, length))); } else if (type2 === "arrayBuffer") { - resolve6(chunksConcat(body, length).buffer); + resolve5(chunksConcat(body, length).buffer); } else if (type2 === "blob") { - resolve6(new Blob(body, { type: stream2[kContentType] })); + resolve5(new Blob(body, { type: stream2[kContentType] })); } else if (type2 === "bytes") { - resolve6(chunksConcat(body, length)); + resolve5(chunksConcat(body, length)); } consumeFinish(consume2); } catch (err) { @@ -9935,9 +9935,9 @@ var require_api_request = __commonJS({ }; function request2(opts, callback) { if (callback === void 0) { - return new Promise((resolve6, reject) => { + return new Promise((resolve5, reject) => { request2.call(this, opts, (err, data) => { - return err ? reject(err) : resolve6(data); + return err ? reject(err) : resolve5(data); }); }); } @@ -10160,9 +10160,9 @@ var require_api_stream = __commonJS({ }; function stream2(opts, factory, callback) { if (callback === void 0) { - return new Promise((resolve6, reject) => { + return new Promise((resolve5, reject) => { stream2.call(this, opts, factory, (err, data) => { - return err ? reject(err) : resolve6(data); + return err ? reject(err) : resolve5(data); }); }); } @@ -10447,9 +10447,9 @@ var require_api_upgrade = __commonJS({ }; function upgrade(opts, callback) { if (callback === void 0) { - return new Promise((resolve6, reject) => { + return new Promise((resolve5, reject) => { upgrade.call(this, opts, (err, data) => { - return err ? reject(err) : resolve6(data); + return err ? reject(err) : resolve5(data); }); }); } @@ -10541,9 +10541,9 @@ var require_api_connect = __commonJS({ }; function connect(opts, callback) { if (callback === void 0) { - return new Promise((resolve6, reject) => { + return new Promise((resolve5, reject) => { connect.call(this, opts, (err, data) => { - return err ? reject(err) : resolve6(data); + return err ? 
reject(err) : resolve5(data); }); }); } @@ -10708,20 +10708,20 @@ var require_mock_utils = __commonJS({ } return true; } - function safeUrl(path12) { - if (typeof path12 !== "string") { - return path12; + function safeUrl(path11) { + if (typeof path11 !== "string") { + return path11; } - const pathSegments = path12.split("?"); + const pathSegments = path11.split("?"); if (pathSegments.length !== 2) { - return path12; + return path11; } const qp = new URLSearchParams(pathSegments.pop()); qp.sort(); return [...pathSegments, qp.toString()].join("?"); } - function matchKey(mockDispatch2, { path: path12, method, body, headers }) { - const pathMatch = matchValue(mockDispatch2.path, path12); + function matchKey(mockDispatch2, { path: path11, method, body, headers }) { + const pathMatch = matchValue(mockDispatch2.path, path11); const methodMatch = matchValue(mockDispatch2.method, method); const bodyMatch = typeof mockDispatch2.body !== "undefined" ? matchValue(mockDispatch2.body, body) : true; const headersMatch = matchHeaders(mockDispatch2, headers); @@ -10743,7 +10743,7 @@ var require_mock_utils = __commonJS({ function getMockDispatch(mockDispatches, key) { const basePath = key.query ? buildURL(key.path, key.query) : key.path; const resolvedPath = typeof basePath === "string" ? safeUrl(basePath) : basePath; - let matchedMockDispatches = mockDispatches.filter(({ consumed }) => !consumed).filter(({ path: path12 }) => matchValue(safeUrl(path12), resolvedPath)); + let matchedMockDispatches = mockDispatches.filter(({ consumed }) => !consumed).filter(({ path: path11 }) => matchValue(safeUrl(path11), resolvedPath)); if (matchedMockDispatches.length === 0) { throw new MockNotMatchedError(`Mock dispatch not matched for path '${resolvedPath}'`); } @@ -10781,9 +10781,9 @@ var require_mock_utils = __commonJS({ } } function buildKey(opts) { - const { path: path12, method, body, headers, query } = opts; + const { path: path11, method, body, headers, query } = opts; return { - path: path12, + path: path11, method, body, headers, @@ -11246,10 +11246,10 @@ var require_pending_interceptors_formatter = __commonJS({ } format(pendingInterceptors) { const withPrettyHeaders = pendingInterceptors.map( - ({ method, path: path12, data: { statusCode }, persist, times, timesInvoked, origin }) => ({ + ({ method, path: path11, data: { statusCode }, persist, times, timesInvoked, origin }) => ({ Method: method, Origin: origin, - Path: path12, + Path: path11, "Status code": statusCode, Persistent: persist ? 
PERSISTENT : NOT_PERSISTENT, Invocations: timesInvoked, @@ -14405,7 +14405,7 @@ var require_fetch = __commonJS({ function dispatch({ body }) { const url2 = requestCurrentURL(request2); const agent = fetchParams.controller.dispatcher; - return new Promise((resolve6, reject) => agent.dispatch( + return new Promise((resolve5, reject) => agent.dispatch( { path: url2.pathname + url2.search, origin: url2.origin, @@ -14481,7 +14481,7 @@ var require_fetch = __commonJS({ } } const onError = this.onError.bind(this); - resolve6({ + resolve5({ status, statusText, headersList, @@ -14527,7 +14527,7 @@ var require_fetch = __commonJS({ for (let i = 0; i < rawHeaders.length; i += 2) { headersList.append(bufferToLowerCasedHeaderName(rawHeaders[i]), rawHeaders[i + 1].toString("latin1"), true); } - resolve6({ + resolve5({ status, statusText: STATUS_CODES[status], headersList, @@ -16130,9 +16130,9 @@ var require_util6 = __commonJS({ } } } - function validateCookiePath(path12) { - for (let i = 0; i < path12.length; ++i) { - const code = path12.charCodeAt(i); + function validateCookiePath(path11) { + for (let i = 0; i < path11.length; ++i) { + const code = path11.charCodeAt(i); if (code < 32 || // exclude CTLs (0-31) code === 127 || // DEL code === 59) { @@ -18120,8 +18120,8 @@ var require_util8 = __commonJS({ return true; } function delay2(ms) { - return new Promise((resolve6) => { - setTimeout(resolve6, ms).unref(); + return new Promise((resolve5) => { + setTimeout(resolve5, ms).unref(); }); } module2.exports = { @@ -18726,11 +18726,11 @@ var require_undici = __commonJS({ if (typeof opts.path !== "string") { throw new InvalidArgumentError("invalid opts.path"); } - let path12 = opts.path; + let path11 = opts.path; if (!opts.path.startsWith("/")) { - path12 = `/${path12}`; + path11 = `/${path11}`; } - url2 = new URL(util.parseOrigin(url2).origin + path12); + url2 = new URL(util.parseOrigin(url2).origin + path11); } else { if (!opts) { opts = typeof url2 === "object" ? url2 : {}; @@ -18844,11 +18844,11 @@ var require_lib = __commonJS({ })(); var __awaiter2 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { function adopt(value) { - return value instanceof P ? value : new P(function(resolve6) { - resolve6(value); + return value instanceof P ? value : new P(function(resolve5) { + resolve5(value); }); } - return new (P || (P = Promise))(function(resolve6, reject) { + return new (P || (P = Promise))(function(resolve5, reject) { function fulfilled(value) { try { step(generator.next(value)); @@ -18864,7 +18864,7 @@ var require_lib = __commonJS({ } } function step(result) { - result.done ? resolve6(result.value) : adopt(result.value).then(fulfilled, rejected); + result.done ? 
resolve5(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); @@ -18951,26 +18951,26 @@ var require_lib = __commonJS({ } readBody() { return __awaiter2(this, void 0, void 0, function* () { - return new Promise((resolve6) => __awaiter2(this, void 0, void 0, function* () { + return new Promise((resolve5) => __awaiter2(this, void 0, void 0, function* () { let output = Buffer.alloc(0); this.message.on("data", (chunk) => { output = Buffer.concat([output, chunk]); }); this.message.on("end", () => { - resolve6(output.toString()); + resolve5(output.toString()); }); })); }); } readBodyBuffer() { return __awaiter2(this, void 0, void 0, function* () { - return new Promise((resolve6) => __awaiter2(this, void 0, void 0, function* () { + return new Promise((resolve5) => __awaiter2(this, void 0, void 0, function* () { const chunks = []; this.message.on("data", (chunk) => { chunks.push(chunk); }); this.message.on("end", () => { - resolve6(Buffer.concat(chunks)); + resolve5(Buffer.concat(chunks)); }); })); }); @@ -19178,14 +19178,14 @@ var require_lib = __commonJS({ */ requestRaw(info6, data) { return __awaiter2(this, void 0, void 0, function* () { - return new Promise((resolve6, reject) => { + return new Promise((resolve5, reject) => { function callbackForResult(err, res) { if (err) { reject(err); } else if (!res) { reject(new Error("Unknown error")); } else { - resolve6(res); + resolve5(res); } } this.requestRawWithCallback(info6, data, callbackForResult); @@ -19429,12 +19429,12 @@ var require_lib = __commonJS({ return __awaiter2(this, void 0, void 0, function* () { retryNumber = Math.min(ExponentialBackoffCeiling, retryNumber); const ms = ExponentialBackoffTimeSlice * Math.pow(2, retryNumber); - return new Promise((resolve6) => setTimeout(() => resolve6(), ms)); + return new Promise((resolve5) => setTimeout(() => resolve5(), ms)); }); } _processResponse(res, options) { return __awaiter2(this, void 0, void 0, function* () { - return new Promise((resolve6, reject) => __awaiter2(this, void 0, void 0, function* () { + return new Promise((resolve5, reject) => __awaiter2(this, void 0, void 0, function* () { const statusCode = res.message.statusCode || 0; const response = { statusCode, @@ -19442,7 +19442,7 @@ var require_lib = __commonJS({ headers: {} }; if (statusCode === HttpCodes.NotFound) { - resolve6(response); + resolve5(response); } function dateTimeDeserializer(key, value) { if (typeof value === "string") { @@ -19481,7 +19481,7 @@ var require_lib = __commonJS({ err.result = response.result; reject(err); } else { - resolve6(response); + resolve5(response); } })); }); @@ -19498,11 +19498,11 @@ var require_auth = __commonJS({ "use strict"; var __awaiter2 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { function adopt(value) { - return value instanceof P ? value : new P(function(resolve6) { - resolve6(value); + return value instanceof P ? value : new P(function(resolve5) { + resolve5(value); }); } - return new (P || (P = Promise))(function(resolve6, reject) { + return new (P || (P = Promise))(function(resolve5, reject) { function fulfilled(value) { try { step(generator.next(value)); @@ -19518,7 +19518,7 @@ var require_auth = __commonJS({ } } function step(result) { - result.done ? resolve6(result.value) : adopt(result.value).then(fulfilled, rejected); + result.done ? 
resolve5(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); @@ -19602,11 +19602,11 @@ var require_oidc_utils = __commonJS({ "use strict"; var __awaiter2 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { function adopt(value) { - return value instanceof P ? value : new P(function(resolve6) { - resolve6(value); + return value instanceof P ? value : new P(function(resolve5) { + resolve5(value); }); } - return new (P || (P = Promise))(function(resolve6, reject) { + return new (P || (P = Promise))(function(resolve5, reject) { function fulfilled(value) { try { step(generator.next(value)); @@ -19622,7 +19622,7 @@ var require_oidc_utils = __commonJS({ } } function step(result) { - result.done ? resolve6(result.value) : adopt(result.value).then(fulfilled, rejected); + result.done ? resolve5(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); @@ -19700,11 +19700,11 @@ var require_summary = __commonJS({ "use strict"; var __awaiter2 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { function adopt(value) { - return value instanceof P ? value : new P(function(resolve6) { - resolve6(value); + return value instanceof P ? value : new P(function(resolve5) { + resolve5(value); }); } - return new (P || (P = Promise))(function(resolve6, reject) { + return new (P || (P = Promise))(function(resolve5, reject) { function fulfilled(value) { try { step(generator.next(value)); @@ -19720,7 +19720,7 @@ var require_summary = __commonJS({ } } function step(result) { - result.done ? resolve6(result.value) : adopt(result.value).then(fulfilled, rejected); + result.done ? resolve5(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); @@ -20033,7 +20033,7 @@ var require_path_utils = __commonJS({ exports2.toPosixPath = toPosixPath; exports2.toWin32Path = toWin32Path; exports2.toPlatformPath = toPlatformPath; - var path12 = __importStar2(require("path")); + var path11 = __importStar2(require("path")); function toPosixPath(pth) { return pth.replace(/[\\]/g, "/"); } @@ -20041,7 +20041,7 @@ var require_path_utils = __commonJS({ return pth.replace(/[/]/g, "\\"); } function toPlatformPath(pth) { - return pth.replace(/[/\\]/g, path12.sep); + return pth.replace(/[/\\]/g, path11.sep); } } }); @@ -20089,11 +20089,11 @@ var require_io_util = __commonJS({ })(); var __awaiter2 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { function adopt(value) { - return value instanceof P ? value : new P(function(resolve6) { - resolve6(value); + return value instanceof P ? value : new P(function(resolve5) { + resolve5(value); }); } - return new (P || (P = Promise))(function(resolve6, reject) { + return new (P || (P = Promise))(function(resolve5, reject) { function fulfilled(value) { try { step(generator.next(value)); @@ -20109,7 +20109,7 @@ var require_io_util = __commonJS({ } } function step(result) { - result.done ? resolve6(result.value) : adopt(result.value).then(fulfilled, rejected); + result.done ? 
resolve5(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); @@ -20123,13 +20123,13 @@ var require_io_util = __commonJS({ exports2.isRooted = isRooted; exports2.tryGetExecutablePath = tryGetExecutablePath; exports2.getCmdPath = getCmdPath; - var fs12 = __importStar2(require("fs")); - var path12 = __importStar2(require("path")); - _a = fs12.promises, exports2.chmod = _a.chmod, exports2.copyFile = _a.copyFile, exports2.lstat = _a.lstat, exports2.mkdir = _a.mkdir, exports2.open = _a.open, exports2.readdir = _a.readdir, exports2.rename = _a.rename, exports2.rm = _a.rm, exports2.rmdir = _a.rmdir, exports2.stat = _a.stat, exports2.symlink = _a.symlink, exports2.unlink = _a.unlink; + var fs11 = __importStar2(require("fs")); + var path11 = __importStar2(require("path")); + _a = fs11.promises, exports2.chmod = _a.chmod, exports2.copyFile = _a.copyFile, exports2.lstat = _a.lstat, exports2.mkdir = _a.mkdir, exports2.open = _a.open, exports2.readdir = _a.readdir, exports2.rename = _a.rename, exports2.rm = _a.rm, exports2.rmdir = _a.rmdir, exports2.stat = _a.stat, exports2.symlink = _a.symlink, exports2.unlink = _a.unlink; exports2.IS_WINDOWS = process.platform === "win32"; function readlink(fsPath) { return __awaiter2(this, void 0, void 0, function* () { - const result = yield fs12.promises.readlink(fsPath); + const result = yield fs11.promises.readlink(fsPath); if (exports2.IS_WINDOWS && !result.endsWith("\\")) { return `${result}\\`; } @@ -20137,7 +20137,7 @@ var require_io_util = __commonJS({ }); } exports2.UV_FS_O_EXLOCK = 268435456; - exports2.READONLY = fs12.constants.O_RDONLY; + exports2.READONLY = fs11.constants.O_RDONLY; function exists(fsPath) { return __awaiter2(this, void 0, void 0, function* () { try { @@ -20179,7 +20179,7 @@ var require_io_util = __commonJS({ } if (stats && stats.isFile()) { if (exports2.IS_WINDOWS) { - const upperExt = path12.extname(filePath).toUpperCase(); + const upperExt = path11.extname(filePath).toUpperCase(); if (extensions.some((validExt) => validExt.toUpperCase() === upperExt)) { return filePath; } @@ -20203,11 +20203,11 @@ var require_io_util = __commonJS({ if (stats && stats.isFile()) { if (exports2.IS_WINDOWS) { try { - const directory = path12.dirname(filePath); - const upperName = path12.basename(filePath).toUpperCase(); + const directory = path11.dirname(filePath); + const upperName = path11.basename(filePath).toUpperCase(); for (const actualName of yield (0, exports2.readdir)(directory)) { if (upperName === actualName.toUpperCase()) { - filePath = path12.join(directory, actualName); + filePath = path11.join(directory, actualName); break; } } @@ -20286,11 +20286,11 @@ var require_io = __commonJS({ })(); var __awaiter2 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { function adopt(value) { - return value instanceof P ? value : new P(function(resolve6) { - resolve6(value); + return value instanceof P ? value : new P(function(resolve5) { + resolve5(value); }); } - return new (P || (P = Promise))(function(resolve6, reject) { + return new (P || (P = Promise))(function(resolve5, reject) { function fulfilled(value) { try { step(generator.next(value)); @@ -20306,7 +20306,7 @@ var require_io = __commonJS({ } } function step(result) { - result.done ? resolve6(result.value) : adopt(result.value).then(fulfilled, rejected); + result.done ? 
resolve5(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); @@ -20319,7 +20319,7 @@ var require_io = __commonJS({ exports2.which = which6; exports2.findInPath = findInPath; var assert_1 = require("assert"); - var path12 = __importStar2(require("path")); + var path11 = __importStar2(require("path")); var ioUtil = __importStar2(require_io_util()); function cp(source_1, dest_1) { return __awaiter2(this, arguments, void 0, function* (source, dest, options = {}) { @@ -20328,7 +20328,7 @@ var require_io = __commonJS({ if (destStat && destStat.isFile() && !force) { return; } - const newDest = destStat && destStat.isDirectory() && copySourceDirectory ? path12.join(dest, path12.basename(source)) : dest; + const newDest = destStat && destStat.isDirectory() && copySourceDirectory ? path11.join(dest, path11.basename(source)) : dest; if (!(yield ioUtil.exists(source))) { throw new Error(`no such file or directory: ${source}`); } @@ -20340,7 +20340,7 @@ var require_io = __commonJS({ yield cpDirRecursive(source, newDest, 0, force); } } else { - if (path12.relative(source, newDest) === "") { + if (path11.relative(source, newDest) === "") { throw new Error(`'${newDest}' and '${source}' are the same file`); } yield copyFile(source, newDest, force); @@ -20352,7 +20352,7 @@ var require_io = __commonJS({ if (yield ioUtil.exists(dest)) { let destExists = true; if (yield ioUtil.isDirectory(dest)) { - dest = path12.join(dest, path12.basename(source)); + dest = path11.join(dest, path11.basename(source)); destExists = yield ioUtil.exists(dest); } if (destExists) { @@ -20363,7 +20363,7 @@ var require_io = __commonJS({ } } } - yield mkdirP(path12.dirname(dest)); + yield mkdirP(path11.dirname(dest)); yield ioUtil.rename(source, dest); }); } @@ -20422,7 +20422,7 @@ var require_io = __commonJS({ } const extensions = []; if (ioUtil.IS_WINDOWS && process.env["PATHEXT"]) { - for (const extension of process.env["PATHEXT"].split(path12.delimiter)) { + for (const extension of process.env["PATHEXT"].split(path11.delimiter)) { if (extension) { extensions.push(extension); } @@ -20435,12 +20435,12 @@ var require_io = __commonJS({ } return []; } - if (tool.includes(path12.sep)) { + if (tool.includes(path11.sep)) { return []; } const directories = []; if (process.env.PATH) { - for (const p of process.env.PATH.split(path12.delimiter)) { + for (const p of process.env.PATH.split(path11.delimiter)) { if (p) { directories.push(p); } @@ -20448,7 +20448,7 @@ var require_io = __commonJS({ } const matches = []; for (const directory of directories) { - const filePath = yield ioUtil.tryGetExecutablePath(path12.join(directory, tool), extensions); + const filePath = yield ioUtil.tryGetExecutablePath(path11.join(directory, tool), extensions); if (filePath) { matches.push(filePath); } @@ -20547,11 +20547,11 @@ var require_toolrunner = __commonJS({ })(); var __awaiter2 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { function adopt(value) { - return value instanceof P ? value : new P(function(resolve6) { - resolve6(value); + return value instanceof P ? value : new P(function(resolve5) { + resolve5(value); }); } - return new (P || (P = Promise))(function(resolve6, reject) { + return new (P || (P = Promise))(function(resolve5, reject) { function fulfilled(value) { try { step(generator.next(value)); @@ -20567,7 +20567,7 @@ var require_toolrunner = __commonJS({ } } function step(result) { - result.done ? 
resolve6(result.value) : adopt(result.value).then(fulfilled, rejected); + result.done ? resolve5(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); @@ -20578,7 +20578,7 @@ var require_toolrunner = __commonJS({ var os2 = __importStar2(require("os")); var events = __importStar2(require("events")); var child = __importStar2(require("child_process")); - var path12 = __importStar2(require("path")); + var path11 = __importStar2(require("path")); var io6 = __importStar2(require_io()); var ioUtil = __importStar2(require_io_util()); var timers_1 = require("timers"); @@ -20793,10 +20793,10 @@ var require_toolrunner = __commonJS({ exec() { return __awaiter2(this, void 0, void 0, function* () { if (!ioUtil.isRooted(this.toolPath) && (this.toolPath.includes("/") || IS_WINDOWS && this.toolPath.includes("\\"))) { - this.toolPath = path12.resolve(process.cwd(), this.options.cwd || process.cwd(), this.toolPath); + this.toolPath = path11.resolve(process.cwd(), this.options.cwd || process.cwd(), this.toolPath); } this.toolPath = yield io6.which(this.toolPath, true); - return new Promise((resolve6, reject) => __awaiter2(this, void 0, void 0, function* () { + return new Promise((resolve5, reject) => __awaiter2(this, void 0, void 0, function* () { this._debug(`exec tool: ${this.toolPath}`); this._debug("arguments:"); for (const arg of this.args) { @@ -20879,7 +20879,7 @@ var require_toolrunner = __commonJS({ if (error3) { reject(error3); } else { - resolve6(exitCode); + resolve5(exitCode); } }); if (this.options.input) { @@ -21045,11 +21045,11 @@ var require_exec = __commonJS({ })(); var __awaiter2 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { function adopt(value) { - return value instanceof P ? value : new P(function(resolve6) { - resolve6(value); + return value instanceof P ? value : new P(function(resolve5) { + resolve5(value); }); } - return new (P || (P = Promise))(function(resolve6, reject) { + return new (P || (P = Promise))(function(resolve5, reject) { function fulfilled(value) { try { step(generator.next(value)); @@ -21065,7 +21065,7 @@ var require_exec = __commonJS({ } } function step(result) { - result.done ? resolve6(result.value) : adopt(result.value).then(fulfilled, rejected); + result.done ? resolve5(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); @@ -21165,11 +21165,11 @@ var require_platform = __commonJS({ })(); var __awaiter2 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { function adopt(value) { - return value instanceof P ? value : new P(function(resolve6) { - resolve6(value); + return value instanceof P ? value : new P(function(resolve5) { + resolve5(value); }); } - return new (P || (P = Promise))(function(resolve6, reject) { + return new (P || (P = Promise))(function(resolve5, reject) { function fulfilled(value) { try { step(generator.next(value)); @@ -21185,7 +21185,7 @@ var require_platform = __commonJS({ } } function step(result) { - result.done ? resolve6(result.value) : adopt(result.value).then(fulfilled, rejected); + result.done ? 
resolve5(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); @@ -21294,11 +21294,11 @@ var require_core = __commonJS({ })(); var __awaiter2 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { function adopt(value) { - return value instanceof P ? value : new P(function(resolve6) { - resolve6(value); + return value instanceof P ? value : new P(function(resolve5) { + resolve5(value); }); } - return new (P || (P = Promise))(function(resolve6, reject) { + return new (P || (P = Promise))(function(resolve5, reject) { function fulfilled(value) { try { step(generator.next(value)); @@ -21314,7 +21314,7 @@ var require_core = __commonJS({ } } function step(result) { - result.done ? resolve6(result.value) : adopt(result.value).then(fulfilled, rejected); + result.done ? resolve5(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); @@ -21346,7 +21346,7 @@ var require_core = __commonJS({ var file_command_1 = require_file_command(); var utils_1 = require_utils(); var os2 = __importStar2(require("os")); - var path12 = __importStar2(require("path")); + var path11 = __importStar2(require("path")); var oidc_utils_1 = require_oidc_utils(); var ExitCode; (function(ExitCode2) { @@ -21372,7 +21372,7 @@ var require_core = __commonJS({ } else { (0, command_1.issueCommand)("add-path", {}, inputPath); } - process.env["PATH"] = `${inputPath}${path12.delimiter}${process.env["PATH"]}`; + process.env["PATH"] = `${inputPath}${path11.delimiter}${process.env["PATH"]}`; } function getInput2(name, options) { const val = process.env[`INPUT_${name.replace(/ /g, "_").toUpperCase()}`] || ""; @@ -21495,14 +21495,14 @@ var require_helpers = __commonJS({ "node_modules/jsonschema/lib/helpers.js"(exports2, module2) { "use strict"; var uri = require("url"); - var ValidationError = exports2.ValidationError = function ValidationError2(message, instance, schema2, path12, name, argument) { - if (Array.isArray(path12)) { - this.path = path12; - this.property = path12.reduce(function(sum, item) { + var ValidationError = exports2.ValidationError = function ValidationError2(message, instance, schema2, path11, name, argument) { + if (Array.isArray(path11)) { + this.path = path11; + this.property = path11.reduce(function(sum, item) { return sum + makeSuffix(item); }, "instance"); - } else if (path12 !== void 0) { - this.property = path12; + } else if (path11 !== void 0) { + this.property = path11; } if (message) { this.message = message; @@ -21593,28 +21593,28 @@ var require_helpers = __commonJS({ name: { value: "SchemaError", enumerable: false } } ); - var SchemaContext = exports2.SchemaContext = function SchemaContext2(schema2, options, path12, base, schemas) { + var SchemaContext = exports2.SchemaContext = function SchemaContext2(schema2, options, path11, base, schemas) { this.schema = schema2; this.options = options; - if (Array.isArray(path12)) { - this.path = path12; - this.propertyPath = path12.reduce(function(sum, item) { + if (Array.isArray(path11)) { + this.path = path11; + this.propertyPath = path11.reduce(function(sum, item) { return sum + makeSuffix(item); }, "instance"); } else { - this.propertyPath = path12; + this.propertyPath = path11; } this.base = base; this.schemas = schemas; }; - SchemaContext.prototype.resolve = function resolve6(target) { + SchemaContext.prototype.resolve = function resolve5(target) { return 
uri.resolve(this.base, target); }; SchemaContext.prototype.makeChild = function makeChild(schema2, propertyName) { - var path12 = propertyName === void 0 ? this.path : this.path.concat([propertyName]); + var path11 = propertyName === void 0 ? this.path : this.path.concat([propertyName]); var id = schema2.$id || schema2.id; var base = uri.resolve(this.base, id || ""); - var ctx = new SchemaContext(schema2, this.options, path12, base, Object.create(this.schemas)); + var ctx = new SchemaContext(schema2, this.options, path11, base, Object.create(this.schemas)); if (id && !ctx.schemas[base]) { ctx.schemas[base] = schema2; } @@ -22705,7 +22705,7 @@ var require_validator = __commonJS({ } return schema2; }; - Validator3.prototype.resolve = function resolve6(schema2, switchSchema, ctx) { + Validator3.prototype.resolve = function resolve5(schema2, switchSchema, ctx) { switchSchema = ctx.resolve(switchSchema); if (ctx.schemas[switchSchema]) { return { subschema: ctx.schemas[switchSchema], switchSchema }; @@ -22806,8 +22806,8 @@ var require_context = __commonJS({ if ((0, fs_1.existsSync)(process.env.GITHUB_EVENT_PATH)) { this.payload = JSON.parse((0, fs_1.readFileSync)(process.env.GITHUB_EVENT_PATH, { encoding: "utf8" })); } else { - const path12 = process.env.GITHUB_EVENT_PATH; - process.stdout.write(`GITHUB_EVENT_PATH ${path12} does not exist${os_1.EOL}`); + const path11 = process.env.GITHUB_EVENT_PATH; + process.stdout.write(`GITHUB_EVENT_PATH ${path11} does not exist${os_1.EOL}`); } } this.eventName = process.env.GITHUB_EVENT_NAME; @@ -23632,14 +23632,14 @@ var require_util9 = __commonJS({ } const port = url2.port != null ? url2.port : url2.protocol === "https:" ? 443 : 80; let origin = url2.origin != null ? url2.origin : `${url2.protocol || ""}//${url2.hostname || ""}:${port}`; - let path12 = url2.path != null ? url2.path : `${url2.pathname || ""}${url2.search || ""}`; + let path11 = url2.path != null ? 
url2.path : `${url2.pathname || ""}${url2.search || ""}`; if (origin[origin.length - 1] === "/") { origin = origin.slice(0, origin.length - 1); } - if (path12 && path12[0] !== "/") { - path12 = `/${path12}`; + if (path11 && path11[0] !== "/") { + path11 = `/${path11}`; } - return new URL(`${origin}${path12}`); + return new URL(`${origin}${path11}`); } if (!isHttpOrHttpsPrefixed(url2.origin || url2.protocol)) { throw new InvalidArgumentError("Invalid URL protocol: the URL must start with `http:` or `https:`."); @@ -24090,39 +24090,39 @@ var require_diagnostics2 = __commonJS({ }); diagnosticsChannel.channel("undici:client:sendHeaders").subscribe((evt) => { const { - request: { method, path: path12, origin } + request: { method, path: path11, origin } } = evt; - debuglog("sending request to %s %s/%s", method, origin, path12); + debuglog("sending request to %s %s/%s", method, origin, path11); }); diagnosticsChannel.channel("undici:request:headers").subscribe((evt) => { const { - request: { method, path: path12, origin }, + request: { method, path: path11, origin }, response: { statusCode } } = evt; debuglog( "received response to %s %s/%s - HTTP %d", method, origin, - path12, + path11, statusCode ); }); diagnosticsChannel.channel("undici:request:trailers").subscribe((evt) => { const { - request: { method, path: path12, origin } + request: { method, path: path11, origin } } = evt; - debuglog("trailers received from %s %s/%s", method, origin, path12); + debuglog("trailers received from %s %s/%s", method, origin, path11); }); diagnosticsChannel.channel("undici:request:error").subscribe((evt) => { const { - request: { method, path: path12, origin }, + request: { method, path: path11, origin }, error: error3 } = evt; debuglog( "request to %s %s/%s errored - %s", method, origin, - path12, + path11, error3.message ); }); @@ -24171,9 +24171,9 @@ var require_diagnostics2 = __commonJS({ }); diagnosticsChannel.channel("undici:client:sendHeaders").subscribe((evt) => { const { - request: { method, path: path12, origin } + request: { method, path: path11, origin } } = evt; - debuglog("sending request to %s %s/%s", method, origin, path12); + debuglog("sending request to %s %s/%s", method, origin, path11); }); } diagnosticsChannel.channel("undici:websocket:open").subscribe((evt) => { @@ -24236,7 +24236,7 @@ var require_request3 = __commonJS({ var kHandler = /* @__PURE__ */ Symbol("handler"); var Request = class { constructor(origin, { - path: path12, + path: path11, method, body, headers, @@ -24251,11 +24251,11 @@ var require_request3 = __commonJS({ expectContinue, servername }, handler2) { - if (typeof path12 !== "string") { + if (typeof path11 !== "string") { throw new InvalidArgumentError("path must be a string"); - } else if (path12[0] !== "/" && !(path12.startsWith("http://") || path12.startsWith("https://")) && method !== "CONNECT") { + } else if (path11[0] !== "/" && !(path11.startsWith("http://") || path11.startsWith("https://")) && method !== "CONNECT") { throw new InvalidArgumentError("path must be an absolute URL or start with a slash"); - } else if (invalidPathRegex.test(path12)) { + } else if (invalidPathRegex.test(path11)) { throw new InvalidArgumentError("invalid request path"); } if (typeof method !== "string") { @@ -24318,7 +24318,7 @@ var require_request3 = __commonJS({ this.completed = false; this.aborted = false; this.upgrade = upgrade || null; - this.path = query ? buildURL(path12, query) : path12; + this.path = query ? 
buildURL(path11, query) : path11; this.origin = origin; this.idempotent = idempotent == null ? method === "HEAD" || method === "GET" : idempotent; this.blocking = blocking == null ? false : blocking; @@ -24630,9 +24630,9 @@ var require_dispatcher_base2 = __commonJS({ } close(callback) { if (callback === void 0) { - return new Promise((resolve6, reject) => { + return new Promise((resolve5, reject) => { this.close((err, data) => { - return err ? reject(err) : resolve6(data); + return err ? reject(err) : resolve5(data); }); }); } @@ -24670,12 +24670,12 @@ var require_dispatcher_base2 = __commonJS({ err = null; } if (callback === void 0) { - return new Promise((resolve6, reject) => { + return new Promise((resolve5, reject) => { this.destroy(err, (err2, data) => { return err2 ? ( /* istanbul ignore next: should never error */ reject(err2) - ) : resolve6(data); + ) : resolve5(data); }); }); } @@ -26942,8 +26942,8 @@ var require_util10 = __commonJS({ function createDeferredPromise() { let res; let rej; - const promise = new Promise((resolve6, reject) => { - res = resolve6; + const promise = new Promise((resolve5, reject) => { + res = resolve5; rej = reject; }); return { promise, resolve: res, reject: rej }; @@ -28831,7 +28831,7 @@ var require_client_h12 = __commonJS({ return method !== "GET" && method !== "HEAD" && method !== "OPTIONS" && method !== "TRACE" && method !== "CONNECT"; } function writeH1(client, request2) { - const { method, path: path12, host, upgrade, blocking, reset } = request2; + const { method, path: path11, host, upgrade, blocking, reset } = request2; let { body, headers, contentLength } = request2; const expectsPayload = method === "PUT" || method === "POST" || method === "PATCH" || method === "QUERY" || method === "PROPFIND" || method === "PROPPATCH"; if (util.isFormDataLike(body)) { @@ -28897,7 +28897,7 @@ var require_client_h12 = __commonJS({ if (blocking) { socket[kBlocking] = true; } - let header = `${method} ${path12} HTTP/1.1\r + let header = `${method} ${path11} HTTP/1.1\r `; if (typeof host === "string") { header += `host: ${host}\r @@ -29084,12 +29084,12 @@ upgrade: ${upgrade}\r cb(); } } - const waitForDrain = () => new Promise((resolve6, reject) => { + const waitForDrain = () => new Promise((resolve5, reject) => { assert(callback === null); if (socket[kError]) { reject(socket[kError]); } else { - callback = resolve6; + callback = resolve5; } }); socket.on("close", onDrain).on("drain", onDrain); @@ -29423,7 +29423,7 @@ var require_client_h22 = __commonJS({ } function writeH2(client, request2) { const session = client[kHTTP2Session]; - const { method, path: path12, host, upgrade, expectContinue, signal, headers: reqHeaders } = request2; + const { method, path: path11, host, upgrade, expectContinue, signal, headers: reqHeaders } = request2; let { body } = request2; if (upgrade) { util.errorRequest(client, request2, new Error("Upgrade not supported for H2")); @@ -29490,7 +29490,7 @@ var require_client_h22 = __commonJS({ }); return true; } - headers[HTTP2_HEADER_PATH] = path12; + headers[HTTP2_HEADER_PATH] = path11; headers[HTTP2_HEADER_SCHEME] = "https"; const expectsPayload = method === "PUT" || method === "POST" || method === "PATCH"; if (body && typeof body.read === "function") { @@ -29726,12 +29726,12 @@ var require_client_h22 = __commonJS({ cb(); } } - const waitForDrain = () => new Promise((resolve6, reject) => { + const waitForDrain = () => new Promise((resolve5, reject) => { assert(callback === null); if (socket[kError]) { reject(socket[kError]); } else { - 
callback = resolve6; + callback = resolve5; } }); h2stream.on("close", onDrain).on("drain", onDrain); @@ -29843,9 +29843,9 @@ var require_redirect_handler2 = __commonJS({ return this.handler.onHeaders(statusCode, headers, resume, statusText); } const { origin, pathname, search } = util.parseURL(new URL(this.location, this.opts.origin && new URL(this.opts.path, this.opts.origin))); - const path12 = search ? `${pathname}${search}` : pathname; + const path11 = search ? `${pathname}${search}` : pathname; this.opts.headers = cleanRequestHeaders(this.opts.headers, statusCode === 303, this.opts.origin !== origin); - this.opts.path = path12; + this.opts.path = path11; this.opts.origin = origin; this.opts.maxRedirections = 0; this.opts.query = null; @@ -30208,16 +30208,16 @@ var require_client2 = __commonJS({ return this[kNeedDrain] < 2; } async [kClose]() { - return new Promise((resolve6) => { + return new Promise((resolve5) => { if (this[kSize]) { - this[kClosedResolve] = resolve6; + this[kClosedResolve] = resolve5; } else { - resolve6(null); + resolve5(null); } }); } async [kDestroy](err) { - return new Promise((resolve6) => { + return new Promise((resolve5) => { const requests = this[kQueue].splice(this[kPendingIdx]); for (let i = 0; i < requests.length; i++) { const request2 = requests[i]; @@ -30228,7 +30228,7 @@ var require_client2 = __commonJS({ this[kClosedResolve](); this[kClosedResolve] = null; } - resolve6(null); + resolve5(null); }; if (this[kHTTPContext]) { this[kHTTPContext].destroy(err, callback); @@ -30279,7 +30279,7 @@ var require_client2 = __commonJS({ }); } try { - const socket = await new Promise((resolve6, reject) => { + const socket = await new Promise((resolve5, reject) => { client[kConnector]({ host, hostname, @@ -30291,7 +30291,7 @@ var require_client2 = __commonJS({ if (err) { reject(err); } else { - resolve6(socket2); + resolve5(socket2); } }); }); @@ -30627,8 +30627,8 @@ var require_pool_base2 = __commonJS({ if (this[kQueue].isEmpty()) { await Promise.all(this[kClients].map((c) => c.close())); } else { - await new Promise((resolve6) => { - this[kClosedResolve] = resolve6; + await new Promise((resolve5) => { + this[kClosedResolve] = resolve5; }); } } @@ -31079,10 +31079,10 @@ var require_proxy_agent2 = __commonJS({ }; const { origin, - path: path12 = "/", + path: path11 = "/", headers = {} } = opts; - opts.path = origin + path12; + opts.path = origin + path11; if (!("host" in headers) && !("Host" in headers)) { const { host } = new URL2(origin); headers.host = host; @@ -31843,7 +31843,7 @@ var require_readable2 = __commonJS({ if (this._readableState.closeEmitted) { return null; } - return await new Promise((resolve6, reject) => { + return await new Promise((resolve5, reject) => { if (this[kContentLength] > limit) { this.destroy(new AbortError()); } @@ -31856,7 +31856,7 @@ var require_readable2 = __commonJS({ if (signal?.aborted) { reject(signal.reason ?? 
new AbortError()); } else { - resolve6(null); + resolve5(null); } }).on("error", noop3).on("data", function(chunk) { limit -= chunk.length; @@ -31875,7 +31875,7 @@ var require_readable2 = __commonJS({ } async function consume(stream2, type2) { assert(!stream2[kConsume]); - return new Promise((resolve6, reject) => { + return new Promise((resolve5, reject) => { if (isUnusable(stream2)) { const rState = stream2._readableState; if (rState.destroyed && rState.closeEmitted === false) { @@ -31892,7 +31892,7 @@ var require_readable2 = __commonJS({ stream2[kConsume] = { type: type2, stream: stream2, - resolve: resolve6, + resolve: resolve5, reject, length: 0, body: [] @@ -31962,18 +31962,18 @@ var require_readable2 = __commonJS({ return buffer; } function consumeEnd(consume2) { - const { type: type2, body, resolve: resolve6, stream: stream2, length } = consume2; + const { type: type2, body, resolve: resolve5, stream: stream2, length } = consume2; try { if (type2 === "text") { - resolve6(chunksDecode(body, length)); + resolve5(chunksDecode(body, length)); } else if (type2 === "json") { - resolve6(JSON.parse(chunksDecode(body, length))); + resolve5(JSON.parse(chunksDecode(body, length))); } else if (type2 === "arrayBuffer") { - resolve6(chunksConcat(body, length).buffer); + resolve5(chunksConcat(body, length).buffer); } else if (type2 === "blob") { - resolve6(new Blob(body, { type: stream2[kContentType] })); + resolve5(new Blob(body, { type: stream2[kContentType] })); } else if (type2 === "bytes") { - resolve6(chunksConcat(body, length)); + resolve5(chunksConcat(body, length)); } consumeFinish(consume2); } catch (err) { @@ -32230,9 +32230,9 @@ var require_api_request2 = __commonJS({ }; function request2(opts, callback) { if (callback === void 0) { - return new Promise((resolve6, reject) => { + return new Promise((resolve5, reject) => { request2.call(this, opts, (err, data) => { - return err ? reject(err) : resolve6(data); + return err ? reject(err) : resolve5(data); }); }); } @@ -32455,9 +32455,9 @@ var require_api_stream2 = __commonJS({ }; function stream2(opts, factory, callback) { if (callback === void 0) { - return new Promise((resolve6, reject) => { + return new Promise((resolve5, reject) => { stream2.call(this, opts, factory, (err, data) => { - return err ? reject(err) : resolve6(data); + return err ? reject(err) : resolve5(data); }); }); } @@ -32742,9 +32742,9 @@ var require_api_upgrade2 = __commonJS({ }; function upgrade(opts, callback) { if (callback === void 0) { - return new Promise((resolve6, reject) => { + return new Promise((resolve5, reject) => { upgrade.call(this, opts, (err, data) => { - return err ? reject(err) : resolve6(data); + return err ? reject(err) : resolve5(data); }); }); } @@ -32836,9 +32836,9 @@ var require_api_connect2 = __commonJS({ }; function connect(opts, callback) { if (callback === void 0) { - return new Promise((resolve6, reject) => { + return new Promise((resolve5, reject) => { connect.call(this, opts, (err, data) => { - return err ? reject(err) : resolve6(data); + return err ? 
reject(err) : resolve5(data); }); }); } @@ -33003,20 +33003,20 @@ var require_mock_utils2 = __commonJS({ } return true; } - function safeUrl(path12) { - if (typeof path12 !== "string") { - return path12; + function safeUrl(path11) { + if (typeof path11 !== "string") { + return path11; } - const pathSegments = path12.split("?"); + const pathSegments = path11.split("?"); if (pathSegments.length !== 2) { - return path12; + return path11; } const qp = new URLSearchParams(pathSegments.pop()); qp.sort(); return [...pathSegments, qp.toString()].join("?"); } - function matchKey(mockDispatch2, { path: path12, method, body, headers }) { - const pathMatch = matchValue(mockDispatch2.path, path12); + function matchKey(mockDispatch2, { path: path11, method, body, headers }) { + const pathMatch = matchValue(mockDispatch2.path, path11); const methodMatch = matchValue(mockDispatch2.method, method); const bodyMatch = typeof mockDispatch2.body !== "undefined" ? matchValue(mockDispatch2.body, body) : true; const headersMatch = matchHeaders(mockDispatch2, headers); @@ -33038,7 +33038,7 @@ var require_mock_utils2 = __commonJS({ function getMockDispatch(mockDispatches, key) { const basePath = key.query ? buildURL(key.path, key.query) : key.path; const resolvedPath = typeof basePath === "string" ? safeUrl(basePath) : basePath; - let matchedMockDispatches = mockDispatches.filter(({ consumed }) => !consumed).filter(({ path: path12 }) => matchValue(safeUrl(path12), resolvedPath)); + let matchedMockDispatches = mockDispatches.filter(({ consumed }) => !consumed).filter(({ path: path11 }) => matchValue(safeUrl(path11), resolvedPath)); if (matchedMockDispatches.length === 0) { throw new MockNotMatchedError(`Mock dispatch not matched for path '${resolvedPath}'`); } @@ -33076,9 +33076,9 @@ var require_mock_utils2 = __commonJS({ } } function buildKey(opts) { - const { path: path12, method, body, headers, query } = opts; + const { path: path11, method, body, headers, query } = opts; return { - path: path12, + path: path11, method, body, headers, @@ -33541,10 +33541,10 @@ var require_pending_interceptors_formatter2 = __commonJS({ } format(pendingInterceptors) { const withPrettyHeaders = pendingInterceptors.map( - ({ method, path: path12, data: { statusCode }, persist, times, timesInvoked, origin }) => ({ + ({ method, path: path11, data: { statusCode }, persist, times, timesInvoked, origin }) => ({ Method: method, Origin: origin, - Path: path12, + Path: path11, "Status code": statusCode, Persistent: persist ? 
PERSISTENT : NOT_PERSISTENT, Invocations: timesInvoked, @@ -36700,7 +36700,7 @@ var require_fetch2 = __commonJS({ function dispatch({ body }) { const url2 = requestCurrentURL(request2); const agent = fetchParams.controller.dispatcher; - return new Promise((resolve6, reject) => agent.dispatch( + return new Promise((resolve5, reject) => agent.dispatch( { path: url2.pathname + url2.search, origin: url2.origin, @@ -36776,7 +36776,7 @@ var require_fetch2 = __commonJS({ } } const onError = this.onError.bind(this); - resolve6({ + resolve5({ status, statusText, headersList, @@ -36822,7 +36822,7 @@ var require_fetch2 = __commonJS({ for (let i = 0; i < rawHeaders.length; i += 2) { headersList.append(bufferToLowerCasedHeaderName(rawHeaders[i]), rawHeaders[i + 1].toString("latin1"), true); } - resolve6({ + resolve5({ status, statusText: STATUS_CODES[status], headersList, @@ -38425,9 +38425,9 @@ var require_util14 = __commonJS({ } } } - function validateCookiePath(path12) { - for (let i = 0; i < path12.length; ++i) { - const code = path12.charCodeAt(i); + function validateCookiePath(path11) { + for (let i = 0; i < path11.length; ++i) { + const code = path11.charCodeAt(i); if (code < 32 || // exclude CTLs (0-31) code === 127 || // DEL code === 59) { @@ -40415,8 +40415,8 @@ var require_util16 = __commonJS({ return true; } function delay2(ms) { - return new Promise((resolve6) => { - setTimeout(resolve6, ms).unref(); + return new Promise((resolve5) => { + setTimeout(resolve5, ms).unref(); }); } module2.exports = { @@ -41021,11 +41021,11 @@ var require_undici2 = __commonJS({ if (typeof opts.path !== "string") { throw new InvalidArgumentError("invalid opts.path"); } - let path12 = opts.path; + let path11 = opts.path; if (!opts.path.startsWith("/")) { - path12 = `/${path12}`; + path11 = `/${path11}`; } - url2 = new URL(util.parseOrigin(url2).origin + path12); + url2 = new URL(util.parseOrigin(url2).origin + path11); } else { if (!opts) { opts = typeof url2 === "object" ? url2 : {}; @@ -41139,11 +41139,11 @@ var require_utils4 = __commonJS({ })(); var __awaiter2 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { function adopt(value) { - return value instanceof P ? value : new P(function(resolve6) { - resolve6(value); + return value instanceof P ? value : new P(function(resolve5) { + resolve5(value); }); } - return new (P || (P = Promise))(function(resolve6, reject) { + return new (P || (P = Promise))(function(resolve5, reject) { function fulfilled(value) { try { step(generator.next(value)); @@ -41159,7 +41159,7 @@ var require_utils4 = __commonJS({ } } function step(result) { - result.done ? resolve6(result.value) : adopt(result.value).then(fulfilled, rejected); + result.done ? 
resolve5(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); @@ -47840,8 +47840,8 @@ var require_light = __commonJS({ return this.Promise.resolve(); } yieldLoop(t = 0) { - return new this.Promise(function(resolve6, reject) { - return setTimeout(resolve6, t); + return new this.Promise(function(resolve5, reject) { + return setTimeout(resolve5, t); }); } computePenalty() { @@ -48052,15 +48052,15 @@ var require_light = __commonJS({ return this._queue.length === 0; } async _tryToRun() { - var args, cb, error3, reject, resolve6, returned, task; + var args, cb, error3, reject, resolve5, returned, task; if (this._running < 1 && this._queue.length > 0) { this._running++; - ({ task, args, resolve: resolve6, reject } = this._queue.shift()); + ({ task, args, resolve: resolve5, reject } = this._queue.shift()); cb = await (async function() { try { returned = await task(...args); return function() { - return resolve6(returned); + return resolve5(returned); }; } catch (error1) { error3 = error1; @@ -48075,13 +48075,13 @@ var require_light = __commonJS({ } } schedule(task, ...args) { - var promise, reject, resolve6; - resolve6 = reject = null; + var promise, reject, resolve5; + resolve5 = reject = null; promise = new this.Promise(function(_resolve, _reject) { - resolve6 = _resolve; + resolve5 = _resolve; return reject = _reject; }); - this._queue.push({ task, args, resolve: resolve6, reject }); + this._queue.push({ task, args, resolve: resolve5, reject }); this._tryToRun(); return promise; } @@ -48482,14 +48482,14 @@ var require_light = __commonJS({ counts = this._states.counts; return counts[0] + counts[1] + counts[2] + counts[3] === at; }; - return new this.Promise((resolve6, reject) => { + return new this.Promise((resolve5, reject) => { if (finished()) { - return resolve6(); + return resolve5(); } else { return this.on("done", () => { if (finished()) { this.removeAllListeners("done"); - return resolve6(); + return resolve5(); } }); } @@ -48582,9 +48582,9 @@ var require_light = __commonJS({ options = parser$5.load(options, this.jobDefaults); } task = (...args2) => { - return new this.Promise(function(resolve6, reject) { + return new this.Promise(function(resolve5, reject) { return fn(...args2, function(...args3) { - return (args3[0] != null ? reject : resolve6)(args3); + return (args3[0] != null ? reject : resolve5)(args3); }); }); }; @@ -48830,21 +48830,21 @@ var require_internal_path_helper = __commonJS({ return mod && mod.__esModule ? 
mod : { "default": mod }; }; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.dirname = dirname3; + exports2.dirname = dirname2; exports2.ensureAbsoluteRoot = ensureAbsoluteRoot; exports2.hasAbsoluteRoot = hasAbsoluteRoot; exports2.hasRoot = hasRoot; exports2.normalizeSeparators = normalizeSeparators; exports2.safeTrimTrailingSeparator = safeTrimTrailingSeparator; - var path12 = __importStar2(require("path")); + var path11 = __importStar2(require("path")); var assert_1 = __importDefault2(require("assert")); var IS_WINDOWS = process.platform === "win32"; - function dirname3(p) { + function dirname2(p) { p = safeTrimTrailingSeparator(p); if (IS_WINDOWS && /^\\\\[^\\]+(\\[^\\]+)?$/.test(p)) { return p; } - let result = path12.dirname(p); + let result = path11.dirname(p); if (IS_WINDOWS && /^\\\\[^\\]+\\[^\\]+\\$/.test(result)) { result = safeTrimTrailingSeparator(result); } @@ -48881,7 +48881,7 @@ var require_internal_path_helper = __commonJS({ (0, assert_1.default)(hasAbsoluteRoot(root), `ensureAbsoluteRoot parameter 'root' must have an absolute root`); if (root.endsWith("/") || IS_WINDOWS && root.endsWith("\\")) { } else { - root += path12.sep; + root += path11.sep; } return root + itemPath; } @@ -48915,10 +48915,10 @@ var require_internal_path_helper = __commonJS({ return ""; } p = normalizeSeparators(p); - if (!p.endsWith(path12.sep)) { + if (!p.endsWith(path11.sep)) { return p; } - if (p === path12.sep) { + if (p === path11.sep) { return p; } if (IS_WINDOWS && /^[A-Z]:\\$/i.test(p)) { @@ -49263,7 +49263,7 @@ var require_minimatch = __commonJS({ "node_modules/minimatch/minimatch.js"(exports2, module2) { module2.exports = minimatch; minimatch.Minimatch = Minimatch; - var path12 = (function() { + var path11 = (function() { try { return require("path"); } catch (e) { @@ -49271,7 +49271,7 @@ var require_minimatch = __commonJS({ })() || { sep: "/" }; - minimatch.sep = path12.sep; + minimatch.sep = path11.sep; var GLOBSTAR = minimatch.GLOBSTAR = Minimatch.GLOBSTAR = {}; var expand2 = require_brace_expansion(); var plTypes = { @@ -49360,8 +49360,8 @@ var require_minimatch = __commonJS({ assertValidPattern(pattern); if (!options) options = {}; pattern = pattern.trim(); - if (!options.allowWindowsEscape && path12.sep !== "/") { - pattern = pattern.split(path12.sep).join("/"); + if (!options.allowWindowsEscape && path11.sep !== "/") { + pattern = pattern.split(path11.sep).join("/"); } this.options = options; this.set = []; @@ -49730,8 +49730,8 @@ var require_minimatch = __commonJS({ if (this.empty) return f === ""; if (f === "/" && partial) return true; var options = this.options; - if (path12.sep !== "/") { - f = f.split(path12.sep).join("/"); + if (path11.sep !== "/") { + f = f.split(path11.sep).join("/"); } f = f.split(slashSplit); this.debug(this.pattern, "split", f); @@ -49877,7 +49877,7 @@ var require_internal_path = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.Path = void 0; - var path12 = __importStar2(require("path")); + var path11 = __importStar2(require("path")); var pathHelper = __importStar2(require_internal_path_helper()); var assert_1 = __importDefault2(require("assert")); var IS_WINDOWS = process.platform === "win32"; @@ -49892,12 +49892,12 @@ var require_internal_path = __commonJS({ (0, assert_1.default)(itemPath, `Parameter 'itemPath' must not be empty`); itemPath = pathHelper.safeTrimTrailingSeparator(itemPath); if (!pathHelper.hasRoot(itemPath)) { - this.segments = itemPath.split(path12.sep); + this.segments = 
itemPath.split(path11.sep); } else { let remaining = itemPath; let dir = pathHelper.dirname(remaining); while (dir !== remaining) { - const basename = path12.basename(remaining); + const basename = path11.basename(remaining); this.segments.unshift(basename); remaining = dir; dir = pathHelper.dirname(remaining); @@ -49915,7 +49915,7 @@ var require_internal_path = __commonJS({ (0, assert_1.default)(segment === pathHelper.dirname(segment), `Parameter 'itemPath' root segment contains information for multiple segments`); this.segments.push(segment); } else { - (0, assert_1.default)(!segment.includes(path12.sep), `Parameter 'itemPath' contains unexpected path separators`); + (0, assert_1.default)(!segment.includes(path11.sep), `Parameter 'itemPath' contains unexpected path separators`); this.segments.push(segment); } } @@ -49926,12 +49926,12 @@ var require_internal_path = __commonJS({ */ toString() { let result = this.segments[0]; - let skipSlash = result.endsWith(path12.sep) || IS_WINDOWS && /^[A-Z]:$/i.test(result); + let skipSlash = result.endsWith(path11.sep) || IS_WINDOWS && /^[A-Z]:$/i.test(result); for (let i = 1; i < this.segments.length; i++) { if (skipSlash) { skipSlash = false; } else { - result += path12.sep; + result += path11.sep; } result += this.segments[i]; } @@ -49989,7 +49989,7 @@ var require_internal_pattern = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.Pattern = void 0; var os2 = __importStar2(require("os")); - var path12 = __importStar2(require("path")); + var path11 = __importStar2(require("path")); var pathHelper = __importStar2(require_internal_path_helper()); var assert_1 = __importDefault2(require("assert")); var minimatch_1 = require_minimatch(); @@ -50018,7 +50018,7 @@ var require_internal_pattern = __commonJS({ } pattern = _Pattern.fixupPattern(pattern, homedir); this.segments = new internal_path_1.Path(pattern).segments; - this.trailingSeparator = pathHelper.normalizeSeparators(pattern).endsWith(path12.sep); + this.trailingSeparator = pathHelper.normalizeSeparators(pattern).endsWith(path11.sep); pattern = pathHelper.safeTrimTrailingSeparator(pattern); let foundGlob = false; const searchSegments = this.segments.map((x) => _Pattern.getLiteral(x)).filter((x) => !foundGlob && !(foundGlob = x === "")); @@ -50042,8 +50042,8 @@ var require_internal_pattern = __commonJS({ match(itemPath) { if (this.segments[this.segments.length - 1] === "**") { itemPath = pathHelper.normalizeSeparators(itemPath); - if (!itemPath.endsWith(path12.sep) && this.isImplicitPattern === false) { - itemPath = `${itemPath}${path12.sep}`; + if (!itemPath.endsWith(path11.sep) && this.isImplicitPattern === false) { + itemPath = `${itemPath}${path11.sep}`; } } else { itemPath = pathHelper.safeTrimTrailingSeparator(itemPath); @@ -50078,9 +50078,9 @@ var require_internal_pattern = __commonJS({ (0, assert_1.default)(literalSegments.every((x, i) => (x !== "." || i === 0) && x !== ".."), `Invalid pattern '${pattern}'. Relative pathing '.' and '..' is not allowed.`); (0, assert_1.default)(!pathHelper.hasRoot(pattern) || literalSegments[0], `Invalid pattern '${pattern}'. Root segment must not contain globs.`); pattern = pathHelper.normalizeSeparators(pattern); - if (pattern === "." || pattern.startsWith(`.${path12.sep}`)) { + if (pattern === "." 
|| pattern.startsWith(`.${path11.sep}`)) { pattern = _Pattern.globEscape(process.cwd()) + pattern.substr(1); - } else if (pattern === "~" || pattern.startsWith(`~${path12.sep}`)) { + } else if (pattern === "~" || pattern.startsWith(`~${path11.sep}`)) { homedir = homedir || os2.homedir(); (0, assert_1.default)(homedir, "Unable to determine HOME directory"); (0, assert_1.default)(pathHelper.hasAbsoluteRoot(homedir), `Expected HOME directory to be a rooted path. Actual '${homedir}'`); @@ -50164,8 +50164,8 @@ var require_internal_search_state = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.SearchState = void 0; var SearchState = class { - constructor(path12, level) { - this.path = path12; + constructor(path11, level) { + this.path = path11; this.level = level; } }; @@ -50216,11 +50216,11 @@ var require_internal_globber = __commonJS({ })(); var __awaiter2 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { function adopt(value) { - return value instanceof P ? value : new P(function(resolve6) { - resolve6(value); + return value instanceof P ? value : new P(function(resolve5) { + resolve5(value); }); } - return new (P || (P = Promise))(function(resolve6, reject) { + return new (P || (P = Promise))(function(resolve5, reject) { function fulfilled(value) { try { step(generator.next(value)); @@ -50236,7 +50236,7 @@ var require_internal_globber = __commonJS({ } } function step(result) { - result.done ? resolve6(result.value) : adopt(result.value).then(fulfilled, rejected); + result.done ? resolve5(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); @@ -50249,14 +50249,14 @@ var require_internal_globber = __commonJS({ }, i); function verb(n) { i[n] = o[n] && function(v) { - return new Promise(function(resolve6, reject) { - v = o[n](v), settle(resolve6, reject, v.done, v.value); + return new Promise(function(resolve5, reject) { + v = o[n](v), settle(resolve5, reject, v.done, v.value); }); }; } - function settle(resolve6, reject, d, v) { + function settle(resolve5, reject, d, v) { Promise.resolve(v).then(function(v2) { - resolve6({ value: v2, done: d }); + resolve5({ value: v2, done: d }); }, reject); } }; @@ -50307,9 +50307,9 @@ var require_internal_globber = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.DefaultGlobber = void 0; var core12 = __importStar2(require_core()); - var fs12 = __importStar2(require("fs")); + var fs11 = __importStar2(require("fs")); var globOptionsHelper = __importStar2(require_internal_glob_options_helper()); - var path12 = __importStar2(require("path")); + var path11 = __importStar2(require("path")); var patternHelper = __importStar2(require_internal_pattern_helper()); var internal_match_kind_1 = require_internal_match_kind(); var internal_pattern_1 = require_internal_pattern(); @@ -50361,7 +50361,7 @@ var require_internal_globber = __commonJS({ for (const searchPath of patternHelper.getSearchPaths(patterns)) { core12.debug(`Search path '${searchPath}'`); try { - yield __await2(fs12.promises.lstat(searchPath)); + yield __await2(fs11.promises.lstat(searchPath)); } catch (err) { if (err.code === "ENOENT") { continue; @@ -50385,7 +50385,7 @@ var require_internal_globber = __commonJS({ if (!stats) { continue; } - if (options.excludeHiddenFiles && path12.basename(item.path).match(/^\./)) { + if (options.excludeHiddenFiles && path11.basename(item.path).match(/^\./)) { continue; } if 
(stats.isDirectory()) { @@ -50395,7 +50395,7 @@ var require_internal_globber = __commonJS({ continue; } const childLevel = item.level + 1; - const childItems = (yield __await2(fs12.promises.readdir(item.path))).map((x) => new internal_search_state_1.SearchState(path12.join(item.path, x), childLevel)); + const childItems = (yield __await2(fs11.promises.readdir(item.path))).map((x) => new internal_search_state_1.SearchState(path11.join(item.path, x), childLevel)); stack.push(...childItems.reverse()); } else if (match & internal_match_kind_1.MatchKind.File) { yield yield __await2(item.path); @@ -50430,7 +50430,7 @@ var require_internal_globber = __commonJS({ let stats; if (options.followSymbolicLinks) { try { - stats = yield fs12.promises.stat(item.path); + stats = yield fs11.promises.stat(item.path); } catch (err) { if (err.code === "ENOENT") { if (options.omitBrokenSymbolicLinks) { @@ -50442,10 +50442,10 @@ var require_internal_globber = __commonJS({ throw err; } } else { - stats = yield fs12.promises.lstat(item.path); + stats = yield fs11.promises.lstat(item.path); } if (stats.isDirectory() && options.followSymbolicLinks) { - const realPath = yield fs12.promises.realpath(item.path); + const realPath = yield fs11.promises.realpath(item.path); while (traversalChain.length >= item.level) { traversalChain.pop(); } @@ -50506,11 +50506,11 @@ var require_internal_hash_files = __commonJS({ })(); var __awaiter2 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { function adopt(value) { - return value instanceof P ? value : new P(function(resolve6) { - resolve6(value); + return value instanceof P ? value : new P(function(resolve5) { + resolve5(value); }); } - return new (P || (P = Promise))(function(resolve6, reject) { + return new (P || (P = Promise))(function(resolve5, reject) { function fulfilled(value) { try { step(generator.next(value)); @@ -50526,7 +50526,7 @@ var require_internal_hash_files = __commonJS({ } } function step(result) { - result.done ? resolve6(result.value) : adopt(result.value).then(fulfilled, rejected); + result.done ? 
resolve5(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); @@ -50539,14 +50539,14 @@ var require_internal_hash_files = __commonJS({ }, i); function verb(n) { i[n] = o[n] && function(v) { - return new Promise(function(resolve6, reject) { - v = o[n](v), settle(resolve6, reject, v.done, v.value); + return new Promise(function(resolve5, reject) { + v = o[n](v), settle(resolve5, reject, v.done, v.value); }); }; } - function settle(resolve6, reject, d, v) { + function settle(resolve5, reject, d, v) { Promise.resolve(v).then(function(v2) { - resolve6({ value: v2, done: d }); + resolve5({ value: v2, done: d }); }, reject); } }; @@ -50554,10 +50554,10 @@ var require_internal_hash_files = __commonJS({ exports2.hashFiles = hashFiles; var crypto2 = __importStar2(require("crypto")); var core12 = __importStar2(require_core()); - var fs12 = __importStar2(require("fs")); + var fs11 = __importStar2(require("fs")); var stream2 = __importStar2(require("stream")); var util = __importStar2(require("util")); - var path12 = __importStar2(require("path")); + var path11 = __importStar2(require("path")); function hashFiles(globber_1, currentWorkspace_1) { return __awaiter2(this, arguments, void 0, function* (globber, currentWorkspace, verbose = false) { var _a, e_1, _b, _c; @@ -50573,17 +50573,17 @@ var require_internal_hash_files = __commonJS({ _e = false; const file = _c; writeDelegate(file); - if (!file.startsWith(`${githubWorkspace}${path12.sep}`)) { + if (!file.startsWith(`${githubWorkspace}${path11.sep}`)) { writeDelegate(`Ignore '${file}' since it is not under GITHUB_WORKSPACE.`); continue; } - if (fs12.statSync(file).isDirectory()) { + if (fs11.statSync(file).isDirectory()) { writeDelegate(`Skip directory '${file}'.`); continue; } const hash2 = crypto2.createHash("sha256"); const pipeline = util.promisify(stream2.pipeline); - yield pipeline(fs12.createReadStream(file), hash2); + yield pipeline(fs11.createReadStream(file), hash2); result.write(hash2.digest()); count++; if (!hasMatch) { @@ -50618,11 +50618,11 @@ var require_glob = __commonJS({ "use strict"; var __awaiter2 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { function adopt(value) { - return value instanceof P ? value : new P(function(resolve6) { - resolve6(value); + return value instanceof P ? value : new P(function(resolve5) { + resolve5(value); }); } - return new (P || (P = Promise))(function(resolve6, reject) { + return new (P || (P = Promise))(function(resolve5, reject) { function fulfilled(value) { try { step(generator.next(value)); @@ -50638,7 +50638,7 @@ var require_glob = __commonJS({ } } function step(result) { - result.done ? resolve6(result.value) : adopt(result.value).then(fulfilled, rejected); + result.done ? resolve5(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); @@ -51898,11 +51898,11 @@ var require_cacheUtils = __commonJS({ })(); var __awaiter2 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { function adopt(value) { - return value instanceof P ? value : new P(function(resolve6) { - resolve6(value); + return value instanceof P ? 
value : new P(function(resolve5) { + resolve5(value); }); } - return new (P || (P = Promise))(function(resolve6, reject) { + return new (P || (P = Promise))(function(resolve5, reject) { function fulfilled(value) { try { step(generator.next(value)); @@ -51918,7 +51918,7 @@ var require_cacheUtils = __commonJS({ } } function step(result) { - result.done ? resolve6(result.value) : adopt(result.value).then(fulfilled, rejected); + result.done ? resolve5(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); @@ -51931,14 +51931,14 @@ var require_cacheUtils = __commonJS({ }, i); function verb(n) { i[n] = o[n] && function(v) { - return new Promise(function(resolve6, reject) { - v = o[n](v), settle(resolve6, reject, v.done, v.value); + return new Promise(function(resolve5, reject) { + v = o[n](v), settle(resolve5, reject, v.done, v.value); }); }; } - function settle(resolve6, reject, d, v) { + function settle(resolve5, reject, d, v) { Promise.resolve(v).then(function(v2) { - resolve6({ value: v2, done: d }); + resolve5({ value: v2, done: d }); }, reject); } }; @@ -51958,8 +51958,8 @@ var require_cacheUtils = __commonJS({ var glob = __importStar2(require_glob()); var io6 = __importStar2(require_io()); var crypto2 = __importStar2(require("crypto")); - var fs12 = __importStar2(require("fs")); - var path12 = __importStar2(require("path")); + var fs11 = __importStar2(require("fs")); + var path11 = __importStar2(require("path")); var semver9 = __importStar2(require_semver3()); var util = __importStar2(require("util")); var constants_1 = require_constants12(); @@ -51979,15 +51979,15 @@ var require_cacheUtils = __commonJS({ baseLocation = "/home"; } } - tempDirectory = path12.join(baseLocation, "actions", "temp"); + tempDirectory = path11.join(baseLocation, "actions", "temp"); } - const dest = path12.join(tempDirectory, crypto2.randomUUID()); + const dest = path11.join(tempDirectory, crypto2.randomUUID()); yield io6.mkdirP(dest); return dest; }); } function getArchiveFileSizeInBytes(filePath) { - return fs12.statSync(filePath).size; + return fs11.statSync(filePath).size; } function resolvePaths(patterns) { return __awaiter2(this, void 0, void 0, function* () { @@ -52003,7 +52003,7 @@ var require_cacheUtils = __commonJS({ _c = _g.value; _e = false; const file = _c; - const relativeFile = path12.relative(workspace, file).replace(new RegExp(`\\${path12.sep}`, "g"), "/"); + const relativeFile = path11.relative(workspace, file).replace(new RegExp(`\\${path11.sep}`, "g"), "/"); core12.debug(`Matched: ${relativeFile}`); if (relativeFile === "") { paths.push("."); @@ -52025,7 +52025,7 @@ var require_cacheUtils = __commonJS({ } function unlinkFile(filePath) { return __awaiter2(this, void 0, void 0, function* () { - return util.promisify(fs12.unlink)(filePath); + return util.promisify(fs11.unlink)(filePath); }); } function getVersion(app_1) { @@ -52067,7 +52067,7 @@ var require_cacheUtils = __commonJS({ } function getGnuTarPathOnWindows() { return __awaiter2(this, void 0, void 0, function* () { - if (fs12.existsSync(constants_1.GnuTarPathOnWindows)) { + if (fs11.existsSync(constants_1.GnuTarPathOnWindows)) { return constants_1.GnuTarPathOnWindows; } const versionOutput = yield getVersion("tar"); @@ -52220,11 +52220,11 @@ function __metadata(metadataKey, metadataValue) { } function __awaiter(thisArg, _arguments, P, generator) { function adopt(value) { - return value instanceof P ? 
value : new P(function(resolve6) { - resolve6(value); + return value instanceof P ? value : new P(function(resolve5) { + resolve5(value); }); } - return new (P || (P = Promise))(function(resolve6, reject) { + return new (P || (P = Promise))(function(resolve5, reject) { function fulfilled(value) { try { step(generator.next(value)); @@ -52240,7 +52240,7 @@ function __awaiter(thisArg, _arguments, P, generator) { } } function step(result) { - result.done ? resolve6(result.value) : adopt(result.value).then(fulfilled, rejected); + result.done ? resolve5(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); @@ -52431,14 +52431,14 @@ function __asyncValues(o) { }, i); function verb(n) { i[n] = o[n] && function(v) { - return new Promise(function(resolve6, reject) { - v = o[n](v), settle(resolve6, reject, v.done, v.value); + return new Promise(function(resolve5, reject) { + v = o[n](v), settle(resolve5, reject, v.done, v.value); }); }; } - function settle(resolve6, reject, d, v) { + function settle(resolve5, reject, d, v) { Promise.resolve(v).then(function(v2) { - resolve6({ value: v2, done: d }); + resolve5({ value: v2, done: d }); }, reject); } } @@ -52530,13 +52530,13 @@ function __disposeResources(env) { } return next(); } -function __rewriteRelativeImportExtension(path12, preserveJsx) { - if (typeof path12 === "string" && /^\.\.?\//.test(path12)) { - return path12.replace(/\.(tsx)$|((?:\.d)?)((?:\.[^./]+?)?)\.([cm]?)ts$/i, function(m, tsx, d, ext, cm) { +function __rewriteRelativeImportExtension(path11, preserveJsx) { + if (typeof path11 === "string" && /^\.\.?\//.test(path11)) { + return path11.replace(/\.(tsx)$|((?:\.d)?)((?:\.[^./]+?)?)\.([cm]?)ts$/i, function(m, tsx, d, ext, cm) { return tsx ? preserveJsx ? ".jsx" : ".js" : d && (!ext || !cm) ? m : d + ext + "." + cm.toLowerCase() + "js"; }); } - return path12; + return path11; } var extendStatics, __assign, __createBinding, __setModuleDefault, ownKeys, _SuppressedError, tslib_es6_default; var init_tslib_es6 = __esm({ @@ -53610,9 +53610,9 @@ var require_nodeHttpClient = __commonJS({ if (stream2.readable === false) { return Promise.resolve(); } - return new Promise((resolve6) => { + return new Promise((resolve5) => { const handler2 = () => { - resolve6(); + resolve5(); stream2.removeListener("close", handler2); stream2.removeListener("end", handler2); stream2.removeListener("error", handler2); @@ -53767,8 +53767,8 @@ var require_nodeHttpClient = __commonJS({ headers: request2.headers.toJSON({ preserveCase: true }), ...request2.requestOverrides }; - return new Promise((resolve6, reject) => { - const req = isInsecure ? node_http_1.default.request(options, resolve6) : node_https_1.default.request(options, resolve6); + return new Promise((resolve5, reject) => { + const req = isInsecure ? node_http_1.default.request(options, resolve5) : node_https_1.default.request(options, resolve5); req.once("error", (err) => { reject(new restError_js_1.RestError(err.message, { code: err.code ?? 
restError_js_1.RestError.REQUEST_SEND_ERROR, request: request2 })); }); @@ -53852,7 +53852,7 @@ var require_nodeHttpClient = __commonJS({ return stream2; } function streamToText(stream2) { - return new Promise((resolve6, reject) => { + return new Promise((resolve5, reject) => { const buffer = []; stream2.on("data", (chunk) => { if (Buffer.isBuffer(chunk)) { @@ -53862,7 +53862,7 @@ var require_nodeHttpClient = __commonJS({ } }); stream2.on("end", () => { - resolve6(Buffer.concat(buffer).toString("utf8")); + resolve5(Buffer.concat(buffer).toString("utf8")); }); stream2.on("error", (e) => { if (e && e?.name === "AbortError") { @@ -54140,7 +54140,7 @@ var require_helpers2 = __commonJS({ var AbortError_js_1 = require_AbortError(); var StandardAbortMessage = "The operation was aborted."; function delay2(delayInMs, value, options) { - return new Promise((resolve6, reject) => { + return new Promise((resolve5, reject) => { let timer = void 0; let onAborted = void 0; const rejectOnAbort = () => { @@ -54163,7 +54163,7 @@ var require_helpers2 = __commonJS({ } timer = setTimeout(() => { removeListeners(); - resolve6(value); + resolve5(value); }, delayInMs); if (options?.abortSignal) { options.abortSignal.addEventListener("abort", onAborted); @@ -55326,8 +55326,8 @@ var require_helpers3 = __commonJS({ function req(url2, opts = {}) { const href = typeof url2 === "string" ? url2 : url2.href; const req2 = (href.startsWith("https:") ? https2 : http).request(url2, opts); - const promise = new Promise((resolve6, reject) => { - req2.once("response", resolve6).once("error", reject).end(); + const promise = new Promise((resolve5, reject) => { + req2.once("response", resolve5).once("error", reject).end(); }); req2.then = promise.then.bind(promise); return req2; @@ -55504,7 +55504,7 @@ var require_parse_proxy_response = __commonJS({ var debug_1 = __importDefault2(require_src()); var debug4 = (0, debug_1.default)("https-proxy-agent:parse-proxy-response"); function parseProxyResponse(socket) { - return new Promise((resolve6, reject) => { + return new Promise((resolve5, reject) => { let buffersLength = 0; const buffers = []; function read() { @@ -55570,7 +55570,7 @@ var require_parse_proxy_response = __commonJS({ } debug4("got proxy server response: %o %o", firstLine, headers); cleanup(); - resolve6({ + resolve5({ connect: { statusCode, statusText, @@ -56950,8 +56950,8 @@ var require_getClient = __commonJS({ } const { allowInsecureConnection, httpClient } = clientOptions; const endpointUrl = clientOptions.endpoint ?? endpoint2; - const client = (path12, ...args) => { - const getUrl = (requestOptions) => (0, urlHelpers_js_1.buildRequestUrl)(endpointUrl, path12, args, { allowInsecureConnection, ...requestOptions }); + const client = (path11, ...args) => { + const getUrl = (requestOptions) => (0, urlHelpers_js_1.buildRequestUrl)(endpointUrl, path11, args, { allowInsecureConnection, ...requestOptions }); return { get: (requestOptions = {}) => { return buildOperation("GET", getUrl(requestOptions), pipeline, requestOptions, allowInsecureConnection, httpClient); @@ -57656,7 +57656,7 @@ var require_createAbortablePromise = __commonJS({ var abort_controller_1 = require_commonjs3(); function createAbortablePromise(buildPromise, options) { const { cleanupBeforeAbort, abortSignal, abortErrorMsg } = options ?? {}; - return new Promise((resolve6, reject) => { + return new Promise((resolve5, reject) => { function rejectOnAbort() { reject(new abort_controller_1.AbortError(abortErrorMsg ?? 
"The operation was aborted.")); } @@ -57674,7 +57674,7 @@ var require_createAbortablePromise = __commonJS({ try { buildPromise((x) => { removeListeners(); - resolve6(x); + resolve5(x); }, (x) => { removeListeners(); reject(x); @@ -57701,8 +57701,8 @@ var require_delay2 = __commonJS({ function delay2(timeInMs, options) { let token; const { abortSignal, abortErrorMsg } = options ?? {}; - return (0, createAbortablePromise_js_1.createAbortablePromise)((resolve6) => { - token = setTimeout(resolve6, timeInMs); + return (0, createAbortablePromise_js_1.createAbortablePromise)((resolve5) => { + token = setTimeout(resolve5, timeInMs); }, { cleanupBeforeAbort: () => clearTimeout(token), abortSignal, @@ -60822,15 +60822,15 @@ var require_urlHelpers2 = __commonJS({ let isAbsolutePath = false; let requestUrl = replaceAll(baseUri, urlReplacements); if (operationSpec.path) { - let path12 = replaceAll(operationSpec.path, urlReplacements); - if (operationSpec.path === "/{nextLink}" && path12.startsWith("/")) { - path12 = path12.substring(1); + let path11 = replaceAll(operationSpec.path, urlReplacements); + if (operationSpec.path === "/{nextLink}" && path11.startsWith("/")) { + path11 = path11.substring(1); } - if (isAbsoluteUrl(path12)) { - requestUrl = path12; + if (isAbsoluteUrl(path11)) { + requestUrl = path11; isAbsolutePath = true; } else { - requestUrl = appendPath(requestUrl, path12); + requestUrl = appendPath(requestUrl, path11); } } const { queryParams, sequenceParams } = calculateQueryParameters(operationSpec, operationArguments, fallbackObject); @@ -60876,9 +60876,9 @@ var require_urlHelpers2 = __commonJS({ } const searchStart = pathToAppend.indexOf("?"); if (searchStart !== -1) { - const path12 = pathToAppend.substring(0, searchStart); + const path11 = pathToAppend.substring(0, searchStart); const search = pathToAppend.substring(searchStart + 1); - newPath = newPath + path12; + newPath = newPath + path11; if (search) { parsedUrl.search = parsedUrl.search ? `${parsedUrl.search}&${search}` : search; } @@ -63129,10 +63129,10 @@ var require_utils_common = __commonJS({ var constants_js_1 = require_constants15(); function escapeURLPath(url2) { const urlParsed = new URL(url2); - let path12 = urlParsed.pathname; - path12 = path12 || "/"; - path12 = escape(path12); - urlParsed.pathname = path12; + let path11 = urlParsed.pathname; + path11 = path11 || "/"; + path11 = escape(path11); + urlParsed.pathname = path11; return urlParsed.toString(); } function getProxyUriFromDevConnString(connectionString) { @@ -63217,9 +63217,9 @@ var require_utils_common = __commonJS({ } function appendToURLPath(url2, name) { const urlParsed = new URL(url2); - let path12 = urlParsed.pathname; - path12 = path12 ? path12.endsWith("/") ? `${path12}${name}` : `${path12}/${name}` : name; - urlParsed.pathname = path12; + let path11 = urlParsed.pathname; + path11 = path11 ? path11.endsWith("/") ? 
`${path11}${name}` : `${path11}/${name}` : name; + urlParsed.pathname = path11; return urlParsed.toString(); } function setURLParameter(url2, name, value) { @@ -63334,7 +63334,7 @@ var require_utils_common = __commonJS({ return base64encode(res); } async function delay2(timeInMs, aborter, abortError) { - return new Promise((resolve6, reject) => { + return new Promise((resolve5, reject) => { let timeout; const abortHandler = () => { if (timeout !== void 0) { @@ -63346,7 +63346,7 @@ var require_utils_common = __commonJS({ if (aborter !== void 0) { aborter.removeEventListener("abort", abortHandler); } - resolve6(); + resolve5(); }; timeout = setTimeout(resolveHandler, timeInMs); if (aborter !== void 0) { @@ -64446,9 +64446,9 @@ var require_StorageSharedKeyCredentialPolicy = __commonJS({ * @param request - */ getCanonicalizedResourceString(request2) { - const path12 = (0, utils_common_js_1.getURLPath)(request2.url) || "/"; + const path11 = (0, utils_common_js_1.getURLPath)(request2.url) || "/"; let canonicalizedResourceString = ""; - canonicalizedResourceString += `/${this.factory.accountName}${path12}`; + canonicalizedResourceString += `/${this.factory.accountName}${path11}`; const queries = (0, utils_common_js_1.getURLQueries)(request2.url); const lowercaseQueries = {}; if (queries) { @@ -64887,7 +64887,7 @@ var require_BufferScheduler = __commonJS({ * */ async do() { - return new Promise((resolve6, reject) => { + return new Promise((resolve5, reject) => { this.readable.on("data", (data) => { data = typeof data === "string" ? Buffer.from(data, this.encoding) : data; this.appendUnresolvedData(data); @@ -64915,11 +64915,11 @@ var require_BufferScheduler = __commonJS({ if (this.isStreamEnd && this.executingOutgoingHandlers === 0) { if (this.unresolvedLength > 0 && this.unresolvedLength < this.bufferSize) { const buffer = this.shiftBufferFromUnresolvedDataArray(); - this.outgoingHandler(() => buffer.getReadableStream(), buffer.size, this.offset).then(resolve6).catch(reject); + this.outgoingHandler(() => buffer.getReadableStream(), buffer.size, this.offset).then(resolve5).catch(reject); } else if (this.unresolvedLength >= this.bufferSize) { return; } else { - resolve6(); + resolve5(); } } }); @@ -65187,10 +65187,10 @@ var require_utils_common2 = __commonJS({ var constants_js_1 = require_constants16(); function escapeURLPath(url2) { const urlParsed = new URL(url2); - let path12 = urlParsed.pathname; - path12 = path12 || "/"; - path12 = escape(path12); - urlParsed.pathname = path12; + let path11 = urlParsed.pathname; + path11 = path11 || "/"; + path11 = escape(path11); + urlParsed.pathname = path11; return urlParsed.toString(); } function getProxyUriFromDevConnString(connectionString) { @@ -65275,9 +65275,9 @@ var require_utils_common2 = __commonJS({ } function appendToURLPath(url2, name) { const urlParsed = new URL(url2); - let path12 = urlParsed.pathname; - path12 = path12 ? path12.endsWith("/") ? `${path12}${name}` : `${path12}/${name}` : name; - urlParsed.pathname = path12; + let path11 = urlParsed.pathname; + path11 = path11 ? path11.endsWith("/") ? 
`${path11}${name}` : `${path11}/${name}` : name; + urlParsed.pathname = path11; return urlParsed.toString(); } function setURLParameter(url2, name, value) { @@ -65392,7 +65392,7 @@ var require_utils_common2 = __commonJS({ return base64encode(res); } async function delay2(timeInMs, aborter, abortError) { - return new Promise((resolve6, reject) => { + return new Promise((resolve5, reject) => { let timeout; const abortHandler = () => { if (timeout !== void 0) { @@ -65404,7 +65404,7 @@ var require_utils_common2 = __commonJS({ if (aborter !== void 0) { aborter.removeEventListener("abort", abortHandler); } - resolve6(); + resolve5(); }; timeout = setTimeout(resolveHandler, timeInMs); if (aborter !== void 0) { @@ -66198,9 +66198,9 @@ var require_StorageSharedKeyCredentialPolicy2 = __commonJS({ * @param request - */ getCanonicalizedResourceString(request2) { - const path12 = (0, utils_common_js_1.getURLPath)(request2.url) || "/"; + const path11 = (0, utils_common_js_1.getURLPath)(request2.url) || "/"; let canonicalizedResourceString = ""; - canonicalizedResourceString += `/${this.factory.accountName}${path12}`; + canonicalizedResourceString += `/${this.factory.accountName}${path11}`; const queries = (0, utils_common_js_1.getURLQueries)(request2.url); const lowercaseQueries = {}; if (queries) { @@ -66830,9 +66830,9 @@ var require_StorageSharedKeyCredentialPolicyV2 = __commonJS({ return canonicalizedHeadersStringToSign; } function getCanonicalizedResourceString(request2) { - const path12 = (0, utils_common_js_1.getURLPath)(request2.url) || "/"; + const path11 = (0, utils_common_js_1.getURLPath)(request2.url) || "/"; let canonicalizedResourceString = ""; - canonicalizedResourceString += `/${options.accountName}${path12}`; + canonicalizedResourceString += `/${options.accountName}${path11}`; const queries = (0, utils_common_js_1.getURLQueries)(request2.url); const lowercaseQueries = {}; if (queries) { @@ -67177,9 +67177,9 @@ var require_StorageSharedKeyCredentialPolicyV22 = __commonJS({ return canonicalizedHeadersStringToSign; } function getCanonicalizedResourceString(request2) { - const path12 = (0, utils_common_js_1.getURLPath)(request2.url) || "/"; + const path11 = (0, utils_common_js_1.getURLPath)(request2.url) || "/"; let canonicalizedResourceString = ""; - canonicalizedResourceString += `/${options.accountName}${path12}`; + canonicalizedResourceString += `/${options.accountName}${path11}`; const queries = (0, utils_common_js_1.getURLQueries)(request2.url); const lowercaseQueries = {}; if (queries) { @@ -83602,7 +83602,7 @@ var require_AvroReadableFromStream = __commonJS({ this._position += chunk.length; return this.toUint8Array(chunk); } else { - return new Promise((resolve6, reject) => { + return new Promise((resolve5, reject) => { const cleanUp = () => { this._readable.removeListener("readable", readableCallback); this._readable.removeListener("error", rejectCallback); @@ -83617,7 +83617,7 @@ var require_AvroReadableFromStream = __commonJS({ if (callbackChunk) { this._position += callbackChunk.length; cleanUp(); - resolve6(this.toUint8Array(callbackChunk)); + resolve5(this.toUint8Array(callbackChunk)); } }; const rejectCallback = () => { @@ -85097,8 +85097,8 @@ var require_poller3 = __commonJS({ this.stopped = true; this.pollProgressCallbacks = []; this.operation = operation; - this.promise = new Promise((resolve6, reject) => { - this.resolve = resolve6; + this.promise = new Promise((resolve5, reject) => { + this.resolve = resolve5; this.reject = reject; }); this.promise.catch(() => { @@ 
-85351,7 +85351,7 @@ var require_lroEngine = __commonJS({ * The method used by the poller to wait before attempting to update its operation. */ delay() { - return new Promise((resolve6) => setTimeout(() => resolve6(), this.config.intervalInMs)); + return new Promise((resolve5) => setTimeout(() => resolve5(), this.config.intervalInMs)); } }; exports2.LroEngine = LroEngine; @@ -85597,8 +85597,8 @@ var require_Batch = __commonJS({ return Promise.resolve(); } this.parallelExecute(); - return new Promise((resolve6, reject) => { - this.emitter.on("finish", resolve6); + return new Promise((resolve5, reject) => { + this.emitter.on("finish", resolve5); this.emitter.on("error", (error3) => { this.state = BatchStates.Error; reject(error3); @@ -85659,12 +85659,12 @@ var require_utils7 = __commonJS({ async function streamToBuffer(stream2, buffer, offset, end, encoding) { let pos = 0; const count = end - offset; - return new Promise((resolve6, reject) => { + return new Promise((resolve5, reject) => { const timeout = setTimeout(() => reject(new Error(`The operation cannot be completed in timeout.`)), constants_js_1.REQUEST_TIMEOUT); stream2.on("readable", () => { if (pos >= count) { clearTimeout(timeout); - resolve6(); + resolve5(); return; } let chunk = stream2.read(); @@ -85683,7 +85683,7 @@ var require_utils7 = __commonJS({ if (pos < count) { reject(new Error(`Stream drains before getting enough data needed. Data read: ${pos}, data need: ${count}`)); } - resolve6(); + resolve5(); }); stream2.on("error", (msg) => { clearTimeout(timeout); @@ -85694,7 +85694,7 @@ var require_utils7 = __commonJS({ async function streamToBuffer2(stream2, buffer, encoding) { let pos = 0; const bufferSize = buffer.length; - return new Promise((resolve6, reject) => { + return new Promise((resolve5, reject) => { stream2.on("readable", () => { let chunk = stream2.read(); if (!chunk) { @@ -85711,25 +85711,25 @@ var require_utils7 = __commonJS({ pos += chunk.length; }); stream2.on("end", () => { - resolve6(pos); + resolve5(pos); }); stream2.on("error", reject); }); } async function streamToBuffer3(readableStream, encoding) { - return new Promise((resolve6, reject) => { + return new Promise((resolve5, reject) => { const chunks = []; readableStream.on("data", (data) => { chunks.push(typeof data === "string" ? Buffer.from(data, encoding) : data); }); readableStream.on("end", () => { - resolve6(Buffer.concat(chunks)); + resolve5(Buffer.concat(chunks)); }); readableStream.on("error", reject); }); } async function readStreamToLocalFile(rs, file) { - return new Promise((resolve6, reject) => { + return new Promise((resolve5, reject) => { const ws = node_fs_1.default.createWriteStream(file); rs.on("error", (err) => { reject(err); @@ -85737,7 +85737,7 @@ var require_utils7 = __commonJS({ ws.on("error", (err) => { reject(err); }); - ws.on("close", resolve6); + ws.on("close", resolve5); rs.pipe(ws); }); } @@ -86676,7 +86676,7 @@ var require_Clients = __commonJS({ * @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. 
*/ generateSasUrl(options) { - return new Promise((resolve6) => { + return new Promise((resolve5) => { if (!(this.credential instanceof StorageSharedKeyCredential_js_1.StorageSharedKeyCredential)) { throw new RangeError("Can only generate the SAS when the client is initialized with a shared key credential"); } @@ -86687,7 +86687,7 @@ var require_Clients = __commonJS({ versionId: this._versionId, ...options }, this.credential).toString(); - resolve6((0, utils_common_js_1.appendToURLQuery)(this.url, sas)); + resolve5((0, utils_common_js_1.appendToURLQuery)(this.url, sas)); }); } /** @@ -86726,7 +86726,7 @@ var require_Clients = __commonJS({ * @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. */ generateUserDelegationSasUrl(options, userDelegationKey) { - return new Promise((resolve6) => { + return new Promise((resolve5) => { const sas = (0, BlobSASSignatureValues_js_1.generateBlobSASQueryParameters)({ containerName: this._containerName, blobName: this._name, @@ -86734,7 +86734,7 @@ var require_Clients = __commonJS({ versionId: this._versionId, ...options }, userDelegationKey, this.accountName).toString(); - resolve6((0, utils_common_js_1.appendToURLQuery)(this.url, sas)); + resolve5((0, utils_common_js_1.appendToURLQuery)(this.url, sas)); }); } /** @@ -88589,14 +88589,14 @@ var require_Mutex = __commonJS({ * @param key - lock key */ static async lock(key) { - return new Promise((resolve6) => { + return new Promise((resolve5) => { if (this.keys[key] === void 0 || this.keys[key] === MutexLockStatus.UNLOCKED) { this.keys[key] = MutexLockStatus.LOCKED; - resolve6(); + resolve5(); } else { this.onUnlockEvent(key, () => { this.keys[key] = MutexLockStatus.LOCKED; - resolve6(); + resolve5(); }); } }); @@ -88607,12 +88607,12 @@ var require_Mutex = __commonJS({ * @param key - */ static async unlock(key) { - return new Promise((resolve6) => { + return new Promise((resolve5) => { if (this.keys[key] === MutexLockStatus.LOCKED) { this.emitUnlockEvent(key); } delete this.keys[key]; - resolve6(); + resolve5(); }); } static keys = {}; @@ -88834,8 +88834,8 @@ var require_BlobBatch = __commonJS({ if (this.operationCount >= constants_js_1.BATCH_MAX_REQUEST) { throw new RangeError(`Cannot exceed ${constants_js_1.BATCH_MAX_REQUEST} sub requests in a single batch`); } - const path12 = (0, utils_common_js_1.getURLPath)(subRequest.url); - if (!path12 || path12 === "") { + const path11 = (0, utils_common_js_1.getURLPath)(subRequest.url); + if (!path11 || path11 === "") { throw new RangeError(`Invalid url for sub request: '${subRequest.url}'`); } } @@ -88913,8 +88913,8 @@ var require_BlobBatchClient = __commonJS({ pipeline = (0, Pipeline_js_1.newPipeline)(credentialOrPipeline, options); } const storageClientContext = new StorageContextClient_js_1.StorageContextClient(url2, (0, Pipeline_js_1.getCoreClientOptions)(pipeline)); - const path12 = (0, utils_common_js_1.getURLPath)(url2); - if (path12 && path12 !== "/") { + const path11 = (0, utils_common_js_1.getURLPath)(url2); + if (path11 && path11 !== "/") { this.serviceOrContainerContext = storageClientContext.container; } else { this.serviceOrContainerContext = storageClientContext.service; @@ -90215,7 +90215,7 @@ var require_ContainerClient = __commonJS({ * @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. 
*/ generateSasUrl(options) { - return new Promise((resolve6) => { + return new Promise((resolve5) => { if (!(this.credential instanceof StorageSharedKeyCredential_js_1.StorageSharedKeyCredential)) { throw new RangeError("Can only generate the SAS when the client is initialized with a shared key credential"); } @@ -90223,7 +90223,7 @@ var require_ContainerClient = __commonJS({ containerName: this._containerName, ...options }, this.credential).toString(); - resolve6((0, utils_common_js_1.appendToURLQuery)(this.url, sas)); + resolve5((0, utils_common_js_1.appendToURLQuery)(this.url, sas)); }); } /** @@ -90258,12 +90258,12 @@ var require_ContainerClient = __commonJS({ * @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. */ generateUserDelegationSasUrl(options, userDelegationKey) { - return new Promise((resolve6) => { + return new Promise((resolve5) => { const sas = (0, BlobSASSignatureValues_js_1.generateBlobSASQueryParameters)({ containerName: this._containerName, ...options }, userDelegationKey, this.accountName).toString(); - resolve6((0, utils_common_js_1.appendToURLQuery)(this.url, sas)); + resolve5((0, utils_common_js_1.appendToURLQuery)(this.url, sas)); }); } /** @@ -91659,11 +91659,11 @@ var require_uploadUtils = __commonJS({ })(); var __awaiter2 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { function adopt(value) { - return value instanceof P ? value : new P(function(resolve6) { - resolve6(value); + return value instanceof P ? value : new P(function(resolve5) { + resolve5(value); }); } - return new (P || (P = Promise))(function(resolve6, reject) { + return new (P || (P = Promise))(function(resolve5, reject) { function fulfilled(value) { try { step(generator.next(value)); @@ -91679,7 +91679,7 @@ var require_uploadUtils = __commonJS({ } } function step(result) { - result.done ? resolve6(result.value) : adopt(result.value).then(fulfilled, rejected); + result.done ? resolve5(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); @@ -91846,11 +91846,11 @@ var require_requestUtils = __commonJS({ })(); var __awaiter2 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { function adopt(value) { - return value instanceof P ? value : new P(function(resolve6) { - resolve6(value); + return value instanceof P ? value : new P(function(resolve5) { + resolve5(value); }); } - return new (P || (P = Promise))(function(resolve6, reject) { + return new (P || (P = Promise))(function(resolve5, reject) { function fulfilled(value) { try { step(generator.next(value)); @@ -91866,7 +91866,7 @@ var require_requestUtils = __commonJS({ } } function step(result) { - result.done ? resolve6(result.value) : adopt(result.value).then(fulfilled, rejected); + result.done ? 
resolve5(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); @@ -91906,7 +91906,7 @@ var require_requestUtils = __commonJS({ } function sleep(milliseconds) { return __awaiter2(this, void 0, void 0, function* () { - return new Promise((resolve6) => setTimeout(resolve6, milliseconds)); + return new Promise((resolve5) => setTimeout(resolve5, milliseconds)); }); } function retry2(name_1, method_1, getStatusCode_1) { @@ -92167,11 +92167,11 @@ var require_downloadUtils = __commonJS({ })(); var __awaiter2 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { function adopt(value) { - return value instanceof P ? value : new P(function(resolve6) { - resolve6(value); + return value instanceof P ? value : new P(function(resolve5) { + resolve5(value); }); } - return new (P || (P = Promise))(function(resolve6, reject) { + return new (P || (P = Promise))(function(resolve5, reject) { function fulfilled(value) { try { step(generator.next(value)); @@ -92187,7 +92187,7 @@ var require_downloadUtils = __commonJS({ } } function step(result) { - result.done ? resolve6(result.value) : adopt(result.value).then(fulfilled, rejected); + result.done ? resolve5(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); @@ -92201,7 +92201,7 @@ var require_downloadUtils = __commonJS({ var http_client_1 = require_lib(); var storage_blob_1 = require_commonjs15(); var buffer = __importStar2(require("buffer")); - var fs12 = __importStar2(require("fs")); + var fs11 = __importStar2(require("fs")); var stream2 = __importStar2(require("stream")); var util = __importStar2(require("util")); var utils = __importStar2(require_cacheUtils()); @@ -92312,7 +92312,7 @@ var require_downloadUtils = __commonJS({ exports2.DownloadProgress = DownloadProgress; function downloadCacheHttpClient(archiveLocation, archivePath) { return __awaiter2(this, void 0, void 0, function* () { - const writeStream = fs12.createWriteStream(archivePath); + const writeStream = fs11.createWriteStream(archivePath); const httpClient = new http_client_1.HttpClient("actions/cache"); const downloadResponse = yield (0, requestUtils_1.retryHttpClientResponse)("downloadCache", () => __awaiter2(this, void 0, void 0, function* () { return httpClient.get(archiveLocation); @@ -92337,7 +92337,7 @@ var require_downloadUtils = __commonJS({ function downloadCacheHttpClientConcurrent(archiveLocation, archivePath, options) { return __awaiter2(this, void 0, void 0, function* () { var _a; - const archiveDescriptor = yield fs12.promises.open(archivePath, "w"); + const archiveDescriptor = yield fs11.promises.open(archivePath, "w"); const httpClient = new http_client_1.HttpClient("actions/cache", void 0, { socketTimeout: options.timeoutInMs, keepAlive: true @@ -92453,7 +92453,7 @@ var require_downloadUtils = __commonJS({ } else { const maxSegmentSize = Math.min(134217728, buffer.constants.MAX_LENGTH); const downloadProgress = new DownloadProgress(contentLength); - const fd = fs12.openSync(archivePath, "w"); + const fd = fs11.openSync(archivePath, "w"); try { downloadProgress.startDisplayTimer(); const controller = new abort_controller_1.AbortController(); @@ -92471,20 +92471,20 @@ var require_downloadUtils = __commonJS({ controller.abort(); throw new Error("Aborting cache download as the download time exceeded the timeout."); } else if (Buffer.isBuffer(result)) { - fs12.writeFileSync(fd, 
result); + fs11.writeFileSync(fd, result); } } } finally { downloadProgress.stopDisplayTimer(); - fs12.closeSync(fd); + fs11.closeSync(fd); } } }); } var promiseWithTimeout = (timeoutMs, promise) => __awaiter2(void 0, void 0, void 0, function* () { let timeoutHandle; - const timeoutPromise = new Promise((resolve6) => { - timeoutHandle = setTimeout(() => resolve6("timeout"), timeoutMs); + const timeoutPromise = new Promise((resolve5) => { + timeoutHandle = setTimeout(() => resolve5("timeout"), timeoutMs); }); return Promise.race([promise, timeoutPromise]).then((result) => { clearTimeout(timeoutHandle); @@ -92765,11 +92765,11 @@ var require_cacheHttpClient = __commonJS({ })(); var __awaiter2 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { function adopt(value) { - return value instanceof P ? value : new P(function(resolve6) { - resolve6(value); + return value instanceof P ? value : new P(function(resolve5) { + resolve5(value); }); } - return new (P || (P = Promise))(function(resolve6, reject) { + return new (P || (P = Promise))(function(resolve5, reject) { function fulfilled(value) { try { step(generator.next(value)); @@ -92785,7 +92785,7 @@ var require_cacheHttpClient = __commonJS({ } } function step(result) { - result.done ? resolve6(result.value) : adopt(result.value).then(fulfilled, rejected); + result.done ? resolve5(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); @@ -92798,7 +92798,7 @@ var require_cacheHttpClient = __commonJS({ var core12 = __importStar2(require_core()); var http_client_1 = require_lib(); var auth_1 = require_auth(); - var fs12 = __importStar2(require("fs")); + var fs11 = __importStar2(require("fs")); var url_1 = require("url"); var utils = __importStar2(require_cacheUtils()); var uploadUtils_1 = require_uploadUtils(); @@ -92933,7 +92933,7 @@ Other caches with similar key:`); return __awaiter2(this, void 0, void 0, function* () { const fileSize = utils.getArchiveFileSizeInBytes(archivePath); const resourceUrl = getCacheApiUrl(`caches/${cacheId.toString()}`); - const fd = fs12.openSync(archivePath, "r"); + const fd = fs11.openSync(archivePath, "r"); const uploadOptions = (0, options_1.getUploadOptions)(options); const concurrency = utils.assertDefined("uploadConcurrency", uploadOptions.uploadConcurrency); const maxChunkSize = utils.assertDefined("uploadChunkSize", uploadOptions.uploadChunkSize); @@ -92947,7 +92947,7 @@ Other caches with similar key:`); const start = offset; const end = offset + chunkSize - 1; offset += maxChunkSize; - yield uploadChunk(httpClient, resourceUrl, () => fs12.createReadStream(archivePath, { + yield uploadChunk(httpClient, resourceUrl, () => fs11.createReadStream(archivePath, { fd, start, end, @@ -92958,7 +92958,7 @@ Other caches with similar key:`); } }))); } finally { - fs12.closeSync(fd); + fs11.closeSync(fd); } return; }); @@ -96255,8 +96255,8 @@ var require_deferred = __commonJS({ */ constructor(preventUnhandledRejectionWarning = true) { this._state = DeferredState.PENDING; - this._promise = new Promise((resolve6, reject) => { - this._resolve = resolve6; + this._promise = new Promise((resolve5, reject) => { + this._resolve = resolve5; this._reject = reject; }); if (preventUnhandledRejectionWarning) { @@ -96471,11 +96471,11 @@ var require_unary_call = __commonJS({ "use strict"; var __awaiter2 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { function adopt(value) { - return value 
instanceof P ? value : new P(function(resolve6) { - resolve6(value); + return value instanceof P ? value : new P(function(resolve5) { + resolve5(value); }); } - return new (P || (P = Promise))(function(resolve6, reject) { + return new (P || (P = Promise))(function(resolve5, reject) { function fulfilled(value) { try { step(generator.next(value)); @@ -96491,7 +96491,7 @@ var require_unary_call = __commonJS({ } } function step(result) { - result.done ? resolve6(result.value) : adopt(result.value).then(fulfilled, rejected); + result.done ? resolve5(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); @@ -96540,11 +96540,11 @@ var require_server_streaming_call = __commonJS({ "use strict"; var __awaiter2 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { function adopt(value) { - return value instanceof P ? value : new P(function(resolve6) { - resolve6(value); + return value instanceof P ? value : new P(function(resolve5) { + resolve5(value); }); } - return new (P || (P = Promise))(function(resolve6, reject) { + return new (P || (P = Promise))(function(resolve5, reject) { function fulfilled(value) { try { step(generator.next(value)); @@ -96560,7 +96560,7 @@ var require_server_streaming_call = __commonJS({ } } function step(result) { - result.done ? resolve6(result.value) : adopt(result.value).then(fulfilled, rejected); + result.done ? resolve5(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); @@ -96610,11 +96610,11 @@ var require_client_streaming_call = __commonJS({ "use strict"; var __awaiter2 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { function adopt(value) { - return value instanceof P ? value : new P(function(resolve6) { - resolve6(value); + return value instanceof P ? value : new P(function(resolve5) { + resolve5(value); }); } - return new (P || (P = Promise))(function(resolve6, reject) { + return new (P || (P = Promise))(function(resolve5, reject) { function fulfilled(value) { try { step(generator.next(value)); @@ -96630,7 +96630,7 @@ var require_client_streaming_call = __commonJS({ } } function step(result) { - result.done ? resolve6(result.value) : adopt(result.value).then(fulfilled, rejected); + result.done ? resolve5(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); @@ -96679,11 +96679,11 @@ var require_duplex_streaming_call = __commonJS({ "use strict"; var __awaiter2 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { function adopt(value) { - return value instanceof P ? value : new P(function(resolve6) { - resolve6(value); + return value instanceof P ? value : new P(function(resolve5) { + resolve5(value); }); } - return new (P || (P = Promise))(function(resolve6, reject) { + return new (P || (P = Promise))(function(resolve5, reject) { function fulfilled(value) { try { step(generator.next(value)); @@ -96699,7 +96699,7 @@ var require_duplex_streaming_call = __commonJS({ } } function step(result) { - result.done ? resolve6(result.value) : adopt(result.value).then(fulfilled, rejected); + result.done ? 
resolve5(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); @@ -96747,11 +96747,11 @@ var require_test_transport = __commonJS({ "use strict"; var __awaiter2 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { function adopt(value) { - return value instanceof P ? value : new P(function(resolve6) { - resolve6(value); + return value instanceof P ? value : new P(function(resolve5) { + resolve5(value); }); } - return new (P || (P = Promise))(function(resolve6, reject) { + return new (P || (P = Promise))(function(resolve5, reject) { function fulfilled(value) { try { step(generator.next(value)); @@ -96767,7 +96767,7 @@ var require_test_transport = __commonJS({ } } function step(result) { - result.done ? resolve6(result.value) : adopt(result.value).then(fulfilled, rejected); + result.done ? resolve5(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); @@ -96964,11 +96964,11 @@ var require_test_transport = __commonJS({ responseTrailer: "test" }; function delay2(ms, abort) { - return (v) => new Promise((resolve6, reject) => { + return (v) => new Promise((resolve5, reject) => { if (abort === null || abort === void 0 ? void 0 : abort.aborted) { reject(new rpc_error_1.RpcError("user cancel", "CANCELLED")); } else { - const id = setTimeout(() => resolve6(v), ms); + const id = setTimeout(() => resolve5(v), ms); if (abort) { abort.addEventListener("abort", (ev) => { clearTimeout(id); @@ -97966,11 +97966,11 @@ var require_cacheTwirpClient = __commonJS({ "use strict"; var __awaiter2 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { function adopt(value) { - return value instanceof P ? value : new P(function(resolve6) { - resolve6(value); + return value instanceof P ? value : new P(function(resolve5) { + resolve5(value); }); } - return new (P || (P = Promise))(function(resolve6, reject) { + return new (P || (P = Promise))(function(resolve5, reject) { function fulfilled(value) { try { step(generator.next(value)); @@ -97986,7 +97986,7 @@ var require_cacheTwirpClient = __commonJS({ } } function step(result) { - result.done ? resolve6(result.value) : adopt(result.value).then(fulfilled, rejected); + result.done ? resolve5(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); @@ -98126,7 +98126,7 @@ var require_cacheTwirpClient = __commonJS({ } sleep(milliseconds) { return __awaiter2(this, void 0, void 0, function* () { - return new Promise((resolve6) => setTimeout(resolve6, milliseconds)); + return new Promise((resolve5) => setTimeout(resolve5, milliseconds)); }); } getExponentialRetryTimeMilliseconds(attempt) { @@ -98191,11 +98191,11 @@ var require_tar = __commonJS({ })(); var __awaiter2 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { function adopt(value) { - return value instanceof P ? value : new P(function(resolve6) { - resolve6(value); + return value instanceof P ? value : new P(function(resolve5) { + resolve5(value); }); } - return new (P || (P = Promise))(function(resolve6, reject) { + return new (P || (P = Promise))(function(resolve5, reject) { function fulfilled(value) { try { step(generator.next(value)); @@ -98211,7 +98211,7 @@ var require_tar = __commonJS({ } } function step(result) { - result.done ? 
resolve6(result.value) : adopt(result.value).then(fulfilled, rejected); + result.done ? resolve5(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); @@ -98223,7 +98223,7 @@ var require_tar = __commonJS({ var exec_1 = require_exec(); var io6 = __importStar2(require_io()); var fs_1 = require("fs"); - var path12 = __importStar2(require("path")); + var path11 = __importStar2(require("path")); var utils = __importStar2(require_cacheUtils()); var constants_1 = require_constants12(); var IS_WINDOWS = process.platform === "win32"; @@ -98269,13 +98269,13 @@ var require_tar = __commonJS({ const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD && compressionMethod !== constants_1.CompressionMethod.Gzip && IS_WINDOWS; switch (type2) { case "create": - args.push("--posix", "-cf", BSD_TAR_ZSTD ? tarFile : cacheFileName.replace(new RegExp(`\\${path12.sep}`, "g"), "/"), "--exclude", BSD_TAR_ZSTD ? tarFile : cacheFileName.replace(new RegExp(`\\${path12.sep}`, "g"), "/"), "-P", "-C", workingDirectory.replace(new RegExp(`\\${path12.sep}`, "g"), "/"), "--files-from", constants_1.ManifestFilename); + args.push("--posix", "-cf", BSD_TAR_ZSTD ? tarFile : cacheFileName.replace(new RegExp(`\\${path11.sep}`, "g"), "/"), "--exclude", BSD_TAR_ZSTD ? tarFile : cacheFileName.replace(new RegExp(`\\${path11.sep}`, "g"), "/"), "-P", "-C", workingDirectory.replace(new RegExp(`\\${path11.sep}`, "g"), "/"), "--files-from", constants_1.ManifestFilename); break; case "extract": - args.push("-xf", BSD_TAR_ZSTD ? tarFile : archivePath.replace(new RegExp(`\\${path12.sep}`, "g"), "/"), "-P", "-C", workingDirectory.replace(new RegExp(`\\${path12.sep}`, "g"), "/")); + args.push("-xf", BSD_TAR_ZSTD ? tarFile : archivePath.replace(new RegExp(`\\${path11.sep}`, "g"), "/"), "-P", "-C", workingDirectory.replace(new RegExp(`\\${path11.sep}`, "g"), "/")); break; case "list": - args.push("-tf", BSD_TAR_ZSTD ? tarFile : archivePath.replace(new RegExp(`\\${path12.sep}`, "g"), "/"), "-P"); + args.push("-tf", BSD_TAR_ZSTD ? tarFile : archivePath.replace(new RegExp(`\\${path11.sep}`, "g"), "/"), "-P"); break; } if (tarPath.type === constants_1.ArchiveToolType.GNU) { @@ -98321,7 +98321,7 @@ var require_tar = __commonJS({ return BSD_TAR_ZSTD ? [ "zstd -d --long=30 --force -o", constants_1.TarFilename, - archivePath.replace(new RegExp(`\\${path12.sep}`, "g"), "/") + archivePath.replace(new RegExp(`\\${path11.sep}`, "g"), "/") ] : [ "--use-compress-program", IS_WINDOWS ? '"zstd -d --long=30"' : "unzstd --long=30" @@ -98330,7 +98330,7 @@ var require_tar = __commonJS({ return BSD_TAR_ZSTD ? [ "zstd -d --force -o", constants_1.TarFilename, - archivePath.replace(new RegExp(`\\${path12.sep}`, "g"), "/") + archivePath.replace(new RegExp(`\\${path11.sep}`, "g"), "/") ] : ["--use-compress-program", IS_WINDOWS ? '"zstd -d"' : "unzstd"]; default: return ["-z"]; @@ -98345,7 +98345,7 @@ var require_tar = __commonJS({ case constants_1.CompressionMethod.Zstd: return BSD_TAR_ZSTD ? [ "zstd -T0 --long=30 --force -o", - cacheFileName.replace(new RegExp(`\\${path12.sep}`, "g"), "/"), + cacheFileName.replace(new RegExp(`\\${path11.sep}`, "g"), "/"), constants_1.TarFilename ] : [ "--use-compress-program", @@ -98354,7 +98354,7 @@ var require_tar = __commonJS({ case constants_1.CompressionMethod.ZstdWithoutLong: return BSD_TAR_ZSTD ? 
[ "zstd -T0 --force -o", - cacheFileName.replace(new RegExp(`\\${path12.sep}`, "g"), "/"), + cacheFileName.replace(new RegExp(`\\${path11.sep}`, "g"), "/"), constants_1.TarFilename ] : ["--use-compress-program", IS_WINDOWS ? '"zstd -T0"' : "zstdmt"]; default: @@ -98392,7 +98392,7 @@ var require_tar = __commonJS({ } function createTar(archiveFolder, sourceDirectories, compressionMethod) { return __awaiter2(this, void 0, void 0, function* () { - (0, fs_1.writeFileSync)(path12.join(archiveFolder, constants_1.ManifestFilename), sourceDirectories.join("\n")); + (0, fs_1.writeFileSync)(path11.join(archiveFolder, constants_1.ManifestFilename), sourceDirectories.join("\n")); const commands = yield getCommands(compressionMethod, "create"); yield execCommands(commands, archiveFolder); }); @@ -98443,11 +98443,11 @@ var require_cache5 = __commonJS({ })(); var __awaiter2 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { function adopt(value) { - return value instanceof P ? value : new P(function(resolve6) { - resolve6(value); + return value instanceof P ? value : new P(function(resolve5) { + resolve5(value); }); } - return new (P || (P = Promise))(function(resolve6, reject) { + return new (P || (P = Promise))(function(resolve5, reject) { function fulfilled(value) { try { step(generator.next(value)); @@ -98463,7 +98463,7 @@ var require_cache5 = __commonJS({ } } function step(result) { - result.done ? resolve6(result.value) : adopt(result.value).then(fulfilled, rejected); + result.done ? resolve5(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); @@ -98474,7 +98474,7 @@ var require_cache5 = __commonJS({ exports2.restoreCache = restoreCache3; exports2.saveCache = saveCache3; var core12 = __importStar2(require_core()); - var path12 = __importStar2(require("path")); + var path11 = __importStar2(require("path")); var utils = __importStar2(require_cacheUtils()); var cacheHttpClient = __importStar2(require_cacheHttpClient()); var cacheTwirpClient = __importStar2(require_cacheTwirpClient()); @@ -98569,7 +98569,7 @@ var require_cache5 = __commonJS({ core12.info("Lookup only - skipping download"); return cacheEntry.cacheKey; } - archivePath = path12.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); + archivePath = path11.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); core12.debug(`Archive Path: ${archivePath}`); yield cacheHttpClient.downloadCache(cacheEntry.archiveLocation, archivePath, options); if (core12.isDebug()) { @@ -98638,7 +98638,7 @@ var require_cache5 = __commonJS({ core12.info("Lookup only - skipping download"); return response.matchedKey; } - archivePath = path12.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); + archivePath = path11.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); core12.debug(`Archive path: ${archivePath}`); core12.debug(`Starting download of archive to: ${archivePath}`); yield cacheHttpClient.downloadCache(response.signedDownloadUrl, archivePath, options); @@ -98700,7 +98700,7 @@ var require_cache5 = __commonJS({ throw new Error(`Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`); } const archiveFolder = yield utils.createTempDirectory(); - const archivePath = path12.join(archiveFolder, utils.getCacheFileName(compressionMethod)); + const archivePath = 
path11.join(archiveFolder, utils.getCacheFileName(compressionMethod)); core12.debug(`Archive Path: ${archivePath}`); try { yield (0, tar_1.createTar)(archiveFolder, cachePaths, compressionMethod); @@ -98764,7 +98764,7 @@ var require_cache5 = __commonJS({ throw new Error(`Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`); } const archiveFolder = yield utils.createTempDirectory(); - const archivePath = path12.join(archiveFolder, utils.getCacheFileName(compressionMethod)); + const archivePath = path11.join(archiveFolder, utils.getCacheFileName(compressionMethod)); core12.debug(`Archive Path: ${archivePath}`); try { yield (0, tar_1.createTar)(archiveFolder, cachePaths, compressionMethod); @@ -98881,11 +98881,11 @@ var require_manifest = __commonJS({ })(); var __awaiter2 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { function adopt(value) { - return value instanceof P ? value : new P(function(resolve6) { - resolve6(value); + return value instanceof P ? value : new P(function(resolve5) { + resolve5(value); }); } - return new (P || (P = Promise))(function(resolve6, reject) { + return new (P || (P = Promise))(function(resolve5, reject) { function fulfilled(value) { try { step(generator.next(value)); @@ -98901,7 +98901,7 @@ var require_manifest = __commonJS({ } } function step(result) { - result.done ? resolve6(result.value) : adopt(result.value).then(fulfilled, rejected); + result.done ? resolve5(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); @@ -98914,7 +98914,7 @@ var require_manifest = __commonJS({ var core_1 = require_core(); var os2 = require("os"); var cp = require("child_process"); - var fs12 = require("fs"); + var fs11 = require("fs"); function _findMatch(versionSpec, stable, candidates, archFilter) { return __awaiter2(this, void 0, void 0, function* () { const platFilter = os2.platform(); @@ -98976,10 +98976,10 @@ var require_manifest = __commonJS({ const lsbReleaseFile = "/etc/lsb-release"; const osReleaseFile = "/etc/os-release"; let contents = ""; - if (fs12.existsSync(lsbReleaseFile)) { - contents = fs12.readFileSync(lsbReleaseFile).toString(); - } else if (fs12.existsSync(osReleaseFile)) { - contents = fs12.readFileSync(osReleaseFile).toString(); + if (fs11.existsSync(lsbReleaseFile)) { + contents = fs11.readFileSync(lsbReleaseFile).toString(); + } else if (fs11.existsSync(osReleaseFile)) { + contents = fs11.readFileSync(osReleaseFile).toString(); } return contents; } @@ -99029,11 +99029,11 @@ var require_retry_helper = __commonJS({ })(); var __awaiter2 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { function adopt(value) { - return value instanceof P ? value : new P(function(resolve6) { - resolve6(value); + return value instanceof P ? value : new P(function(resolve5) { + resolve5(value); }); } - return new (P || (P = Promise))(function(resolve6, reject) { + return new (P || (P = Promise))(function(resolve5, reject) { function fulfilled(value) { try { step(generator.next(value)); @@ -99049,7 +99049,7 @@ var require_retry_helper = __commonJS({ } } function step(result) { - result.done ? resolve6(result.value) : adopt(result.value).then(fulfilled, rejected); + result.done ? 
resolve5(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); @@ -99094,7 +99094,7 @@ var require_retry_helper = __commonJS({ } sleep(seconds) { return __awaiter2(this, void 0, void 0, function* () { - return new Promise((resolve6) => setTimeout(resolve6, seconds * 1e3)); + return new Promise((resolve5) => setTimeout(resolve5, seconds * 1e3)); }); } }; @@ -99145,11 +99145,11 @@ var require_tool_cache = __commonJS({ })(); var __awaiter2 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { function adopt(value) { - return value instanceof P ? value : new P(function(resolve6) { - resolve6(value); + return value instanceof P ? value : new P(function(resolve5) { + resolve5(value); }); } - return new (P || (P = Promise))(function(resolve6, reject) { + return new (P || (P = Promise))(function(resolve5, reject) { function fulfilled(value) { try { step(generator.next(value)); @@ -99165,7 +99165,7 @@ var require_tool_cache = __commonJS({ } } function step(result) { - result.done ? resolve6(result.value) : adopt(result.value).then(fulfilled, rejected); + result.done ? resolve5(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); @@ -99188,10 +99188,10 @@ var require_tool_cache = __commonJS({ var core12 = __importStar2(require_core()); var io6 = __importStar2(require_io()); var crypto2 = __importStar2(require("crypto")); - var fs12 = __importStar2(require("fs")); + var fs11 = __importStar2(require("fs")); var mm = __importStar2(require_manifest()); var os2 = __importStar2(require("os")); - var path12 = __importStar2(require("path")); + var path11 = __importStar2(require("path")); var httpm = __importStar2(require_lib()); var semver9 = __importStar2(require_semver2()); var stream2 = __importStar2(require("stream")); @@ -99212,8 +99212,8 @@ var require_tool_cache = __commonJS({ var userAgent2 = "actions/tool-cache"; function downloadTool2(url2, dest, auth2, headers) { return __awaiter2(this, void 0, void 0, function* () { - dest = dest || path12.join(_getTempDirectory(), crypto2.randomUUID()); - yield io6.mkdirP(path12.dirname(dest)); + dest = dest || path11.join(_getTempDirectory(), crypto2.randomUUID()); + yield io6.mkdirP(path11.dirname(dest)); core12.debug(`Downloading ${url2}`); core12.debug(`Destination ${dest}`); const maxAttempts = 3; @@ -99234,7 +99234,7 @@ var require_tool_cache = __commonJS({ } function downloadToolAttempt(url2, dest, auth2, headers) { return __awaiter2(this, void 0, void 0, function* () { - if (fs12.existsSync(dest)) { + if (fs11.existsSync(dest)) { throw new Error(`Destination file path ${dest} already exists`); } const http = new httpm.HttpClient(userAgent2, [], { @@ -99258,7 +99258,7 @@ var require_tool_cache = __commonJS({ const readStream = responseMessageFactory(); let succeeded = false; try { - yield pipeline(readStream, fs12.createWriteStream(dest)); + yield pipeline(readStream, fs11.createWriteStream(dest)); core12.debug("download complete"); succeeded = true; return dest; @@ -99303,7 +99303,7 @@ var require_tool_cache = __commonJS({ process.chdir(originalCwd); } } else { - const escapedScript = path12.join(__dirname, "..", "scripts", "Invoke-7zdec.ps1").replace(/'/g, "''").replace(/"|\n|\r/g, ""); + const escapedScript = path11.join(__dirname, "..", "scripts", "Invoke-7zdec.ps1").replace(/'/g, "''").replace(/"|\n|\r/g, ""); const escapedFile = file.replace(/'/g, 
"''").replace(/"|\n|\r/g, ""); const escapedTarget = dest.replace(/'/g, "''").replace(/"|\n|\r/g, ""); const command = `& '${escapedScript}' -Source '${escapedFile}' -Target '${escapedTarget}'`; @@ -99470,12 +99470,12 @@ var require_tool_cache = __commonJS({ arch2 = arch2 || os2.arch(); core12.debug(`Caching tool ${tool} ${version} ${arch2}`); core12.debug(`source dir: ${sourceDir}`); - if (!fs12.statSync(sourceDir).isDirectory()) { + if (!fs11.statSync(sourceDir).isDirectory()) { throw new Error("sourceDir is not a directory"); } const destPath = yield _createToolPath(tool, version, arch2); - for (const itemName of fs12.readdirSync(sourceDir)) { - const s = path12.join(sourceDir, itemName); + for (const itemName of fs11.readdirSync(sourceDir)) { + const s = path11.join(sourceDir, itemName); yield io6.cp(s, destPath, { recursive: true }); } _completeToolPath(tool, version, arch2); @@ -99488,11 +99488,11 @@ var require_tool_cache = __commonJS({ arch2 = arch2 || os2.arch(); core12.debug(`Caching tool ${tool} ${version} ${arch2}`); core12.debug(`source file: ${sourceFile}`); - if (!fs12.statSync(sourceFile).isFile()) { + if (!fs11.statSync(sourceFile).isFile()) { throw new Error("sourceFile is not a file"); } const destFolder = yield _createToolPath(tool, version, arch2); - const destPath = path12.join(destFolder, targetFile); + const destPath = path11.join(destFolder, targetFile); core12.debug(`destination file ${destPath}`); yield io6.cp(sourceFile, destPath); _completeToolPath(tool, version, arch2); @@ -99515,9 +99515,9 @@ var require_tool_cache = __commonJS({ let toolPath = ""; if (versionSpec) { versionSpec = semver9.clean(versionSpec) || ""; - const cachePath = path12.join(_getCacheDirectory(), toolName, versionSpec, arch2); + const cachePath = path11.join(_getCacheDirectory(), toolName, versionSpec, arch2); core12.debug(`checking cache: ${cachePath}`); - if (fs12.existsSync(cachePath) && fs12.existsSync(`${cachePath}.complete`)) { + if (fs11.existsSync(cachePath) && fs11.existsSync(`${cachePath}.complete`)) { core12.debug(`Found tool in cache ${toolName} ${versionSpec} ${arch2}`); toolPath = cachePath; } else { @@ -99529,13 +99529,13 @@ var require_tool_cache = __commonJS({ function findAllVersions2(toolName, arch2) { const versions = []; arch2 = arch2 || os2.arch(); - const toolPath = path12.join(_getCacheDirectory(), toolName); - if (fs12.existsSync(toolPath)) { - const children = fs12.readdirSync(toolPath); + const toolPath = path11.join(_getCacheDirectory(), toolName); + if (fs11.existsSync(toolPath)) { + const children = fs11.readdirSync(toolPath); for (const child of children) { if (isExplicitVersion(child)) { - const fullPath = path12.join(toolPath, child, arch2 || ""); - if (fs12.existsSync(fullPath) && fs12.existsSync(`${fullPath}.complete`)) { + const fullPath = path11.join(toolPath, child, arch2 || ""); + if (fs11.existsSync(fullPath) && fs11.existsSync(`${fullPath}.complete`)) { versions.push(child); } } @@ -99586,7 +99586,7 @@ var require_tool_cache = __commonJS({ function _createExtractFolder(dest) { return __awaiter2(this, void 0, void 0, function* () { if (!dest) { - dest = path12.join(_getTempDirectory(), crypto2.randomUUID()); + dest = path11.join(_getTempDirectory(), crypto2.randomUUID()); } yield io6.mkdirP(dest); return dest; @@ -99594,7 +99594,7 @@ var require_tool_cache = __commonJS({ } function _createToolPath(tool, version, arch2) { return __awaiter2(this, void 0, void 0, function* () { - const folderPath = path12.join(_getCacheDirectory(), tool, 
semver9.clean(version) || version, arch2 || ""); + const folderPath = path11.join(_getCacheDirectory(), tool, semver9.clean(version) || version, arch2 || ""); core12.debug(`destination ${folderPath}`); const markerPath = `${folderPath}.complete`; yield io6.rmRF(folderPath); @@ -99604,9 +99604,9 @@ var require_tool_cache = __commonJS({ }); } function _completeToolPath(tool, version, arch2) { - const folderPath = path12.join(_getCacheDirectory(), tool, semver9.clean(version) || version, arch2 || ""); + const folderPath = path11.join(_getCacheDirectory(), tool, semver9.clean(version) || version, arch2 || ""); const markerPath = `${folderPath}.complete`; - fs12.writeFileSync(markerPath, ""); + fs11.writeFileSync(markerPath, ""); core12.debug("finished caching tool"); } function isExplicitVersion(versionSpec) { @@ -103131,8 +103131,8 @@ __export(upload_lib_exports, { writePostProcessedFiles: () => writePostProcessedFiles }); module.exports = __toCommonJS(upload_lib_exports); -var fs11 = __toESM(require("fs")); -var path11 = __toESM(require("path")); +var fs10 = __toESM(require("fs")); +var path10 = __toESM(require("path")); var url = __toESM(require("url")); var import_zlib = __toESM(require("zlib")); var core11 = __toESM(require_core()); @@ -103160,21 +103160,21 @@ async function getFolderSize(itemPath, options) { getFolderSize.loose = async (itemPath, options) => await core(itemPath, options); getFolderSize.strict = async (itemPath, options) => await core(itemPath, options, { strict: true }); async function core(rootItemPath, options = {}, returnType = {}) { - const fs12 = options.fs || await import("node:fs/promises"); + const fs11 = options.fs || await import("node:fs/promises"); let folderSize = 0n; const foundInos = /* @__PURE__ */ new Set(); const errors = []; await processItem(rootItemPath); async function processItem(itemPath) { if (options.ignore?.test(itemPath)) return; - const stats = returnType.strict ? await fs12.lstat(itemPath, { bigint: true }) : await fs12.lstat(itemPath, { bigint: true }).catch((error3) => errors.push(error3)); + const stats = returnType.strict ? await fs11.lstat(itemPath, { bigint: true }) : await fs11.lstat(itemPath, { bigint: true }).catch((error3) => errors.push(error3)); if (typeof stats !== "object") return; if (!foundInos.has(stats.ino)) { foundInos.add(stats.ino); folderSize += stats.size; } if (stats.isDirectory()) { - const directoryItems = returnType.strict ? await fs12.readdir(itemPath) : await fs12.readdir(itemPath).catch((error3) => errors.push(error3)); + const directoryItems = returnType.strict ? 
await fs11.readdir(itemPath) : await fs11.readdir(itemPath).catch((error3) => errors.push(error3)); if (typeof directoryItems !== "object") return; await Promise.all( directoryItems.map( @@ -105921,8 +105921,8 @@ function getBaseDatabaseOidsFilePath(config) { } async function delay(milliseconds, opts) { const { allowProcessExit } = opts || {}; - return new Promise((resolve6) => { - const timer = setTimeout(resolve6, milliseconds); + return new Promise((resolve5) => { + const timer = setTimeout(resolve5, milliseconds); if (allowProcessExit) { timer.unref(); } @@ -106459,363 +106459,12 @@ function wrapApiConfigurationError(e) { return e; } -// src/codeql.ts -var fs9 = __toESM(require("fs")); -var path9 = __toESM(require("path")); -var core10 = __toESM(require_core()); -var toolrunner3 = __toESM(require_toolrunner()); - -// src/cli-errors.ts -var SUPPORTED_PLATFORMS = [ - ["linux", "x64"], - ["win32", "x64"], - ["darwin", "x64"], - ["darwin", "arm64"] -]; -var CliError = class extends Error { - exitCode; - stderr; - constructor({ cmd, args, exitCode, stderr }) { - const prettyCommand = prettyPrintInvocation(cmd, args); - const fatalErrors = extractFatalErrors(stderr); - const autobuildErrors = extractAutobuildErrors(stderr); - let message; - if (fatalErrors) { - message = `Encountered a fatal error while running "${prettyCommand}". Exit code was ${exitCode} and error was: ${ensureEndsInPeriod( - fatalErrors.trim() - )} See the logs for more details.`; - } else if (autobuildErrors) { - message = `We were unable to automatically build your code. Please provide manual build steps. See ${"https://docs.github.com/en/code-security/code-scanning/troubleshooting-code-scanning/automatic-build-failed" /* AUTOMATIC_BUILD_FAILED */} for more information. Encountered the following error: ${autobuildErrors}`; - } else { - const lastLine = ensureEndsInPeriod( - stderr.trim().split("\n").pop()?.trim() || "n/a" - ); - message = `Encountered a fatal error while running "${prettyCommand}". Exit code was ${exitCode} and last log line was: ${lastLine} See the logs for more details.`; - } - super(message); - this.exitCode = exitCode; - this.stderr = stderr; - } -}; -function extractFatalErrors(error3) { - const fatalErrorRegex = /.*fatal (internal )?error occurr?ed(. Details)?:/gi; - let fatalErrors = []; - let lastFatalErrorIndex; - let match; - while ((match = fatalErrorRegex.exec(error3)) !== null) { - if (lastFatalErrorIndex !== void 0) { - fatalErrors.push(error3.slice(lastFatalErrorIndex, match.index).trim()); - } - lastFatalErrorIndex = match.index; - } - if (lastFatalErrorIndex !== void 0) { - const lastError = error3.slice(lastFatalErrorIndex).trim(); - if (fatalErrors.length === 0) { - return lastError; - } - const isOneLiner = !fatalErrors.some((e) => e.includes("\n")); - if (isOneLiner) { - fatalErrors = fatalErrors.map(ensureEndsInPeriod); - } - return [ - ensureEndsInPeriod(lastError), - "Context:", - ...fatalErrors.reverse() - ].join(isOneLiner ? 
" " : "\n"); - } - return void 0; -} -function extractAutobuildErrors(error3) { - const pattern = /.*\[autobuild\] \[ERROR\] (.*)/gi; - let errorLines = [...error3.matchAll(pattern)].map((match) => match[1]); - if (errorLines.length > 10) { - errorLines = errorLines.slice(0, 10); - errorLines.push("(truncated)"); - } - return errorLines.join("\n") || void 0; -} -var cliErrorsConfig = { - ["AutobuildError" /* AutobuildError */]: { - cliErrorMessageCandidates: [ - new RegExp("We were unable to automatically build your code") - ] - }, - ["CouldNotCreateTempDir" /* CouldNotCreateTempDir */]: { - cliErrorMessageCandidates: [new RegExp("Could not create temp directory")] - }, - ["ExternalRepositoryCloneFailed" /* ExternalRepositoryCloneFailed */]: { - cliErrorMessageCandidates: [ - new RegExp("Failed to clone external Git repository") - ] - }, - ["GradleBuildFailed" /* GradleBuildFailed */]: { - cliErrorMessageCandidates: [ - new RegExp("\\[autobuild\\] FAILURE: Build failed with an exception.") - ] - }, - // Version of CodeQL CLI is incompatible with this version of the CodeQL Action - ["IncompatibleWithActionVersion" /* IncompatibleWithActionVersion */]: { - cliErrorMessageCandidates: [ - new RegExp("is not compatible with this CodeQL CLI") - ] - }, - ["InitCalledTwice" /* InitCalledTwice */]: { - cliErrorMessageCandidates: [ - new RegExp( - "Refusing to create databases .* but could not process any of it" - ) - ], - additionalErrorMessageToAppend: `Is the "init" action called twice in the same job?` - }, - ["InvalidConfigFile" /* InvalidConfigFile */]: { - cliErrorMessageCandidates: [ - new RegExp("Config file .* is not valid"), - new RegExp("The supplied config file is empty") - ] - }, - ["InvalidExternalRepoSpecifier" /* InvalidExternalRepoSpecifier */]: { - cliErrorMessageCandidates: [ - new RegExp("Specifier for external repository is invalid") - ] - }, - // Expected source location for database creation does not exist - ["InvalidSourceRoot" /* InvalidSourceRoot */]: { - cliErrorMessageCandidates: [new RegExp("Invalid source root")] - }, - ["MavenBuildFailed" /* MavenBuildFailed */]: { - cliErrorMessageCandidates: [ - new RegExp("\\[autobuild\\] \\[ERROR\\] Failed to execute goal") - ] - }, - ["NoBuildCommandAutodetected" /* NoBuildCommandAutodetected */]: { - cliErrorMessageCandidates: [ - new RegExp("Could not auto-detect a suitable build method") - ] - }, - ["NoBuildMethodAutodetected" /* NoBuildMethodAutodetected */]: { - cliErrorMessageCandidates: [ - new RegExp( - "Could not detect a suitable build command for the source checkout" - ) - ] - }, - // Usually when a manual build script has failed, or if an autodetected language - // was unintended to have CodeQL analysis run on it. 
- ["NoSourceCodeSeen" /* NoSourceCodeSeen */]: { - exitCode: 32, - cliErrorMessageCandidates: [ - new RegExp( - "CodeQL detected code written in .* but could not process any of it" - ), - new RegExp( - "CodeQL did not detect any code written in languages supported by CodeQL" - ) - ] - }, - ["NoSupportedBuildCommandSucceeded" /* NoSupportedBuildCommandSucceeded */]: { - cliErrorMessageCandidates: [ - new RegExp("No supported build command succeeded") - ] - }, - ["NoSupportedBuildSystemDetected" /* NoSupportedBuildSystemDetected */]: { - cliErrorMessageCandidates: [ - new RegExp("No supported build system detected") - ] - }, - ["OutOfMemoryOrDisk" /* OutOfMemoryOrDisk */]: { - cliErrorMessageCandidates: [ - new RegExp("CodeQL is out of memory."), - new RegExp("out of disk"), - new RegExp("No space left on device") - ], - additionalErrorMessageToAppend: "For more information, see https://gh.io/troubleshooting-code-scanning/out-of-disk-or-memory" - }, - ["PackCannotBeFound" /* PackCannotBeFound */]: { - cliErrorMessageCandidates: [ - new RegExp( - "Query pack .* cannot be found\\. Check the spelling of the pack\\." - ), - new RegExp( - "is not a .ql file, .qls file, a directory, or a query pack specification." - ) - ] - }, - ["PackMissingAuth" /* PackMissingAuth */]: { - cliErrorMessageCandidates: [ - new RegExp("GitHub Container registry .* 403 Forbidden"), - new RegExp( - "Do you need to specify a token to authenticate to the registry?" - ) - ] - }, - ["SwiftBuildFailed" /* SwiftBuildFailed */]: { - cliErrorMessageCandidates: [ - new RegExp( - "\\[autobuilder/build\\] \\[build-command-failed\\] `autobuild` failed to run the build command" - ) - ] - }, - ["UnsupportedBuildMode" /* UnsupportedBuildMode */]: { - cliErrorMessageCandidates: [ - new RegExp( - "does not support the .* build mode. Please try using one of the following build modes instead" - ) - ] - }, - ["NotFoundInRegistry" /* NotFoundInRegistry */]: { - cliErrorMessageCandidates: [ - new RegExp("'.*' not found in the registry '.*'") - ] - } -}; -function getCliConfigCategoryIfExists(cliError) { - for (const [category, configuration] of Object.entries(cliErrorsConfig)) { - if (cliError.exitCode !== void 0 && configuration.exitCode !== void 0 && cliError.exitCode === configuration.exitCode) { - return category; - } - for (const e of configuration.cliErrorMessageCandidates) { - if (cliError.message.match(e) || cliError.stderr.match(e)) { - return category; - } - } - } - return void 0; -} -function isUnsupportedPlatform() { - return !SUPPORTED_PLATFORMS.some( - ([platform, arch2]) => platform === process.platform && arch2 === process.arch - ); -} -function getUnsupportedPlatformError(cliError) { - return new ConfigurationError( - `The CodeQL CLI does not support the platform/architecture combination of ${process.platform}/${process.arch} (see ${"https://codeql.github.com/docs/codeql-overview/system-requirements/" /* SYSTEM_REQUIREMENTS */}). 
The underlying error was: ${cliError.message}` - ); -} -function wrapCliConfigurationError(cliError) { - if (isUnsupportedPlatform()) { - return getUnsupportedPlatformError(cliError); - } - const cliConfigErrorCategory = getCliConfigCategoryIfExists(cliError); - if (cliConfigErrorCategory === void 0) { - return cliError; - } - let errorMessageBuilder = cliError.message; - const additionalErrorMessageToAppend = cliErrorsConfig[cliConfigErrorCategory].additionalErrorMessageToAppend; - if (additionalErrorMessageToAppend !== void 0) { - errorMessageBuilder = `${errorMessageBuilder} ${additionalErrorMessageToAppend}`; - } - return new ConfigurationError(errorMessageBuilder); -} - -// src/config-utils.ts -var fs5 = __toESM(require("fs")); -var path6 = __toESM(require("path")); - -// src/caching-utils.ts -var core6 = __toESM(require_core()); - -// src/config/db-config.ts -var jsonschema = __toESM(require_lib2()); -var semver2 = __toESM(require_semver2()); -var PACK_IDENTIFIER_PATTERN = (function() { - const alphaNumeric = "[a-z0-9]"; - const alphaNumericDash = "[a-z0-9-]"; - const component = `${alphaNumeric}(${alphaNumericDash}*${alphaNumeric})?`; - return new RegExp(`^${component}/${component}$`); -})(); - -// src/diagnostics.ts -var import_fs = require("fs"); -var import_path = __toESM(require("path")); - -// src/logging.ts -var core7 = __toESM(require_core()); -function getActionsLogger() { - return { - debug: core7.debug, - info: core7.info, - warning: core7.warning, - error: core7.error, - isDebug: core7.isDebug, - startGroup: core7.startGroup, - endGroup: core7.endGroup - }; -} -function formatDuration(durationMs) { - if (durationMs < 1e3) { - return `${durationMs}ms`; - } - if (durationMs < 60 * 1e3) { - return `${(durationMs / 1e3).toFixed(1)}s`; - } - const minutes = Math.floor(durationMs / (60 * 1e3)); - const seconds = Math.floor(durationMs % (60 * 1e3) / 1e3); - return `${minutes}m${seconds}s`; -} - -// src/diagnostics.ts -var unwrittenDiagnostics = []; -var unwrittenDefaultLanguageDiagnostics = []; -function makeDiagnostic(id, name, data = void 0) { - return { - ...data, - timestamp: data?.timestamp ?? (/* @__PURE__ */ new Date()).toISOString(), - source: { ...data?.source, id, name } - }; -} -function addDiagnostic(config, language, diagnostic) { - const logger = getActionsLogger(); - const databasePath = language ? getCodeQLDatabasePath(config, language) : config.dbLocation; - if ((0, import_fs.existsSync)(databasePath)) { - writeDiagnostic(config, language, diagnostic); - } else { - logger.debug( - `Writing a diagnostic for ${language}, but the database at ${databasePath} does not exist yet.` - ); - unwrittenDiagnostics.push({ diagnostic, language }); - } -} -function addNoLanguageDiagnostic(config, diagnostic) { - if (config !== void 0) { - addDiagnostic( - config, - // Arbitrarily choose the first language. We could also choose all languages, but that - // increases the risk of misinterpreting the data. - config.languages[0], - diagnostic - ); - } else { - unwrittenDefaultLanguageDiagnostics.push(diagnostic); - } -} -function writeDiagnostic(config, language, diagnostic) { - const logger = getActionsLogger(); - const databasePath = language ? 
getCodeQLDatabasePath(config, language) : config.dbLocation; - const diagnosticsPath = import_path.default.resolve( - databasePath, - "diagnostic", - "codeql-action" - ); - try { - (0, import_fs.mkdirSync)(diagnosticsPath, { recursive: true }); - const jsonPath = import_path.default.resolve( - diagnosticsPath, - // Remove colons from the timestamp as these are not allowed in Windows filenames. - `codeql-action-${diagnostic.timestamp.replaceAll(":", "")}.json` - ); - (0, import_fs.writeFileSync)(jsonPath, JSON.stringify(diagnostic)); - } catch (err) { - logger.warning(`Unable to write diagnostic message to database: ${err}`); - logger.debug(JSON.stringify(diagnostic)); - } -} - // src/diff-informed-analysis-utils.ts var fs4 = __toESM(require("fs")); -var path5 = __toESM(require("path")); +var path4 = __toESM(require("path")); // src/feature-flags.ts -var semver5 = __toESM(require_semver2()); +var semver4 = __toESM(require_semver2()); // src/defaults.json var bundleVersion = "codeql-bundle-v2.24.2"; @@ -106823,18 +106472,21 @@ var cliVersion = "2.24.2"; // src/overlay-database-utils.ts var fs3 = __toESM(require("fs")); -var path4 = __toESM(require("path")); +var path3 = __toESM(require("path")); var actionsCache = __toESM(require_cache5()); +// src/caching-utils.ts +var core6 = __toESM(require_core()); + // src/git-utils.ts -var core8 = __toESM(require_core()); +var core7 = __toESM(require_core()); var toolrunner2 = __toESM(require_toolrunner()); var io3 = __toESM(require_io()); -var semver3 = __toESM(require_semver2()); +var semver2 = __toESM(require_semver2()); var runGitCommand = async function(workingDirectory, args, customErrorMessage, options) { let stdout = ""; let stderr = ""; - core8.debug(`Running git command: git ${args.join(" ")}`); + core7.debug(`Running git command: git ${args.join(" ")}`); try { await new toolrunner2.ToolRunner(await io3.which("git", true), args, { silent: true, @@ -106855,7 +106507,7 @@ var runGitCommand = async function(workingDirectory, args, customErrorMessage, o if (stderr.includes("not a git repository")) { reason = "The checkout path provided to the action does not appear to be a git repository."; } - core8.info(`git call failed. ${customErrorMessage} Error: ${reason}`); + core7.info(`git call failed. 
${customErrorMessage} Error: ${reason}`); throw error3; } }; @@ -106951,8 +106603,8 @@ var getFileOidsUnderPath = async function(basePath) { const match = line.match(regex); if (match) { const oid = match[1]; - const path12 = decodeGitFilePath(match[2]); - fileOidMap[path12] = oid; + const path11 = decodeGitFilePath(match[2]); + fileOidMap[path11] = oid; } else { throw new Error(`Unexpected "git ls-files" output: ${line}`); } @@ -107000,7 +106652,7 @@ async function getRef() { ) !== head; if (hasChangedRef) { const newRef = ref.replace(pull_ref_regex, "refs/pull/$1/head"); - core8.debug( + core7.debug( `No longer on merge commit, rewriting ref from ${ref} to ${newRef}.` ); return newRef; @@ -107025,6 +106677,31 @@ async function isAnalyzingDefaultBranch() { return currentRef === defaultBranch; } +// src/logging.ts +var core8 = __toESM(require_core()); +function getActionsLogger() { + return { + debug: core8.debug, + info: core8.info, + warning: core8.warning, + error: core8.error, + isDebug: core8.isDebug, + startGroup: core8.startGroup, + endGroup: core8.endGroup + }; +} +function formatDuration(durationMs) { + if (durationMs < 1e3) { + return `${durationMs}ms`; + } + if (durationMs < 60 * 1e3) { + return `${(durationMs / 1e3).toFixed(1)}s`; + } + const minutes = Math.floor(durationMs / (60 * 1e3)); + const seconds = Math.floor(durationMs % (60 * 1e3) / 1e3); + return `${minutes}m${seconds}s`; +} + // src/overlay-database-utils.ts var CODEQL_OVERLAY_MINIMUM_VERSION = "2.23.8"; var OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_MB = 7500; @@ -107058,7 +106735,7 @@ async function writeOverlayChangesFile(config, sourceRoot, logger) { `Found ${changedFiles.length} changed file(s) under ${sourceRoot}.` ); const changedFilesJson = JSON.stringify({ changes: changedFiles }); - const overlayChangesFile = path4.join( + const overlayChangesFile = path3.join( getTemporaryDirectory(), "overlay-changes.json" ); @@ -107084,7 +106761,7 @@ function computeChangedFiles(baseFileOids, overlayFileOids) { } // src/tools-features.ts -var semver4 = __toESM(require_semver2()); +var semver3 = __toESM(require_semver2()); function isSupportedToolsFeature(versionInfo, feature) { return !!versionInfo.features && versionInfo.features[feature]; } @@ -107322,7 +106999,7 @@ var featureConfig = { // src/diff-informed-analysis-utils.ts function getDiffRangesJsonFilePath() { - return path5.join(getTemporaryDirectory(), "pr-diff-range.json"); + return path4.join(getTemporaryDirectory(), "pr-diff-range.json"); } function readDiffRangesJsonFile(logger) { const jsonFilePath = getDiffRangesJsonFilePath(); @@ -107338,1178 +107015,2591 @@ ${jsonContents}` return JSON.parse(jsonContents); } -// src/trap-caching.ts -var actionsCache2 = __toESM(require_cache5()); +// src/fingerprints.ts +var fs5 = __toESM(require("fs")); +var import_path = __toESM(require("path")); -// src/config-utils.ts -var OVERLAY_MINIMUM_AVAILABLE_DISK_SPACE_MB = 2e4; -var OVERLAY_MINIMUM_AVAILABLE_DISK_SPACE_BYTES = OVERLAY_MINIMUM_AVAILABLE_DISK_SPACE_MB * 1e6; -var OVERLAY_MINIMUM_MEMORY_MB = 5 * 1024; -var OVERLAY_ANALYSIS_FEATURES = { - actions: "overlay_analysis_actions" /* OverlayAnalysisActions */, - cpp: "overlay_analysis_cpp" /* OverlayAnalysisCpp */, - csharp: "overlay_analysis_csharp" /* OverlayAnalysisCsharp */, - go: "overlay_analysis_go" /* OverlayAnalysisGo */, - java: "overlay_analysis_java" /* OverlayAnalysisJava */, - javascript: "overlay_analysis_javascript" /* OverlayAnalysisJavascript */, - python: "overlay_analysis_python" /* OverlayAnalysisPython 
*/, - ruby: "overlay_analysis_ruby" /* OverlayAnalysisRuby */, - rust: "overlay_analysis_rust" /* OverlayAnalysisRust */, - swift: "overlay_analysis_swift" /* OverlayAnalysisSwift */ -}; -var OVERLAY_ANALYSIS_CODE_SCANNING_FEATURES = { - actions: "overlay_analysis_code_scanning_actions" /* OverlayAnalysisCodeScanningActions */, - cpp: "overlay_analysis_code_scanning_cpp" /* OverlayAnalysisCodeScanningCpp */, - csharp: "overlay_analysis_code_scanning_csharp" /* OverlayAnalysisCodeScanningCsharp */, - go: "overlay_analysis_code_scanning_go" /* OverlayAnalysisCodeScanningGo */, - java: "overlay_analysis_code_scanning_java" /* OverlayAnalysisCodeScanningJava */, - javascript: "overlay_analysis_code_scanning_javascript" /* OverlayAnalysisCodeScanningJavascript */, - python: "overlay_analysis_code_scanning_python" /* OverlayAnalysisCodeScanningPython */, - ruby: "overlay_analysis_code_scanning_ruby" /* OverlayAnalysisCodeScanningRuby */, - rust: "overlay_analysis_code_scanning_rust" /* OverlayAnalysisCodeScanningRust */, - swift: "overlay_analysis_code_scanning_swift" /* OverlayAnalysisCodeScanningSwift */ -}; -function getPathToParsedConfigFile(tempDir) { - return path6.join(tempDir, "config"); -} -async function getConfig(tempDir, logger) { - const configFile = getPathToParsedConfigFile(tempDir); - if (!fs5.existsSync(configFile)) { - return void 0; - } - const configString = fs5.readFileSync(configFile, "utf8"); - logger.debug("Loaded config:"); - logger.debug(configString); - const config = JSON.parse(configString); - if (config.version === void 0) { - throw new ConfigurationError( - `Loaded configuration file, but it does not contain the expected 'version' field.` - ); - } - if (config.version !== getActionVersion()) { - throw new ConfigurationError( - `Loaded a configuration file for version '${config.version}', but running version '${getActionVersion()}'` - ); - } - return config; -} -function appendExtraQueryExclusions(extraQueryExclusions, cliConfig) { - const augmentedConfig = cloneObject(cliConfig); - if (extraQueryExclusions.length === 0) { - return augmentedConfig; - } - augmentedConfig["query-filters"] = [ - // Ordering matters. If the first filter is an inclusion, it implicitly - // excludes all queries that are not included. If it is an exclusion, - // it implicitly includes all queries that are not excluded. So user - // filters (if any) should always be first to preserve intent. 
- ...augmentedConfig["query-filters"] || [], - ...extraQueryExclusions - ]; - if (augmentedConfig["query-filters"]?.length === 0) { - delete augmentedConfig["query-filters"]; - } - return augmentedConfig; +// node_modules/long/index.js +var wasm = null; +try { + wasm = new WebAssembly.Instance( + new WebAssembly.Module( + new Uint8Array([ + // \0asm + 0, + 97, + 115, + 109, + // version 1 + 1, + 0, + 0, + 0, + // section "type" + 1, + 13, + 2, + // 0, () => i32 + 96, + 0, + 1, + 127, + // 1, (i32, i32, i32, i32) => i32 + 96, + 4, + 127, + 127, + 127, + 127, + 1, + 127, + // section "function" + 3, + 7, + 6, + // 0, type 0 + 0, + // 1, type 1 + 1, + // 2, type 1 + 1, + // 3, type 1 + 1, + // 4, type 1 + 1, + // 5, type 1 + 1, + // section "global" + 6, + 6, + 1, + // 0, "high", mutable i32 + 127, + 1, + 65, + 0, + 11, + // section "export" + 7, + 50, + 6, + // 0, "mul" + 3, + 109, + 117, + 108, + 0, + 1, + // 1, "div_s" + 5, + 100, + 105, + 118, + 95, + 115, + 0, + 2, + // 2, "div_u" + 5, + 100, + 105, + 118, + 95, + 117, + 0, + 3, + // 3, "rem_s" + 5, + 114, + 101, + 109, + 95, + 115, + 0, + 4, + // 4, "rem_u" + 5, + 114, + 101, + 109, + 95, + 117, + 0, + 5, + // 5, "get_high" + 8, + 103, + 101, + 116, + 95, + 104, + 105, + 103, + 104, + 0, + 0, + // section "code" + 10, + 191, + 1, + 6, + // 0, "get_high" + 4, + 0, + 35, + 0, + 11, + // 1, "mul" + 36, + 1, + 1, + 126, + 32, + 0, + 173, + 32, + 1, + 173, + 66, + 32, + 134, + 132, + 32, + 2, + 173, + 32, + 3, + 173, + 66, + 32, + 134, + 132, + 126, + 34, + 4, + 66, + 32, + 135, + 167, + 36, + 0, + 32, + 4, + 167, + 11, + // 2, "div_s" + 36, + 1, + 1, + 126, + 32, + 0, + 173, + 32, + 1, + 173, + 66, + 32, + 134, + 132, + 32, + 2, + 173, + 32, + 3, + 173, + 66, + 32, + 134, + 132, + 127, + 34, + 4, + 66, + 32, + 135, + 167, + 36, + 0, + 32, + 4, + 167, + 11, + // 3, "div_u" + 36, + 1, + 1, + 126, + 32, + 0, + 173, + 32, + 1, + 173, + 66, + 32, + 134, + 132, + 32, + 2, + 173, + 32, + 3, + 173, + 66, + 32, + 134, + 132, + 128, + 34, + 4, + 66, + 32, + 135, + 167, + 36, + 0, + 32, + 4, + 167, + 11, + // 4, "rem_s" + 36, + 1, + 1, + 126, + 32, + 0, + 173, + 32, + 1, + 173, + 66, + 32, + 134, + 132, + 32, + 2, + 173, + 32, + 3, + 173, + 66, + 32, + 134, + 132, + 129, + 34, + 4, + 66, + 32, + 135, + 167, + 36, + 0, + 32, + 4, + 167, + 11, + // 5, "rem_u" + 36, + 1, + 1, + 126, + 32, + 0, + 173, + 32, + 1, + 173, + 66, + 32, + 134, + 132, + 32, + 2, + 173, + 32, + 3, + 173, + 66, + 32, + 134, + 132, + 130, + 34, + 4, + 66, + 32, + 135, + 167, + 36, + 0, + 32, + 4, + 167, + 11 + ]) + ), + {} + ).exports; +} catch { } - -// src/setup-codeql.ts -var fs8 = __toESM(require("fs")); -var path8 = __toESM(require("path")); -var toolcache3 = __toESM(require_tool_cache()); -var import_fast_deep_equal = __toESM(require_fast_deep_equal()); -var semver8 = __toESM(require_semver2()); - -// node_modules/uuid/dist-node/stringify.js -var byteToHex = []; -for (let i = 0; i < 256; ++i) { - byteToHex.push((i + 256).toString(16).slice(1)); +function Long(low, high, unsigned) { + this.low = low | 0; + this.high = high | 0; + this.unsigned = !!unsigned; } -function unsafeStringify(arr, offset = 0) { - return (byteToHex[arr[offset + 0]] + byteToHex[arr[offset + 1]] + byteToHex[arr[offset + 2]] + byteToHex[arr[offset + 3]] + "-" + byteToHex[arr[offset + 4]] + byteToHex[arr[offset + 5]] + "-" + byteToHex[arr[offset + 6]] + byteToHex[arr[offset + 7]] + "-" + byteToHex[arr[offset + 8]] + byteToHex[arr[offset + 9]] + "-" + byteToHex[arr[offset + 10]] + byteToHex[arr[offset + 
11]] + byteToHex[arr[offset + 12]] + byteToHex[arr[offset + 13]] + byteToHex[arr[offset + 14]] + byteToHex[arr[offset + 15]]).toLowerCase(); +Long.prototype.__isLong__; +Object.defineProperty(Long.prototype, "__isLong__", { value: true }); +function isLong(obj) { + return (obj && obj["__isLong__"]) === true; } - -// node_modules/uuid/dist-node/rng.js -var import_node_crypto = require("node:crypto"); -var rnds8Pool = new Uint8Array(256); -var poolPtr = rnds8Pool.length; -function rng() { - if (poolPtr > rnds8Pool.length - 16) { - (0, import_node_crypto.randomFillSync)(rnds8Pool); - poolPtr = 0; - } - return rnds8Pool.slice(poolPtr, poolPtr += 16); +function ctz32(value) { + var c = Math.clz32(value & -value); + return value ? 31 - c : c; } - -// node_modules/uuid/dist-node/native.js -var import_node_crypto2 = require("node:crypto"); -var native_default = { randomUUID: import_node_crypto2.randomUUID }; - -// node_modules/uuid/dist-node/v4.js -function _v4(options, buf, offset) { - options = options || {}; - const rnds = options.random ?? options.rng?.() ?? rng(); - if (rnds.length < 16) { - throw new Error("Random bytes length must be >= 16"); - } - rnds[6] = rnds[6] & 15 | 64; - rnds[8] = rnds[8] & 63 | 128; - if (buf) { - offset = offset || 0; - if (offset < 0 || offset + 16 > buf.length) { - throw new RangeError(`UUID byte range ${offset}:${offset + 15} is out of buffer bounds`); +Long.isLong = isLong; +var INT_CACHE = {}; +var UINT_CACHE = {}; +function fromInt(value, unsigned) { + var obj, cachedObj, cache; + if (unsigned) { + value >>>= 0; + if (cache = 0 <= value && value < 256) { + cachedObj = UINT_CACHE[value]; + if (cachedObj) return cachedObj; } - for (let i = 0; i < 16; ++i) { - buf[offset + i] = rnds[i]; + obj = fromBits(value, 0, true); + if (cache) UINT_CACHE[value] = obj; + return obj; + } else { + value |= 0; + if (cache = -128 <= value && value < 128) { + cachedObj = INT_CACHE[value]; + if (cachedObj) return cachedObj; } - return buf; + obj = fromBits(value, value < 0 ? -1 : 0, false); + if (cache) INT_CACHE[value] = obj; + return obj; } - return unsafeStringify(rnds); } -function v4(options, buf, offset) { - if (native_default.randomUUID && !buf && !options) { - return native_default.randomUUID(); +Long.fromInt = fromInt; +function fromNumber(value, unsigned) { + if (isNaN(value)) return unsigned ? 
UZERO : ZERO; + if (unsigned) { + if (value < 0) return UZERO; + if (value >= TWO_PWR_64_DBL) return MAX_UNSIGNED_VALUE; + } else { + if (value <= -TWO_PWR_63_DBL) return MIN_VALUE; + if (value + 1 >= TWO_PWR_63_DBL) return MAX_VALUE; } - return _v4(options, buf, offset); + if (value < 0) return fromNumber(-value, unsigned).neg(); + return fromBits( + value % TWO_PWR_32_DBL | 0, + value / TWO_PWR_32_DBL | 0, + unsigned + ); } -var v4_default = v4; - -// src/tar.ts -var import_child_process = require("child_process"); -var fs6 = __toESM(require("fs")); -var stream = __toESM(require("stream")); -var import_toolrunner = __toESM(require_toolrunner()); -var io4 = __toESM(require_io()); -var toolcache = __toESM(require_tool_cache()); -var semver6 = __toESM(require_semver2()); -var MIN_REQUIRED_BSD_TAR_VERSION = "3.4.3"; -var MIN_REQUIRED_GNU_TAR_VERSION = "1.31"; -async function getTarVersion() { - const tar = await io4.which("tar", true); - let stdout = ""; - const exitCode = await new import_toolrunner.ToolRunner(tar, ["--version"], { - listeners: { - stdout: (data) => { - stdout += data.toString(); - } - } - }).exec(); - if (exitCode !== 0) { - throw new Error("Failed to call tar --version"); - } - if (stdout.includes("GNU tar")) { - const match = stdout.match(/tar \(GNU tar\) ([0-9.]+)/); - if (!match?.[1]) { - throw new Error("Failed to parse output of tar --version."); - } - return { type: "gnu", version: match[1] }; - } else if (stdout.includes("bsdtar")) { - const match = stdout.match(/bsdtar ([0-9.]+)/); - if (!match?.[1]) { - throw new Error("Failed to parse output of tar --version."); - } - return { type: "bsd", version: match[1] }; +Long.fromNumber = fromNumber; +function fromBits(lowBits, highBits, unsigned) { + return new Long(lowBits, highBits, unsigned); +} +Long.fromBits = fromBits; +var pow_dbl = Math.pow; +function fromString(str2, unsigned, radix) { + if (str2.length === 0) throw Error("empty string"); + if (typeof unsigned === "number") { + radix = unsigned; + unsigned = false; } else { - throw new Error("Unknown tar version"); + unsigned = !!unsigned; } -} -async function isZstdAvailable(logger) { - const foundZstdBinary = await isBinaryAccessible("zstd", logger); - try { - const tarVersion = await getTarVersion(); - const { type: type2, version } = tarVersion; - logger.info(`Found ${type2} tar version ${version}.`); - switch (type2) { - case "gnu": - return { - available: foundZstdBinary && // GNU tar only uses major and minor version numbers - semver6.gte( - semver6.coerce(version), - semver6.coerce(MIN_REQUIRED_GNU_TAR_VERSION) - ), - foundZstdBinary, - version: tarVersion - }; - case "bsd": - return { - available: foundZstdBinary && // Do a loose comparison since these version numbers don't contain - // a patch version number. - semver6.gte(version, MIN_REQUIRED_BSD_TAR_VERSION), - foundZstdBinary, - version: tarVersion - }; - default: - assertNever(type2); - } - } catch (e) { - logger.warning( - `Failed to determine tar version, therefore will assume zstd is not available. The underlying error was: ${e}` - ); - return { available: false, foundZstdBinary }; + if (str2 === "NaN" || str2 === "Infinity" || str2 === "+Infinity" || str2 === "-Infinity") + return unsigned ? 
UZERO : ZERO; + radix = radix || 10; + if (radix < 2 || 36 < radix) throw RangeError("radix"); + var p; + if ((p = str2.indexOf("-")) > 0) throw Error("interior hyphen"); + else if (p === 0) { + return fromString(str2.substring(1), unsigned, radix).neg(); } -} -async function extract(tarPath, dest, compressionMethod, tarVersion, logger) { - fs6.mkdirSync(dest, { recursive: true }); - switch (compressionMethod) { - case "gzip": - return await toolcache.extractTar(tarPath, dest); - case "zstd": { - if (!tarVersion) { - throw new Error( - "Could not determine tar version, which is required to extract a Zstandard archive." - ); - } - await extractTarZst(tarPath, dest, tarVersion, logger); - return dest; + var radixToPower = fromNumber(pow_dbl(radix, 8)); + var result = ZERO; + for (var i = 0; i < str2.length; i += 8) { + var size = Math.min(8, str2.length - i), value = parseInt(str2.substring(i, i + size), radix); + if (size < 8) { + var power = fromNumber(pow_dbl(radix, size)); + result = result.mul(power).add(fromNumber(value)); + } else { + result = result.mul(radixToPower); + result = result.add(fromNumber(value)); } } + result.unsigned = unsigned; + return result; } -async function extractTarZst(tar, dest, tarVersion, logger) { - logger.debug( - `Extracting to ${dest}.${tar instanceof stream.Readable ? ` Input stream has high water mark ${tar.readableHighWaterMark}.` : ""}` +Long.fromString = fromString; +function fromValue(val, unsigned) { + if (typeof val === "number") return fromNumber(val, unsigned); + if (typeof val === "string") return fromString(val, unsigned); + return fromBits( + val.low, + val.high, + typeof unsigned === "boolean" ? unsigned : val.unsigned ); - try { - const args = ["-x", "--zstd", "--ignore-zeros"]; - if (tarVersion.type === "gnu") { - args.push("--warning=no-unknown-keyword"); - args.push("--overwrite"); - } - args.push("-f", tar instanceof stream.Readable ? "-" : tar, "-C", dest); - process.stdout.write(`[command]tar ${args.join(" ")} -`); - await new Promise((resolve6, reject) => { - const tarProcess = (0, import_child_process.spawn)("tar", args, { stdio: "pipe" }); - let stdout = ""; - tarProcess.stdout?.on("data", (data) => { - stdout += data.toString(); - process.stdout.write(data); - }); - let stderr = ""; - tarProcess.stderr?.on("data", (data) => { - stderr += data.toString(); - process.stdout.write(data); - }); - tarProcess.on("error", (err) => { - reject(new Error(`Error while extracting tar: ${err}`)); - }); - if (tar instanceof stream.Readable) { - tar.pipe(tarProcess.stdin).on("error", (err) => { - reject( - new Error(`Error while downloading and extracting tar: ${err}`) - ); - }); - } - tarProcess.on("exit", (code) => { - if (code !== 0) { - reject( - new CommandInvocationError( - "tar", - args, - code ?? 
void 0, - stdout, - stderr - ) - ); - } - resolve6(); - }); - }); - } catch (e) { - await cleanUpPath(dest, "extraction destination directory", logger); - throw e; - } } -var KNOWN_EXTENSIONS = { - "tar.gz": "gzip", - "tar.zst": "zstd" +Long.fromValue = fromValue; +var TWO_PWR_16_DBL = 1 << 16; +var TWO_PWR_24_DBL = 1 << 24; +var TWO_PWR_32_DBL = TWO_PWR_16_DBL * TWO_PWR_16_DBL; +var TWO_PWR_64_DBL = TWO_PWR_32_DBL * TWO_PWR_32_DBL; +var TWO_PWR_63_DBL = TWO_PWR_64_DBL / 2; +var TWO_PWR_24 = fromInt(TWO_PWR_24_DBL); +var ZERO = fromInt(0); +Long.ZERO = ZERO; +var UZERO = fromInt(0, true); +Long.UZERO = UZERO; +var ONE = fromInt(1); +Long.ONE = ONE; +var UONE = fromInt(1, true); +Long.UONE = UONE; +var NEG_ONE = fromInt(-1); +Long.NEG_ONE = NEG_ONE; +var MAX_VALUE = fromBits(4294967295 | 0, 2147483647 | 0, false); +Long.MAX_VALUE = MAX_VALUE; +var MAX_UNSIGNED_VALUE = fromBits(4294967295 | 0, 4294967295 | 0, true); +Long.MAX_UNSIGNED_VALUE = MAX_UNSIGNED_VALUE; +var MIN_VALUE = fromBits(0, 2147483648 | 0, false); +Long.MIN_VALUE = MIN_VALUE; +var LongPrototype = Long.prototype; +LongPrototype.toInt = function toInt() { + return this.unsigned ? this.low >>> 0 : this.low; }; -function inferCompressionMethod(tarPath) { - for (const [ext, method] of Object.entries(KNOWN_EXTENSIONS)) { - if (tarPath.endsWith(`.${ext}`)) { - return method; - } +LongPrototype.toNumber = function toNumber() { + if (this.unsigned) + return (this.high >>> 0) * TWO_PWR_32_DBL + (this.low >>> 0); + return this.high * TWO_PWR_32_DBL + (this.low >>> 0); +}; +LongPrototype.toString = function toString2(radix) { + radix = radix || 10; + if (radix < 2 || 36 < radix) throw RangeError("radix"); + if (this.isZero()) return "0"; + if (this.isNegative()) { + if (this.eq(MIN_VALUE)) { + var radixLong = fromNumber(radix), div = this.div(radixLong), rem1 = div.mul(radixLong).sub(this); + return div.toString(radix) + rem1.toInt().toString(radix); + } else return "-" + this.neg().toString(radix); } - return void 0; -} - -// src/tools-download.ts -var fs7 = __toESM(require("fs")); -var os = __toESM(require("os")); -var path7 = __toESM(require("path")); -var import_perf_hooks = require("perf_hooks"); -var core9 = __toESM(require_core()); -var import_http_client = __toESM(require_lib()); -var toolcache2 = __toESM(require_tool_cache()); -var import_follow_redirects = __toESM(require_follow_redirects()); -var semver7 = __toESM(require_semver2()); -var STREAMING_HIGH_WATERMARK_BYTES = 4 * 1024 * 1024; -var TOOLCACHE_TOOL_NAME = "CodeQL"; -function makeDownloadFirstToolsDownloadDurations(downloadDurationMs, extractionDurationMs) { - return { - combinedDurationMs: downloadDurationMs + extractionDurationMs, - downloadDurationMs, - extractionDurationMs, - streamExtraction: false - }; -} -function makeStreamedToolsDownloadDurations(combinedDurationMs) { - return { - combinedDurationMs, - downloadDurationMs: void 0, - extractionDurationMs: void 0, - streamExtraction: true - }; -} -async function downloadAndExtract(codeqlURL, compressionMethod, dest, authorization, headers, tarVersion, logger) { - logger.info( - `Downloading CodeQL tools from ${codeqlURL} . 
This may take a while.` - ); - try { - if (compressionMethod === "zstd" && process.platform === "linux") { - logger.info(`Streaming the extraction of the CodeQL bundle.`); - const toolsInstallStart = import_perf_hooks.performance.now(); - await downloadAndExtractZstdWithStreaming( - codeqlURL, - dest, - authorization, - headers, - tarVersion, - logger - ); - const combinedDurationMs = Math.round( - import_perf_hooks.performance.now() - toolsInstallStart - ); - logger.info( - `Finished downloading and extracting CodeQL bundle to ${dest} (${formatDuration( - combinedDurationMs - )}).` - ); - return { - compressionMethod, - toolsUrl: sanitizeUrlForStatusReport(codeqlURL), - ...makeStreamedToolsDownloadDurations(combinedDurationMs) - }; + var radixToPower = fromNumber(pow_dbl(radix, 6), this.unsigned), rem = this; + var result = ""; + while (true) { + var remDiv = rem.div(radixToPower), intval = rem.sub(remDiv.mul(radixToPower)).toInt() >>> 0, digits = intval.toString(radix); + rem = remDiv; + if (rem.isZero()) return digits + result; + else { + while (digits.length < 6) digits = "0" + digits; + result = "" + digits + result; } - } catch (e) { - core9.warning( - `Failed to download and extract CodeQL bundle using streaming with error: ${getErrorMessage(e)}` - ); - core9.warning(`Falling back to downloading the bundle before extracting.`); - await cleanUpPath(dest, "CodeQL bundle", logger); } - const toolsDownloadStart = import_perf_hooks.performance.now(); - const archivedBundlePath = await toolcache2.downloadTool( - codeqlURL, - void 0, - authorization, - headers - ); - const downloadDurationMs = Math.round(import_perf_hooks.performance.now() - toolsDownloadStart); - logger.info( - `Finished downloading CodeQL bundle to ${archivedBundlePath} (${formatDuration( - downloadDurationMs - )}).` +}; +LongPrototype.getHighBits = function getHighBits() { + return this.high; +}; +LongPrototype.getHighBitsUnsigned = function getHighBitsUnsigned() { + return this.high >>> 0; +}; +LongPrototype.getLowBits = function getLowBits() { + return this.low; +}; +LongPrototype.getLowBitsUnsigned = function getLowBitsUnsigned() { + return this.low >>> 0; +}; +LongPrototype.getNumBitsAbs = function getNumBitsAbs() { + if (this.isNegative()) + return this.eq(MIN_VALUE) ? 64 : this.neg().getNumBitsAbs(); + var val = this.high != 0 ? this.high : this.low; + for (var bit = 31; bit > 0; bit--) if ((val & 1 << bit) != 0) break; + return this.high != 0 ? 
bit + 33 : bit + 1; +}; +LongPrototype.isSafeInteger = function isSafeInteger() { + var top11Bits = this.high >> 21; + if (!top11Bits) return true; + if (this.unsigned) return false; + return top11Bits === -1 && !(this.low === 0 && this.high === -2097152); +}; +LongPrototype.isZero = function isZero() { + return this.high === 0 && this.low === 0; +}; +LongPrototype.eqz = LongPrototype.isZero; +LongPrototype.isNegative = function isNegative() { + return !this.unsigned && this.high < 0; +}; +LongPrototype.isPositive = function isPositive() { + return this.unsigned || this.high >= 0; +}; +LongPrototype.isOdd = function isOdd() { + return (this.low & 1) === 1; +}; +LongPrototype.isEven = function isEven() { + return (this.low & 1) === 0; +}; +LongPrototype.equals = function equals(other) { + if (!isLong(other)) other = fromValue(other); + if (this.unsigned !== other.unsigned && this.high >>> 31 === 1 && other.high >>> 31 === 1) + return false; + return this.high === other.high && this.low === other.low; +}; +LongPrototype.eq = LongPrototype.equals; +LongPrototype.notEquals = function notEquals(other) { + return !this.eq( + /* validates */ + other ); - let extractionDurationMs; - try { - logger.info("Extracting CodeQL bundle."); - const extractionStart = import_perf_hooks.performance.now(); - await extract( - archivedBundlePath, - dest, - compressionMethod, - tarVersion, - logger - ); - extractionDurationMs = Math.round(import_perf_hooks.performance.now() - extractionStart); - logger.info( - `Finished extracting CodeQL bundle to ${dest} (${formatDuration( - extractionDurationMs - )}).` +}; +LongPrototype.neq = LongPrototype.notEquals; +LongPrototype.ne = LongPrototype.notEquals; +LongPrototype.lessThan = function lessThan(other) { + return this.comp( + /* validates */ + other + ) < 0; +}; +LongPrototype.lt = LongPrototype.lessThan; +LongPrototype.lessThanOrEqual = function lessThanOrEqual(other) { + return this.comp( + /* validates */ + other + ) <= 0; +}; +LongPrototype.lte = LongPrototype.lessThanOrEqual; +LongPrototype.le = LongPrototype.lessThanOrEqual; +LongPrototype.greaterThan = function greaterThan(other) { + return this.comp( + /* validates */ + other + ) > 0; +}; +LongPrototype.gt = LongPrototype.greaterThan; +LongPrototype.greaterThanOrEqual = function greaterThanOrEqual(other) { + return this.comp( + /* validates */ + other + ) >= 0; +}; +LongPrototype.gte = LongPrototype.greaterThanOrEqual; +LongPrototype.ge = LongPrototype.greaterThanOrEqual; +LongPrototype.compare = function compare(other) { + if (!isLong(other)) other = fromValue(other); + if (this.eq(other)) return 0; + var thisNeg = this.isNegative(), otherNeg = other.isNegative(); + if (thisNeg && !otherNeg) return -1; + if (!thisNeg && otherNeg) return 1; + if (!this.unsigned) return this.sub(other).isNegative() ? -1 : 1; + return other.high >>> 0 > this.high >>> 0 || other.high === this.high && other.low >>> 0 > this.low >>> 0 ? 
-1 : 1; +}; +LongPrototype.comp = LongPrototype.compare; +LongPrototype.negate = function negate() { + if (!this.unsigned && this.eq(MIN_VALUE)) return MIN_VALUE; + return this.not().add(ONE); +}; +LongPrototype.neg = LongPrototype.negate; +LongPrototype.add = function add(addend) { + if (!isLong(addend)) addend = fromValue(addend); + var a48 = this.high >>> 16; + var a32 = this.high & 65535; + var a16 = this.low >>> 16; + var a00 = this.low & 65535; + var b48 = addend.high >>> 16; + var b32 = addend.high & 65535; + var b16 = addend.low >>> 16; + var b00 = addend.low & 65535; + var c48 = 0, c32 = 0, c16 = 0, c00 = 0; + c00 += a00 + b00; + c16 += c00 >>> 16; + c00 &= 65535; + c16 += a16 + b16; + c32 += c16 >>> 16; + c16 &= 65535; + c32 += a32 + b32; + c48 += c32 >>> 16; + c32 &= 65535; + c48 += a48 + b48; + c48 &= 65535; + return fromBits(c16 << 16 | c00, c48 << 16 | c32, this.unsigned); +}; +LongPrototype.subtract = function subtract(subtrahend) { + if (!isLong(subtrahend)) subtrahend = fromValue(subtrahend); + return this.add(subtrahend.neg()); +}; +LongPrototype.sub = LongPrototype.subtract; +LongPrototype.multiply = function multiply(multiplier) { + if (this.isZero()) return this; + if (!isLong(multiplier)) multiplier = fromValue(multiplier); + if (wasm) { + var low = wasm["mul"](this.low, this.high, multiplier.low, multiplier.high); + return fromBits(low, wasm["get_high"](), this.unsigned); + } + if (multiplier.isZero()) return this.unsigned ? UZERO : ZERO; + if (this.eq(MIN_VALUE)) return multiplier.isOdd() ? MIN_VALUE : ZERO; + if (multiplier.eq(MIN_VALUE)) return this.isOdd() ? MIN_VALUE : ZERO; + if (this.isNegative()) { + if (multiplier.isNegative()) return this.neg().mul(multiplier.neg()); + else return this.neg().mul(multiplier).neg(); + } else if (multiplier.isNegative()) return this.mul(multiplier.neg()).neg(); + if (this.lt(TWO_PWR_24) && multiplier.lt(TWO_PWR_24)) + return fromNumber(this.toNumber() * multiplier.toNumber(), this.unsigned); + var a48 = this.high >>> 16; + var a32 = this.high & 65535; + var a16 = this.low >>> 16; + var a00 = this.low & 65535; + var b48 = multiplier.high >>> 16; + var b32 = multiplier.high & 65535; + var b16 = multiplier.low >>> 16; + var b00 = multiplier.low & 65535; + var c48 = 0, c32 = 0, c16 = 0, c00 = 0; + c00 += a00 * b00; + c16 += c00 >>> 16; + c00 &= 65535; + c16 += a16 * b00; + c32 += c16 >>> 16; + c16 &= 65535; + c16 += a00 * b16; + c32 += c16 >>> 16; + c16 &= 65535; + c32 += a32 * b00; + c48 += c32 >>> 16; + c32 &= 65535; + c32 += a16 * b16; + c48 += c32 >>> 16; + c32 &= 65535; + c32 += a00 * b32; + c48 += c32 >>> 16; + c32 &= 65535; + c48 += a48 * b00 + a32 * b16 + a16 * b32 + a00 * b48; + c48 &= 65535; + return fromBits(c16 << 16 | c00, c48 << 16 | c32, this.unsigned); +}; +LongPrototype.mul = LongPrototype.multiply; +LongPrototype.divide = function divide(divisor) { + if (!isLong(divisor)) divisor = fromValue(divisor); + if (divisor.isZero()) throw Error("division by zero"); + if (wasm) { + if (!this.unsigned && this.high === -2147483648 && divisor.low === -1 && divisor.high === -1) { + return this; + } + var low = (this.unsigned ? 
wasm["div_u"] : wasm["div_s"])( + this.low, + this.high, + divisor.low, + divisor.high ); - } finally { - await cleanUpPath(archivedBundlePath, "CodeQL bundle archive", logger); + return fromBits(low, wasm["get_high"](), this.unsigned); } - return { - compressionMethod, - toolsUrl: sanitizeUrlForStatusReport(codeqlURL), - ...makeDownloadFirstToolsDownloadDurations( - downloadDurationMs, - extractionDurationMs - ) - }; -} -async function downloadAndExtractZstdWithStreaming(codeqlURL, dest, authorization, headers, tarVersion, logger) { - fs7.mkdirSync(dest, { recursive: true }); - const agent = new import_http_client.HttpClient().getAgent(codeqlURL); - headers = Object.assign( - { "User-Agent": "CodeQL Action" }, - authorization ? { authorization } : {}, - headers - ); - const response = await new Promise( - (resolve6) => import_follow_redirects.https.get( - codeqlURL, - { - headers, - // Increase the high water mark to improve performance. - highWaterMark: STREAMING_HIGH_WATERMARK_BYTES, - // Use the agent to respect proxy settings. - agent - }, - (r) => resolve6(r) - ) + if (this.isZero()) return this.unsigned ? UZERO : ZERO; + var approx, rem, res; + if (!this.unsigned) { + if (this.eq(MIN_VALUE)) { + if (divisor.eq(ONE) || divisor.eq(NEG_ONE)) + return MIN_VALUE; + else if (divisor.eq(MIN_VALUE)) return ONE; + else { + var halfThis = this.shr(1); + approx = halfThis.div(divisor).shl(1); + if (approx.eq(ZERO)) { + return divisor.isNegative() ? ONE : NEG_ONE; + } else { + rem = this.sub(divisor.mul(approx)); + res = approx.add(rem.div(divisor)); + return res; + } + } + } else if (divisor.eq(MIN_VALUE)) return this.unsigned ? UZERO : ZERO; + if (this.isNegative()) { + if (divisor.isNegative()) return this.neg().div(divisor.neg()); + return this.neg().div(divisor).neg(); + } else if (divisor.isNegative()) return this.div(divisor.neg()).neg(); + res = ZERO; + } else { + if (!divisor.unsigned) divisor = divisor.toUnsigned(); + if (divisor.gt(this)) return UZERO; + if (divisor.gt(this.shru(1))) + return UONE; + res = UZERO; + } + rem = this; + while (rem.gte(divisor)) { + approx = Math.max(1, Math.floor(rem.toNumber() / divisor.toNumber())); + var log2 = Math.ceil(Math.log(approx) / Math.LN2), delta = log2 <= 48 ? 1 : pow_dbl(2, log2 - 48), approxRes = fromNumber(approx), approxRem = approxRes.mul(divisor); + while (approxRem.isNegative() || approxRem.gt(rem)) { + approx -= delta; + approxRes = fromNumber(approx, this.unsigned); + approxRem = approxRes.mul(divisor); + } + if (approxRes.isZero()) approxRes = ONE; + res = res.add(approxRes); + rem = rem.sub(approxRem); + } + return res; +}; +LongPrototype.div = LongPrototype.divide; +LongPrototype.modulo = function modulo(divisor) { + if (!isLong(divisor)) divisor = fromValue(divisor); + if (wasm) { + var low = (this.unsigned ? wasm["rem_u"] : wasm["rem_s"])( + this.low, + this.high, + divisor.low, + divisor.high + ); + return fromBits(low, wasm["get_high"](), this.unsigned); + } + return this.sub(this.div(divisor).mul(divisor)); +}; +LongPrototype.mod = LongPrototype.modulo; +LongPrototype.rem = LongPrototype.modulo; +LongPrototype.not = function not() { + return fromBits(~this.low, ~this.high, this.unsigned); +}; +LongPrototype.countLeadingZeros = function countLeadingZeros() { + return this.high ? Math.clz32(this.high) : Math.clz32(this.low) + 32; +}; +LongPrototype.clz = LongPrototype.countLeadingZeros; +LongPrototype.countTrailingZeros = function countTrailingZeros() { + return this.low ? 
ctz32(this.low) : ctz32(this.high) + 32; +}; +LongPrototype.ctz = LongPrototype.countTrailingZeros; +LongPrototype.and = function and(other) { + if (!isLong(other)) other = fromValue(other); + return fromBits(this.low & other.low, this.high & other.high, this.unsigned); +}; +LongPrototype.or = function or(other) { + if (!isLong(other)) other = fromValue(other); + return fromBits(this.low | other.low, this.high | other.high, this.unsigned); +}; +LongPrototype.xor = function xor(other) { + if (!isLong(other)) other = fromValue(other); + return fromBits(this.low ^ other.low, this.high ^ other.high, this.unsigned); +}; +LongPrototype.shiftLeft = function shiftLeft(numBits) { + if (isLong(numBits)) numBits = numBits.toInt(); + if ((numBits &= 63) === 0) return this; + else if (numBits < 32) + return fromBits( + this.low << numBits, + this.high << numBits | this.low >>> 32 - numBits, + this.unsigned + ); + else return fromBits(0, this.low << numBits - 32, this.unsigned); +}; +LongPrototype.shl = LongPrototype.shiftLeft; +LongPrototype.shiftRight = function shiftRight(numBits) { + if (isLong(numBits)) numBits = numBits.toInt(); + if ((numBits &= 63) === 0) return this; + else if (numBits < 32) + return fromBits( + this.low >>> numBits | this.high << 32 - numBits, + this.high >> numBits, + this.unsigned + ); + else + return fromBits( + this.high >> numBits - 32, + this.high >= 0 ? 0 : -1, + this.unsigned + ); +}; +LongPrototype.shr = LongPrototype.shiftRight; +LongPrototype.shiftRightUnsigned = function shiftRightUnsigned(numBits) { + if (isLong(numBits)) numBits = numBits.toInt(); + if ((numBits &= 63) === 0) return this; + if (numBits < 32) + return fromBits( + this.low >>> numBits | this.high << 32 - numBits, + this.high >>> numBits, + this.unsigned + ); + if (numBits === 32) return fromBits(this.high, 0, this.unsigned); + return fromBits(this.high >>> numBits - 32, 0, this.unsigned); +}; +LongPrototype.shru = LongPrototype.shiftRightUnsigned; +LongPrototype.shr_u = LongPrototype.shiftRightUnsigned; +LongPrototype.rotateLeft = function rotateLeft(numBits) { + var b; + if (isLong(numBits)) numBits = numBits.toInt(); + if ((numBits &= 63) === 0) return this; + if (numBits === 32) return fromBits(this.high, this.low, this.unsigned); + if (numBits < 32) { + b = 32 - numBits; + return fromBits( + this.low << numBits | this.high >>> b, + this.high << numBits | this.low >>> b, + this.unsigned + ); + } + numBits -= 32; + b = 32 - numBits; + return fromBits( + this.high << numBits | this.low >>> b, + this.low << numBits | this.high >>> b, + this.unsigned ); - if (response.statusCode !== 200) { - throw new Error( - `Failed to download CodeQL bundle from ${codeqlURL}. 
HTTP status code: ${response.statusCode}.` +}; +LongPrototype.rotl = LongPrototype.rotateLeft; +LongPrototype.rotateRight = function rotateRight(numBits) { + var b; + if (isLong(numBits)) numBits = numBits.toInt(); + if ((numBits &= 63) === 0) return this; + if (numBits === 32) return fromBits(this.high, this.low, this.unsigned); + if (numBits < 32) { + b = 32 - numBits; + return fromBits( + this.high << b | this.low >>> numBits, + this.low << b | this.high >>> numBits, + this.unsigned ); } - await extractTarZst(response, dest, tarVersion, logger); -} -function getToolcacheDirectory(version) { - return path7.join( - getRequiredEnvParam("RUNNER_TOOL_CACHE"), - TOOLCACHE_TOOL_NAME, - semver7.clean(version) || version, - os.arch() || "" + numBits -= 32; + b = 32 - numBits; + return fromBits( + this.low << b | this.high >>> numBits, + this.high << b | this.low >>> numBits, + this.unsigned ); +}; +LongPrototype.rotr = LongPrototype.rotateRight; +LongPrototype.toSigned = function toSigned() { + if (!this.unsigned) return this; + return fromBits(this.low, this.high, false); +}; +LongPrototype.toUnsigned = function toUnsigned() { + if (this.unsigned) return this; + return fromBits(this.low, this.high, true); +}; +LongPrototype.toBytes = function toBytes(le) { + return le ? this.toBytesLE() : this.toBytesBE(); +}; +LongPrototype.toBytesLE = function toBytesLE() { + var hi = this.high, lo = this.low; + return [ + lo & 255, + lo >>> 8 & 255, + lo >>> 16 & 255, + lo >>> 24, + hi & 255, + hi >>> 8 & 255, + hi >>> 16 & 255, + hi >>> 24 + ]; +}; +LongPrototype.toBytesBE = function toBytesBE() { + var hi = this.high, lo = this.low; + return [ + hi >>> 24, + hi >>> 16 & 255, + hi >>> 8 & 255, + hi & 255, + lo >>> 24, + lo >>> 16 & 255, + lo >>> 8 & 255, + lo & 255 + ]; +}; +Long.fromBytes = function fromBytes(bytes, unsigned, le) { + return le ? Long.fromBytesLE(bytes, unsigned) : Long.fromBytesBE(bytes, unsigned); +}; +Long.fromBytesLE = function fromBytesLE(bytes, unsigned) { + return new Long( + bytes[0] | bytes[1] << 8 | bytes[2] << 16 | bytes[3] << 24, + bytes[4] | bytes[5] << 8 | bytes[6] << 16 | bytes[7] << 24, + unsigned + ); +}; +Long.fromBytesBE = function fromBytesBE(bytes, unsigned) { + return new Long( + bytes[4] << 24 | bytes[5] << 16 | bytes[6] << 8 | bytes[7], + bytes[0] << 24 | bytes[1] << 16 | bytes[2] << 8 | bytes[3], + unsigned + ); +}; +if (typeof BigInt === "function") { + Long.fromBigInt = function fromBigInt(value, unsigned) { + var lowBits = Number(BigInt.asIntN(32, value)); + var highBits = Number(BigInt.asIntN(32, value >> BigInt(32))); + return fromBits(lowBits, highBits, unsigned); + }; + Long.fromValue = function fromValueWithBigInt(value, unsigned) { + if (typeof value === "bigint") return Long.fromBigInt(value, unsigned); + return fromValue(value, unsigned); + }; + LongPrototype.toBigInt = function toBigInt() { + var lowBigInt = BigInt(this.low >>> 0); + var highBigInt = BigInt(this.unsigned ? this.high >>> 0 : this.high); + return highBigInt << BigInt(32) | lowBigInt; + }; } -function writeToolcacheMarkerFile(extractedPath, logger) { - const markerFilePath = `${extractedPath}.complete`; - fs7.writeFileSync(markerFilePath, ""); - logger.info(`Created toolcache marker file ${markerFilePath}`); -} -function sanitizeUrlForStatusReport(url2) { - return ["github/codeql-action", "dsp-testing/codeql-cli-nightlies"].some( - (repo) => url2.startsWith(`https://github.com/${repo}/releases/download/`) - ) ? 
url2 : "sanitized-value"; -} +var long_default = Long; -// src/setup-codeql.ts -var CODEQL_DEFAULT_ACTION_REPOSITORY = "github/codeql-action"; -var CODEQL_NIGHTLIES_REPOSITORY_OWNER = "dsp-testing"; -var CODEQL_NIGHTLIES_REPOSITORY_NAME = "codeql-cli-nightlies"; -var CODEQL_BUNDLE_VERSION_ALIAS = ["linked", "latest"]; -var CODEQL_NIGHTLY_TOOLS_INPUTS = ["nightly", "nightly-latest"]; -var CODEQL_TOOLCACHE_INPUT = "toolcache"; -function getCodeQLBundleExtension(compressionMethod) { - switch (compressionMethod) { - case "gzip": - return ".tar.gz"; - case "zstd": - return ".tar.zst"; - default: - assertNever(compressionMethod); +// src/fingerprints.ts +var tab = " ".charCodeAt(0); +var space = " ".charCodeAt(0); +var lf = "\n".charCodeAt(0); +var cr = "\r".charCodeAt(0); +var EOF = 65535; +var BLOCK_SIZE = 100; +var MOD = long_default.fromInt(37); +function computeFirstMod() { + let firstMod = long_default.ONE; + for (let i = 0; i < BLOCK_SIZE; i++) { + firstMod = firstMod.multiply(MOD); } + return firstMod; } -function getCodeQLBundleName(compressionMethod) { - const extension = getCodeQLBundleExtension(compressionMethod); - let platform; - if (process.platform === "win32") { - platform = "win64"; - } else if (process.platform === "linux") { - platform = "linux64"; - } else if (process.platform === "darwin") { - platform = "osx64"; - } else { - return `codeql-bundle${extension}`; +async function hash(callback, filepath) { + const window2 = Array(BLOCK_SIZE).fill(0); + const lineNumbers = Array(BLOCK_SIZE).fill(-1); + let hashRaw = long_default.ZERO; + const firstMod = computeFirstMod(); + let index = 0; + let lineNumber = 0; + let lineStart = true; + let prevCR = false; + const hashCounts = {}; + const outputHash = function() { + const hashValue = hashRaw.toUnsigned().toString(16); + if (!hashCounts[hashValue]) { + hashCounts[hashValue] = 0; + } + hashCounts[hashValue]++; + callback(lineNumbers[index], `${hashValue}:${hashCounts[hashValue]}`); + lineNumbers[index] = -1; + }; + const updateHash = function(current) { + const begin = window2[index]; + window2[index] = current; + hashRaw = MOD.multiply(hashRaw).add(long_default.fromInt(current)).subtract(firstMod.multiply(long_default.fromInt(begin))); + index = (index + 1) % BLOCK_SIZE; + }; + const processCharacter = function(current) { + if (current === space || current === tab || prevCR && current === lf) { + prevCR = false; + return; + } + if (current === cr) { + current = lf; + prevCR = true; + } else { + prevCR = false; + } + if (lineNumbers[index] !== -1) { + outputHash(); + } + if (lineStart) { + lineStart = false; + lineNumber++; + lineNumbers[index] = lineNumber; + } + if (current === lf) { + lineStart = true; + } + updateHash(current); + }; + const readStream = fs5.createReadStream(filepath, "utf8"); + for await (const data of readStream) { + for (let i = 0; i < data.length; ++i) { + processCharacter(data.charCodeAt(i)); + } } - return `codeql-bundle-${platform}${extension}`; -} -function getCodeQLActionRepository(logger) { - if (isRunningLocalAction()) { - logger.info( - "The CodeQL Action is checked out locally. Using the default CodeQL Action repository." 
- ); - return CODEQL_DEFAULT_ACTION_REPOSITORY; + processCharacter(EOF); + for (let i = 0; i < BLOCK_SIZE; i++) { + if (lineNumbers[index] !== -1) { + outputHash(); + } + updateHash(0); } - return getRequiredEnvParam("GITHUB_ACTION_REPOSITORY"); } -async function getCodeQLBundleDownloadURL(tagName, apiDetails, compressionMethod, logger) { - const codeQLActionRepository = getCodeQLActionRepository(logger); - const potentialDownloadSources = [ - // This GitHub instance, and this Action. - [apiDetails.url, codeQLActionRepository], - // This GitHub instance, and the canonical Action. - [apiDetails.url, CODEQL_DEFAULT_ACTION_REPOSITORY], - // GitHub.com, and the canonical Action. - [GITHUB_DOTCOM_URL, CODEQL_DEFAULT_ACTION_REPOSITORY] - ]; - const uniqueDownloadSources = potentialDownloadSources.filter( - (source, index, self2) => { - return !self2.slice(0, index).some((other) => (0, import_fast_deep_equal.default)(source, other)); +function locationUpdateCallback(result, location, logger) { + let locationStartLine = location.physicalLocation?.region?.startLine; + if (locationStartLine === void 0) { + locationStartLine = 1; + } + return function(lineNumber, hashValue) { + if (locationStartLine !== lineNumber) { + return; } - ); - const codeQLBundleName = getCodeQLBundleName(compressionMethod); - for (const downloadSource of uniqueDownloadSources) { - const [apiURL, repository] = downloadSource; - if (apiURL === GITHUB_DOTCOM_URL && repository === CODEQL_DEFAULT_ACTION_REPOSITORY) { - break; + if (!result.partialFingerprints) { + result.partialFingerprints = {}; } - const [repositoryOwner, repositoryName] = repository.split("/"); - try { - const release = await getApiClient().rest.repos.getReleaseByTag({ - owner: repositoryOwner, - repo: repositoryName, - tag: tagName - }); - for (const asset of release.data.assets) { - if (asset.name === codeQLBundleName) { - logger.info( - `Found CodeQL bundle ${codeQLBundleName} in ${repository} on ${apiURL} with URL ${asset.url}.` - ); - return asset.url; - } - } - } catch (e) { - logger.info( - `Looked for CodeQL bundle ${codeQLBundleName} in ${repository} on ${apiURL} but got error ${e}.` + const existingFingerprint = result.partialFingerprints.primaryLocationLineHash; + if (!existingFingerprint) { + result.partialFingerprints.primaryLocationLineHash = hashValue; + } else if (existingFingerprint !== hashValue) { + logger.warning( + `Calculated fingerprint of ${hashValue} for file ${location.physicalLocation.artifactLocation.uri} line ${lineNumber}, but found existing inconsistent fingerprint value ${existingFingerprint}` ); } - } - return `https://github.com/${CODEQL_DEFAULT_ACTION_REPOSITORY}/releases/download/${tagName}/${codeQLBundleName}`; + }; } -function tryGetBundleVersionFromTagName(tagName, logger) { - const match = tagName.match(/^codeql-bundle-(.*)$/); - if (match === null || match.length < 2) { - logger.debug(`Could not determine bundle version from tag ${tagName}.`); +function resolveUriToFile(location, artifacts, sourceRoot, logger) { + if (!location.uri && location.index !== void 0) { + if (typeof location.index !== "number" || location.index < 0 || location.index >= artifacts.length || typeof artifacts[location.index].location !== "object") { + logger.debug(`Ignoring location as index "${location.index}" is invalid`); + return void 0; + } + location = artifacts[location.index].location; + } + if (typeof location.uri !== "string") { + logger.debug(`Ignoring location as URI "${location.uri}" is invalid`); return void 0; } - return match[1]; 
-} -function tryGetTagNameFromUrl(url2, logger) { - const matches = [...url2.matchAll(/\/(codeql-bundle-[^/]*)\//g)]; - if (matches.length === 0) { - logger.debug(`Could not determine tag name for URL ${url2}.`); + let uri; + try { + uri = decodeURIComponent(location.uri); + } catch { + logger.debug(`Ignoring location as URI "${location.uri}" is invalid`); return void 0; } - const match = matches[matches.length - 1]; - if (match?.length !== 2) { + const fileUriPrefix = "file://"; + if (uri.startsWith(fileUriPrefix)) { + uri = uri.substring(fileUriPrefix.length); + } + if (uri.indexOf("://") !== -1) { logger.debug( - `Could not determine tag name for URL ${url2}. Matched ${JSON.stringify( - match - )}.` + `Ignoring location URI "${uri}" as the scheme is not recognised` ); return void 0; } - return match[1]; -} -function convertToSemVer(version, logger) { - if (!semver8.valid(version)) { + const srcRootPrefix = `${sourceRoot}/`; + if (uri.startsWith("/") && !uri.startsWith(srcRootPrefix)) { logger.debug( - `Bundle version ${version} is not in SemVer format. Will treat it as pre-release 0.0.0-${version}.` + `Ignoring location URI "${uri}" as it is outside of the src root` ); - version = `0.0.0-${version}`; + return void 0; } - const s = semver8.clean(version); - if (!s) { - throw new Error(`Bundle version ${version} is not in SemVer format.`); + if (!import_path.default.isAbsolute(uri)) { + uri = srcRootPrefix + uri; } - return s; + if (!fs5.existsSync(uri)) { + logger.debug(`Unable to compute fingerprint for non-existent file: ${uri}`); + return void 0; + } + if (fs5.statSync(uri).isDirectory()) { + logger.debug(`Unable to compute fingerprint for directory: ${uri}`); + return void 0; + } + return uri; } -async function findOverridingToolsInCache(humanReadableVersion, logger) { - const candidates = toolcache3.findAllVersions("CodeQL").filter(isGoodVersion).map((version) => ({ - folder: toolcache3.find("CodeQL", version), - version - })).filter(({ folder }) => fs8.existsSync(path8.join(folder, "pinned-version"))); - if (candidates.length === 1) { - const candidate = candidates[0]; - logger.debug( - `CodeQL tools version ${candidate.version} in toolcache overriding version ${humanReadableVersion}.` - ); - return { - codeqlFolder: candidate.folder, - sourceType: "toolcache", - toolsVersion: candidate.version - }; - } else if (candidates.length === 0) { - logger.debug( - "Did not find any candidate pinned versions of the CodeQL tools in the toolcache." - ); - } else { - logger.debug( - "Could not use CodeQL tools from the toolcache since more than one candidate pinned version was found in the toolcache." - ); - } - return void 0; -} -async function getCodeQLSource(toolsInput, defaultCliVersion, apiDetails, variant, tarSupportsZstd, features, logger) { - if (toolsInput && !isReservedToolsValue(toolsInput) && !toolsInput.startsWith("http")) { - logger.info(`Using CodeQL CLI from local path ${toolsInput}`); - const compressionMethod2 = inferCompressionMethod(toolsInput); - if (compressionMethod2 === void 0) { - throw new ConfigurationError( - `Could not infer compression method from path ${toolsInput}. Please specify a path ending in '.tar.gz' or '.tar.zst'.` +async function addFingerprints(sarif, sourceRoot, logger) { + logger.info( + `Adding fingerprints to SARIF file. 
See ${"https://docs.github.com/en/enterprise-cloud@latest/code-security/code-scanning/integrating-with-code-scanning/sarif-support-for-code-scanning#providing-data-to-track-code-scanning-alerts-across-runs" /* TRACK_CODE_SCANNING_ALERTS_ACROSS_RUNS */} for more information.` + ); + const callbacksByFile = {}; + for (const run of sarif.runs || []) { + const artifacts = run.artifacts || []; + for (const result of run.results || []) { + const primaryLocation = (result.locations || [])[0]; + if (!primaryLocation?.physicalLocation?.artifactLocation) { + logger.debug( + `Unable to compute fingerprint for invalid location: ${JSON.stringify( + primaryLocation + )}` + ); + continue; + } + if (primaryLocation?.physicalLocation?.region?.startLine === void 0) { + continue; + } + const filepath = resolveUriToFile( + primaryLocation.physicalLocation.artifactLocation, + artifacts, + sourceRoot, + logger + ); + if (!filepath) { + continue; + } + if (!callbacksByFile[filepath]) { + callbacksByFile[filepath] = []; + } + callbacksByFile[filepath].push( + locationUpdateCallback(result, primaryLocation, logger) ); } - return { - codeqlTarPath: toolsInput, - compressionMethod: compressionMethod2, - sourceType: "local", - toolsVersion: "local" + } + for (const [filepath, callbacks] of Object.entries(callbacksByFile)) { + const teeCallback = function(lineNumber, hashValue) { + for (const c of Object.values(callbacks)) { + c(lineNumber, hashValue); + } }; + await hash(teeCallback, filepath); } - let cliVersion2; - let tagName; - let url2; - const canForceNightlyWithFF = isDynamicWorkflow() || isInTestMode(); - const forceNightlyValueFF = await features.getValue("force_nightly" /* ForceNightly */); - const forceNightly = forceNightlyValueFF && canForceNightlyWithFF; - const nightlyRequestedByToolsInput = toolsInput !== void 0 && CODEQL_NIGHTLY_TOOLS_INPUTS.includes(toolsInput); - if (forceNightly || nightlyRequestedByToolsInput) { - if (forceNightly) { - logger.info( - `Using the latest CodeQL CLI nightly, as forced by the ${"force_nightly" /* ForceNightly */} feature flag.` - ); - addNoLanguageDiagnostic( - void 0, - makeDiagnostic( - "codeql-action/forced-nightly-cli", - "A nightly release of CodeQL was used", - { - markdownMessage: "GitHub configured this analysis to use a nightly release of CodeQL to allow you to preview changes from an upcoming release.\n\nNightly releases do not undergo the same validation as regular releases and may lead to analysis instability.\n\nIf use of a nightly CodeQL release for this analysis is unexpected, please contact GitHub support.", - visibility: { - cliSummaryTable: true, - statusPage: true, - telemetry: true - }, - severity: "note" - } - ) - ); + return sarif; +} + +// src/init.ts +var toolrunner4 = __toESM(require_toolrunner()); +var io5 = __toESM(require_io()); + +// src/codeql.ts +var fs9 = __toESM(require("fs")); +var path9 = __toESM(require("path")); +var core10 = __toESM(require_core()); +var toolrunner3 = __toESM(require_toolrunner()); + +// src/cli-errors.ts +var SUPPORTED_PLATFORMS = [ + ["linux", "x64"], + ["win32", "x64"], + ["darwin", "x64"], + ["darwin", "arm64"] +]; +var CliError = class extends Error { + exitCode; + stderr; + constructor({ cmd, args, exitCode, stderr }) { + const prettyCommand = prettyPrintInvocation(cmd, args); + const fatalErrors = extractFatalErrors(stderr); + const autobuildErrors = extractAutobuildErrors(stderr); + let message; + if (fatalErrors) { + message = `Encountered a fatal error while running "${prettyCommand}". 
Exit code was ${exitCode} and error was: ${ensureEndsInPeriod( + fatalErrors.trim() + )} See the logs for more details.`; + } else if (autobuildErrors) { + message = `We were unable to automatically build your code. Please provide manual build steps. See ${"https://docs.github.com/en/code-security/code-scanning/troubleshooting-code-scanning/automatic-build-failed" /* AUTOMATIC_BUILD_FAILED */} for more information. Encountered the following error: ${autobuildErrors}`; } else { - logger.info( - `Using the latest CodeQL CLI nightly, as requested by 'tools: ${toolsInput}'.` + const lastLine = ensureEndsInPeriod( + stderr.trim().split("\n").pop()?.trim() || "n/a" ); + message = `Encountered a fatal error while running "${prettyCommand}". Exit code was ${exitCode} and last log line was: ${lastLine} See the logs for more details.`; } - toolsInput = await getNightlyToolsUrl(logger); + super(message); + this.exitCode = exitCode; + this.stderr = stderr; } - const forceShippedTools = toolsInput && CODEQL_BUNDLE_VERSION_ALIAS.includes(toolsInput); - if (forceShippedTools) { - cliVersion2 = cliVersion; - tagName = bundleVersion; - logger.info( - `'tools: ${toolsInput}' was requested, so using CodeQL version ${cliVersion2}, the version shipped with the Action.` - ); - if (toolsInput === "latest") { - logger.warning( - "`tools: latest` has been renamed to `tools: linked`, but the old name is still supported. No action is required." - ); +}; +function extractFatalErrors(error3) { + const fatalErrorRegex = /.*fatal (internal )?error occurr?ed(. Details)?:/gi; + let fatalErrors = []; + let lastFatalErrorIndex; + let match; + while ((match = fatalErrorRegex.exec(error3)) !== null) { + if (lastFatalErrorIndex !== void 0) { + fatalErrors.push(error3.slice(lastFatalErrorIndex, match.index).trim()); } - } else if (toolsInput !== void 0 && toolsInput === CODEQL_TOOLCACHE_INPUT) { - let latestToolcacheVersion; - const allowToolcacheValueFF = await features.getValue( - "allow_toolcache_input" /* AllowToolcacheInput */ - ); - const allowToolcacheValue = allowToolcacheValueFF && (isDynamicWorkflow() || isInTestMode()); - if (allowToolcacheValue) { - logger.info( - `Attempting to use the latest CodeQL CLI version in the toolcache, as requested by 'tools: ${toolsInput}'.` - ); - latestToolcacheVersion = getLatestToolcacheVersion(logger); - if (latestToolcacheVersion) { - cliVersion2 = latestToolcacheVersion; - } + lastFatalErrorIndex = match.index; + } + if (lastFatalErrorIndex !== void 0) { + const lastError = error3.slice(lastFatalErrorIndex).trim(); + if (fatalErrors.length === 0) { + return lastError; } - if (latestToolcacheVersion === void 0) { - if (allowToolcacheValue) { - logger.info( - `Found no CodeQL CLI in the toolcache, ignoring 'tools: ${toolsInput}'...` - ); - } else { - if (allowToolcacheValueFF) { - logger.warning( - `Ignoring 'tools: ${toolsInput}' because the workflow was not triggered dynamically.` - ); - } else { - logger.info( - `Ignoring 'tools: ${toolsInput}' because the feature is not enabled.` - ); - } - } - cliVersion2 = defaultCliVersion.cliVersion; - tagName = defaultCliVersion.tagName; + const isOneLiner = !fatalErrors.some((e) => e.includes("\n")); + if (isOneLiner) { + fatalErrors = fatalErrors.map(ensureEndsInPeriod); } - } else if (toolsInput !== void 0) { - tagName = tryGetTagNameFromUrl(toolsInput, logger); - url2 = toolsInput; - if (tagName) { - const bundleVersion3 = tryGetBundleVersionFromTagName(tagName, logger); - if (bundleVersion3 && semver8.valid(bundleVersion3)) { - 
cliVersion2 = convertToSemVer(bundleVersion3, logger); + return [ + ensureEndsInPeriod(lastError), + "Context:", + ...fatalErrors.reverse() + ].join(isOneLiner ? " " : "\n"); + } + return void 0; +} +function extractAutobuildErrors(error3) { + const pattern = /.*\[autobuild\] \[ERROR\] (.*)/gi; + let errorLines = [...error3.matchAll(pattern)].map((match) => match[1]); + if (errorLines.length > 10) { + errorLines = errorLines.slice(0, 10); + errorLines.push("(truncated)"); + } + return errorLines.join("\n") || void 0; +} +var cliErrorsConfig = { + ["AutobuildError" /* AutobuildError */]: { + cliErrorMessageCandidates: [ + new RegExp("We were unable to automatically build your code") + ] + }, + ["CouldNotCreateTempDir" /* CouldNotCreateTempDir */]: { + cliErrorMessageCandidates: [new RegExp("Could not create temp directory")] + }, + ["ExternalRepositoryCloneFailed" /* ExternalRepositoryCloneFailed */]: { + cliErrorMessageCandidates: [ + new RegExp("Failed to clone external Git repository") + ] + }, + ["GradleBuildFailed" /* GradleBuildFailed */]: { + cliErrorMessageCandidates: [ + new RegExp("\\[autobuild\\] FAILURE: Build failed with an exception.") + ] + }, + // Version of CodeQL CLI is incompatible with this version of the CodeQL Action + ["IncompatibleWithActionVersion" /* IncompatibleWithActionVersion */]: { + cliErrorMessageCandidates: [ + new RegExp("is not compatible with this CodeQL CLI") + ] + }, + ["InitCalledTwice" /* InitCalledTwice */]: { + cliErrorMessageCandidates: [ + new RegExp( + "Refusing to create databases .* but could not process any of it" + ) + ], + additionalErrorMessageToAppend: `Is the "init" action called twice in the same job?` + }, + ["InvalidConfigFile" /* InvalidConfigFile */]: { + cliErrorMessageCandidates: [ + new RegExp("Config file .* is not valid"), + new RegExp("The supplied config file is empty") + ] + }, + ["InvalidExternalRepoSpecifier" /* InvalidExternalRepoSpecifier */]: { + cliErrorMessageCandidates: [ + new RegExp("Specifier for external repository is invalid") + ] + }, + // Expected source location for database creation does not exist + ["InvalidSourceRoot" /* InvalidSourceRoot */]: { + cliErrorMessageCandidates: [new RegExp("Invalid source root")] + }, + ["MavenBuildFailed" /* MavenBuildFailed */]: { + cliErrorMessageCandidates: [ + new RegExp("\\[autobuild\\] \\[ERROR\\] Failed to execute goal") + ] + }, + ["NoBuildCommandAutodetected" /* NoBuildCommandAutodetected */]: { + cliErrorMessageCandidates: [ + new RegExp("Could not auto-detect a suitable build method") + ] + }, + ["NoBuildMethodAutodetected" /* NoBuildMethodAutodetected */]: { + cliErrorMessageCandidates: [ + new RegExp( + "Could not detect a suitable build command for the source checkout" + ) + ] + }, + // Usually when a manual build script has failed, or if an autodetected language + // was unintended to have CodeQL analysis run on it. 
+ ["NoSourceCodeSeen" /* NoSourceCodeSeen */]: { + exitCode: 32, + cliErrorMessageCandidates: [ + new RegExp( + "CodeQL detected code written in .* but could not process any of it" + ), + new RegExp( + "CodeQL did not detect any code written in languages supported by CodeQL" + ) + ] + }, + ["NoSupportedBuildCommandSucceeded" /* NoSupportedBuildCommandSucceeded */]: { + cliErrorMessageCandidates: [ + new RegExp("No supported build command succeeded") + ] + }, + ["NoSupportedBuildSystemDetected" /* NoSupportedBuildSystemDetected */]: { + cliErrorMessageCandidates: [ + new RegExp("No supported build system detected") + ] + }, + ["OutOfMemoryOrDisk" /* OutOfMemoryOrDisk */]: { + cliErrorMessageCandidates: [ + new RegExp("CodeQL is out of memory."), + new RegExp("out of disk"), + new RegExp("No space left on device") + ], + additionalErrorMessageToAppend: "For more information, see https://gh.io/troubleshooting-code-scanning/out-of-disk-or-memory" + }, + ["PackCannotBeFound" /* PackCannotBeFound */]: { + cliErrorMessageCandidates: [ + new RegExp( + "Query pack .* cannot be found\\. Check the spelling of the pack\\." + ), + new RegExp( + "is not a .ql file, .qls file, a directory, or a query pack specification." + ) + ] + }, + ["PackMissingAuth" /* PackMissingAuth */]: { + cliErrorMessageCandidates: [ + new RegExp("GitHub Container registry .* 403 Forbidden"), + new RegExp( + "Do you need to specify a token to authenticate to the registry?" + ) + ] + }, + ["SwiftBuildFailed" /* SwiftBuildFailed */]: { + cliErrorMessageCandidates: [ + new RegExp( + "\\[autobuilder/build\\] \\[build-command-failed\\] `autobuild` failed to run the build command" + ) + ] + }, + ["UnsupportedBuildMode" /* UnsupportedBuildMode */]: { + cliErrorMessageCandidates: [ + new RegExp( + "does not support the .* build mode. Please try using one of the following build modes instead" + ) + ] + }, + ["NotFoundInRegistry" /* NotFoundInRegistry */]: { + cliErrorMessageCandidates: [ + new RegExp("'.*' not found in the registry '.*'") + ] + } +}; +function getCliConfigCategoryIfExists(cliError) { + for (const [category, configuration] of Object.entries(cliErrorsConfig)) { + if (cliError.exitCode !== void 0 && configuration.exitCode !== void 0 && cliError.exitCode === configuration.exitCode) { + return category; + } + for (const e of configuration.cliErrorMessageCandidates) { + if (cliError.message.match(e) || cliError.stderr.match(e)) { + return category; } } - } else { - cliVersion2 = defaultCliVersion.cliVersion; - tagName = defaultCliVersion.tagName; } - const bundleVersion2 = tagName && tryGetBundleVersionFromTagName(tagName, logger); - const humanReadableVersion = cliVersion2 ?? (bundleVersion2 && convertToSemVer(bundleVersion2, logger)) ?? tagName ?? url2 ?? "unknown"; - logger.debug( - `Attempting to obtain CodeQL tools. CLI version: ${cliVersion2 ?? "unknown"}, bundle tag name: ${tagName ?? "unknown"}, URL: ${url2 ?? 
"unspecified"}.` + return void 0; +} +function isUnsupportedPlatform() { + return !SUPPORTED_PLATFORMS.some( + ([platform, arch2]) => platform === process.platform && arch2 === process.arch ); - let codeqlFolder; - if (cliVersion2) { - codeqlFolder = toolcache3.find("CodeQL", cliVersion2); - if (!codeqlFolder) { - logger.debug( - `Didn't find a version of the CodeQL tools in the toolcache with a version number exactly matching ${cliVersion2}.` - ); - const allVersions = toolcache3.findAllVersions("CodeQL"); - logger.debug( - `Found the following versions of the CodeQL tools in the toolcache: ${JSON.stringify( - allVersions - )}.` - ); - const candidateVersions = allVersions.filter( - (version) => version.startsWith(`${cliVersion2}-`) - ); - if (candidateVersions.length === 1) { - logger.debug( - `Exactly one version of the CodeQL tools starting with ${cliVersion2} found in the toolcache, using that.` - ); - codeqlFolder = toolcache3.find("CodeQL", candidateVersions[0]); - } else if (candidateVersions.length === 0) { - logger.debug( - `Didn't find any versions of the CodeQL tools starting with ${cliVersion2} in the toolcache. Trying next fallback method.` - ); - } else { - logger.warning( - `Found ${candidateVersions.length} versions of the CodeQL tools starting with ${cliVersion2} in the toolcache, but at most one was expected.` - ); - logger.debug("Trying next fallback method."); - } - } +} +function getUnsupportedPlatformError(cliError) { + return new ConfigurationError( + `The CodeQL CLI does not support the platform/architecture combination of ${process.platform}/${process.arch} (see ${"https://codeql.github.com/docs/codeql-overview/system-requirements/" /* SYSTEM_REQUIREMENTS */}). The underlying error was: ${cliError.message}` + ); +} +function wrapCliConfigurationError(cliError) { + if (isUnsupportedPlatform()) { + return getUnsupportedPlatformError(cliError); } - if (!codeqlFolder && tagName) { - const fallbackVersion = await tryGetFallbackToolcacheVersion( - cliVersion2, - tagName, - logger + const cliConfigErrorCategory = getCliConfigCategoryIfExists(cliError); + if (cliConfigErrorCategory === void 0) { + return cliError; + } + let errorMessageBuilder = cliError.message; + const additionalErrorMessageToAppend = cliErrorsConfig[cliConfigErrorCategory].additionalErrorMessageToAppend; + if (additionalErrorMessageToAppend !== void 0) { + errorMessageBuilder = `${errorMessageBuilder} ${additionalErrorMessageToAppend}`; + } + return new ConfigurationError(errorMessageBuilder); +} + +// src/config/db-config.ts +var jsonschema = __toESM(require_lib2()); +var semver5 = __toESM(require_semver2()); +var PACK_IDENTIFIER_PATTERN = (function() { + const alphaNumeric = "[a-z0-9]"; + const alphaNumericDash = "[a-z0-9-]"; + const component = `${alphaNumeric}(${alphaNumericDash}*${alphaNumeric})?`; + return new RegExp(`^${component}/${component}$`); +})(); + +// src/diagnostics.ts +var import_fs = require("fs"); +var import_path2 = __toESM(require("path")); +var unwrittenDiagnostics = []; +var unwrittenDefaultLanguageDiagnostics = []; +function makeDiagnostic(id, name, data = void 0) { + return { + ...data, + timestamp: data?.timestamp ?? (/* @__PURE__ */ new Date()).toISOString(), + source: { ...data?.source, id, name } + }; +} +function addDiagnostic(config, language, diagnostic) { + const logger = getActionsLogger(); + const databasePath = language ? 
getCodeQLDatabasePath(config, language) : config.dbLocation; + if ((0, import_fs.existsSync)(databasePath)) { + writeDiagnostic(config, language, diagnostic); + } else { + logger.debug( + `Writing a diagnostic for ${language}, but the database at ${databasePath} does not exist yet.` ); - if (fallbackVersion) { - codeqlFolder = toolcache3.find("CodeQL", fallbackVersion); - } else { - logger.debug( - `Could not determine a fallback toolcache version number for CodeQL tools version ${humanReadableVersion}.` - ); - } + unwrittenDiagnostics.push({ diagnostic, language }); } - if (codeqlFolder) { - logger.info( - `Found CodeQL tools version ${humanReadableVersion} in the toolcache.` +} +function addNoLanguageDiagnostic(config, diagnostic) { + if (config !== void 0) { + addDiagnostic( + config, + // Arbitrarily choose the first language. We could also choose all languages, but that + // increases the risk of misinterpreting the data. + config.languages[0], + diagnostic ); } else { - logger.info( - `Did not find CodeQL tools version ${humanReadableVersion} in the toolcache.` + unwrittenDefaultLanguageDiagnostics.push(diagnostic); + } +} +function writeDiagnostic(config, language, diagnostic) { + const logger = getActionsLogger(); + const databasePath = language ? getCodeQLDatabasePath(config, language) : config.dbLocation; + const diagnosticsPath = import_path2.default.resolve( + databasePath, + "diagnostic", + "codeql-action" + ); + try { + (0, import_fs.mkdirSync)(diagnosticsPath, { recursive: true }); + const jsonPath = import_path2.default.resolve( + diagnosticsPath, + // Remove colons from the timestamp as these are not allowed in Windows filenames. + `codeql-action-${diagnostic.timestamp.replaceAll(":", "")}.json` ); + (0, import_fs.writeFileSync)(jsonPath, JSON.stringify(diagnostic)); + } catch (err) { + logger.warning(`Unable to write diagnostic message to database: ${err}`); + logger.debug(JSON.stringify(diagnostic)); } - if (codeqlFolder) { - if (cliVersion2) { - logger.info( - `Using CodeQL CLI version ${cliVersion2} from toolcache at ${codeqlFolder}` - ); - } else { - logger.info(`Using CodeQL CLI from toolcache at ${codeqlFolder}`); - } - return { - codeqlFolder, - sourceType: "toolcache", - toolsVersion: cliVersion2 ?? humanReadableVersion - }; - } - if (variant === "GitHub Enterprise Server" /* GHES */ && !forceShippedTools && !toolsInput) { - const result = await findOverridingToolsInCache( - humanReadableVersion, - logger - ); - if (result !== void 0) { - return result; - } - } - let compressionMethod; - if (!url2) { - compressionMethod = cliVersion2 !== void 0 && await useZstdBundle(cliVersion2, tarSupportsZstd) ? "zstd" : "gzip"; - url2 = await getCodeQLBundleDownloadURL( - tagName, - apiDetails, - compressionMethod, - logger - ); - } else { - const method = inferCompressionMethod(url2); - if (method === void 0) { - throw new ConfigurationError( - `Could not infer compression method from URL ${url2}. 
Please specify a URL ending in '.tar.gz' or '.tar.zst'.` - ); - } - compressionMethod = method; +} + +// src/trap-caching.ts +var actionsCache2 = __toESM(require_cache5()); + +// src/config-utils.ts +var OVERLAY_MINIMUM_AVAILABLE_DISK_SPACE_MB = 2e4; +var OVERLAY_MINIMUM_AVAILABLE_DISK_SPACE_BYTES = OVERLAY_MINIMUM_AVAILABLE_DISK_SPACE_MB * 1e6; +var OVERLAY_MINIMUM_MEMORY_MB = 5 * 1024; +var OVERLAY_ANALYSIS_FEATURES = { + actions: "overlay_analysis_actions" /* OverlayAnalysisActions */, + cpp: "overlay_analysis_cpp" /* OverlayAnalysisCpp */, + csharp: "overlay_analysis_csharp" /* OverlayAnalysisCsharp */, + go: "overlay_analysis_go" /* OverlayAnalysisGo */, + java: "overlay_analysis_java" /* OverlayAnalysisJava */, + javascript: "overlay_analysis_javascript" /* OverlayAnalysisJavascript */, + python: "overlay_analysis_python" /* OverlayAnalysisPython */, + ruby: "overlay_analysis_ruby" /* OverlayAnalysisRuby */, + rust: "overlay_analysis_rust" /* OverlayAnalysisRust */, + swift: "overlay_analysis_swift" /* OverlayAnalysisSwift */ +}; +var OVERLAY_ANALYSIS_CODE_SCANNING_FEATURES = { + actions: "overlay_analysis_code_scanning_actions" /* OverlayAnalysisCodeScanningActions */, + cpp: "overlay_analysis_code_scanning_cpp" /* OverlayAnalysisCodeScanningCpp */, + csharp: "overlay_analysis_code_scanning_csharp" /* OverlayAnalysisCodeScanningCsharp */, + go: "overlay_analysis_code_scanning_go" /* OverlayAnalysisCodeScanningGo */, + java: "overlay_analysis_code_scanning_java" /* OverlayAnalysisCodeScanningJava */, + javascript: "overlay_analysis_code_scanning_javascript" /* OverlayAnalysisCodeScanningJavascript */, + python: "overlay_analysis_code_scanning_python" /* OverlayAnalysisCodeScanningPython */, + ruby: "overlay_analysis_code_scanning_ruby" /* OverlayAnalysisCodeScanningRuby */, + rust: "overlay_analysis_code_scanning_rust" /* OverlayAnalysisCodeScanningRust */, + swift: "overlay_analysis_code_scanning_swift" /* OverlayAnalysisCodeScanningSwift */ +}; +function appendExtraQueryExclusions(extraQueryExclusions, cliConfig) { + const augmentedConfig = cloneObject(cliConfig); + if (extraQueryExclusions.length === 0) { + return augmentedConfig; } - if (cliVersion2) { - logger.info(`Using CodeQL CLI version ${cliVersion2} sourced from ${url2} .`); - } else { - logger.info(`Using CodeQL CLI sourced from ${url2} .`); + augmentedConfig["query-filters"] = [ + // Ordering matters. If the first filter is an inclusion, it implicitly + // excludes all queries that are not included. If it is an exclusion, + // it implicitly includes all queries that are not excluded. So user + // filters (if any) should always be first to preserve intent. + ...augmentedConfig["query-filters"] || [], + ...extraQueryExclusions + ]; + if (augmentedConfig["query-filters"]?.length === 0) { + delete augmentedConfig["query-filters"]; } - return { - bundleVersion: tagName && tryGetBundleVersionFromTagName(tagName, logger), - cliVersion: cliVersion2, - codeqlURL: url2, - compressionMethod, - sourceType: "download", - toolsVersion: cliVersion2 ?? humanReadableVersion - }; + return augmentedConfig; } -async function tryGetFallbackToolcacheVersion(cliVersion2, tagName, logger) { - const bundleVersion2 = tryGetBundleVersionFromTagName(tagName, logger); - if (!bundleVersion2) { - return void 0; - } - const fallbackVersion = convertToSemVer(bundleVersion2, logger); - logger.debug( - `Computed a fallback toolcache version number of ${fallbackVersion} for CodeQL version ${cliVersion2 ?? 
tagName}.` - ); - return fallbackVersion; + +// src/setup-codeql.ts +var fs8 = __toESM(require("fs")); +var path8 = __toESM(require("path")); +var toolcache3 = __toESM(require_tool_cache()); +var import_fast_deep_equal = __toESM(require_fast_deep_equal()); +var semver8 = __toESM(require_semver2()); + +// node_modules/uuid/dist-node/stringify.js +var byteToHex = []; +for (let i = 0; i < 256; ++i) { + byteToHex.push((i + 256).toString(16).slice(1)); } -var downloadCodeQL = async function(codeqlURL, compressionMethod, maybeBundleVersion, maybeCliVersion, apiDetails, tarVersion, tempDir, logger) { - const parsedCodeQLURL = new URL(codeqlURL); - const searchParams = new URLSearchParams(parsedCodeQLURL.search); - const headers = { - accept: "application/octet-stream" - }; - let authorization = void 0; - if (searchParams.has("token")) { - logger.debug("CodeQL tools URL contains an authorization token."); - } else { - authorization = getAuthorizationHeaderFor( - logger, - apiDetails, - codeqlURL - ); +function unsafeStringify(arr, offset = 0) { + return (byteToHex[arr[offset + 0]] + byteToHex[arr[offset + 1]] + byteToHex[arr[offset + 2]] + byteToHex[arr[offset + 3]] + "-" + byteToHex[arr[offset + 4]] + byteToHex[arr[offset + 5]] + "-" + byteToHex[arr[offset + 6]] + byteToHex[arr[offset + 7]] + "-" + byteToHex[arr[offset + 8]] + byteToHex[arr[offset + 9]] + "-" + byteToHex[arr[offset + 10]] + byteToHex[arr[offset + 11]] + byteToHex[arr[offset + 12]] + byteToHex[arr[offset + 13]] + byteToHex[arr[offset + 14]] + byteToHex[arr[offset + 15]]).toLowerCase(); +} + +// node_modules/uuid/dist-node/rng.js +var import_node_crypto = require("node:crypto"); +var rnds8Pool = new Uint8Array(256); +var poolPtr = rnds8Pool.length; +function rng() { + if (poolPtr > rnds8Pool.length - 16) { + (0, import_node_crypto.randomFillSync)(rnds8Pool); + poolPtr = 0; } - const toolcacheInfo = getToolcacheDestinationInfo( - maybeBundleVersion, - maybeCliVersion, - logger - ); - const extractedBundlePath = toolcacheInfo?.path ?? getTempExtractionDir(tempDir); - const statusReport = await downloadAndExtract( - codeqlURL, - compressionMethod, - extractedBundlePath, - authorization, - { "User-Agent": "CodeQL Action", ...headers }, - tarVersion, - logger - ); - if (!toolcacheInfo) { - logger.debug( - `Could not cache CodeQL tools because we could not determine the bundle version from the URL ${codeqlURL}.` - ); - return { - codeqlFolder: extractedBundlePath, - statusReport, - toolsVersion: maybeCliVersion ?? "unknown" - }; + return rnds8Pool.slice(poolPtr, poolPtr += 16); +} + +// node_modules/uuid/dist-node/native.js +var import_node_crypto2 = require("node:crypto"); +var native_default = { randomUUID: import_node_crypto2.randomUUID }; + +// node_modules/uuid/dist-node/v4.js +function _v4(options, buf, offset) { + options = options || {}; + const rnds = options.random ?? options.rng?.() ?? rng(); + if (rnds.length < 16) { + throw new Error("Random bytes length must be >= 16"); } - writeToolcacheMarkerFile(toolcacheInfo.path, logger); - return { - codeqlFolder: extractedBundlePath, - statusReport, - toolsVersion: maybeCliVersion ?? 
toolcacheInfo.version - }; -}; -function getToolcacheDestinationInfo(maybeBundleVersion, maybeCliVersion, logger) { - if (maybeBundleVersion) { - const version = getCanonicalToolcacheVersion( - maybeCliVersion, - maybeBundleVersion, - logger - ); - return { - path: getToolcacheDirectory(version), - version - }; + rnds[6] = rnds[6] & 15 | 64; + rnds[8] = rnds[8] & 63 | 128; + if (buf) { + offset = offset || 0; + if (offset < 0 || offset + 16 > buf.length) { + throw new RangeError(`UUID byte range ${offset}:${offset + 15} is out of buffer bounds`); + } + for (let i = 0; i < 16; ++i) { + buf[offset + i] = rnds[i]; + } + return buf; } - return void 0; + return unsafeStringify(rnds); } -function getCanonicalToolcacheVersion(cliVersion2, bundleVersion2, logger) { - if (!cliVersion2?.match(/^[0-9]+\.[0-9]+\.[0-9]+$/)) { - return convertToSemVer(bundleVersion2, logger); +function v4(options, buf, offset) { + if (native_default.randomUUID && !buf && !options) { + return native_default.randomUUID(); } - return cliVersion2; + return _v4(options, buf, offset); } -async function setupCodeQLBundle(toolsInput, apiDetails, tempDir, variant, defaultCliVersion, features, logger) { - if (!await isBinaryAccessible("tar", logger)) { - throw new ConfigurationError( - "Could not find tar in PATH, so unable to extract CodeQL bundle." - ); +var v4_default = v4; + +// src/tar.ts +var import_child_process = require("child_process"); +var fs6 = __toESM(require("fs")); +var stream = __toESM(require("stream")); +var import_toolrunner = __toESM(require_toolrunner()); +var io4 = __toESM(require_io()); +var toolcache = __toESM(require_tool_cache()); +var semver6 = __toESM(require_semver2()); +var MIN_REQUIRED_BSD_TAR_VERSION = "3.4.3"; +var MIN_REQUIRED_GNU_TAR_VERSION = "1.31"; +async function getTarVersion() { + const tar = await io4.which("tar", true); + let stdout = ""; + const exitCode = await new import_toolrunner.ToolRunner(tar, ["--version"], { + listeners: { + stdout: (data) => { + stdout += data.toString(); + } + } + }).exec(); + if (exitCode !== 0) { + throw new Error("Failed to call tar --version"); } - const zstdAvailability = await isZstdAvailable(logger); - const source = await getCodeQLSource( - toolsInput, - defaultCliVersion, - apiDetails, - variant, - zstdAvailability.available, - features, - logger - ); - let codeqlFolder; - let toolsVersion = source.toolsVersion; - let toolsDownloadStatusReport; - let toolsSource; - switch (source.sourceType) { - case "local": { - codeqlFolder = await extract( - source.codeqlTarPath, - getTempExtractionDir(tempDir), - source.compressionMethod, - zstdAvailability.version, - logger - ); - toolsSource = "LOCAL" /* Local */; - break; + if (stdout.includes("GNU tar")) { + const match = stdout.match(/tar \(GNU tar\) ([0-9.]+)/); + if (!match?.[1]) { + throw new Error("Failed to parse output of tar --version."); } - case "toolcache": - codeqlFolder = source.codeqlFolder; - logger.debug(`CodeQL found in cache ${codeqlFolder}`); - toolsSource = "TOOLCACHE" /* Toolcache */; - break; - case "download": { - const result = await downloadCodeQL( - source.codeqlURL, - source.compressionMethod, - source.bundleVersion, - source.cliVersion, - apiDetails, - zstdAvailability.version, - tempDir, - logger - ); - toolsVersion = result.toolsVersion; - codeqlFolder = result.codeqlFolder; - toolsDownloadStatusReport = result.statusReport; - toolsSource = "DOWNLOAD" /* Download */; - break; + return { type: "gnu", version: match[1] }; + } else if (stdout.includes("bsdtar")) { + const match = 
stdout.match(/bsdtar ([0-9.]+)/); + if (!match?.[1]) { + throw new Error("Failed to parse output of tar --version."); } - default: - assertNever(source); + return { type: "bsd", version: match[1] }; + } else { + throw new Error("Unknown tar version"); } - return { - codeqlFolder, - toolsDownloadStatusReport, - toolsSource, - toolsVersion, - zstdAvailability - }; -} -async function useZstdBundle(cliVersion2, tarSupportsZstd) { - return ( - // In testing, gzip performs better than zstd on Windows. - process.platform !== "win32" && tarSupportsZstd && semver8.gte(cliVersion2, CODEQL_VERSION_ZSTD_BUNDLE) - ); -} -function getTempExtractionDir(tempDir) { - return path8.join(tempDir, v4_default()); } -async function getNightlyToolsUrl(logger) { - const zstdAvailability = await isZstdAvailable(logger); - const compressionMethod = await useZstdBundle( - CODEQL_VERSION_ZSTD_BUNDLE, - zstdAvailability.available - ) ? "zstd" : "gzip"; +async function isZstdAvailable(logger) { + const foundZstdBinary = await isBinaryAccessible("zstd", logger); try { - const release = await getApiClient().rest.repos.listReleases({ - owner: CODEQL_NIGHTLIES_REPOSITORY_OWNER, - repo: CODEQL_NIGHTLIES_REPOSITORY_NAME, - per_page: 1, - page: 1, - prerelease: true - }); - const latestRelease = release.data[0]; - if (!latestRelease) { - throw new Error("Could not find the latest nightly release."); + const tarVersion = await getTarVersion(); + const { type: type2, version } = tarVersion; + logger.info(`Found ${type2} tar version ${version}.`); + switch (type2) { + case "gnu": + return { + available: foundZstdBinary && // GNU tar only uses major and minor version numbers + semver6.gte( + semver6.coerce(version), + semver6.coerce(MIN_REQUIRED_GNU_TAR_VERSION) + ), + foundZstdBinary, + version: tarVersion + }; + case "bsd": + return { + available: foundZstdBinary && // Do a loose comparison since these version numbers don't contain + // a patch version number. + semver6.gte(version, MIN_REQUIRED_BSD_TAR_VERSION), + foundZstdBinary, + version: tarVersion + }; + default: + assertNever(type2); } - return `https://github.com/${CODEQL_NIGHTLIES_REPOSITORY_OWNER}/${CODEQL_NIGHTLIES_REPOSITORY_NAME}/releases/download/${latestRelease.tag_name}/${getCodeQLBundleName(compressionMethod)}`; } catch (e) { - throw new Error( - `Failed to retrieve the latest nightly release: ${wrapError(e)}` + logger.warning( + `Failed to determine tar version, therefore will assume zstd is not available. The underlying error was: ${e}` ); + return { available: false, foundZstdBinary }; } } -function getLatestToolcacheVersion(logger) { - const allVersions = toolcache3.findAllVersions("CodeQL").sort((a, b) => semver8.compare(b, a)); +async function extract(tarPath, dest, compressionMethod, tarVersion, logger) { + fs6.mkdirSync(dest, { recursive: true }); + switch (compressionMethod) { + case "gzip": + return await toolcache.extractTar(tarPath, dest); + case "zstd": { + if (!tarVersion) { + throw new Error( + "Could not determine tar version, which is required to extract a Zstandard archive." + ); + } + await extractTarZst(tarPath, dest, tarVersion, logger); + return dest; + } + } +} +async function extractTarZst(tar, dest, tarVersion, logger) { logger.debug( - `Found the following versions of the CodeQL tools in the toolcache: ${JSON.stringify( - allVersions - )}.` + `Extracting to ${dest}.${tar instanceof stream.Readable ? 
` Input stream has high water mark ${tar.readableHighWaterMark}.` : ""}` ); - if (allVersions.length > 0) { - const latestToolcacheVersion = allVersions[0]; - logger.info( - `CLI version ${latestToolcacheVersion} is the latest version in the toolcache.` - ); - return latestToolcacheVersion; + try { + const args = ["-x", "--zstd", "--ignore-zeros"]; + if (tarVersion.type === "gnu") { + args.push("--warning=no-unknown-keyword"); + args.push("--overwrite"); + } + args.push("-f", tar instanceof stream.Readable ? "-" : tar, "-C", dest); + process.stdout.write(`[command]tar ${args.join(" ")} +`); + await new Promise((resolve5, reject) => { + const tarProcess = (0, import_child_process.spawn)("tar", args, { stdio: "pipe" }); + let stdout = ""; + tarProcess.stdout?.on("data", (data) => { + stdout += data.toString(); + process.stdout.write(data); + }); + let stderr = ""; + tarProcess.stderr?.on("data", (data) => { + stderr += data.toString(); + process.stdout.write(data); + }); + tarProcess.on("error", (err) => { + reject(new Error(`Error while extracting tar: ${err}`)); + }); + if (tar instanceof stream.Readable) { + tar.pipe(tarProcess.stdin).on("error", (err) => { + reject( + new Error(`Error while downloading and extracting tar: ${err}`) + ); + }); + } + tarProcess.on("exit", (code) => { + if (code !== 0) { + reject( + new CommandInvocationError( + "tar", + args, + code ?? void 0, + stdout, + stderr + ) + ); + } + resolve5(); + }); + }); + } catch (e) { + await cleanUpPath(dest, "extraction destination directory", logger); + throw e; } - return void 0; -} -function isReservedToolsValue(tools) { - return CODEQL_BUNDLE_VERSION_ALIAS.includes(tools) || CODEQL_NIGHTLY_TOOLS_INPUTS.includes(tools) || tools === CODEQL_TOOLCACHE_INPUT; } - -// src/tracer-config.ts -async function shouldEnableIndirectTracing(codeql, config) { - if (config.buildMode === "none" /* None */) { - return false; - } - if (config.buildMode === "autobuild" /* Autobuild */) { - return false; +var KNOWN_EXTENSIONS = { + "tar.gz": "gzip", + "tar.zst": "zstd" +}; +function inferCompressionMethod(tarPath) { + for (const [ext, method] of Object.entries(KNOWN_EXTENSIONS)) { + if (tarPath.endsWith(`.${ext}`)) { + return method; + } } - return asyncSome(config.languages, (l) => codeql.isTracedLanguage(l)); + return void 0; } -// src/codeql.ts -var cachedCodeQL = void 0; -var CODEQL_MINIMUM_VERSION = "2.17.6"; -var CODEQL_NEXT_MINIMUM_VERSION = "2.17.6"; -var GHES_VERSION_MOST_RECENTLY_DEPRECATED = "3.13"; -var GHES_MOST_RECENT_DEPRECATION_DATE = "2025-06-19"; -var EXTRACTION_DEBUG_MODE_VERBOSITY = "progress++"; -var CODEQL_VERSION_CACHE_CLEANUP = "2.17.1"; -async function setupCodeQL(toolsInput, apiDetails, tempDir, variant, defaultCliVersion, features, logger, checkVersion) { - try { - const { - codeqlFolder, - toolsDownloadStatusReport, - toolsSource, - toolsVersion, - zstdAvailability - } = await setupCodeQLBundle( - toolsInput, - apiDetails, - tempDir, - variant, - defaultCliVersion, - features, - logger - ); - logger.debug( - `Bundle download status report: ${JSON.stringify( - toolsDownloadStatusReport - )}` - ); - let codeqlCmd = path9.join(codeqlFolder, "codeql", "codeql"); - if (process.platform === "win32") { - codeqlCmd += ".exe"; - } else if (process.platform !== "linux" && process.platform !== "darwin") { - throw new ConfigurationError( - `Unsupported platform: ${process.platform}` +// src/tools-download.ts +var fs7 = __toESM(require("fs")); +var os = __toESM(require("os")); +var path7 = __toESM(require("path")); +var 
import_perf_hooks = require("perf_hooks"); +var core9 = __toESM(require_core()); +var import_http_client = __toESM(require_lib()); +var toolcache2 = __toESM(require_tool_cache()); +var import_follow_redirects = __toESM(require_follow_redirects()); +var semver7 = __toESM(require_semver2()); +var STREAMING_HIGH_WATERMARK_BYTES = 4 * 1024 * 1024; +var TOOLCACHE_TOOL_NAME = "CodeQL"; +function makeDownloadFirstToolsDownloadDurations(downloadDurationMs, extractionDurationMs) { + return { + combinedDurationMs: downloadDurationMs + extractionDurationMs, + downloadDurationMs, + extractionDurationMs, + streamExtraction: false + }; +} +function makeStreamedToolsDownloadDurations(combinedDurationMs) { + return { + combinedDurationMs, + downloadDurationMs: void 0, + extractionDurationMs: void 0, + streamExtraction: true + }; +} +async function downloadAndExtract(codeqlURL, compressionMethod, dest, authorization, headers, tarVersion, logger) { + logger.info( + `Downloading CodeQL tools from ${codeqlURL} . This may take a while.` + ); + try { + if (compressionMethod === "zstd" && process.platform === "linux") { + logger.info(`Streaming the extraction of the CodeQL bundle.`); + const toolsInstallStart = import_perf_hooks.performance.now(); + await downloadAndExtractZstdWithStreaming( + codeqlURL, + dest, + authorization, + headers, + tarVersion, + logger + ); + const combinedDurationMs = Math.round( + import_perf_hooks.performance.now() - toolsInstallStart + ); + logger.info( + `Finished downloading and extracting CodeQL bundle to ${dest} (${formatDuration( + combinedDurationMs + )}).` ); + return { + compressionMethod, + toolsUrl: sanitizeUrlForStatusReport(codeqlURL), + ...makeStreamedToolsDownloadDurations(combinedDurationMs) + }; } - cachedCodeQL = await getCodeQLForCmd(codeqlCmd, checkVersion); - return { - codeql: cachedCodeQL, - toolsDownloadStatusReport, - toolsSource, - toolsVersion, - zstdAvailability - }; - } catch (rawError) { - const e = wrapApiConfigurationError(rawError); - const ErrorClass = e instanceof ConfigurationError || e instanceof Error && e.message.includes("ENOSPC") ? ConfigurationError : Error; - throw new ErrorClass( - `Unable to download and extract CodeQL CLI: ${getErrorMessage(e)}${e instanceof Error && e.stack ? 
` - -Details: ${e.stack}` : ""}` + } catch (e) { + core9.warning( + `Failed to download and extract CodeQL bundle using streaming with error: ${getErrorMessage(e)}` + ); + core9.warning(`Falling back to downloading the bundle before extracting.`); + await cleanUpPath(dest, "CodeQL bundle", logger); + } + const toolsDownloadStart = import_perf_hooks.performance.now(); + const archivedBundlePath = await toolcache2.downloadTool( + codeqlURL, + void 0, + authorization, + headers + ); + const downloadDurationMs = Math.round(import_perf_hooks.performance.now() - toolsDownloadStart); + logger.info( + `Finished downloading CodeQL bundle to ${archivedBundlePath} (${formatDuration( + downloadDurationMs + )}).` + ); + let extractionDurationMs; + try { + logger.info("Extracting CodeQL bundle."); + const extractionStart = import_perf_hooks.performance.now(); + await extract( + archivedBundlePath, + dest, + compressionMethod, + tarVersion, + logger + ); + extractionDurationMs = Math.round(import_perf_hooks.performance.now() - extractionStart); + logger.info( + `Finished extracting CodeQL bundle to ${dest} (${formatDuration( + extractionDurationMs + )}).` + ); + } finally { + await cleanUpPath(archivedBundlePath, "CodeQL bundle archive", logger); + } + return { + compressionMethod, + toolsUrl: sanitizeUrlForStatusReport(codeqlURL), + ...makeDownloadFirstToolsDownloadDurations( + downloadDurationMs, + extractionDurationMs + ) + }; +} +async function downloadAndExtractZstdWithStreaming(codeqlURL, dest, authorization, headers, tarVersion, logger) { + fs7.mkdirSync(dest, { recursive: true }); + const agent = new import_http_client.HttpClient().getAgent(codeqlURL); + headers = Object.assign( + { "User-Agent": "CodeQL Action" }, + authorization ? { authorization } : {}, + headers + ); + const response = await new Promise( + (resolve5) => import_follow_redirects.https.get( + codeqlURL, + { + headers, + // Increase the high water mark to improve performance. + highWaterMark: STREAMING_HIGH_WATERMARK_BYTES, + // Use the agent to respect proxy settings. + agent + }, + (r) => resolve5(r) + ) + ); + if (response.statusCode !== 200) { + throw new Error( + `Failed to download CodeQL bundle from ${codeqlURL}. HTTP status code: ${response.statusCode}.` ); } + await extractTarZst(response, dest, tarVersion, logger); +} +function getToolcacheDirectory(version) { + return path7.join( + getRequiredEnvParam("RUNNER_TOOL_CACHE"), + TOOLCACHE_TOOL_NAME, + semver7.clean(version) || version, + os.arch() || "" + ); +} +function writeToolcacheMarkerFile(extractedPath, logger) { + const markerFilePath = `${extractedPath}.complete`; + fs7.writeFileSync(markerFilePath, ""); + logger.info(`Created toolcache marker file ${markerFilePath}`); +} +function sanitizeUrlForStatusReport(url2) { + return ["github/codeql-action", "dsp-testing/codeql-cli-nightlies"].some( + (repo) => url2.startsWith(`https://github.com/${repo}/releases/download/`) + ) ? 
url2 : "sanitized-value"; } -async function getCodeQL(cmd) { - if (cachedCodeQL === void 0) { - cachedCodeQL = await getCodeQLForCmd(cmd, true); + +// src/setup-codeql.ts +var CODEQL_DEFAULT_ACTION_REPOSITORY = "github/codeql-action"; +var CODEQL_NIGHTLIES_REPOSITORY_OWNER = "dsp-testing"; +var CODEQL_NIGHTLIES_REPOSITORY_NAME = "codeql-cli-nightlies"; +var CODEQL_BUNDLE_VERSION_ALIAS = ["linked", "latest"]; +var CODEQL_NIGHTLY_TOOLS_INPUTS = ["nightly", "nightly-latest"]; +var CODEQL_TOOLCACHE_INPUT = "toolcache"; +function getCodeQLBundleExtension(compressionMethod) { + switch (compressionMethod) { + case "gzip": + return ".tar.gz"; + case "zstd": + return ".tar.zst"; + default: + assertNever(compressionMethod); } - return cachedCodeQL; } -async function getCodeQLForCmd(cmd, checkVersion) { - const codeql = { - getPath() { - return cmd; - }, - async getVersion() { - let result = getCachedCodeQlVersion(); - if (result === void 0) { - const output = await runCli(cmd, ["version", "--format=json"], { - noStreamStdout: true - }); - try { - result = JSON.parse(output); - } catch { - throw Error( - `Invalid JSON output from \`version --format=json\`: ${output}` +function getCodeQLBundleName(compressionMethod) { + const extension = getCodeQLBundleExtension(compressionMethod); + let platform; + if (process.platform === "win32") { + platform = "win64"; + } else if (process.platform === "linux") { + platform = "linux64"; + } else if (process.platform === "darwin") { + platform = "osx64"; + } else { + return `codeql-bundle${extension}`; + } + return `codeql-bundle-${platform}${extension}`; +} +function getCodeQLActionRepository(logger) { + if (isRunningLocalAction()) { + logger.info( + "The CodeQL Action is checked out locally. Using the default CodeQL Action repository." + ); + return CODEQL_DEFAULT_ACTION_REPOSITORY; + } + return getRequiredEnvParam("GITHUB_ACTION_REPOSITORY"); +} +async function getCodeQLBundleDownloadURL(tagName, apiDetails, compressionMethod, logger) { + const codeQLActionRepository = getCodeQLActionRepository(logger); + const potentialDownloadSources = [ + // This GitHub instance, and this Action. + [apiDetails.url, codeQLActionRepository], + // This GitHub instance, and the canonical Action. + [apiDetails.url, CODEQL_DEFAULT_ACTION_REPOSITORY], + // GitHub.com, and the canonical Action. 
+ [GITHUB_DOTCOM_URL, CODEQL_DEFAULT_ACTION_REPOSITORY] + ]; + const uniqueDownloadSources = potentialDownloadSources.filter( + (source, index, self2) => { + return !self2.slice(0, index).some((other) => (0, import_fast_deep_equal.default)(source, other)); + } + ); + const codeQLBundleName = getCodeQLBundleName(compressionMethod); + for (const downloadSource of uniqueDownloadSources) { + const [apiURL, repository] = downloadSource; + if (apiURL === GITHUB_DOTCOM_URL && repository === CODEQL_DEFAULT_ACTION_REPOSITORY) { + break; + } + const [repositoryOwner, repositoryName] = repository.split("/"); + try { + const release = await getApiClient().rest.repos.getReleaseByTag({ + owner: repositoryOwner, + repo: repositoryName, + tag: tagName + }); + for (const asset of release.data.assets) { + if (asset.name === codeQLBundleName) { + logger.info( + `Found CodeQL bundle ${codeQLBundleName} in ${repository} on ${apiURL} with URL ${asset.url}.` ); + return asset.url; } - cacheCodeQlVersion(result); } - return result; - }, - async printVersion() { - await runCli(cmd, ["version", "--format=json"]); - }, - async supportsFeature(feature) { - return isSupportedToolsFeature(await this.getVersion(), feature); - }, - async isTracedLanguage(language) { - const extractorPath = await this.resolveExtractor(language); - const tracingConfigPath = path9.join( - extractorPath, - "tools", - "tracing-config.lua" + } catch (e) { + logger.info( + `Looked for CodeQL bundle ${codeQLBundleName} in ${repository} on ${apiURL} but got error ${e}.` ); - return fs9.existsSync(tracingConfigPath); - }, - async isScannedLanguage(language) { - return !await this.isTracedLanguage(language); - }, - async databaseInitCluster(config, sourceRoot, processName, qlconfigFile, logger) { - const extraArgs = config.languages.map( - (language) => `--language=${language}` + } + } + return `https://github.com/${CODEQL_DEFAULT_ACTION_REPOSITORY}/releases/download/${tagName}/${codeQLBundleName}`; +} +function tryGetBundleVersionFromTagName(tagName, logger) { + const match = tagName.match(/^codeql-bundle-(.*)$/); + if (match === null || match.length < 2) { + logger.debug(`Could not determine bundle version from tag ${tagName}.`); + return void 0; + } + return match[1]; +} +function tryGetTagNameFromUrl(url2, logger) { + const matches = [...url2.matchAll(/\/(codeql-bundle-[^/]*)\//g)]; + if (matches.length === 0) { + logger.debug(`Could not determine tag name for URL ${url2}.`); + return void 0; + } + const match = matches[matches.length - 1]; + if (match?.length !== 2) { + logger.debug( + `Could not determine tag name for URL ${url2}. Matched ${JSON.stringify( + match + )}.` + ); + return void 0; + } + return match[1]; +} +function convertToSemVer(version, logger) { + if (!semver8.valid(version)) { + logger.debug( + `Bundle version ${version} is not in SemVer format. 
Will treat it as pre-release 0.0.0-${version}.` + ); + version = `0.0.0-${version}`; + } + const s = semver8.clean(version); + if (!s) { + throw new Error(`Bundle version ${version} is not in SemVer format.`); + } + return s; +} +async function findOverridingToolsInCache(humanReadableVersion, logger) { + const candidates = toolcache3.findAllVersions("CodeQL").filter(isGoodVersion).map((version) => ({ + folder: toolcache3.find("CodeQL", version), + version + })).filter(({ folder }) => fs8.existsSync(path8.join(folder, "pinned-version"))); + if (candidates.length === 1) { + const candidate = candidates[0]; + logger.debug( + `CodeQL tools version ${candidate.version} in toolcache overriding version ${humanReadableVersion}.` + ); + return { + codeqlFolder: candidate.folder, + sourceType: "toolcache", + toolsVersion: candidate.version + }; + } else if (candidates.length === 0) { + logger.debug( + "Did not find any candidate pinned versions of the CodeQL tools in the toolcache." + ); + } else { + logger.debug( + "Could not use CodeQL tools from the toolcache since more than one candidate pinned version was found in the toolcache." + ); + } + return void 0; +} +async function getCodeQLSource(toolsInput, defaultCliVersion, apiDetails, variant, tarSupportsZstd, features, logger) { + if (toolsInput && !isReservedToolsValue(toolsInput) && !toolsInput.startsWith("http")) { + logger.info(`Using CodeQL CLI from local path ${toolsInput}`); + const compressionMethod2 = inferCompressionMethod(toolsInput); + if (compressionMethod2 === void 0) { + throw new ConfigurationError( + `Could not infer compression method from path ${toolsInput}. Please specify a path ending in '.tar.gz' or '.tar.zst'.` ); - if (await shouldEnableIndirectTracing(codeql, config)) { - extraArgs.push("--begin-tracing"); - extraArgs.push(...await getTrapCachingExtractorConfigArgs(config)); - extraArgs.push(`--trace-process-name=${processName}`); - } - const codeScanningConfigFile = await writeCodeScanningConfigFile( - config, - logger + } + return { + codeqlTarPath: toolsInput, + compressionMethod: compressionMethod2, + sourceType: "local", + toolsVersion: "local" + }; + } + let cliVersion2; + let tagName; + let url2; + const canForceNightlyWithFF = isDynamicWorkflow() || isInTestMode(); + const forceNightlyValueFF = await features.getValue("force_nightly" /* ForceNightly */); + const forceNightly = forceNightlyValueFF && canForceNightlyWithFF; + const nightlyRequestedByToolsInput = toolsInput !== void 0 && CODEQL_NIGHTLY_TOOLS_INPUTS.includes(toolsInput); + if (forceNightly || nightlyRequestedByToolsInput) { + if (forceNightly) { + logger.info( + `Using the latest CodeQL CLI nightly, as forced by the ${"force_nightly" /* ForceNightly */} feature flag.` ); - const externalRepositoryToken = getOptionalInput( - "external-repository-token" + addNoLanguageDiagnostic( + void 0, + makeDiagnostic( + "codeql-action/forced-nightly-cli", + "A nightly release of CodeQL was used", + { + markdownMessage: "GitHub configured this analysis to use a nightly release of CodeQL to allow you to preview changes from an upcoming release.\n\nNightly releases do not undergo the same validation as regular releases and may lead to analysis instability.\n\nIf use of a nightly CodeQL release for this analysis is unexpected, please contact GitHub support.", + visibility: { + cliSummaryTable: true, + statusPage: true, + telemetry: true + }, + severity: "note" + } + ) ); - extraArgs.push(`--codescanning-config=${codeScanningConfigFile}`); - if 
(externalRepositoryToken) { - extraArgs.push("--external-repository-token-stdin"); + } else { + logger.info( + `Using the latest CodeQL CLI nightly, as requested by 'tools: ${toolsInput}'.` + ); + } + toolsInput = await getNightlyToolsUrl(logger); + } + const forceShippedTools = toolsInput && CODEQL_BUNDLE_VERSION_ALIAS.includes(toolsInput); + if (forceShippedTools) { + cliVersion2 = cliVersion; + tagName = bundleVersion; + logger.info( + `'tools: ${toolsInput}' was requested, so using CodeQL version ${cliVersion2}, the version shipped with the Action.` + ); + if (toolsInput === "latest") { + logger.warning( + "`tools: latest` has been renamed to `tools: linked`, but the old name is still supported. No action is required." + ); + } + } else if (toolsInput !== void 0 && toolsInput === CODEQL_TOOLCACHE_INPUT) { + let latestToolcacheVersion; + const allowToolcacheValueFF = await features.getValue( + "allow_toolcache_input" /* AllowToolcacheInput */ + ); + const allowToolcacheValue = allowToolcacheValueFF && (isDynamicWorkflow() || isInTestMode()); + if (allowToolcacheValue) { + logger.info( + `Attempting to use the latest CodeQL CLI version in the toolcache, as requested by 'tools: ${toolsInput}'.` + ); + latestToolcacheVersion = getLatestToolcacheVersion(logger); + if (latestToolcacheVersion) { + cliVersion2 = latestToolcacheVersion; } - if (config.buildMode !== void 0) { - extraArgs.push(`--build-mode=${config.buildMode}`); + } + if (latestToolcacheVersion === void 0) { + if (allowToolcacheValue) { + logger.info( + `Found no CodeQL CLI in the toolcache, ignoring 'tools: ${toolsInput}'...` + ); + } else { + if (allowToolcacheValueFF) { + logger.warning( + `Ignoring 'tools: ${toolsInput}' because the workflow was not triggered dynamically.` + ); + } else { + logger.info( + `Ignoring 'tools: ${toolsInput}' because the feature is not enabled.` + ); + } } - if (qlconfigFile !== void 0) { - extraArgs.push(`--qlconfig-file=${qlconfigFile}`); + cliVersion2 = defaultCliVersion.cliVersion; + tagName = defaultCliVersion.tagName; + } + } else if (toolsInput !== void 0) { + tagName = tryGetTagNameFromUrl(toolsInput, logger); + url2 = toolsInput; + if (tagName) { + const bundleVersion3 = tryGetBundleVersionFromTagName(tagName, logger); + if (bundleVersion3 && semver8.valid(bundleVersion3)) { + cliVersion2 = convertToSemVer(bundleVersion3, logger); } - const overwriteFlag = isSupportedToolsFeature( - await this.getVersion(), - "forceOverwrite" /* ForceOverwrite */ - ) ? "--force-overwrite" : "--overwrite"; - const overlayDatabaseMode = config.overlayDatabaseMode; - if (overlayDatabaseMode === "overlay" /* Overlay */) { - const overlayChangesFile = await writeOverlayChangesFile( - config, - sourceRoot, - logger + } + } else { + cliVersion2 = defaultCliVersion.cliVersion; + tagName = defaultCliVersion.tagName; + } + const bundleVersion2 = tagName && tryGetBundleVersionFromTagName(tagName, logger); + const humanReadableVersion = cliVersion2 ?? (bundleVersion2 && convertToSemVer(bundleVersion2, logger)) ?? tagName ?? url2 ?? "unknown"; + logger.debug( + `Attempting to obtain CodeQL tools. CLI version: ${cliVersion2 ?? "unknown"}, bundle tag name: ${tagName ?? "unknown"}, URL: ${url2 ?? 
"unspecified"}.` + ); + let codeqlFolder; + if (cliVersion2) { + codeqlFolder = toolcache3.find("CodeQL", cliVersion2); + if (!codeqlFolder) { + logger.debug( + `Didn't find a version of the CodeQL tools in the toolcache with a version number exactly matching ${cliVersion2}.` + ); + const allVersions = toolcache3.findAllVersions("CodeQL"); + logger.debug( + `Found the following versions of the CodeQL tools in the toolcache: ${JSON.stringify( + allVersions + )}.` + ); + const candidateVersions = allVersions.filter( + (version) => version.startsWith(`${cliVersion2}-`) + ); + if (candidateVersions.length === 1) { + logger.debug( + `Exactly one version of the CodeQL tools starting with ${cliVersion2} found in the toolcache, using that.` ); - extraArgs.push(`--overlay-changes=${overlayChangesFile}`); - } else if (overlayDatabaseMode === "overlay-base" /* OverlayBase */) { - extraArgs.push("--overlay-base"); + codeqlFolder = toolcache3.find("CodeQL", candidateVersions[0]); + } else if (candidateVersions.length === 0) { + logger.debug( + `Didn't find any versions of the CodeQL tools starting with ${cliVersion2} in the toolcache. Trying next fallback method.` + ); + } else { + logger.warning( + `Found ${candidateVersions.length} versions of the CodeQL tools starting with ${cliVersion2} in the toolcache, but at most one was expected.` + ); + logger.debug("Trying next fallback method."); + } + } + } + if (!codeqlFolder && tagName) { + const fallbackVersion = await tryGetFallbackToolcacheVersion( + cliVersion2, + tagName, + logger + ); + if (fallbackVersion) { + codeqlFolder = toolcache3.find("CodeQL", fallbackVersion); + } else { + logger.debug( + `Could not determine a fallback toolcache version number for CodeQL tools version ${humanReadableVersion}.` + ); + } + } + if (codeqlFolder) { + logger.info( + `Found CodeQL tools version ${humanReadableVersion} in the toolcache.` + ); + } else { + logger.info( + `Did not find CodeQL tools version ${humanReadableVersion} in the toolcache.` + ); + } + if (codeqlFolder) { + if (cliVersion2) { + logger.info( + `Using CodeQL CLI version ${cliVersion2} from toolcache at ${codeqlFolder}` + ); + } else { + logger.info(`Using CodeQL CLI from toolcache at ${codeqlFolder}`); + } + return { + codeqlFolder, + sourceType: "toolcache", + toolsVersion: cliVersion2 ?? humanReadableVersion + }; + } + if (variant === "GitHub Enterprise Server" /* GHES */ && !forceShippedTools && !toolsInput) { + const result = await findOverridingToolsInCache( + humanReadableVersion, + logger + ); + if (result !== void 0) { + return result; + } + } + let compressionMethod; + if (!url2) { + compressionMethod = cliVersion2 !== void 0 && await useZstdBundle(cliVersion2, tarSupportsZstd) ? "zstd" : "gzip"; + url2 = await getCodeQLBundleDownloadURL( + tagName, + apiDetails, + compressionMethod, + logger + ); + } else { + const method = inferCompressionMethod(url2); + if (method === void 0) { + throw new ConfigurationError( + `Could not infer compression method from URL ${url2}. Please specify a URL ending in '.tar.gz' or '.tar.zst'.` + ); + } + compressionMethod = method; + } + if (cliVersion2) { + logger.info(`Using CodeQL CLI version ${cliVersion2} sourced from ${url2} .`); + } else { + logger.info(`Using CodeQL CLI sourced from ${url2} .`); + } + return { + bundleVersion: tagName && tryGetBundleVersionFromTagName(tagName, logger), + cliVersion: cliVersion2, + codeqlURL: url2, + compressionMethod, + sourceType: "download", + toolsVersion: cliVersion2 ?? 
humanReadableVersion + }; +} +async function tryGetFallbackToolcacheVersion(cliVersion2, tagName, logger) { + const bundleVersion2 = tryGetBundleVersionFromTagName(tagName, logger); + if (!bundleVersion2) { + return void 0; + } + const fallbackVersion = convertToSemVer(bundleVersion2, logger); + logger.debug( + `Computed a fallback toolcache version number of ${fallbackVersion} for CodeQL version ${cliVersion2 ?? tagName}.` + ); + return fallbackVersion; +} +var downloadCodeQL = async function(codeqlURL, compressionMethod, maybeBundleVersion, maybeCliVersion, apiDetails, tarVersion, tempDir, logger) { + const parsedCodeQLURL = new URL(codeqlURL); + const searchParams = new URLSearchParams(parsedCodeQLURL.search); + const headers = { + accept: "application/octet-stream" + }; + let authorization = void 0; + if (searchParams.has("token")) { + logger.debug("CodeQL tools URL contains an authorization token."); + } else { + authorization = getAuthorizationHeaderFor( + logger, + apiDetails, + codeqlURL + ); + } + const toolcacheInfo = getToolcacheDestinationInfo( + maybeBundleVersion, + maybeCliVersion, + logger + ); + const extractedBundlePath = toolcacheInfo?.path ?? getTempExtractionDir(tempDir); + const statusReport = await downloadAndExtract( + codeqlURL, + compressionMethod, + extractedBundlePath, + authorization, + { "User-Agent": "CodeQL Action", ...headers }, + tarVersion, + logger + ); + if (!toolcacheInfo) { + logger.debug( + `Could not cache CodeQL tools because we could not determine the bundle version from the URL ${codeqlURL}.` + ); + return { + codeqlFolder: extractedBundlePath, + statusReport, + toolsVersion: maybeCliVersion ?? "unknown" + }; + } + writeToolcacheMarkerFile(toolcacheInfo.path, logger); + return { + codeqlFolder: extractedBundlePath, + statusReport, + toolsVersion: maybeCliVersion ?? toolcacheInfo.version + }; +}; +function getToolcacheDestinationInfo(maybeBundleVersion, maybeCliVersion, logger) { + if (maybeBundleVersion) { + const version = getCanonicalToolcacheVersion( + maybeCliVersion, + maybeBundleVersion, + logger + ); + return { + path: getToolcacheDirectory(version), + version + }; + } + return void 0; +} +function getCanonicalToolcacheVersion(cliVersion2, bundleVersion2, logger) { + if (!cliVersion2?.match(/^[0-9]+\.[0-9]+\.[0-9]+$/)) { + return convertToSemVer(bundleVersion2, logger); + } + return cliVersion2; +} +async function setupCodeQLBundle(toolsInput, apiDetails, tempDir, variant, defaultCliVersion, features, logger) { + if (!await isBinaryAccessible("tar", logger)) { + throw new ConfigurationError( + "Could not find tar in PATH, so unable to extract CodeQL bundle." 
+ ); + } + const zstdAvailability = await isZstdAvailable(logger); + const source = await getCodeQLSource( + toolsInput, + defaultCliVersion, + apiDetails, + variant, + zstdAvailability.available, + features, + logger + ); + let codeqlFolder; + let toolsVersion = source.toolsVersion; + let toolsDownloadStatusReport; + let toolsSource; + switch (source.sourceType) { + case "local": { + codeqlFolder = await extract( + source.codeqlTarPath, + getTempExtractionDir(tempDir), + source.compressionMethod, + zstdAvailability.version, + logger + ); + toolsSource = "LOCAL" /* Local */; + break; + } + case "toolcache": + codeqlFolder = source.codeqlFolder; + logger.debug(`CodeQL found in cache ${codeqlFolder}`); + toolsSource = "TOOLCACHE" /* Toolcache */; + break; + case "download": { + const result = await downloadCodeQL( + source.codeqlURL, + source.compressionMethod, + source.bundleVersion, + source.cliVersion, + apiDetails, + zstdAvailability.version, + tempDir, + logger + ); + toolsVersion = result.toolsVersion; + codeqlFolder = result.codeqlFolder; + toolsDownloadStatusReport = result.statusReport; + toolsSource = "DOWNLOAD" /* Download */; + break; + } + default: + assertNever(source); + } + return { + codeqlFolder, + toolsDownloadStatusReport, + toolsSource, + toolsVersion, + zstdAvailability + }; +} +async function useZstdBundle(cliVersion2, tarSupportsZstd) { + return ( + // In testing, gzip performs better than zstd on Windows. + process.platform !== "win32" && tarSupportsZstd && semver8.gte(cliVersion2, CODEQL_VERSION_ZSTD_BUNDLE) + ); +} +function getTempExtractionDir(tempDir) { + return path8.join(tempDir, v4_default()); +} +async function getNightlyToolsUrl(logger) { + const zstdAvailability = await isZstdAvailable(logger); + const compressionMethod = await useZstdBundle( + CODEQL_VERSION_ZSTD_BUNDLE, + zstdAvailability.available + ) ? 
"zstd" : "gzip"; + try { + const release = await getApiClient().rest.repos.listReleases({ + owner: CODEQL_NIGHTLIES_REPOSITORY_OWNER, + repo: CODEQL_NIGHTLIES_REPOSITORY_NAME, + per_page: 1, + page: 1, + prerelease: true + }); + const latestRelease = release.data[0]; + if (!latestRelease) { + throw new Error("Could not find the latest nightly release."); + } + return `https://github.com/${CODEQL_NIGHTLIES_REPOSITORY_OWNER}/${CODEQL_NIGHTLIES_REPOSITORY_NAME}/releases/download/${latestRelease.tag_name}/${getCodeQLBundleName(compressionMethod)}`; + } catch (e) { + throw new Error( + `Failed to retrieve the latest nightly release: ${wrapError(e)}` + ); + } +} +function getLatestToolcacheVersion(logger) { + const allVersions = toolcache3.findAllVersions("CodeQL").sort((a, b) => semver8.compare(b, a)); + logger.debug( + `Found the following versions of the CodeQL tools in the toolcache: ${JSON.stringify( + allVersions + )}.` + ); + if (allVersions.length > 0) { + const latestToolcacheVersion = allVersions[0]; + logger.info( + `CLI version ${latestToolcacheVersion} is the latest version in the toolcache.` + ); + return latestToolcacheVersion; + } + return void 0; +} +function isReservedToolsValue(tools) { + return CODEQL_BUNDLE_VERSION_ALIAS.includes(tools) || CODEQL_NIGHTLY_TOOLS_INPUTS.includes(tools) || tools === CODEQL_TOOLCACHE_INPUT; +} + +// src/tracer-config.ts +async function shouldEnableIndirectTracing(codeql, config) { + if (config.buildMode === "none" /* None */) { + return false; + } + if (config.buildMode === "autobuild" /* Autobuild */) { + return false; + } + return asyncSome(config.languages, (l) => codeql.isTracedLanguage(l)); +} + +// src/codeql.ts +var cachedCodeQL = void 0; +var CODEQL_MINIMUM_VERSION = "2.17.6"; +var CODEQL_NEXT_MINIMUM_VERSION = "2.17.6"; +var GHES_VERSION_MOST_RECENTLY_DEPRECATED = "3.13"; +var GHES_MOST_RECENT_DEPRECATION_DATE = "2025-06-19"; +var EXTRACTION_DEBUG_MODE_VERBOSITY = "progress++"; +var CODEQL_VERSION_CACHE_CLEANUP = "2.17.1"; +async function setupCodeQL(toolsInput, apiDetails, tempDir, variant, defaultCliVersion, features, logger, checkVersion) { + try { + const { + codeqlFolder, + toolsDownloadStatusReport, + toolsSource, + toolsVersion, + zstdAvailability + } = await setupCodeQLBundle( + toolsInput, + apiDetails, + tempDir, + variant, + defaultCliVersion, + features, + logger + ); + logger.debug( + `Bundle download status report: ${JSON.stringify( + toolsDownloadStatusReport + )}` + ); + let codeqlCmd = path9.join(codeqlFolder, "codeql", "codeql"); + if (process.platform === "win32") { + codeqlCmd += ".exe"; + } else if (process.platform !== "linux" && process.platform !== "darwin") { + throw new ConfigurationError( + `Unsupported platform: ${process.platform}` + ); + } + cachedCodeQL = await getCodeQLForCmd(codeqlCmd, checkVersion); + return { + codeql: cachedCodeQL, + toolsDownloadStatusReport, + toolsSource, + toolsVersion, + zstdAvailability + }; + } catch (rawError) { + const e = wrapApiConfigurationError(rawError); + const ErrorClass = e instanceof ConfigurationError || e instanceof Error && e.message.includes("ENOSPC") ? ConfigurationError : Error; + throw new ErrorClass( + `Unable to download and extract CodeQL CLI: ${getErrorMessage(e)}${e instanceof Error && e.stack ? 
` + +Details: ${e.stack}` : ""}` + ); + } +} +async function getCodeQLForCmd(cmd, checkVersion) { + const codeql = { + getPath() { + return cmd; + }, + async getVersion() { + let result = getCachedCodeQlVersion(); + if (result === void 0) { + const output = await runCli(cmd, ["version", "--format=json"], { + noStreamStdout: true + }); + try { + result = JSON.parse(output); + } catch { + throw Error( + `Invalid JSON output from \`version --format=json\`: ${output}` + ); + } + cacheCodeQlVersion(result); + } + return result; + }, + async printVersion() { + await runCli(cmd, ["version", "--format=json"]); + }, + async supportsFeature(feature) { + return isSupportedToolsFeature(await this.getVersion(), feature); + }, + async isTracedLanguage(language) { + const extractorPath = await this.resolveExtractor(language); + const tracingConfigPath = path9.join( + extractorPath, + "tools", + "tracing-config.lua" + ); + return fs9.existsSync(tracingConfigPath); + }, + async isScannedLanguage(language) { + return !await this.isTracedLanguage(language); + }, + async databaseInitCluster(config, sourceRoot, processName, qlconfigFile, logger) { + const extraArgs = config.languages.map( + (language) => `--language=${language}` + ); + if (await shouldEnableIndirectTracing(codeql, config)) { + extraArgs.push("--begin-tracing"); + extraArgs.push(...await getTrapCachingExtractorConfigArgs(config)); + extraArgs.push(`--trace-process-name=${processName}`); + } + const codeScanningConfigFile = await writeCodeScanningConfigFile( + config, + logger + ); + const externalRepositoryToken = getOptionalInput( + "external-repository-token" + ); + extraArgs.push(`--codescanning-config=${codeScanningConfigFile}`); + if (externalRepositoryToken) { + extraArgs.push("--external-repository-token-stdin"); + } + if (config.buildMode !== void 0) { + extraArgs.push(`--build-mode=${config.buildMode}`); + } + if (qlconfigFile !== void 0) { + extraArgs.push(`--qlconfig-file=${qlconfigFile}`); + } + const overwriteFlag = isSupportedToolsFeature( + await this.getVersion(), + "forceOverwrite" /* ForceOverwrite */ + ) ? "--force-overwrite" : "--overwrite"; + const overlayDatabaseMode = config.overlayDatabaseMode; + if (overlayDatabaseMode === "overlay" /* Overlay */) { + const overlayChangesFile = await writeOverlayChangesFile( + config, + sourceRoot, + logger + ); + extraArgs.push(`--overlay-changes=${overlayChangesFile}`); + } else if (overlayDatabaseMode === "overlay-base" /* OverlayBase */) { + extraArgs.push("--overlay-base"); } const baselineFilesOptions = config.enableFileCoverageInformation ? 
[ "--calculate-language-specific-baseline", @@ -108865,1250 +109955,126 @@ ${output}` if (mergeRunsFromEqualCategory) { args.push("--sarif-merge-runs-from-equal-category"); } - await runCli(cmd, args); - } - }; - if (checkVersion && !await codeQlVersionAtLeast(codeql, CODEQL_MINIMUM_VERSION)) { - throw new ConfigurationError( - `Expected a CodeQL CLI with version at least ${CODEQL_MINIMUM_VERSION} but got version ${(await codeql.getVersion()).version}` - ); - } else if (checkVersion && process.env["CODEQL_ACTION_SUPPRESS_DEPRECATED_SOON_WARNING" /* SUPPRESS_DEPRECATED_SOON_WARNING */] !== "true" && !await codeQlVersionAtLeast(codeql, CODEQL_NEXT_MINIMUM_VERSION)) { - const result = await codeql.getVersion(); - core10.warning( - `CodeQL CLI version ${result.version} was discontinued on ${GHES_MOST_RECENT_DEPRECATION_DATE} alongside GitHub Enterprise Server ${GHES_VERSION_MOST_RECENTLY_DEPRECATED} and will not be supported by the next minor release of the CodeQL Action. Please update to CodeQL CLI version ${CODEQL_NEXT_MINIMUM_VERSION} or later. For instance, if you have specified a custom version of the CLI using the 'tools' input to the 'init' Action, you can remove this input to use the default version. - -Alternatively, if you want to continue using CodeQL CLI version ${result.version}, you can replace 'github/codeql-action/*@v${getActionVersion().split(".")[0]}' by 'github/codeql-action/*@v${getActionVersion()}' in your code scanning workflow to continue using this version of the CodeQL Action.` - ); - core10.exportVariable("CODEQL_ACTION_SUPPRESS_DEPRECATED_SOON_WARNING" /* SUPPRESS_DEPRECATED_SOON_WARNING */, "true"); - } - return codeql; -} -function getExtraOptionsFromEnv(paths, { ignoringOptions } = {}) { - const options = getExtraOptionsEnvParam(); - return getExtraOptions(options, paths, []).filter( - (option) => !ignoringOptions?.includes(option) - ); -} -function asExtraOptions(options, pathInfo) { - if (options === void 0) { - return []; - } - if (!Array.isArray(options)) { - const msg = `The extra options for '${pathInfo.join( - "." - )}' ('${JSON.stringify(options)}') are not in an array.`; - throw new Error(msg); - } - return options.map((o) => { - const t = typeof o; - if (t !== "string" && t !== "number" && t !== "boolean") { - const msg = `The extra option for '${pathInfo.join( - "." - )}' ('${JSON.stringify(o)}') is not a primitive value.`; - throw new Error(msg); - } - return `${o}`; - }); -} -function getExtraOptions(options, paths, pathInfo) { - const all = asExtraOptions(options?.["*"], pathInfo.concat("*")); - const specific = paths.length === 0 ? 
asExtraOptions(options, pathInfo) : getExtraOptions( - options?.[paths[0]], - paths?.slice(1), - pathInfo.concat(paths[0]) - ); - return all.concat(specific); -} -async function runCli(cmd, args = [], opts = {}) { - try { - return await runTool(cmd, args, opts); - } catch (e) { - if (e instanceof CommandInvocationError) { - throw wrapCliConfigurationError(new CliError(e)); - } - throw e; - } -} -async function writeCodeScanningConfigFile(config, logger) { - const codeScanningConfigFile = getGeneratedCodeScanningConfigPath(config); - const augmentedConfig = appendExtraQueryExclusions( - config.extraQueryExclusions, - config.computedConfig - ); - logger.info( - `Writing augmented user configuration file to ${codeScanningConfigFile}` - ); - logger.startGroup("Augmented user configuration file contents"); - logger.info(dump(augmentedConfig)); - logger.endGroup(); - fs9.writeFileSync(codeScanningConfigFile, dump(augmentedConfig)); - return codeScanningConfigFile; -} -var TRAP_CACHE_SIZE_MB = 1024; -async function getTrapCachingExtractorConfigArgs(config) { - const result = []; - for (const language of config.languages) - result.push( - await getTrapCachingExtractorConfigArgsForLang(config, language) - ); - return result.flat(); -} -async function getTrapCachingExtractorConfigArgsForLang(config, language) { - const cacheDir = config.trapCaches[language]; - if (cacheDir === void 0) return []; - const write = await isAnalyzingDefaultBranch(); - return [ - `-O=${language}.trap.cache.dir=${cacheDir}`, - `-O=${language}.trap.cache.bound=${TRAP_CACHE_SIZE_MB}`, - `-O=${language}.trap.cache.write=${write}` - ]; -} -function getGeneratedCodeScanningConfigPath(config) { - return path9.resolve(config.tempDir, "user-config.yaml"); -} -function getExtractionVerbosityArguments(enableDebugLogging) { - return enableDebugLogging ? [`--verbosity=${EXTRACTION_DEBUG_MODE_VERBOSITY}`] : []; -} -function applyAutobuildAzurePipelinesTimeoutFix() { - const javaToolOptions = process.env["JAVA_TOOL_OPTIONS"] || ""; - process.env["JAVA_TOOL_OPTIONS"] = [ - ...javaToolOptions.split(/\s+/), - "-Dhttp.keepAlive=false", - "-Dmaven.wagon.http.pool=false" - ].join(" "); -} -async function getJobRunUuidSarifOptions(codeql) { - const jobRunUuid = process.env["JOB_RUN_UUID" /* JOB_RUN_UUID */]; - return jobRunUuid && await codeql.supportsFeature( - "databaseInterpretResultsSupportsSarifRunProperty" /* DatabaseInterpretResultsSupportsSarifRunProperty */ - ) ? 
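// Illustrative sketch, not part of the applied diff: getExtraOptions (shown in this hunk)
// merges the "*" wildcard entries at every level of CODEQL_ACTION_EXTRA_OPTIONS with the
// entries for the specific command path. The option values below are made-up examples,
// and the call assumes the getExtraOptions/asExtraOptions definitions above are in scope:
const exampleExtraOptions = {
  "*": ["--verbose"],
  database: {
    "*": ["--threads=2"],
    init: ["--overwrite"],
  },
};
// For the command path ["database", "init"] this evaluates to
// ["--verbose", "--threads=2", "--overwrite"]: wildcards first, most specific last.
console.log(getExtraOptions(exampleExtraOptions, ["database", "init"], []));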
[`--sarif-run-property=jobRunUuid=${jobRunUuid}`] : []; -} - -// src/fingerprints.ts -var fs10 = __toESM(require("fs")); -var import_path2 = __toESM(require("path")); - -// node_modules/long/index.js -var wasm = null; -try { - wasm = new WebAssembly.Instance( - new WebAssembly.Module( - new Uint8Array([ - // \0asm - 0, - 97, - 115, - 109, - // version 1 - 1, - 0, - 0, - 0, - // section "type" - 1, - 13, - 2, - // 0, () => i32 - 96, - 0, - 1, - 127, - // 1, (i32, i32, i32, i32) => i32 - 96, - 4, - 127, - 127, - 127, - 127, - 1, - 127, - // section "function" - 3, - 7, - 6, - // 0, type 0 - 0, - // 1, type 1 - 1, - // 2, type 1 - 1, - // 3, type 1 - 1, - // 4, type 1 - 1, - // 5, type 1 - 1, - // section "global" - 6, - 6, - 1, - // 0, "high", mutable i32 - 127, - 1, - 65, - 0, - 11, - // section "export" - 7, - 50, - 6, - // 0, "mul" - 3, - 109, - 117, - 108, - 0, - 1, - // 1, "div_s" - 5, - 100, - 105, - 118, - 95, - 115, - 0, - 2, - // 2, "div_u" - 5, - 100, - 105, - 118, - 95, - 117, - 0, - 3, - // 3, "rem_s" - 5, - 114, - 101, - 109, - 95, - 115, - 0, - 4, - // 4, "rem_u" - 5, - 114, - 101, - 109, - 95, - 117, - 0, - 5, - // 5, "get_high" - 8, - 103, - 101, - 116, - 95, - 104, - 105, - 103, - 104, - 0, - 0, - // section "code" - 10, - 191, - 1, - 6, - // 0, "get_high" - 4, - 0, - 35, - 0, - 11, - // 1, "mul" - 36, - 1, - 1, - 126, - 32, - 0, - 173, - 32, - 1, - 173, - 66, - 32, - 134, - 132, - 32, - 2, - 173, - 32, - 3, - 173, - 66, - 32, - 134, - 132, - 126, - 34, - 4, - 66, - 32, - 135, - 167, - 36, - 0, - 32, - 4, - 167, - 11, - // 2, "div_s" - 36, - 1, - 1, - 126, - 32, - 0, - 173, - 32, - 1, - 173, - 66, - 32, - 134, - 132, - 32, - 2, - 173, - 32, - 3, - 173, - 66, - 32, - 134, - 132, - 127, - 34, - 4, - 66, - 32, - 135, - 167, - 36, - 0, - 32, - 4, - 167, - 11, - // 3, "div_u" - 36, - 1, - 1, - 126, - 32, - 0, - 173, - 32, - 1, - 173, - 66, - 32, - 134, - 132, - 32, - 2, - 173, - 32, - 3, - 173, - 66, - 32, - 134, - 132, - 128, - 34, - 4, - 66, - 32, - 135, - 167, - 36, - 0, - 32, - 4, - 167, - 11, - // 4, "rem_s" - 36, - 1, - 1, - 126, - 32, - 0, - 173, - 32, - 1, - 173, - 66, - 32, - 134, - 132, - 32, - 2, - 173, - 32, - 3, - 173, - 66, - 32, - 134, - 132, - 129, - 34, - 4, - 66, - 32, - 135, - 167, - 36, - 0, - 32, - 4, - 167, - 11, - // 5, "rem_u" - 36, - 1, - 1, - 126, - 32, - 0, - 173, - 32, - 1, - 173, - 66, - 32, - 134, - 132, - 32, - 2, - 173, - 32, - 3, - 173, - 66, - 32, - 134, - 132, - 130, - 34, - 4, - 66, - 32, - 135, - 167, - 36, - 0, - 32, - 4, - 167, - 11 - ]) - ), - {} - ).exports; -} catch { -} -function Long(low, high, unsigned) { - this.low = low | 0; - this.high = high | 0; - this.unsigned = !!unsigned; -} -Long.prototype.__isLong__; -Object.defineProperty(Long.prototype, "__isLong__", { value: true }); -function isLong(obj) { - return (obj && obj["__isLong__"]) === true; -} -function ctz32(value) { - var c = Math.clz32(value & -value); - return value ? 31 - c : c; -} -Long.isLong = isLong; -var INT_CACHE = {}; -var UINT_CACHE = {}; -function fromInt(value, unsigned) { - var obj, cachedObj, cache; - if (unsigned) { - value >>>= 0; - if (cache = 0 <= value && value < 256) { - cachedObj = UINT_CACHE[value]; - if (cachedObj) return cachedObj; - } - obj = fromBits(value, 0, true); - if (cache) UINT_CACHE[value] = obj; - return obj; - } else { - value |= 0; - if (cache = -128 <= value && value < 128) { - cachedObj = INT_CACHE[value]; - if (cachedObj) return cachedObj; - } - obj = fromBits(value, value < 0 ? 
-1 : 0, false); - if (cache) INT_CACHE[value] = obj; - return obj; - } -} -Long.fromInt = fromInt; -function fromNumber(value, unsigned) { - if (isNaN(value)) return unsigned ? UZERO : ZERO; - if (unsigned) { - if (value < 0) return UZERO; - if (value >= TWO_PWR_64_DBL) return MAX_UNSIGNED_VALUE; - } else { - if (value <= -TWO_PWR_63_DBL) return MIN_VALUE; - if (value + 1 >= TWO_PWR_63_DBL) return MAX_VALUE; - } - if (value < 0) return fromNumber(-value, unsigned).neg(); - return fromBits( - value % TWO_PWR_32_DBL | 0, - value / TWO_PWR_32_DBL | 0, - unsigned - ); -} -Long.fromNumber = fromNumber; -function fromBits(lowBits, highBits, unsigned) { - return new Long(lowBits, highBits, unsigned); -} -Long.fromBits = fromBits; -var pow_dbl = Math.pow; -function fromString(str2, unsigned, radix) { - if (str2.length === 0) throw Error("empty string"); - if (typeof unsigned === "number") { - radix = unsigned; - unsigned = false; - } else { - unsigned = !!unsigned; - } - if (str2 === "NaN" || str2 === "Infinity" || str2 === "+Infinity" || str2 === "-Infinity") - return unsigned ? UZERO : ZERO; - radix = radix || 10; - if (radix < 2 || 36 < radix) throw RangeError("radix"); - var p; - if ((p = str2.indexOf("-")) > 0) throw Error("interior hyphen"); - else if (p === 0) { - return fromString(str2.substring(1), unsigned, radix).neg(); - } - var radixToPower = fromNumber(pow_dbl(radix, 8)); - var result = ZERO; - for (var i = 0; i < str2.length; i += 8) { - var size = Math.min(8, str2.length - i), value = parseInt(str2.substring(i, i + size), radix); - if (size < 8) { - var power = fromNumber(pow_dbl(radix, size)); - result = result.mul(power).add(fromNumber(value)); - } else { - result = result.mul(radixToPower); - result = result.add(fromNumber(value)); - } - } - result.unsigned = unsigned; - return result; -} -Long.fromString = fromString; -function fromValue(val, unsigned) { - if (typeof val === "number") return fromNumber(val, unsigned); - if (typeof val === "string") return fromString(val, unsigned); - return fromBits( - val.low, - val.high, - typeof unsigned === "boolean" ? unsigned : val.unsigned - ); -} -Long.fromValue = fromValue; -var TWO_PWR_16_DBL = 1 << 16; -var TWO_PWR_24_DBL = 1 << 24; -var TWO_PWR_32_DBL = TWO_PWR_16_DBL * TWO_PWR_16_DBL; -var TWO_PWR_64_DBL = TWO_PWR_32_DBL * TWO_PWR_32_DBL; -var TWO_PWR_63_DBL = TWO_PWR_64_DBL / 2; -var TWO_PWR_24 = fromInt(TWO_PWR_24_DBL); -var ZERO = fromInt(0); -Long.ZERO = ZERO; -var UZERO = fromInt(0, true); -Long.UZERO = UZERO; -var ONE = fromInt(1); -Long.ONE = ONE; -var UONE = fromInt(1, true); -Long.UONE = UONE; -var NEG_ONE = fromInt(-1); -Long.NEG_ONE = NEG_ONE; -var MAX_VALUE = fromBits(4294967295 | 0, 2147483647 | 0, false); -Long.MAX_VALUE = MAX_VALUE; -var MAX_UNSIGNED_VALUE = fromBits(4294967295 | 0, 4294967295 | 0, true); -Long.MAX_UNSIGNED_VALUE = MAX_UNSIGNED_VALUE; -var MIN_VALUE = fromBits(0, 2147483648 | 0, false); -Long.MIN_VALUE = MIN_VALUE; -var LongPrototype = Long.prototype; -LongPrototype.toInt = function toInt() { - return this.unsigned ? 
this.low >>> 0 : this.low; -}; -LongPrototype.toNumber = function toNumber() { - if (this.unsigned) - return (this.high >>> 0) * TWO_PWR_32_DBL + (this.low >>> 0); - return this.high * TWO_PWR_32_DBL + (this.low >>> 0); -}; -LongPrototype.toString = function toString2(radix) { - radix = radix || 10; - if (radix < 2 || 36 < radix) throw RangeError("radix"); - if (this.isZero()) return "0"; - if (this.isNegative()) { - if (this.eq(MIN_VALUE)) { - var radixLong = fromNumber(radix), div = this.div(radixLong), rem1 = div.mul(radixLong).sub(this); - return div.toString(radix) + rem1.toInt().toString(radix); - } else return "-" + this.neg().toString(radix); - } - var radixToPower = fromNumber(pow_dbl(radix, 6), this.unsigned), rem = this; - var result = ""; - while (true) { - var remDiv = rem.div(radixToPower), intval = rem.sub(remDiv.mul(radixToPower)).toInt() >>> 0, digits = intval.toString(radix); - rem = remDiv; - if (rem.isZero()) return digits + result; - else { - while (digits.length < 6) digits = "0" + digits; - result = "" + digits + result; - } - } -}; -LongPrototype.getHighBits = function getHighBits() { - return this.high; -}; -LongPrototype.getHighBitsUnsigned = function getHighBitsUnsigned() { - return this.high >>> 0; -}; -LongPrototype.getLowBits = function getLowBits() { - return this.low; -}; -LongPrototype.getLowBitsUnsigned = function getLowBitsUnsigned() { - return this.low >>> 0; -}; -LongPrototype.getNumBitsAbs = function getNumBitsAbs() { - if (this.isNegative()) - return this.eq(MIN_VALUE) ? 64 : this.neg().getNumBitsAbs(); - var val = this.high != 0 ? this.high : this.low; - for (var bit = 31; bit > 0; bit--) if ((val & 1 << bit) != 0) break; - return this.high != 0 ? bit + 33 : bit + 1; -}; -LongPrototype.isSafeInteger = function isSafeInteger() { - var top11Bits = this.high >> 21; - if (!top11Bits) return true; - if (this.unsigned) return false; - return top11Bits === -1 && !(this.low === 0 && this.high === -2097152); -}; -LongPrototype.isZero = function isZero() { - return this.high === 0 && this.low === 0; -}; -LongPrototype.eqz = LongPrototype.isZero; -LongPrototype.isNegative = function isNegative() { - return !this.unsigned && this.high < 0; -}; -LongPrototype.isPositive = function isPositive() { - return this.unsigned || this.high >= 0; -}; -LongPrototype.isOdd = function isOdd() { - return (this.low & 1) === 1; -}; -LongPrototype.isEven = function isEven() { - return (this.low & 1) === 0; -}; -LongPrototype.equals = function equals(other) { - if (!isLong(other)) other = fromValue(other); - if (this.unsigned !== other.unsigned && this.high >>> 31 === 1 && other.high >>> 31 === 1) - return false; - return this.high === other.high && this.low === other.low; -}; -LongPrototype.eq = LongPrototype.equals; -LongPrototype.notEquals = function notEquals(other) { - return !this.eq( - /* validates */ - other - ); -}; -LongPrototype.neq = LongPrototype.notEquals; -LongPrototype.ne = LongPrototype.notEquals; -LongPrototype.lessThan = function lessThan(other) { - return this.comp( - /* validates */ - other - ) < 0; -}; -LongPrototype.lt = LongPrototype.lessThan; -LongPrototype.lessThanOrEqual = function lessThanOrEqual(other) { - return this.comp( - /* validates */ - other - ) <= 0; -}; -LongPrototype.lte = LongPrototype.lessThanOrEqual; -LongPrototype.le = LongPrototype.lessThanOrEqual; -LongPrototype.greaterThan = function greaterThan(other) { - return this.comp( - /* validates */ - other - ) > 0; -}; -LongPrototype.gt = LongPrototype.greaterThan; 
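// Illustrative sketch, not part of the applied diff: this vendored Long class supplies the
// 64-bit wrap-around arithmetic used by the SARIF fingerprinting rolling hash in
// src/fingerprints.ts, further down in this hunk. The same windowed update can be written
// with native BigInt; the constants mirror BLOCK_SIZE and MOD from that code, and hashRaw
// is assumed to be a BigInt:
const EXAMPLE_BLOCK_SIZE = 100;
const EXAMPLE_MOD = 37n;
const toU64 = (x) => BigInt.asUintN(64, x);
// MOD ** BLOCK_SIZE reduced to 64 bits, matching computeFirstMod().
const EXAMPLE_FIRST_MOD = toU64(EXAMPLE_MOD ** BigInt(EXAMPLE_BLOCK_SIZE));
function exampleUpdateHash(hashRaw, incomingCharCode, outgoingCharCode) {
  // hash' = MOD * hash + incoming - MOD^BLOCK_SIZE * outgoing   (mod 2^64)
  return toU64(
    EXAMPLE_MOD * hashRaw +
      BigInt(incomingCharCode) -
      EXAMPLE_FIRST_MOD * BigInt(outgoingCharCode)
  );
}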
-LongPrototype.greaterThanOrEqual = function greaterThanOrEqual(other) { - return this.comp( - /* validates */ - other - ) >= 0; -}; -LongPrototype.gte = LongPrototype.greaterThanOrEqual; -LongPrototype.ge = LongPrototype.greaterThanOrEqual; -LongPrototype.compare = function compare2(other) { - if (!isLong(other)) other = fromValue(other); - if (this.eq(other)) return 0; - var thisNeg = this.isNegative(), otherNeg = other.isNegative(); - if (thisNeg && !otherNeg) return -1; - if (!thisNeg && otherNeg) return 1; - if (!this.unsigned) return this.sub(other).isNegative() ? -1 : 1; - return other.high >>> 0 > this.high >>> 0 || other.high === this.high && other.low >>> 0 > this.low >>> 0 ? -1 : 1; -}; -LongPrototype.comp = LongPrototype.compare; -LongPrototype.negate = function negate() { - if (!this.unsigned && this.eq(MIN_VALUE)) return MIN_VALUE; - return this.not().add(ONE); -}; -LongPrototype.neg = LongPrototype.negate; -LongPrototype.add = function add(addend) { - if (!isLong(addend)) addend = fromValue(addend); - var a48 = this.high >>> 16; - var a32 = this.high & 65535; - var a16 = this.low >>> 16; - var a00 = this.low & 65535; - var b48 = addend.high >>> 16; - var b32 = addend.high & 65535; - var b16 = addend.low >>> 16; - var b00 = addend.low & 65535; - var c48 = 0, c32 = 0, c16 = 0, c00 = 0; - c00 += a00 + b00; - c16 += c00 >>> 16; - c00 &= 65535; - c16 += a16 + b16; - c32 += c16 >>> 16; - c16 &= 65535; - c32 += a32 + b32; - c48 += c32 >>> 16; - c32 &= 65535; - c48 += a48 + b48; - c48 &= 65535; - return fromBits(c16 << 16 | c00, c48 << 16 | c32, this.unsigned); -}; -LongPrototype.subtract = function subtract(subtrahend) { - if (!isLong(subtrahend)) subtrahend = fromValue(subtrahend); - return this.add(subtrahend.neg()); -}; -LongPrototype.sub = LongPrototype.subtract; -LongPrototype.multiply = function multiply(multiplier) { - if (this.isZero()) return this; - if (!isLong(multiplier)) multiplier = fromValue(multiplier); - if (wasm) { - var low = wasm["mul"](this.low, this.high, multiplier.low, multiplier.high); - return fromBits(low, wasm["get_high"](), this.unsigned); - } - if (multiplier.isZero()) return this.unsigned ? UZERO : ZERO; - if (this.eq(MIN_VALUE)) return multiplier.isOdd() ? MIN_VALUE : ZERO; - if (multiplier.eq(MIN_VALUE)) return this.isOdd() ? 
MIN_VALUE : ZERO; - if (this.isNegative()) { - if (multiplier.isNegative()) return this.neg().mul(multiplier.neg()); - else return this.neg().mul(multiplier).neg(); - } else if (multiplier.isNegative()) return this.mul(multiplier.neg()).neg(); - if (this.lt(TWO_PWR_24) && multiplier.lt(TWO_PWR_24)) - return fromNumber(this.toNumber() * multiplier.toNumber(), this.unsigned); - var a48 = this.high >>> 16; - var a32 = this.high & 65535; - var a16 = this.low >>> 16; - var a00 = this.low & 65535; - var b48 = multiplier.high >>> 16; - var b32 = multiplier.high & 65535; - var b16 = multiplier.low >>> 16; - var b00 = multiplier.low & 65535; - var c48 = 0, c32 = 0, c16 = 0, c00 = 0; - c00 += a00 * b00; - c16 += c00 >>> 16; - c00 &= 65535; - c16 += a16 * b00; - c32 += c16 >>> 16; - c16 &= 65535; - c16 += a00 * b16; - c32 += c16 >>> 16; - c16 &= 65535; - c32 += a32 * b00; - c48 += c32 >>> 16; - c32 &= 65535; - c32 += a16 * b16; - c48 += c32 >>> 16; - c32 &= 65535; - c32 += a00 * b32; - c48 += c32 >>> 16; - c32 &= 65535; - c48 += a48 * b00 + a32 * b16 + a16 * b32 + a00 * b48; - c48 &= 65535; - return fromBits(c16 << 16 | c00, c48 << 16 | c32, this.unsigned); -}; -LongPrototype.mul = LongPrototype.multiply; -LongPrototype.divide = function divide(divisor) { - if (!isLong(divisor)) divisor = fromValue(divisor); - if (divisor.isZero()) throw Error("division by zero"); - if (wasm) { - if (!this.unsigned && this.high === -2147483648 && divisor.low === -1 && divisor.high === -1) { - return this; - } - var low = (this.unsigned ? wasm["div_u"] : wasm["div_s"])( - this.low, - this.high, - divisor.low, - divisor.high - ); - return fromBits(low, wasm["get_high"](), this.unsigned); - } - if (this.isZero()) return this.unsigned ? UZERO : ZERO; - var approx, rem, res; - if (!this.unsigned) { - if (this.eq(MIN_VALUE)) { - if (divisor.eq(ONE) || divisor.eq(NEG_ONE)) - return MIN_VALUE; - else if (divisor.eq(MIN_VALUE)) return ONE; - else { - var halfThis = this.shr(1); - approx = halfThis.div(divisor).shl(1); - if (approx.eq(ZERO)) { - return divisor.isNegative() ? ONE : NEG_ONE; - } else { - rem = this.sub(divisor.mul(approx)); - res = approx.add(rem.div(divisor)); - return res; - } - } - } else if (divisor.eq(MIN_VALUE)) return this.unsigned ? UZERO : ZERO; - if (this.isNegative()) { - if (divisor.isNegative()) return this.neg().div(divisor.neg()); - return this.neg().div(divisor).neg(); - } else if (divisor.isNegative()) return this.div(divisor.neg()).neg(); - res = ZERO; - } else { - if (!divisor.unsigned) divisor = divisor.toUnsigned(); - if (divisor.gt(this)) return UZERO; - if (divisor.gt(this.shru(1))) - return UONE; - res = UZERO; - } - rem = this; - while (rem.gte(divisor)) { - approx = Math.max(1, Math.floor(rem.toNumber() / divisor.toNumber())); - var log2 = Math.ceil(Math.log(approx) / Math.LN2), delta = log2 <= 48 ? 1 : pow_dbl(2, log2 - 48), approxRes = fromNumber(approx), approxRem = approxRes.mul(divisor); - while (approxRem.isNegative() || approxRem.gt(rem)) { - approx -= delta; - approxRes = fromNumber(approx, this.unsigned); - approxRem = approxRes.mul(divisor); + await runCli(cmd, args); } - if (approxRes.isZero()) approxRes = ONE; - res = res.add(approxRes); - rem = rem.sub(approxRem); - } - return res; -}; -LongPrototype.div = LongPrototype.divide; -LongPrototype.modulo = function modulo(divisor) { - if (!isLong(divisor)) divisor = fromValue(divisor); - if (wasm) { - var low = (this.unsigned ? 
wasm["rem_u"] : wasm["rem_s"])( - this.low, - this.high, - divisor.low, - divisor.high - ); - return fromBits(low, wasm["get_high"](), this.unsigned); - } - return this.sub(this.div(divisor).mul(divisor)); -}; -LongPrototype.mod = LongPrototype.modulo; -LongPrototype.rem = LongPrototype.modulo; -LongPrototype.not = function not() { - return fromBits(~this.low, ~this.high, this.unsigned); -}; -LongPrototype.countLeadingZeros = function countLeadingZeros() { - return this.high ? Math.clz32(this.high) : Math.clz32(this.low) + 32; -}; -LongPrototype.clz = LongPrototype.countLeadingZeros; -LongPrototype.countTrailingZeros = function countTrailingZeros() { - return this.low ? ctz32(this.low) : ctz32(this.high) + 32; -}; -LongPrototype.ctz = LongPrototype.countTrailingZeros; -LongPrototype.and = function and(other) { - if (!isLong(other)) other = fromValue(other); - return fromBits(this.low & other.low, this.high & other.high, this.unsigned); -}; -LongPrototype.or = function or(other) { - if (!isLong(other)) other = fromValue(other); - return fromBits(this.low | other.low, this.high | other.high, this.unsigned); -}; -LongPrototype.xor = function xor(other) { - if (!isLong(other)) other = fromValue(other); - return fromBits(this.low ^ other.low, this.high ^ other.high, this.unsigned); -}; -LongPrototype.shiftLeft = function shiftLeft(numBits) { - if (isLong(numBits)) numBits = numBits.toInt(); - if ((numBits &= 63) === 0) return this; - else if (numBits < 32) - return fromBits( - this.low << numBits, - this.high << numBits | this.low >>> 32 - numBits, - this.unsigned - ); - else return fromBits(0, this.low << numBits - 32, this.unsigned); -}; -LongPrototype.shl = LongPrototype.shiftLeft; -LongPrototype.shiftRight = function shiftRight(numBits) { - if (isLong(numBits)) numBits = numBits.toInt(); - if ((numBits &= 63) === 0) return this; - else if (numBits < 32) - return fromBits( - this.low >>> numBits | this.high << 32 - numBits, - this.high >> numBits, - this.unsigned - ); - else - return fromBits( - this.high >> numBits - 32, - this.high >= 0 ? 
0 : -1, - this.unsigned - ); -}; -LongPrototype.shr = LongPrototype.shiftRight; -LongPrototype.shiftRightUnsigned = function shiftRightUnsigned(numBits) { - if (isLong(numBits)) numBits = numBits.toInt(); - if ((numBits &= 63) === 0) return this; - if (numBits < 32) - return fromBits( - this.low >>> numBits | this.high << 32 - numBits, - this.high >>> numBits, - this.unsigned - ); - if (numBits === 32) return fromBits(this.high, 0, this.unsigned); - return fromBits(this.high >>> numBits - 32, 0, this.unsigned); -}; -LongPrototype.shru = LongPrototype.shiftRightUnsigned; -LongPrototype.shr_u = LongPrototype.shiftRightUnsigned; -LongPrototype.rotateLeft = function rotateLeft(numBits) { - var b; - if (isLong(numBits)) numBits = numBits.toInt(); - if ((numBits &= 63) === 0) return this; - if (numBits === 32) return fromBits(this.high, this.low, this.unsigned); - if (numBits < 32) { - b = 32 - numBits; - return fromBits( - this.low << numBits | this.high >>> b, - this.high << numBits | this.low >>> b, - this.unsigned + }; + if (checkVersion && !await codeQlVersionAtLeast(codeql, CODEQL_MINIMUM_VERSION)) { + throw new ConfigurationError( + `Expected a CodeQL CLI with version at least ${CODEQL_MINIMUM_VERSION} but got version ${(await codeql.getVersion()).version}` ); - } - numBits -= 32; - b = 32 - numBits; - return fromBits( - this.high << numBits | this.low >>> b, - this.low << numBits | this.high >>> b, - this.unsigned - ); -}; -LongPrototype.rotl = LongPrototype.rotateLeft; -LongPrototype.rotateRight = function rotateRight(numBits) { - var b; - if (isLong(numBits)) numBits = numBits.toInt(); - if ((numBits &= 63) === 0) return this; - if (numBits === 32) return fromBits(this.high, this.low, this.unsigned); - if (numBits < 32) { - b = 32 - numBits; - return fromBits( - this.high << b | this.low >>> numBits, - this.low << b | this.high >>> numBits, - this.unsigned + } else if (checkVersion && process.env["CODEQL_ACTION_SUPPRESS_DEPRECATED_SOON_WARNING" /* SUPPRESS_DEPRECATED_SOON_WARNING */] !== "true" && !await codeQlVersionAtLeast(codeql, CODEQL_NEXT_MINIMUM_VERSION)) { + const result = await codeql.getVersion(); + core10.warning( + `CodeQL CLI version ${result.version} was discontinued on ${GHES_MOST_RECENT_DEPRECATION_DATE} alongside GitHub Enterprise Server ${GHES_VERSION_MOST_RECENTLY_DEPRECATED} and will not be supported by the next minor release of the CodeQL Action. Please update to CodeQL CLI version ${CODEQL_NEXT_MINIMUM_VERSION} or later. For instance, if you have specified a custom version of the CLI using the 'tools' input to the 'init' Action, you can remove this input to use the default version. 
+ +Alternatively, if you want to continue using CodeQL CLI version ${result.version}, you can replace 'github/codeql-action/*@v${getActionVersion().split(".")[0]}' by 'github/codeql-action/*@v${getActionVersion()}' in your code scanning workflow to continue using this version of the CodeQL Action.` ); + core10.exportVariable("CODEQL_ACTION_SUPPRESS_DEPRECATED_SOON_WARNING" /* SUPPRESS_DEPRECATED_SOON_WARNING */, "true"); } - numBits -= 32; - b = 32 - numBits; - return fromBits( - this.low << b | this.high >>> numBits, - this.high << b | this.low >>> numBits, - this.unsigned - ); -}; -LongPrototype.rotr = LongPrototype.rotateRight; -LongPrototype.toSigned = function toSigned() { - if (!this.unsigned) return this; - return fromBits(this.low, this.high, false); -}; -LongPrototype.toUnsigned = function toUnsigned() { - if (this.unsigned) return this; - return fromBits(this.low, this.high, true); -}; -LongPrototype.toBytes = function toBytes(le) { - return le ? this.toBytesLE() : this.toBytesBE(); -}; -LongPrototype.toBytesLE = function toBytesLE() { - var hi = this.high, lo = this.low; - return [ - lo & 255, - lo >>> 8 & 255, - lo >>> 16 & 255, - lo >>> 24, - hi & 255, - hi >>> 8 & 255, - hi >>> 16 & 255, - hi >>> 24 - ]; -}; -LongPrototype.toBytesBE = function toBytesBE() { - var hi = this.high, lo = this.low; - return [ - hi >>> 24, - hi >>> 16 & 255, - hi >>> 8 & 255, - hi & 255, - lo >>> 24, - lo >>> 16 & 255, - lo >>> 8 & 255, - lo & 255 - ]; -}; -Long.fromBytes = function fromBytes(bytes, unsigned, le) { - return le ? Long.fromBytesLE(bytes, unsigned) : Long.fromBytesBE(bytes, unsigned); -}; -Long.fromBytesLE = function fromBytesLE(bytes, unsigned) { - return new Long( - bytes[0] | bytes[1] << 8 | bytes[2] << 16 | bytes[3] << 24, - bytes[4] | bytes[5] << 8 | bytes[6] << 16 | bytes[7] << 24, - unsigned - ); -}; -Long.fromBytesBE = function fromBytesBE(bytes, unsigned) { - return new Long( - bytes[4] << 24 | bytes[5] << 16 | bytes[6] << 8 | bytes[7], - bytes[0] << 24 | bytes[1] << 16 | bytes[2] << 8 | bytes[3], - unsigned + return codeql; +} +function getExtraOptionsFromEnv(paths, { ignoringOptions } = {}) { + const options = getExtraOptionsEnvParam(); + return getExtraOptions(options, paths, []).filter( + (option) => !ignoringOptions?.includes(option) ); -}; -if (typeof BigInt === "function") { - Long.fromBigInt = function fromBigInt(value, unsigned) { - var lowBits = Number(BigInt.asIntN(32, value)); - var highBits = Number(BigInt.asIntN(32, value >> BigInt(32))); - return fromBits(lowBits, highBits, unsigned); - }; - Long.fromValue = function fromValueWithBigInt(value, unsigned) { - if (typeof value === "bigint") return Long.fromBigInt(value, unsigned); - return fromValue(value, unsigned); - }; - LongPrototype.toBigInt = function toBigInt() { - var lowBigInt = BigInt(this.low >>> 0); - var highBigInt = BigInt(this.unsigned ? 
this.high >>> 0 : this.high); - return highBigInt << BigInt(32) | lowBigInt; - }; } -var long_default = Long; - -// src/fingerprints.ts -var tab = " ".charCodeAt(0); -var space = " ".charCodeAt(0); -var lf = "\n".charCodeAt(0); -var cr = "\r".charCodeAt(0); -var EOF = 65535; -var BLOCK_SIZE = 100; -var MOD = long_default.fromInt(37); -function computeFirstMod() { - let firstMod = long_default.ONE; - for (let i = 0; i < BLOCK_SIZE; i++) { - firstMod = firstMod.multiply(MOD); +function asExtraOptions(options, pathInfo) { + if (options === void 0) { + return []; } - return firstMod; -} -async function hash(callback, filepath) { - const window2 = Array(BLOCK_SIZE).fill(0); - const lineNumbers = Array(BLOCK_SIZE).fill(-1); - let hashRaw = long_default.ZERO; - const firstMod = computeFirstMod(); - let index = 0; - let lineNumber = 0; - let lineStart = true; - let prevCR = false; - const hashCounts = {}; - const outputHash = function() { - const hashValue = hashRaw.toUnsigned().toString(16); - if (!hashCounts[hashValue]) { - hashCounts[hashValue] = 0; - } - hashCounts[hashValue]++; - callback(lineNumbers[index], `${hashValue}:${hashCounts[hashValue]}`); - lineNumbers[index] = -1; - }; - const updateHash = function(current) { - const begin = window2[index]; - window2[index] = current; - hashRaw = MOD.multiply(hashRaw).add(long_default.fromInt(current)).subtract(firstMod.multiply(long_default.fromInt(begin))); - index = (index + 1) % BLOCK_SIZE; - }; - const processCharacter = function(current) { - if (current === space || current === tab || prevCR && current === lf) { - prevCR = false; - return; - } - if (current === cr) { - current = lf; - prevCR = true; - } else { - prevCR = false; - } - if (lineNumbers[index] !== -1) { - outputHash(); - } - if (lineStart) { - lineStart = false; - lineNumber++; - lineNumbers[index] = lineNumber; - } - if (current === lf) { - lineStart = true; - } - updateHash(current); - }; - const readStream = fs10.createReadStream(filepath, "utf8"); - for await (const data of readStream) { - for (let i = 0; i < data.length; ++i) { - processCharacter(data.charCodeAt(i)); - } + if (!Array.isArray(options)) { + const msg = `The extra options for '${pathInfo.join( + "." + )}' ('${JSON.stringify(options)}') are not in an array.`; + throw new Error(msg); } - processCharacter(EOF); - for (let i = 0; i < BLOCK_SIZE; i++) { - if (lineNumbers[index] !== -1) { - outputHash(); + return options.map((o) => { + const t = typeof o; + if (t !== "string" && t !== "number" && t !== "boolean") { + const msg = `The extra option for '${pathInfo.join( + "." 
+ )}' ('${JSON.stringify(o)}') is not a primitive value.`; + throw new Error(msg); } - updateHash(0); - } + return `${o}`; + }); } -function locationUpdateCallback(result, location, logger) { - let locationStartLine = location.physicalLocation?.region?.startLine; - if (locationStartLine === void 0) { - locationStartLine = 1; - } - return function(lineNumber, hashValue) { - if (locationStartLine !== lineNumber) { - return; - } - if (!result.partialFingerprints) { - result.partialFingerprints = {}; - } - const existingFingerprint = result.partialFingerprints.primaryLocationLineHash; - if (!existingFingerprint) { - result.partialFingerprints.primaryLocationLineHash = hashValue; - } else if (existingFingerprint !== hashValue) { - logger.warning( - `Calculated fingerprint of ${hashValue} for file ${location.physicalLocation.artifactLocation.uri} line ${lineNumber}, but found existing inconsistent fingerprint value ${existingFingerprint}` - ); - } - }; +function getExtraOptions(options, paths, pathInfo) { + const all = asExtraOptions(options?.["*"], pathInfo.concat("*")); + const specific = paths.length === 0 ? asExtraOptions(options, pathInfo) : getExtraOptions( + options?.[paths[0]], + paths?.slice(1), + pathInfo.concat(paths[0]) + ); + return all.concat(specific); } -function resolveUriToFile(location, artifacts, sourceRoot, logger) { - if (!location.uri && location.index !== void 0) { - if (typeof location.index !== "number" || location.index < 0 || location.index >= artifacts.length || typeof artifacts[location.index].location !== "object") { - logger.debug(`Ignoring location as index "${location.index}" is invalid`); - return void 0; - } - location = artifacts[location.index].location; - } - if (typeof location.uri !== "string") { - logger.debug(`Ignoring location as URI "${location.uri}" is invalid`); - return void 0; - } - let uri; +async function runCli(cmd, args = [], opts = {}) { try { - uri = decodeURIComponent(location.uri); - } catch { - logger.debug(`Ignoring location as URI "${location.uri}" is invalid`); - return void 0; - } - const fileUriPrefix = "file://"; - if (uri.startsWith(fileUriPrefix)) { - uri = uri.substring(fileUriPrefix.length); - } - if (uri.indexOf("://") !== -1) { - logger.debug( - `Ignoring location URI "${uri}" as the scheme is not recognised` - ); - return void 0; - } - const srcRootPrefix = `${sourceRoot}/`; - if (uri.startsWith("/") && !uri.startsWith(srcRootPrefix)) { - logger.debug( - `Ignoring location URI "${uri}" as it is outside of the src root` - ); - return void 0; - } - if (!import_path2.default.isAbsolute(uri)) { - uri = srcRootPrefix + uri; - } - if (!fs10.existsSync(uri)) { - logger.debug(`Unable to compute fingerprint for non-existent file: ${uri}`); - return void 0; - } - if (fs10.statSync(uri).isDirectory()) { - logger.debug(`Unable to compute fingerprint for directory: ${uri}`); - return void 0; + return await runTool(cmd, args, opts); + } catch (e) { + if (e instanceof CommandInvocationError) { + throw wrapCliConfigurationError(new CliError(e)); + } + throw e; } - return uri; } -async function addFingerprints(sarif, sourceRoot, logger) { +async function writeCodeScanningConfigFile(config, logger) { + const codeScanningConfigFile = getGeneratedCodeScanningConfigPath(config); + const augmentedConfig = appendExtraQueryExclusions( + config.extraQueryExclusions, + config.computedConfig + ); logger.info( - `Adding fingerprints to SARIF file. 
See ${"https://docs.github.com/en/enterprise-cloud@latest/code-security/code-scanning/integrating-with-code-scanning/sarif-support-for-code-scanning#providing-data-to-track-code-scanning-alerts-across-runs" /* TRACK_CODE_SCANNING_ALERTS_ACROSS_RUNS */} for more information.` + `Writing augmented user configuration file to ${codeScanningConfigFile}` ); - const callbacksByFile = {}; - for (const run of sarif.runs || []) { - const artifacts = run.artifacts || []; - for (const result of run.results || []) { - const primaryLocation = (result.locations || [])[0]; - if (!primaryLocation?.physicalLocation?.artifactLocation) { - logger.debug( - `Unable to compute fingerprint for invalid location: ${JSON.stringify( - primaryLocation - )}` - ); - continue; - } - if (primaryLocation?.physicalLocation?.region?.startLine === void 0) { - continue; - } - const filepath = resolveUriToFile( - primaryLocation.physicalLocation.artifactLocation, - artifacts, - sourceRoot, - logger - ); - if (!filepath) { - continue; - } - if (!callbacksByFile[filepath]) { - callbacksByFile[filepath] = []; - } - callbacksByFile[filepath].push( - locationUpdateCallback(result, primaryLocation, logger) - ); - } - } - for (const [filepath, callbacks] of Object.entries(callbacksByFile)) { - const teeCallback = function(lineNumber, hashValue) { - for (const c of Object.values(callbacks)) { - c(lineNumber, hashValue); - } - }; - await hash(teeCallback, filepath); - } - return sarif; + logger.startGroup("Augmented user configuration file contents"); + logger.info(dump(augmentedConfig)); + logger.endGroup(); + fs9.writeFileSync(codeScanningConfigFile, dump(augmentedConfig)); + return codeScanningConfigFile; +} +var TRAP_CACHE_SIZE_MB = 1024; +async function getTrapCachingExtractorConfigArgs(config) { + const result = []; + for (const language of config.languages) + result.push( + await getTrapCachingExtractorConfigArgsForLang(config, language) + ); + return result.flat(); +} +async function getTrapCachingExtractorConfigArgsForLang(config, language) { + const cacheDir = config.trapCaches[language]; + if (cacheDir === void 0) return []; + const write = await isAnalyzingDefaultBranch(); + return [ + `-O=${language}.trap.cache.dir=${cacheDir}`, + `-O=${language}.trap.cache.bound=${TRAP_CACHE_SIZE_MB}`, + `-O=${language}.trap.cache.write=${write}` + ]; +} +function getGeneratedCodeScanningConfigPath(config) { + return path9.resolve(config.tempDir, "user-config.yaml"); +} +function getExtractionVerbosityArguments(enableDebugLogging) { + return enableDebugLogging ? [`--verbosity=${EXTRACTION_DEBUG_MODE_VERBOSITY}`] : []; +} +function applyAutobuildAzurePipelinesTimeoutFix() { + const javaToolOptions = process.env["JAVA_TOOL_OPTIONS"] || ""; + process.env["JAVA_TOOL_OPTIONS"] = [ + ...javaToolOptions.split(/\s+/), + "-Dhttp.keepAlive=false", + "-Dmaven.wagon.http.pool=false" + ].join(" "); +} +async function getJobRunUuidSarifOptions(codeql) { + const jobRunUuid = process.env["JOB_RUN_UUID" /* JOB_RUN_UUID */]; + return jobRunUuid && await codeql.supportsFeature( + "databaseInterpretResultsSupportsSarifRunProperty" /* DatabaseInterpretResultsSupportsSarifRunProperty */ + ) ? 
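// Illustrative sketch, not part of the applied diff: for a hypothetical config with
// trapCaches = { java: "/tmp/trap-cache/java" }, analysed on the default branch (so the
// cache is writable), getTrapCachingExtractorConfigArgsForLang above returns the
// following extractor options:
const exampleTrapCacheArgs = [
  "-O=java.trap.cache.dir=/tmp/trap-cache/java",
  "-O=java.trap.cache.bound=1024", // TRAP_CACHE_SIZE_MB
  "-O=java.trap.cache.write=true",
];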
[`--sarif-run-property=jobRunUuid=${jobRunUuid}`] : []; } // src/init.ts -var toolrunner4 = __toESM(require_toolrunner()); -var io5 = __toESM(require_io()); async function initCodeQL(toolsInput, apiDetails, tempDir, variant, defaultCliVersion, features, logger) { logger.startGroup("Setup CodeQL tools"); const { @@ -110150,7 +110116,7 @@ function combineSarifFiles(sarifFiles, logger) { for (const sarifFile of sarifFiles) { logger.debug(`Loading SARIF file: ${sarifFile}`); const sarifObject = JSON.parse( - fs11.readFileSync(sarifFile, "utf8") + fs10.readFileSync(sarifFile, "utf8") ); if (combinedSarif.version === null) { combinedSarif.version = sarifObject.version; @@ -110244,10 +110210,10 @@ async function minimalInitCodeQL(logger, gitHubVersion, features) { ); return initCodeQLResult.codeql; } -async function combineSarifFilesUsingCLI(sarifFiles, gitHubVersion, features, logger) { +async function combineSarifFilesUsingCLI(sarifFiles, gitHubVersion, _features, logger, getCodeQL, tempDir) { logger.info("Combining SARIF files using the CodeQL CLI"); const sarifObjects = sarifFiles.map((sarifFile) => { - return JSON.parse(fs11.readFileSync(sarifFile, "utf8")); + return JSON.parse(fs10.readFileSync(sarifFile, "utf8")); }); const deprecationWarningMessage = gitHubVersion.type === "GitHub Enterprise Server" /* GHES */ ? "and will be removed in GitHub Enterprise Server 3.18" : "and will be removed in July 2025"; const deprecationMoreInformationMessage = "For more information, see https://github.blog/changelog/2024-05-06-code-scanning-will-stop-combining-runs-from-a-single-upload"; @@ -110267,23 +110233,15 @@ async function combineSarifFilesUsingCLI(sarifFiles, gitHubVersion, features, lo } return combineSarifFiles(sarifFiles, logger); } - let codeQL; - let tempDir = getTemporaryDirectory(); - const config = await getConfig(tempDir, logger); - if (config !== void 0) { - codeQL = await getCodeQL(config.codeQLCmd); - tempDir = config.tempDir; - } else { - codeQL = await minimalInitCodeQL(logger, gitHubVersion, features); - } - const baseTempDir = path11.resolve(tempDir, "combined-sarif"); - fs11.mkdirSync(baseTempDir, { recursive: true }); - const outputDirectory = fs11.mkdtempSync(path11.resolve(baseTempDir, "output-")); - const outputFile = path11.resolve(outputDirectory, "combined-sarif.sarif"); + const codeQL = await getCodeQL(); + const baseTempDir = path10.resolve(tempDir, "combined-sarif"); + fs10.mkdirSync(baseTempDir, { recursive: true }); + const outputDirectory = fs10.mkdtempSync(path10.resolve(baseTempDir, "output-")); + const outputFile = path10.resolve(outputDirectory, "combined-sarif.sarif"); await codeQL.mergeResults(sarifFiles, outputFile, { mergeRunsFromEqualCategory: true }); - return JSON.parse(fs11.readFileSync(outputFile, "utf8")); + return JSON.parse(fs10.readFileSync(outputFile, "utf8")); } function populateRunAutomationDetails(sarif, category, analysis_key, environment) { const automationID = getAutomationID2(category, analysis_key, environment); @@ -110312,7 +110270,7 @@ function getAutomationID2(category, analysis_key, environment) { async function uploadPayload(payload, repositoryNwo, logger, analysis) { logger.info("Uploading results"); if (shouldSkipSarifUpload()) { - const payloadSaveFile = path11.join( + const payloadSaveFile = path10.join( getTemporaryDirectory(), `payload-${analysis.kind}.json` ); @@ -110320,7 +110278,7 @@ async function uploadPayload(payload, repositoryNwo, logger, analysis) { `SARIF upload disabled by an environment variable. 
Saving to ${payloadSaveFile}` ); logger.info(`Payload: ${JSON.stringify(payload, null, 2)}`); - fs11.writeFileSync(payloadSaveFile, JSON.stringify(payload, null, 2)); + fs10.writeFileSync(payloadSaveFile, JSON.stringify(payload, null, 2)); return "dummy-sarif-id"; } const client = getApiClient(); @@ -110354,12 +110312,12 @@ async function uploadPayload(payload, repositoryNwo, logger, analysis) { function findSarifFilesInDir(sarifPath, isSarif) { const sarifFiles = []; const walkSarifFiles = (dir) => { - const entries = fs11.readdirSync(dir, { withFileTypes: true }); + const entries = fs10.readdirSync(dir, { withFileTypes: true }); for (const entry of entries) { if (entry.isFile() && isSarif(entry.name)) { - sarifFiles.push(path11.resolve(dir, entry.name)); + sarifFiles.push(path10.resolve(dir, entry.name)); } else if (entry.isDirectory()) { - walkSarifFiles(path11.resolve(dir, entry.name)); + walkSarifFiles(path10.resolve(dir, entry.name)); } } }; @@ -110367,11 +110325,11 @@ function findSarifFilesInDir(sarifPath, isSarif) { return sarifFiles; } function getSarifFilePaths(sarifPath, isSarif) { - if (!fs11.existsSync(sarifPath)) { + if (!fs10.existsSync(sarifPath)) { throw new ConfigurationError(`Path does not exist: ${sarifPath}`); } let sarifFiles; - if (fs11.lstatSync(sarifPath).isDirectory()) { + if (fs10.lstatSync(sarifPath).isDirectory()) { sarifFiles = findSarifFilesInDir(sarifPath, isSarif); if (sarifFiles.length === 0) { throw new ConfigurationError( @@ -110384,7 +110342,7 @@ function getSarifFilePaths(sarifPath, isSarif) { return sarifFiles; } async function getGroupedSarifFilePaths(logger, sarifPath) { - const stats = fs11.statSync(sarifPath, { throwIfNoEntry: false }); + const stats = fs10.statSync(sarifPath, { throwIfNoEntry: false }); if (stats === void 0) { throw new ConfigurationError(`Path does not exist: ${sarifPath}`); } @@ -110392,7 +110350,7 @@ async function getGroupedSarifFilePaths(logger, sarifPath) { if (stats.isDirectory()) { let unassignedSarifFiles = findSarifFilesInDir( sarifPath, - (name) => path11.extname(name) === ".sarif" + (name) => path10.extname(name) === ".sarif" ); logger.debug( `Found the following .sarif files in ${sarifPath}: ${unassignedSarifFiles.join(", ")}` @@ -110449,7 +110407,7 @@ function countResultsInSarif(sarif) { } function readSarifFile(sarifFilePath) { try { - return JSON.parse(fs11.readFileSync(sarifFilePath, "utf8")); + return JSON.parse(fs10.readFileSync(sarifFilePath, "utf8")); } catch (e) { throw new InvalidSarifUploadError( `Invalid SARIF. 
JSON syntax error: ${getErrorMessage(e)}` @@ -110518,7 +110476,7 @@ function buildPayload(commitOid, ref, analysisKey, analysisName, zippedSarif, wo payloadObj.base_sha = mergeBaseCommitOid; } else if (process.env.GITHUB_EVENT_PATH) { const githubEvent = JSON.parse( - fs11.readFileSync(process.env.GITHUB_EVENT_PATH, "utf8") + fs10.readFileSync(process.env.GITHUB_EVENT_PATH, "utf8") ); payloadObj.base_ref = `refs/heads/${githubEvent.pull_request.base.ref}`; payloadObj.base_sha = githubEvent.pull_request.base.sha; @@ -110526,7 +110484,7 @@ function buildPayload(commitOid, ref, analysisKey, analysisName, zippedSarif, wo } return payloadObj; } -async function postProcessSarifFiles(logger, features, checkoutPath, sarifPaths, category, analysis) { +async function postProcessSarifFiles(logger, features, getCodeQL, tempPath, checkoutPath, sarifPaths, category, analysis) { logger.info(`Post-processing sarif files: ${JSON.stringify(sarifPaths)}`); const gitHubVersion = await getGitHubVersion(); let sarif; @@ -110540,7 +110498,9 @@ async function postProcessSarifFiles(logger, features, checkoutPath, sarifPaths, sarifPaths, gitHubVersion, features, - logger + logger, + getCodeQL, + tempPath ); } else { const sarifPath = sarifPaths[0]; @@ -110573,12 +110533,14 @@ async function writePostProcessedFiles(logger, pathInput, uploadTarget, postProc logger.debug(`Not writing post-processed SARIF files.`); } } -async function uploadFiles(inputSarifPath, checkoutPath, category, features, logger, uploadTarget) { +async function uploadFiles(tempDir, codeql, inputSarifPath, checkoutPath, category, features, logger, uploadTarget) { const sarifPaths = getSarifFilePaths( inputSarifPath, uploadTarget.sarifPredicate ); return uploadSpecifiedFiles( + tempDir, + codeql, sarifPaths, checkoutPath, category, @@ -110587,10 +110549,12 @@ async function uploadFiles(inputSarifPath, checkoutPath, category, features, log uploadTarget ); } -async function uploadSpecifiedFiles(sarifPaths, checkoutPath, category, features, logger, uploadTarget) { +async function uploadSpecifiedFiles(tempDir, codeql, sarifPaths, checkoutPath, category, features, logger, uploadTarget) { const processingResults = await postProcessSarifFiles( logger, features, + async () => codeql, + tempDir, checkoutPath, sarifPaths, category, @@ -110652,19 +110616,19 @@ async function uploadPostProcessedFiles(logger, checkoutPath, uploadTarget, post }; } function dumpSarifFile(sarifPayload, outputDir, logger, uploadTarget) { - if (!fs11.existsSync(outputDir)) { - fs11.mkdirSync(outputDir, { recursive: true }); - } else if (!fs11.lstatSync(outputDir).isDirectory()) { + if (!fs10.existsSync(outputDir)) { + fs10.mkdirSync(outputDir, { recursive: true }); + } else if (!fs10.lstatSync(outputDir).isDirectory()) { throw new ConfigurationError( `The path that processed SARIF files should be written to exists, but is not a directory: ${outputDir}` ); } - const outputFile = path11.resolve( + const outputFile = path10.resolve( outputDir, `upload${uploadTarget.sarifExtension}` ); logger.info(`Writing processed SARIF file to ${outputFile}`); - fs11.writeFileSync(outputFile, sarifPayload); + fs10.writeFileSync(outputFile, sarifPayload); } var STATUS_CHECK_FREQUENCY_MILLISECONDS = 5 * 1e3; var STATUS_CHECK_TIMEOUT_MILLISECONDS = 2 * 60 * 1e3; @@ -110807,7 +110771,7 @@ function filterAlertsByDiffRange(logger, sarif) { if (!locationUri || locationStartLine === void 0) { return false; } - const locationPath = path11.join(checkoutPath, locationUri).replaceAll(path11.sep, "/"); + const 
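// Illustrative sketch, not part of the applied diff: with the new signatures above,
// combineSarifFilesUsingCLI no longer looks up the CodeQL instance or temp directory
// itself; its callers resolve them and thread them through uploadSpecifiedFiles and
// postProcessSarifFiles. The exact call sites live outside this hunk, so the control
// flow below is an assumption based on the code that was removed from
// combineSarifFilesUsingCLI:
async function exampleResolveCodeQLForUpload(logger, gitHubVersion, features) {
  const tempDir = getTemporaryDirectory();
  const config = await getConfig(tempDir, logger);
  if (config !== undefined) {
    // The 'init' Action ran: reuse its CodeQL CLI and temporary directory.
    return { codeql: await getCodeQL(config.codeQLCmd), tempDir: config.tempDir };
  }
  // Otherwise fall back to the newly extracted minimalInitCodeQL helper.
  return { codeql: await minimalInitCodeQL(logger, gitHubVersion, features), tempDir };
}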
locationPath = path10.join(checkoutPath, locationUri).replaceAll(path10.sep, "/"); return diffRanges.some( (range) => range.path === locationPath && (range.startLine <= locationStartLine && range.endLine >= locationStartLine || range.startLine === 0 && range.endLine === 0) ); diff --git a/lib/upload-sarif-action.js b/lib/upload-sarif-action.js index f4635e27da..2190120dce 100644 --- a/lib/upload-sarif-action.js +++ b/lib/upload-sarif-action.js @@ -47409,1245 +47409,1306 @@ var require_light = __commonJS({ } }); -// node_modules/@actions/glob/lib/internal-glob-options-helper.js -var require_internal_glob_options_helper = __commonJS({ - "node_modules/@actions/glob/lib/internal-glob-options-helper.js"(exports2) { +// node_modules/jsonschema/lib/helpers.js +var require_helpers = __commonJS({ + "node_modules/jsonschema/lib/helpers.js"(exports2, module2) { "use strict"; - var __createBinding2 = exports2 && exports2.__createBinding || (Object.create ? (function(o, m, k, k2) { - if (k2 === void 0) k2 = k; - var desc = Object.getOwnPropertyDescriptor(m, k); - if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { - desc = { enumerable: true, get: function() { - return m[k]; - } }; + var uri = require("url"); + var ValidationError = exports2.ValidationError = function ValidationError2(message, instance, schema2, path13, name, argument) { + if (Array.isArray(path13)) { + this.path = path13; + this.property = path13.reduce(function(sum, item) { + return sum + makeSuffix(item); + }, "instance"); + } else if (path13 !== void 0) { + this.property = path13; } - Object.defineProperty(o, k2, desc); - }) : (function(o, m, k, k2) { - if (k2 === void 0) k2 = k; - o[k2] = m[k]; - })); - var __setModuleDefault2 = exports2 && exports2.__setModuleDefault || (Object.create ? 
(function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); - }) : function(o, v) { - o["default"] = v; - }); - var __importStar2 = exports2 && exports2.__importStar || /* @__PURE__ */ (function() { - var ownKeys2 = function(o) { - ownKeys2 = Object.getOwnPropertyNames || function(o2) { - var ar = []; - for (var k in o2) if (Object.prototype.hasOwnProperty.call(o2, k)) ar[ar.length] = k; - return ar; - }; - return ownKeys2(o); - }; - return function(mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) { - for (var k = ownKeys2(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding2(result, mod, k[i]); - } - __setModuleDefault2(result, mod); - return result; - }; - })(); - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.getOptions = getOptions; - var core14 = __importStar2(require_core()); - function getOptions(copy) { - const result = { - followSymbolicLinks: true, - implicitDescendants: true, - matchDirectories: true, - omitBrokenSymbolicLinks: true, - excludeHiddenFiles: false - }; - if (copy) { - if (typeof copy.followSymbolicLinks === "boolean") { - result.followSymbolicLinks = copy.followSymbolicLinks; - core14.debug(`followSymbolicLinks '${result.followSymbolicLinks}'`); - } - if (typeof copy.implicitDescendants === "boolean") { - result.implicitDescendants = copy.implicitDescendants; - core14.debug(`implicitDescendants '${result.implicitDescendants}'`); - } - if (typeof copy.matchDirectories === "boolean") { - result.matchDirectories = copy.matchDirectories; - core14.debug(`matchDirectories '${result.matchDirectories}'`); - } - if (typeof copy.omitBrokenSymbolicLinks === "boolean") { - result.omitBrokenSymbolicLinks = copy.omitBrokenSymbolicLinks; - core14.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`); - } - if (typeof copy.excludeHiddenFiles === "boolean") { - result.excludeHiddenFiles = copy.excludeHiddenFiles; - core14.debug(`excludeHiddenFiles '${result.excludeHiddenFiles}'`); - } + if (message) { + this.message = message; } - return result; + if (schema2) { + var id = schema2.$id || schema2.id; + this.schema = id || schema2; + } + if (instance !== void 0) { + this.instance = instance; + } + this.name = name; + this.argument = argument; + this.stack = this.toString(); + }; + ValidationError.prototype.toString = function toString3() { + return this.property + " " + this.message; + }; + var ValidatorResult = exports2.ValidatorResult = function ValidatorResult2(instance, schema2, options, ctx) { + this.instance = instance; + this.schema = schema2; + this.options = options; + this.path = ctx.path; + this.propertyPath = ctx.propertyPath; + this.errors = []; + this.throwError = options && options.throwError; + this.throwFirst = options && options.throwFirst; + this.throwAll = options && options.throwAll; + this.disableFormat = options && options.disableFormat === true; + }; + ValidatorResult.prototype.addError = function addError(detail) { + var err; + if (typeof detail == "string") { + err = new ValidationError(detail, this.instance, this.schema, this.path); + } else { + if (!detail) throw new Error("Missing error detail"); + if (!detail.message) throw new Error("Missing error message"); + if (!detail.name) throw new Error("Missing validator type"); + err = new ValidationError(detail.message, this.instance, this.schema, this.path, detail.name, detail.argument); + } + this.errors.push(err); + if (this.throwFirst) { + throw new ValidatorResultError(this); + } 
else if (this.throwError) { + throw err; + } + return err; + }; + ValidatorResult.prototype.importErrors = function importErrors(res) { + if (typeof res == "string" || res && res.validatorType) { + this.addError(res); + } else if (res && res.errors) { + this.errors = this.errors.concat(res.errors); + } + }; + function stringizer(v, i) { + return i + ": " + v.toString() + "\n"; } - } -}); - -// node_modules/@actions/glob/lib/internal-path-helper.js -var require_internal_path_helper = __commonJS({ - "node_modules/@actions/glob/lib/internal-path-helper.js"(exports2) { - "use strict"; - var __createBinding2 = exports2 && exports2.__createBinding || (Object.create ? (function(o, m, k, k2) { - if (k2 === void 0) k2 = k; - var desc = Object.getOwnPropertyDescriptor(m, k); - if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { - desc = { enumerable: true, get: function() { - return m[k]; - } }; + ValidatorResult.prototype.toString = function toString3(res) { + return this.errors.map(stringizer).join(""); + }; + Object.defineProperty(ValidatorResult.prototype, "valid", { get: function() { + return !this.errors.length; + } }); + module2.exports.ValidatorResultError = ValidatorResultError; + function ValidatorResultError(result) { + if (Error.captureStackTrace) { + Error.captureStackTrace(this, ValidatorResultError); } - Object.defineProperty(o, k2, desc); - }) : (function(o, m, k, k2) { - if (k2 === void 0) k2 = k; - o[k2] = m[k]; - })); - var __setModuleDefault2 = exports2 && exports2.__setModuleDefault || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); - }) : function(o, v) { - o["default"] = v; - }); - var __importStar2 = exports2 && exports2.__importStar || /* @__PURE__ */ (function() { - var ownKeys2 = function(o) { - ownKeys2 = Object.getOwnPropertyNames || function(o2) { - var ar = []; - for (var k in o2) if (Object.prototype.hasOwnProperty.call(o2, k)) ar[ar.length] = k; - return ar; - }; - return ownKeys2(o); - }; - return function(mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) { - for (var k = ownKeys2(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding2(result, mod, k[i]); + this.instance = result.instance; + this.schema = result.schema; + this.options = result.options; + this.errors = result.errors; + } + ValidatorResultError.prototype = new Error(); + ValidatorResultError.prototype.constructor = ValidatorResultError; + ValidatorResultError.prototype.name = "Validation Error"; + var SchemaError = exports2.SchemaError = function SchemaError2(msg, schema2) { + this.message = msg; + this.schema = schema2; + Error.call(this, msg); + Error.captureStackTrace(this, SchemaError2); + }; + SchemaError.prototype = Object.create( + Error.prototype, + { + constructor: { value: SchemaError, enumerable: false }, + name: { value: "SchemaError", enumerable: false } + } + ); + var SchemaContext = exports2.SchemaContext = function SchemaContext2(schema2, options, path13, base, schemas) { + this.schema = schema2; + this.options = options; + if (Array.isArray(path13)) { + this.path = path13; + this.propertyPath = path13.reduce(function(sum, item) { + return sum + makeSuffix(item); + }, "instance"); + } else { + this.propertyPath = path13; + } + this.base = base; + this.schemas = schemas; + }; + SchemaContext.prototype.resolve = function resolve6(target) { + return uri.resolve(this.base, target); + }; + SchemaContext.prototype.makeChild = function 
makeChild(schema2, propertyName) { + var path13 = propertyName === void 0 ? this.path : this.path.concat([propertyName]); + var id = schema2.$id || schema2.id; + var base = uri.resolve(this.base, id || ""); + var ctx = new SchemaContext(schema2, this.options, path13, base, Object.create(this.schemas)); + if (id && !ctx.schemas[base]) { + ctx.schemas[base] = schema2; + } + return ctx; + }; + var FORMAT_REGEXPS = exports2.FORMAT_REGEXPS = { + // 7.3.1. Dates, Times, and Duration + "date-time": /^\d{4}-(?:0[0-9]{1}|1[0-2]{1})-(3[01]|0[1-9]|[12][0-9])[tT ](2[0-4]|[01][0-9]):([0-5][0-9]):(60|[0-5][0-9])(\.\d+)?([zZ]|[+-]([0-5][0-9]):(60|[0-5][0-9]))$/, + "date": /^\d{4}-(?:0[0-9]{1}|1[0-2]{1})-(3[01]|0[1-9]|[12][0-9])$/, + "time": /^(2[0-4]|[01][0-9]):([0-5][0-9]):(60|[0-5][0-9])$/, + "duration": /P(T\d+(H(\d+M(\d+S)?)?|M(\d+S)?|S)|\d+(D|M(\d+D)?|Y(\d+M(\d+D)?)?)(T\d+(H(\d+M(\d+S)?)?|M(\d+S)?|S))?|\d+W)/i, + // 7.3.2. Email Addresses + // TODO: fix the email production + "email": /^(?:[\w\!\#\$\%\&\'\*\+\-\/\=\?\^\`\{\|\}\~]+\.)*[\w\!\#\$\%\&\'\*\+\-\/\=\?\^\`\{\|\}\~]+@(?:(?:(?:[a-zA-Z0-9](?:[a-zA-Z0-9\-](?!\.)){0,61}[a-zA-Z0-9]?\.)+[a-zA-Z0-9](?:[a-zA-Z0-9\-](?!$)){0,61}[a-zA-Z0-9]?)|(?:\[(?:(?:[01]?\d{1,2}|2[0-4]\d|25[0-5])\.){3}(?:[01]?\d{1,2}|2[0-4]\d|25[0-5])\]))$/, + "idn-email": /^("(?:[!#-\[\]-\u{10FFFF}]|\\[\t -\u{10FFFF}])*"|[!#-'*+\-/-9=?A-Z\^-\u{10FFFF}](?:\.?[!#-'*+\-/-9=?A-Z\^-\u{10FFFF}])*)@([!#-'*+\-/-9=?A-Z\^-\u{10FFFF}](?:\.?[!#-'*+\-/-9=?A-Z\^-\u{10FFFF}])*|\[[!-Z\^-\u{10FFFF}]*\])$/u, + // 7.3.3. Hostnames + // 7.3.4. IP Addresses + "ip-address": /^(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$/, + // FIXME whitespace is invalid + "ipv6": /^\s*((([0-9A-Fa-f]{1,4}:){7}([0-9A-Fa-f]{1,4}|:))|(([0-9A-Fa-f]{1,4}:){6}(:[0-9A-Fa-f]{1,4}|((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3})|:))|(([0-9A-Fa-f]{1,4}:){5}(((:[0-9A-Fa-f]{1,4}){1,2})|:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3})|:))|(([0-9A-Fa-f]{1,4}:){4}(((:[0-9A-Fa-f]{1,4}){1,3})|((:[0-9A-Fa-f]{1,4})?:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){3}(((:[0-9A-Fa-f]{1,4}){1,4})|((:[0-9A-Fa-f]{1,4}){0,2}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){2}(((:[0-9A-Fa-f]{1,4}){1,5})|((:[0-9A-Fa-f]{1,4}){0,3}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){1}(((:[0-9A-Fa-f]{1,4}){1,6})|((:[0-9A-Fa-f]{1,4}){0,4}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(:(((:[0-9A-Fa-f]{1,4}){1,7})|((:[0-9A-Fa-f]{1,4}){0,5}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:)))(%.+)?\s*$/, + // 7.3.5. Resource Identifiers + // TODO: A more accurate regular expression for "uri" goes: + // [A-Za-z][+\-.0-9A-Za-z]*:((/(/((%[0-9A-Fa-f]{2}|[!$&-.0-9;=A-Z_a-z~])+|(\[(([Vv][0-9A-Fa-f]+\.[!$&-.0-;=A-Z_a-z~]+)?|[.0-:A-Fa-f]+)\])?)(:\d*)?)?)?#(%[0-9A-Fa-f]{2}|[!$&-;=?-Z_a-z~])*|(/(/((%[0-9A-Fa-f]{2}|[!$&-.0-9;=A-Z_a-z~])+|(\[(([Vv][0-9A-Fa-f]+\.[!$&-.0-;=A-Z_a-z~]+)?|[.0-:A-Fa-f]+)\])?)(:\d*)?[/?]|[!$&-.0-;=?-Z_a-z~])|/?%[0-9A-Fa-f]{2}|[!$&-.0-;=?-Z_a-z~])(%[0-9A-Fa-f]{2}|[!$&-;=?-Z_a-z~])*(#(%[0-9A-Fa-f]{2}|[!$&-;=?-Z_a-z~])*)?|/(/((%[0-9A-Fa-f]{2}|[!$&-.0-9;=A-Z_a-z~])+(:\d*)?|(\[(([Vv][0-9A-Fa-f]+\.[!$&-.0-;=A-Z_a-z~]+)?|[.0-:A-Fa-f]+)\])?:\d*|\[(([Vv][0-9A-Fa-f]+\.[!$&-.0-;=A-Z_a-z~]+)?|[.0-:A-Fa-f]+)\])?)?)? 
+ "uri": /^[a-zA-Z][a-zA-Z0-9+.-]*:[^\s]*$/, + "uri-reference": /^(((([A-Za-z][+\-.0-9A-Za-z]*(:%[0-9A-Fa-f]{2}|:[!$&-.0-;=?-Z_a-z~]|[/?])|\?)(%[0-9A-Fa-f]{2}|[!$&-;=?-Z_a-z~])*|([A-Za-z][+\-.0-9A-Za-z]*:?)?)|([A-Za-z][+\-.0-9A-Za-z]*:)?\/((%[0-9A-Fa-f]{2}|\/((%[0-9A-Fa-f]{2}|[!$&-.0-9;=A-Z_a-z~])+|(\[(([Vv][0-9A-Fa-f]+\.[!$&-.0-;=A-Z_a-z~]+)?|[.0-:A-Fa-f]+)\])?)(:\d*)?[/?]|[!$&-.0-;=?-Z_a-z~])(%[0-9A-Fa-f]{2}|[!$&-;=?-Z_a-z~])*|(\/((%[0-9A-Fa-f]{2}|[!$&-.0-9;=A-Z_a-z~])+|(\[(([Vv][0-9A-Fa-f]+\.[!$&-.0-;=A-Z_a-z~]+)?|[.0-:A-Fa-f]+)\])?)(:\d*)?)?))#(%[0-9A-Fa-f]{2}|[!$&-;=?-Z_a-z~])*|(([A-Za-z][+\-.0-9A-Za-z]*)?%[0-9A-Fa-f]{2}|[!$&-.0-9;=@_~]|[A-Za-z][+\-.0-9A-Za-z]*[!$&-*,;=@_~])(%[0-9A-Fa-f]{2}|[!$&-.0-9;=@-Z_a-z~])*((([/?](%[0-9A-Fa-f]{2}|[!$&-;=?-Z_a-z~])*)?#|[/?])(%[0-9A-Fa-f]{2}|[!$&-;=?-Z_a-z~])*)?|([A-Za-z][+\-.0-9A-Za-z]*(:%[0-9A-Fa-f]{2}|:[!$&-.0-;=?-Z_a-z~]|[/?])|\?)(%[0-9A-Fa-f]{2}|[!$&-;=?-Z_a-z~])*|([A-Za-z][+\-.0-9A-Za-z]*:)?\/((%[0-9A-Fa-f]{2}|\/((%[0-9A-Fa-f]{2}|[!$&-.0-9;=A-Z_a-z~])+|(\[(([Vv][0-9A-Fa-f]+\.[!$&-.0-;=A-Z_a-z~]+)?|[.0-:A-Fa-f]+)\])?)(:\d*)?[/?]|[!$&-.0-;=?-Z_a-z~])(%[0-9A-Fa-f]{2}|[!$&-;=?-Z_a-z~])*|\/((%[0-9A-Fa-f]{2}|[!$&-.0-9;=A-Z_a-z~])+(:\d*)?|(\[(([Vv][0-9A-Fa-f]+\.[!$&-.0-;=A-Z_a-z~]+)?|[.0-:A-Fa-f]+)\])?:\d*|\[(([Vv][0-9A-Fa-f]+\.[!$&-.0-;=A-Z_a-z~]+)?|[.0-:A-Fa-f]+)\])?)?|[A-Za-z][+\-.0-9A-Za-z]*:?)?$/, + "iri": /^[a-zA-Z][a-zA-Z0-9+.-]*:[^\s]*$/, + "iri-reference": /^(((([A-Za-z][+\-.0-9A-Za-z]*(:%[0-9A-Fa-f]{2}|:[!$&-.0-;=?-Z_a-z~-\u{10FFFF}]|[/?])|\?)(%[0-9A-Fa-f]{2}|[!$&-;=?-Z_a-z~-\u{10FFFF}])*|([A-Za-z][+\-.0-9A-Za-z]*:?)?)|([A-Za-z][+\-.0-9A-Za-z]*:)?\/((%[0-9A-Fa-f]{2}|\/((%[0-9A-Fa-f]{2}|[!$&-.0-9;=A-Z_a-z~-\u{10FFFF}])+|(\[(([Vv][0-9A-Fa-f]+\.[!$&-.0-;=A-Z_a-z~-\u{10FFFF}]+)?|[.0-:A-Fa-f]+)\])?)(:\d*)?[/?]|[!$&-.0-;=?-Z_a-z~-\u{10FFFF}])(%[0-9A-Fa-f]{2}|[!$&-;=?-Z_a-z~-\u{10FFFF}])*|(\/((%[0-9A-Fa-f]{2}|[!$&-.0-9;=A-Z_a-z~-\u{10FFFF}])+|(\[(([Vv][0-9A-Fa-f]+\.[!$&-.0-;=A-Z_a-z~-\u{10FFFF}]+)?|[.0-:A-Fa-f]+)\])?)(:\d*)?)?))#(%[0-9A-Fa-f]{2}|[!$&-;=?-Z_a-z~-\u{10FFFF}])*|(([A-Za-z][+\-.0-9A-Za-z]*)?%[0-9A-Fa-f]{2}|[!$&-.0-9;=@_~-\u{10FFFF}]|[A-Za-z][+\-.0-9A-Za-z]*[!$&-*,;=@_~-\u{10FFFF}])(%[0-9A-Fa-f]{2}|[!$&-.0-9;=@-Z_a-z~-\u{10FFFF}])*((([/?](%[0-9A-Fa-f]{2}|[!$&-;=?-Z_a-z~-\u{10FFFF}])*)?#|[/?])(%[0-9A-Fa-f]{2}|[!$&-;=?-Z_a-z~-\u{10FFFF}])*)?|([A-Za-z][+\-.0-9A-Za-z]*(:%[0-9A-Fa-f]{2}|:[!$&-.0-;=?-Z_a-z~-\u{10FFFF}]|[/?])|\?)(%[0-9A-Fa-f]{2}|[!$&-;=?-Z_a-z~-\u{10FFFF}])*|([A-Za-z][+\-.0-9A-Za-z]*:)?\/((%[0-9A-Fa-f]{2}|\/((%[0-9A-Fa-f]{2}|[!$&-.0-9;=A-Z_a-z~-\u{10FFFF}])+|(\[(([Vv][0-9A-Fa-f]+\.[!$&-.0-;=A-Z_a-z~-\u{10FFFF}]+)?|[.0-:A-Fa-f]+)\])?)(:\d*)?[/?]|[!$&-.0-;=?-Z_a-z~-\u{10FFFF}])(%[0-9A-Fa-f]{2}|[!$&-;=?-Z_a-z~-\u{10FFFF}])*|\/((%[0-9A-Fa-f]{2}|[!$&-.0-9;=A-Z_a-z~-\u{10FFFF}])+(:\d*)?|(\[(([Vv][0-9A-Fa-f]+\.[!$&-.0-;=A-Z_a-z~-\u{10FFFF}]+)?|[.0-:A-Fa-f]+)\])?:\d*|\[(([Vv][0-9A-Fa-f]+\.[!$&-.0-;=A-Z_a-z~-\u{10FFFF}]+)?|[.0-:A-Fa-f]+)\])?)?|[A-Za-z][+\-.0-9A-Za-z]*:?)?$/u, + "uuid": /^[0-9A-F]{8}-[0-9A-F]{4}-[0-9A-F]{4}-[0-9A-F]{4}-[0-9A-F]{12}$/i, + // 7.3.6. uri-template + "uri-template": /(%[0-9a-f]{2}|[!#$&(-;=?@\[\]_a-z~]|\{[!#&+,./;=?@|]?(%[0-9a-f]{2}|[0-9_a-z])(\.?(%[0-9a-f]{2}|[0-9_a-z]))*(:[1-9]\d{0,3}|\*)?(,(%[0-9a-f]{2}|[0-9_a-z])(\.?(%[0-9a-f]{2}|[0-9_a-z]))*(:[1-9]\d{0,3}|\*)?)*\})*/iu, + // 7.3.7. 
JSON Pointers + "json-pointer": /^(\/([\x00-\x2e0-@\[-}\x7f]|~[01])*)*$/iu, + "relative-json-pointer": /^\d+(#|(\/([\x00-\x2e0-@\[-}\x7f]|~[01])*)*)$/iu, + // hostname regex from: http://stackoverflow.com/a/1420225/5628 + "hostname": /^(?=.{1,255}$)[0-9A-Za-z](?:(?:[0-9A-Za-z]|-){0,61}[0-9A-Za-z])?(?:\.[0-9A-Za-z](?:(?:[0-9A-Za-z]|-){0,61}[0-9A-Za-z])?)*\.?$/, + "host-name": /^(?=.{1,255}$)[0-9A-Za-z](?:(?:[0-9A-Za-z]|-){0,61}[0-9A-Za-z])?(?:\.[0-9A-Za-z](?:(?:[0-9A-Za-z]|-){0,61}[0-9A-Za-z])?)*\.?$/, + "utc-millisec": function(input) { + return typeof input === "string" && parseFloat(input) === parseInt(input, 10) && !isNaN(input); + }, + // 7.3.8. regex + "regex": function(input) { + var result = true; + try { + new RegExp(input); + } catch (e) { + result = false; } - __setModuleDefault2(result, mod); return result; - }; - })(); - var __importDefault2 = exports2 && exports2.__importDefault || function(mod) { - return mod && mod.__esModule ? mod : { "default": mod }; + }, + // Other definitions + // "style" was removed from JSON Schema in draft-4 and is deprecated + "style": /[\r\n\t ]*[^\r\n\t ][^:]*:[\r\n\t ]*[^\r\n\t ;]*[\r\n\t ]*;?/, + // "color" was removed from JSON Schema in draft-4 and is deprecated + "color": /^(#?([0-9A-Fa-f]{3}){1,2}\b|aqua|black|blue|fuchsia|gray|green|lime|maroon|navy|olive|orange|purple|red|silver|teal|white|yellow|(rgb\(\s*\b([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\b\s*,\s*\b([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\b\s*,\s*\b([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\b\s*\))|(rgb\(\s*(\d?\d%|100%)+\s*,\s*(\d?\d%|100%)+\s*,\s*(\d?\d%|100%)+\s*\)))$/, + "phone": /^\+(?:[0-9] ?){6,14}[0-9]$/, + "alpha": /^[a-zA-Z]+$/, + "alphanumeric": /^[a-zA-Z0-9]+$/ }; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.dirname = dirname3; - exports2.ensureAbsoluteRoot = ensureAbsoluteRoot; - exports2.hasAbsoluteRoot = hasAbsoluteRoot; - exports2.hasRoot = hasRoot; - exports2.normalizeSeparators = normalizeSeparators; - exports2.safeTrimTrailingSeparator = safeTrimTrailingSeparator; - var path13 = __importStar2(require("path")); - var assert_1 = __importDefault2(require("assert")); - var IS_WINDOWS = process.platform === "win32"; - function dirname3(p) { - p = safeTrimTrailingSeparator(p); - if (IS_WINDOWS && /^\\\\[^\\]+(\\[^\\]+)?$/.test(p)) { - return p; + FORMAT_REGEXPS.regexp = FORMAT_REGEXPS.regex; + FORMAT_REGEXPS.pattern = FORMAT_REGEXPS.regex; + FORMAT_REGEXPS.ipv4 = FORMAT_REGEXPS["ip-address"]; + exports2.isFormat = function isFormat(input, format, validator) { + if (typeof input === "string" && FORMAT_REGEXPS[format] !== void 0) { + if (FORMAT_REGEXPS[format] instanceof RegExp) { + return FORMAT_REGEXPS[format].test(input); + } + if (typeof FORMAT_REGEXPS[format] === "function") { + return FORMAT_REGEXPS[format](input); + } + } else if (validator && validator.customFormats && typeof validator.customFormats[format] === "function") { + return validator.customFormats[format](input); } - let result = path13.dirname(p); - if (IS_WINDOWS && /^\\\\[^\\]+\\[^\\]+\\$/.test(result)) { - result = safeTrimTrailingSeparator(result); + return true; + }; + var makeSuffix = exports2.makeSuffix = function makeSuffix2(key) { + key = key.toString(); + if (!key.match(/[.\s\[\]]/) && !key.match(/^[\d]/)) { + return "." 
+ key; } - return result; - } - function ensureAbsoluteRoot(root, itemPath) { - (0, assert_1.default)(root, `ensureAbsoluteRoot parameter 'root' must not be empty`); - (0, assert_1.default)(itemPath, `ensureAbsoluteRoot parameter 'itemPath' must not be empty`); - if (hasAbsoluteRoot(itemPath)) { - return itemPath; + if (key.match(/^\d+$/)) { + return "[" + key + "]"; } - if (IS_WINDOWS) { - if (itemPath.match(/^[A-Z]:[^\\/]|^[A-Z]:$/i)) { - let cwd = process.cwd(); - (0, assert_1.default)(cwd.match(/^[A-Z]:\\/i), `Expected current directory to start with an absolute drive root. Actual '${cwd}'`); - if (itemPath[0].toUpperCase() === cwd[0].toUpperCase()) { - if (itemPath.length === 2) { - return `${itemPath[0]}:\\${cwd.substr(3)}`; - } else { - if (!cwd.endsWith("\\")) { - cwd += "\\"; - } - return `${itemPath[0]}:\\${cwd.substr(3)}${itemPath.substr(2)}`; - } - } else { - return `${itemPath[0]}:\\${itemPath.substr(2)}`; - } - } else if (normalizeSeparators(itemPath).match(/^\\$|^\\[^\\]/)) { - const cwd = process.cwd(); - (0, assert_1.default)(cwd.match(/^[A-Z]:\\/i), `Expected current directory to start with an absolute drive root. Actual '${cwd}'`); - return `${cwd[0]}:\\${itemPath.substr(1)}`; + return "[" + JSON.stringify(key) + "]"; + }; + exports2.deepCompareStrict = function deepCompareStrict(a, b) { + if (typeof a !== typeof b) { + return false; + } + if (Array.isArray(a)) { + if (!Array.isArray(b)) { + return false; } + if (a.length !== b.length) { + return false; + } + return a.every(function(v, i) { + return deepCompareStrict(a[i], b[i]); + }); } - (0, assert_1.default)(hasAbsoluteRoot(root), `ensureAbsoluteRoot parameter 'root' must have an absolute root`); - if (root.endsWith("/") || IS_WINDOWS && root.endsWith("\\")) { + if (typeof a === "object") { + if (!a || !b) { + return a === b; + } + var aKeys = Object.keys(a); + var bKeys = Object.keys(b); + if (aKeys.length !== bKeys.length) { + return false; + } + return aKeys.every(function(v) { + return deepCompareStrict(a[v], b[v]); + }); + } + return a === b; + }; + function deepMerger(target, dst, e, i) { + if (typeof e === "object") { + dst[i] = deepMerge(target[i], e); } else { - root += path13.sep; + if (target.indexOf(e) === -1) { + dst.push(e); + } } - return root + itemPath; } - function hasAbsoluteRoot(itemPath) { - (0, assert_1.default)(itemPath, `hasAbsoluteRoot parameter 'itemPath' must not be empty`); - itemPath = normalizeSeparators(itemPath); - if (IS_WINDOWS) { - return itemPath.startsWith("\\\\") || /^[A-Z]:\\/i.test(itemPath); - } - return itemPath.startsWith("/"); + function copyist(src, dst, key) { + dst[key] = src[key]; } - function hasRoot(itemPath) { - (0, assert_1.default)(itemPath, `isRooted parameter 'itemPath' must not be empty`); - itemPath = normalizeSeparators(itemPath); - if (IS_WINDOWS) { - return itemPath.startsWith("\\") || /^[A-Z]:/i.test(itemPath); + function copyistWithDeepMerge(target, src, dst, key) { + if (typeof src[key] !== "object" || !src[key]) { + dst[key] = src[key]; + } else { + if (!target[key]) { + dst[key] = src[key]; + } else { + dst[key] = deepMerge(target[key], src[key]); + } } - return itemPath.startsWith("/"); } - function normalizeSeparators(p) { - p = p || ""; - if (IS_WINDOWS) { - p = p.replace(/\//g, "\\"); - const isUnc = /^\\\\+[^\\]/.test(p); - return (isUnc ? 
"\\" : "") + p.replace(/\\\\+/g, "\\"); + function deepMerge(target, src) { + var array = Array.isArray(src); + var dst = array && [] || {}; + if (array) { + target = target || []; + dst = dst.concat(target); + src.forEach(deepMerger.bind(null, target, dst)); + } else { + if (target && typeof target === "object") { + Object.keys(target).forEach(copyist.bind(null, target, dst)); + } + Object.keys(src).forEach(copyistWithDeepMerge.bind(null, target, src, dst)); } - return p.replace(/\/\/+/g, "/"); + return dst; } - function safeTrimTrailingSeparator(p) { - if (!p) { - return ""; + module2.exports.deepMerge = deepMerge; + exports2.objectGetPath = function objectGetPath(o, s) { + var parts = s.split("/").slice(1); + var k; + while (typeof (k = parts.shift()) == "string") { + var n = decodeURIComponent(k.replace(/~0/, "~").replace(/~1/g, "/")); + if (!(n in o)) return; + o = o[n]; } - p = normalizeSeparators(p); - if (!p.endsWith(path13.sep)) { - return p; + return o; + }; + function pathEncoder(v) { + return "/" + encodeURIComponent(v).replace(/~/g, "%7E"); + } + exports2.encodePath = function encodePointer(a) { + return a.map(pathEncoder).join(""); + }; + exports2.getDecimalPlaces = function getDecimalPlaces(number) { + var decimalPlaces = 0; + if (isNaN(number)) return decimalPlaces; + if (typeof number !== "number") { + number = Number(number); } - if (p === path13.sep) { - return p; + var parts = number.toString().split("e"); + if (parts.length === 2) { + if (parts[1][0] !== "-") { + return decimalPlaces; + } else { + decimalPlaces = Number(parts[1].slice(1)); + } } - if (IS_WINDOWS && /^[A-Z]:\\$/i.test(p)) { - return p; + var decimalParts = parts[0].split("."); + if (decimalParts.length === 2) { + decimalPlaces += decimalParts[1].length; } - return p.substr(0, p.length - 1); - } - } -}); - -// node_modules/@actions/glob/lib/internal-match-kind.js -var require_internal_match_kind = __commonJS({ - "node_modules/@actions/glob/lib/internal-match-kind.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.MatchKind = void 0; - var MatchKind; - (function(MatchKind2) { - MatchKind2[MatchKind2["None"] = 0] = "None"; - MatchKind2[MatchKind2["Directory"] = 1] = "Directory"; - MatchKind2[MatchKind2["File"] = 2] = "File"; - MatchKind2[MatchKind2["All"] = 3] = "All"; - })(MatchKind || (exports2.MatchKind = MatchKind = {})); + return decimalPlaces; + }; + exports2.isSchema = function isSchema(val) { + return typeof val === "object" && val || typeof val === "boolean"; + }; } }); -// node_modules/@actions/glob/lib/internal-pattern-helper.js -var require_internal_pattern_helper = __commonJS({ - "node_modules/@actions/glob/lib/internal-pattern-helper.js"(exports2) { +// node_modules/jsonschema/lib/attribute.js +var require_attribute = __commonJS({ + "node_modules/jsonschema/lib/attribute.js"(exports2, module2) { "use strict"; - var __createBinding2 = exports2 && exports2.__createBinding || (Object.create ? (function(o, m, k, k2) { - if (k2 === void 0) k2 = k; - var desc = Object.getOwnPropertyDescriptor(m, k); - if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { - desc = { enumerable: true, get: function() { - return m[k]; - } }; - } - Object.defineProperty(o, k2, desc); - }) : (function(o, m, k, k2) { - if (k2 === void 0) k2 = k; - o[k2] = m[k]; - })); - var __setModuleDefault2 = exports2 && exports2.__setModuleDefault || (Object.create ? 
(function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); - }) : function(o, v) { - o["default"] = v; - }); - var __importStar2 = exports2 && exports2.__importStar || /* @__PURE__ */ (function() { - var ownKeys2 = function(o) { - ownKeys2 = Object.getOwnPropertyNames || function(o2) { - var ar = []; - for (var k in o2) if (Object.prototype.hasOwnProperty.call(o2, k)) ar[ar.length] = k; - return ar; - }; - return ownKeys2(o); - }; - return function(mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) { - for (var k = ownKeys2(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding2(result, mod, k[i]); - } - __setModuleDefault2(result, mod); - return result; - }; - })(); - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.getSearchPaths = getSearchPaths; - exports2.match = match; - exports2.partialMatch = partialMatch; - var pathHelper = __importStar2(require_internal_path_helper()); - var internal_match_kind_1 = require_internal_match_kind(); - var IS_WINDOWS = process.platform === "win32"; - function getSearchPaths(patterns) { - patterns = patterns.filter((x) => !x.negate); - const searchPathMap = {}; - for (const pattern of patterns) { - const key = IS_WINDOWS ? pattern.searchPath.toUpperCase() : pattern.searchPath; - searchPathMap[key] = "candidate"; + var helpers = require_helpers(); + var ValidatorResult = helpers.ValidatorResult; + var SchemaError = helpers.SchemaError; + var attribute = {}; + attribute.ignoreProperties = { + // informative properties + "id": true, + "default": true, + "description": true, + "title": true, + // arguments to other properties + "additionalItems": true, + "then": true, + "else": true, + // special-handled properties + "$schema": true, + "$ref": true, + "extends": true + }; + var validators = attribute.validators = {}; + validators.type = function validateType(instance, schema2, options, ctx) { + if (instance === void 0) { + return null; } - const result = []; - for (const pattern of patterns) { - const key = IS_WINDOWS ? pattern.searchPath.toUpperCase() : pattern.searchPath; - if (searchPathMap[key] === "included") { - continue; - } - let foundAncestor = false; - let tempKey = key; - let parent = pathHelper.dirname(tempKey); - while (parent !== tempKey) { - if (searchPathMap[parent]) { - foundAncestor = true; - break; - } - tempKey = parent; - parent = pathHelper.dirname(tempKey); - } - if (!foundAncestor) { - result.push(pattern.searchPath); - searchPathMap[key] = "included"; - } + var result = new ValidatorResult(instance, schema2, options, ctx); + var types = Array.isArray(schema2.type) ? schema2.type : [schema2.type]; + if (!types.some(this.testType.bind(this, instance, schema2, options, ctx))) { + var list = types.map(function(v) { + if (!v) return; + var id = v.$id || v.id; + return id ? 
"<" + id + ">" : v + ""; + }); + result.addError({ + name: "type", + argument: list, + message: "is not of a type(s) " + list + }); } return result; - } - function match(patterns, itemPath) { - let result = internal_match_kind_1.MatchKind.None; - for (const pattern of patterns) { - if (pattern.negate) { - result &= ~pattern.match(itemPath); - } else { - result |= pattern.match(itemPath); - } + }; + function testSchemaNoThrow(instance, options, ctx, callback, schema2) { + var throwError2 = options.throwError; + var throwAll = options.throwAll; + options.throwError = false; + options.throwAll = false; + var res = this.validateSchema(instance, schema2, options, ctx); + options.throwError = throwError2; + options.throwAll = throwAll; + if (!res.valid && callback instanceof Function) { + callback(res); } - return result; - } - function partialMatch(patterns, itemPath) { - return patterns.some((x) => !x.negate && x.partialMatch(itemPath)); + return res.valid; } - } -}); - -// node_modules/concat-map/index.js -var require_concat_map = __commonJS({ - "node_modules/concat-map/index.js"(exports2, module2) { - module2.exports = function(xs, fn) { - var res = []; - for (var i = 0; i < xs.length; i++) { - var x = fn(xs[i], i); - if (isArray(x)) res.push.apply(res, x); - else res.push(x); + validators.anyOf = function validateAnyOf(instance, schema2, options, ctx) { + if (instance === void 0) { + return null; } - return res; - }; - var isArray = Array.isArray || function(xs) { - return Object.prototype.toString.call(xs) === "[object Array]"; - }; - } -}); - -// node_modules/balanced-match/index.js -var require_balanced_match = __commonJS({ - "node_modules/balanced-match/index.js"(exports2, module2) { - "use strict"; - module2.exports = balanced; - function balanced(a, b, str2) { - if (a instanceof RegExp) a = maybeMatch(a, str2); - if (b instanceof RegExp) b = maybeMatch(b, str2); - var r = range(a, b, str2); - return r && { - start: r[0], - end: r[1], - pre: str2.slice(0, r[0]), - body: str2.slice(r[0] + a.length, r[1]), - post: str2.slice(r[1] + b.length) - }; - } - function maybeMatch(reg, str2) { - var m = str2.match(reg); - return m ? m[0] : null; - } - balanced.range = range; - function range(a, b, str2) { - var begs, beg, left, right, result; - var ai = str2.indexOf(a); - var bi = str2.indexOf(b, ai + 1); - var i = ai; - if (ai >= 0 && bi > 0) { - begs = []; - left = str2.length; - while (i >= 0 && !result) { - if (i == ai) { - begs.push(i); - ai = str2.indexOf(a, i + 1); - } else if (begs.length == 1) { - result = [begs.pop(), bi]; - } else { - beg = begs.pop(); - if (beg < left) { - left = beg; - right = bi; - } - bi = str2.indexOf(b, i + 1); + var result = new ValidatorResult(instance, schema2, options, ctx); + var inner = new ValidatorResult(instance, schema2, options, ctx); + if (!Array.isArray(schema2.anyOf)) { + throw new SchemaError("anyOf must be an array"); + } + if (!schema2.anyOf.some( + testSchemaNoThrow.bind( + this, + instance, + options, + ctx, + function(res) { + inner.importErrors(res); } - i = ai < bi && ai >= 0 ? 
ai : bi; - } - if (begs.length) { - result = [left, right]; + ) + )) { + var list = schema2.anyOf.map(function(v, i) { + var id = v.$id || v.id; + if (id) return "<" + id + ">"; + return v.title && JSON.stringify(v.title) || v["$ref"] && "<" + v["$ref"] + ">" || "[subschema " + i + "]"; + }); + if (options.nestedErrors) { + result.importErrors(inner); } + result.addError({ + name: "anyOf", + argument: list, + message: "is not any of " + list.join(",") + }); } return result; - } - } -}); - -// node_modules/brace-expansion/index.js -var require_brace_expansion = __commonJS({ - "node_modules/brace-expansion/index.js"(exports2, module2) { - var concatMap = require_concat_map(); - var balanced = require_balanced_match(); - module2.exports = expandTop; - var escSlash = "\0SLASH" + Math.random() + "\0"; - var escOpen = "\0OPEN" + Math.random() + "\0"; - var escClose = "\0CLOSE" + Math.random() + "\0"; - var escComma = "\0COMMA" + Math.random() + "\0"; - var escPeriod = "\0PERIOD" + Math.random() + "\0"; - function numeric(str2) { - return parseInt(str2, 10) == str2 ? parseInt(str2, 10) : str2.charCodeAt(0); - } - function escapeBraces(str2) { - return str2.split("\\\\").join(escSlash).split("\\{").join(escOpen).split("\\}").join(escClose).split("\\,").join(escComma).split("\\.").join(escPeriod); - } - function unescapeBraces(str2) { - return str2.split(escSlash).join("\\").split(escOpen).join("{").split(escClose).join("}").split(escComma).join(",").split(escPeriod).join("."); - } - function parseCommaParts(str2) { - if (!str2) - return [""]; - var parts = []; - var m = balanced("{", "}", str2); - if (!m) - return str2.split(","); - var pre = m.pre; - var body = m.body; - var post = m.post; - var p = pre.split(","); - p[p.length - 1] += "{" + body + "}"; - var postParts = parseCommaParts(post); - if (post.length) { - p[p.length - 1] += postParts.shift(); - p.push.apply(p, postParts); + }; + validators.allOf = function validateAllOf(instance, schema2, options, ctx) { + if (instance === void 0) { + return null; } - parts.push.apply(parts, p); - return parts; - } - function expandTop(str2) { - if (!str2) - return []; - if (str2.substr(0, 2) === "{}") { - str2 = "\\{\\}" + str2.substr(2); + if (!Array.isArray(schema2.allOf)) { + throw new SchemaError("allOf must be an array"); } - return expand2(escapeBraces(str2), true).map(unescapeBraces); - } - function embrace(str2) { - return "{" + str2 + "}"; - } - function isPadded(el) { - return /^-?0\d/.test(el); - } - function lte(i, y) { - return i <= y; - } - function gte6(i, y) { - return i >= y; - } - function expand2(str2, isTop) { - var expansions = []; - var m = balanced("{", "}", str2); - if (!m || /\$$/.test(m.pre)) return [str2]; - var isNumericSequence = /^-?\d+\.\.-?\d+(?:\.\.-?\d+)?$/.test(m.body); - var isAlphaSequence = /^[a-zA-Z]\.\.[a-zA-Z](?:\.\.-?\d+)?$/.test(m.body); - var isSequence = isNumericSequence || isAlphaSequence; - var isOptions = m.body.indexOf(",") >= 0; - if (!isSequence && !isOptions) { - if (m.post.match(/,(?!,).*\}/)) { - str2 = m.pre + "{" + m.body + escClose + m.post; - return expand2(str2); + var result = new ValidatorResult(instance, schema2, options, ctx); + var self2 = this; + schema2.allOf.forEach(function(v, i) { + var valid3 = self2.validateSchema(instance, v, options, ctx); + if (!valid3.valid) { + var id = v.$id || v.id; + var msg = id || v.title && JSON.stringify(v.title) || v["$ref"] && "<" + v["$ref"] + ">" || "[subschema " + i + "]"; + result.addError({ + name: "allOf", + argument: { id: msg, length: 
valid3.errors.length, valid: valid3 }, + message: "does not match allOf schema " + msg + " with " + valid3.errors.length + " error[s]:" + }); + result.importErrors(valid3); } - return [str2]; + }); + return result; + }; + validators.oneOf = function validateOneOf(instance, schema2, options, ctx) { + if (instance === void 0) { + return null; } - var n; - if (isSequence) { - n = m.body.split(/\.\./); - } else { - n = parseCommaParts(m.body); - if (n.length === 1) { - n = expand2(n[0], false).map(embrace); - if (n.length === 1) { - var post = m.post.length ? expand2(m.post, false) : [""]; - return post.map(function(p) { - return m.pre + n[0] + p; - }); - } - } + if (!Array.isArray(schema2.oneOf)) { + throw new SchemaError("oneOf must be an array"); } - var pre = m.pre; - var post = m.post.length ? expand2(m.post, false) : [""]; - var N; - if (isSequence) { - var x = numeric(n[0]); - var y = numeric(n[1]); - var width = Math.max(n[0].length, n[1].length); - var incr = n.length == 3 ? Math.abs(numeric(n[2])) : 1; - var test = lte; - var reverse = y < x; - if (reverse) { - incr *= -1; - test = gte6; - } - var pad = n.some(isPadded); - N = []; - for (var i = x; test(i, y); i += incr) { - var c; - if (isAlphaSequence) { - c = String.fromCharCode(i); - if (c === "\\") - c = ""; - } else { - c = String(i); - if (pad) { - var need = width - c.length; - if (need > 0) { - var z = new Array(need + 1).join("0"); - if (i < 0) - c = "-" + z + c.slice(1); - else - c = z + c; - } - } + var result = new ValidatorResult(instance, schema2, options, ctx); + var inner = new ValidatorResult(instance, schema2, options, ctx); + var count = schema2.oneOf.filter( + testSchemaNoThrow.bind( + this, + instance, + options, + ctx, + function(res) { + inner.importErrors(res); } - N.push(c); + ) + ).length; + var list = schema2.oneOf.map(function(v, i) { + var id = v.$id || v.id; + return id || v.title && JSON.stringify(v.title) || v["$ref"] && "<" + v["$ref"] + ">" || "[subschema " + i + "]"; + }); + if (count !== 1) { + if (options.nestedErrors) { + result.importErrors(inner); } - } else { - N = concatMap(n, function(el) { - return expand2(el, false); + result.addError({ + name: "oneOf", + argument: list, + message: "is not exactly one from " + list.join(",") }); } - for (var j = 0; j < N.length; j++) { - for (var k = 0; k < post.length; k++) { - var expansion = pre + N[j] + post[k]; - if (!isTop || isSequence || expansion) - expansions.push(expansion); - } + return result; + }; + validators.if = function validateIf(instance, schema2, options, ctx) { + if (instance === void 0) return null; + if (!helpers.isSchema(schema2.if)) throw new Error('Expected "if" keyword to be a schema'); + var ifValid = testSchemaNoThrow.call(this, instance, options, ctx, null, schema2.if); + var result = new ValidatorResult(instance, schema2, options, ctx); + var res; + if (ifValid) { + if (schema2.then === void 0) return; + if (!helpers.isSchema(schema2.then)) throw new Error('Expected "then" keyword to be a schema'); + res = this.validateSchema(instance, schema2.then, options, ctx.makeChild(schema2.then)); + result.importErrors(res); + } else { + if (schema2.else === void 0) return; + if (!helpers.isSchema(schema2.else)) throw new Error('Expected "else" keyword to be a schema'); + res = this.validateSchema(instance, schema2.else, options, ctx.makeChild(schema2.else)); + result.importErrors(res); + } + return result; + }; + function getEnumerableProperty(object, key) { + if (Object.hasOwnProperty.call(object, key)) return object[key]; + if 
(!(key in object)) return; + while (object = Object.getPrototypeOf(object)) { + if (Object.propertyIsEnumerable.call(object, key)) return object[key]; } - return expansions; } - } -}); - -// node_modules/minimatch/minimatch.js -var require_minimatch = __commonJS({ - "node_modules/minimatch/minimatch.js"(exports2, module2) { - module2.exports = minimatch; - minimatch.Minimatch = Minimatch; - var path13 = (function() { - try { - return require("path"); - } catch (e) { + validators.propertyNames = function validatePropertyNames(instance, schema2, options, ctx) { + if (!this.types.object(instance)) return; + var result = new ValidatorResult(instance, schema2, options, ctx); + var subschema = schema2.propertyNames !== void 0 ? schema2.propertyNames : {}; + if (!helpers.isSchema(subschema)) throw new SchemaError('Expected "propertyNames" to be a schema (object or boolean)'); + for (var property in instance) { + if (getEnumerableProperty(instance, property) !== void 0) { + var res = this.validateSchema(property, subschema, options, ctx.makeChild(subschema)); + result.importErrors(res); + } } - })() || { - sep: "/" + return result; }; - minimatch.sep = path13.sep; - var GLOBSTAR = minimatch.GLOBSTAR = Minimatch.GLOBSTAR = {}; - var expand2 = require_brace_expansion(); - var plTypes = { - "!": { open: "(?:(?!(?:", close: "))[^/]*?)" }, - "?": { open: "(?:", close: ")?" }, - "+": { open: "(?:", close: ")+" }, - "*": { open: "(?:", close: ")*" }, - "@": { open: "(?:", close: ")" } + validators.properties = function validateProperties(instance, schema2, options, ctx) { + if (!this.types.object(instance)) return; + var result = new ValidatorResult(instance, schema2, options, ctx); + var properties = schema2.properties || {}; + for (var property in properties) { + var subschema = properties[property]; + if (subschema === void 0) { + continue; + } else if (subschema === null) { + throw new SchemaError('Unexpected null, expected schema in "properties"'); + } + if (typeof options.preValidateProperty == "function") { + options.preValidateProperty(instance, property, subschema, options, ctx); + } + var prop = getEnumerableProperty(instance, property); + var res = this.validateSchema(prop, subschema, options, ctx.makeChild(subschema, property)); + if (res.instance !== result.instance[property]) result.instance[property] = res.instance; + result.importErrors(res); + } + return result; }; - var qmark = "[^/]"; - var star = qmark + "*?"; - var twoStarDot = "(?:(?!(?:\\/|^)(?:\\.{1,2})($|\\/)).)*?"; - var twoStarNoDot = "(?:(?!(?:\\/|^)\\.).)*?"; - var reSpecials = charSet("().*{}+?[]^$\\!"); - function charSet(s) { - return s.split("").reduce(function(set2, c) { - set2[c] = true; - return set2; - }, {}); - } - var slashSplit = /\/+/; - minimatch.filter = filter; - function filter(pattern, options) { - options = options || {}; - return function(p, i, list) { - return minimatch(p, pattern, options); - }; - } - function ext(a, b) { - b = b || {}; - var t = {}; - Object.keys(a).forEach(function(k) { - t[k] = a[k]; - }); - Object.keys(b).forEach(function(k) { - t[k] = b[k]; - }); - return t; - } - minimatch.defaults = function(def) { - if (!def || typeof def !== "object" || !Object.keys(def).length) { - return minimatch; - } - var orig = minimatch; - var m = function minimatch2(p, pattern, options) { - return orig(p, pattern, ext(def, options)); - }; - m.Minimatch = function Minimatch2(pattern, options) { - return new orig.Minimatch(pattern, ext(def, options)); - }; - m.Minimatch.defaults = function defaults(options) 
{ - return orig.defaults(ext(def, options)).Minimatch; - }; - m.filter = function filter2(pattern, options) { - return orig.filter(pattern, ext(def, options)); - }; - m.defaults = function defaults(options) { - return orig.defaults(ext(def, options)); - }; - m.makeRe = function makeRe2(pattern, options) { - return orig.makeRe(pattern, ext(def, options)); - }; - m.braceExpand = function braceExpand2(pattern, options) { - return orig.braceExpand(pattern, ext(def, options)); - }; - m.match = function(list, pattern, options) { - return orig.match(list, pattern, ext(def, options)); - }; - return m; - }; - Minimatch.defaults = function(def) { - return minimatch.defaults(def).Minimatch; - }; - function minimatch(p, pattern, options) { - assertValidPattern(pattern); - if (!options) options = {}; - if (!options.nocomment && pattern.charAt(0) === "#") { - return false; - } - return new Minimatch(pattern, options).match(p); - } - function Minimatch(pattern, options) { - if (!(this instanceof Minimatch)) { - return new Minimatch(pattern, options); - } - assertValidPattern(pattern); - if (!options) options = {}; - pattern = pattern.trim(); - if (!options.allowWindowsEscape && path13.sep !== "/") { - pattern = pattern.split(path13.sep).join("/"); - } - this.options = options; - this.set = []; - this.pattern = pattern; - this.regexp = null; - this.negate = false; - this.comment = false; - this.empty = false; - this.partial = !!options.partial; - this.make(); - } - Minimatch.prototype.debug = function() { - }; - Minimatch.prototype.make = make; - function make() { - var pattern = this.pattern; - var options = this.options; - if (!options.nocomment && pattern.charAt(0) === "#") { - this.comment = true; - return; - } - if (!pattern) { - this.empty = true; + function testAdditionalProperty(instance, schema2, options, ctx, property, result) { + if (!this.types.object(instance)) return; + if (schema2.properties && schema2.properties[property] !== void 0) { return; } - this.parseNegate(); - var set2 = this.globSet = this.braceExpand(); - if (options.debug) this.debug = function debug6() { - console.error.apply(console, arguments); - }; - this.debug(this.pattern, set2); - set2 = this.globParts = set2.map(function(s) { - return s.split(slashSplit); - }); - this.debug(this.pattern, set2); - set2 = set2.map(function(s, si, set3) { - return s.map(this.parse, this); - }, this); - this.debug(this.pattern, set2); - set2 = set2.filter(function(s) { - return s.indexOf(false) === -1; - }); - this.debug(this.pattern, set2); - this.set = set2; - } - Minimatch.prototype.parseNegate = parseNegate; - function parseNegate() { - var pattern = this.pattern; - var negate2 = false; - var options = this.options; - var negateOffset = 0; - if (options.nonegate) return; - for (var i = 0, l = pattern.length; i < l && pattern.charAt(i) === "!"; i++) { - negate2 = !negate2; - negateOffset++; + if (schema2.additionalProperties === false) { + result.addError({ + name: "additionalProperties", + argument: property, + message: "is not allowed to have the additional property " + JSON.stringify(property) + }); + } else { + var additionalProperties = schema2.additionalProperties || {}; + if (typeof options.preValidateProperty == "function") { + options.preValidateProperty(instance, property, additionalProperties, options, ctx); + } + var res = this.validateSchema(instance[property], additionalProperties, options, ctx.makeChild(additionalProperties, property)); + if (res.instance !== result.instance[property]) result.instance[property] = 
res.instance; + result.importErrors(res); } - if (negateOffset) this.pattern = pattern.substr(negateOffset); - this.negate = negate2; } - minimatch.braceExpand = function(pattern, options) { - return braceExpand(pattern, options); - }; - Minimatch.prototype.braceExpand = braceExpand; - function braceExpand(pattern, options) { - if (!options) { - if (this instanceof Minimatch) { - options = this.options; - } else { - options = {}; + validators.patternProperties = function validatePatternProperties(instance, schema2, options, ctx) { + if (!this.types.object(instance)) return; + var result = new ValidatorResult(instance, schema2, options, ctx); + var patternProperties = schema2.patternProperties || {}; + for (var property in instance) { + var test = true; + for (var pattern in patternProperties) { + var subschema = patternProperties[pattern]; + if (subschema === void 0) { + continue; + } else if (subschema === null) { + throw new SchemaError('Unexpected null, expected schema in "patternProperties"'); + } + try { + var regexp = new RegExp(pattern, "u"); + } catch (_e) { + regexp = new RegExp(pattern); + } + if (!regexp.test(property)) { + continue; + } + test = false; + if (typeof options.preValidateProperty == "function") { + options.preValidateProperty(instance, property, subschema, options, ctx); + } + var res = this.validateSchema(instance[property], subschema, options, ctx.makeChild(subschema, property)); + if (res.instance !== result.instance[property]) result.instance[property] = res.instance; + result.importErrors(res); + } + if (test) { + testAdditionalProperty.call(this, instance, schema2, options, ctx, property, result); } } - pattern = typeof pattern === "undefined" ? this.pattern : pattern; - assertValidPattern(pattern); - if (options.nobrace || !/\{(?:(?!\{).)*\}/.test(pattern)) { - return [pattern]; + return result; + }; + validators.additionalProperties = function validateAdditionalProperties(instance, schema2, options, ctx) { + if (!this.types.object(instance)) return; + if (schema2.patternProperties) { + return null; } - return expand2(pattern); - } - var MAX_PATTERN_LENGTH = 1024 * 64; - var assertValidPattern = function(pattern) { - if (typeof pattern !== "string") { - throw new TypeError("invalid pattern"); + var result = new ValidatorResult(instance, schema2, options, ctx); + for (var property in instance) { + testAdditionalProperty.call(this, instance, schema2, options, ctx, property, result); } - if (pattern.length > MAX_PATTERN_LENGTH) { - throw new TypeError("pattern is too long"); + return result; + }; + validators.minProperties = function validateMinProperties(instance, schema2, options, ctx) { + if (!this.types.object(instance)) return; + var result = new ValidatorResult(instance, schema2, options, ctx); + var keys = Object.keys(instance); + if (!(keys.length >= schema2.minProperties)) { + result.addError({ + name: "minProperties", + argument: schema2.minProperties, + message: "does not meet minimum property length of " + schema2.minProperties + }); } + return result; }; - Minimatch.prototype.parse = parse2; - var SUBPARSE = {}; - function parse2(pattern, isSub) { - assertValidPattern(pattern); - var options = this.options; - if (pattern === "**") { - if (!options.noglobstar) - return GLOBSTAR; - else - pattern = "*"; + validators.maxProperties = function validateMaxProperties(instance, schema2, options, ctx) { + if (!this.types.object(instance)) return; + var result = new ValidatorResult(instance, schema2, options, ctx); + var keys = Object.keys(instance); + if 
(!(keys.length <= schema2.maxProperties)) { + result.addError({ + name: "maxProperties", + argument: schema2.maxProperties, + message: "does not meet maximum property length of " + schema2.maxProperties + }); } - if (pattern === "") return ""; - var re = ""; - var hasMagic = !!options.nocase; - var escaping = false; - var patternListStack = []; - var negativeLists = []; - var stateChar; - var inClass = false; - var reClassStart = -1; - var classStart = -1; - var patternStart = pattern.charAt(0) === "." ? "" : options.dot ? "(?!(?:^|\\/)\\.{1,2}(?:$|\\/))" : "(?!\\.)"; + return result; + }; + validators.items = function validateItems(instance, schema2, options, ctx) { var self2 = this; - function clearStateChar() { - if (stateChar) { - switch (stateChar) { - case "*": - re += star; - hasMagic = true; - break; - case "?": - re += qmark; - hasMagic = true; - break; - default: - re += "\\" + stateChar; - break; - } - self2.debug("clearStateChar %j %j", stateChar, re); - stateChar = false; + if (!this.types.array(instance)) return; + if (schema2.items === void 0) return; + var result = new ValidatorResult(instance, schema2, options, ctx); + instance.every(function(value, i) { + if (Array.isArray(schema2.items)) { + var items = schema2.items[i] === void 0 ? schema2.additionalItems : schema2.items[i]; + } else { + var items = schema2.items; + } + if (items === void 0) { + return true; + } + if (items === false) { + result.addError({ + name: "items", + message: "additionalItems not permitted" + }); + return false; } + var res = self2.validateSchema(value, items, options, ctx.makeChild(items, i)); + if (res.instance !== result.instance[i]) result.instance[i] = res.instance; + result.importErrors(res); + return true; + }); + return result; + }; + validators.contains = function validateContains(instance, schema2, options, ctx) { + var self2 = this; + if (!this.types.array(instance)) return; + if (schema2.contains === void 0) return; + if (!helpers.isSchema(schema2.contains)) throw new Error('Expected "contains" keyword to be a schema'); + var result = new ValidatorResult(instance, schema2, options, ctx); + var count = instance.some(function(value, i) { + var res = self2.validateSchema(value, schema2.contains, options, ctx.makeChild(schema2.contains, i)); + return res.errors.length === 0; + }); + if (count === false) { + result.addError({ + name: "contains", + argument: schema2.contains, + message: "must contain an item matching given schema" + }); } - for (var i = 0, len = pattern.length, c; i < len && (c = pattern.charAt(i)); i++) { - this.debug("%s %s %s %j", pattern, i, re, c); - if (escaping && reSpecials[c]) { - re += "\\" + c; - escaping = false; - continue; + return result; + }; + validators.minimum = function validateMinimum(instance, schema2, options, ctx) { + if (!this.types.number(instance)) return; + var result = new ValidatorResult(instance, schema2, options, ctx); + if (schema2.exclusiveMinimum && schema2.exclusiveMinimum === true) { + if (!(instance > schema2.minimum)) { + result.addError({ + name: "minimum", + argument: schema2.minimum, + message: "must be greater than " + schema2.minimum + }); } - switch (c) { - /* istanbul ignore next */ - case "/": { - return false; - } - case "\\": - clearStateChar(); - escaping = true; - continue; - // the various stateChar values - // for the "extglob" stuff. - case "?": - case "*": - case "+": - case "@": - case "!": - this.debug("%s %s %s %j <-- stateChar", pattern, i, re, c); - if (inClass) { - this.debug(" in class"); - if (c === "!" 
&& i === classStart + 1) c = "^"; - re += c; - continue; - } - self2.debug("call clearStateChar %j", stateChar); - clearStateChar(); - stateChar = c; - if (options.noext) clearStateChar(); - continue; - case "(": - if (inClass) { - re += "("; - continue; - } - if (!stateChar) { - re += "\\("; - continue; - } - patternListStack.push({ - type: stateChar, - start: i - 1, - reStart: re.length, - open: plTypes[stateChar].open, - close: plTypes[stateChar].close - }); - re += stateChar === "!" ? "(?:(?!(?:" : "(?:"; - this.debug("plType %j %j", stateChar, re); - stateChar = false; - continue; - case ")": - if (inClass || !patternListStack.length) { - re += "\\)"; - continue; - } - clearStateChar(); - hasMagic = true; - var pl = patternListStack.pop(); - re += pl.close; - if (pl.type === "!") { - negativeLists.push(pl); - } - pl.reEnd = re.length; - continue; - case "|": - if (inClass || !patternListStack.length || escaping) { - re += "\\|"; - escaping = false; - continue; - } - clearStateChar(); - re += "|"; - continue; - // these are mostly the same in regexp and glob - case "[": - clearStateChar(); - if (inClass) { - re += "\\" + c; - continue; - } - inClass = true; - classStart = i; - reClassStart = re.length; - re += c; - continue; - case "]": - if (i === classStart + 1 || !inClass) { - re += "\\" + c; - escaping = false; - continue; - } - var cs = pattern.substring(classStart + 1, i); - try { - RegExp("[" + cs + "]"); - } catch (er) { - var sp = this.parse(cs, SUBPARSE); - re = re.substr(0, reClassStart) + "\\[" + sp[0] + "\\]"; - hasMagic = hasMagic || sp[1]; - inClass = false; - continue; - } - hasMagic = true; - inClass = false; - re += c; - continue; - default: - clearStateChar(); - if (escaping) { - escaping = false; - } else if (reSpecials[c] && !(c === "^" && inClass)) { - re += "\\"; - } - re += c; + } else { + if (!(instance >= schema2.minimum)) { + result.addError({ + name: "minimum", + argument: schema2.minimum, + message: "must be greater than or equal to " + schema2.minimum + }); } } - if (inClass) { - cs = pattern.substr(classStart + 1); - sp = this.parse(cs, SUBPARSE); - re = re.substr(0, reClassStart) + "\\[" + sp[0]; - hasMagic = hasMagic || sp[1]; + return result; + }; + validators.maximum = function validateMaximum(instance, schema2, options, ctx) { + if (!this.types.number(instance)) return; + var result = new ValidatorResult(instance, schema2, options, ctx); + if (schema2.exclusiveMaximum && schema2.exclusiveMaximum === true) { + if (!(instance < schema2.maximum)) { + result.addError({ + name: "maximum", + argument: schema2.maximum, + message: "must be less than " + schema2.maximum + }); + } + } else { + if (!(instance <= schema2.maximum)) { + result.addError({ + name: "maximum", + argument: schema2.maximum, + message: "must be less than or equal to " + schema2.maximum + }); + } } - for (pl = patternListStack.pop(); pl; pl = patternListStack.pop()) { - var tail = re.slice(pl.reStart + pl.open.length); - this.debug("setting tail", re, pl); - tail = tail.replace(/((?:\\{2}){0,64})(\\?)\|/g, function(_, $1, $2) { - if (!$2) { - $2 = "\\"; - } - return $1 + $1 + $2 + "|"; + return result; + }; + validators.exclusiveMinimum = function validateExclusiveMinimum(instance, schema2, options, ctx) { + if (typeof schema2.exclusiveMinimum === "boolean") return; + if (!this.types.number(instance)) return; + var result = new ValidatorResult(instance, schema2, options, ctx); + var valid3 = instance > schema2.exclusiveMinimum; + if (!valid3) { + result.addError({ + name: 
"exclusiveMinimum", + argument: schema2.exclusiveMinimum, + message: "must be strictly greater than " + schema2.exclusiveMinimum }); - this.debug("tail=%j\n %s", tail, tail, pl, re); - var t = pl.type === "*" ? star : pl.type === "?" ? qmark : "\\" + pl.type; - hasMagic = true; - re = re.slice(0, pl.reStart) + t + "\\(" + tail; } - clearStateChar(); - if (escaping) { - re += "\\\\"; + return result; + }; + validators.exclusiveMaximum = function validateExclusiveMaximum(instance, schema2, options, ctx) { + if (typeof schema2.exclusiveMaximum === "boolean") return; + if (!this.types.number(instance)) return; + var result = new ValidatorResult(instance, schema2, options, ctx); + var valid3 = instance < schema2.exclusiveMaximum; + if (!valid3) { + result.addError({ + name: "exclusiveMaximum", + argument: schema2.exclusiveMaximum, + message: "must be strictly less than " + schema2.exclusiveMaximum + }); } - var addPatternStart = false; - switch (re.charAt(0)) { - case "[": - case ".": - case "(": - addPatternStart = true; + return result; + }; + var validateMultipleOfOrDivisbleBy = function validateMultipleOfOrDivisbleBy2(instance, schema2, options, ctx, validationType, errorMessage) { + if (!this.types.number(instance)) return; + var validationArgument = schema2[validationType]; + if (validationArgument == 0) { + throw new SchemaError(validationType + " cannot be zero"); } - for (var n = negativeLists.length - 1; n > -1; n--) { - var nl = negativeLists[n]; - var nlBefore = re.slice(0, nl.reStart); - var nlFirst = re.slice(nl.reStart, nl.reEnd - 8); - var nlLast = re.slice(nl.reEnd - 8, nl.reEnd); - var nlAfter = re.slice(nl.reEnd); - nlLast += nlAfter; - var openParensBefore = nlBefore.split("(").length - 1; - var cleanAfter = nlAfter; - for (i = 0; i < openParensBefore; i++) { - cleanAfter = cleanAfter.replace(/\)[+*?]?/, ""); - } - nlAfter = cleanAfter; - var dollar = ""; - if (nlAfter === "" && isSub !== SUBPARSE) { - dollar = "$"; - } - var newRe = nlBefore + nlFirst + nlAfter + dollar + nlLast; - re = newRe; + var result = new ValidatorResult(instance, schema2, options, ctx); + var instanceDecimals = helpers.getDecimalPlaces(instance); + var divisorDecimals = helpers.getDecimalPlaces(validationArgument); + var maxDecimals = Math.max(instanceDecimals, divisorDecimals); + var multiplier = Math.pow(10, maxDecimals); + if (Math.round(instance * multiplier) % Math.round(validationArgument * multiplier) !== 0) { + result.addError({ + name: validationType, + argument: validationArgument, + message: errorMessage + JSON.stringify(validationArgument) + }); } - if (re !== "" && hasMagic) { - re = "(?=.)" + re; + return result; + }; + validators.multipleOf = function validateMultipleOf(instance, schema2, options, ctx) { + return validateMultipleOfOrDivisbleBy.call(this, instance, schema2, options, ctx, "multipleOf", "is not a multiple of (divisible by) "); + }; + validators.divisibleBy = function validateDivisibleBy(instance, schema2, options, ctx) { + return validateMultipleOfOrDivisbleBy.call(this, instance, schema2, options, ctx, "divisibleBy", "is not divisible by (multiple of) "); + }; + validators.required = function validateRequired(instance, schema2, options, ctx) { + var result = new ValidatorResult(instance, schema2, options, ctx); + if (instance === void 0 && schema2.required === true) { + result.addError({ + name: "required", + message: "is required" + }); + } else if (this.types.object(instance) && Array.isArray(schema2.required)) { + schema2.required.forEach(function(n) { + if 
(getEnumerableProperty(instance, n) === void 0) { + result.addError({ + name: "required", + argument: n, + message: "requires property " + JSON.stringify(n) + }); + } + }); } - if (addPatternStart) { - re = patternStart + re; + return result; + }; + validators.pattern = function validatePattern(instance, schema2, options, ctx) { + if (!this.types.string(instance)) return; + var result = new ValidatorResult(instance, schema2, options, ctx); + var pattern = schema2.pattern; + try { + var regexp = new RegExp(pattern, "u"); + } catch (_e) { + regexp = new RegExp(pattern); } - if (isSub === SUBPARSE) { - return [re, hasMagic]; + if (!instance.match(regexp)) { + result.addError({ + name: "pattern", + argument: schema2.pattern, + message: "does not match pattern " + JSON.stringify(schema2.pattern.toString()) + }); } - if (!hasMagic) { - return globUnescape(pattern); + return result; + }; + validators.format = function validateFormat(instance, schema2, options, ctx) { + if (instance === void 0) return; + var result = new ValidatorResult(instance, schema2, options, ctx); + if (!result.disableFormat && !helpers.isFormat(instance, schema2.format, this)) { + result.addError({ + name: "format", + argument: schema2.format, + message: "does not conform to the " + JSON.stringify(schema2.format) + " format" + }); } - var flags = options.nocase ? "i" : ""; - try { - var regExp = new RegExp("^" + re + "$", flags); - } catch (er) { - return new RegExp("$."); + return result; + }; + validators.minLength = function validateMinLength(instance, schema2, options, ctx) { + if (!this.types.string(instance)) return; + var result = new ValidatorResult(instance, schema2, options, ctx); + var hsp = instance.match(/[\uDC00-\uDFFF]/g); + var length = instance.length - (hsp ? hsp.length : 0); + if (!(length >= schema2.minLength)) { + result.addError({ + name: "minLength", + argument: schema2.minLength, + message: "does not meet minimum length of " + schema2.minLength + }); } - regExp._glob = pattern; - regExp._src = re; - return regExp; - } - minimatch.makeRe = function(pattern, options) { - return new Minimatch(pattern, options || {}).makeRe(); + return result; }; - Minimatch.prototype.makeRe = makeRe; - function makeRe() { - if (this.regexp || this.regexp === false) return this.regexp; - var set2 = this.set; - if (!set2.length) { - this.regexp = false; - return this.regexp; + validators.maxLength = function validateMaxLength(instance, schema2, options, ctx) { + if (!this.types.string(instance)) return; + var result = new ValidatorResult(instance, schema2, options, ctx); + var hsp = instance.match(/[\uDC00-\uDFFF]/g); + var length = instance.length - (hsp ? hsp.length : 0); + if (!(length <= schema2.maxLength)) { + result.addError({ + name: "maxLength", + argument: schema2.maxLength, + message: "does not meet maximum length of " + schema2.maxLength + }); } - var options = this.options; - var twoStar = options.noglobstar ? star : options.dot ? twoStarDot : twoStarNoDot; - var flags = options.nocase ? "i" : ""; - var re = set2.map(function(pattern) { - return pattern.map(function(p) { - return p === GLOBSTAR ? twoStar : typeof p === "string" ? regExpEscape(p) : p._src; - }).join("\\/"); - }).join("|"); - re = "^(?:" + re + ")$"; - if (this.negate) re = "^(?!" 
+ re + ").*$"; - try { - this.regexp = new RegExp(re, flags); - } catch (ex) { - this.regexp = false; + return result; + }; + validators.minItems = function validateMinItems(instance, schema2, options, ctx) { + if (!this.types.array(instance)) return; + var result = new ValidatorResult(instance, schema2, options, ctx); + if (!(instance.length >= schema2.minItems)) { + result.addError({ + name: "minItems", + argument: schema2.minItems, + message: "does not meet minimum length of " + schema2.minItems + }); } - return this.regexp; - } - minimatch.match = function(list, pattern, options) { - options = options || {}; - var mm = new Minimatch(pattern, options); - list = list.filter(function(f) { - return mm.match(f); - }); - if (mm.options.nonull && !list.length) { - list.push(pattern); + return result; + }; + validators.maxItems = function validateMaxItems(instance, schema2, options, ctx) { + if (!this.types.array(instance)) return; + var result = new ValidatorResult(instance, schema2, options, ctx); + if (!(instance.length <= schema2.maxItems)) { + result.addError({ + name: "maxItems", + argument: schema2.maxItems, + message: "does not meet maximum length of " + schema2.maxItems + }); } - return list; + return result; }; - Minimatch.prototype.match = function match(f, partial) { - if (typeof partial === "undefined") partial = this.partial; - this.debug("match", f, this.pattern); - if (this.comment) return false; - if (this.empty) return f === ""; - if (f === "/" && partial) return true; - var options = this.options; - if (path13.sep !== "/") { - f = f.split(path13.sep).join("/"); + function testArrays(v, i, a) { + var j, len = a.length; + for (j = i + 1, len; j < len; j++) { + if (helpers.deepCompareStrict(v, a[j])) { + return false; + } } - f = f.split(slashSplit); - this.debug(this.pattern, "split", f); - var set2 = this.set; - this.debug(this.pattern, "set", set2); - var filename; - var i; - for (i = f.length - 1; i >= 0; i--) { - filename = f[i]; - if (filename) break; + return true; + } + validators.uniqueItems = function validateUniqueItems(instance, schema2, options, ctx) { + if (schema2.uniqueItems !== true) return; + if (!this.types.array(instance)) return; + var result = new ValidatorResult(instance, schema2, options, ctx); + if (!instance.every(testArrays)) { + result.addError({ + name: "uniqueItems", + message: "contains duplicate item" + }); } - for (i = 0; i < set2.length; i++) { - var pattern = set2[i]; - var file = f; - if (options.matchBase && pattern.length === 1) { - file = [filename]; + return result; + }; + validators.dependencies = function validateDependencies(instance, schema2, options, ctx) { + if (!this.types.object(instance)) return; + var result = new ValidatorResult(instance, schema2, options, ctx); + for (var property in schema2.dependencies) { + if (instance[property] === void 0) { + continue; } - var hit = this.matchOne(file, pattern, partial); - if (hit) { - if (options.flipNegate) return true; - return !this.negate; + var dep = schema2.dependencies[property]; + var childContext = ctx.makeChild(dep, property); + if (typeof dep == "string") { + dep = [dep]; } - } - if (options.flipNegate) return false; - return this.negate; - }; - Minimatch.prototype.matchOne = function(file, pattern, partial) { - var options = this.options; - this.debug( - "matchOne", - { "this": this, file, pattern } - ); - this.debug("matchOne", file.length, pattern.length); - for (var fi = 0, pi = 0, fl = file.length, pl = pattern.length; fi < fl && pi < pl; fi++, pi++) { - 
this.debug("matchOne loop"); - var p = pattern[pi]; - var f = file[fi]; - this.debug(pattern, p, f); - if (p === false) return false; - if (p === GLOBSTAR) { - this.debug("GLOBSTAR", [pattern, p, f]); - var fr = fi; - var pr = pi + 1; - if (pr === pl) { - this.debug("** at the end"); - for (; fi < fl; fi++) { - if (file[fi] === "." || file[fi] === ".." || !options.dot && file[fi].charAt(0) === ".") return false; - } - return true; - } - while (fr < fl) { - var swallowee = file[fr]; - this.debug("\nglobstar while", file, fr, pattern, pr, swallowee); - if (this.matchOne(file.slice(fr), pattern.slice(pr), partial)) { - this.debug("globstar found match!", fr, fl, swallowee); - return true; - } else { - if (swallowee === "." || swallowee === ".." || !options.dot && swallowee.charAt(0) === ".") { - this.debug("dot detected!", file, fr, pattern, pr); - break; - } - this.debug("globstar swallow a segment, and continue"); - fr++; + if (Array.isArray(dep)) { + dep.forEach(function(prop) { + if (instance[prop] === void 0) { + result.addError({ + // FIXME there's two different "dependencies" errors here with slightly different outputs + // Can we make these the same? Or should we create different error types? + name: "dependencies", + argument: childContext.propertyPath, + message: "property " + prop + " not found, required by " + childContext.propertyPath + }); } - } - if (partial) { - this.debug("\n>>> no match, partial?", file, fr, pattern, pr); - if (fr === fl) return true; - } - return false; - } - var hit; - if (typeof p === "string") { - hit = f === p; - this.debug("string match", p, f, hit); + }); } else { - hit = f.match(p); - this.debug("pattern match", p, f, hit); + var res = this.validateSchema(instance, dep, options, childContext); + if (result.instance !== res.instance) result.instance = res.instance; + if (res && res.errors.length) { + result.addError({ + name: "dependencies", + argument: childContext.propertyPath, + message: "does not meet dependency required by " + childContext.propertyPath + }); + result.importErrors(res); + } } - if (!hit) return false; } - if (fi === fl && pi === pl) { - return true; - } else if (fi === fl) { - return partial; - } else if (pi === pl) { - return fi === fl - 1 && file[fi] === ""; + return result; + }; + validators["enum"] = function validateEnum(instance, schema2, options, ctx) { + if (instance === void 0) { + return null; } - throw new Error("wtf?"); + if (!Array.isArray(schema2["enum"])) { + throw new SchemaError("enum expects an array", schema2); + } + var result = new ValidatorResult(instance, schema2, options, ctx); + if (!schema2["enum"].some(helpers.deepCompareStrict.bind(null, instance))) { + result.addError({ + name: "enum", + argument: schema2["enum"], + message: "is not one of enum values: " + schema2["enum"].map(String).join(",") + }); + } + return result; }; - function globUnescape(s) { - return s.replace(/\\(.)/g, "$1"); - } - function regExpEscape(s) { - return s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, "\\$&"); - } + validators["const"] = function validateEnum(instance, schema2, options, ctx) { + if (instance === void 0) { + return null; + } + var result = new ValidatorResult(instance, schema2, options, ctx); + if (!helpers.deepCompareStrict(schema2["const"], instance)) { + result.addError({ + name: "const", + argument: schema2["const"], + message: "does not exactly match expected constant: " + schema2["const"] + }); + } + return result; + }; + validators.not = validators.disallow = function validateNot(instance, schema2, options, ctx) { + 
var self2 = this; + if (instance === void 0) return null; + var result = new ValidatorResult(instance, schema2, options, ctx); + var notTypes = schema2.not || schema2.disallow; + if (!notTypes) return null; + if (!Array.isArray(notTypes)) notTypes = [notTypes]; + notTypes.forEach(function(type2) { + if (self2.testType(instance, schema2, options, ctx, type2)) { + var id = type2 && (type2.$id || type2.id); + var schemaId = id || type2; + result.addError({ + name: "not", + argument: schemaId, + message: "is of prohibited type " + schemaId + }); + } + }); + return result; + }; + module2.exports = attribute; } }); -// node_modules/@actions/glob/lib/internal-path.js -var require_internal_path = __commonJS({ - "node_modules/@actions/glob/lib/internal-path.js"(exports2) { +// node_modules/jsonschema/lib/scan.js +var require_scan = __commonJS({ + "node_modules/jsonschema/lib/scan.js"(exports2, module2) { "use strict"; - var __createBinding2 = exports2 && exports2.__createBinding || (Object.create ? (function(o, m, k, k2) { - if (k2 === void 0) k2 = k; - var desc = Object.getOwnPropertyDescriptor(m, k); - if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { - desc = { enumerable: true, get: function() { - return m[k]; - } }; - } - Object.defineProperty(o, k2, desc); - }) : (function(o, m, k, k2) { - if (k2 === void 0) k2 = k; - o[k2] = m[k]; - })); - var __setModuleDefault2 = exports2 && exports2.__setModuleDefault || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); - }) : function(o, v) { - o["default"] = v; - }); - var __importStar2 = exports2 && exports2.__importStar || /* @__PURE__ */ (function() { - var ownKeys2 = function(o) { - ownKeys2 = Object.getOwnPropertyNames || function(o2) { - var ar = []; - for (var k in o2) if (Object.prototype.hasOwnProperty.call(o2, k)) ar[ar.length] = k; - return ar; - }; - return ownKeys2(o); - }; - return function(mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) { - for (var k = ownKeys2(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding2(result, mod, k[i]); + var urilib = require("url"); + var helpers = require_helpers(); + module2.exports.SchemaScanResult = SchemaScanResult; + function SchemaScanResult(found, ref) { + this.id = found; + this.ref = ref; + } + module2.exports.scan = function scan(base, schema2) { + function scanSchema(baseuri, schema3) { + if (!schema3 || typeof schema3 != "object") return; + if (schema3.$ref) { + var resolvedUri = urilib.resolve(baseuri, schema3.$ref); + ref[resolvedUri] = ref[resolvedUri] ? ref[resolvedUri] + 1 : 0; + return; } - __setModuleDefault2(result, mod); - return result; - }; - })(); - var __importDefault2 = exports2 && exports2.__importDefault || function(mod) { - return mod && mod.__esModule ? 
mod : { "default": mod }; - }; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.Path = void 0; - var path13 = __importStar2(require("path")); - var pathHelper = __importStar2(require_internal_path_helper()); - var assert_1 = __importDefault2(require("assert")); - var IS_WINDOWS = process.platform === "win32"; - var Path = class { - /** - * Constructs a Path - * @param itemPath Path or array of segments - */ - constructor(itemPath) { - this.segments = []; - if (typeof itemPath === "string") { - (0, assert_1.default)(itemPath, `Parameter 'itemPath' must not be empty`); - itemPath = pathHelper.safeTrimTrailingSeparator(itemPath); - if (!pathHelper.hasRoot(itemPath)) { - this.segments = itemPath.split(path13.sep); - } else { - let remaining = itemPath; - let dir = pathHelper.dirname(remaining); - while (dir !== remaining) { - const basename = path13.basename(remaining); - this.segments.unshift(basename); - remaining = dir; - dir = pathHelper.dirname(remaining); + var id = schema3.$id || schema3.id; + var ourBase = id ? urilib.resolve(baseuri, id) : baseuri; + if (ourBase) { + if (ourBase.indexOf("#") < 0) ourBase += "#"; + if (found[ourBase]) { + if (!helpers.deepCompareStrict(found[ourBase], schema3)) { + throw new Error("Schema <" + ourBase + "> already exists with different definition"); } - this.segments.unshift(remaining); + return found[ourBase]; } - } else { - (0, assert_1.default)(itemPath.length > 0, `Parameter 'itemPath' must not be an empty array`); - for (let i = 0; i < itemPath.length; i++) { - let segment = itemPath[i]; - (0, assert_1.default)(segment, `Parameter 'itemPath' must not contain any empty segments`); - segment = pathHelper.normalizeSeparators(itemPath[i]); - if (i === 0 && pathHelper.hasRoot(segment)) { - segment = pathHelper.safeTrimTrailingSeparator(segment); - (0, assert_1.default)(segment === pathHelper.dirname(segment), `Parameter 'itemPath' root segment contains information for multiple segments`); - this.segments.push(segment); - } else { - (0, assert_1.default)(!segment.includes(path13.sep), `Parameter 'itemPath' contains unexpected path separators`); - this.segments.push(segment); - } + found[ourBase] = schema3; + if (ourBase[ourBase.length - 1] == "#") { + found[ourBase.substring(0, ourBase.length - 1)] = schema3; } } + scanArray(ourBase + "/items", Array.isArray(schema3.items) ? schema3.items : [schema3.items]); + scanArray(ourBase + "/extends", Array.isArray(schema3.extends) ? 
schema3.extends : [schema3.extends]); + scanSchema(ourBase + "/additionalItems", schema3.additionalItems); + scanObject(ourBase + "/properties", schema3.properties); + scanSchema(ourBase + "/additionalProperties", schema3.additionalProperties); + scanObject(ourBase + "/definitions", schema3.definitions); + scanObject(ourBase + "/patternProperties", schema3.patternProperties); + scanObject(ourBase + "/dependencies", schema3.dependencies); + scanArray(ourBase + "/disallow", schema3.disallow); + scanArray(ourBase + "/allOf", schema3.allOf); + scanArray(ourBase + "/anyOf", schema3.anyOf); + scanArray(ourBase + "/oneOf", schema3.oneOf); + scanSchema(ourBase + "/not", schema3.not); } - /** - * Converts the path to it's string representation - */ - toString() { - let result = this.segments[0]; - let skipSlash = result.endsWith(path13.sep) || IS_WINDOWS && /^[A-Z]:$/i.test(result); - for (let i = 1; i < this.segments.length; i++) { - if (skipSlash) { - skipSlash = false; - } else { - result += path13.sep; - } - result += this.segments[i]; + function scanArray(baseuri, schemas) { + if (!Array.isArray(schemas)) return; + for (var i = 0; i < schemas.length; i++) { + scanSchema(baseuri + "/" + i, schemas[i]); + } + } + function scanObject(baseuri, schemas) { + if (!schemas || typeof schemas != "object") return; + for (var p in schemas) { + scanSchema(baseuri + "/" + p, schemas[p]); + } + } + var found = {}; + var ref = {}; + scanSchema(base, schema2); + return new SchemaScanResult(found, ref); + }; + } +}); + +// node_modules/jsonschema/lib/validator.js +var require_validator = __commonJS({ + "node_modules/jsonschema/lib/validator.js"(exports2, module2) { + "use strict"; + var urilib = require("url"); + var attribute = require_attribute(); + var helpers = require_helpers(); + var scanSchema = require_scan().scan; + var ValidatorResult = helpers.ValidatorResult; + var ValidatorResultError = helpers.ValidatorResultError; + var SchemaError = helpers.SchemaError; + var SchemaContext = helpers.SchemaContext; + var anonymousBase = "/"; + var Validator3 = function Validator4() { + this.customFormats = Object.create(Validator4.prototype.customFormats); + this.schemas = {}; + this.unresolvedRefs = []; + this.types = Object.create(types); + this.attributes = Object.create(attribute.validators); + }; + Validator3.prototype.customFormats = {}; + Validator3.prototype.schemas = null; + Validator3.prototype.types = null; + Validator3.prototype.attributes = null; + Validator3.prototype.unresolvedRefs = null; + Validator3.prototype.addSchema = function addSchema(schema2, base) { + var self2 = this; + if (!schema2) { + return null; + } + var scan = scanSchema(base || anonymousBase, schema2); + var ourUri = base || schema2.$id || schema2.id; + for (var uri in scan.id) { + this.schemas[uri] = scan.id[uri]; + } + for (var uri in scan.ref) { + this.unresolvedRefs.push(uri); + } + this.unresolvedRefs = this.unresolvedRefs.filter(function(uri2) { + return typeof self2.schemas[uri2] === "undefined"; + }); + return this.schemas[ourUri]; + }; + Validator3.prototype.addSubSchemaArray = function addSubSchemaArray(baseuri, schemas) { + if (!Array.isArray(schemas)) return; + for (var i = 0; i < schemas.length; i++) { + this.addSubSchema(baseuri, schemas[i]); + } + }; + Validator3.prototype.addSubSchemaObject = function addSubSchemaArray(baseuri, schemas) { + if (!schemas || typeof schemas != "object") return; + for (var p in schemas) { + this.addSubSchema(baseuri, schemas[p]); + } + }; + Validator3.prototype.setSchemas = function 
setSchemas(schemas) { + this.schemas = schemas; + }; + Validator3.prototype.getSchema = function getSchema(urn) { + return this.schemas[urn]; + }; + Validator3.prototype.validate = function validate(instance, schema2, options, ctx) { + if (typeof schema2 !== "boolean" && typeof schema2 !== "object" || schema2 === null) { + throw new SchemaError("Expected `schema` to be an object or boolean"); + } + if (!options) { + options = {}; + } + var id = schema2.$id || schema2.id; + var base = urilib.resolve(options.base || anonymousBase, id || ""); + if (!ctx) { + ctx = new SchemaContext(schema2, options, [], base, Object.create(this.schemas)); + if (!ctx.schemas[base]) { + ctx.schemas[base] = schema2; + } + var found = scanSchema(base, schema2); + for (var n in found.id) { + var sch = found.id[n]; + ctx.schemas[n] = sch; } + } + if (options.required && instance === void 0) { + var result = new ValidatorResult(instance, schema2, options, ctx); + result.addError("is required, but is undefined"); return result; } + var result = this.validateSchema(instance, schema2, options, ctx); + if (!result) { + throw new Error("Result undefined"); + } else if (options.throwAll && result.errors.length) { + throw new ValidatorResultError(result); + } + return result; }; - exports2.Path = Path; + function shouldResolve(schema2) { + var ref = typeof schema2 === "string" ? schema2 : schema2.$ref; + if (typeof ref == "string") return ref; + return false; + } + Validator3.prototype.validateSchema = function validateSchema(instance, schema2, options, ctx) { + var result = new ValidatorResult(instance, schema2, options, ctx); + if (typeof schema2 === "boolean") { + if (schema2 === true) { + schema2 = {}; + } else if (schema2 === false) { + schema2 = { type: [] }; + } + } else if (!schema2) { + throw new Error("schema is undefined"); + } + if (schema2["extends"]) { + if (Array.isArray(schema2["extends"])) { + var schemaobj = { schema: schema2, ctx }; + schema2["extends"].forEach(this.schemaTraverser.bind(this, schemaobj)); + schema2 = schemaobj.schema; + schemaobj.schema = null; + schemaobj.ctx = null; + schemaobj = null; + } else { + schema2 = helpers.deepMerge(schema2, this.superResolve(schema2["extends"], ctx)); + } + } + var switchSchema = shouldResolve(schema2); + if (switchSchema) { + var resolved = this.resolve(schema2, switchSchema, ctx); + var subctx = new SchemaContext(resolved.subschema, options, ctx.path, resolved.switchSchema, ctx.schemas); + return this.validateSchema(instance, resolved.subschema, options, subctx); + } + var skipAttributes = options && options.skipAttributes || []; + for (var key in schema2) { + if (!attribute.ignoreProperties[key] && skipAttributes.indexOf(key) < 0) { + var validatorErr = null; + var validator = this.attributes[key]; + if (validator) { + validatorErr = validator.call(this, instance, schema2, options, ctx); + } else if (options.allowUnknownAttributes === false) { + throw new SchemaError("Unsupported attribute: " + key, schema2); + } + if (validatorErr) { + result.importErrors(validatorErr); + } + } + } + if (typeof options.rewrite == "function") { + var value = options.rewrite.call(this, instance, schema2, options, ctx); + result.instance = value; + } + return result; + }; + Validator3.prototype.schemaTraverser = function schemaTraverser(schemaobj, s) { + schemaobj.schema = helpers.deepMerge(schemaobj.schema, this.superResolve(s, schemaobj.ctx)); + }; + Validator3.prototype.superResolve = function superResolve(schema2, ctx) { + var ref = shouldResolve(schema2); + if (ref) { 
+ return this.resolve(schema2, ref, ctx).subschema; + } + return schema2; + }; + Validator3.prototype.resolve = function resolve6(schema2, switchSchema, ctx) { + switchSchema = ctx.resolve(switchSchema); + if (ctx.schemas[switchSchema]) { + return { subschema: ctx.schemas[switchSchema], switchSchema }; + } + var parsed = urilib.parse(switchSchema); + var fragment = parsed && parsed.hash; + var document2 = fragment && fragment.length && switchSchema.substr(0, switchSchema.length - fragment.length); + if (!document2 || !ctx.schemas[document2]) { + throw new SchemaError("no such schema <" + switchSchema + ">", schema2); + } + var subschema = helpers.objectGetPath(ctx.schemas[document2], fragment.substr(1)); + if (subschema === void 0) { + throw new SchemaError("no such schema " + fragment + " located in <" + document2 + ">", schema2); + } + return { subschema, switchSchema }; + }; + Validator3.prototype.testType = function validateType(instance, schema2, options, ctx, type2) { + if (type2 === void 0) { + return; + } else if (type2 === null) { + throw new SchemaError('Unexpected null in "type" keyword'); + } + if (typeof this.types[type2] == "function") { + return this.types[type2].call(this, instance); + } + if (type2 && typeof type2 == "object") { + var res = this.validateSchema(instance, type2, options, ctx); + return res === void 0 || !(res && res.errors.length); + } + return true; + }; + var types = Validator3.prototype.types = {}; + types.string = function testString(instance) { + return typeof instance == "string"; + }; + types.number = function testNumber(instance) { + return typeof instance == "number" && isFinite(instance); + }; + types.integer = function testInteger(instance) { + return typeof instance == "number" && instance % 1 === 0; + }; + types.boolean = function testBoolean(instance) { + return typeof instance == "boolean"; + }; + types.array = function testArray(instance) { + return Array.isArray(instance); + }; + types["null"] = function testNull(instance) { + return instance === null; + }; + types.date = function testDate(instance) { + return instance instanceof Date; + }; + types.any = function testAny(instance) { + return true; + }; + types.object = function testObject(instance) { + return instance && typeof instance === "object" && !Array.isArray(instance) && !(instance instanceof Date); + }; + module2.exports = Validator3; } }); -// node_modules/@actions/glob/lib/internal-pattern.js -var require_internal_pattern = __commonJS({ - "node_modules/@actions/glob/lib/internal-pattern.js"(exports2) { +// node_modules/jsonschema/lib/index.js +var require_lib2 = __commonJS({ + "node_modules/jsonschema/lib/index.js"(exports2, module2) { + "use strict"; + var Validator3 = module2.exports.Validator = require_validator(); + module2.exports.ValidatorResult = require_helpers().ValidatorResult; + module2.exports.ValidatorResultError = require_helpers().ValidatorResultError; + module2.exports.ValidationError = require_helpers().ValidationError; + module2.exports.SchemaError = require_helpers().SchemaError; + module2.exports.SchemaScanResult = require_scan().SchemaScanResult; + module2.exports.scan = require_scan().scan; + module2.exports.validate = function(instance, schema2, options) { + var v = new Validator3(); + return v.validate(instance, schema2, options); + }; + } +}); + +// node_modules/@actions/glob/lib/internal-glob-options-helper.js +var require_internal_glob_options_helper = __commonJS({ + "node_modules/@actions/glob/lib/internal-glob-options-helper.js"(exports2) { "use 
strict"; var __createBinding2 = exports2 && exports2.__createBinding || (Object.create ? (function(o, m, k, k2) { if (k2 === void 0) k2 = k; @@ -48686,199 +48747,47 @@ var require_internal_pattern = __commonJS({ return result; }; })(); - var __importDefault2 = exports2 && exports2.__importDefault || function(mod) { - return mod && mod.__esModule ? mod : { "default": mod }; - }; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.Pattern = void 0; - var os3 = __importStar2(require("os")); - var path13 = __importStar2(require("path")); - var pathHelper = __importStar2(require_internal_path_helper()); - var assert_1 = __importDefault2(require("assert")); - var minimatch_1 = require_minimatch(); - var internal_match_kind_1 = require_internal_match_kind(); - var internal_path_1 = require_internal_path(); - var IS_WINDOWS = process.platform === "win32"; - var Pattern = class _Pattern { - constructor(patternOrNegate, isImplicitPattern = false, segments, homedir) { - this.negate = false; - let pattern; - if (typeof patternOrNegate === "string") { - pattern = patternOrNegate.trim(); - } else { - segments = segments || []; - (0, assert_1.default)(segments.length, `Parameter 'segments' must not empty`); - const root = _Pattern.getLiteral(segments[0]); - (0, assert_1.default)(root && pathHelper.hasAbsoluteRoot(root), `Parameter 'segments' first element must be a root path`); - pattern = new internal_path_1.Path(segments).toString().trim(); - if (patternOrNegate) { - pattern = `!${pattern}`; - } - } - while (pattern.startsWith("!")) { - this.negate = !this.negate; - pattern = pattern.substr(1).trim(); - } - pattern = _Pattern.fixupPattern(pattern, homedir); - this.segments = new internal_path_1.Path(pattern).segments; - this.trailingSeparator = pathHelper.normalizeSeparators(pattern).endsWith(path13.sep); - pattern = pathHelper.safeTrimTrailingSeparator(pattern); - let foundGlob = false; - const searchSegments = this.segments.map((x) => _Pattern.getLiteral(x)).filter((x) => !foundGlob && !(foundGlob = x === "")); - this.searchPath = new internal_path_1.Path(searchSegments).toString(); - this.rootRegExp = new RegExp(_Pattern.regExpEscape(searchSegments[0]), IS_WINDOWS ? "i" : ""); - this.isImplicitPattern = isImplicitPattern; - const minimatchOptions = { - dot: true, - nobrace: true, - nocase: IS_WINDOWS, - nocomment: true, - noext: true, - nonegate: true - }; - pattern = IS_WINDOWS ? pattern.replace(/\\/g, "/") : pattern; - this.minimatch = new minimatch_1.Minimatch(pattern, minimatchOptions); - } - /** - * Matches the pattern against the specified path - */ - match(itemPath) { - if (this.segments[this.segments.length - 1] === "**") { - itemPath = pathHelper.normalizeSeparators(itemPath); - if (!itemPath.endsWith(path13.sep) && this.isImplicitPattern === false) { - itemPath = `${itemPath}${path13.sep}`; - } - } else { - itemPath = pathHelper.safeTrimTrailingSeparator(itemPath); + exports2.getOptions = getOptions; + var core14 = __importStar2(require_core()); + function getOptions(copy) { + const result = { + followSymbolicLinks: true, + implicitDescendants: true, + matchDirectories: true, + omitBrokenSymbolicLinks: true, + excludeHiddenFiles: false + }; + if (copy) { + if (typeof copy.followSymbolicLinks === "boolean") { + result.followSymbolicLinks = copy.followSymbolicLinks; + core14.debug(`followSymbolicLinks '${result.followSymbolicLinks}'`); } - if (this.minimatch.match(itemPath)) { - return this.trailingSeparator ? 
internal_match_kind_1.MatchKind.Directory : internal_match_kind_1.MatchKind.All; + if (typeof copy.implicitDescendants === "boolean") { + result.implicitDescendants = copy.implicitDescendants; + core14.debug(`implicitDescendants '${result.implicitDescendants}'`); } - return internal_match_kind_1.MatchKind.None; - } - /** - * Indicates whether the pattern may match descendants of the specified path - */ - partialMatch(itemPath) { - itemPath = pathHelper.safeTrimTrailingSeparator(itemPath); - if (pathHelper.dirname(itemPath) === itemPath) { - return this.rootRegExp.test(itemPath); + if (typeof copy.matchDirectories === "boolean") { + result.matchDirectories = copy.matchDirectories; + core14.debug(`matchDirectories '${result.matchDirectories}'`); } - return this.minimatch.matchOne(itemPath.split(IS_WINDOWS ? /\\+/ : /\/+/), this.minimatch.set[0], true); - } - /** - * Escapes glob patterns within a path - */ - static globEscape(s) { - return (IS_WINDOWS ? s : s.replace(/\\/g, "\\\\")).replace(/(\[)(?=[^/]+\])/g, "[[]").replace(/\?/g, "[?]").replace(/\*/g, "[*]"); - } - /** - * Normalizes slashes and ensures absolute root - */ - static fixupPattern(pattern, homedir) { - (0, assert_1.default)(pattern, "pattern cannot be empty"); - const literalSegments = new internal_path_1.Path(pattern).segments.map((x) => _Pattern.getLiteral(x)); - (0, assert_1.default)(literalSegments.every((x, i) => (x !== "." || i === 0) && x !== ".."), `Invalid pattern '${pattern}'. Relative pathing '.' and '..' is not allowed.`); - (0, assert_1.default)(!pathHelper.hasRoot(pattern) || literalSegments[0], `Invalid pattern '${pattern}'. Root segment must not contain globs.`); - pattern = pathHelper.normalizeSeparators(pattern); - if (pattern === "." || pattern.startsWith(`.${path13.sep}`)) { - pattern = _Pattern.globEscape(process.cwd()) + pattern.substr(1); - } else if (pattern === "~" || pattern.startsWith(`~${path13.sep}`)) { - homedir = homedir || os3.homedir(); - (0, assert_1.default)(homedir, "Unable to determine HOME directory"); - (0, assert_1.default)(pathHelper.hasAbsoluteRoot(homedir), `Expected HOME directory to be a rooted path. Actual '${homedir}'`); - pattern = _Pattern.globEscape(homedir) + pattern.substr(1); - } else if (IS_WINDOWS && (pattern.match(/^[A-Z]:$/i) || pattern.match(/^[A-Z]:[^\\]/i))) { - let root = pathHelper.ensureAbsoluteRoot("C:\\dummy-root", pattern.substr(0, 2)); - if (pattern.length > 2 && !root.endsWith("\\")) { - root += "\\"; - } - pattern = _Pattern.globEscape(root) + pattern.substr(2); - } else if (IS_WINDOWS && (pattern === "\\" || pattern.match(/^\\[^\\]/))) { - let root = pathHelper.ensureAbsoluteRoot("C:\\dummy-root", "\\"); - if (!root.endsWith("\\")) { - root += "\\"; - } - pattern = _Pattern.globEscape(root) + pattern.substr(1); - } else { - pattern = pathHelper.ensureAbsoluteRoot(_Pattern.globEscape(process.cwd()), pattern); + if (typeof copy.omitBrokenSymbolicLinks === "boolean") { + result.omitBrokenSymbolicLinks = copy.omitBrokenSymbolicLinks; + core14.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`); } - return pathHelper.normalizeSeparators(pattern); - } - /** - * Attempts to unescape a pattern segment to create a literal path segment. - * Otherwise returns empty string. 
- */ - static getLiteral(segment) { - let literal = ""; - for (let i = 0; i < segment.length; i++) { - const c = segment[i]; - if (c === "\\" && !IS_WINDOWS && i + 1 < segment.length) { - literal += segment[++i]; - continue; - } else if (c === "*" || c === "?") { - return ""; - } else if (c === "[" && i + 1 < segment.length) { - let set2 = ""; - let closed = -1; - for (let i2 = i + 1; i2 < segment.length; i2++) { - const c2 = segment[i2]; - if (c2 === "\\" && !IS_WINDOWS && i2 + 1 < segment.length) { - set2 += segment[++i2]; - continue; - } else if (c2 === "]") { - closed = i2; - break; - } else { - set2 += c2; - } - } - if (closed >= 0) { - if (set2.length > 1) { - return ""; - } - if (set2) { - literal += set2; - i = closed; - continue; - } - } - } - literal += c; + if (typeof copy.excludeHiddenFiles === "boolean") { + result.excludeHiddenFiles = copy.excludeHiddenFiles; + core14.debug(`excludeHiddenFiles '${result.excludeHiddenFiles}'`); } - return literal; - } - /** - * Escapes regexp special characters - * https://javascript.info/regexp-escaping - */ - static regExpEscape(s) { - return s.replace(/[[\\^$.|?*+()]/g, "\\$&"); - } - }; - exports2.Pattern = Pattern; - } -}); - -// node_modules/@actions/glob/lib/internal-search-state.js -var require_internal_search_state = __commonJS({ - "node_modules/@actions/glob/lib/internal-search-state.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.SearchState = void 0; - var SearchState = class { - constructor(path13, level) { - this.path = path13; - this.level = level; } - }; - exports2.SearchState = SearchState; + return result; + } } }); -// node_modules/@actions/glob/lib/internal-globber.js -var require_internal_globber = __commonJS({ - "node_modules/@actions/glob/lib/internal-globber.js"(exports2) { +// node_modules/@actions/glob/lib/internal-path-helper.js +var require_internal_path_helper = __commonJS({ + "node_modules/@actions/glob/lib/internal-path-helper.js"(exports2) { "use strict"; var __createBinding2 = exports2 && exports2.__createBinding || (Object.create ? (function(o, m, k, k2) { if (k2 === void 0) k2 = k; @@ -48917,258 +48826,128 @@ var require_internal_globber = __commonJS({ return result; }; })(); - var __awaiter2 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { - function adopt(value) { - return value instanceof P ? value : new P(function(resolve6) { - resolve6(value); - }); - } - return new (P || (P = Promise))(function(resolve6, reject) { - function fulfilled(value) { - try { - step(generator.next(value)); - } catch (e) { - reject(e); - } - } - function rejected(value) { - try { - step(generator["throw"](value)); - } catch (e) { - reject(e); - } - } - function step(result) { - result.done ? resolve6(result.value) : adopt(result.value).then(fulfilled, rejected); - } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); + var __importDefault2 = exports2 && exports2.__importDefault || function(mod) { + return mod && mod.__esModule ? mod : { "default": mod }; }; - var __asyncValues2 = exports2 && exports2.__asyncValues || function(o) { - if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); - var m = o[Symbol.asyncIterator], i; - return m ? m.call(o) : (o = typeof __values === "function" ? 
__values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function() { - return this; - }, i); - function verb(n) { - i[n] = o[n] && function(v) { - return new Promise(function(resolve6, reject) { - v = o[n](v), settle(resolve6, reject, v.done, v.value); - }); - }; - } - function settle(resolve6, reject, d, v) { - Promise.resolve(v).then(function(v2) { - resolve6({ value: v2, done: d }); - }, reject); + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.dirname = dirname3; + exports2.ensureAbsoluteRoot = ensureAbsoluteRoot; + exports2.hasAbsoluteRoot = hasAbsoluteRoot; + exports2.hasRoot = hasRoot; + exports2.normalizeSeparators = normalizeSeparators; + exports2.safeTrimTrailingSeparator = safeTrimTrailingSeparator; + var path13 = __importStar2(require("path")); + var assert_1 = __importDefault2(require("assert")); + var IS_WINDOWS = process.platform === "win32"; + function dirname3(p) { + p = safeTrimTrailingSeparator(p); + if (IS_WINDOWS && /^\\\\[^\\]+(\\[^\\]+)?$/.test(p)) { + return p; } - }; - var __await2 = exports2 && exports2.__await || function(v) { - return this instanceof __await2 ? (this.v = v, this) : new __await2(v); - }; - var __asyncGenerator2 = exports2 && exports2.__asyncGenerator || function(thisArg, _arguments, generator) { - if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); - var g = generator.apply(thisArg, _arguments || []), i, q = []; - return i = Object.create((typeof AsyncIterator === "function" ? AsyncIterator : Object).prototype), verb("next"), verb("throw"), verb("return", awaitReturn), i[Symbol.asyncIterator] = function() { - return this; - }, i; - function awaitReturn(f) { - return function(v) { - return Promise.resolve(v).then(f, reject); - }; + let result = path13.dirname(p); + if (IS_WINDOWS && /^\\\\[^\\]+\\[^\\]+\\$/.test(result)) { + result = safeTrimTrailingSeparator(result); } - function verb(n, f) { - if (g[n]) { - i[n] = function(v) { - return new Promise(function(a, b) { - q.push([n, v, a, b]) > 1 || resume(n, v); - }); - }; - if (f) i[n] = f(i[n]); - } + return result; + } + function ensureAbsoluteRoot(root, itemPath) { + (0, assert_1.default)(root, `ensureAbsoluteRoot parameter 'root' must not be empty`); + (0, assert_1.default)(itemPath, `ensureAbsoluteRoot parameter 'itemPath' must not be empty`); + if (hasAbsoluteRoot(itemPath)) { + return itemPath; } - function resume(n, v) { - try { - step(g[n](v)); - } catch (e) { - settle(q[0][3], e); + if (IS_WINDOWS) { + if (itemPath.match(/^[A-Z]:[^\\/]|^[A-Z]:$/i)) { + let cwd = process.cwd(); + (0, assert_1.default)(cwd.match(/^[A-Z]:\\/i), `Expected current directory to start with an absolute drive root. Actual '${cwd}'`); + if (itemPath[0].toUpperCase() === cwd[0].toUpperCase()) { + if (itemPath.length === 2) { + return `${itemPath[0]}:\\${cwd.substr(3)}`; + } else { + if (!cwd.endsWith("\\")) { + cwd += "\\"; + } + return `${itemPath[0]}:\\${cwd.substr(3)}${itemPath.substr(2)}`; + } + } else { + return `${itemPath[0]}:\\${itemPath.substr(2)}`; + } + } else if (normalizeSeparators(itemPath).match(/^\\$|^\\[^\\]/)) { + const cwd = process.cwd(); + (0, assert_1.default)(cwd.match(/^[A-Z]:\\/i), `Expected current directory to start with an absolute drive root. Actual '${cwd}'`); + return `${cwd[0]}:\\${itemPath.substr(1)}`; } } - function step(r) { - r.value instanceof __await2 ? 
Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); - } - function fulfill(value) { - resume("next", value); - } - function reject(value) { - resume("throw", value); + (0, assert_1.default)(hasAbsoluteRoot(root), `ensureAbsoluteRoot parameter 'root' must have an absolute root`); + if (root.endsWith("/") || IS_WINDOWS && root.endsWith("\\")) { + } else { + root += path13.sep; } - function settle(f, v) { - if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); + return root + itemPath; + } + function hasAbsoluteRoot(itemPath) { + (0, assert_1.default)(itemPath, `hasAbsoluteRoot parameter 'itemPath' must not be empty`); + itemPath = normalizeSeparators(itemPath); + if (IS_WINDOWS) { + return itemPath.startsWith("\\\\") || /^[A-Z]:\\/i.test(itemPath); } - }; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.DefaultGlobber = void 0; - var core14 = __importStar2(require_core()); - var fs13 = __importStar2(require("fs")); - var globOptionsHelper = __importStar2(require_internal_glob_options_helper()); - var path13 = __importStar2(require("path")); - var patternHelper = __importStar2(require_internal_pattern_helper()); - var internal_match_kind_1 = require_internal_match_kind(); - var internal_pattern_1 = require_internal_pattern(); - var internal_search_state_1 = require_internal_search_state(); - var IS_WINDOWS = process.platform === "win32"; - var DefaultGlobber = class _DefaultGlobber { - constructor(options) { - this.patterns = []; - this.searchPaths = []; - this.options = globOptionsHelper.getOptions(options); + return itemPath.startsWith("/"); + } + function hasRoot(itemPath) { + (0, assert_1.default)(itemPath, `isRooted parameter 'itemPath' must not be empty`); + itemPath = normalizeSeparators(itemPath); + if (IS_WINDOWS) { + return itemPath.startsWith("\\") || /^[A-Z]:/i.test(itemPath); } - getSearchPaths() { - return this.searchPaths.slice(); + return itemPath.startsWith("/"); + } + function normalizeSeparators(p) { + p = p || ""; + if (IS_WINDOWS) { + p = p.replace(/\//g, "\\"); + const isUnc = /^\\\\+[^\\]/.test(p); + return (isUnc ? 
"\\" : "") + p.replace(/\\\\+/g, "\\"); } - glob() { - return __awaiter2(this, void 0, void 0, function* () { - var _a, e_1, _b, _c; - const result = []; - try { - for (var _d = true, _e = __asyncValues2(this.globGenerator()), _f; _f = yield _e.next(), _a = _f.done, !_a; _d = true) { - _c = _f.value; - _d = false; - const itemPath = _c; - result.push(itemPath); - } - } catch (e_1_1) { - e_1 = { error: e_1_1 }; - } finally { - try { - if (!_d && !_a && (_b = _e.return)) yield _b.call(_e); - } finally { - if (e_1) throw e_1.error; - } - } - return result; - }); + return p.replace(/\/\/+/g, "/"); + } + function safeTrimTrailingSeparator(p) { + if (!p) { + return ""; } - globGenerator() { - return __asyncGenerator2(this, arguments, function* globGenerator_1() { - const options = globOptionsHelper.getOptions(this.options); - const patterns = []; - for (const pattern of this.patterns) { - patterns.push(pattern); - if (options.implicitDescendants && (pattern.trailingSeparator || pattern.segments[pattern.segments.length - 1] !== "**")) { - patterns.push(new internal_pattern_1.Pattern(pattern.negate, true, pattern.segments.concat("**"))); - } - } - const stack = []; - for (const searchPath of patternHelper.getSearchPaths(patterns)) { - core14.debug(`Search path '${searchPath}'`); - try { - yield __await2(fs13.promises.lstat(searchPath)); - } catch (err) { - if (err.code === "ENOENT") { - continue; - } - throw err; - } - stack.unshift(new internal_search_state_1.SearchState(searchPath, 1)); - } - const traversalChain = []; - while (stack.length) { - const item = stack.pop(); - const match = patternHelper.match(patterns, item.path); - const partialMatch = !!match || patternHelper.partialMatch(patterns, item.path); - if (!match && !partialMatch) { - continue; - } - const stats = yield __await2( - _DefaultGlobber.stat(item, options, traversalChain) - // Broken symlink, or symlink cycle detected, or no longer exists - ); - if (!stats) { - continue; - } - if (options.excludeHiddenFiles && path13.basename(item.path).match(/^\./)) { - continue; - } - if (stats.isDirectory()) { - if (match & internal_match_kind_1.MatchKind.Directory && options.matchDirectories) { - yield yield __await2(item.path); - } else if (!partialMatch) { - continue; - } - const childLevel = item.level + 1; - const childItems = (yield __await2(fs13.promises.readdir(item.path))).map((x) => new internal_search_state_1.SearchState(path13.join(item.path, x), childLevel)); - stack.push(...childItems.reverse()); - } else if (match & internal_match_kind_1.MatchKind.File) { - yield yield __await2(item.path); - } - } - }); + p = normalizeSeparators(p); + if (!p.endsWith(path13.sep)) { + return p; } - /** - * Constructs a DefaultGlobber - */ - static create(patterns, options) { - return __awaiter2(this, void 0, void 0, function* () { - const result = new _DefaultGlobber(options); - if (IS_WINDOWS) { - patterns = patterns.replace(/\r\n/g, "\n"); - patterns = patterns.replace(/\r/g, "\n"); - } - const lines = patterns.split("\n").map((x) => x.trim()); - for (const line of lines) { - if (!line || line.startsWith("#")) { - continue; - } else { - result.patterns.push(new internal_pattern_1.Pattern(line)); - } - } - result.searchPaths.push(...patternHelper.getSearchPaths(result.patterns)); - return result; - }); + if (p === path13.sep) { + return p; } - static stat(item, options, traversalChain) { - return __awaiter2(this, void 0, void 0, function* () { - let stats; - if (options.followSymbolicLinks) { - try { - stats = yield 
fs13.promises.stat(item.path); - } catch (err) { - if (err.code === "ENOENT") { - if (options.omitBrokenSymbolicLinks) { - core14.debug(`Broken symlink '${item.path}'`); - return void 0; - } - throw new Error(`No information found for the path '${item.path}'. This may indicate a broken symbolic link.`); - } - throw err; - } - } else { - stats = yield fs13.promises.lstat(item.path); - } - if (stats.isDirectory() && options.followSymbolicLinks) { - const realPath = yield fs13.promises.realpath(item.path); - while (traversalChain.length >= item.level) { - traversalChain.pop(); - } - if (traversalChain.some((x) => x === realPath)) { - core14.debug(`Symlink cycle detected for path '${item.path}' and realpath '${realPath}'`); - return void 0; - } - traversalChain.push(realPath); - } - return stats; - }); + if (IS_WINDOWS && /^[A-Z]:\\$/i.test(p)) { + return p; } - }; - exports2.DefaultGlobber = DefaultGlobber; + return p.substr(0, p.length - 1); + } } }); -// node_modules/@actions/glob/lib/internal-hash-files.js -var require_internal_hash_files = __commonJS({ - "node_modules/@actions/glob/lib/internal-hash-files.js"(exports2) { +// node_modules/@actions/glob/lib/internal-match-kind.js +var require_internal_match_kind = __commonJS({ + "node_modules/@actions/glob/lib/internal-match-kind.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.MatchKind = void 0; + var MatchKind; + (function(MatchKind2) { + MatchKind2[MatchKind2["None"] = 0] = "None"; + MatchKind2[MatchKind2["Directory"] = 1] = "Directory"; + MatchKind2[MatchKind2["File"] = 2] = "File"; + MatchKind2[MatchKind2["All"] = 3] = "All"; + })(MatchKind || (exports2.MatchKind = MatchKind = {})); + } +}); + +// node_modules/@actions/glob/lib/internal-pattern-helper.js +var require_internal_pattern_helper = __commonJS({ + "node_modules/@actions/glob/lib/internal-pattern-helper.js"(exports2) { "use strict"; var __createBinding2 = exports2 && exports2.__createBinding || (Object.create ? (function(o, m, k, k2) { if (k2 === void 0) k2 = k; @@ -49207,1360 +48986,854 @@ var require_internal_hash_files = __commonJS({ return result; }; })(); - var __awaiter2 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { - function adopt(value) { - return value instanceof P ? value : new P(function(resolve6) { - resolve6(value); - }); + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.getSearchPaths = getSearchPaths; + exports2.match = match; + exports2.partialMatch = partialMatch; + var pathHelper = __importStar2(require_internal_path_helper()); + var internal_match_kind_1 = require_internal_match_kind(); + var IS_WINDOWS = process.platform === "win32"; + function getSearchPaths(patterns) { + patterns = patterns.filter((x) => !x.negate); + const searchPathMap = {}; + for (const pattern of patterns) { + const key = IS_WINDOWS ? pattern.searchPath.toUpperCase() : pattern.searchPath; + searchPathMap[key] = "candidate"; } - return new (P || (P = Promise))(function(resolve6, reject) { - function fulfilled(value) { - try { - step(generator.next(value)); - } catch (e) { - reject(e); - } + const result = []; + for (const pattern of patterns) { + const key = IS_WINDOWS ? 
pattern.searchPath.toUpperCase() : pattern.searchPath; + if (searchPathMap[key] === "included") { + continue; } - function rejected(value) { - try { - step(generator["throw"](value)); - } catch (e) { - reject(e); + let foundAncestor = false; + let tempKey = key; + let parent = pathHelper.dirname(tempKey); + while (parent !== tempKey) { + if (searchPathMap[parent]) { + foundAncestor = true; + break; } + tempKey = parent; + parent = pathHelper.dirname(tempKey); } - function step(result) { - result.done ? resolve6(result.value) : adopt(result.value).then(fulfilled, rejected); + if (!foundAncestor) { + result.push(pattern.searchPath); + searchPathMap[key] = "included"; } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); - }; - var __asyncValues2 = exports2 && exports2.__asyncValues || function(o) { - if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); - var m = o[Symbol.asyncIterator], i; - return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function() { - return this; - }, i); - function verb(n) { - i[n] = o[n] && function(v) { - return new Promise(function(resolve6, reject) { - v = o[n](v), settle(resolve6, reject, v.done, v.value); - }); - }; - } - function settle(resolve6, reject, d, v) { - Promise.resolve(v).then(function(v2) { - resolve6({ value: v2, done: d }); - }, reject); } - }; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.hashFiles = hashFiles; - var crypto2 = __importStar2(require("crypto")); - var core14 = __importStar2(require_core()); - var fs13 = __importStar2(require("fs")); - var stream2 = __importStar2(require("stream")); - var util = __importStar2(require("util")); - var path13 = __importStar2(require("path")); - function hashFiles(globber_1, currentWorkspace_1) { - return __awaiter2(this, arguments, void 0, function* (globber, currentWorkspace, verbose = false) { - var _a, e_1, _b, _c; - var _d; - const writeDelegate = verbose ? core14.info : core14.debug; - let hasMatch = false; - const githubWorkspace = currentWorkspace ? currentWorkspace : (_d = process.env["GITHUB_WORKSPACE"]) !== null && _d !== void 0 ? 
_d : process.cwd(); - const result = crypto2.createHash("sha256"); - let count = 0; - try { - for (var _e = true, _f = __asyncValues2(globber.globGenerator()), _g; _g = yield _f.next(), _a = _g.done, !_a; _e = true) { - _c = _g.value; - _e = false; - const file = _c; - writeDelegate(file); - if (!file.startsWith(`${githubWorkspace}${path13.sep}`)) { - writeDelegate(`Ignore '${file}' since it is not under GITHUB_WORKSPACE.`); - continue; - } - if (fs13.statSync(file).isDirectory()) { - writeDelegate(`Skip directory '${file}'.`); - continue; - } - const hash2 = crypto2.createHash("sha256"); - const pipeline = util.promisify(stream2.pipeline); - yield pipeline(fs13.createReadStream(file), hash2); - result.write(hash2.digest()); - count++; - if (!hasMatch) { - hasMatch = true; - } - } - } catch (e_1_1) { - e_1 = { error: e_1_1 }; - } finally { - try { - if (!_e && !_a && (_b = _f.return)) yield _b.call(_f); - } finally { - if (e_1) throw e_1.error; - } - } - result.end(); - if (hasMatch) { - writeDelegate(`Found ${count} files to hash.`); - return result.digest("hex"); + return result; + } + function match(patterns, itemPath) { + let result = internal_match_kind_1.MatchKind.None; + for (const pattern of patterns) { + if (pattern.negate) { + result &= ~pattern.match(itemPath); } else { - writeDelegate(`No matches found for glob`); - return ""; + result |= pattern.match(itemPath); } - }); + } + return result; + } + function partialMatch(patterns, itemPath) { + return patterns.some((x) => !x.negate && x.partialMatch(itemPath)); } } }); -// node_modules/@actions/glob/lib/glob.js -var require_glob = __commonJS({ - "node_modules/@actions/glob/lib/glob.js"(exports2) { - "use strict"; - var __awaiter2 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { - function adopt(value) { - return value instanceof P ? value : new P(function(resolve6) { - resolve6(value); - }); +// node_modules/concat-map/index.js +var require_concat_map = __commonJS({ + "node_modules/concat-map/index.js"(exports2, module2) { + module2.exports = function(xs, fn) { + var res = []; + for (var i = 0; i < xs.length; i++) { + var x = fn(xs[i], i); + if (isArray(x)) res.push.apply(res, x); + else res.push(x); } - return new (P || (P = Promise))(function(resolve6, reject) { - function fulfilled(value) { - try { - step(generator.next(value)); - } catch (e) { - reject(e); - } - } - function rejected(value) { - try { - step(generator["throw"](value)); - } catch (e) { - reject(e); - } - } - function step(result) { - result.done ? 
resolve6(result.value) : adopt(result.value).then(fulfilled, rejected); - } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); + return res; + }; + var isArray = Array.isArray || function(xs) { + return Object.prototype.toString.call(xs) === "[object Array]"; }; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.create = create; - exports2.hashFiles = hashFiles; - var internal_globber_1 = require_internal_globber(); - var internal_hash_files_1 = require_internal_hash_files(); - function create(patterns, options) { - return __awaiter2(this, void 0, void 0, function* () { - return yield internal_globber_1.DefaultGlobber.create(patterns, options); - }); - } - function hashFiles(patterns_1) { - return __awaiter2(this, arguments, void 0, function* (patterns, currentWorkspace = "", options, verbose = false) { - let followSymbolicLinks = true; - if (options && typeof options.followSymbolicLinks === "boolean") { - followSymbolicLinks = options.followSymbolicLinks; - } - const globber = yield create(patterns, { followSymbolicLinks }); - return (0, internal_hash_files_1.hashFiles)(globber, currentWorkspace, verbose); - }); - } } }); -// node_modules/@actions/cache/node_modules/semver/semver.js -var require_semver3 = __commonJS({ - "node_modules/@actions/cache/node_modules/semver/semver.js"(exports2, module2) { - exports2 = module2.exports = SemVer; - var debug6; - if (typeof process === "object" && process.env && process.env.NODE_DEBUG && /\bsemver\b/i.test(process.env.NODE_DEBUG)) { - debug6 = function() { - var args = Array.prototype.slice.call(arguments, 0); - args.unshift("SEMVER"); - console.log.apply(console, args); - }; - } else { - debug6 = function() { +// node_modules/balanced-match/index.js +var require_balanced_match = __commonJS({ + "node_modules/balanced-match/index.js"(exports2, module2) { + "use strict"; + module2.exports = balanced; + function balanced(a, b, str2) { + if (a instanceof RegExp) a = maybeMatch(a, str2); + if (b instanceof RegExp) b = maybeMatch(b, str2); + var r = range(a, b, str2); + return r && { + start: r[0], + end: r[1], + pre: str2.slice(0, r[0]), + body: str2.slice(r[0] + a.length, r[1]), + post: str2.slice(r[1] + b.length) }; } - exports2.SEMVER_SPEC_VERSION = "2.0.0"; - var MAX_LENGTH = 256; - var MAX_SAFE_INTEGER = Number.MAX_SAFE_INTEGER || /* istanbul ignore next */ - 9007199254740991; - var MAX_SAFE_COMPONENT_LENGTH = 16; - var MAX_SAFE_BUILD_LENGTH = MAX_LENGTH - 6; - var re = exports2.re = []; - var safeRe = exports2.safeRe = []; - var src = exports2.src = []; - var t = exports2.tokens = {}; - var R = 0; - function tok(n) { - t[n] = R++; + function maybeMatch(reg, str2) { + var m = str2.match(reg); + return m ? 
m[0] : null; } - var LETTERDASHNUMBER = "[a-zA-Z0-9-]"; - var safeRegexReplacements = [ - ["\\s", 1], - ["\\d", MAX_LENGTH], - [LETTERDASHNUMBER, MAX_SAFE_BUILD_LENGTH] - ]; - function makeSafeRe(value) { - for (var i2 = 0; i2 < safeRegexReplacements.length; i2++) { - var token = safeRegexReplacements[i2][0]; - var max = safeRegexReplacements[i2][1]; - value = value.split(token + "*").join(token + "{0," + max + "}").split(token + "+").join(token + "{1," + max + "}"); + balanced.range = range; + function range(a, b, str2) { + var begs, beg, left, right, result; + var ai = str2.indexOf(a); + var bi = str2.indexOf(b, ai + 1); + var i = ai; + if (ai >= 0 && bi > 0) { + begs = []; + left = str2.length; + while (i >= 0 && !result) { + if (i == ai) { + begs.push(i); + ai = str2.indexOf(a, i + 1); + } else if (begs.length == 1) { + result = [begs.pop(), bi]; + } else { + beg = begs.pop(); + if (beg < left) { + left = beg; + right = bi; + } + bi = str2.indexOf(b, i + 1); + } + i = ai < bi && ai >= 0 ? ai : bi; + } + if (begs.length) { + result = [left, right]; + } } - return value; + return result; } - tok("NUMERICIDENTIFIER"); - src[t.NUMERICIDENTIFIER] = "0|[1-9]\\d*"; - tok("NUMERICIDENTIFIERLOOSE"); - src[t.NUMERICIDENTIFIERLOOSE] = "\\d+"; - tok("NONNUMERICIDENTIFIER"); - src[t.NONNUMERICIDENTIFIER] = "\\d*[a-zA-Z-]" + LETTERDASHNUMBER + "*"; - tok("MAINVERSION"); - src[t.MAINVERSION] = "(" + src[t.NUMERICIDENTIFIER] + ")\\.(" + src[t.NUMERICIDENTIFIER] + ")\\.(" + src[t.NUMERICIDENTIFIER] + ")"; - tok("MAINVERSIONLOOSE"); - src[t.MAINVERSIONLOOSE] = "(" + src[t.NUMERICIDENTIFIERLOOSE] + ")\\.(" + src[t.NUMERICIDENTIFIERLOOSE] + ")\\.(" + src[t.NUMERICIDENTIFIERLOOSE] + ")"; - tok("PRERELEASEIDENTIFIER"); - src[t.PRERELEASEIDENTIFIER] = "(?:" + src[t.NUMERICIDENTIFIER] + "|" + src[t.NONNUMERICIDENTIFIER] + ")"; - tok("PRERELEASEIDENTIFIERLOOSE"); - src[t.PRERELEASEIDENTIFIERLOOSE] = "(?:" + src[t.NUMERICIDENTIFIERLOOSE] + "|" + src[t.NONNUMERICIDENTIFIER] + ")"; - tok("PRERELEASE"); - src[t.PRERELEASE] = "(?:-(" + src[t.PRERELEASEIDENTIFIER] + "(?:\\." + src[t.PRERELEASEIDENTIFIER] + ")*))"; - tok("PRERELEASELOOSE"); - src[t.PRERELEASELOOSE] = "(?:-?(" + src[t.PRERELEASEIDENTIFIERLOOSE] + "(?:\\." + src[t.PRERELEASEIDENTIFIERLOOSE] + ")*))"; - tok("BUILDIDENTIFIER"); - src[t.BUILDIDENTIFIER] = LETTERDASHNUMBER + "+"; - tok("BUILD"); - src[t.BUILD] = "(?:\\+(" + src[t.BUILDIDENTIFIER] + "(?:\\." + src[t.BUILDIDENTIFIER] + ")*))"; - tok("FULL"); - tok("FULLPLAIN"); - src[t.FULLPLAIN] = "v?" + src[t.MAINVERSION] + src[t.PRERELEASE] + "?" + src[t.BUILD] + "?"; - src[t.FULL] = "^" + src[t.FULLPLAIN] + "$"; - tok("LOOSEPLAIN"); - src[t.LOOSEPLAIN] = "[v=\\s]*" + src[t.MAINVERSIONLOOSE] + src[t.PRERELEASELOOSE] + "?" + src[t.BUILD] + "?"; - tok("LOOSE"); - src[t.LOOSE] = "^" + src[t.LOOSEPLAIN] + "$"; - tok("GTLT"); - src[t.GTLT] = "((?:<|>)?=?)"; - tok("XRANGEIDENTIFIERLOOSE"); - src[t.XRANGEIDENTIFIERLOOSE] = src[t.NUMERICIDENTIFIERLOOSE] + "|x|X|\\*"; - tok("XRANGEIDENTIFIER"); - src[t.XRANGEIDENTIFIER] = src[t.NUMERICIDENTIFIER] + "|x|X|\\*"; - tok("XRANGEPLAIN"); - src[t.XRANGEPLAIN] = "[v=\\s]*(" + src[t.XRANGEIDENTIFIER] + ")(?:\\.(" + src[t.XRANGEIDENTIFIER] + ")(?:\\.(" + src[t.XRANGEIDENTIFIER] + ")(?:" + src[t.PRERELEASE] + ")?" + src[t.BUILD] + "?)?)?"; - tok("XRANGEPLAINLOOSE"); - src[t.XRANGEPLAINLOOSE] = "[v=\\s]*(" + src[t.XRANGEIDENTIFIERLOOSE] + ")(?:\\.(" + src[t.XRANGEIDENTIFIERLOOSE] + ")(?:\\.(" + src[t.XRANGEIDENTIFIERLOOSE] + ")(?:" + src[t.PRERELEASELOOSE] + ")?" 
+ src[t.BUILD] + "?)?)?"; - tok("XRANGE"); - src[t.XRANGE] = "^" + src[t.GTLT] + "\\s*" + src[t.XRANGEPLAIN] + "$"; - tok("XRANGELOOSE"); - src[t.XRANGELOOSE] = "^" + src[t.GTLT] + "\\s*" + src[t.XRANGEPLAINLOOSE] + "$"; - tok("COERCE"); - src[t.COERCE] = "(^|[^\\d])(\\d{1," + MAX_SAFE_COMPONENT_LENGTH + "})(?:\\.(\\d{1," + MAX_SAFE_COMPONENT_LENGTH + "}))?(?:\\.(\\d{1," + MAX_SAFE_COMPONENT_LENGTH + "}))?(?:$|[^\\d])"; - tok("COERCERTL"); - re[t.COERCERTL] = new RegExp(src[t.COERCE], "g"); - safeRe[t.COERCERTL] = new RegExp(makeSafeRe(src[t.COERCE]), "g"); - tok("LONETILDE"); - src[t.LONETILDE] = "(?:~>?)"; - tok("TILDETRIM"); - src[t.TILDETRIM] = "(\\s*)" + src[t.LONETILDE] + "\\s+"; - re[t.TILDETRIM] = new RegExp(src[t.TILDETRIM], "g"); - safeRe[t.TILDETRIM] = new RegExp(makeSafeRe(src[t.TILDETRIM]), "g"); - var tildeTrimReplace = "$1~"; - tok("TILDE"); - src[t.TILDE] = "^" + src[t.LONETILDE] + src[t.XRANGEPLAIN] + "$"; - tok("TILDELOOSE"); - src[t.TILDELOOSE] = "^" + src[t.LONETILDE] + src[t.XRANGEPLAINLOOSE] + "$"; - tok("LONECARET"); - src[t.LONECARET] = "(?:\\^)"; - tok("CARETTRIM"); - src[t.CARETTRIM] = "(\\s*)" + src[t.LONECARET] + "\\s+"; - re[t.CARETTRIM] = new RegExp(src[t.CARETTRIM], "g"); - safeRe[t.CARETTRIM] = new RegExp(makeSafeRe(src[t.CARETTRIM]), "g"); - var caretTrimReplace = "$1^"; - tok("CARET"); - src[t.CARET] = "^" + src[t.LONECARET] + src[t.XRANGEPLAIN] + "$"; - tok("CARETLOOSE"); - src[t.CARETLOOSE] = "^" + src[t.LONECARET] + src[t.XRANGEPLAINLOOSE] + "$"; - tok("COMPARATORLOOSE"); - src[t.COMPARATORLOOSE] = "^" + src[t.GTLT] + "\\s*(" + src[t.LOOSEPLAIN] + ")$|^$"; - tok("COMPARATOR"); - src[t.COMPARATOR] = "^" + src[t.GTLT] + "\\s*(" + src[t.FULLPLAIN] + ")$|^$"; - tok("COMPARATORTRIM"); - src[t.COMPARATORTRIM] = "(\\s*)" + src[t.GTLT] + "\\s*(" + src[t.LOOSEPLAIN] + "|" + src[t.XRANGEPLAIN] + ")"; - re[t.COMPARATORTRIM] = new RegExp(src[t.COMPARATORTRIM], "g"); - safeRe[t.COMPARATORTRIM] = new RegExp(makeSafeRe(src[t.COMPARATORTRIM]), "g"); - var comparatorTrimReplace = "$1$2$3"; - tok("HYPHENRANGE"); - src[t.HYPHENRANGE] = "^\\s*(" + src[t.XRANGEPLAIN] + ")\\s+-\\s+(" + src[t.XRANGEPLAIN] + ")\\s*$"; - tok("HYPHENRANGELOOSE"); - src[t.HYPHENRANGELOOSE] = "^\\s*(" + src[t.XRANGEPLAINLOOSE] + ")\\s+-\\s+(" + src[t.XRANGEPLAINLOOSE] + ")\\s*$"; - tok("STAR"); - src[t.STAR] = "(<|>)?=?\\s*\\*"; - for (i = 0; i < R; i++) { - debug6(i, src[i]); - if (!re[i]) { - re[i] = new RegExp(src[i]); - safeRe[i] = new RegExp(makeSafeRe(src[i])); - } + } +}); + +// node_modules/brace-expansion/index.js +var require_brace_expansion = __commonJS({ + "node_modules/brace-expansion/index.js"(exports2, module2) { + var concatMap = require_concat_map(); + var balanced = require_balanced_match(); + module2.exports = expandTop; + var escSlash = "\0SLASH" + Math.random() + "\0"; + var escOpen = "\0OPEN" + Math.random() + "\0"; + var escClose = "\0CLOSE" + Math.random() + "\0"; + var escComma = "\0COMMA" + Math.random() + "\0"; + var escPeriod = "\0PERIOD" + Math.random() + "\0"; + function numeric(str2) { + return parseInt(str2, 10) == str2 ? parseInt(str2, 10) : str2.charCodeAt(0); } - var i; - exports2.parse = parse2; - function parse2(version, options) { - if (!options || typeof options !== "object") { - options = { - loose: !!options, - includePrerelease: false - }; - } - if (version instanceof SemVer) { - return version; - } - if (typeof version !== "string") { - return null; - } - if (version.length > MAX_LENGTH) { - return null; - } - var r = options.loose ? 
safeRe[t.LOOSE] : safeRe[t.FULL]; - if (!r.test(version)) { - return null; + function escapeBraces(str2) { + return str2.split("\\\\").join(escSlash).split("\\{").join(escOpen).split("\\}").join(escClose).split("\\,").join(escComma).split("\\.").join(escPeriod); + } + function unescapeBraces(str2) { + return str2.split(escSlash).join("\\").split(escOpen).join("{").split(escClose).join("}").split(escComma).join(",").split(escPeriod).join("."); + } + function parseCommaParts(str2) { + if (!str2) + return [""]; + var parts = []; + var m = balanced("{", "}", str2); + if (!m) + return str2.split(","); + var pre = m.pre; + var body = m.body; + var post = m.post; + var p = pre.split(","); + p[p.length - 1] += "{" + body + "}"; + var postParts = parseCommaParts(post); + if (post.length) { + p[p.length - 1] += postParts.shift(); + p.push.apply(p, postParts); } - try { - return new SemVer(version, options); - } catch (er) { - return null; + parts.push.apply(parts, p); + return parts; + } + function expandTop(str2) { + if (!str2) + return []; + if (str2.substr(0, 2) === "{}") { + str2 = "\\{\\}" + str2.substr(2); } + return expand2(escapeBraces(str2), true).map(unescapeBraces); } - exports2.valid = valid3; - function valid3(version, options) { - var v = parse2(version, options); - return v ? v.version : null; + function embrace(str2) { + return "{" + str2 + "}"; } - exports2.clean = clean3; - function clean3(version, options) { - var s = parse2(version.trim().replace(/^[=v]+/, ""), options); - return s ? s.version : null; + function isPadded(el) { + return /^-?0\d/.test(el); } - exports2.SemVer = SemVer; - function SemVer(version, options) { - if (!options || typeof options !== "object") { - options = { - loose: !!options, - includePrerelease: false - }; - } - if (version instanceof SemVer) { - if (version.loose === options.loose) { - return version; - } else { - version = version.version; + function lte(i, y) { + return i <= y; + } + function gte6(i, y) { + return i >= y; + } + function expand2(str2, isTop) { + var expansions = []; + var m = balanced("{", "}", str2); + if (!m || /\$$/.test(m.pre)) return [str2]; + var isNumericSequence = /^-?\d+\.\.-?\d+(?:\.\.-?\d+)?$/.test(m.body); + var isAlphaSequence = /^[a-zA-Z]\.\.[a-zA-Z](?:\.\.-?\d+)?$/.test(m.body); + var isSequence = isNumericSequence || isAlphaSequence; + var isOptions = m.body.indexOf(",") >= 0; + if (!isSequence && !isOptions) { + if (m.post.match(/,(?!,).*\}/)) { + str2 = m.pre + "{" + m.body + escClose + m.post; + return expand2(str2); } - } else if (typeof version !== "string") { - throw new TypeError("Invalid Version: " + version); - } - if (version.length > MAX_LENGTH) { - throw new TypeError("version is longer than " + MAX_LENGTH + " characters"); - } - if (!(this instanceof SemVer)) { - return new SemVer(version, options); - } - debug6("SemVer", version, options); - this.options = options; - this.loose = !!options.loose; - var m = version.trim().match(options.loose ? 
safeRe[t.LOOSE] : safeRe[t.FULL]); - if (!m) { - throw new TypeError("Invalid Version: " + version); - } - this.raw = version; - this.major = +m[1]; - this.minor = +m[2]; - this.patch = +m[3]; - if (this.major > MAX_SAFE_INTEGER || this.major < 0) { - throw new TypeError("Invalid major version"); - } - if (this.minor > MAX_SAFE_INTEGER || this.minor < 0) { - throw new TypeError("Invalid minor version"); - } - if (this.patch > MAX_SAFE_INTEGER || this.patch < 0) { - throw new TypeError("Invalid patch version"); + return [str2]; } - if (!m[4]) { - this.prerelease = []; + var n; + if (isSequence) { + n = m.body.split(/\.\./); } else { - this.prerelease = m[4].split(".").map(function(id) { - if (/^[0-9]+$/.test(id)) { - var num = +id; - if (num >= 0 && num < MAX_SAFE_INTEGER) { - return num; + n = parseCommaParts(m.body); + if (n.length === 1) { + n = expand2(n[0], false).map(embrace); + if (n.length === 1) { + var post = m.post.length ? expand2(m.post, false) : [""]; + return post.map(function(p) { + return m.pre + n[0] + p; + }); + } + } + } + var pre = m.pre; + var post = m.post.length ? expand2(m.post, false) : [""]; + var N; + if (isSequence) { + var x = numeric(n[0]); + var y = numeric(n[1]); + var width = Math.max(n[0].length, n[1].length); + var incr = n.length == 3 ? Math.abs(numeric(n[2])) : 1; + var test = lte; + var reverse = y < x; + if (reverse) { + incr *= -1; + test = gte6; + } + var pad = n.some(isPadded); + N = []; + for (var i = x; test(i, y); i += incr) { + var c; + if (isAlphaSequence) { + c = String.fromCharCode(i); + if (c === "\\") + c = ""; + } else { + c = String(i); + if (pad) { + var need = width - c.length; + if (need > 0) { + var z = new Array(need + 1).join("0"); + if (i < 0) + c = "-" + z + c.slice(1); + else + c = z + c; + } } } - return id; + N.push(c); + } + } else { + N = concatMap(n, function(el) { + return expand2(el, false); }); } - this.build = m[5] ? m[5].split(".") : []; - this.format(); + for (var j = 0; j < N.length; j++) { + for (var k = 0; k < post.length; k++) { + var expansion = pre + N[j] + post[k]; + if (!isTop || isSequence || expansion) + expansions.push(expansion); + } + } + return expansions; } - SemVer.prototype.format = function() { - this.version = this.major + "." + this.minor + "." + this.patch; - if (this.prerelease.length) { - this.version += "-" + this.prerelease.join("."); + } +}); + +// node_modules/minimatch/minimatch.js +var require_minimatch = __commonJS({ + "node_modules/minimatch/minimatch.js"(exports2, module2) { + module2.exports = minimatch; + minimatch.Minimatch = Minimatch; + var path13 = (function() { + try { + return require("path"); + } catch (e) { } - return this.version; + })() || { + sep: "/" }; - SemVer.prototype.toString = function() { - return this.version; + minimatch.sep = path13.sep; + var GLOBSTAR = minimatch.GLOBSTAR = Minimatch.GLOBSTAR = {}; + var expand2 = require_brace_expansion(); + var plTypes = { + "!": { open: "(?:(?!(?:", close: "))[^/]*?)" }, + "?": { open: "(?:", close: ")?" 
}, + "+": { open: "(?:", close: ")+" }, + "*": { open: "(?:", close: ")*" }, + "@": { open: "(?:", close: ")" } }; - SemVer.prototype.compare = function(other) { - debug6("SemVer.compare", this.version, this.options, other); - if (!(other instanceof SemVer)) { - other = new SemVer(other, this.options); + var qmark = "[^/]"; + var star = qmark + "*?"; + var twoStarDot = "(?:(?!(?:\\/|^)(?:\\.{1,2})($|\\/)).)*?"; + var twoStarNoDot = "(?:(?!(?:\\/|^)\\.).)*?"; + var reSpecials = charSet("().*{}+?[]^$\\!"); + function charSet(s) { + return s.split("").reduce(function(set2, c) { + set2[c] = true; + return set2; + }, {}); + } + var slashSplit = /\/+/; + minimatch.filter = filter; + function filter(pattern, options) { + options = options || {}; + return function(p, i, list) { + return minimatch(p, pattern, options); + }; + } + function ext(a, b) { + b = b || {}; + var t = {}; + Object.keys(a).forEach(function(k) { + t[k] = a[k]; + }); + Object.keys(b).forEach(function(k) { + t[k] = b[k]; + }); + return t; + } + minimatch.defaults = function(def) { + if (!def || typeof def !== "object" || !Object.keys(def).length) { + return minimatch; } - return this.compareMain(other) || this.comparePre(other); + var orig = minimatch; + var m = function minimatch2(p, pattern, options) { + return orig(p, pattern, ext(def, options)); + }; + m.Minimatch = function Minimatch2(pattern, options) { + return new orig.Minimatch(pattern, ext(def, options)); + }; + m.Minimatch.defaults = function defaults(options) { + return orig.defaults(ext(def, options)).Minimatch; + }; + m.filter = function filter2(pattern, options) { + return orig.filter(pattern, ext(def, options)); + }; + m.defaults = function defaults(options) { + return orig.defaults(ext(def, options)); + }; + m.makeRe = function makeRe2(pattern, options) { + return orig.makeRe(pattern, ext(def, options)); + }; + m.braceExpand = function braceExpand2(pattern, options) { + return orig.braceExpand(pattern, ext(def, options)); + }; + m.match = function(list, pattern, options) { + return orig.match(list, pattern, ext(def, options)); + }; + return m; }; - SemVer.prototype.compareMain = function(other) { - if (!(other instanceof SemVer)) { - other = new SemVer(other, this.options); - } - return compareIdentifiers(this.major, other.major) || compareIdentifiers(this.minor, other.minor) || compareIdentifiers(this.patch, other.patch); + Minimatch.defaults = function(def) { + return minimatch.defaults(def).Minimatch; }; - SemVer.prototype.comparePre = function(other) { - if (!(other instanceof SemVer)) { - other = new SemVer(other, this.options); + function minimatch(p, pattern, options) { + assertValidPattern(pattern); + if (!options) options = {}; + if (!options.nocomment && pattern.charAt(0) === "#") { + return false; } - if (this.prerelease.length && !other.prerelease.length) { - return -1; - } else if (!this.prerelease.length && other.prerelease.length) { - return 1; - } else if (!this.prerelease.length && !other.prerelease.length) { - return 0; + return new Minimatch(pattern, options).match(p); + } + function Minimatch(pattern, options) { + if (!(this instanceof Minimatch)) { + return new Minimatch(pattern, options); } - var i2 = 0; - do { - var a = this.prerelease[i2]; - var b = other.prerelease[i2]; - debug6("prerelease compare", i2, a, b); - if (a === void 0 && b === void 0) { - return 0; - } else if (b === void 0) { - return 1; - } else if (a === void 0) { - return -1; - } else if (a === b) { - continue; - } else { - return compareIdentifiers(a, b); - } - } while 
(++i2); - }; - SemVer.prototype.compareBuild = function(other) { - if (!(other instanceof SemVer)) { - other = new SemVer(other, this.options); - } - var i2 = 0; - do { - var a = this.build[i2]; - var b = other.build[i2]; - debug6("prerelease compare", i2, a, b); - if (a === void 0 && b === void 0) { - return 0; - } else if (b === void 0) { - return 1; - } else if (a === void 0) { - return -1; - } else if (a === b) { - continue; - } else { - return compareIdentifiers(a, b); - } - } while (++i2); - }; - SemVer.prototype.inc = function(release2, identifier) { - switch (release2) { - case "premajor": - this.prerelease.length = 0; - this.patch = 0; - this.minor = 0; - this.major++; - this.inc("pre", identifier); - break; - case "preminor": - this.prerelease.length = 0; - this.patch = 0; - this.minor++; - this.inc("pre", identifier); - break; - case "prepatch": - this.prerelease.length = 0; - this.inc("patch", identifier); - this.inc("pre", identifier); - break; - // If the input is a non-prerelease version, this acts the same as - // prepatch. - case "prerelease": - if (this.prerelease.length === 0) { - this.inc("patch", identifier); - } - this.inc("pre", identifier); - break; - case "major": - if (this.minor !== 0 || this.patch !== 0 || this.prerelease.length === 0) { - this.major++; - } - this.minor = 0; - this.patch = 0; - this.prerelease = []; - break; - case "minor": - if (this.patch !== 0 || this.prerelease.length === 0) { - this.minor++; - } - this.patch = 0; - this.prerelease = []; - break; - case "patch": - if (this.prerelease.length === 0) { - this.patch++; - } - this.prerelease = []; - break; - // This probably shouldn't be used publicly. - // 1.0.0 "pre" would become 1.0.0-0 which is the wrong direction. - case "pre": - if (this.prerelease.length === 0) { - this.prerelease = [0]; - } else { - var i2 = this.prerelease.length; - while (--i2 >= 0) { - if (typeof this.prerelease[i2] === "number") { - this.prerelease[i2]++; - i2 = -2; - } - } - if (i2 === -1) { - this.prerelease.push(0); - } - } - if (identifier) { - if (this.prerelease[0] === identifier) { - if (isNaN(this.prerelease[1])) { - this.prerelease = [identifier, 0]; - } - } else { - this.prerelease = [identifier, 0]; - } - } - break; - default: - throw new Error("invalid increment argument: " + release2); - } - this.format(); - this.raw = this.version; - return this; - }; - exports2.inc = inc; - function inc(version, release2, loose, identifier) { - if (typeof loose === "string") { - identifier = loose; - loose = void 0; - } - try { - return new SemVer(version, loose).inc(release2, identifier).version; - } catch (er) { - return null; + assertValidPattern(pattern); + if (!options) options = {}; + pattern = pattern.trim(); + if (!options.allowWindowsEscape && path13.sep !== "/") { + pattern = pattern.split(path13.sep).join("/"); } + this.options = options; + this.set = []; + this.pattern = pattern; + this.regexp = null; + this.negate = false; + this.comment = false; + this.empty = false; + this.partial = !!options.partial; + this.make(); } - exports2.diff = diff; - function diff(version1, version2) { - if (eq(version1, version2)) { - return null; - } else { - var v1 = parse2(version1); - var v2 = parse2(version2); - var prefix = ""; - if (v1.prerelease.length || v2.prerelease.length) { - prefix = "pre"; - var defaultResult = "prerelease"; - } - for (var key in v1) { - if (key === "major" || key === "minor" || key === "patch") { - if (v1[key] !== v2[key]) { - return prefix + key; - } - } - } - return defaultResult; + 
Minimatch.prototype.debug = function() { + }; + Minimatch.prototype.make = make; + function make() { + var pattern = this.pattern; + var options = this.options; + if (!options.nocomment && pattern.charAt(0) === "#") { + this.comment = true; + return; } - } - exports2.compareIdentifiers = compareIdentifiers; - var numeric = /^[0-9]+$/; - function compareIdentifiers(a, b) { - var anum = numeric.test(a); - var bnum = numeric.test(b); - if (anum && bnum) { - a = +a; - b = +b; + if (!pattern) { + this.empty = true; + return; } - return a === b ? 0 : anum && !bnum ? -1 : bnum && !anum ? 1 : a < b ? -1 : 1; - } - exports2.rcompareIdentifiers = rcompareIdentifiers; - function rcompareIdentifiers(a, b) { - return compareIdentifiers(b, a); - } - exports2.major = major; - function major(a, loose) { - return new SemVer(a, loose).major; - } - exports2.minor = minor; - function minor(a, loose) { - return new SemVer(a, loose).minor; - } - exports2.patch = patch; - function patch(a, loose) { - return new SemVer(a, loose).patch; - } - exports2.compare = compare3; - function compare3(a, b, loose) { - return new SemVer(a, loose).compare(new SemVer(b, loose)); - } - exports2.compareLoose = compareLoose; - function compareLoose(a, b) { - return compare3(a, b, true); - } - exports2.compareBuild = compareBuild; - function compareBuild(a, b, loose) { - var versionA = new SemVer(a, loose); - var versionB = new SemVer(b, loose); - return versionA.compare(versionB) || versionA.compareBuild(versionB); - } - exports2.rcompare = rcompare; - function rcompare(a, b, loose) { - return compare3(b, a, loose); - } - exports2.sort = sort; - function sort(list, loose) { - return list.sort(function(a, b) { - return exports2.compareBuild(a, b, loose); + this.parseNegate(); + var set2 = this.globSet = this.braceExpand(); + if (options.debug) this.debug = function debug6() { + console.error.apply(console, arguments); + }; + this.debug(this.pattern, set2); + set2 = this.globParts = set2.map(function(s) { + return s.split(slashSplit); }); - } - exports2.rsort = rsort; - function rsort(list, loose) { - return list.sort(function(a, b) { - return exports2.compareBuild(b, a, loose); + this.debug(this.pattern, set2); + set2 = set2.map(function(s, si, set3) { + return s.map(this.parse, this); + }, this); + this.debug(this.pattern, set2); + set2 = set2.filter(function(s) { + return s.indexOf(false) === -1; }); + this.debug(this.pattern, set2); + this.set = set2; } - exports2.gt = gt; - function gt(a, b, loose) { - return compare3(a, b, loose) > 0; - } - exports2.lt = lt; - function lt(a, b, loose) { - return compare3(a, b, loose) < 0; - } - exports2.eq = eq; - function eq(a, b, loose) { - return compare3(a, b, loose) === 0; - } - exports2.neq = neq; - function neq(a, b, loose) { - return compare3(a, b, loose) !== 0; - } - exports2.gte = gte6; - function gte6(a, b, loose) { - return compare3(a, b, loose) >= 0; - } - exports2.lte = lte; - function lte(a, b, loose) { - return compare3(a, b, loose) <= 0; - } - exports2.cmp = cmp; - function cmp(a, op, b, loose) { - switch (op) { - case "===": - if (typeof a === "object") - a = a.version; - if (typeof b === "object") - b = b.version; - return a === b; - case "!==": - if (typeof a === "object") - a = a.version; - if (typeof b === "object") - b = b.version; - return a !== b; - case "": - case "=": - case "==": - return eq(a, b, loose); - case "!=": - return neq(a, b, loose); - case ">": - return gt(a, b, loose); - case ">=": - return gte6(a, b, loose); - case "<": - return lt(a, b, loose); - case 
"<=": - return lte(a, b, loose); - default: - throw new TypeError("Invalid operator: " + op); + Minimatch.prototype.parseNegate = parseNegate; + function parseNegate() { + var pattern = this.pattern; + var negate2 = false; + var options = this.options; + var negateOffset = 0; + if (options.nonegate) return; + for (var i = 0, l = pattern.length; i < l && pattern.charAt(i) === "!"; i++) { + negate2 = !negate2; + negateOffset++; } + if (negateOffset) this.pattern = pattern.substr(negateOffset); + this.negate = negate2; } - exports2.Comparator = Comparator; - function Comparator(comp, options) { - if (!options || typeof options !== "object") { - options = { - loose: !!options, - includePrerelease: false - }; - } - if (comp instanceof Comparator) { - if (comp.loose === !!options.loose) { - return comp; + minimatch.braceExpand = function(pattern, options) { + return braceExpand(pattern, options); + }; + Minimatch.prototype.braceExpand = braceExpand; + function braceExpand(pattern, options) { + if (!options) { + if (this instanceof Minimatch) { + options = this.options; } else { - comp = comp.value; + options = {}; } } - if (!(this instanceof Comparator)) { - return new Comparator(comp, options); - } - comp = comp.trim().split(/\s+/).join(" "); - debug6("comparator", comp, options); - this.options = options; - this.loose = !!options.loose; - this.parse(comp); - if (this.semver === ANY) { - this.value = ""; - } else { - this.value = this.operator + this.semver.version; + pattern = typeof pattern === "undefined" ? this.pattern : pattern; + assertValidPattern(pattern); + if (options.nobrace || !/\{(?:(?!\{).)*\}/.test(pattern)) { + return [pattern]; } - debug6("comp", this); + return expand2(pattern); } - var ANY = {}; - Comparator.prototype.parse = function(comp) { - var r = this.options.loose ? safeRe[t.COMPARATORLOOSE] : safeRe[t.COMPARATOR]; - var m = comp.match(r); - if (!m) { - throw new TypeError("Invalid comparator: " + comp); - } - this.operator = m[1] !== void 0 ? 
m[1] : ""; - if (this.operator === "=") { - this.operator = ""; - } - if (!m[2]) { - this.semver = ANY; - } else { - this.semver = new SemVer(m[2], this.options.loose); - } - }; - Comparator.prototype.toString = function() { - return this.value; - }; - Comparator.prototype.test = function(version) { - debug6("Comparator.test", version, this.options.loose); - if (this.semver === ANY || version === ANY) { - return true; + var MAX_PATTERN_LENGTH = 1024 * 64; + var assertValidPattern = function(pattern) { + if (typeof pattern !== "string") { + throw new TypeError("invalid pattern"); } - if (typeof version === "string") { - try { - version = new SemVer(version, this.options); - } catch (er) { - return false; - } + if (pattern.length > MAX_PATTERN_LENGTH) { + throw new TypeError("pattern is too long"); } - return cmp(version, this.operator, this.semver, this.options); }; - Comparator.prototype.intersects = function(comp, options) { - if (!(comp instanceof Comparator)) { - throw new TypeError("a Comparator is required"); - } - if (!options || typeof options !== "object") { - options = { - loose: !!options, - includePrerelease: false - }; + Minimatch.prototype.parse = parse2; + var SUBPARSE = {}; + function parse2(pattern, isSub) { + assertValidPattern(pattern); + var options = this.options; + if (pattern === "**") { + if (!options.noglobstar) + return GLOBSTAR; + else + pattern = "*"; } - var rangeTmp; - if (this.operator === "") { - if (this.value === "") { - return true; - } - rangeTmp = new Range2(comp.value, options); - return satisfies2(this.value, rangeTmp, options); - } else if (comp.operator === "") { - if (comp.value === "") { - return true; + if (pattern === "") return ""; + var re = ""; + var hasMagic = !!options.nocase; + var escaping = false; + var patternListStack = []; + var negativeLists = []; + var stateChar; + var inClass = false; + var reClassStart = -1; + var classStart = -1; + var patternStart = pattern.charAt(0) === "." ? "" : options.dot ? 
"(?!(?:^|\\/)\\.{1,2}(?:$|\\/))" : "(?!\\.)"; + var self2 = this; + function clearStateChar() { + if (stateChar) { + switch (stateChar) { + case "*": + re += star; + hasMagic = true; + break; + case "?": + re += qmark; + hasMagic = true; + break; + default: + re += "\\" + stateChar; + break; + } + self2.debug("clearStateChar %j %j", stateChar, re); + stateChar = false; } - rangeTmp = new Range2(this.value, options); - return satisfies2(comp.semver, rangeTmp, options); - } - var sameDirectionIncreasing = (this.operator === ">=" || this.operator === ">") && (comp.operator === ">=" || comp.operator === ">"); - var sameDirectionDecreasing = (this.operator === "<=" || this.operator === "<") && (comp.operator === "<=" || comp.operator === "<"); - var sameSemVer = this.semver.version === comp.semver.version; - var differentDirectionsInclusive = (this.operator === ">=" || this.operator === "<=") && (comp.operator === ">=" || comp.operator === "<="); - var oppositeDirectionsLessThan = cmp(this.semver, "<", comp.semver, options) && ((this.operator === ">=" || this.operator === ">") && (comp.operator === "<=" || comp.operator === "<")); - var oppositeDirectionsGreaterThan = cmp(this.semver, ">", comp.semver, options) && ((this.operator === "<=" || this.operator === "<") && (comp.operator === ">=" || comp.operator === ">")); - return sameDirectionIncreasing || sameDirectionDecreasing || sameSemVer && differentDirectionsInclusive || oppositeDirectionsLessThan || oppositeDirectionsGreaterThan; - }; - exports2.Range = Range2; - function Range2(range, options) { - if (!options || typeof options !== "object") { - options = { - loose: !!options, - includePrerelease: false - }; } - if (range instanceof Range2) { - if (range.loose === !!options.loose && range.includePrerelease === !!options.includePrerelease) { - return range; - } else { - return new Range2(range.raw, options); + for (var i = 0, len = pattern.length, c; i < len && (c = pattern.charAt(i)); i++) { + this.debug("%s %s %s %j", pattern, i, re, c); + if (escaping && reSpecials[c]) { + re += "\\" + c; + escaping = false; + continue; } - } - if (range instanceof Comparator) { - return new Range2(range.value, options); - } - if (!(this instanceof Range2)) { - return new Range2(range, options); - } - this.options = options; - this.loose = !!options.loose; - this.includePrerelease = !!options.includePrerelease; - this.raw = range.trim().split(/\s+/).join(" "); - this.set = this.raw.split("||").map(function(range2) { - return this.parseRange(range2.trim()); - }, this).filter(function(c) { - return c.length; - }); - if (!this.set.length) { - throw new TypeError("Invalid SemVer Range: " + this.raw); - } - this.format(); - } - Range2.prototype.format = function() { - this.range = this.set.map(function(comps) { - return comps.join(" ").trim(); - }).join("||").trim(); - return this.range; - }; - Range2.prototype.toString = function() { - return this.range; - }; - Range2.prototype.parseRange = function(range) { - var loose = this.options.loose; - var hr = loose ? safeRe[t.HYPHENRANGELOOSE] : safeRe[t.HYPHENRANGE]; - range = range.replace(hr, hyphenReplace); - debug6("hyphen replace", range); - range = range.replace(safeRe[t.COMPARATORTRIM], comparatorTrimReplace); - debug6("comparator trim", range, safeRe[t.COMPARATORTRIM]); - range = range.replace(safeRe[t.TILDETRIM], tildeTrimReplace); - range = range.replace(safeRe[t.CARETTRIM], caretTrimReplace); - range = range.split(/\s+/).join(" "); - var compRe = loose ? 
safeRe[t.COMPARATORLOOSE] : safeRe[t.COMPARATOR]; - var set2 = range.split(" ").map(function(comp) { - return parseComparator(comp, this.options); - }, this).join(" ").split(/\s+/); - if (this.options.loose) { - set2 = set2.filter(function(comp) { - return !!comp.match(compRe); - }); - } - set2 = set2.map(function(comp) { - return new Comparator(comp, this.options); - }, this); - return set2; - }; - Range2.prototype.intersects = function(range, options) { - if (!(range instanceof Range2)) { - throw new TypeError("a Range is required"); - } - return this.set.some(function(thisComparators) { - return isSatisfiable(thisComparators, options) && range.set.some(function(rangeComparators) { - return isSatisfiable(rangeComparators, options) && thisComparators.every(function(thisComparator) { - return rangeComparators.every(function(rangeComparator) { - return thisComparator.intersects(rangeComparator, options); - }); - }); - }); - }); - }; - function isSatisfiable(comparators, options) { - var result = true; - var remainingComparators = comparators.slice(); - var testComparator = remainingComparators.pop(); - while (result && remainingComparators.length) { - result = remainingComparators.every(function(otherComparator) { - return testComparator.intersects(otherComparator, options); - }); - testComparator = remainingComparators.pop(); - } - return result; - } - exports2.toComparators = toComparators; - function toComparators(range, options) { - return new Range2(range, options).set.map(function(comp) { - return comp.map(function(c) { - return c.value; - }).join(" ").trim().split(" "); - }); - } - function parseComparator(comp, options) { - debug6("comp", comp, options); - comp = replaceCarets(comp, options); - debug6("caret", comp); - comp = replaceTildes(comp, options); - debug6("tildes", comp); - comp = replaceXRanges(comp, options); - debug6("xrange", comp); - comp = replaceStars(comp, options); - debug6("stars", comp); - return comp; - } - function isX(id) { - return !id || id.toLowerCase() === "x" || id === "*"; - } - function replaceTildes(comp, options) { - return comp.trim().split(/\s+/).map(function(comp2) { - return replaceTilde(comp2, options); - }).join(" "); - } - function replaceTilde(comp, options) { - var r = options.loose ? safeRe[t.TILDELOOSE] : safeRe[t.TILDE]; - return comp.replace(r, function(_, M, m, p, pr) { - debug6("tilde", comp, _, M, m, p, pr); - var ret; - if (isX(M)) { - ret = ""; - } else if (isX(m)) { - ret = ">=" + M + ".0.0 <" + (+M + 1) + ".0.0"; - } else if (isX(p)) { - ret = ">=" + M + "." + m + ".0 <" + M + "." + (+m + 1) + ".0"; - } else if (pr) { - debug6("replaceTilde pr", pr); - ret = ">=" + M + "." + m + "." + p + "-" + pr + " <" + M + "." + (+m + 1) + ".0"; - } else { - ret = ">=" + M + "." + m + "." + p + " <" + M + "." + (+m + 1) + ".0"; - } - debug6("tilde return", ret); - return ret; - }); - } - function replaceCarets(comp, options) { - return comp.trim().split(/\s+/).map(function(comp2) { - return replaceCaret(comp2, options); - }).join(" "); - } - function replaceCaret(comp, options) { - debug6("caret", comp, options); - var r = options.loose ? safeRe[t.CARETLOOSE] : safeRe[t.CARET]; - return comp.replace(r, function(_, M, m, p, pr) { - debug6("caret", comp, _, M, m, p, pr); - var ret; - if (isX(M)) { - ret = ""; - } else if (isX(m)) { - ret = ">=" + M + ".0.0 <" + (+M + 1) + ".0.0"; - } else if (isX(p)) { - if (M === "0") { - ret = ">=" + M + "." + m + ".0 <" + M + "." + (+m + 1) + ".0"; - } else { - ret = ">=" + M + "." 
+ m + ".0 <" + (+M + 1) + ".0.0"; + switch (c) { + /* istanbul ignore next */ + case "/": { + return false; } - } else if (pr) { - debug6("replaceCaret pr", pr); - if (M === "0") { - if (m === "0") { - ret = ">=" + M + "." + m + "." + p + "-" + pr + " <" + M + "." + m + "." + (+p + 1); - } else { - ret = ">=" + M + "." + m + "." + p + "-" + pr + " <" + M + "." + (+m + 1) + ".0"; + case "\\": + clearStateChar(); + escaping = true; + continue; + // the various stateChar values + // for the "extglob" stuff. + case "?": + case "*": + case "+": + case "@": + case "!": + this.debug("%s %s %s %j <-- stateChar", pattern, i, re, c); + if (inClass) { + this.debug(" in class"); + if (c === "!" && i === classStart + 1) c = "^"; + re += c; + continue; } - } else { - ret = ">=" + M + "." + m + "." + p + "-" + pr + " <" + (+M + 1) + ".0.0"; - } - } else { - debug6("no pr"); - if (M === "0") { - if (m === "0") { - ret = ">=" + M + "." + m + "." + p + " <" + M + "." + m + "." + (+p + 1); - } else { - ret = ">=" + M + "." + m + "." + p + " <" + M + "." + (+m + 1) + ".0"; + self2.debug("call clearStateChar %j", stateChar); + clearStateChar(); + stateChar = c; + if (options.noext) clearStateChar(); + continue; + case "(": + if (inClass) { + re += "("; + continue; } - } else { - ret = ">=" + M + "." + m + "." + p + " <" + (+M + 1) + ".0.0"; - } - } - debug6("caret return", ret); - return ret; - }); - } - function replaceXRanges(comp, options) { - debug6("replaceXRanges", comp, options); - return comp.split(/\s+/).map(function(comp2) { - return replaceXRange(comp2, options); - }).join(" "); - } - function replaceXRange(comp, options) { - comp = comp.trim(); - var r = options.loose ? safeRe[t.XRANGELOOSE] : safeRe[t.XRANGE]; - return comp.replace(r, function(ret, gtlt, M, m, p, pr) { - debug6("xRange", comp, ret, gtlt, M, m, p, pr); - var xM = isX(M); - var xm = xM || isX(m); - var xp = xm || isX(p); - var anyX = xp; - if (gtlt === "=" && anyX) { - gtlt = ""; - } - pr = options.includePrerelease ? "-0" : ""; - if (xM) { - if (gtlt === ">" || gtlt === "<") { - ret = "<0.0.0-0"; - } else { - ret = "*"; - } - } else if (gtlt && anyX) { - if (xm) { - m = 0; - } - p = 0; - if (gtlt === ">") { - gtlt = ">="; - if (xm) { - M = +M + 1; - m = 0; - p = 0; - } else { - m = +m + 1; - p = 0; + if (!stateChar) { + re += "\\("; + continue; } - } else if (gtlt === "<=") { - gtlt = "<"; - if (xm) { - M = +M + 1; - } else { - m = +m + 1; + patternListStack.push({ + type: stateChar, + start: i - 1, + reStart: re.length, + open: plTypes[stateChar].open, + close: plTypes[stateChar].close + }); + re += stateChar === "!" ? "(?:(?!(?:" : "(?:"; + this.debug("plType %j %j", stateChar, re); + stateChar = false; + continue; + case ")": + if (inClass || !patternListStack.length) { + re += "\\)"; + continue; } - } - ret = gtlt + M + "." + m + "." + p + pr; - } else if (xm) { - ret = ">=" + M + ".0.0" + pr + " <" + (+M + 1) + ".0.0" + pr; - } else if (xp) { - ret = ">=" + M + "." + m + ".0" + pr + " <" + M + "." 
+ (+m + 1) + ".0" + pr; + clearStateChar(); + hasMagic = true; + var pl = patternListStack.pop(); + re += pl.close; + if (pl.type === "!") { + negativeLists.push(pl); + } + pl.reEnd = re.length; + continue; + case "|": + if (inClass || !patternListStack.length || escaping) { + re += "\\|"; + escaping = false; + continue; + } + clearStateChar(); + re += "|"; + continue; + // these are mostly the same in regexp and glob + case "[": + clearStateChar(); + if (inClass) { + re += "\\" + c; + continue; + } + inClass = true; + classStart = i; + reClassStart = re.length; + re += c; + continue; + case "]": + if (i === classStart + 1 || !inClass) { + re += "\\" + c; + escaping = false; + continue; + } + var cs = pattern.substring(classStart + 1, i); + try { + RegExp("[" + cs + "]"); + } catch (er) { + var sp = this.parse(cs, SUBPARSE); + re = re.substr(0, reClassStart) + "\\[" + sp[0] + "\\]"; + hasMagic = hasMagic || sp[1]; + inClass = false; + continue; + } + hasMagic = true; + inClass = false; + re += c; + continue; + default: + clearStateChar(); + if (escaping) { + escaping = false; + } else if (reSpecials[c] && !(c === "^" && inClass)) { + re += "\\"; + } + re += c; } - debug6("xRange return", ret); - return ret; - }); - } - function replaceStars(comp, options) { - debug6("replaceStars", comp, options); - return comp.trim().replace(safeRe[t.STAR], ""); - } - function hyphenReplace($0, from, fM, fm, fp, fpr, fb, to, tM, tm, tp, tpr, tb) { - if (isX(fM)) { - from = ""; - } else if (isX(fm)) { - from = ">=" + fM + ".0.0"; - } else if (isX(fp)) { - from = ">=" + fM + "." + fm + ".0"; - } else { - from = ">=" + from; } - if (isX(tM)) { - to = ""; - } else if (isX(tm)) { - to = "<" + (+tM + 1) + ".0.0"; - } else if (isX(tp)) { - to = "<" + tM + "." + (+tm + 1) + ".0"; - } else if (tpr) { - to = "<=" + tM + "." + tm + "." + tp + "-" + tpr; - } else { - to = "<=" + to; + if (inClass) { + cs = pattern.substr(classStart + 1); + sp = this.parse(cs, SUBPARSE); + re = re.substr(0, reClassStart) + "\\[" + sp[0]; + hasMagic = hasMagic || sp[1]; } - return (from + " " + to).trim(); - } - Range2.prototype.test = function(version) { - if (!version) { - return false; + for (pl = patternListStack.pop(); pl; pl = patternListStack.pop()) { + var tail = re.slice(pl.reStart + pl.open.length); + this.debug("setting tail", re, pl); + tail = tail.replace(/((?:\\{2}){0,64})(\\?)\|/g, function(_, $1, $2) { + if (!$2) { + $2 = "\\"; + } + return $1 + $1 + $2 + "|"; + }); + this.debug("tail=%j\n %s", tail, tail, pl, re); + var t = pl.type === "*" ? star : pl.type === "?" ? 
qmark : "\\" + pl.type; + hasMagic = true; + re = re.slice(0, pl.reStart) + t + "\\(" + tail; } - if (typeof version === "string") { - try { - version = new SemVer(version, this.options); - } catch (er) { - return false; - } + clearStateChar(); + if (escaping) { + re += "\\\\"; } - for (var i2 = 0; i2 < this.set.length; i2++) { - if (testSet(this.set[i2], version, this.options)) { - return true; - } + var addPatternStart = false; + switch (re.charAt(0)) { + case "[": + case ".": + case "(": + addPatternStart = true; } - return false; - }; - function testSet(set2, version, options) { - for (var i2 = 0; i2 < set2.length; i2++) { - if (!set2[i2].test(version)) { - return false; + for (var n = negativeLists.length - 1; n > -1; n--) { + var nl = negativeLists[n]; + var nlBefore = re.slice(0, nl.reStart); + var nlFirst = re.slice(nl.reStart, nl.reEnd - 8); + var nlLast = re.slice(nl.reEnd - 8, nl.reEnd); + var nlAfter = re.slice(nl.reEnd); + nlLast += nlAfter; + var openParensBefore = nlBefore.split("(").length - 1; + var cleanAfter = nlAfter; + for (i = 0; i < openParensBefore; i++) { + cleanAfter = cleanAfter.replace(/\)[+*?]?/, ""); } - } - if (version.prerelease.length && !options.includePrerelease) { - for (i2 = 0; i2 < set2.length; i2++) { - debug6(set2[i2].semver); - if (set2[i2].semver === ANY) { - continue; - } - if (set2[i2].semver.prerelease.length > 0) { - var allowed = set2[i2].semver; - if (allowed.major === version.major && allowed.minor === version.minor && allowed.patch === version.patch) { - return true; - } - } + nlAfter = cleanAfter; + var dollar = ""; + if (nlAfter === "" && isSub !== SUBPARSE) { + dollar = "$"; } - return false; + var newRe = nlBefore + nlFirst + nlAfter + dollar + nlLast; + re = newRe; } - return true; - } - exports2.satisfies = satisfies2; - function satisfies2(version, range, options) { - try { - range = new Range2(range, options); - } catch (er) { - return false; + if (re !== "" && hasMagic) { + re = "(?=.)" + re; } - return range.test(version); - } - exports2.maxSatisfying = maxSatisfying; - function maxSatisfying(versions, range, options) { - var max = null; - var maxSV = null; + if (addPatternStart) { + re = patternStart + re; + } + if (isSub === SUBPARSE) { + return [re, hasMagic]; + } + if (!hasMagic) { + return globUnescape(pattern); + } + var flags = options.nocase ? "i" : ""; try { - var rangeObj = new Range2(range, options); + var regExp = new RegExp("^" + re + "$", flags); } catch (er) { - return null; + return new RegExp("$."); } - versions.forEach(function(v) { - if (rangeObj.test(v)) { - if (!max || maxSV.compare(v) === -1) { - max = v; - maxSV = new SemVer(max, options); - } - } - }); - return max; + regExp._glob = pattern; + regExp._src = re; + return regExp; } - exports2.minSatisfying = minSatisfying; - function minSatisfying(versions, range, options) { - var min = null; - var minSV = null; + minimatch.makeRe = function(pattern, options) { + return new Minimatch(pattern, options || {}).makeRe(); + }; + Minimatch.prototype.makeRe = makeRe; + function makeRe() { + if (this.regexp || this.regexp === false) return this.regexp; + var set2 = this.set; + if (!set2.length) { + this.regexp = false; + return this.regexp; + } + var options = this.options; + var twoStar = options.noglobstar ? star : options.dot ? twoStarDot : twoStarNoDot; + var flags = options.nocase ? "i" : ""; + var re = set2.map(function(pattern) { + return pattern.map(function(p) { + return p === GLOBSTAR ? twoStar : typeof p === "string" ? 
regExpEscape(p) : p._src; + }).join("\\/"); + }).join("|"); + re = "^(?:" + re + ")$"; + if (this.negate) re = "^(?!" + re + ").*$"; try { - var rangeObj = new Range2(range, options); - } catch (er) { - return null; + this.regexp = new RegExp(re, flags); + } catch (ex) { + this.regexp = false; } - versions.forEach(function(v) { - if (rangeObj.test(v)) { - if (!min || minSV.compare(v) === 1) { - min = v; - minSV = new SemVer(min, options); - } - } - }); - return min; + return this.regexp; } - exports2.minVersion = minVersion; - function minVersion(range, loose) { - range = new Range2(range, loose); - var minver = new SemVer("0.0.0"); - if (range.test(minver)) { - return minver; + minimatch.match = function(list, pattern, options) { + options = options || {}; + var mm = new Minimatch(pattern, options); + list = list.filter(function(f) { + return mm.match(f); + }); + if (mm.options.nonull && !list.length) { + list.push(pattern); } - minver = new SemVer("0.0.0-0"); - if (range.test(minver)) { - return minver; + return list; + }; + Minimatch.prototype.match = function match(f, partial) { + if (typeof partial === "undefined") partial = this.partial; + this.debug("match", f, this.pattern); + if (this.comment) return false; + if (this.empty) return f === ""; + if (f === "/" && partial) return true; + var options = this.options; + if (path13.sep !== "/") { + f = f.split(path13.sep).join("/"); } - minver = null; - for (var i2 = 0; i2 < range.set.length; ++i2) { - var comparators = range.set[i2]; - comparators.forEach(function(comparator) { - var compver = new SemVer(comparator.semver.version); - switch (comparator.operator) { - case ">": - if (compver.prerelease.length === 0) { - compver.patch++; - } else { - compver.prerelease.push(0); - } - compver.raw = compver.format(); - /* fallthrough */ - case "": - case ">=": - if (!minver || gt(minver, compver)) { - minver = compver; - } - break; - case "<": - case "<=": - break; - /* istanbul ignore next */ - default: - throw new Error("Unexpected operation: " + comparator.operator); - } - }); - } - if (minver && range.test(minver)) { - return minver; - } - return null; - } - exports2.validRange = validRange; - function validRange(range, options) { - try { - return new Range2(range, options).range || "*"; - } catch (er) { - return null; - } - } - exports2.ltr = ltr; - function ltr(version, range, options) { - return outside(version, range, "<", options); - } - exports2.gtr = gtr; - function gtr(version, range, options) { - return outside(version, range, ">", options); - } - exports2.outside = outside; - function outside(version, range, hilo, options) { - version = new SemVer(version, options); - range = new Range2(range, options); - var gtfn, ltefn, ltfn, comp, ecomp; - switch (hilo) { - case ">": - gtfn = gt; - ltefn = lte; - ltfn = lt; - comp = ">"; - ecomp = ">="; - break; - case "<": - gtfn = lt; - ltefn = gte6; - ltfn = gt; - comp = "<"; - ecomp = "<="; - break; - default: - throw new TypeError('Must provide a hilo val of "<" or ">"'); + f = f.split(slashSplit); + this.debug(this.pattern, "split", f); + var set2 = this.set; + this.debug(this.pattern, "set", set2); + var filename; + var i; + for (i = f.length - 1; i >= 0; i--) { + filename = f[i]; + if (filename) break; } - if (satisfies2(version, range, options)) { - return false; + for (i = 0; i < set2.length; i++) { + var pattern = set2[i]; + var file = f; + if (options.matchBase && pattern.length === 1) { + file = [filename]; + } + var hit = this.matchOne(file, pattern, partial); + if (hit) { + if 
(options.flipNegate) return true; + return !this.negate; + } } - for (var i2 = 0; i2 < range.set.length; ++i2) { - var comparators = range.set[i2]; - var high = null; - var low = null; - comparators.forEach(function(comparator) { - if (comparator.semver === ANY) { - comparator = new Comparator(">=0.0.0"); + if (options.flipNegate) return false; + return this.negate; + }; + Minimatch.prototype.matchOne = function(file, pattern, partial) { + var options = this.options; + this.debug( + "matchOne", + { "this": this, file, pattern } + ); + this.debug("matchOne", file.length, pattern.length); + for (var fi = 0, pi = 0, fl = file.length, pl = pattern.length; fi < fl && pi < pl; fi++, pi++) { + this.debug("matchOne loop"); + var p = pattern[pi]; + var f = file[fi]; + this.debug(pattern, p, f); + if (p === false) return false; + if (p === GLOBSTAR) { + this.debug("GLOBSTAR", [pattern, p, f]); + var fr = fi; + var pr = pi + 1; + if (pr === pl) { + this.debug("** at the end"); + for (; fi < fl; fi++) { + if (file[fi] === "." || file[fi] === ".." || !options.dot && file[fi].charAt(0) === ".") return false; + } + return true; } - high = high || comparator; - low = low || comparator; - if (gtfn(comparator.semver, high.semver, options)) { - high = comparator; - } else if (ltfn(comparator.semver, low.semver, options)) { - low = comparator; + while (fr < fl) { + var swallowee = file[fr]; + this.debug("\nglobstar while", file, fr, pattern, pr, swallowee); + if (this.matchOne(file.slice(fr), pattern.slice(pr), partial)) { + this.debug("globstar found match!", fr, fl, swallowee); + return true; + } else { + if (swallowee === "." || swallowee === ".." || !options.dot && swallowee.charAt(0) === ".") { + this.debug("dot detected!", file, fr, pattern, pr); + break; + } + this.debug("globstar swallow a segment, and continue"); + fr++; + } + } + if (partial) { + this.debug("\n>>> no match, partial?", file, fr, pattern, pr); + if (fr === fl) return true; } - }); - if (high.operator === comp || high.operator === ecomp) { - return false; - } - if ((!low.operator || low.operator === comp) && ltefn(version, low.semver)) { - return false; - } else if (low.operator === ecomp && ltfn(version, low.semver)) { return false; } - } - return true; - } - exports2.prerelease = prerelease; - function prerelease(version, options) { - var parsed = parse2(version, options); - return parsed && parsed.prerelease.length ? 
parsed.prerelease : null; - } - exports2.intersects = intersects; - function intersects(r1, r2, options) { - r1 = new Range2(r1, options); - r2 = new Range2(r2, options); - return r1.intersects(r2); - } - exports2.coerce = coerce3; - function coerce3(version, options) { - if (version instanceof SemVer) { - return version; - } - if (typeof version === "number") { - version = String(version); - } - if (typeof version !== "string") { - return null; - } - options = options || {}; - var match = null; - if (!options.rtl) { - match = version.match(safeRe[t.COERCE]); - } else { - var next; - while ((next = safeRe[t.COERCERTL].exec(version)) && (!match || match.index + match[0].length !== version.length)) { - if (!match || next.index + next[0].length !== match.index + match[0].length) { - match = next; - } - safeRe[t.COERCERTL].lastIndex = next.index + next[1].length + next[2].length; + var hit; + if (typeof p === "string") { + hit = f === p; + this.debug("string match", p, f, hit); + } else { + hit = f.match(p); + this.debug("pattern match", p, f, hit); } - safeRe[t.COERCERTL].lastIndex = -1; + if (!hit) return false; } - if (match === null) { - return null; + if (fi === fl && pi === pl) { + return true; + } else if (fi === fl) { + return partial; + } else if (pi === pl) { + return fi === fl - 1 && file[fi] === ""; } - return parse2(match[2] + "." + (match[3] || "0") + "." + (match[4] || "0"), options); + throw new Error("wtf?"); + }; + function globUnescape(s) { + return s.replace(/\\(.)/g, "$1"); + } + function regExpEscape(s) { + return s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, "\\$&"); } } }); -// node_modules/@actions/cache/lib/internal/constants.js -var require_constants12 = __commonJS({ - "node_modules/@actions/cache/lib/internal/constants.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.CacheFileSizeLimit = exports2.ManifestFilename = exports2.TarFilename = exports2.SystemTarPathOnWindows = exports2.GnuTarPathOnWindows = exports2.SocketTimeout = exports2.DefaultRetryDelay = exports2.DefaultRetryAttempts = exports2.ArchiveToolType = exports2.CompressionMethod = exports2.CacheFilename = void 0; - var CacheFilename; - (function(CacheFilename2) { - CacheFilename2["Gzip"] = "cache.tgz"; - CacheFilename2["Zstd"] = "cache.tzst"; - })(CacheFilename || (exports2.CacheFilename = CacheFilename = {})); - var CompressionMethod; - (function(CompressionMethod2) { - CompressionMethod2["Gzip"] = "gzip"; - CompressionMethod2["ZstdWithoutLong"] = "zstd-without-long"; - CompressionMethod2["Zstd"] = "zstd"; - })(CompressionMethod || (exports2.CompressionMethod = CompressionMethod = {})); - var ArchiveToolType; - (function(ArchiveToolType2) { - ArchiveToolType2["GNU"] = "gnu"; - ArchiveToolType2["BSD"] = "bsd"; - })(ArchiveToolType || (exports2.ArchiveToolType = ArchiveToolType = {})); - exports2.DefaultRetryAttempts = 2; - exports2.DefaultRetryDelay = 5e3; - exports2.SocketTimeout = 5e3; - exports2.GnuTarPathOnWindows = `${process.env["PROGRAMFILES"]}\\Git\\usr\\bin\\tar.exe`; - exports2.SystemTarPathOnWindows = `${process.env["SYSTEMDRIVE"]}\\Windows\\System32\\tar.exe`; - exports2.TarFilename = "cache.tar"; - exports2.ManifestFilename = "manifest.txt"; - exports2.CacheFileSizeLimit = 10 * Math.pow(1024, 3); - } -}); - -// node_modules/@actions/cache/lib/internal/cacheUtils.js -var require_cacheUtils = __commonJS({ - "node_modules/@actions/cache/lib/internal/cacheUtils.js"(exports2) { +// node_modules/@actions/glob/lib/internal-path.js +var 
require_internal_path = __commonJS({ + "node_modules/@actions/glob/lib/internal-path.js"(exports2) { "use strict"; var __createBinding2 = exports2 && exports2.__createBinding || (Object.create ? (function(o, m, k, k2) { if (k2 === void 0) k2 = k; @@ -50599,670 +49872,81 @@ var require_cacheUtils = __commonJS({ return result; }; })(); - var __awaiter2 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { - function adopt(value) { - return value instanceof P ? value : new P(function(resolve6) { - resolve6(value); - }); - } - return new (P || (P = Promise))(function(resolve6, reject) { - function fulfilled(value) { - try { - step(generator.next(value)); - } catch (e) { - reject(e); - } - } - function rejected(value) { - try { - step(generator["throw"](value)); - } catch (e) { - reject(e); - } - } - function step(result) { - result.done ? resolve6(result.value) : adopt(result.value).then(fulfilled, rejected); - } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); - }; - var __asyncValues2 = exports2 && exports2.__asyncValues || function(o) { - if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); - var m = o[Symbol.asyncIterator], i; - return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function() { - return this; - }, i); - function verb(n) { - i[n] = o[n] && function(v) { - return new Promise(function(resolve6, reject) { - v = o[n](v), settle(resolve6, reject, v.done, v.value); - }); - }; - } - function settle(resolve6, reject, d, v) { - Promise.resolve(v).then(function(v2) { - resolve6({ value: v2, done: d }); - }, reject); - } + var __importDefault2 = exports2 && exports2.__importDefault || function(mod) { + return mod && mod.__esModule ? 
mod : { "default": mod }; }; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.createTempDirectory = createTempDirectory; - exports2.getArchiveFileSizeInBytes = getArchiveFileSizeInBytes; - exports2.resolvePaths = resolvePaths; - exports2.unlinkFile = unlinkFile; - exports2.getCompressionMethod = getCompressionMethod; - exports2.getCacheFileName = getCacheFileName; - exports2.getGnuTarPathOnWindows = getGnuTarPathOnWindows; - exports2.assertDefined = assertDefined; - exports2.getCacheVersion = getCacheVersion; - exports2.getRuntimeToken = getRuntimeToken; - var core14 = __importStar2(require_core()); - var exec = __importStar2(require_exec()); - var glob = __importStar2(require_glob()); - var io6 = __importStar2(require_io()); - var crypto2 = __importStar2(require("crypto")); - var fs13 = __importStar2(require("fs")); + exports2.Path = void 0; var path13 = __importStar2(require("path")); - var semver9 = __importStar2(require_semver3()); - var util = __importStar2(require("util")); - var constants_1 = require_constants12(); - var versionSalt = "1.0"; - function createTempDirectory() { - return __awaiter2(this, void 0, void 0, function* () { - const IS_WINDOWS = process.platform === "win32"; - let tempDirectory = process.env["RUNNER_TEMP"] || ""; - if (!tempDirectory) { - let baseLocation; - if (IS_WINDOWS) { - baseLocation = process.env["USERPROFILE"] || "C:\\"; + var pathHelper = __importStar2(require_internal_path_helper()); + var assert_1 = __importDefault2(require("assert")); + var IS_WINDOWS = process.platform === "win32"; + var Path = class { + /** + * Constructs a Path + * @param itemPath Path or array of segments + */ + constructor(itemPath) { + this.segments = []; + if (typeof itemPath === "string") { + (0, assert_1.default)(itemPath, `Parameter 'itemPath' must not be empty`); + itemPath = pathHelper.safeTrimTrailingSeparator(itemPath); + if (!pathHelper.hasRoot(itemPath)) { + this.segments = itemPath.split(path13.sep); } else { - if (process.platform === "darwin") { - baseLocation = "/Users"; - } else { - baseLocation = "/home"; + let remaining = itemPath; + let dir = pathHelper.dirname(remaining); + while (dir !== remaining) { + const basename = path13.basename(remaining); + this.segments.unshift(basename); + remaining = dir; + dir = pathHelper.dirname(remaining); } + this.segments.unshift(remaining); } - tempDirectory = path13.join(baseLocation, "actions", "temp"); - } - const dest = path13.join(tempDirectory, crypto2.randomUUID()); - yield io6.mkdirP(dest); - return dest; - }); - } - function getArchiveFileSizeInBytes(filePath) { - return fs13.statSync(filePath).size; - } - function resolvePaths(patterns) { - return __awaiter2(this, void 0, void 0, function* () { - var _a, e_1, _b, _c; - var _d; - const paths = []; - const workspace = (_d = process.env["GITHUB_WORKSPACE"]) !== null && _d !== void 0 ? 
_d : process.cwd(); - const globber = yield glob.create(patterns.join("\n"), { - implicitDescendants: false - }); - try { - for (var _e = true, _f = __asyncValues2(globber.globGenerator()), _g; _g = yield _f.next(), _a = _g.done, !_a; _e = true) { - _c = _g.value; - _e = false; - const file = _c; - const relativeFile = path13.relative(workspace, file).replace(new RegExp(`\\${path13.sep}`, "g"), "/"); - core14.debug(`Matched: ${relativeFile}`); - if (relativeFile === "") { - paths.push("."); + } else { + (0, assert_1.default)(itemPath.length > 0, `Parameter 'itemPath' must not be an empty array`); + for (let i = 0; i < itemPath.length; i++) { + let segment = itemPath[i]; + (0, assert_1.default)(segment, `Parameter 'itemPath' must not contain any empty segments`); + segment = pathHelper.normalizeSeparators(itemPath[i]); + if (i === 0 && pathHelper.hasRoot(segment)) { + segment = pathHelper.safeTrimTrailingSeparator(segment); + (0, assert_1.default)(segment === pathHelper.dirname(segment), `Parameter 'itemPath' root segment contains information for multiple segments`); + this.segments.push(segment); } else { - paths.push(`${relativeFile}`); + (0, assert_1.default)(!segment.includes(path13.sep), `Parameter 'itemPath' contains unexpected path separators`); + this.segments.push(segment); } } - } catch (e_1_1) { - e_1 = { error: e_1_1 }; - } finally { - try { - if (!_e && !_a && (_b = _f.return)) yield _b.call(_f); - } finally { - if (e_1) throw e_1.error; - } - } - return paths; - }); - } - function unlinkFile(filePath) { - return __awaiter2(this, void 0, void 0, function* () { - return util.promisify(fs13.unlink)(filePath); - }); - } - function getVersion(app_1) { - return __awaiter2(this, arguments, void 0, function* (app, additionalArgs = []) { - let versionOutput = ""; - additionalArgs.push("--version"); - core14.debug(`Checking ${app} ${additionalArgs.join(" ")}`); - try { - yield exec.exec(`${app}`, additionalArgs, { - ignoreReturnCode: true, - silent: true, - listeners: { - stdout: (data) => versionOutput += data.toString(), - stderr: (data) => versionOutput += data.toString() - } - }); - } catch (err) { - core14.debug(err.message); - } - versionOutput = versionOutput.trim(); - core14.debug(versionOutput); - return versionOutput; - }); - } - function getCompressionMethod() { - return __awaiter2(this, void 0, void 0, function* () { - const versionOutput = yield getVersion("zstd", ["--quiet"]); - const version = semver9.clean(versionOutput); - core14.debug(`zstd version: ${version}`); - if (versionOutput === "") { - return constants_1.CompressionMethod.Gzip; - } else { - return constants_1.CompressionMethod.ZstdWithoutLong; - } - }); - } - function getCacheFileName(compressionMethod) { - return compressionMethod === constants_1.CompressionMethod.Gzip ? constants_1.CacheFilename.Gzip : constants_1.CacheFilename.Zstd; - } - function getGnuTarPathOnWindows() { - return __awaiter2(this, void 0, void 0, function* () { - if (fs13.existsSync(constants_1.GnuTarPathOnWindows)) { - return constants_1.GnuTarPathOnWindows; } - const versionOutput = yield getVersion("tar"); - return versionOutput.toLowerCase().includes("gnu tar") ? 
io6.which("tar") : ""; - }); - } - function assertDefined(name, value) { - if (value === void 0) { - throw Error(`Expected ${name} but value was undefiend`); - } - return value; - } - function getCacheVersion(paths, compressionMethod, enableCrossOsArchive = false) { - const components = paths.slice(); - if (compressionMethod) { - components.push(compressionMethod); - } - if (process.platform === "win32" && !enableCrossOsArchive) { - components.push("windows-only"); } - components.push(versionSalt); - return crypto2.createHash("sha256").update(components.join("|")).digest("hex"); - } - function getRuntimeToken() { - const token = process.env["ACTIONS_RUNTIME_TOKEN"]; - if (!token) { - throw new Error("Unable to get the ACTIONS_RUNTIME_TOKEN env variable"); + /** + * Converts the path to it's string representation + */ + toString() { + let result = this.segments[0]; + let skipSlash = result.endsWith(path13.sep) || IS_WINDOWS && /^[A-Z]:$/i.test(result); + for (let i = 1; i < this.segments.length; i++) { + if (skipSlash) { + skipSlash = false; + } else { + result += path13.sep; + } + result += this.segments[i]; + } + return result; } - return token; - } + }; + exports2.Path = Path; } }); -// node_modules/tslib/tslib.es6.mjs -var tslib_es6_exports = {}; -__export(tslib_es6_exports, { - __addDisposableResource: () => __addDisposableResource, - __assign: () => __assign, - __asyncDelegator: () => __asyncDelegator, - __asyncGenerator: () => __asyncGenerator, - __asyncValues: () => __asyncValues, - __await: () => __await, - __awaiter: () => __awaiter, - __classPrivateFieldGet: () => __classPrivateFieldGet, - __classPrivateFieldIn: () => __classPrivateFieldIn, - __classPrivateFieldSet: () => __classPrivateFieldSet, - __createBinding: () => __createBinding, - __decorate: () => __decorate, - __disposeResources: () => __disposeResources, - __esDecorate: () => __esDecorate, - __exportStar: () => __exportStar, - __extends: () => __extends, - __generator: () => __generator, - __importDefault: () => __importDefault, - __importStar: () => __importStar, - __makeTemplateObject: () => __makeTemplateObject, - __metadata: () => __metadata, - __param: () => __param, - __propKey: () => __propKey, - __read: () => __read, - __rest: () => __rest, - __rewriteRelativeImportExtension: () => __rewriteRelativeImportExtension, - __runInitializers: () => __runInitializers, - __setFunctionName: () => __setFunctionName, - __spread: () => __spread, - __spreadArray: () => __spreadArray, - __spreadArrays: () => __spreadArrays, - __values: () => __values2, - default: () => tslib_es6_default -}); -function __extends(d, b) { - if (typeof b !== "function" && b !== null) - throw new TypeError("Class extends value " + String(b) + " is not a constructor or null"); - extendStatics(d, b); - function __() { - this.constructor = d; - } - d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __()); -} -function __rest(s, e) { - var t = {}; - for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0) - t[p] = s[p]; - if (s != null && typeof Object.getOwnPropertySymbols === "function") - for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) { - if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i])) - t[p[i]] = s[p[i]]; - } - return t; -} -function __decorate(decorators, target, key, desc) { - var c = arguments.length, r = c < 3 ? target : desc === null ? 
desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; - if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc); - else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r; - return c > 3 && r && Object.defineProperty(target, key, r), r; -} -function __param(paramIndex, decorator) { - return function(target, key) { - decorator(target, key, paramIndex); - }; -} -function __esDecorate(ctor, descriptorIn, decorators, contextIn, initializers, extraInitializers) { - function accept(f) { - if (f !== void 0 && typeof f !== "function") throw new TypeError("Function expected"); - return f; - } - var kind = contextIn.kind, key = kind === "getter" ? "get" : kind === "setter" ? "set" : "value"; - var target = !descriptorIn && ctor ? contextIn["static"] ? ctor : ctor.prototype : null; - var descriptor = descriptorIn || (target ? Object.getOwnPropertyDescriptor(target, contextIn.name) : {}); - var _, done = false; - for (var i = decorators.length - 1; i >= 0; i--) { - var context2 = {}; - for (var p in contextIn) context2[p] = p === "access" ? {} : contextIn[p]; - for (var p in contextIn.access) context2.access[p] = contextIn.access[p]; - context2.addInitializer = function(f) { - if (done) throw new TypeError("Cannot add initializers after decoration has completed"); - extraInitializers.push(accept(f || null)); - }; - var result = (0, decorators[i])(kind === "accessor" ? { get: descriptor.get, set: descriptor.set } : descriptor[key], context2); - if (kind === "accessor") { - if (result === void 0) continue; - if (result === null || typeof result !== "object") throw new TypeError("Object expected"); - if (_ = accept(result.get)) descriptor.get = _; - if (_ = accept(result.set)) descriptor.set = _; - if (_ = accept(result.init)) initializers.unshift(_); - } else if (_ = accept(result)) { - if (kind === "field") initializers.unshift(_); - else descriptor[key] = _; - } - } - if (target) Object.defineProperty(target, contextIn.name, descriptor); - done = true; -} -function __runInitializers(thisArg, initializers, value) { - var useValue = arguments.length > 2; - for (var i = 0; i < initializers.length; i++) { - value = useValue ? initializers[i].call(thisArg, value) : initializers[i].call(thisArg); - } - return useValue ? value : void 0; -} -function __propKey(x) { - return typeof x === "symbol" ? x : "".concat(x); -} -function __setFunctionName(f, name, prefix) { - if (typeof name === "symbol") name = name.description ? "[".concat(name.description, "]") : ""; - return Object.defineProperty(f, "name", { configurable: true, value: prefix ? "".concat(prefix, " ", name) : name }); -} -function __metadata(metadataKey, metadataValue) { - if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(metadataKey, metadataValue); -} -function __awaiter(thisArg, _arguments, P, generator) { - function adopt(value) { - return value instanceof P ? value : new P(function(resolve6) { - resolve6(value); - }); - } - return new (P || (P = Promise))(function(resolve6, reject) { - function fulfilled(value) { - try { - step(generator.next(value)); - } catch (e) { - reject(e); - } - } - function rejected(value) { - try { - step(generator["throw"](value)); - } catch (e) { - reject(e); - } - } - function step(result) { - result.done ? 
resolve6(result.value) : adopt(result.value).then(fulfilled, rejected); - } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -} -function __generator(thisArg, body) { - var _ = { label: 0, sent: function() { - if (t[0] & 1) throw t[1]; - return t[1]; - }, trys: [], ops: [] }, f, y, t, g = Object.create((typeof Iterator === "function" ? Iterator : Object).prototype); - return g.next = verb(0), g["throw"] = verb(1), g["return"] = verb(2), typeof Symbol === "function" && (g[Symbol.iterator] = function() { - return this; - }), g; - function verb(n) { - return function(v) { - return step([n, v]); - }; - } - function step(op) { - if (f) throw new TypeError("Generator is already executing."); - while (g && (g = 0, op[0] && (_ = 0)), _) try { - if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t; - if (y = 0, t) op = [op[0] & 2, t.value]; - switch (op[0]) { - case 0: - case 1: - t = op; - break; - case 4: - _.label++; - return { value: op[1], done: false }; - case 5: - _.label++; - y = op[1]; - op = [0]; - continue; - case 7: - op = _.ops.pop(); - _.trys.pop(); - continue; - default: - if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { - _ = 0; - continue; - } - if (op[0] === 3 && (!t || op[1] > t[0] && op[1] < t[3])) { - _.label = op[1]; - break; - } - if (op[0] === 6 && _.label < t[1]) { - _.label = t[1]; - t = op; - break; - } - if (t && _.label < t[2]) { - _.label = t[2]; - _.ops.push(op); - break; - } - if (t[2]) _.ops.pop(); - _.trys.pop(); - continue; - } - op = body.call(thisArg, _); - } catch (e) { - op = [6, e]; - y = 0; - } finally { - f = t = 0; - } - if (op[0] & 5) throw op[1]; - return { value: op[0] ? op[1] : void 0, done: true }; - } -} -function __exportStar(m, o) { - for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(o, p)) __createBinding(o, m, p); -} -function __values2(o) { - var s = typeof Symbol === "function" && Symbol.iterator, m = s && o[s], i = 0; - if (m) return m.call(o); - if (o && typeof o.length === "number") return { - next: function() { - if (o && i >= o.length) o = void 0; - return { value: o && o[i++], done: !o }; - } - }; - throw new TypeError(s ? "Object is not iterable." : "Symbol.iterator is not defined."); -} -function __read(o, n) { - var m = typeof Symbol === "function" && o[Symbol.iterator]; - if (!m) return o; - var i = m.call(o), r, ar = [], e; - try { - while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value); - } catch (error3) { - e = { error: error3 }; - } finally { - try { - if (r && !r.done && (m = i["return"])) m.call(i); - } finally { - if (e) throw e.error; - } - } - return ar; -} -function __spread() { - for (var ar = [], i = 0; i < arguments.length; i++) - ar = ar.concat(__read(arguments[i])); - return ar; -} -function __spreadArrays() { - for (var s = 0, i = 0, il = arguments.length; i < il; i++) s += arguments[i].length; - for (var r = Array(s), k = 0, i = 0; i < il; i++) - for (var a = arguments[i], j = 0, jl = a.length; j < jl; j++, k++) - r[k] = a[j]; - return r; -} -function __spreadArray(to, from, pack) { - if (pack || arguments.length === 2) for (var i = 0, l = from.length, ar; i < l; i++) { - if (ar || !(i in from)) { - if (!ar) ar = Array.prototype.slice.call(from, 0, i); - ar[i] = from[i]; - } - } - return to.concat(ar || Array.prototype.slice.call(from)); -} -function __await(v) { - return this instanceof __await ? 
(this.v = v, this) : new __await(v); -} -function __asyncGenerator(thisArg, _arguments, generator) { - if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); - var g = generator.apply(thisArg, _arguments || []), i, q = []; - return i = Object.create((typeof AsyncIterator === "function" ? AsyncIterator : Object).prototype), verb("next"), verb("throw"), verb("return", awaitReturn), i[Symbol.asyncIterator] = function() { - return this; - }, i; - function awaitReturn(f) { - return function(v) { - return Promise.resolve(v).then(f, reject); - }; - } - function verb(n, f) { - if (g[n]) { - i[n] = function(v) { - return new Promise(function(a, b) { - q.push([n, v, a, b]) > 1 || resume(n, v); - }); - }; - if (f) i[n] = f(i[n]); - } - } - function resume(n, v) { - try { - step(g[n](v)); - } catch (e) { - settle(q[0][3], e); - } - } - function step(r) { - r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); - } - function fulfill(value) { - resume("next", value); - } - function reject(value) { - resume("throw", value); - } - function settle(f, v) { - if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); - } -} -function __asyncDelegator(o) { - var i, p; - return i = {}, verb("next"), verb("throw", function(e) { - throw e; - }), verb("return"), i[Symbol.iterator] = function() { - return this; - }, i; - function verb(n, f) { - i[n] = o[n] ? function(v) { - return (p = !p) ? { value: __await(o[n](v)), done: false } : f ? f(v) : v; - } : f; - } -} -function __asyncValues(o) { - if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); - var m = o[Symbol.asyncIterator], i; - return m ? m.call(o) : (o = typeof __values2 === "function" ? __values2(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function() { - return this; - }, i); - function verb(n) { - i[n] = o[n] && function(v) { - return new Promise(function(resolve6, reject) { - v = o[n](v), settle(resolve6, reject, v.done, v.value); - }); - }; - } - function settle(resolve6, reject, d, v) { - Promise.resolve(v).then(function(v2) { - resolve6({ value: v2, done: d }); - }, reject); - } -} -function __makeTemplateObject(cooked, raw) { - if (Object.defineProperty) { - Object.defineProperty(cooked, "raw", { value: raw }); - } else { - cooked.raw = raw; - } - return cooked; -} -function __importStar(mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) { - for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); - } - __setModuleDefault(result, mod); - return result; -} -function __importDefault(mod) { - return mod && mod.__esModule ? mod : { default: mod }; -} -function __classPrivateFieldGet(receiver, state, kind, f) { - if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); - if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); - return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); -} -function __classPrivateFieldSet(receiver, state, value, kind, f) { - if (kind === "m") throw new TypeError("Private method is not writable"); - if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); - if (typeof state === "function" ? 
receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); - return kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value), value; -} -function __classPrivateFieldIn(state, receiver) { - if (receiver === null || typeof receiver !== "object" && typeof receiver !== "function") throw new TypeError("Cannot use 'in' operator on non-object"); - return typeof state === "function" ? receiver === state : state.has(receiver); -} -function __addDisposableResource(env, value, async) { - if (value !== null && value !== void 0) { - if (typeof value !== "object" && typeof value !== "function") throw new TypeError("Object expected."); - var dispose, inner; - if (async) { - if (!Symbol.asyncDispose) throw new TypeError("Symbol.asyncDispose is not defined."); - dispose = value[Symbol.asyncDispose]; - } - if (dispose === void 0) { - if (!Symbol.dispose) throw new TypeError("Symbol.dispose is not defined."); - dispose = value[Symbol.dispose]; - if (async) inner = dispose; - } - if (typeof dispose !== "function") throw new TypeError("Object not disposable."); - if (inner) dispose = function() { - try { - inner.call(this); - } catch (e) { - return Promise.reject(e); - } - }; - env.stack.push({ value, dispose, async }); - } else if (async) { - env.stack.push({ async: true }); - } - return value; -} -function __disposeResources(env) { - function fail(e) { - env.error = env.hasError ? new _SuppressedError(e, env.error, "An error was suppressed during disposal.") : e; - env.hasError = true; - } - var r, s = 0; - function next() { - while (r = env.stack.pop()) { - try { - if (!r.async && s === 1) return s = 0, env.stack.push(r), Promise.resolve().then(next); - if (r.dispose) { - var result = r.dispose.call(r.value); - if (r.async) return s |= 2, Promise.resolve(result).then(next, function(e) { - fail(e); - return next(); - }); - } else s |= 1; - } catch (e) { - fail(e); - } - } - if (s === 1) return env.hasError ? Promise.reject(env.error) : Promise.resolve(); - if (env.hasError) throw env.error; - } - return next(); -} -function __rewriteRelativeImportExtension(path13, preserveJsx) { - if (typeof path13 === "string" && /^\.\.?\//.test(path13)) { - return path13.replace(/\.(tsx)$|((?:\.d)?)((?:\.[^./]+?)?)\.([cm]?)ts$/i, function(m, tsx, d, ext, cm) { - return tsx ? preserveJsx ? ".jsx" : ".js" : d && (!ext || !cm) ? m : d + ext + "." + cm.toLowerCase() + "js"; - }); - } - return path13; -} -var extendStatics, __assign, __createBinding, __setModuleDefault, ownKeys, _SuppressedError, tslib_es6_default; -var init_tslib_es6 = __esm({ - "node_modules/tslib/tslib.es6.mjs"() { - extendStatics = function(d, b) { - extendStatics = Object.setPrototypeOf || { __proto__: [] } instanceof Array && function(d2, b2) { - d2.__proto__ = b2; - } || function(d2, b2) { - for (var p in b2) if (Object.prototype.hasOwnProperty.call(b2, p)) d2[p] = b2[p]; - }; - return extendStatics(d, b); - }; - __assign = function() { - __assign = Object.assign || function __assign2(t) { - for (var s, i = 1, n = arguments.length; i < n; i++) { - s = arguments[i]; - for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p]; - } - return t; - }; - return __assign.apply(this, arguments); - }; - __createBinding = Object.create ? 
(function(o, m, k, k2) { +// node_modules/@actions/glob/lib/internal-pattern.js +var require_internal_pattern = __commonJS({ + "node_modules/@actions/glob/lib/internal-pattern.js"(exports2) { + "use strict"; + var __createBinding2 = exports2 && exports2.__createBinding || (Object.create ? (function(o, m, k, k2) { if (k2 === void 0) k2 = k; var desc = Object.getOwnPropertyDescriptor(m, k); if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { @@ -51274,2704 +49958,1906 @@ var init_tslib_es6 = __esm({ }) : (function(o, m, k, k2) { if (k2 === void 0) k2 = k; o[k2] = m[k]; - }); - __setModuleDefault = Object.create ? (function(o, v) { + })); + var __setModuleDefault2 = exports2 && exports2.__setModuleDefault || (Object.create ? (function(o, v) { Object.defineProperty(o, "default", { enumerable: true, value: v }); }) : function(o, v) { o["default"] = v; - }; - ownKeys = function(o) { - ownKeys = Object.getOwnPropertyNames || function(o2) { - var ar = []; - for (var k in o2) if (Object.prototype.hasOwnProperty.call(o2, k)) ar[ar.length] = k; - return ar; + }); + var __importStar2 = exports2 && exports2.__importStar || /* @__PURE__ */ (function() { + var ownKeys2 = function(o) { + ownKeys2 = Object.getOwnPropertyNames || function(o2) { + var ar = []; + for (var k in o2) if (Object.prototype.hasOwnProperty.call(o2, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys2(o); }; - return ownKeys(o); - }; - _SuppressedError = typeof SuppressedError === "function" ? SuppressedError : function(error3, suppressed, message) { - var e = new Error(message); - return e.name = "SuppressedError", e.error = error3, e.suppressed = suppressed, e; + return function(mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) { + for (var k = ownKeys2(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding2(result, mod, k[i]); + } + __setModuleDefault2(result, mod); + return result; + }; + })(); + var __importDefault2 = exports2 && exports2.__importDefault || function(mod) { + return mod && mod.__esModule ? 
mod : { "default": mod }; }; - tslib_es6_default = { - __extends, - __assign, - __rest, - __decorate, - __param, - __esDecorate, - __runInitializers, - __propKey, - __setFunctionName, - __metadata, - __awaiter, - __generator, - __createBinding, - __exportStar, - __values: __values2, - __read, - __spread, - __spreadArrays, - __spreadArray, - __await, - __asyncGenerator, - __asyncDelegator, - __asyncValues, - __makeTemplateObject, - __importStar, - __importDefault, - __classPrivateFieldGet, - __classPrivateFieldSet, - __classPrivateFieldIn, - __addDisposableResource, - __disposeResources, - __rewriteRelativeImportExtension - }; - } -}); - -// node_modules/@typespec/ts-http-runtime/dist/commonjs/abort-controller/AbortError.js -var require_AbortError = __commonJS({ - "node_modules/@typespec/ts-http-runtime/dist/commonjs/abort-controller/AbortError.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.AbortError = void 0; - var AbortError = class extends Error { - constructor(message) { - super(message); - this.name = "AbortError"; - } - }; - exports2.AbortError = AbortError; - } -}); - -// node_modules/@typespec/ts-http-runtime/dist/commonjs/logger/log.js -var require_log = __commonJS({ - "node_modules/@typespec/ts-http-runtime/dist/commonjs/logger/log.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.log = log; - var tslib_1 = (init_tslib_es6(), __toCommonJS(tslib_es6_exports)); - var node_os_1 = require("node:os"); - var node_util_1 = tslib_1.__importDefault(require("node:util")); - var node_process_1 = tslib_1.__importDefault(require("node:process")); - function log(message, ...args) { - node_process_1.default.stderr.write(`${node_util_1.default.format(message, ...args)}${node_os_1.EOL}`); - } - } -}); - -// node_modules/@typespec/ts-http-runtime/dist/commonjs/logger/debug.js -var require_debug2 = __commonJS({ - "node_modules/@typespec/ts-http-runtime/dist/commonjs/logger/debug.js"(exports2) { - "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - var log_js_1 = require_log(); - var debugEnvVariable = typeof process !== "undefined" && process.env && process.env.DEBUG || void 0; - var enabledString; - var enabledNamespaces = []; - var skippedNamespaces = []; - var debuggers = []; - if (debugEnvVariable) { - enable(debugEnvVariable); - } - var debugObj = Object.assign((namespace) => { - return createDebugger(namespace); - }, { - enable, - enabled, - disable, - log: log_js_1.log - }); - function enable(namespaces) { - enabledString = namespaces; - enabledNamespaces = []; - skippedNamespaces = []; - const namespaceList = namespaces.split(",").map((ns) => ns.trim()); - for (const ns of namespaceList) { - if (ns.startsWith("-")) { - skippedNamespaces.push(ns.substring(1)); + exports2.Pattern = void 0; + var os3 = __importStar2(require("os")); + var path13 = __importStar2(require("path")); + var pathHelper = __importStar2(require_internal_path_helper()); + var assert_1 = __importDefault2(require("assert")); + var minimatch_1 = require_minimatch(); + var internal_match_kind_1 = require_internal_match_kind(); + var internal_path_1 = require_internal_path(); + var IS_WINDOWS = process.platform === "win32"; + var Pattern = class _Pattern { + constructor(patternOrNegate, isImplicitPattern = false, segments, homedir) { + this.negate = false; + let pattern; + if (typeof patternOrNegate === "string") { + pattern = patternOrNegate.trim(); } else { - 
enabledNamespaces.push(ns); + segments = segments || []; + (0, assert_1.default)(segments.length, `Parameter 'segments' must not empty`); + const root = _Pattern.getLiteral(segments[0]); + (0, assert_1.default)(root && pathHelper.hasAbsoluteRoot(root), `Parameter 'segments' first element must be a root path`); + pattern = new internal_path_1.Path(segments).toString().trim(); + if (patternOrNegate) { + pattern = `!${pattern}`; + } } + while (pattern.startsWith("!")) { + this.negate = !this.negate; + pattern = pattern.substr(1).trim(); + } + pattern = _Pattern.fixupPattern(pattern, homedir); + this.segments = new internal_path_1.Path(pattern).segments; + this.trailingSeparator = pathHelper.normalizeSeparators(pattern).endsWith(path13.sep); + pattern = pathHelper.safeTrimTrailingSeparator(pattern); + let foundGlob = false; + const searchSegments = this.segments.map((x) => _Pattern.getLiteral(x)).filter((x) => !foundGlob && !(foundGlob = x === "")); + this.searchPath = new internal_path_1.Path(searchSegments).toString(); + this.rootRegExp = new RegExp(_Pattern.regExpEscape(searchSegments[0]), IS_WINDOWS ? "i" : ""); + this.isImplicitPattern = isImplicitPattern; + const minimatchOptions = { + dot: true, + nobrace: true, + nocase: IS_WINDOWS, + nocomment: true, + noext: true, + nonegate: true + }; + pattern = IS_WINDOWS ? pattern.replace(/\\/g, "/") : pattern; + this.minimatch = new minimatch_1.Minimatch(pattern, minimatchOptions); } - for (const instance of debuggers) { - instance.enabled = enabled(instance.namespace); - } - } - function enabled(namespace) { - if (namespace.endsWith("*")) { - return true; - } - for (const skipped of skippedNamespaces) { - if (namespaceMatches(namespace, skipped)) { - return false; + /** + * Matches the pattern against the specified path + */ + match(itemPath) { + if (this.segments[this.segments.length - 1] === "**") { + itemPath = pathHelper.normalizeSeparators(itemPath); + if (!itemPath.endsWith(path13.sep) && this.isImplicitPattern === false) { + itemPath = `${itemPath}${path13.sep}`; + } + } else { + itemPath = pathHelper.safeTrimTrailingSeparator(itemPath); + } + if (this.minimatch.match(itemPath)) { + return this.trailingSeparator ? internal_match_kind_1.MatchKind.Directory : internal_match_kind_1.MatchKind.All; } + return internal_match_kind_1.MatchKind.None; } - for (const enabledNamespace of enabledNamespaces) { - if (namespaceMatches(namespace, enabledNamespace)) { - return true; + /** + * Indicates whether the pattern may match descendants of the specified path + */ + partialMatch(itemPath) { + itemPath = pathHelper.safeTrimTrailingSeparator(itemPath); + if (pathHelper.dirname(itemPath) === itemPath) { + return this.rootRegExp.test(itemPath); } + return this.minimatch.matchOne(itemPath.split(IS_WINDOWS ? /\\+/ : /\/+/), this.minimatch.set[0], true); } - return false; - } - function namespaceMatches(namespace, patternToMatch) { - if (patternToMatch.indexOf("*") === -1) { - return namespace === patternToMatch; + /** + * Escapes glob patterns within a path + */ + static globEscape(s) { + return (IS_WINDOWS ? 
s : s.replace(/\\/g, "\\\\")).replace(/(\[)(?=[^/]+\])/g, "[[]").replace(/\?/g, "[?]").replace(/\*/g, "[*]"); } - let pattern = patternToMatch; - if (patternToMatch.indexOf("**") !== -1) { - const patternParts = []; - let lastCharacter = ""; - for (const character of patternToMatch) { - if (character === "*" && lastCharacter === "*") { - continue; - } else { - lastCharacter = character; - patternParts.push(character); + /** + * Normalizes slashes and ensures absolute root + */ + static fixupPattern(pattern, homedir) { + (0, assert_1.default)(pattern, "pattern cannot be empty"); + const literalSegments = new internal_path_1.Path(pattern).segments.map((x) => _Pattern.getLiteral(x)); + (0, assert_1.default)(literalSegments.every((x, i) => (x !== "." || i === 0) && x !== ".."), `Invalid pattern '${pattern}'. Relative pathing '.' and '..' is not allowed.`); + (0, assert_1.default)(!pathHelper.hasRoot(pattern) || literalSegments[0], `Invalid pattern '${pattern}'. Root segment must not contain globs.`); + pattern = pathHelper.normalizeSeparators(pattern); + if (pattern === "." || pattern.startsWith(`.${path13.sep}`)) { + pattern = _Pattern.globEscape(process.cwd()) + pattern.substr(1); + } else if (pattern === "~" || pattern.startsWith(`~${path13.sep}`)) { + homedir = homedir || os3.homedir(); + (0, assert_1.default)(homedir, "Unable to determine HOME directory"); + (0, assert_1.default)(pathHelper.hasAbsoluteRoot(homedir), `Expected HOME directory to be a rooted path. Actual '${homedir}'`); + pattern = _Pattern.globEscape(homedir) + pattern.substr(1); + } else if (IS_WINDOWS && (pattern.match(/^[A-Z]:$/i) || pattern.match(/^[A-Z]:[^\\]/i))) { + let root = pathHelper.ensureAbsoluteRoot("C:\\dummy-root", pattern.substr(0, 2)); + if (pattern.length > 2 && !root.endsWith("\\")) { + root += "\\"; + } + pattern = _Pattern.globEscape(root) + pattern.substr(2); + } else if (IS_WINDOWS && (pattern === "\\" || pattern.match(/^\\[^\\]/))) { + let root = pathHelper.ensureAbsoluteRoot("C:\\dummy-root", "\\"); + if (!root.endsWith("\\")) { + root += "\\"; } + pattern = _Pattern.globEscape(root) + pattern.substr(1); + } else { + pattern = pathHelper.ensureAbsoluteRoot(_Pattern.globEscape(process.cwd()), pattern); } - pattern = patternParts.join(""); + return pathHelper.normalizeSeparators(pattern); } - let namespaceIndex = 0; - let patternIndex = 0; - const patternLength = pattern.length; - const namespaceLength = namespace.length; - let lastWildcard = -1; - let lastWildcardNamespace = -1; - while (namespaceIndex < namespaceLength && patternIndex < patternLength) { - if (pattern[patternIndex] === "*") { - lastWildcard = patternIndex; - patternIndex++; - if (patternIndex === patternLength) { - return true; - } - while (namespace[namespaceIndex] !== pattern[patternIndex]) { - namespaceIndex++; - if (namespaceIndex === namespaceLength) { - return false; + /** + * Attempts to unescape a pattern segment to create a literal path segment. + * Otherwise returns empty string. 
+ */ + static getLiteral(segment) { + let literal = ""; + for (let i = 0; i < segment.length; i++) { + const c = segment[i]; + if (c === "\\" && !IS_WINDOWS && i + 1 < segment.length) { + literal += segment[++i]; + continue; + } else if (c === "*" || c === "?") { + return ""; + } else if (c === "[" && i + 1 < segment.length) { + let set2 = ""; + let closed = -1; + for (let i2 = i + 1; i2 < segment.length; i2++) { + const c2 = segment[i2]; + if (c2 === "\\" && !IS_WINDOWS && i2 + 1 < segment.length) { + set2 += segment[++i2]; + continue; + } else if (c2 === "]") { + closed = i2; + break; + } else { + set2 += c2; + } } - } - lastWildcardNamespace = namespaceIndex; - namespaceIndex++; - patternIndex++; - continue; - } else if (pattern[patternIndex] === namespace[namespaceIndex]) { - patternIndex++; - namespaceIndex++; - } else if (lastWildcard >= 0) { - patternIndex = lastWildcard + 1; - namespaceIndex = lastWildcardNamespace + 1; - if (namespaceIndex === namespaceLength) { - return false; - } - while (namespace[namespaceIndex] !== pattern[patternIndex]) { - namespaceIndex++; - if (namespaceIndex === namespaceLength) { - return false; + if (closed >= 0) { + if (set2.length > 1) { + return ""; + } + if (set2) { + literal += set2; + i = closed; + continue; + } } } - lastWildcardNamespace = namespaceIndex; - namespaceIndex++; - patternIndex++; - continue; - } else { - return false; - } - } - const namespaceDone = namespaceIndex === namespace.length; - const patternDone = patternIndex === pattern.length; - const trailingWildCard = patternIndex === pattern.length - 1 && pattern[patternIndex] === "*"; - return namespaceDone && (patternDone || trailingWildCard); - } - function disable() { - const result = enabledString || ""; - enable(""); - return result; - } - function createDebugger(namespace) { - const newDebugger = Object.assign(debug6, { - enabled: enabled(namespace), - destroy, - log: debugObj.log, - namespace, - extend: extend3 - }); - function debug6(...args) { - if (!newDebugger.enabled) { - return; - } - if (args.length > 0) { - args[0] = `${namespace} ${args[0]}`; + literal += c; } - newDebugger.log(...args); + return literal; } - debuggers.push(newDebugger); - return newDebugger; - } - function destroy() { - const index = debuggers.indexOf(this); - if (index >= 0) { - debuggers.splice(index, 1); - return true; + /** + * Escapes regexp special characters + * https://javascript.info/regexp-escaping + */ + static regExpEscape(s) { + return s.replace(/[[\\^$.|?*+()]/g, "\\$&"); } - return false; - } - function extend3(namespace) { - const newDebugger = createDebugger(`${this.namespace}:${namespace}`); - newDebugger.log = this.log; - return newDebugger; - } - exports2.default = debugObj; + }; + exports2.Pattern = Pattern; } }); -// node_modules/@typespec/ts-http-runtime/dist/commonjs/logger/logger.js -var require_logger = __commonJS({ - "node_modules/@typespec/ts-http-runtime/dist/commonjs/logger/logger.js"(exports2) { +// node_modules/@actions/glob/lib/internal-search-state.js +var require_internal_search_state = __commonJS({ + "node_modules/@actions/glob/lib/internal-search-state.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.TypeSpecRuntimeLogger = void 0; - exports2.createLoggerContext = createLoggerContext; - exports2.setLogLevel = setLogLevel; - exports2.getLogLevel = getLogLevel; - exports2.createClientLogger = createClientLogger; - var tslib_1 = (init_tslib_es6(), __toCommonJS(tslib_es6_exports)); - var debug_js_1 = 
tslib_1.__importDefault(require_debug2()); - var TYPESPEC_RUNTIME_LOG_LEVELS = ["verbose", "info", "warning", "error"]; - var levelMap = { - verbose: 400, - info: 300, - warning: 200, - error: 100 + exports2.SearchState = void 0; + var SearchState = class { + constructor(path13, level) { + this.path = path13; + this.level = level; + } }; - function patchLogMethod(parent, child) { - child.log = (...args) => { - parent.log(...args); - }; - } - function isTypeSpecRuntimeLogLevel(level) { - return TYPESPEC_RUNTIME_LOG_LEVELS.includes(level); - } - function createLoggerContext(options) { - const registeredLoggers = /* @__PURE__ */ new Set(); - const logLevelFromEnv = typeof process !== "undefined" && process.env && process.env[options.logLevelEnvVarName] || void 0; - let logLevel; - const clientLogger = (0, debug_js_1.default)(options.namespace); - clientLogger.log = (...args) => { - debug_js_1.default.log(...args); + exports2.SearchState = SearchState; + } +}); + +// node_modules/@actions/glob/lib/internal-globber.js +var require_internal_globber = __commonJS({ + "node_modules/@actions/glob/lib/internal-globber.js"(exports2) { + "use strict"; + var __createBinding2 = exports2 && exports2.__createBinding || (Object.create ? (function(o, m, k, k2) { + if (k2 === void 0) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { + return m[k]; + } }; + } + Object.defineProperty(o, k2, desc); + }) : (function(o, m, k, k2) { + if (k2 === void 0) k2 = k; + o[k2] = m[k]; + })); + var __setModuleDefault2 = exports2 && exports2.__setModuleDefault || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); + }) : function(o, v) { + o["default"] = v; + }); + var __importStar2 = exports2 && exports2.__importStar || /* @__PURE__ */ (function() { + var ownKeys2 = function(o) { + ownKeys2 = Object.getOwnPropertyNames || function(o2) { + var ar = []; + for (var k in o2) if (Object.prototype.hasOwnProperty.call(o2, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys2(o); }; - function contextSetLogLevel(level) { - if (level && !isTypeSpecRuntimeLogLevel(level)) { - throw new Error(`Unknown log level '${level}'. Acceptable values: ${TYPESPEC_RUNTIME_LOG_LEVELS.join(",")}`); + return function(mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) { + for (var k = ownKeys2(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding2(result, mod, k[i]); } - logLevel = level; - const enabledNamespaces = []; - for (const logger of registeredLoggers) { - if (shouldEnable(logger)) { - enabledNamespaces.push(logger.namespace); + __setModuleDefault2(result, mod); + return result; + }; + })(); + var __awaiter2 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { + function adopt(value) { + return value instanceof P ? value : new P(function(resolve6) { + resolve6(value); + }); + } + return new (P || (P = Promise))(function(resolve6, reject) { + function fulfilled(value) { + try { + step(generator.next(value)); + } catch (e) { + reject(e); } } - debug_js_1.default.enable(enabledNamespaces.join(",")); - } - if (logLevelFromEnv) { - if (isTypeSpecRuntimeLogLevel(logLevelFromEnv)) { - contextSetLogLevel(logLevelFromEnv); - } else { - console.error(`${options.logLevelEnvVarName} set to unknown log level '${logLevelFromEnv}'; logging is not enabled. 
Acceptable values: ${TYPESPEC_RUNTIME_LOG_LEVELS.join(", ")}.`); + function rejected(value) { + try { + step(generator["throw"](value)); + } catch (e) { + reject(e); + } } - } - function shouldEnable(logger) { - return Boolean(logLevel && levelMap[logger.level] <= levelMap[logLevel]); - } - function createLogger2(parent, level) { - const logger = Object.assign(parent.extend(level), { - level - }); - patchLogMethod(parent, logger); - if (shouldEnable(logger)) { - const enabledNamespaces = debug_js_1.default.disable(); - debug_js_1.default.enable(enabledNamespaces + "," + logger.namespace); + function step(result) { + result.done ? resolve6(result.value) : adopt(result.value).then(fulfilled, rejected); } - registeredLoggers.add(logger); - return logger; + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); + }; + var __asyncValues2 = exports2 && exports2.__asyncValues || function(o) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var m = o[Symbol.asyncIterator], i; + return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function() { + return this; + }, i); + function verb(n) { + i[n] = o[n] && function(v) { + return new Promise(function(resolve6, reject) { + v = o[n](v), settle(resolve6, reject, v.done, v.value); + }); + }; } - function contextGetLogLevel() { - return logLevel; + function settle(resolve6, reject, d, v) { + Promise.resolve(v).then(function(v2) { + resolve6({ value: v2, done: d }); + }, reject); } - function contextCreateClientLogger(namespace) { - const clientRootLogger = clientLogger.extend(namespace); - patchLogMethod(clientLogger, clientRootLogger); - return { - error: createLogger2(clientRootLogger, "error"), - warning: createLogger2(clientRootLogger, "warning"), - info: createLogger2(clientRootLogger, "info"), - verbose: createLogger2(clientRootLogger, "verbose") + }; + var __await2 = exports2 && exports2.__await || function(v) { + return this instanceof __await2 ? (this.v = v, this) : new __await2(v); + }; + var __asyncGenerator2 = exports2 && exports2.__asyncGenerator || function(thisArg, _arguments, generator) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var g = generator.apply(thisArg, _arguments || []), i, q = []; + return i = Object.create((typeof AsyncIterator === "function" ? 
AsyncIterator : Object).prototype), verb("next"), verb("throw"), verb("return", awaitReturn), i[Symbol.asyncIterator] = function() { + return this; + }, i; + function awaitReturn(f) { + return function(v) { + return Promise.resolve(v).then(f, reject); }; } - return { - setLogLevel: contextSetLogLevel, - getLogLevel: contextGetLogLevel, - createClientLogger: contextCreateClientLogger, - logger: clientLogger - }; - } - var context2 = createLoggerContext({ - logLevelEnvVarName: "TYPESPEC_RUNTIME_LOG_LEVEL", - namespace: "typeSpecRuntime" - }); - exports2.TypeSpecRuntimeLogger = context2.logger; - function setLogLevel(logLevel) { - context2.setLogLevel(logLevel); - } - function getLogLevel() { - return context2.getLogLevel(); - } - function createClientLogger(namespace) { - return context2.createClientLogger(namespace); - } - } -}); - -// node_modules/@typespec/ts-http-runtime/dist/commonjs/httpHeaders.js -var require_httpHeaders = __commonJS({ - "node_modules/@typespec/ts-http-runtime/dist/commonjs/httpHeaders.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.createHttpHeaders = createHttpHeaders; - function normalizeName(name) { - return name.toLowerCase(); - } - function* headerIterator(map2) { - for (const entry of map2.values()) { - yield [entry.name, entry.value]; - } - } - var HttpHeadersImpl = class { - _headersMap; - constructor(rawHeaders) { - this._headersMap = /* @__PURE__ */ new Map(); - if (rawHeaders) { - for (const headerName of Object.keys(rawHeaders)) { - this.set(headerName, rawHeaders[headerName]); - } + function verb(n, f) { + if (g[n]) { + i[n] = function(v) { + return new Promise(function(a, b) { + q.push([n, v, a, b]) > 1 || resume(n, v); + }); + }; + if (f) i[n] = f(i[n]); } } - /** - * Set a header in this collection with the provided name and value. The name is - * case-insensitive. - * @param name - The name of the header to set. This value is case-insensitive. - * @param value - The value of the header to set. - */ - set(name, value) { - this._headersMap.set(normalizeName(name), { name, value: String(value).trim() }); - } - /** - * Get the header value for the provided header name, or undefined if no header exists in this - * collection with the provided name. - * @param name - The name of the header. This value is case-insensitive. - */ - get(name) { - return this._headersMap.get(normalizeName(name))?.value; - } - /** - * Get whether or not this header collection contains a header entry for the provided header name. - * @param name - The name of the header to set. This value is case-insensitive. - */ - has(name) { - return this._headersMap.has(normalizeName(name)); + function resume(n, v) { + try { + step(g[n](v)); + } catch (e) { + settle(q[0][3], e); + } } - /** - * Remove the header with the provided headerName. - * @param name - The name of the header to remove. - */ - delete(name) { - this._headersMap.delete(normalizeName(name)); + function step(r) { + r.value instanceof __await2 ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); } - /** - * Get the JSON object representation of this HTTP header collection. 
- */ - toJSON(options = {}) { - const result = {}; - if (options.preserveCase) { - for (const entry of this._headersMap.values()) { - result[entry.name] = entry.value; - } - } else { - for (const [normalizedName, entry] of this._headersMap) { - result[normalizedName] = entry.value; - } - } - return result; + function fulfill(value) { + resume("next", value); } - /** - * Get the string representation of this HTTP header collection. - */ - toString() { - return JSON.stringify(this.toJSON({ preserveCase: true })); + function reject(value) { + resume("throw", value); } - /** - * Iterate over tuples of header [name, value] pairs. - */ - [Symbol.iterator]() { - return headerIterator(this._headersMap); + function settle(f, v) { + if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); } }; - function createHttpHeaders(rawHeaders) { - return new HttpHeadersImpl(rawHeaders); - } - } -}); - -// node_modules/@typespec/ts-http-runtime/dist/commonjs/auth/schemes.js -var require_schemes = __commonJS({ - "node_modules/@typespec/ts-http-runtime/dist/commonjs/auth/schemes.js"(exports2) { - "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - } -}); - -// node_modules/@typespec/ts-http-runtime/dist/commonjs/auth/oauth2Flows.js -var require_oauth2Flows = __commonJS({ - "node_modules/@typespec/ts-http-runtime/dist/commonjs/auth/oauth2Flows.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - } -}); - -// node_modules/@typespec/ts-http-runtime/dist/commonjs/util/uuidUtils.js -var require_uuidUtils = __commonJS({ - "node_modules/@typespec/ts-http-runtime/dist/commonjs/util/uuidUtils.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.randomUUID = randomUUID2; - function randomUUID2() { - return crypto.randomUUID(); - } - } -}); - -// node_modules/@typespec/ts-http-runtime/dist/commonjs/pipelineRequest.js -var require_pipelineRequest = __commonJS({ - "node_modules/@typespec/ts-http-runtime/dist/commonjs/pipelineRequest.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.createPipelineRequest = createPipelineRequest; - var httpHeaders_js_1 = require_httpHeaders(); - var uuidUtils_js_1 = require_uuidUtils(); - var PipelineRequestImpl = class { - url; - method; - headers; - timeout; - withCredentials; - body; - multipartBody; - formData; - streamResponseStatusCodes; - enableBrowserStreams; - proxySettings; - disableKeepAlive; - abortSignal; - requestId; - allowInsecureConnection; - onUploadProgress; - onDownloadProgress; - requestOverrides; - authSchemes; + exports2.DefaultGlobber = void 0; + var core14 = __importStar2(require_core()); + var fs13 = __importStar2(require("fs")); + var globOptionsHelper = __importStar2(require_internal_glob_options_helper()); + var path13 = __importStar2(require("path")); + var patternHelper = __importStar2(require_internal_pattern_helper()); + var internal_match_kind_1 = require_internal_match_kind(); + var internal_pattern_1 = require_internal_pattern(); + var internal_search_state_1 = require_internal_search_state(); + var IS_WINDOWS = process.platform === "win32"; + var DefaultGlobber = class _DefaultGlobber { constructor(options) { - this.url = options.url; - this.body = options.body; - this.headers = options.headers ?? (0, httpHeaders_js_1.createHttpHeaders)(); - this.method = options.method ?? "GET"; - this.timeout = options.timeout ?? 
0; - this.multipartBody = options.multipartBody; - this.formData = options.formData; - this.disableKeepAlive = options.disableKeepAlive ?? false; - this.proxySettings = options.proxySettings; - this.streamResponseStatusCodes = options.streamResponseStatusCodes; - this.withCredentials = options.withCredentials ?? false; - this.abortSignal = options.abortSignal; - this.onUploadProgress = options.onUploadProgress; - this.onDownloadProgress = options.onDownloadProgress; - this.requestId = options.requestId || (0, uuidUtils_js_1.randomUUID)(); - this.allowInsecureConnection = options.allowInsecureConnection ?? false; - this.enableBrowserStreams = options.enableBrowserStreams ?? false; - this.requestOverrides = options.requestOverrides; - this.authSchemes = options.authSchemes; - } - }; - function createPipelineRequest(options) { - return new PipelineRequestImpl(options); - } - } -}); - -// node_modules/@typespec/ts-http-runtime/dist/commonjs/pipeline.js -var require_pipeline = __commonJS({ - "node_modules/@typespec/ts-http-runtime/dist/commonjs/pipeline.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.createEmptyPipeline = createEmptyPipeline; - var ValidPhaseNames = /* @__PURE__ */ new Set(["Deserialize", "Serialize", "Retry", "Sign"]); - var HttpPipeline = class _HttpPipeline { - _policies = []; - _orderedPolicies; - constructor(policies) { - this._policies = policies?.slice(0) ?? []; - this._orderedPolicies = void 0; + this.patterns = []; + this.searchPaths = []; + this.options = globOptionsHelper.getOptions(options); } - addPolicy(policy, options = {}) { - if (options.phase && options.afterPhase) { - throw new Error("Policies inside a phase cannot specify afterPhase."); - } - if (options.phase && !ValidPhaseNames.has(options.phase)) { - throw new Error(`Invalid phase name: ${options.phase}`); - } - if (options.afterPhase && !ValidPhaseNames.has(options.afterPhase)) { - throw new Error(`Invalid afterPhase name: ${options.afterPhase}`); - } - this._policies.push({ - policy, - options - }); - this._orderedPolicies = void 0; + getSearchPaths() { + return this.searchPaths.slice(); } - removePolicy(options) { - const removedPolicies = []; - this._policies = this._policies.filter((policyDescriptor) => { - if (options.name && policyDescriptor.policy.name === options.name || options.phase && policyDescriptor.options.phase === options.phase) { - removedPolicies.push(policyDescriptor.policy); - return false; - } else { - return true; + glob() { + return __awaiter2(this, void 0, void 0, function* () { + var _a, e_1, _b, _c; + const result = []; + try { + for (var _d = true, _e = __asyncValues2(this.globGenerator()), _f; _f = yield _e.next(), _a = _f.done, !_a; _d = true) { + _c = _f.value; + _d = false; + const itemPath = _c; + result.push(itemPath); + } + } catch (e_1_1) { + e_1 = { error: e_1_1 }; + } finally { + try { + if (!_d && !_a && (_b = _e.return)) yield _b.call(_e); + } finally { + if (e_1) throw e_1.error; + } } + return result; }); - this._orderedPolicies = void 0; - return removedPolicies; - } - sendRequest(httpClient, request2) { - const policies = this.getOrderedPolicies(); - const pipeline = policies.reduceRight((next, policy) => { - return (req) => { - return policy.sendRequest(req, next); - }; - }, (req) => httpClient.sendRequest(req)); - return pipeline(request2); - } - getOrderedPolicies() { - if (!this._orderedPolicies) { - this._orderedPolicies = this.orderPolicies(); - } - return this._orderedPolicies; - } - clone() { - 
return new _HttpPipeline(this._policies); - } - static create() { - return new _HttpPipeline(); } - orderPolicies() { - const result = []; - const policyMap = /* @__PURE__ */ new Map(); - function createPhase(name) { - return { - name, - policies: /* @__PURE__ */ new Set(), - hasRun: false, - hasAfterPolicies: false - }; - } - const serializePhase = createPhase("Serialize"); - const noPhase = createPhase("None"); - const deserializePhase = createPhase("Deserialize"); - const retryPhase = createPhase("Retry"); - const signPhase = createPhase("Sign"); - const orderedPhases = [serializePhase, noPhase, deserializePhase, retryPhase, signPhase]; - function getPhase(phase) { - if (phase === "Retry") { - return retryPhase; - } else if (phase === "Serialize") { - return serializePhase; - } else if (phase === "Deserialize") { - return deserializePhase; - } else if (phase === "Sign") { - return signPhase; - } else { - return noPhase; - } - } - for (const descriptor of this._policies) { - const policy = descriptor.policy; - const options = descriptor.options; - const policyName = policy.name; - if (policyMap.has(policyName)) { - throw new Error("Duplicate policy names not allowed in pipeline"); - } - const node = { - policy, - dependsOn: /* @__PURE__ */ new Set(), - dependants: /* @__PURE__ */ new Set() - }; - if (options.afterPhase) { - node.afterPhase = getPhase(options.afterPhase); - node.afterPhase.hasAfterPolicies = true; - } - policyMap.set(policyName, node); - const phase = getPhase(options.phase); - phase.policies.add(node); - } - for (const descriptor of this._policies) { - const { policy, options } = descriptor; - const policyName = policy.name; - const node = policyMap.get(policyName); - if (!node) { - throw new Error(`Missing node for policy ${policyName}`); + globGenerator() { + return __asyncGenerator2(this, arguments, function* globGenerator_1() { + const options = globOptionsHelper.getOptions(this.options); + const patterns = []; + for (const pattern of this.patterns) { + patterns.push(pattern); + if (options.implicitDescendants && (pattern.trailingSeparator || pattern.segments[pattern.segments.length - 1] !== "**")) { + patterns.push(new internal_pattern_1.Pattern(pattern.negate, true, pattern.segments.concat("**"))); + } } - if (options.afterPolicies) { - for (const afterPolicyName of options.afterPolicies) { - const afterNode = policyMap.get(afterPolicyName); - if (afterNode) { - node.dependsOn.add(afterNode); - afterNode.dependants.add(node); + const stack = []; + for (const searchPath of patternHelper.getSearchPaths(patterns)) { + core14.debug(`Search path '${searchPath}'`); + try { + yield __await2(fs13.promises.lstat(searchPath)); + } catch (err) { + if (err.code === "ENOENT") { + continue; } + throw err; } + stack.unshift(new internal_search_state_1.SearchState(searchPath, 1)); } - if (options.beforePolicies) { - for (const beforePolicyName of options.beforePolicies) { - const beforeNode = policyMap.get(beforePolicyName); - if (beforeNode) { - beforeNode.dependsOn.add(node); - node.dependants.add(beforeNode); + const traversalChain = []; + while (stack.length) { + const item = stack.pop(); + const match = patternHelper.match(patterns, item.path); + const partialMatch = !!match || patternHelper.partialMatch(patterns, item.path); + if (!match && !partialMatch) { + continue; + } + const stats = yield __await2( + _DefaultGlobber.stat(item, options, traversalChain) + // Broken symlink, or symlink cycle detected, or no longer exists + ); + if (!stats) { + continue; + } + if 
(options.excludeHiddenFiles && path13.basename(item.path).match(/^\./)) { + continue; + } + if (stats.isDirectory()) { + if (match & internal_match_kind_1.MatchKind.Directory && options.matchDirectories) { + yield yield __await2(item.path); + } else if (!partialMatch) { + continue; } + const childLevel = item.level + 1; + const childItems = (yield __await2(fs13.promises.readdir(item.path))).map((x) => new internal_search_state_1.SearchState(path13.join(item.path, x), childLevel)); + stack.push(...childItems.reverse()); + } else if (match & internal_match_kind_1.MatchKind.File) { + yield yield __await2(item.path); } } - } - function walkPhase(phase) { - phase.hasRun = true; - for (const node of phase.policies) { - if (node.afterPhase && (!node.afterPhase.hasRun || node.afterPhase.policies.size)) { + }); + } + /** + * Constructs a DefaultGlobber + */ + static create(patterns, options) { + return __awaiter2(this, void 0, void 0, function* () { + const result = new _DefaultGlobber(options); + if (IS_WINDOWS) { + patterns = patterns.replace(/\r\n/g, "\n"); + patterns = patterns.replace(/\r/g, "\n"); + } + const lines = patterns.split("\n").map((x) => x.trim()); + for (const line of lines) { + if (!line || line.startsWith("#")) { continue; + } else { + result.patterns.push(new internal_pattern_1.Pattern(line)); } - if (node.dependsOn.size === 0) { - result.push(node.policy); - for (const dependant of node.dependants) { - dependant.dependsOn.delete(node); + } + result.searchPaths.push(...patternHelper.getSearchPaths(result.patterns)); + return result; + }); + } + static stat(item, options, traversalChain) { + return __awaiter2(this, void 0, void 0, function* () { + let stats; + if (options.followSymbolicLinks) { + try { + stats = yield fs13.promises.stat(item.path); + } catch (err) { + if (err.code === "ENOENT") { + if (options.omitBrokenSymbolicLinks) { + core14.debug(`Broken symlink '${item.path}'`); + return void 0; + } + throw new Error(`No information found for the path '${item.path}'. 
This may indicate a broken symbolic link.`); } - policyMap.delete(node.policy.name); - phase.policies.delete(node); + throw err; } + } else { + stats = yield fs13.promises.lstat(item.path); } - } - function walkPhases() { - for (const phase of orderedPhases) { - walkPhase(phase); - if (phase.policies.size > 0 && phase !== noPhase) { - if (!noPhase.hasRun) { - walkPhase(noPhase); - } - return; + if (stats.isDirectory() && options.followSymbolicLinks) { + const realPath = yield fs13.promises.realpath(item.path); + while (traversalChain.length >= item.level) { + traversalChain.pop(); } - if (phase.hasAfterPolicies) { - walkPhase(noPhase); + if (traversalChain.some((x) => x === realPath)) { + core14.debug(`Symlink cycle detected for path '${item.path}' and realpath '${realPath}'`); + return void 0; } + traversalChain.push(realPath); } - } - let iteration = 0; - while (policyMap.size > 0) { - iteration++; - const initialResultLength = result.length; - walkPhases(); - if (result.length <= initialResultLength && iteration > 1) { - throw new Error("Cannot satisfy policy dependencies due to requirements cycle."); - } - } - return result; + return stats; + }); } }; - function createEmptyPipeline() { - return HttpPipeline.create(); - } + exports2.DefaultGlobber = DefaultGlobber; } }); -// node_modules/@typespec/ts-http-runtime/dist/commonjs/util/object.js -var require_object = __commonJS({ - "node_modules/@typespec/ts-http-runtime/dist/commonjs/util/object.js"(exports2) { +// node_modules/@actions/glob/lib/internal-hash-files.js +var require_internal_hash_files = __commonJS({ + "node_modules/@actions/glob/lib/internal-hash-files.js"(exports2) { "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.isObject = isObject2; - function isObject2(input) { - return typeof input === "object" && input !== null && !Array.isArray(input) && !(input instanceof RegExp) && !(input instanceof Date); - } - } -}); - -// node_modules/@typespec/ts-http-runtime/dist/commonjs/util/error.js -var require_error = __commonJS({ - "node_modules/@typespec/ts-http-runtime/dist/commonjs/util/error.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.isError = isError; - var object_js_1 = require_object(); - function isError(e) { - if ((0, object_js_1.isObject)(e)) { - const hasName = typeof e.name === "string"; - const hasMessage = typeof e.message === "string"; - return hasName && hasMessage; + var __createBinding2 = exports2 && exports2.__createBinding || (Object.create ? (function(o, m, k, k2) { + if (k2 === void 0) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { + return m[k]; + } }; } - return false; - } - } -}); - -// node_modules/@typespec/ts-http-runtime/dist/commonjs/util/inspect.js -var require_inspect = __commonJS({ - "node_modules/@typespec/ts-http-runtime/dist/commonjs/util/inspect.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.custom = void 0; - var node_util_1 = require("node:util"); - exports2.custom = node_util_1.inspect.custom; - } -}); - -// node_modules/@typespec/ts-http-runtime/dist/commonjs/util/sanitizer.js -var require_sanitizer = __commonJS({ - "node_modules/@typespec/ts-http-runtime/dist/commonjs/util/sanitizer.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.Sanitizer = void 0; - var object_js_1 = require_object(); - var RedactedString = "REDACTED"; - var defaultAllowedHeaderNames = [ - "x-ms-client-request-id", - "x-ms-return-client-request-id", - "x-ms-useragent", - "x-ms-correlation-request-id", - "x-ms-request-id", - "client-request-id", - "ms-cv", - "return-client-request-id", - "traceparent", - "Access-Control-Allow-Credentials", - "Access-Control-Allow-Headers", - "Access-Control-Allow-Methods", - "Access-Control-Allow-Origin", - "Access-Control-Expose-Headers", - "Access-Control-Max-Age", - "Access-Control-Request-Headers", - "Access-Control-Request-Method", - "Origin", - "Accept", - "Accept-Encoding", - "Cache-Control", - "Connection", - "Content-Length", - "Content-Type", - "Date", - "ETag", - "Expires", - "If-Match", - "If-Modified-Since", - "If-None-Match", - "If-Unmodified-Since", - "Last-Modified", - "Pragma", - "Request-Id", - "Retry-After", - "Server", - "Transfer-Encoding", - "User-Agent", - "WWW-Authenticate" - ]; - var defaultAllowedQueryParameters = ["api-version"]; - var Sanitizer = class { - allowedHeaderNames; - allowedQueryParameters; - constructor({ additionalAllowedHeaderNames: allowedHeaderNames = [], additionalAllowedQueryParameters: allowedQueryParameters = [] } = {}) { - allowedHeaderNames = defaultAllowedHeaderNames.concat(allowedHeaderNames); - allowedQueryParameters = defaultAllowedQueryParameters.concat(allowedQueryParameters); - this.allowedHeaderNames = new Set(allowedHeaderNames.map((n) => n.toLowerCase())); - this.allowedQueryParameters = new Set(allowedQueryParameters.map((p) => p.toLowerCase())); + Object.defineProperty(o, k2, desc); + }) : (function(o, m, k, k2) { + if (k2 === void 0) k2 = k; + o[k2] = m[k]; + })); + var __setModuleDefault2 = exports2 && exports2.__setModuleDefault || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); + }) : function(o, v) { + o["default"] = v; + }); + var __importStar2 = exports2 && exports2.__importStar || /* @__PURE__ */ (function() { + var ownKeys2 = function(o) { + ownKeys2 = Object.getOwnPropertyNames || function(o2) { + var ar = []; + for (var k in o2) if (Object.prototype.hasOwnProperty.call(o2, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys2(o); + }; + return function(mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) { + for (var k = ownKeys2(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding2(result, mod, k[i]); + } + __setModuleDefault2(result, mod); + return result; + }; + })(); + var __awaiter2 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { + function adopt(value) { + return value instanceof P ? value : new P(function(resolve6) { + resolve6(value); + }); } - /** - * Sanitizes an object for logging. - * @param obj - The object to sanitize - * @returns - The sanitized object as a string - */ - sanitize(obj) { - const seen = /* @__PURE__ */ new Set(); - return JSON.stringify(obj, (key, value) => { - if (value instanceof Error) { - return { - ...value, - name: value.name, - message: value.message - }; - } - if (key === "headers") { - return this.sanitizeHeaders(value); - } else if (key === "url") { - return this.sanitizeUrl(value); - } else if (key === "query") { - return this.sanitizeQuery(value); - } else if (key === "body") { - return void 0; - } else if (key === "response") { - return void 0; - } else if (key === "operationSpec") { - return void 0; - } else if (Array.isArray(value) || (0, object_js_1.isObject)(value)) { - if (seen.has(value)) { - return "[Circular]"; - } - seen.add(value); + return new (P || (P = Promise))(function(resolve6, reject) { + function fulfilled(value) { + try { + step(generator.next(value)); + } catch (e) { + reject(e); } - return value; - }, 2); - } - /** - * Sanitizes a URL for logging. - * @param value - The URL to sanitize - * @returns - The sanitized URL as a string - */ - sanitizeUrl(value) { - if (typeof value !== "string" || value === null || value === "") { - return value; - } - const url2 = new URL(value); - if (!url2.search) { - return value; } - for (const [key] of url2.searchParams) { - if (!this.allowedQueryParameters.has(key.toLowerCase())) { - url2.searchParams.set(key, RedactedString); + function rejected(value) { + try { + step(generator["throw"](value)); + } catch (e) { + reject(e); } } - return url2.toString(); - } - sanitizeHeaders(obj) { - const sanitized = {}; - for (const key of Object.keys(obj)) { - if (this.allowedHeaderNames.has(key.toLowerCase())) { - sanitized[key] = obj[key]; - } else { - sanitized[key] = RedactedString; - } + function step(result) { + result.done ? resolve6(result.value) : adopt(result.value).then(fulfilled, rejected); } - return sanitized; + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); + }; + var __asyncValues2 = exports2 && exports2.__asyncValues || function(o) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var m = o[Symbol.asyncIterator], i; + return m ? m.call(o) : (o = typeof __values === "function" ? 
__values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function() { + return this; + }, i); + function verb(n) { + i[n] = o[n] && function(v) { + return new Promise(function(resolve6, reject) { + v = o[n](v), settle(resolve6, reject, v.done, v.value); + }); + }; } - sanitizeQuery(value) { - if (typeof value !== "object" || value === null) { - return value; - } - const sanitized = {}; - for (const k of Object.keys(value)) { - if (this.allowedQueryParameters.has(k.toLowerCase())) { - sanitized[k] = value[k]; - } else { - sanitized[k] = RedactedString; - } - } - return sanitized; + function settle(resolve6, reject, d, v) { + Promise.resolve(v).then(function(v2) { + resolve6({ value: v2, done: d }); + }, reject); } }; - exports2.Sanitizer = Sanitizer; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.hashFiles = hashFiles; + var crypto2 = __importStar2(require("crypto")); + var core14 = __importStar2(require_core()); + var fs13 = __importStar2(require("fs")); + var stream2 = __importStar2(require("stream")); + var util = __importStar2(require("util")); + var path13 = __importStar2(require("path")); + function hashFiles(globber_1, currentWorkspace_1) { + return __awaiter2(this, arguments, void 0, function* (globber, currentWorkspace, verbose = false) { + var _a, e_1, _b, _c; + var _d; + const writeDelegate = verbose ? core14.info : core14.debug; + let hasMatch = false; + const githubWorkspace = currentWorkspace ? currentWorkspace : (_d = process.env["GITHUB_WORKSPACE"]) !== null && _d !== void 0 ? _d : process.cwd(); + const result = crypto2.createHash("sha256"); + let count = 0; + try { + for (var _e = true, _f = __asyncValues2(globber.globGenerator()), _g; _g = yield _f.next(), _a = _g.done, !_a; _e = true) { + _c = _g.value; + _e = false; + const file = _c; + writeDelegate(file); + if (!file.startsWith(`${githubWorkspace}${path13.sep}`)) { + writeDelegate(`Ignore '${file}' since it is not under GITHUB_WORKSPACE.`); + continue; + } + if (fs13.statSync(file).isDirectory()) { + writeDelegate(`Skip directory '${file}'.`); + continue; + } + const hash2 = crypto2.createHash("sha256"); + const pipeline = util.promisify(stream2.pipeline); + yield pipeline(fs13.createReadStream(file), hash2); + result.write(hash2.digest()); + count++; + if (!hasMatch) { + hasMatch = true; + } + } + } catch (e_1_1) { + e_1 = { error: e_1_1 }; + } finally { + try { + if (!_e && !_a && (_b = _f.return)) yield _b.call(_f); + } finally { + if (e_1) throw e_1.error; + } + } + result.end(); + if (hasMatch) { + writeDelegate(`Found ${count} files to hash.`); + return result.digest("hex"); + } else { + writeDelegate(`No matches found for glob`); + return ""; + } + }); + } } }); -// node_modules/@typespec/ts-http-runtime/dist/commonjs/restError.js -var require_restError = __commonJS({ - "node_modules/@typespec/ts-http-runtime/dist/commonjs/restError.js"(exports2) { +// node_modules/@actions/glob/lib/glob.js +var require_glob = __commonJS({ + "node_modules/@actions/glob/lib/glob.js"(exports2) { "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.RestError = void 0; - exports2.isRestError = isRestError; - var error_js_1 = require_error(); - var inspect_js_1 = require_inspect(); - var sanitizer_js_1 = require_sanitizer(); - var errorSanitizer = new sanitizer_js_1.Sanitizer(); - var RestError = class _RestError extends Error { - /** - * Something went wrong when making the request. 
- * This means the actual request failed for some reason, - * such as a DNS issue or the connection being lost. - */ - static REQUEST_SEND_ERROR = "REQUEST_SEND_ERROR"; - /** - * This means that parsing the response from the server failed. - * It may have been malformed. - */ - static PARSE_ERROR = "PARSE_ERROR"; - /** - * The code of the error itself (use statics on RestError if possible.) - */ - code; - /** - * The HTTP status code of the request (if applicable.) - */ - statusCode; - /** - * The request that was made. - * This property is non-enumerable. - */ - request; - /** - * The response received (if any.) - * This property is non-enumerable. - */ - response; - /** - * Bonus property set by the throw site. - */ - details; - constructor(message, options = {}) { - super(message); - this.name = "RestError"; - this.code = options.code; - this.statusCode = options.statusCode; - Object.defineProperty(this, "request", { value: options.request, enumerable: false }); - Object.defineProperty(this, "response", { value: options.response, enumerable: false }); - const agent = this.request?.agent ? { - maxFreeSockets: this.request.agent.maxFreeSockets, - maxSockets: this.request.agent.maxSockets - } : void 0; - Object.defineProperty(this, inspect_js_1.custom, { - value: () => { - return `RestError: ${this.message} - ${errorSanitizer.sanitize({ - ...this, - request: { ...this.request, agent }, - response: this.response - })}`; - }, - enumerable: false + var __awaiter2 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { + function adopt(value) { + return value instanceof P ? value : new P(function(resolve6) { + resolve6(value); }); - Object.setPrototypeOf(this, _RestError.prototype); } + return new (P || (P = Promise))(function(resolve6, reject) { + function fulfilled(value) { + try { + step(generator.next(value)); + } catch (e) { + reject(e); + } + } + function rejected(value) { + try { + step(generator["throw"](value)); + } catch (e) { + reject(e); + } + } + function step(result) { + result.done ? 
resolve6(result.value) : adopt(result.value).then(fulfilled, rejected); + } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); }; - exports2.RestError = RestError; - function isRestError(e) { - if (e instanceof RestError) { - return true; - } - return (0, error_js_1.isError)(e) && e.name === "RestError"; - } - } -}); - -// node_modules/@typespec/ts-http-runtime/dist/commonjs/util/bytesEncoding.js -var require_bytesEncoding = __commonJS({ - "node_modules/@typespec/ts-http-runtime/dist/commonjs/util/bytesEncoding.js"(exports2) { - "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.uint8ArrayToString = uint8ArrayToString; - exports2.stringToUint8Array = stringToUint8Array; - function uint8ArrayToString(bytes, format) { - return Buffer.from(bytes).toString(format); + exports2.create = create; + exports2.hashFiles = hashFiles; + var internal_globber_1 = require_internal_globber(); + var internal_hash_files_1 = require_internal_hash_files(); + function create(patterns, options) { + return __awaiter2(this, void 0, void 0, function* () { + return yield internal_globber_1.DefaultGlobber.create(patterns, options); + }); } - function stringToUint8Array(value, format) { - return Buffer.from(value, format); + function hashFiles(patterns_1) { + return __awaiter2(this, arguments, void 0, function* (patterns, currentWorkspace = "", options, verbose = false) { + let followSymbolicLinks = true; + if (options && typeof options.followSymbolicLinks === "boolean") { + followSymbolicLinks = options.followSymbolicLinks; + } + const globber = yield create(patterns, { followSymbolicLinks }); + return (0, internal_hash_files_1.hashFiles)(globber, currentWorkspace, verbose); + }); } } }); -// node_modules/@typespec/ts-http-runtime/dist/commonjs/log.js -var require_log2 = __commonJS({ - "node_modules/@typespec/ts-http-runtime/dist/commonjs/log.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.logger = void 0; - var logger_js_1 = require_logger(); - exports2.logger = (0, logger_js_1.createClientLogger)("ts-http-runtime"); - } -}); - -// node_modules/@typespec/ts-http-runtime/dist/commonjs/nodeHttpClient.js -var require_nodeHttpClient = __commonJS({ - "node_modules/@typespec/ts-http-runtime/dist/commonjs/nodeHttpClient.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.getBodyLength = getBodyLength; - exports2.createNodeHttpClient = createNodeHttpClient; - var tslib_1 = (init_tslib_es6(), __toCommonJS(tslib_es6_exports)); - var node_http_1 = tslib_1.__importDefault(require("node:http")); - var node_https_1 = tslib_1.__importDefault(require("node:https")); - var node_zlib_1 = tslib_1.__importDefault(require("node:zlib")); - var node_stream_1 = require("node:stream"); - var AbortError_js_1 = require_AbortError(); - var httpHeaders_js_1 = require_httpHeaders(); - var restError_js_1 = require_restError(); - var log_js_1 = require_log2(); - var sanitizer_js_1 = require_sanitizer(); - var DEFAULT_TLS_SETTINGS = {}; - function isReadableStream(body) { - return body && typeof body.pipe === "function"; +// node_modules/@actions/cache/node_modules/semver/semver.js +var require_semver3 = __commonJS({ + "node_modules/@actions/cache/node_modules/semver/semver.js"(exports2, module2) { + exports2 = module2.exports = SemVer; + var debug6; + if (typeof process === "object" && process.env && process.env.NODE_DEBUG && 
/\bsemver\b/i.test(process.env.NODE_DEBUG)) { + debug6 = function() { + var args = Array.prototype.slice.call(arguments, 0); + args.unshift("SEMVER"); + console.log.apply(console, args); + }; + } else { + debug6 = function() { + }; } - function isStreamComplete(stream2) { - if (stream2.readable === false) { - return Promise.resolve(); + exports2.SEMVER_SPEC_VERSION = "2.0.0"; + var MAX_LENGTH = 256; + var MAX_SAFE_INTEGER = Number.MAX_SAFE_INTEGER || /* istanbul ignore next */ + 9007199254740991; + var MAX_SAFE_COMPONENT_LENGTH = 16; + var MAX_SAFE_BUILD_LENGTH = MAX_LENGTH - 6; + var re = exports2.re = []; + var safeRe = exports2.safeRe = []; + var src = exports2.src = []; + var t = exports2.tokens = {}; + var R = 0; + function tok(n) { + t[n] = R++; + } + var LETTERDASHNUMBER = "[a-zA-Z0-9-]"; + var safeRegexReplacements = [ + ["\\s", 1], + ["\\d", MAX_LENGTH], + [LETTERDASHNUMBER, MAX_SAFE_BUILD_LENGTH] + ]; + function makeSafeRe(value) { + for (var i2 = 0; i2 < safeRegexReplacements.length; i2++) { + var token = safeRegexReplacements[i2][0]; + var max = safeRegexReplacements[i2][1]; + value = value.split(token + "*").join(token + "{0," + max + "}").split(token + "+").join(token + "{1," + max + "}"); } - return new Promise((resolve6) => { - const handler2 = () => { - resolve6(); - stream2.removeListener("close", handler2); - stream2.removeListener("end", handler2); - stream2.removeListener("error", handler2); - }; - stream2.on("close", handler2); - stream2.on("end", handler2); - stream2.on("error", handler2); - }); + return value; } - function isArrayBuffer(body) { - return body && typeof body.byteLength === "number"; + tok("NUMERICIDENTIFIER"); + src[t.NUMERICIDENTIFIER] = "0|[1-9]\\d*"; + tok("NUMERICIDENTIFIERLOOSE"); + src[t.NUMERICIDENTIFIERLOOSE] = "\\d+"; + tok("NONNUMERICIDENTIFIER"); + src[t.NONNUMERICIDENTIFIER] = "\\d*[a-zA-Z-]" + LETTERDASHNUMBER + "*"; + tok("MAINVERSION"); + src[t.MAINVERSION] = "(" + src[t.NUMERICIDENTIFIER] + ")\\.(" + src[t.NUMERICIDENTIFIER] + ")\\.(" + src[t.NUMERICIDENTIFIER] + ")"; + tok("MAINVERSIONLOOSE"); + src[t.MAINVERSIONLOOSE] = "(" + src[t.NUMERICIDENTIFIERLOOSE] + ")\\.(" + src[t.NUMERICIDENTIFIERLOOSE] + ")\\.(" + src[t.NUMERICIDENTIFIERLOOSE] + ")"; + tok("PRERELEASEIDENTIFIER"); + src[t.PRERELEASEIDENTIFIER] = "(?:" + src[t.NUMERICIDENTIFIER] + "|" + src[t.NONNUMERICIDENTIFIER] + ")"; + tok("PRERELEASEIDENTIFIERLOOSE"); + src[t.PRERELEASEIDENTIFIERLOOSE] = "(?:" + src[t.NUMERICIDENTIFIERLOOSE] + "|" + src[t.NONNUMERICIDENTIFIER] + ")"; + tok("PRERELEASE"); + src[t.PRERELEASE] = "(?:-(" + src[t.PRERELEASEIDENTIFIER] + "(?:\\." + src[t.PRERELEASEIDENTIFIER] + ")*))"; + tok("PRERELEASELOOSE"); + src[t.PRERELEASELOOSE] = "(?:-?(" + src[t.PRERELEASEIDENTIFIERLOOSE] + "(?:\\." + src[t.PRERELEASEIDENTIFIERLOOSE] + ")*))"; + tok("BUILDIDENTIFIER"); + src[t.BUILDIDENTIFIER] = LETTERDASHNUMBER + "+"; + tok("BUILD"); + src[t.BUILD] = "(?:\\+(" + src[t.BUILDIDENTIFIER] + "(?:\\." + src[t.BUILDIDENTIFIER] + ")*))"; + tok("FULL"); + tok("FULLPLAIN"); + src[t.FULLPLAIN] = "v?" + src[t.MAINVERSION] + src[t.PRERELEASE] + "?" + src[t.BUILD] + "?"; + src[t.FULL] = "^" + src[t.FULLPLAIN] + "$"; + tok("LOOSEPLAIN"); + src[t.LOOSEPLAIN] = "[v=\\s]*" + src[t.MAINVERSIONLOOSE] + src[t.PRERELEASELOOSE] + "?" 
+ src[t.BUILD] + "?"; + tok("LOOSE"); + src[t.LOOSE] = "^" + src[t.LOOSEPLAIN] + "$"; + tok("GTLT"); + src[t.GTLT] = "((?:<|>)?=?)"; + tok("XRANGEIDENTIFIERLOOSE"); + src[t.XRANGEIDENTIFIERLOOSE] = src[t.NUMERICIDENTIFIERLOOSE] + "|x|X|\\*"; + tok("XRANGEIDENTIFIER"); + src[t.XRANGEIDENTIFIER] = src[t.NUMERICIDENTIFIER] + "|x|X|\\*"; + tok("XRANGEPLAIN"); + src[t.XRANGEPLAIN] = "[v=\\s]*(" + src[t.XRANGEIDENTIFIER] + ")(?:\\.(" + src[t.XRANGEIDENTIFIER] + ")(?:\\.(" + src[t.XRANGEIDENTIFIER] + ")(?:" + src[t.PRERELEASE] + ")?" + src[t.BUILD] + "?)?)?"; + tok("XRANGEPLAINLOOSE"); + src[t.XRANGEPLAINLOOSE] = "[v=\\s]*(" + src[t.XRANGEIDENTIFIERLOOSE] + ")(?:\\.(" + src[t.XRANGEIDENTIFIERLOOSE] + ")(?:\\.(" + src[t.XRANGEIDENTIFIERLOOSE] + ")(?:" + src[t.PRERELEASELOOSE] + ")?" + src[t.BUILD] + "?)?)?"; + tok("XRANGE"); + src[t.XRANGE] = "^" + src[t.GTLT] + "\\s*" + src[t.XRANGEPLAIN] + "$"; + tok("XRANGELOOSE"); + src[t.XRANGELOOSE] = "^" + src[t.GTLT] + "\\s*" + src[t.XRANGEPLAINLOOSE] + "$"; + tok("COERCE"); + src[t.COERCE] = "(^|[^\\d])(\\d{1," + MAX_SAFE_COMPONENT_LENGTH + "})(?:\\.(\\d{1," + MAX_SAFE_COMPONENT_LENGTH + "}))?(?:\\.(\\d{1," + MAX_SAFE_COMPONENT_LENGTH + "}))?(?:$|[^\\d])"; + tok("COERCERTL"); + re[t.COERCERTL] = new RegExp(src[t.COERCE], "g"); + safeRe[t.COERCERTL] = new RegExp(makeSafeRe(src[t.COERCE]), "g"); + tok("LONETILDE"); + src[t.LONETILDE] = "(?:~>?)"; + tok("TILDETRIM"); + src[t.TILDETRIM] = "(\\s*)" + src[t.LONETILDE] + "\\s+"; + re[t.TILDETRIM] = new RegExp(src[t.TILDETRIM], "g"); + safeRe[t.TILDETRIM] = new RegExp(makeSafeRe(src[t.TILDETRIM]), "g"); + var tildeTrimReplace = "$1~"; + tok("TILDE"); + src[t.TILDE] = "^" + src[t.LONETILDE] + src[t.XRANGEPLAIN] + "$"; + tok("TILDELOOSE"); + src[t.TILDELOOSE] = "^" + src[t.LONETILDE] + src[t.XRANGEPLAINLOOSE] + "$"; + tok("LONECARET"); + src[t.LONECARET] = "(?:\\^)"; + tok("CARETTRIM"); + src[t.CARETTRIM] = "(\\s*)" + src[t.LONECARET] + "\\s+"; + re[t.CARETTRIM] = new RegExp(src[t.CARETTRIM], "g"); + safeRe[t.CARETTRIM] = new RegExp(makeSafeRe(src[t.CARETTRIM]), "g"); + var caretTrimReplace = "$1^"; + tok("CARET"); + src[t.CARET] = "^" + src[t.LONECARET] + src[t.XRANGEPLAIN] + "$"; + tok("CARETLOOSE"); + src[t.CARETLOOSE] = "^" + src[t.LONECARET] + src[t.XRANGEPLAINLOOSE] + "$"; + tok("COMPARATORLOOSE"); + src[t.COMPARATORLOOSE] = "^" + src[t.GTLT] + "\\s*(" + src[t.LOOSEPLAIN] + ")$|^$"; + tok("COMPARATOR"); + src[t.COMPARATOR] = "^" + src[t.GTLT] + "\\s*(" + src[t.FULLPLAIN] + ")$|^$"; + tok("COMPARATORTRIM"); + src[t.COMPARATORTRIM] = "(\\s*)" + src[t.GTLT] + "\\s*(" + src[t.LOOSEPLAIN] + "|" + src[t.XRANGEPLAIN] + ")"; + re[t.COMPARATORTRIM] = new RegExp(src[t.COMPARATORTRIM], "g"); + safeRe[t.COMPARATORTRIM] = new RegExp(makeSafeRe(src[t.COMPARATORTRIM]), "g"); + var comparatorTrimReplace = "$1$2$3"; + tok("HYPHENRANGE"); + src[t.HYPHENRANGE] = "^\\s*(" + src[t.XRANGEPLAIN] + ")\\s+-\\s+(" + src[t.XRANGEPLAIN] + ")\\s*$"; + tok("HYPHENRANGELOOSE"); + src[t.HYPHENRANGELOOSE] = "^\\s*(" + src[t.XRANGEPLAINLOOSE] + ")\\s+-\\s+(" + src[t.XRANGEPLAINLOOSE] + ")\\s*$"; + tok("STAR"); + src[t.STAR] = "(<|>)?=?\\s*\\*"; + for (i = 0; i < R; i++) { + debug6(i, src[i]); + if (!re[i]) { + re[i] = new RegExp(src[i]); + safeRe[i] = new RegExp(makeSafeRe(src[i])); + } } - var ReportTransform = class extends node_stream_1.Transform { - loadedBytes = 0; - progressCallback; - // eslint-disable-next-line @typescript-eslint/no-unsafe-function-type - _transform(chunk, _encoding, callback) { - this.push(chunk); - this.loadedBytes 
+= chunk.length; - try { - this.progressCallback({ loadedBytes: this.loadedBytes }); - callback(); - } catch (e) { - callback(e); + var i; + exports2.parse = parse2; + function parse2(version, options) { + if (!options || typeof options !== "object") { + options = { + loose: !!options, + includePrerelease: false + }; + } + if (version instanceof SemVer) { + return version; + } + if (typeof version !== "string") { + return null; + } + if (version.length > MAX_LENGTH) { + return null; + } + var r = options.loose ? safeRe[t.LOOSE] : safeRe[t.FULL]; + if (!r.test(version)) { + return null; + } + try { + return new SemVer(version, options); + } catch (er) { + return null; + } + } + exports2.valid = valid3; + function valid3(version, options) { + var v = parse2(version, options); + return v ? v.version : null; + } + exports2.clean = clean3; + function clean3(version, options) { + var s = parse2(version.trim().replace(/^[=v]+/, ""), options); + return s ? s.version : null; + } + exports2.SemVer = SemVer; + function SemVer(version, options) { + if (!options || typeof options !== "object") { + options = { + loose: !!options, + includePrerelease: false + }; + } + if (version instanceof SemVer) { + if (version.loose === options.loose) { + return version; + } else { + version = version.version; } + } else if (typeof version !== "string") { + throw new TypeError("Invalid Version: " + version); } - constructor(progressCallback) { - super(); - this.progressCallback = progressCallback; + if (version.length > MAX_LENGTH) { + throw new TypeError("version is longer than " + MAX_LENGTH + " characters"); } - }; - var NodeHttpClient = class { - cachedHttpAgent; - cachedHttpsAgents = /* @__PURE__ */ new WeakMap(); - /** - * Makes a request over an underlying transport layer and returns the response. - * @param request - The request to be made. - */ - async sendRequest(request2) { - const abortController = new AbortController(); - let abortListener; - if (request2.abortSignal) { - if (request2.abortSignal.aborted) { - throw new AbortError_js_1.AbortError("The operation was aborted. Request has already been canceled."); - } - abortListener = (event) => { - if (event.type === "abort") { - abortController.abort(); + if (!(this instanceof SemVer)) { + return new SemVer(version, options); + } + debug6("SemVer", version, options); + this.options = options; + this.loose = !!options.loose; + var m = version.trim().match(options.loose ? safeRe[t.LOOSE] : safeRe[t.FULL]); + if (!m) { + throw new TypeError("Invalid Version: " + version); + } + this.raw = version; + this.major = +m[1]; + this.minor = +m[2]; + this.patch = +m[3]; + if (this.major > MAX_SAFE_INTEGER || this.major < 0) { + throw new TypeError("Invalid major version"); + } + if (this.minor > MAX_SAFE_INTEGER || this.minor < 0) { + throw new TypeError("Invalid minor version"); + } + if (this.patch > MAX_SAFE_INTEGER || this.patch < 0) { + throw new TypeError("Invalid patch version"); + } + if (!m[4]) { + this.prerelease = []; + } else { + this.prerelease = m[4].split(".").map(function(id) { + if (/^[0-9]+$/.test(id)) { + var num = +id; + if (num >= 0 && num < MAX_SAFE_INTEGER) { + return num; } - }; - request2.abortSignal.addEventListener("abort", abortListener); - } - let timeoutId; - if (request2.timeout > 0) { - timeoutId = setTimeout(() => { - const sanitizer = new sanitizer_js_1.Sanitizer(); - log_js_1.logger.info(`request to '${sanitizer.sanitizeUrl(request2.url)}' timed out. 
canceling...`); - abortController.abort(); - }, request2.timeout); - } - const acceptEncoding = request2.headers.get("Accept-Encoding"); - const shouldDecompress = acceptEncoding?.includes("gzip") || acceptEncoding?.includes("deflate"); - let body = typeof request2.body === "function" ? request2.body() : request2.body; - if (body && !request2.headers.has("Content-Length")) { - const bodyLength = getBodyLength(body); - if (bodyLength !== null) { - request2.headers.set("Content-Length", bodyLength); } + return id; + }); + } + this.build = m[5] ? m[5].split(".") : []; + this.format(); + } + SemVer.prototype.format = function() { + this.version = this.major + "." + this.minor + "." + this.patch; + if (this.prerelease.length) { + this.version += "-" + this.prerelease.join("."); + } + return this.version; + }; + SemVer.prototype.toString = function() { + return this.version; + }; + SemVer.prototype.compare = function(other) { + debug6("SemVer.compare", this.version, this.options, other); + if (!(other instanceof SemVer)) { + other = new SemVer(other, this.options); + } + return this.compareMain(other) || this.comparePre(other); + }; + SemVer.prototype.compareMain = function(other) { + if (!(other instanceof SemVer)) { + other = new SemVer(other, this.options); + } + return compareIdentifiers(this.major, other.major) || compareIdentifiers(this.minor, other.minor) || compareIdentifiers(this.patch, other.patch); + }; + SemVer.prototype.comparePre = function(other) { + if (!(other instanceof SemVer)) { + other = new SemVer(other, this.options); + } + if (this.prerelease.length && !other.prerelease.length) { + return -1; + } else if (!this.prerelease.length && other.prerelease.length) { + return 1; + } else if (!this.prerelease.length && !other.prerelease.length) { + return 0; + } + var i2 = 0; + do { + var a = this.prerelease[i2]; + var b = other.prerelease[i2]; + debug6("prerelease compare", i2, a, b); + if (a === void 0 && b === void 0) { + return 0; + } else if (b === void 0) { + return 1; + } else if (a === void 0) { + return -1; + } else if (a === b) { + continue; + } else { + return compareIdentifiers(a, b); } - let responseStream; - try { - if (body && request2.onUploadProgress) { - const onUploadProgress = request2.onUploadProgress; - const uploadReportStream = new ReportTransform(onUploadProgress); - uploadReportStream.on("error", (e) => { - log_js_1.logger.error("Error in upload progress", e); - }); - if (isReadableStream(body)) { - body.pipe(uploadReportStream); - } else { - uploadReportStream.end(body); - } - body = uploadReportStream; + } while (++i2); + }; + SemVer.prototype.compareBuild = function(other) { + if (!(other instanceof SemVer)) { + other = new SemVer(other, this.options); + } + var i2 = 0; + do { + var a = this.build[i2]; + var b = other.build[i2]; + debug6("prerelease compare", i2, a, b); + if (a === void 0 && b === void 0) { + return 0; + } else if (b === void 0) { + return 1; + } else if (a === void 0) { + return -1; + } else if (a === b) { + continue; + } else { + return compareIdentifiers(a, b); + } + } while (++i2); + }; + SemVer.prototype.inc = function(release2, identifier) { + switch (release2) { + case "premajor": + this.prerelease.length = 0; + this.patch = 0; + this.minor = 0; + this.major++; + this.inc("pre", identifier); + break; + case "preminor": + this.prerelease.length = 0; + this.patch = 0; + this.minor++; + this.inc("pre", identifier); + break; + case "prepatch": + this.prerelease.length = 0; + this.inc("patch", identifier); + this.inc("pre", 
identifier); + break; + // If the input is a non-prerelease version, this acts the same as + // prepatch. + case "prerelease": + if (this.prerelease.length === 0) { + this.inc("patch", identifier); } - const res = await this.makeRequest(request2, abortController, body); - if (timeoutId !== void 0) { - clearTimeout(timeoutId); + this.inc("pre", identifier); + break; + case "major": + if (this.minor !== 0 || this.patch !== 0 || this.prerelease.length === 0) { + this.major++; } - const headers = getResponseHeaders(res); - const status = res.statusCode ?? 0; - const response = { - status, - headers, - request: request2 - }; - if (request2.method === "HEAD") { - res.resume(); - return response; + this.minor = 0; + this.patch = 0; + this.prerelease = []; + break; + case "minor": + if (this.patch !== 0 || this.prerelease.length === 0) { + this.minor++; } - responseStream = shouldDecompress ? getDecodedResponseStream(res, headers) : res; - const onDownloadProgress = request2.onDownloadProgress; - if (onDownloadProgress) { - const downloadReportStream = new ReportTransform(onDownloadProgress); - downloadReportStream.on("error", (e) => { - log_js_1.logger.error("Error in download progress", e); - }); - responseStream.pipe(downloadReportStream); - responseStream = downloadReportStream; + this.patch = 0; + this.prerelease = []; + break; + case "patch": + if (this.prerelease.length === 0) { + this.patch++; } - if ( - // Value of POSITIVE_INFINITY in streamResponseStatusCodes is considered as any status code - request2.streamResponseStatusCodes?.has(Number.POSITIVE_INFINITY) || request2.streamResponseStatusCodes?.has(response.status) - ) { - response.readableStreamBody = responseStream; + this.prerelease = []; + break; + // This probably shouldn't be used publicly. + // 1.0.0 "pre" would become 1.0.0-0 which is the wrong direction. 
+ case "pre": + if (this.prerelease.length === 0) { + this.prerelease = [0]; } else { - response.bodyAsText = await streamToText(responseStream); - } - return response; - } finally { - if (request2.abortSignal && abortListener) { - let uploadStreamDone = Promise.resolve(); - if (isReadableStream(body)) { - uploadStreamDone = isStreamComplete(body); + var i2 = this.prerelease.length; + while (--i2 >= 0) { + if (typeof this.prerelease[i2] === "number") { + this.prerelease[i2]++; + i2 = -2; + } } - let downloadStreamDone = Promise.resolve(); - if (isReadableStream(responseStream)) { - downloadStreamDone = isStreamComplete(responseStream); + if (i2 === -1) { + this.prerelease.push(0); } - Promise.all([uploadStreamDone, downloadStreamDone]).then(() => { - if (abortListener) { - request2.abortSignal?.removeEventListener("abort", abortListener); + } + if (identifier) { + if (this.prerelease[0] === identifier) { + if (isNaN(this.prerelease[1])) { + this.prerelease = [identifier, 0]; } - }).catch((e) => { - log_js_1.logger.warning("Error when cleaning up abortListener on httpRequest", e); - }); + } else { + this.prerelease = [identifier, 0]; + } } - } + break; + default: + throw new Error("invalid increment argument: " + release2); } - makeRequest(request2, abortController, body) { - const url2 = new URL(request2.url); - const isInsecure = url2.protocol !== "https:"; - if (isInsecure && !request2.allowInsecureConnection) { - throw new Error(`Cannot connect to ${request2.url} while allowInsecureConnection is false.`); + this.format(); + this.raw = this.version; + return this; + }; + exports2.inc = inc; + function inc(version, release2, loose, identifier) { + if (typeof loose === "string") { + identifier = loose; + loose = void 0; + } + try { + return new SemVer(version, loose).inc(release2, identifier).version; + } catch (er) { + return null; + } + } + exports2.diff = diff; + function diff(version1, version2) { + if (eq(version1, version2)) { + return null; + } else { + var v1 = parse2(version1); + var v2 = parse2(version2); + var prefix = ""; + if (v1.prerelease.length || v2.prerelease.length) { + prefix = "pre"; + var defaultResult = "prerelease"; } - const agent = request2.agent ?? this.getOrCreateAgent(request2, isInsecure); - const options = { - agent, - hostname: url2.hostname, - path: `${url2.pathname}${url2.search}`, - port: url2.port, - method: request2.method, - headers: request2.headers.toJSON({ preserveCase: true }), - ...request2.requestOverrides - }; - return new Promise((resolve6, reject) => { - const req = isInsecure ? node_http_1.default.request(options, resolve6) : node_https_1.default.request(options, resolve6); - req.once("error", (err) => { - reject(new restError_js_1.RestError(err.message, { code: err.code ?? restError_js_1.RestError.REQUEST_SEND_ERROR, request: request2 })); - }); - abortController.signal.addEventListener("abort", () => { - const abortError = new AbortError_js_1.AbortError("The operation was aborted. Rejecting from abort signal callback while making request."); - req.destroy(abortError); - reject(abortError); - }); - if (body && isReadableStream(body)) { - body.pipe(req); - } else if (body) { - if (typeof body === "string" || Buffer.isBuffer(body)) { - req.end(body); - } else if (isArrayBuffer(body)) { - req.end(ArrayBuffer.isView(body) ? 
Buffer.from(body.buffer) : Buffer.from(body)); - } else { - log_js_1.logger.error("Unrecognized body type", body); - reject(new restError_js_1.RestError("Unrecognized body type")); + for (var key in v1) { + if (key === "major" || key === "minor" || key === "patch") { + if (v1[key] !== v2[key]) { + return prefix + key; } - } else { - req.end(); } - }); + } + return defaultResult; } - getOrCreateAgent(request2, isInsecure) { - const disableKeepAlive = request2.disableKeepAlive; - if (isInsecure) { - if (disableKeepAlive) { - return node_http_1.default.globalAgent; - } - if (!this.cachedHttpAgent) { - this.cachedHttpAgent = new node_http_1.default.Agent({ keepAlive: true }); - } - return this.cachedHttpAgent; - } else { - if (disableKeepAlive && !request2.tlsSettings) { - return node_https_1.default.globalAgent; - } - const tlsSettings = request2.tlsSettings ?? DEFAULT_TLS_SETTINGS; - let agent = this.cachedHttpsAgents.get(tlsSettings); - if (agent && agent.options.keepAlive === !disableKeepAlive) { - return agent; - } - log_js_1.logger.info("No cached TLS Agent exist, creating a new Agent"); - agent = new node_https_1.default.Agent({ - // keepAlive is true if disableKeepAlive is false. - keepAlive: !disableKeepAlive, - // Since we are spreading, if no tslSettings were provided, nothing is added to the agent options. - ...tlsSettings - }); - this.cachedHttpsAgents.set(tlsSettings, agent); - return agent; - } - } - }; - function getResponseHeaders(res) { - const headers = (0, httpHeaders_js_1.createHttpHeaders)(); - for (const header of Object.keys(res.headers)) { - const value = res.headers[header]; - if (Array.isArray(value)) { - if (value.length > 0) { - headers.set(header, value[0]); - } - } else if (value) { - headers.set(header, value); - } - } - return headers; } - function getDecodedResponseStream(stream2, headers) { - const contentEncoding = headers.get("Content-Encoding"); - if (contentEncoding === "gzip") { - const unzip = node_zlib_1.default.createGunzip(); - stream2.pipe(unzip); - return unzip; - } else if (contentEncoding === "deflate") { - const inflate = node_zlib_1.default.createInflate(); - stream2.pipe(inflate); - return inflate; + exports2.compareIdentifiers = compareIdentifiers; + var numeric = /^[0-9]+$/; + function compareIdentifiers(a, b) { + var anum = numeric.test(a); + var bnum = numeric.test(b); + if (anum && bnum) { + a = +a; + b = +b; } - return stream2; - } - function streamToText(stream2) { - return new Promise((resolve6, reject) => { - const buffer = []; - stream2.on("data", (chunk) => { - if (Buffer.isBuffer(chunk)) { - buffer.push(chunk); - } else { - buffer.push(Buffer.from(chunk)); - } - }); - stream2.on("end", () => { - resolve6(Buffer.concat(buffer).toString("utf8")); - }); - stream2.on("error", (e) => { - if (e && e?.name === "AbortError") { - reject(e); - } else { - reject(new restError_js_1.RestError(`Error reading response as text: ${e.message}`, { - code: restError_js_1.RestError.PARSE_ERROR - })); - } - }); - }); + return a === b ? 0 : anum && !bnum ? -1 : bnum && !anum ? 1 : a < b ? 
-1 : 1; } - function getBodyLength(body) { - if (!body) { - return 0; - } else if (Buffer.isBuffer(body)) { - return body.length; - } else if (isReadableStream(body)) { - return null; - } else if (isArrayBuffer(body)) { - return body.byteLength; - } else if (typeof body === "string") { - return Buffer.from(body).length; - } else { - return null; - } + exports2.rcompareIdentifiers = rcompareIdentifiers; + function rcompareIdentifiers(a, b) { + return compareIdentifiers(b, a); } - function createNodeHttpClient() { - return new NodeHttpClient(); + exports2.major = major; + function major(a, loose) { + return new SemVer(a, loose).major; } - } -}); - -// node_modules/@typespec/ts-http-runtime/dist/commonjs/defaultHttpClient.js -var require_defaultHttpClient = __commonJS({ - "node_modules/@typespec/ts-http-runtime/dist/commonjs/defaultHttpClient.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.createDefaultHttpClient = createDefaultHttpClient; - var nodeHttpClient_js_1 = require_nodeHttpClient(); - function createDefaultHttpClient() { - return (0, nodeHttpClient_js_1.createNodeHttpClient)(); + exports2.minor = minor; + function minor(a, loose) { + return new SemVer(a, loose).minor; } - } -}); - -// node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/logPolicy.js -var require_logPolicy = __commonJS({ - "node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/logPolicy.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.logPolicyName = void 0; - exports2.logPolicy = logPolicy; - var log_js_1 = require_log2(); - var sanitizer_js_1 = require_sanitizer(); - exports2.logPolicyName = "logPolicy"; - function logPolicy(options = {}) { - const logger = options.logger ?? 
log_js_1.logger.info; - const sanitizer = new sanitizer_js_1.Sanitizer({ - additionalAllowedHeaderNames: options.additionalAllowedHeaderNames, - additionalAllowedQueryParameters: options.additionalAllowedQueryParameters - }); - return { - name: exports2.logPolicyName, - async sendRequest(request2, next) { - if (!logger.enabled) { - return next(request2); - } - logger(`Request: ${sanitizer.sanitize(request2)}`); - const response = await next(request2); - logger(`Response status code: ${response.status}`); - logger(`Headers: ${sanitizer.sanitize(response.headers)}`); - return response; - } - }; + exports2.patch = patch; + function patch(a, loose) { + return new SemVer(a, loose).patch; } - } -}); - -// node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/redirectPolicy.js -var require_redirectPolicy = __commonJS({ - "node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/redirectPolicy.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.redirectPolicyName = void 0; - exports2.redirectPolicy = redirectPolicy; - exports2.redirectPolicyName = "redirectPolicy"; - var allowedRedirect = ["GET", "HEAD"]; - function redirectPolicy(options = {}) { - const { maxRetries = 20 } = options; - return { - name: exports2.redirectPolicyName, - async sendRequest(request2, next) { - const response = await next(request2); - return handleRedirect(next, response, maxRetries); - } - }; + exports2.compare = compare3; + function compare3(a, b, loose) { + return new SemVer(a, loose).compare(new SemVer(b, loose)); } - async function handleRedirect(next, response, maxRetries, currentRetries = 0) { - const { request: request2, status, headers } = response; - const locationHeader = headers.get("location"); - if (locationHeader && (status === 300 || status === 301 && allowedRedirect.includes(request2.method) || status === 302 && allowedRedirect.includes(request2.method) || status === 303 && request2.method === "POST" || status === 307) && currentRetries < maxRetries) { - const url2 = new URL(locationHeader, request2.url); - request2.url = url2.toString(); - if (status === 303) { - request2.method = "GET"; - request2.headers.delete("Content-Length"); - delete request2.body; - } - request2.headers.delete("Authorization"); - const res = await next(request2); - return handleRedirect(next, res, maxRetries, currentRetries + 1); - } - return response; + exports2.compareLoose = compareLoose; + function compareLoose(a, b) { + return compare3(a, b, true); } - } -}); - -// node_modules/@typespec/ts-http-runtime/dist/commonjs/util/userAgentPlatform.js -var require_userAgentPlatform = __commonJS({ - "node_modules/@typespec/ts-http-runtime/dist/commonjs/util/userAgentPlatform.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.getHeaderName = getHeaderName; - exports2.setPlatformSpecificData = setPlatformSpecificData; - var tslib_1 = (init_tslib_es6(), __toCommonJS(tslib_es6_exports)); - var node_os_1 = tslib_1.__importDefault(require("node:os")); - var node_process_1 = tslib_1.__importDefault(require("node:process")); - function getHeaderName() { - return "User-Agent"; + exports2.compareBuild = compareBuild; + function compareBuild(a, b, loose) { + var versionA = new SemVer(a, loose); + var versionB = new SemVer(b, loose); + return versionA.compare(versionB) || versionA.compareBuild(versionB); } - async function setPlatformSpecificData(map2) { - if (node_process_1.default && node_process_1.default.versions) { 
- const osInfo = `${node_os_1.default.type()} ${node_os_1.default.release()}; ${node_os_1.default.arch()}`; - const versions = node_process_1.default.versions; - if (versions.bun) { - map2.set("Bun", `${versions.bun} (${osInfo})`); - } else if (versions.deno) { - map2.set("Deno", `${versions.deno} (${osInfo})`); - } else if (versions.node) { - map2.set("Node", `${versions.node} (${osInfo})`); - } - } + exports2.rcompare = rcompare; + function rcompare(a, b, loose) { + return compare3(b, a, loose); } - } -}); - -// node_modules/@typespec/ts-http-runtime/dist/commonjs/constants.js -var require_constants13 = __commonJS({ - "node_modules/@typespec/ts-http-runtime/dist/commonjs/constants.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.DEFAULT_RETRY_POLICY_COUNT = exports2.SDK_VERSION = void 0; - exports2.SDK_VERSION = "0.3.2"; - exports2.DEFAULT_RETRY_POLICY_COUNT = 3; - } -}); - -// node_modules/@typespec/ts-http-runtime/dist/commonjs/util/userAgent.js -var require_userAgent = __commonJS({ - "node_modules/@typespec/ts-http-runtime/dist/commonjs/util/userAgent.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.getUserAgentHeaderName = getUserAgentHeaderName; - exports2.getUserAgentValue = getUserAgentValue; - var userAgentPlatform_js_1 = require_userAgentPlatform(); - var constants_js_1 = require_constants13(); - function getUserAgentString(telemetryInfo) { - const parts = []; - for (const [key, value] of telemetryInfo) { - const token = value ? `${key}/${value}` : key; - parts.push(token); - } - return parts.join(" "); + exports2.sort = sort; + function sort(list, loose) { + return list.sort(function(a, b) { + return exports2.compareBuild(a, b, loose); + }); } - function getUserAgentHeaderName() { - return (0, userAgentPlatform_js_1.getHeaderName)(); + exports2.rsort = rsort; + function rsort(list, loose) { + return list.sort(function(a, b) { + return exports2.compareBuild(b, a, loose); + }); } - async function getUserAgentValue(prefix) { - const runtimeInfo = /* @__PURE__ */ new Map(); - runtimeInfo.set("ts-http-runtime", constants_js_1.SDK_VERSION); - await (0, userAgentPlatform_js_1.setPlatformSpecificData)(runtimeInfo); - const defaultAgent = getUserAgentString(runtimeInfo); - const userAgentValue = prefix ? 
`${prefix} ${defaultAgent}` : defaultAgent; - return userAgentValue; + exports2.gt = gt; + function gt(a, b, loose) { + return compare3(a, b, loose) > 0; } - } -}); - -// node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/userAgentPolicy.js -var require_userAgentPolicy = __commonJS({ - "node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/userAgentPolicy.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.userAgentPolicyName = void 0; - exports2.userAgentPolicy = userAgentPolicy; - var userAgent_js_1 = require_userAgent(); - var UserAgentHeaderName = (0, userAgent_js_1.getUserAgentHeaderName)(); - exports2.userAgentPolicyName = "userAgentPolicy"; - function userAgentPolicy(options = {}) { - const userAgentValue = (0, userAgent_js_1.getUserAgentValue)(options.userAgentPrefix); - return { - name: exports2.userAgentPolicyName, - async sendRequest(request2, next) { - if (!request2.headers.has(UserAgentHeaderName)) { - request2.headers.set(UserAgentHeaderName, await userAgentValue); - } - return next(request2); - } - }; + exports2.lt = lt; + function lt(a, b, loose) { + return compare3(a, b, loose) < 0; } - } -}); - -// node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/decompressResponsePolicy.js -var require_decompressResponsePolicy = __commonJS({ - "node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/decompressResponsePolicy.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.decompressResponsePolicyName = void 0; - exports2.decompressResponsePolicy = decompressResponsePolicy; - exports2.decompressResponsePolicyName = "decompressResponsePolicy"; - function decompressResponsePolicy() { - return { - name: exports2.decompressResponsePolicyName, - async sendRequest(request2, next) { - if (request2.method !== "HEAD") { - request2.headers.set("Accept-Encoding", "gzip,deflate"); - } - return next(request2); - } - }; + exports2.eq = eq; + function eq(a, b, loose) { + return compare3(a, b, loose) === 0; } - } -}); - -// node_modules/@typespec/ts-http-runtime/dist/commonjs/util/random.js -var require_random = __commonJS({ - "node_modules/@typespec/ts-http-runtime/dist/commonjs/util/random.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.getRandomIntegerInclusive = getRandomIntegerInclusive; - function getRandomIntegerInclusive(min, max) { - min = Math.ceil(min); - max = Math.floor(max); - const offset = Math.floor(Math.random() * (max - min + 1)); - return offset + min; + exports2.neq = neq; + function neq(a, b, loose) { + return compare3(a, b, loose) !== 0; } - } -}); - -// node_modules/@typespec/ts-http-runtime/dist/commonjs/util/delay.js -var require_delay = __commonJS({ - "node_modules/@typespec/ts-http-runtime/dist/commonjs/util/delay.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.calculateRetryDelay = calculateRetryDelay; - var random_js_1 = require_random(); - function calculateRetryDelay(retryAttempt, config) { - const exponentialDelay = config.retryDelayInMs * Math.pow(2, retryAttempt); - const clampedDelay = Math.min(config.maxRetryDelayInMs, exponentialDelay); - const retryAfterInMs = clampedDelay / 2 + (0, random_js_1.getRandomIntegerInclusive)(0, clampedDelay / 2); - return { retryAfterInMs }; + exports2.gte = gte6; + function gte6(a, b, loose) { + return compare3(a, b, loose) >= 0; } - } -}); - -// 
node_modules/@typespec/ts-http-runtime/dist/commonjs/util/helpers.js -var require_helpers = __commonJS({ - "node_modules/@typespec/ts-http-runtime/dist/commonjs/util/helpers.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.delay = delay2; - exports2.parseHeaderValueAsNumber = parseHeaderValueAsNumber; - var AbortError_js_1 = require_AbortError(); - var StandardAbortMessage = "The operation was aborted."; - function delay2(delayInMs, value, options) { - return new Promise((resolve6, reject) => { - let timer = void 0; - let onAborted = void 0; - const rejectOnAbort = () => { - return reject(new AbortError_js_1.AbortError(options?.abortErrorMsg ? options?.abortErrorMsg : StandardAbortMessage)); - }; - const removeListeners = () => { - if (options?.abortSignal && onAborted) { - options.abortSignal.removeEventListener("abort", onAborted); - } - }; - onAborted = () => { - if (timer) { - clearTimeout(timer); - } - removeListeners(); - return rejectOnAbort(); - }; - if (options?.abortSignal && options.abortSignal.aborted) { - return rejectOnAbort(); - } - timer = setTimeout(() => { - removeListeners(); - resolve6(value); - }, delayInMs); - if (options?.abortSignal) { - options.abortSignal.addEventListener("abort", onAborted); - } - }); + exports2.lte = lte; + function lte(a, b, loose) { + return compare3(a, b, loose) <= 0; } - function parseHeaderValueAsNumber(response, headerName) { - const value = response.headers.get(headerName); - if (!value) - return; - const valueAsNum = Number(value); - if (Number.isNaN(valueAsNum)) - return; - return valueAsNum; + exports2.cmp = cmp; + function cmp(a, op, b, loose) { + switch (op) { + case "===": + if (typeof a === "object") + a = a.version; + if (typeof b === "object") + b = b.version; + return a === b; + case "!==": + if (typeof a === "object") + a = a.version; + if (typeof b === "object") + b = b.version; + return a !== b; + case "": + case "=": + case "==": + return eq(a, b, loose); + case "!=": + return neq(a, b, loose); + case ">": + return gt(a, b, loose); + case ">=": + return gte6(a, b, loose); + case "<": + return lt(a, b, loose); + case "<=": + return lte(a, b, loose); + default: + throw new TypeError("Invalid operator: " + op); + } } - } -}); - -// node_modules/@typespec/ts-http-runtime/dist/commonjs/retryStrategies/throttlingRetryStrategy.js -var require_throttlingRetryStrategy = __commonJS({ - "node_modules/@typespec/ts-http-runtime/dist/commonjs/retryStrategies/throttlingRetryStrategy.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.isThrottlingRetryResponse = isThrottlingRetryResponse; - exports2.throttlingRetryStrategy = throttlingRetryStrategy; - var helpers_js_1 = require_helpers(); - var RetryAfterHeader = "Retry-After"; - var AllRetryAfterHeaders = ["retry-after-ms", "x-ms-retry-after-ms", RetryAfterHeader]; - function getRetryAfterInMs(response) { - if (!(response && [429, 503].includes(response.status))) - return void 0; - try { - for (const header of AllRetryAfterHeaders) { - const retryAfterValue = (0, helpers_js_1.parseHeaderValueAsNumber)(response, header); - if (retryAfterValue === 0 || retryAfterValue) { - const multiplyingFactor = header === RetryAfterHeader ? 
1e3 : 1; - return retryAfterValue * multiplyingFactor; - } + exports2.Comparator = Comparator; + function Comparator(comp, options) { + if (!options || typeof options !== "object") { + options = { + loose: !!options, + includePrerelease: false + }; + } + if (comp instanceof Comparator) { + if (comp.loose === !!options.loose) { + return comp; + } else { + comp = comp.value; } - const retryAfterHeader = response.headers.get(RetryAfterHeader); - if (!retryAfterHeader) - return; - const date = Date.parse(retryAfterHeader); - const diff = date - Date.now(); - return Number.isFinite(diff) ? Math.max(0, diff) : void 0; - } catch { - return void 0; } - } - function isThrottlingRetryResponse(response) { - return Number.isFinite(getRetryAfterInMs(response)); - } - function throttlingRetryStrategy() { - return { - name: "throttlingRetryStrategy", - retry({ response }) { - const retryAfterInMs = getRetryAfterInMs(response); - if (!Number.isFinite(retryAfterInMs)) { - return { skipStrategy: true }; - } - return { - retryAfterInMs - }; - } - }; - } - } -}); - -// node_modules/@typespec/ts-http-runtime/dist/commonjs/retryStrategies/exponentialRetryStrategy.js -var require_exponentialRetryStrategy = __commonJS({ - "node_modules/@typespec/ts-http-runtime/dist/commonjs/retryStrategies/exponentialRetryStrategy.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.exponentialRetryStrategy = exponentialRetryStrategy; - exports2.isExponentialRetryResponse = isExponentialRetryResponse; - exports2.isSystemError = isSystemError; - var delay_js_1 = require_delay(); - var throttlingRetryStrategy_js_1 = require_throttlingRetryStrategy(); - var DEFAULT_CLIENT_RETRY_INTERVAL = 1e3; - var DEFAULT_CLIENT_MAX_RETRY_INTERVAL = 1e3 * 64; - function exponentialRetryStrategy(options = {}) { - const retryInterval = options.retryDelayInMs ?? DEFAULT_CLIENT_RETRY_INTERVAL; - const maxRetryInterval = options.maxRetryDelayInMs ?? 
DEFAULT_CLIENT_MAX_RETRY_INTERVAL; - return { - name: "exponentialRetryStrategy", - retry({ retryCount, response, responseError }) { - const matchedSystemError = isSystemError(responseError); - const ignoreSystemErrors = matchedSystemError && options.ignoreSystemErrors; - const isExponential = isExponentialRetryResponse(response); - const ignoreExponentialResponse = isExponential && options.ignoreHttpStatusCodes; - const unknownResponse = response && ((0, throttlingRetryStrategy_js_1.isThrottlingRetryResponse)(response) || !isExponential); - if (unknownResponse || ignoreExponentialResponse || ignoreSystemErrors) { - return { skipStrategy: true }; - } - if (responseError && !matchedSystemError && !isExponential) { - return { errorToThrow: responseError }; - } - return (0, delay_js_1.calculateRetryDelay)(retryCount, { - retryDelayInMs: retryInterval, - maxRetryDelayInMs: maxRetryInterval - }); - } - }; - } - function isExponentialRetryResponse(response) { - return Boolean(response && response.status !== void 0 && (response.status >= 500 || response.status === 408) && response.status !== 501 && response.status !== 505); - } - function isSystemError(err) { - if (!err) { - return false; - } - return err.code === "ETIMEDOUT" || err.code === "ESOCKETTIMEDOUT" || err.code === "ECONNREFUSED" || err.code === "ECONNRESET" || err.code === "ENOENT" || err.code === "ENOTFOUND"; - } - } -}); - -// node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/retryPolicy.js -var require_retryPolicy = __commonJS({ - "node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/retryPolicy.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.retryPolicy = retryPolicy; - var helpers_js_1 = require_helpers(); - var AbortError_js_1 = require_AbortError(); - var logger_js_1 = require_logger(); - var constants_js_1 = require_constants13(); - var retryPolicyLogger = (0, logger_js_1.createClientLogger)("ts-http-runtime retryPolicy"); - var retryPolicyName = "retryPolicy"; - function retryPolicy(strategies, options = { maxRetries: constants_js_1.DEFAULT_RETRY_POLICY_COUNT }) { - const logger = options.logger || retryPolicyLogger; - return { - name: retryPolicyName, - async sendRequest(request2, next) { - let response; - let responseError; - let retryCount = -1; - retryRequest: while (true) { - retryCount += 1; - response = void 0; - responseError = void 0; - try { - logger.info(`Retry ${retryCount}: Attempting to send request`, request2.requestId); - response = await next(request2); - logger.info(`Retry ${retryCount}: Received a response from request`, request2.requestId); - } catch (e) { - logger.error(`Retry ${retryCount}: Received an error from request`, request2.requestId); - responseError = e; - if (!e || responseError.name !== "RestError") { - throw e; - } - response = responseError.response; - } - if (request2.abortSignal?.aborted) { - logger.error(`Retry ${retryCount}: Request aborted.`); - const abortError = new AbortError_js_1.AbortError(); - throw abortError; - } - if (retryCount >= (options.maxRetries ?? constants_js_1.DEFAULT_RETRY_POLICY_COUNT)) { - logger.info(`Retry ${retryCount}: Maximum retries reached. 
Returning the last received response, or throwing the last received error.`); - if (responseError) { - throw responseError; - } else if (response) { - return response; - } else { - throw new Error("Maximum retries reached with no response or error to throw"); - } - } - logger.info(`Retry ${retryCount}: Processing ${strategies.length} retry strategies.`); - strategiesLoop: for (const strategy of strategies) { - const strategyLogger = strategy.logger || logger; - strategyLogger.info(`Retry ${retryCount}: Processing retry strategy ${strategy.name}.`); - const modifiers = strategy.retry({ - retryCount, - response, - responseError - }); - if (modifiers.skipStrategy) { - strategyLogger.info(`Retry ${retryCount}: Skipped.`); - continue strategiesLoop; - } - const { errorToThrow, retryAfterInMs, redirectTo } = modifiers; - if (errorToThrow) { - strategyLogger.error(`Retry ${retryCount}: Retry strategy ${strategy.name} throws error:`, errorToThrow); - throw errorToThrow; - } - if (retryAfterInMs || retryAfterInMs === 0) { - strategyLogger.info(`Retry ${retryCount}: Retry strategy ${strategy.name} retries after ${retryAfterInMs}`); - await (0, helpers_js_1.delay)(retryAfterInMs, void 0, { abortSignal: request2.abortSignal }); - continue retryRequest; - } - if (redirectTo) { - strategyLogger.info(`Retry ${retryCount}: Retry strategy ${strategy.name} redirects to ${redirectTo}`); - request2.url = redirectTo; - continue retryRequest; - } - } - if (responseError) { - logger.info(`None of the retry strategies could work with the received error. Throwing it.`); - throw responseError; - } - if (response) { - logger.info(`None of the retry strategies could work with the received response. Returning it.`); - return response; - } - } - } - }; - } - } -}); - -// node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/defaultRetryPolicy.js -var require_defaultRetryPolicy = __commonJS({ - "node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/defaultRetryPolicy.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.defaultRetryPolicyName = void 0; - exports2.defaultRetryPolicy = defaultRetryPolicy; - var exponentialRetryStrategy_js_1 = require_exponentialRetryStrategy(); - var throttlingRetryStrategy_js_1 = require_throttlingRetryStrategy(); - var retryPolicy_js_1 = require_retryPolicy(); - var constants_js_1 = require_constants13(); - exports2.defaultRetryPolicyName = "defaultRetryPolicy"; - function defaultRetryPolicy(options = {}) { - return { - name: exports2.defaultRetryPolicyName, - sendRequest: (0, retryPolicy_js_1.retryPolicy)([(0, throttlingRetryStrategy_js_1.throttlingRetryStrategy)(), (0, exponentialRetryStrategy_js_1.exponentialRetryStrategy)(options)], { - maxRetries: options.maxRetries ?? 
constants_js_1.DEFAULT_RETRY_POLICY_COUNT - }).sendRequest - }; - } - } -}); - -// node_modules/@typespec/ts-http-runtime/dist/commonjs/util/checkEnvironment.js -var require_checkEnvironment = __commonJS({ - "node_modules/@typespec/ts-http-runtime/dist/commonjs/util/checkEnvironment.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.isReactNative = exports2.isNodeRuntime = exports2.isNodeLike = exports2.isBun = exports2.isDeno = exports2.isWebWorker = exports2.isBrowser = void 0; - exports2.isBrowser = typeof window !== "undefined" && typeof window.document !== "undefined"; - exports2.isWebWorker = typeof self === "object" && typeof self?.importScripts === "function" && (self.constructor?.name === "DedicatedWorkerGlobalScope" || self.constructor?.name === "ServiceWorkerGlobalScope" || self.constructor?.name === "SharedWorkerGlobalScope"); - exports2.isDeno = typeof Deno !== "undefined" && typeof Deno.version !== "undefined" && typeof Deno.version.deno !== "undefined"; - exports2.isBun = typeof Bun !== "undefined" && typeof Bun.version !== "undefined"; - exports2.isNodeLike = typeof globalThis.process !== "undefined" && Boolean(globalThis.process.version) && Boolean(globalThis.process.versions?.node); - exports2.isNodeRuntime = exports2.isNodeLike && !exports2.isBun && !exports2.isDeno; - exports2.isReactNative = typeof navigator !== "undefined" && navigator?.product === "ReactNative"; - } -}); - -// node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/formDataPolicy.js -var require_formDataPolicy = __commonJS({ - "node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/formDataPolicy.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.formDataPolicyName = void 0; - exports2.formDataPolicy = formDataPolicy; - var bytesEncoding_js_1 = require_bytesEncoding(); - var checkEnvironment_js_1 = require_checkEnvironment(); - var httpHeaders_js_1 = require_httpHeaders(); - exports2.formDataPolicyName = "formDataPolicy"; - function formDataToFormDataMap(formData) { - const formDataMap = {}; - for (const [key, value] of formData.entries()) { - formDataMap[key] ??= []; - formDataMap[key].push(value); + if (!(this instanceof Comparator)) { + return new Comparator(comp, options); } - return formDataMap; - } - function formDataPolicy() { - return { - name: exports2.formDataPolicyName, - async sendRequest(request2, next) { - if (checkEnvironment_js_1.isNodeLike && typeof FormData !== "undefined" && request2.body instanceof FormData) { - request2.formData = formDataToFormDataMap(request2.body); - request2.body = void 0; - } - if (request2.formData) { - const contentType = request2.headers.get("Content-Type"); - if (contentType && contentType.indexOf("application/x-www-form-urlencoded") !== -1) { - request2.body = wwwFormUrlEncode(request2.formData); - } else { - await prepareFormData(request2.formData, request2); - } - request2.formData = void 0; - } - return next(request2); - } - }; - } - function wwwFormUrlEncode(formData) { - const urlSearchParams = new URLSearchParams(); - for (const [key, value] of Object.entries(formData)) { - if (Array.isArray(value)) { - for (const subValue of value) { - urlSearchParams.append(key, subValue.toString()); - } - } else { - urlSearchParams.append(key, value.toString()); - } + comp = comp.trim().split(/\s+/).join(" "); + debug6("comparator", comp, options); + this.options = options; + this.loose = !!options.loose; + this.parse(comp); + if 
(this.semver === ANY) { + this.value = ""; + } else { + this.value = this.operator + this.semver.version; } - return urlSearchParams.toString(); + debug6("comp", this); } - async function prepareFormData(formData, request2) { - const contentType = request2.headers.get("Content-Type"); - if (contentType && !contentType.startsWith("multipart/form-data")) { - return; + var ANY = {}; + Comparator.prototype.parse = function(comp) { + var r = this.options.loose ? safeRe[t.COMPARATORLOOSE] : safeRe[t.COMPARATOR]; + var m = comp.match(r); + if (!m) { + throw new TypeError("Invalid comparator: " + comp); } - request2.headers.set("Content-Type", contentType ?? "multipart/form-data"); - const parts = []; - for (const [fieldName, values] of Object.entries(formData)) { - for (const value of Array.isArray(values) ? values : [values]) { - if (typeof value === "string") { - parts.push({ - headers: (0, httpHeaders_js_1.createHttpHeaders)({ - "Content-Disposition": `form-data; name="${fieldName}"` - }), - body: (0, bytesEncoding_js_1.stringToUint8Array)(value, "utf-8") - }); - } else if (value === void 0 || value === null || typeof value !== "object") { - throw new Error(`Unexpected value for key ${fieldName}: ${value}. Value should be serialized to string first.`); - } else { - const fileName = value.name || "blob"; - const headers = (0, httpHeaders_js_1.createHttpHeaders)(); - headers.set("Content-Disposition", `form-data; name="${fieldName}"; filename="${fileName}"`); - headers.set("Content-Type", value.type || "application/octet-stream"); - parts.push({ - headers, - body: value - }); - } - } + this.operator = m[1] !== void 0 ? m[1] : ""; + if (this.operator === "=") { + this.operator = ""; } - request2.multipartBody = { parts }; - } - } -}); - -// node_modules/ms/index.js -var require_ms = __commonJS({ - "node_modules/ms/index.js"(exports2, module2) { - var s = 1e3; - var m = s * 60; - var h = m * 60; - var d = h * 24; - var w = d * 7; - var y = d * 365.25; - module2.exports = function(val, options) { - options = options || {}; - var type2 = typeof val; - if (type2 === "string" && val.length > 0) { - return parse2(val); - } else if (type2 === "number" && isFinite(val)) { - return options.long ? fmtLong(val) : fmtShort(val); + if (!m[2]) { + this.semver = ANY; + } else { + this.semver = new SemVer(m[2], this.options.loose); } - throw new Error( - "val is not a non-empty string or a valid number. 
val=" + JSON.stringify(val) - ); }; - function parse2(str2) { - str2 = String(str2); - if (str2.length > 100) { - return; - } - var match = /^(-?(?:\d+)?\.?\d+) *(milliseconds?|msecs?|ms|seconds?|secs?|s|minutes?|mins?|m|hours?|hrs?|h|days?|d|weeks?|w|years?|yrs?|y)?$/i.exec( - str2 - ); - if (!match) { - return; - } - var n = parseFloat(match[1]); - var type2 = (match[2] || "ms").toLowerCase(); - switch (type2) { - case "years": - case "year": - case "yrs": - case "yr": - case "y": - return n * y; - case "weeks": - case "week": - case "w": - return n * w; - case "days": - case "day": - case "d": - return n * d; - case "hours": - case "hour": - case "hrs": - case "hr": - case "h": - return n * h; - case "minutes": - case "minute": - case "mins": - case "min": - case "m": - return n * m; - case "seconds": - case "second": - case "secs": - case "sec": - case "s": - return n * s; - case "milliseconds": - case "millisecond": - case "msecs": - case "msec": - case "ms": - return n; - default: - return void 0; + Comparator.prototype.toString = function() { + return this.value; + }; + Comparator.prototype.test = function(version) { + debug6("Comparator.test", version, this.options.loose); + if (this.semver === ANY || version === ANY) { + return true; } - } - function fmtShort(ms) { - var msAbs = Math.abs(ms); - if (msAbs >= d) { - return Math.round(ms / d) + "d"; + if (typeof version === "string") { + try { + version = new SemVer(version, this.options); + } catch (er) { + return false; + } } - if (msAbs >= h) { - return Math.round(ms / h) + "h"; + return cmp(version, this.operator, this.semver, this.options); + }; + Comparator.prototype.intersects = function(comp, options) { + if (!(comp instanceof Comparator)) { + throw new TypeError("a Comparator is required"); } - if (msAbs >= m) { - return Math.round(ms / m) + "m"; + if (!options || typeof options !== "object") { + options = { + loose: !!options, + includePrerelease: false + }; } - if (msAbs >= s) { - return Math.round(ms / s) + "s"; + var rangeTmp; + if (this.operator === "") { + if (this.value === "") { + return true; + } + rangeTmp = new Range2(comp.value, options); + return satisfies2(this.value, rangeTmp, options); + } else if (comp.operator === "") { + if (comp.value === "") { + return true; + } + rangeTmp = new Range2(this.value, options); + return satisfies2(comp.semver, rangeTmp, options); } - return ms + "ms"; - } - function fmtLong(ms) { - var msAbs = Math.abs(ms); - if (msAbs >= d) { - return plural(ms, msAbs, d, "day"); + var sameDirectionIncreasing = (this.operator === ">=" || this.operator === ">") && (comp.operator === ">=" || comp.operator === ">"); + var sameDirectionDecreasing = (this.operator === "<=" || this.operator === "<") && (comp.operator === "<=" || comp.operator === "<"); + var sameSemVer = this.semver.version === comp.semver.version; + var differentDirectionsInclusive = (this.operator === ">=" || this.operator === "<=") && (comp.operator === ">=" || comp.operator === "<="); + var oppositeDirectionsLessThan = cmp(this.semver, "<", comp.semver, options) && ((this.operator === ">=" || this.operator === ">") && (comp.operator === "<=" || comp.operator === "<")); + var oppositeDirectionsGreaterThan = cmp(this.semver, ">", comp.semver, options) && ((this.operator === "<=" || this.operator === "<") && (comp.operator === ">=" || comp.operator === ">")); + return sameDirectionIncreasing || sameDirectionDecreasing || sameSemVer && differentDirectionsInclusive || oppositeDirectionsLessThan || oppositeDirectionsGreaterThan; + 
}; + exports2.Range = Range2; + function Range2(range, options) { + if (!options || typeof options !== "object") { + options = { + loose: !!options, + includePrerelease: false + }; } - if (msAbs >= h) { - return plural(ms, msAbs, h, "hour"); + if (range instanceof Range2) { + if (range.loose === !!options.loose && range.includePrerelease === !!options.includePrerelease) { + return range; + } else { + return new Range2(range.raw, options); + } } - if (msAbs >= m) { - return plural(ms, msAbs, m, "minute"); + if (range instanceof Comparator) { + return new Range2(range.value, options); } - if (msAbs >= s) { - return plural(ms, msAbs, s, "second"); + if (!(this instanceof Range2)) { + return new Range2(range, options); } - return ms + " ms"; - } - function plural(ms, msAbs, n, name) { - var isPlural = msAbs >= n * 1.5; - return Math.round(ms / n) + " " + name + (isPlural ? "s" : ""); - } - } -}); - -// node_modules/debug/src/common.js -var require_common = __commonJS({ - "node_modules/debug/src/common.js"(exports2, module2) { - function setup(env) { - createDebug.debug = createDebug; - createDebug.default = createDebug; - createDebug.coerce = coerce3; - createDebug.disable = disable; - createDebug.enable = enable; - createDebug.enabled = enabled; - createDebug.humanize = require_ms(); - createDebug.destroy = destroy; - Object.keys(env).forEach((key) => { - createDebug[key] = env[key]; + this.options = options; + this.loose = !!options.loose; + this.includePrerelease = !!options.includePrerelease; + this.raw = range.trim().split(/\s+/).join(" "); + this.set = this.raw.split("||").map(function(range2) { + return this.parseRange(range2.trim()); + }, this).filter(function(c) { + return c.length; }); - createDebug.names = []; - createDebug.skips = []; - createDebug.formatters = {}; - function selectColor(namespace) { - let hash2 = 0; - for (let i = 0; i < namespace.length; i++) { - hash2 = (hash2 << 5) - hash2 + namespace.charCodeAt(i); - hash2 |= 0; - } - return createDebug.colors[Math.abs(hash2) % createDebug.colors.length]; + if (!this.set.length) { + throw new TypeError("Invalid SemVer Range: " + this.raw); } - createDebug.selectColor = selectColor; - function createDebug(namespace) { - let prevTime; - let enableOverride = null; - let namespacesCache; - let enabledCache; - function debug6(...args) { - if (!debug6.enabled) { - return; - } - const self2 = debug6; - const curr = Number(/* @__PURE__ */ new Date()); - const ms = curr - (prevTime || curr); - self2.diff = ms; - self2.prev = prevTime; - self2.curr = curr; - prevTime = curr; - args[0] = createDebug.coerce(args[0]); - if (typeof args[0] !== "string") { - args.unshift("%O"); - } - let index = 0; - args[0] = args[0].replace(/%([a-zA-Z%])/g, (match, format) => { - if (match === "%%") { - return "%"; - } - index++; - const formatter = createDebug.formatters[format]; - if (typeof formatter === "function") { - const val = args[index]; - match = formatter.call(self2, val); - args.splice(index, 1); - index--; - } - return match; - }); - createDebug.formatArgs.call(self2, args); - const logFn = self2.log || createDebug.log; - logFn.apply(self2, args); - } - debug6.namespace = namespace; - debug6.useColors = createDebug.useColors(); - debug6.color = createDebug.selectColor(namespace); - debug6.extend = extend3; - debug6.destroy = createDebug.destroy; - Object.defineProperty(debug6, "enabled", { - enumerable: true, - configurable: false, - get: () => { - if (enableOverride !== null) { - return enableOverride; - } - if (namespacesCache !== 
createDebug.namespaces) { - namespacesCache = createDebug.namespaces; - enabledCache = createDebug.enabled(namespace); - } - return enabledCache; - }, - set: (v) => { - enableOverride = v; - } + this.format(); + } + Range2.prototype.format = function() { + this.range = this.set.map(function(comps) { + return comps.join(" ").trim(); + }).join("||").trim(); + return this.range; + }; + Range2.prototype.toString = function() { + return this.range; + }; + Range2.prototype.parseRange = function(range) { + var loose = this.options.loose; + var hr = loose ? safeRe[t.HYPHENRANGELOOSE] : safeRe[t.HYPHENRANGE]; + range = range.replace(hr, hyphenReplace); + debug6("hyphen replace", range); + range = range.replace(safeRe[t.COMPARATORTRIM], comparatorTrimReplace); + debug6("comparator trim", range, safeRe[t.COMPARATORTRIM]); + range = range.replace(safeRe[t.TILDETRIM], tildeTrimReplace); + range = range.replace(safeRe[t.CARETTRIM], caretTrimReplace); + range = range.split(/\s+/).join(" "); + var compRe = loose ? safeRe[t.COMPARATORLOOSE] : safeRe[t.COMPARATOR]; + var set2 = range.split(" ").map(function(comp) { + return parseComparator(comp, this.options); + }, this).join(" ").split(/\s+/); + if (this.options.loose) { + set2 = set2.filter(function(comp) { + return !!comp.match(compRe); }); - if (typeof createDebug.init === "function") { - createDebug.init(debug6); - } - return debug6; } - function extend3(namespace, delimiter) { - const newDebug = createDebug(this.namespace + (typeof delimiter === "undefined" ? ":" : delimiter) + namespace); - newDebug.log = this.log; - return newDebug; + set2 = set2.map(function(comp) { + return new Comparator(comp, this.options); + }, this); + return set2; + }; + Range2.prototype.intersects = function(range, options) { + if (!(range instanceof Range2)) { + throw new TypeError("a Range is required"); } - function enable(namespaces) { - createDebug.save(namespaces); - createDebug.namespaces = namespaces; - createDebug.names = []; - createDebug.skips = []; - const split = (typeof namespaces === "string" ? 
namespaces : "").trim().replace(/\s+/g, ",").split(",").filter(Boolean); - for (const ns of split) { - if (ns[0] === "-") { - createDebug.skips.push(ns.slice(1)); + return this.set.some(function(thisComparators) { + return isSatisfiable(thisComparators, options) && range.set.some(function(rangeComparators) { + return isSatisfiable(rangeComparators, options) && thisComparators.every(function(thisComparator) { + return rangeComparators.every(function(rangeComparator) { + return thisComparator.intersects(rangeComparator, options); + }); + }); + }); + }); + }; + function isSatisfiable(comparators, options) { + var result = true; + var remainingComparators = comparators.slice(); + var testComparator = remainingComparators.pop(); + while (result && remainingComparators.length) { + result = remainingComparators.every(function(otherComparator) { + return testComparator.intersects(otherComparator, options); + }); + testComparator = remainingComparators.pop(); + } + return result; + } + exports2.toComparators = toComparators; + function toComparators(range, options) { + return new Range2(range, options).set.map(function(comp) { + return comp.map(function(c) { + return c.value; + }).join(" ").trim().split(" "); + }); + } + function parseComparator(comp, options) { + debug6("comp", comp, options); + comp = replaceCarets(comp, options); + debug6("caret", comp); + comp = replaceTildes(comp, options); + debug6("tildes", comp); + comp = replaceXRanges(comp, options); + debug6("xrange", comp); + comp = replaceStars(comp, options); + debug6("stars", comp); + return comp; + } + function isX(id) { + return !id || id.toLowerCase() === "x" || id === "*"; + } + function replaceTildes(comp, options) { + return comp.trim().split(/\s+/).map(function(comp2) { + return replaceTilde(comp2, options); + }).join(" "); + } + function replaceTilde(comp, options) { + var r = options.loose ? safeRe[t.TILDELOOSE] : safeRe[t.TILDE]; + return comp.replace(r, function(_, M, m, p, pr) { + debug6("tilde", comp, _, M, m, p, pr); + var ret; + if (isX(M)) { + ret = ""; + } else if (isX(m)) { + ret = ">=" + M + ".0.0 <" + (+M + 1) + ".0.0"; + } else if (isX(p)) { + ret = ">=" + M + "." + m + ".0 <" + M + "." + (+m + 1) + ".0"; + } else if (pr) { + debug6("replaceTilde pr", pr); + ret = ">=" + M + "." + m + "." + p + "-" + pr + " <" + M + "." + (+m + 1) + ".0"; + } else { + ret = ">=" + M + "." + m + "." + p + " <" + M + "." + (+m + 1) + ".0"; + } + debug6("tilde return", ret); + return ret; + }); + } + function replaceCarets(comp, options) { + return comp.trim().split(/\s+/).map(function(comp2) { + return replaceCaret(comp2, options); + }).join(" "); + } + function replaceCaret(comp, options) { + debug6("caret", comp, options); + var r = options.loose ? safeRe[t.CARETLOOSE] : safeRe[t.CARET]; + return comp.replace(r, function(_, M, m, p, pr) { + debug6("caret", comp, _, M, m, p, pr); + var ret; + if (isX(M)) { + ret = ""; + } else if (isX(m)) { + ret = ">=" + M + ".0.0 <" + (+M + 1) + ".0.0"; + } else if (isX(p)) { + if (M === "0") { + ret = ">=" + M + "." + m + ".0 <" + M + "." + (+m + 1) + ".0"; } else { - createDebug.names.push(ns); + ret = ">=" + M + "." 
+ m + ".0 <" + (+M + 1) + ".0.0"; } - } - } - function matchesTemplate(search, template) { - let searchIndex = 0; - let templateIndex = 0; - let starIndex = -1; - let matchIndex = 0; - while (searchIndex < search.length) { - if (templateIndex < template.length && (template[templateIndex] === search[searchIndex] || template[templateIndex] === "*")) { - if (template[templateIndex] === "*") { - starIndex = templateIndex; - matchIndex = searchIndex; - templateIndex++; + } else if (pr) { + debug6("replaceCaret pr", pr); + if (M === "0") { + if (m === "0") { + ret = ">=" + M + "." + m + "." + p + "-" + pr + " <" + M + "." + m + "." + (+p + 1); } else { - searchIndex++; - templateIndex++; + ret = ">=" + M + "." + m + "." + p + "-" + pr + " <" + M + "." + (+m + 1) + ".0"; } - } else if (starIndex !== -1) { - templateIndex = starIndex + 1; - matchIndex++; - searchIndex = matchIndex; } else { - return false; + ret = ">=" + M + "." + m + "." + p + "-" + pr + " <" + (+M + 1) + ".0.0"; + } + } else { + debug6("no pr"); + if (M === "0") { + if (m === "0") { + ret = ">=" + M + "." + m + "." + p + " <" + M + "." + m + "." + (+p + 1); + } else { + ret = ">=" + M + "." + m + "." + p + " <" + M + "." + (+m + 1) + ".0"; + } + } else { + ret = ">=" + M + "." + m + "." + p + " <" + (+M + 1) + ".0.0"; } } - while (templateIndex < template.length && template[templateIndex] === "*") { - templateIndex++; + debug6("caret return", ret); + return ret; + }); + } + function replaceXRanges(comp, options) { + debug6("replaceXRanges", comp, options); + return comp.split(/\s+/).map(function(comp2) { + return replaceXRange(comp2, options); + }).join(" "); + } + function replaceXRange(comp, options) { + comp = comp.trim(); + var r = options.loose ? safeRe[t.XRANGELOOSE] : safeRe[t.XRANGE]; + return comp.replace(r, function(ret, gtlt, M, m, p, pr) { + debug6("xRange", comp, ret, gtlt, M, m, p, pr); + var xM = isX(M); + var xm = xM || isX(m); + var xp = xm || isX(p); + var anyX = xp; + if (gtlt === "=" && anyX) { + gtlt = ""; } - return templateIndex === template.length; - } - function disable() { - const namespaces = [ - ...createDebug.names, - ...createDebug.skips.map((namespace) => "-" + namespace) - ].join(","); - createDebug.enable(""); - return namespaces; - } - function enabled(name) { - for (const skip of createDebug.skips) { - if (matchesTemplate(name, skip)) { - return false; + pr = options.includePrerelease ? "-0" : ""; + if (xM) { + if (gtlt === ">" || gtlt === "<") { + ret = "<0.0.0-0"; + } else { + ret = "*"; } - } - for (const ns of createDebug.names) { - if (matchesTemplate(name, ns)) { - return true; + } else if (gtlt && anyX) { + if (xm) { + m = 0; + } + p = 0; + if (gtlt === ">") { + gtlt = ">="; + if (xm) { + M = +M + 1; + m = 0; + p = 0; + } else { + m = +m + 1; + p = 0; + } + } else if (gtlt === "<=") { + gtlt = "<"; + if (xm) { + M = +M + 1; + } else { + m = +m + 1; + } } + ret = gtlt + M + "." + m + "." + p + pr; + } else if (xm) { + ret = ">=" + M + ".0.0" + pr + " <" + (+M + 1) + ".0.0" + pr; + } else if (xp) { + ret = ">=" + M + "." + m + ".0" + pr + " <" + M + "." + (+m + 1) + ".0" + pr; } + debug6("xRange return", ret); + return ret; + }); + } + function replaceStars(comp, options) { + debug6("replaceStars", comp, options); + return comp.trim().replace(safeRe[t.STAR], ""); + } + function hyphenReplace($0, from, fM, fm, fp, fpr, fb, to, tM, tm, tp, tpr, tb) { + if (isX(fM)) { + from = ""; + } else if (isX(fm)) { + from = ">=" + fM + ".0.0"; + } else if (isX(fp)) { + from = ">=" + fM + "." 
+ fm + ".0"; + } else { + from = ">=" + from; + } + if (isX(tM)) { + to = ""; + } else if (isX(tm)) { + to = "<" + (+tM + 1) + ".0.0"; + } else if (isX(tp)) { + to = "<" + tM + "." + (+tm + 1) + ".0"; + } else if (tpr) { + to = "<=" + tM + "." + tm + "." + tp + "-" + tpr; + } else { + to = "<=" + to; + } + return (from + " " + to).trim(); + } + Range2.prototype.test = function(version) { + if (!version) { return false; } - function coerce3(val) { - if (val instanceof Error) { - return val.stack || val.message; + if (typeof version === "string") { + try { + version = new SemVer(version, this.options); + } catch (er) { + return false; } - return val; } - function destroy() { - console.warn("Instance method `debug.destroy()` is deprecated and no longer does anything. It will be removed in the next major version of `debug`."); + for (var i2 = 0; i2 < this.set.length; i2++) { + if (testSet(this.set[i2], version, this.options)) { + return true; + } } - createDebug.enable(createDebug.load()); - return createDebug; - } - module2.exports = setup; - } -}); - -// node_modules/debug/src/browser.js -var require_browser = __commonJS({ - "node_modules/debug/src/browser.js"(exports2, module2) { - exports2.formatArgs = formatArgs; - exports2.save = save; - exports2.load = load2; - exports2.useColors = useColors; - exports2.storage = localstorage(); - exports2.destroy = /* @__PURE__ */ (() => { - let warned = false; - return () => { - if (!warned) { - warned = true; - console.warn("Instance method `debug.destroy()` is deprecated and no longer does anything. It will be removed in the next major version of `debug`."); + return false; + }; + function testSet(set2, version, options) { + for (var i2 = 0; i2 < set2.length; i2++) { + if (!set2[i2].test(version)) { + return false; } - }; - })(); - exports2.colors = [ - "#0000CC", - "#0000FF", - "#0033CC", - "#0033FF", - "#0066CC", - "#0066FF", - "#0099CC", - "#0099FF", - "#00CC00", - "#00CC33", - "#00CC66", - "#00CC99", - "#00CCCC", - "#00CCFF", - "#3300CC", - "#3300FF", - "#3333CC", - "#3333FF", - "#3366CC", - "#3366FF", - "#3399CC", - "#3399FF", - "#33CC00", - "#33CC33", - "#33CC66", - "#33CC99", - "#33CCCC", - "#33CCFF", - "#6600CC", - "#6600FF", - "#6633CC", - "#6633FF", - "#66CC00", - "#66CC33", - "#9900CC", - "#9900FF", - "#9933CC", - "#9933FF", - "#99CC00", - "#99CC33", - "#CC0000", - "#CC0033", - "#CC0066", - "#CC0099", - "#CC00CC", - "#CC00FF", - "#CC3300", - "#CC3333", - "#CC3366", - "#CC3399", - "#CC33CC", - "#CC33FF", - "#CC6600", - "#CC6633", - "#CC9900", - "#CC9933", - "#CCCC00", - "#CCCC33", - "#FF0000", - "#FF0033", - "#FF0066", - "#FF0099", - "#FF00CC", - "#FF00FF", - "#FF3300", - "#FF3333", - "#FF3366", - "#FF3399", - "#FF33CC", - "#FF33FF", - "#FF6600", - "#FF6633", - "#FF9900", - "#FF9933", - "#FFCC00", - "#FFCC33" - ]; - function useColors() { - if (typeof window !== "undefined" && window.process && (window.process.type === "renderer" || window.process.__nwjs)) { - return true; } - if (typeof navigator !== "undefined" && navigator.userAgent && navigator.userAgent.toLowerCase().match(/(edge|trident)\/(\d+)/)) { + if (version.prerelease.length && !options.includePrerelease) { + for (i2 = 0; i2 < set2.length; i2++) { + debug6(set2[i2].semver); + if (set2[i2].semver === ANY) { + continue; + } + if (set2[i2].semver.prerelease.length > 0) { + var allowed = set2[i2].semver; + if (allowed.major === version.major && allowed.minor === version.minor && allowed.patch === version.patch) { + return true; + } + } + } return false; } - let m; - return typeof 
document !== "undefined" && document.documentElement && document.documentElement.style && document.documentElement.style.WebkitAppearance || // Is firebug? http://stackoverflow.com/a/398120/376773 - typeof window !== "undefined" && window.console && (window.console.firebug || window.console.exception && window.console.table) || // Is firefox >= v31? - // https://developer.mozilla.org/en-US/docs/Tools/Web_Console#Styling_messages - typeof navigator !== "undefined" && navigator.userAgent && (m = navigator.userAgent.toLowerCase().match(/firefox\/(\d+)/)) && parseInt(m[1], 10) >= 31 || // Double check webkit in userAgent just in case we are in a worker - typeof navigator !== "undefined" && navigator.userAgent && navigator.userAgent.toLowerCase().match(/applewebkit\/(\d+)/); + return true; } - function formatArgs(args) { - args[0] = (this.useColors ? "%c" : "") + this.namespace + (this.useColors ? " %c" : " ") + args[0] + (this.useColors ? "%c " : " ") + "+" + module2.exports.humanize(this.diff); - if (!this.useColors) { - return; + exports2.satisfies = satisfies2; + function satisfies2(version, range, options) { + try { + range = new Range2(range, options); + } catch (er) { + return false; } - const c = "color: " + this.color; - args.splice(1, 0, c, "color: inherit"); - let index = 0; - let lastC = 0; - args[0].replace(/%[a-zA-Z%]/g, (match) => { - if (match === "%%") { - return; - } - index++; - if (match === "%c") { - lastC = index; - } - }); - args.splice(lastC, 0, c); + return range.test(version); } - exports2.log = console.debug || console.log || (() => { - }); - function save(namespaces) { - try { - if (namespaces) { - exports2.storage.setItem("debug", namespaces); - } else { - exports2.storage.removeItem("debug"); - } - } catch (error3) { + exports2.maxSatisfying = maxSatisfying; + function maxSatisfying(versions, range, options) { + var max = null; + var maxSV = null; + try { + var rangeObj = new Range2(range, options); + } catch (er) { + return null; } + versions.forEach(function(v) { + if (rangeObj.test(v)) { + if (!max || maxSV.compare(v) === -1) { + max = v; + maxSV = new SemVer(max, options); + } + } + }); + return max; } - function load2() { - let r; + exports2.minSatisfying = minSatisfying; + function minSatisfying(versions, range, options) { + var min = null; + var minSV = null; try { - r = exports2.storage.getItem("debug") || exports2.storage.getItem("DEBUG"); - } catch (error3) { - } - if (!r && typeof process !== "undefined" && "env" in process) { - r = process.env.DEBUG; + var rangeObj = new Range2(range, options); + } catch (er) { + return null; } - return r; + versions.forEach(function(v) { + if (rangeObj.test(v)) { + if (!min || minSV.compare(v) === 1) { + min = v; + minSV = new SemVer(min, options); + } + } + }); + return min; } - function localstorage() { - try { - return localStorage; - } catch (error3) { + exports2.minVersion = minVersion; + function minVersion(range, loose) { + range = new Range2(range, loose); + var minver = new SemVer("0.0.0"); + if (range.test(minver)) { + return minver; + } + minver = new SemVer("0.0.0-0"); + if (range.test(minver)) { + return minver; + } + minver = null; + for (var i2 = 0; i2 < range.set.length; ++i2) { + var comparators = range.set[i2]; + comparators.forEach(function(comparator) { + var compver = new SemVer(comparator.semver.version); + switch (comparator.operator) { + case ">": + if (compver.prerelease.length === 0) { + compver.patch++; + } else { + compver.prerelease.push(0); + } + compver.raw = compver.format(); + /* 
fallthrough */ + case "": + case ">=": + if (!minver || gt(minver, compver)) { + minver = compver; + } + break; + case "<": + case "<=": + break; + /* istanbul ignore next */ + default: + throw new Error("Unexpected operation: " + comparator.operator); + } + }); + } + if (minver && range.test(minver)) { + return minver; } + return null; } - module2.exports = require_common()(exports2); - var { formatters } = module2.exports; - formatters.j = function(v) { + exports2.validRange = validRange; + function validRange(range, options) { try { - return JSON.stringify(v); - } catch (error3) { - return "[UnexpectedJSONParseError]: " + error3.message; + return new Range2(range, options).range || "*"; + } catch (er) { + return null; } - }; - } -}); - -// node_modules/has-flag/index.js -var require_has_flag = __commonJS({ - "node_modules/has-flag/index.js"(exports2, module2) { - "use strict"; - module2.exports = (flag, argv = process.argv) => { - const prefix = flag.startsWith("-") ? "" : flag.length === 1 ? "-" : "--"; - const position = argv.indexOf(prefix + flag); - const terminatorPosition = argv.indexOf("--"); - return position !== -1 && (terminatorPosition === -1 || position < terminatorPosition); - }; - } -}); - -// node_modules/supports-color/index.js -var require_supports_color = __commonJS({ - "node_modules/supports-color/index.js"(exports2, module2) { - "use strict"; - var os3 = require("os"); - var tty = require("tty"); - var hasFlag = require_has_flag(); - var { env } = process; - var forceColor; - if (hasFlag("no-color") || hasFlag("no-colors") || hasFlag("color=false") || hasFlag("color=never")) { - forceColor = 0; - } else if (hasFlag("color") || hasFlag("colors") || hasFlag("color=true") || hasFlag("color=always")) { - forceColor = 1; } - if ("FORCE_COLOR" in env) { - if (env.FORCE_COLOR === "true") { - forceColor = 1; - } else if (env.FORCE_COLOR === "false") { - forceColor = 0; - } else { - forceColor = env.FORCE_COLOR.length === 0 ? 1 : Math.min(parseInt(env.FORCE_COLOR, 10), 3); - } + exports2.ltr = ltr; + function ltr(version, range, options) { + return outside(version, range, "<", options); } - function translateLevel(level) { - if (level === 0) { - return false; - } - return { - level, - hasBasic: true, - has256: level >= 2, - has16m: level >= 3 - }; + exports2.gtr = gtr; + function gtr(version, range, options) { + return outside(version, range, ">", options); } - function supportsColor(haveStream, streamIsTTY) { - if (forceColor === 0) { - return 0; - } - if (hasFlag("color=16m") || hasFlag("color=full") || hasFlag("color=truecolor")) { - return 3; - } - if (hasFlag("color=256")) { - return 2; - } - if (haveStream && !streamIsTTY && forceColor === void 0) { - return 0; - } - const min = forceColor || 0; - if (env.TERM === "dumb") { - return min; + exports2.outside = outside; + function outside(version, range, hilo, options) { + version = new SemVer(version, options); + range = new Range2(range, options); + var gtfn, ltefn, ltfn, comp, ecomp; + switch (hilo) { + case ">": + gtfn = gt; + ltefn = lte; + ltfn = lt; + comp = ">"; + ecomp = ">="; + break; + case "<": + gtfn = lt; + ltefn = gte6; + ltfn = gt; + comp = "<"; + ecomp = "<="; + break; + default: + throw new TypeError('Must provide a hilo val of "<" or ">"'); } - if (process.platform === "win32") { - const osRelease = os3.release().split("."); - if (Number(osRelease[0]) >= 10 && Number(osRelease[2]) >= 10586) { - return Number(osRelease[2]) >= 14931 ? 
3 : 2; - } - return 1; + if (satisfies2(version, range, options)) { + return false; } - if ("CI" in env) { - if (["TRAVIS", "CIRCLECI", "APPVEYOR", "GITLAB_CI", "GITHUB_ACTIONS", "BUILDKITE"].some((sign) => sign in env) || env.CI_NAME === "codeship") { - return 1; + for (var i2 = 0; i2 < range.set.length; ++i2) { + var comparators = range.set[i2]; + var high = null; + var low = null; + comparators.forEach(function(comparator) { + if (comparator.semver === ANY) { + comparator = new Comparator(">=0.0.0"); + } + high = high || comparator; + low = low || comparator; + if (gtfn(comparator.semver, high.semver, options)) { + high = comparator; + } else if (ltfn(comparator.semver, low.semver, options)) { + low = comparator; + } + }); + if (high.operator === comp || high.operator === ecomp) { + return false; } - return min; - } - if ("TEAMCITY_VERSION" in env) { - return /^(9\.(0*[1-9]\d*)\.|\d{2,}\.)/.test(env.TEAMCITY_VERSION) ? 1 : 0; - } - if (env.COLORTERM === "truecolor") { - return 3; - } - if ("TERM_PROGRAM" in env) { - const version = parseInt((env.TERM_PROGRAM_VERSION || "").split(".")[0], 10); - switch (env.TERM_PROGRAM) { - case "iTerm.app": - return version >= 3 ? 3 : 2; - case "Apple_Terminal": - return 2; + if ((!low.operator || low.operator === comp) && ltefn(version, low.semver)) { + return false; + } else if (low.operator === ecomp && ltfn(version, low.semver)) { + return false; } } - if (/-256(color)?$/i.test(env.TERM)) { - return 2; - } - if (/^screen|^xterm|^vt100|^vt220|^rxvt|color|ansi|cygwin|linux/i.test(env.TERM)) { - return 1; - } - if ("COLORTERM" in env) { - return 1; - } - return min; + return true; } - function getSupportLevel(stream2) { - const level = supportsColor(stream2, stream2 && stream2.isTTY); - return translateLevel(level); + exports2.prerelease = prerelease; + function prerelease(version, options) { + var parsed = parse2(version, options); + return parsed && parsed.prerelease.length ? parsed.prerelease : null; } - module2.exports = { - supportsColor: getSupportLevel, - stdout: translateLevel(supportsColor(true, tty.isatty(1))), - stderr: translateLevel(supportsColor(true, tty.isatty(2))) - }; - } -}); - -// node_modules/debug/src/node.js -var require_node = __commonJS({ - "node_modules/debug/src/node.js"(exports2, module2) { - var tty = require("tty"); - var util = require("util"); - exports2.init = init; - exports2.log = log; - exports2.formatArgs = formatArgs; - exports2.save = save; - exports2.load = load2; - exports2.useColors = useColors; - exports2.destroy = util.deprecate( - () => { - }, - "Instance method `debug.destroy()` is deprecated and no longer does anything. It will be removed in the next major version of `debug`." 
- ); - exports2.colors = [6, 2, 3, 4, 5, 1]; - try { - const supportsColor = require_supports_color(); - if (supportsColor && (supportsColor.stderr || supportsColor).level >= 2) { - exports2.colors = [ - 20, - 21, - 26, - 27, - 32, - 33, - 38, - 39, - 40, - 41, - 42, - 43, - 44, - 45, - 56, - 57, - 62, - 63, - 68, - 69, - 74, - 75, - 76, - 77, - 78, - 79, - 80, - 81, - 92, - 93, - 98, - 99, - 112, - 113, - 128, - 129, - 134, - 135, - 148, - 149, - 160, - 161, - 162, - 163, - 164, - 165, - 166, - 167, - 168, - 169, - 170, - 171, - 172, - 173, - 178, - 179, - 184, - 185, - 196, - 197, - 198, - 199, - 200, - 201, - 202, - 203, - 204, - 205, - 206, - 207, - 208, - 209, - 214, - 215, - 220, - 221 - ]; - } - } catch (error3) { + exports2.intersects = intersects; + function intersects(r1, r2, options) { + r1 = new Range2(r1, options); + r2 = new Range2(r2, options); + return r1.intersects(r2); } - exports2.inspectOpts = Object.keys(process.env).filter((key) => { - return /^debug_/i.test(key); - }).reduce((obj, key) => { - const prop = key.substring(6).toLowerCase().replace(/_([a-z])/g, (_, k) => { - return k.toUpperCase(); - }); - let val = process.env[key]; - if (/^(yes|on|true|enabled)$/i.test(val)) { - val = true; - } else if (/^(no|off|false|disabled)$/i.test(val)) { - val = false; - } else if (val === "null") { - val = null; - } else { - val = Number(val); + exports2.coerce = coerce3; + function coerce3(version, options) { + if (version instanceof SemVer) { + return version; } - obj[prop] = val; - return obj; - }, {}); - function useColors() { - return "colors" in exports2.inspectOpts ? Boolean(exports2.inspectOpts.colors) : tty.isatty(process.stderr.fd); - } - function formatArgs(args) { - const { namespace: name, useColors: useColors2 } = this; - if (useColors2) { - const c = this.color; - const colorCode = "\x1B[3" + (c < 8 ? c : "8;5;" + c); - const prefix = ` ${colorCode};1m${name} \x1B[0m`; - args[0] = prefix + args[0].split("\n").join("\n" + prefix); - args.push(colorCode + "m+" + module2.exports.humanize(this.diff) + "\x1B[0m"); - } else { - args[0] = getDate() + name + " " + args[0]; + if (typeof version === "number") { + version = String(version); } - } - function getDate() { - if (exports2.inspectOpts.hideDate) { - return ""; + if (typeof version !== "string") { + return null; } - return (/* @__PURE__ */ new Date()).toISOString() + " "; - } - function log(...args) { - return process.stderr.write(util.formatWithOptions(exports2.inspectOpts, ...args) + "\n"); - } - function save(namespaces) { - if (namespaces) { - process.env.DEBUG = namespaces; + options = options || {}; + var match = null; + if (!options.rtl) { + match = version.match(safeRe[t.COERCE]); } else { - delete process.env.DEBUG; + var next; + while ((next = safeRe[t.COERCERTL].exec(version)) && (!match || match.index + match[0].length !== version.length)) { + if (!match || next.index + next[0].length !== match.index + match[0].length) { + match = next; + } + safeRe[t.COERCERTL].lastIndex = next.index + next[1].length + next[2].length; + } + safeRe[t.COERCERTL].lastIndex = -1; } - } - function load2() { - return process.env.DEBUG; - } - function init(debug6) { - debug6.inspectOpts = {}; - const keys = Object.keys(exports2.inspectOpts); - for (let i = 0; i < keys.length; i++) { - debug6.inspectOpts[keys[i]] = exports2.inspectOpts[keys[i]]; + if (match === null) { + return null; } + return parse2(match[2] + "." + (match[3] || "0") + "." 
+ (match[4] || "0"), options); } - module2.exports = require_common()(exports2); - var { formatters } = module2.exports; - formatters.o = function(v) { - this.inspectOpts.colors = this.useColors; - return util.inspect(v, this.inspectOpts).split("\n").map((str2) => str2.trim()).join(" "); - }; - formatters.O = function(v) { - this.inspectOpts.colors = this.useColors; - return util.inspect(v, this.inspectOpts); - }; } }); -// node_modules/debug/src/index.js -var require_src = __commonJS({ - "node_modules/debug/src/index.js"(exports2, module2) { - if (typeof process === "undefined" || process.type === "renderer" || process.browser === true || process.__nwjs) { - module2.exports = require_browser(); - } else { - module2.exports = require_node(); - } +// node_modules/@actions/cache/lib/internal/constants.js +var require_constants12 = __commonJS({ + "node_modules/@actions/cache/lib/internal/constants.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.CacheFileSizeLimit = exports2.ManifestFilename = exports2.TarFilename = exports2.SystemTarPathOnWindows = exports2.GnuTarPathOnWindows = exports2.SocketTimeout = exports2.DefaultRetryDelay = exports2.DefaultRetryAttempts = exports2.ArchiveToolType = exports2.CompressionMethod = exports2.CacheFilename = void 0; + var CacheFilename; + (function(CacheFilename2) { + CacheFilename2["Gzip"] = "cache.tgz"; + CacheFilename2["Zstd"] = "cache.tzst"; + })(CacheFilename || (exports2.CacheFilename = CacheFilename = {})); + var CompressionMethod; + (function(CompressionMethod2) { + CompressionMethod2["Gzip"] = "gzip"; + CompressionMethod2["ZstdWithoutLong"] = "zstd-without-long"; + CompressionMethod2["Zstd"] = "zstd"; + })(CompressionMethod || (exports2.CompressionMethod = CompressionMethod = {})); + var ArchiveToolType; + (function(ArchiveToolType2) { + ArchiveToolType2["GNU"] = "gnu"; + ArchiveToolType2["BSD"] = "bsd"; + })(ArchiveToolType || (exports2.ArchiveToolType = ArchiveToolType = {})); + exports2.DefaultRetryAttempts = 2; + exports2.DefaultRetryDelay = 5e3; + exports2.SocketTimeout = 5e3; + exports2.GnuTarPathOnWindows = `${process.env["PROGRAMFILES"]}\\Git\\usr\\bin\\tar.exe`; + exports2.SystemTarPathOnWindows = `${process.env["SYSTEMDRIVE"]}\\Windows\\System32\\tar.exe`; + exports2.TarFilename = "cache.tar"; + exports2.ManifestFilename = "manifest.txt"; + exports2.CacheFileSizeLimit = 10 * Math.pow(1024, 3); } }); -// node_modules/agent-base/dist/helpers.js -var require_helpers2 = __commonJS({ - "node_modules/agent-base/dist/helpers.js"(exports2) { +// node_modules/@actions/cache/lib/internal/cacheUtils.js +var require_cacheUtils = __commonJS({ + "node_modules/@actions/cache/lib/internal/cacheUtils.js"(exports2) { "use strict"; var __createBinding2 = exports2 && exports2.__createBinding || (Object.create ? 
(function(o, m, k, k2) { if (k2 === void 0) k2 = k; @@ -53991,2180 +51877,2159 @@ var require_helpers2 = __commonJS({ }) : function(o, v) { o["default"] = v; }); - var __importStar2 = exports2 && exports2.__importStar || function(mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) { - for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding2(result, mod, k); - } - __setModuleDefault2(result, mod); - return result; - }; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.req = exports2.json = exports2.toBuffer = void 0; - var http = __importStar2(require("http")); - var https2 = __importStar2(require("https")); - async function toBuffer(stream2) { - let length = 0; - const chunks = []; - for await (const chunk of stream2) { - length += chunk.length; - chunks.push(chunk); - } - return Buffer.concat(chunks, length); - } - exports2.toBuffer = toBuffer; - async function json2(stream2) { - const buf = await toBuffer(stream2); - const str2 = buf.toString("utf8"); - try { - return JSON.parse(str2); - } catch (_err) { - const err = _err; - err.message += ` (input: ${str2})`; - throw err; - } - } - exports2.json = json2; - function req(url2, opts = {}) { - const href = typeof url2 === "string" ? url2 : url2.href; - const req2 = (href.startsWith("https:") ? https2 : http).request(url2, opts); - const promise = new Promise((resolve6, reject) => { - req2.once("response", resolve6).once("error", reject).end(); - }); - req2.then = promise.then.bind(promise); - return req2; - } - exports2.req = req; - } -}); - -// node_modules/agent-base/dist/index.js -var require_dist = __commonJS({ - "node_modules/agent-base/dist/index.js"(exports2) { - "use strict"; - var __createBinding2 = exports2 && exports2.__createBinding || (Object.create ? (function(o, m, k, k2) { - if (k2 === void 0) k2 = k; - var desc = Object.getOwnPropertyDescriptor(m, k); - if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { - desc = { enumerable: true, get: function() { - return m[k]; - } }; - } - Object.defineProperty(o, k2, desc); - }) : (function(o, m, k, k2) { - if (k2 === void 0) k2 = k; - o[k2] = m[k]; - })); - var __setModuleDefault2 = exports2 && exports2.__setModuleDefault || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); - }) : function(o, v) { - o["default"] = v; - }); - var __importStar2 = exports2 && exports2.__importStar || function(mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) { - for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding2(result, mod, k); - } - __setModuleDefault2(result, mod); - return result; - }; - var __exportStar2 = exports2 && exports2.__exportStar || function(m, exports3) { - for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports3, p)) __createBinding2(exports3, m, p); - }; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.Agent = void 0; - var net = __importStar2(require("net")); - var http = __importStar2(require("http")); - var https_1 = require("https"); - __exportStar2(require_helpers2(), exports2); - var INTERNAL = /* @__PURE__ */ Symbol("AgentBaseInternalState"); - var Agent = class extends http.Agent { - constructor(opts) { - super(opts); - this[INTERNAL] = {}; - } - /** - * Determine whether this is an `http` or `https` request. 
- */ - isSecureEndpoint(options) { - if (options) { - if (typeof options.secureEndpoint === "boolean") { - return options.secureEndpoint; - } - if (typeof options.protocol === "string") { - return options.protocol === "https:"; - } - } - const { stack } = new Error(); - if (typeof stack !== "string") - return false; - return stack.split("\n").some((l) => l.indexOf("(https.js:") !== -1 || l.indexOf("node:https:") !== -1); - } - // In order to support async signatures in `connect()` and Node's native - // connection pooling in `http.Agent`, the array of sockets for each origin - // has to be updated synchronously. This is so the length of the array is - // accurate when `addRequest()` is next called. We achieve this by creating a - // fake socket and adding it to `sockets[origin]` and incrementing - // `totalSocketCount`. - incrementSockets(name) { - if (this.maxSockets === Infinity && this.maxTotalSockets === Infinity) { - return null; - } - if (!this.sockets[name]) { - this.sockets[name] = []; + var __importStar2 = exports2 && exports2.__importStar || /* @__PURE__ */ (function() { + var ownKeys2 = function(o) { + ownKeys2 = Object.getOwnPropertyNames || function(o2) { + var ar = []; + for (var k in o2) if (Object.prototype.hasOwnProperty.call(o2, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys2(o); + }; + return function(mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) { + for (var k = ownKeys2(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding2(result, mod, k[i]); } - const fakeSocket = new net.Socket({ writable: false }); - this.sockets[name].push(fakeSocket); - this.totalSocketCount++; - return fakeSocket; + __setModuleDefault2(result, mod); + return result; + }; + })(); + var __awaiter2 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { + function adopt(value) { + return value instanceof P ? value : new P(function(resolve6) { + resolve6(value); + }); } - decrementSockets(name, socket) { - if (!this.sockets[name] || socket === null) { - return; - } - const sockets = this.sockets[name]; - const index = sockets.indexOf(socket); - if (index !== -1) { - sockets.splice(index, 1); - this.totalSocketCount--; - if (sockets.length === 0) { - delete this.sockets[name]; + return new (P || (P = Promise))(function(resolve6, reject) { + function fulfilled(value) { + try { + step(generator.next(value)); + } catch (e) { + reject(e); } } - } - // In order to properly update the socket pool, we need to call `getName()` on - // the core `https.Agent` if it is a secureEndpoint. - getName(options) { - const secureEndpoint = typeof options.secureEndpoint === "boolean" ? 
options.secureEndpoint : this.isSecureEndpoint(options); - if (secureEndpoint) { - return https_1.Agent.prototype.getName.call(this, options); - } - return super.getName(options); - } - createSocket(req, options, cb) { - const connectOpts = { - ...options, - secureEndpoint: this.isSecureEndpoint(options) - }; - const name = this.getName(connectOpts); - const fakeSocket = this.incrementSockets(name); - Promise.resolve().then(() => this.connect(req, connectOpts)).then((socket) => { - this.decrementSockets(name, fakeSocket); - if (socket instanceof http.Agent) { - try { - return socket.addRequest(req, connectOpts); - } catch (err) { - return cb(err); - } + function rejected(value) { + try { + step(generator["throw"](value)); + } catch (e) { + reject(e); } - this[INTERNAL].currentSocket = socket; - super.createSocket(req, options, cb); - }, (err) => { - this.decrementSockets(name, fakeSocket); - cb(err); - }); - } - createConnection() { - const socket = this[INTERNAL].currentSocket; - this[INTERNAL].currentSocket = void 0; - if (!socket) { - throw new Error("No socket was returned in the `connect()` function"); } - return socket; - } - get defaultPort() { - return this[INTERNAL].defaultPort ?? (this.protocol === "https:" ? 443 : 80); - } - set defaultPort(v) { - if (this[INTERNAL]) { - this[INTERNAL].defaultPort = v; + function step(result) { + result.done ? resolve6(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); + }; + var __asyncValues2 = exports2 && exports2.__asyncValues || function(o) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var m = o[Symbol.asyncIterator], i; + return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function() { + return this; + }, i); + function verb(n) { + i[n] = o[n] && function(v) { + return new Promise(function(resolve6, reject) { + v = o[n](v), settle(resolve6, reject, v.done, v.value); + }); + }; } - get protocol() { - return this[INTERNAL].protocol ?? (this.isSecureEndpoint() ? "https:" : "http:"); - } - set protocol(v) { - if (this[INTERNAL]) { - this[INTERNAL].protocol = v; - } + function settle(resolve6, reject, d, v) { + Promise.resolve(v).then(function(v2) { + resolve6({ value: v2, done: d }); + }, reject); } }; - exports2.Agent = Agent; - } -}); - -// node_modules/https-proxy-agent/dist/parse-proxy-response.js -var require_parse_proxy_response = __commonJS({ - "node_modules/https-proxy-agent/dist/parse-proxy-response.js"(exports2) { - "use strict"; - var __importDefault2 = exports2 && exports2.__importDefault || function(mod) { - return mod && mod.__esModule ? 
mod : { "default": mod }; - }; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.parseProxyResponse = void 0; - var debug_1 = __importDefault2(require_src()); - var debug6 = (0, debug_1.default)("https-proxy-agent:parse-proxy-response"); - function parseProxyResponse(socket) { - return new Promise((resolve6, reject) => { - let buffersLength = 0; - const buffers = []; - function read() { - const b = socket.read(); - if (b) - ondata(b); - else - socket.once("readable", read); - } - function cleanup() { - socket.removeListener("end", onend); - socket.removeListener("error", onerror); - socket.removeListener("readable", read); - } - function onend() { - cleanup(); - debug6("onend"); - reject(new Error("Proxy connection ended before receiving CONNECT response")); - } - function onerror(err) { - cleanup(); - debug6("onerror %o", err); - reject(err); - } - function ondata(b) { - buffers.push(b); - buffersLength += b.length; - const buffered = Buffer.concat(buffers, buffersLength); - const endOfHeaders = buffered.indexOf("\r\n\r\n"); - if (endOfHeaders === -1) { - debug6("have not received end of HTTP headers yet..."); - read(); - return; - } - const headerParts = buffered.slice(0, endOfHeaders).toString("ascii").split("\r\n"); - const firstLine = headerParts.shift(); - if (!firstLine) { - socket.destroy(); - return reject(new Error("No header received from proxy CONNECT response")); - } - const firstLineParts = firstLine.split(" "); - const statusCode = +firstLineParts[1]; - const statusText = firstLineParts.slice(2).join(" "); - const headers = {}; - for (const header of headerParts) { - if (!header) - continue; - const firstColon = header.indexOf(":"); - if (firstColon === -1) { - socket.destroy(); - return reject(new Error(`Invalid header from proxy CONNECT response: "${header}"`)); + exports2.createTempDirectory = createTempDirectory; + exports2.getArchiveFileSizeInBytes = getArchiveFileSizeInBytes; + exports2.resolvePaths = resolvePaths; + exports2.unlinkFile = unlinkFile; + exports2.getCompressionMethod = getCompressionMethod; + exports2.getCacheFileName = getCacheFileName; + exports2.getGnuTarPathOnWindows = getGnuTarPathOnWindows; + exports2.assertDefined = assertDefined; + exports2.getCacheVersion = getCacheVersion; + exports2.getRuntimeToken = getRuntimeToken; + var core14 = __importStar2(require_core()); + var exec = __importStar2(require_exec()); + var glob = __importStar2(require_glob()); + var io6 = __importStar2(require_io()); + var crypto2 = __importStar2(require("crypto")); + var fs13 = __importStar2(require("fs")); + var path13 = __importStar2(require("path")); + var semver9 = __importStar2(require_semver3()); + var util = __importStar2(require("util")); + var constants_1 = require_constants12(); + var versionSalt = "1.0"; + function createTempDirectory() { + return __awaiter2(this, void 0, void 0, function* () { + const IS_WINDOWS = process.platform === "win32"; + let tempDirectory = process.env["RUNNER_TEMP"] || ""; + if (!tempDirectory) { + let baseLocation; + if (IS_WINDOWS) { + baseLocation = process.env["USERPROFILE"] || "C:\\"; + } else { + if (process.platform === "darwin") { + baseLocation = "/Users"; + } else { + baseLocation = "/home"; } - const key = header.slice(0, firstColon).toLowerCase(); - const value = header.slice(firstColon + 1).trimStart(); - const current = headers[key]; - if (typeof current === "string") { - headers[key] = [current, value]; - } else if (Array.isArray(current)) { - current.push(value); + } + tempDirectory = 
path13.join(baseLocation, "actions", "temp"); + } + const dest = path13.join(tempDirectory, crypto2.randomUUID()); + yield io6.mkdirP(dest); + return dest; + }); + } + function getArchiveFileSizeInBytes(filePath) { + return fs13.statSync(filePath).size; + } + function resolvePaths(patterns) { + return __awaiter2(this, void 0, void 0, function* () { + var _a, e_1, _b, _c; + var _d; + const paths = []; + const workspace = (_d = process.env["GITHUB_WORKSPACE"]) !== null && _d !== void 0 ? _d : process.cwd(); + const globber = yield glob.create(patterns.join("\n"), { + implicitDescendants: false + }); + try { + for (var _e = true, _f = __asyncValues2(globber.globGenerator()), _g; _g = yield _f.next(), _a = _g.done, !_a; _e = true) { + _c = _g.value; + _e = false; + const file = _c; + const relativeFile = path13.relative(workspace, file).replace(new RegExp(`\\${path13.sep}`, "g"), "/"); + core14.debug(`Matched: ${relativeFile}`); + if (relativeFile === "") { + paths.push("."); } else { - headers[key] = value; + paths.push(`${relativeFile}`); } } - debug6("got proxy server response: %o %o", firstLine, headers); - cleanup(); - resolve6({ - connect: { - statusCode, - statusText, - headers - }, - buffered + } catch (e_1_1) { + e_1 = { error: e_1_1 }; + } finally { + try { + if (!_e && !_a && (_b = _f.return)) yield _b.call(_f); + } finally { + if (e_1) throw e_1.error; + } + } + return paths; + }); + } + function unlinkFile(filePath) { + return __awaiter2(this, void 0, void 0, function* () { + return util.promisify(fs13.unlink)(filePath); + }); + } + function getVersion(app_1) { + return __awaiter2(this, arguments, void 0, function* (app, additionalArgs = []) { + let versionOutput = ""; + additionalArgs.push("--version"); + core14.debug(`Checking ${app} ${additionalArgs.join(" ")}`); + try { + yield exec.exec(`${app}`, additionalArgs, { + ignoreReturnCode: true, + silent: true, + listeners: { + stdout: (data) => versionOutput += data.toString(), + stderr: (data) => versionOutput += data.toString() + } }); + } catch (err) { + core14.debug(err.message); } - socket.on("error", onerror); - socket.on("end", onend); - read(); + versionOutput = versionOutput.trim(); + core14.debug(versionOutput); + return versionOutput; }); } - exports2.parseProxyResponse = parseProxyResponse; - } -}); - -// node_modules/https-proxy-agent/dist/index.js -var require_dist2 = __commonJS({ - "node_modules/https-proxy-agent/dist/index.js"(exports2) { - "use strict"; - var __createBinding2 = exports2 && exports2.__createBinding || (Object.create ? (function(o, m, k, k2) { - if (k2 === void 0) k2 = k; - var desc = Object.getOwnPropertyDescriptor(m, k); - if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { - desc = { enumerable: true, get: function() { - return m[k]; - } }; + function getCompressionMethod() { + return __awaiter2(this, void 0, void 0, function* () { + const versionOutput = yield getVersion("zstd", ["--quiet"]); + const version = semver9.clean(versionOutput); + core14.debug(`zstd version: ${version}`); + if (versionOutput === "") { + return constants_1.CompressionMethod.Gzip; + } else { + return constants_1.CompressionMethod.ZstdWithoutLong; + } + }); + } + function getCacheFileName(compressionMethod) { + return compressionMethod === constants_1.CompressionMethod.Gzip ? 
constants_1.CacheFilename.Gzip : constants_1.CacheFilename.Zstd; + } + function getGnuTarPathOnWindows() { + return __awaiter2(this, void 0, void 0, function* () { + if (fs13.existsSync(constants_1.GnuTarPathOnWindows)) { + return constants_1.GnuTarPathOnWindows; + } + const versionOutput = yield getVersion("tar"); + return versionOutput.toLowerCase().includes("gnu tar") ? io6.which("tar") : ""; + }); + } + function assertDefined(name, value) { + if (value === void 0) { + throw Error(`Expected ${name} but value was undefiend`); } - Object.defineProperty(o, k2, desc); - }) : (function(o, m, k, k2) { - if (k2 === void 0) k2 = k; - o[k2] = m[k]; - })); - var __setModuleDefault2 = exports2 && exports2.__setModuleDefault || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); - }) : function(o, v) { - o["default"] = v; - }); - var __importStar2 = exports2 && exports2.__importStar || function(mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) { - for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding2(result, mod, k); + return value; + } + function getCacheVersion(paths, compressionMethod, enableCrossOsArchive = false) { + const components = paths.slice(); + if (compressionMethod) { + components.push(compressionMethod); } - __setModuleDefault2(result, mod); - return result; - }; - var __importDefault2 = exports2 && exports2.__importDefault || function(mod) { - return mod && mod.__esModule ? mod : { "default": mod }; - }; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.HttpsProxyAgent = void 0; - var net = __importStar2(require("net")); - var tls = __importStar2(require("tls")); - var assert_1 = __importDefault2(require("assert")); - var debug_1 = __importDefault2(require_src()); - var agent_base_1 = require_dist(); - var url_1 = require("url"); - var parse_proxy_response_1 = require_parse_proxy_response(); - var debug6 = (0, debug_1.default)("https-proxy-agent"); - var setServernameFromNonIpHost = (options) => { - if (options.servername === void 0 && options.host && !net.isIP(options.host)) { - return { - ...options, - servername: options.host - }; + if (process.platform === "win32" && !enableCrossOsArchive) { + components.push("windows-only"); } - return options; - }; - var HttpsProxyAgent = class extends agent_base_1.Agent { - constructor(proxy, opts) { - super(opts); - this.options = { path: void 0 }; - this.proxy = typeof proxy === "string" ? new url_1.URL(proxy) : proxy; - this.proxyHeaders = opts?.headers ?? {}; - debug6("Creating new HttpsProxyAgent instance: %o", this.proxy.href); - const host = (this.proxy.hostname || this.proxy.host).replace(/^\[|\]$/g, ""); - const port = this.proxy.port ? parseInt(this.proxy.port, 10) : this.proxy.protocol === "https:" ? 443 : 80; - this.connectOpts = { - // Attempt to negotiate http/1.1 for proxy servers that support http/2 - ALPNProtocols: ["http/1.1"], - ...opts ? omit2(opts, "headers") : null, - host, - port - }; - } - /** - * Called when the node-core HTTP client library is creating a - * new HTTP request. 
- */ - async connect(req, opts) { - const { proxy } = this; - if (!opts.host) { - throw new TypeError('No "host" provided'); - } - let socket; - if (proxy.protocol === "https:") { - debug6("Creating `tls.Socket`: %o", this.connectOpts); - socket = tls.connect(setServernameFromNonIpHost(this.connectOpts)); - } else { - debug6("Creating `net.Socket`: %o", this.connectOpts); - socket = net.connect(this.connectOpts); - } - const headers = typeof this.proxyHeaders === "function" ? this.proxyHeaders() : { ...this.proxyHeaders }; - const host = net.isIPv6(opts.host) ? `[${opts.host}]` : opts.host; - let payload = `CONNECT ${host}:${opts.port} HTTP/1.1\r -`; - if (proxy.username || proxy.password) { - const auth2 = `${decodeURIComponent(proxy.username)}:${decodeURIComponent(proxy.password)}`; - headers["Proxy-Authorization"] = `Basic ${Buffer.from(auth2).toString("base64")}`; - } - headers.Host = `${host}:${opts.port}`; - if (!headers["Proxy-Connection"]) { - headers["Proxy-Connection"] = this.keepAlive ? "Keep-Alive" : "close"; - } - for (const name of Object.keys(headers)) { - payload += `${name}: ${headers[name]}\r -`; - } - const proxyResponsePromise = (0, parse_proxy_response_1.parseProxyResponse)(socket); - socket.write(`${payload}\r -`); - const { connect, buffered } = await proxyResponsePromise; - req.emit("proxyConnect", connect); - this.emit("proxyConnect", connect, req); - if (connect.statusCode === 200) { - req.once("socket", resume); - if (opts.secureEndpoint) { - debug6("Upgrading socket connection to TLS"); - return tls.connect({ - ...omit2(setServernameFromNonIpHost(opts), "host", "path", "port"), - socket - }); - } - return socket; - } - socket.destroy(); - const fakeSocket = new net.Socket({ writable: false }); - fakeSocket.readable = true; - req.once("socket", (s) => { - debug6("Replaying proxy buffer for failed request"); - (0, assert_1.default)(s.listenerCount("data") > 0); - s.push(buffered); - s.push(null); - }); - return fakeSocket; - } - }; - HttpsProxyAgent.protocols = ["http", "https"]; - exports2.HttpsProxyAgent = HttpsProxyAgent; - function resume(socket) { - socket.resume(); + components.push(versionSalt); + return crypto2.createHash("sha256").update(components.join("|")).digest("hex"); } - function omit2(obj, ...keys) { - const ret = {}; - let key; - for (key in obj) { - if (!keys.includes(key)) { - ret[key] = obj[key]; - } + function getRuntimeToken() { + const token = process.env["ACTIONS_RUNTIME_TOKEN"]; + if (!token) { + throw new Error("Unable to get the ACTIONS_RUNTIME_TOKEN env variable"); } - return ret; + return token; } } }); -// node_modules/http-proxy-agent/dist/index.js -var require_dist3 = __commonJS({ - "node_modules/http-proxy-agent/dist/index.js"(exports2) { - "use strict"; - var __createBinding2 = exports2 && exports2.__createBinding || (Object.create ? (function(o, m, k, k2) { - if (k2 === void 0) k2 = k; - var desc = Object.getOwnPropertyDescriptor(m, k); - if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { - desc = { enumerable: true, get: function() { - return m[k]; - } }; - } - Object.defineProperty(o, k2, desc); - }) : (function(o, m, k, k2) { - if (k2 === void 0) k2 = k; - o[k2] = m[k]; - })); - var __setModuleDefault2 = exports2 && exports2.__setModuleDefault || (Object.create ? 
(function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); - }) : function(o, v) { - o["default"] = v; - }); - var __importStar2 = exports2 && exports2.__importStar || function(mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) { - for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding2(result, mod, k); - } - __setModuleDefault2(result, mod); - return result; - }; - var __importDefault2 = exports2 && exports2.__importDefault || function(mod) { - return mod && mod.__esModule ? mod : { "default": mod }; - }; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.HttpProxyAgent = void 0; - var net = __importStar2(require("net")); - var tls = __importStar2(require("tls")); - var debug_1 = __importDefault2(require_src()); - var events_1 = require("events"); - var agent_base_1 = require_dist(); - var url_1 = require("url"); - var debug6 = (0, debug_1.default)("http-proxy-agent"); - var HttpProxyAgent = class extends agent_base_1.Agent { - constructor(proxy, opts) { - super(opts); - this.proxy = typeof proxy === "string" ? new url_1.URL(proxy) : proxy; - this.proxyHeaders = opts?.headers ?? {}; - debug6("Creating new HttpProxyAgent instance: %o", this.proxy.href); - const host = (this.proxy.hostname || this.proxy.host).replace(/^\[|\]$/g, ""); - const port = this.proxy.port ? parseInt(this.proxy.port, 10) : this.proxy.protocol === "https:" ? 443 : 80; - this.connectOpts = { - ...opts ? omit2(opts, "headers") : null, - host, - port - }; - } - addRequest(req, opts) { - req._header = null; - this.setRequestProps(req, opts); - super.addRequest(req, opts); - } - setRequestProps(req, opts) { - const { proxy } = this; - const protocol = opts.secureEndpoint ? "https:" : "http:"; - const hostname = req.getHeader("host") || "localhost"; - const base = `${protocol}//${hostname}`; - const url2 = new url_1.URL(req.path, base); - if (opts.port !== 80) { - url2.port = String(opts.port); - } - req.path = String(url2); - const headers = typeof this.proxyHeaders === "function" ? this.proxyHeaders() : { ...this.proxyHeaders }; - if (proxy.username || proxy.password) { - const auth2 = `${decodeURIComponent(proxy.username)}:${decodeURIComponent(proxy.password)}`; - headers["Proxy-Authorization"] = `Basic ${Buffer.from(auth2).toString("base64")}`; - } - if (!headers["Proxy-Connection"]) { - headers["Proxy-Connection"] = this.keepAlive ? 
"Keep-Alive" : "close"; - } - for (const name of Object.keys(headers)) { - const value = headers[name]; - if (value) { - req.setHeader(name, value); - } - } - } - async connect(req, opts) { - req._header = null; - if (!req.path.includes("://")) { - this.setRequestProps(req, opts); - } - let first; - let endOfHeaders; - debug6("Regenerating stored HTTP header string for request"); - req._implicitHeader(); - if (req.outputData && req.outputData.length > 0) { - debug6("Patching connection write() output buffer with updated header"); - first = req.outputData[0].data; - endOfHeaders = first.indexOf("\r\n\r\n") + 4; - req.outputData[0].data = req._header + first.substring(endOfHeaders); - debug6("Output buffer: %o", req.outputData[0].data); - } - let socket; - if (this.proxy.protocol === "https:") { - debug6("Creating `tls.Socket`: %o", this.connectOpts); - socket = tls.connect(this.connectOpts); - } else { - debug6("Creating `net.Socket`: %o", this.connectOpts); - socket = net.connect(this.connectOpts); - } - await (0, events_1.once)(socket, "connect"); - return socket; - } +// node_modules/tslib/tslib.es6.mjs +var tslib_es6_exports = {}; +__export(tslib_es6_exports, { + __addDisposableResource: () => __addDisposableResource, + __assign: () => __assign, + __asyncDelegator: () => __asyncDelegator, + __asyncGenerator: () => __asyncGenerator, + __asyncValues: () => __asyncValues, + __await: () => __await, + __awaiter: () => __awaiter, + __classPrivateFieldGet: () => __classPrivateFieldGet, + __classPrivateFieldIn: () => __classPrivateFieldIn, + __classPrivateFieldSet: () => __classPrivateFieldSet, + __createBinding: () => __createBinding, + __decorate: () => __decorate, + __disposeResources: () => __disposeResources, + __esDecorate: () => __esDecorate, + __exportStar: () => __exportStar, + __extends: () => __extends, + __generator: () => __generator, + __importDefault: () => __importDefault, + __importStar: () => __importStar, + __makeTemplateObject: () => __makeTemplateObject, + __metadata: () => __metadata, + __param: () => __param, + __propKey: () => __propKey, + __read: () => __read, + __rest: () => __rest, + __rewriteRelativeImportExtension: () => __rewriteRelativeImportExtension, + __runInitializers: () => __runInitializers, + __setFunctionName: () => __setFunctionName, + __spread: () => __spread, + __spreadArray: () => __spreadArray, + __spreadArrays: () => __spreadArrays, + __values: () => __values2, + default: () => tslib_es6_default +}); +function __extends(d, b) { + if (typeof b !== "function" && b !== null) + throw new TypeError("Class extends value " + String(b) + " is not a constructor or null"); + extendStatics(d, b); + function __() { + this.constructor = d; + } + d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __()); +} +function __rest(s, e) { + var t = {}; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0) + t[p] = s[p]; + if (s != null && typeof Object.getOwnPropertySymbols === "function") + for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) { + if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i])) + t[p[i]] = s[p[i]]; + } + return t; +} +function __decorate(decorators, target, key, desc) { + var c = arguments.length, r = c < 3 ? target : desc === null ? 
desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; + if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc); + else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r; + return c > 3 && r && Object.defineProperty(target, key, r), r; +} +function __param(paramIndex, decorator) { + return function(target, key) { + decorator(target, key, paramIndex); + }; +} +function __esDecorate(ctor, descriptorIn, decorators, contextIn, initializers, extraInitializers) { + function accept(f) { + if (f !== void 0 && typeof f !== "function") throw new TypeError("Function expected"); + return f; + } + var kind = contextIn.kind, key = kind === "getter" ? "get" : kind === "setter" ? "set" : "value"; + var target = !descriptorIn && ctor ? contextIn["static"] ? ctor : ctor.prototype : null; + var descriptor = descriptorIn || (target ? Object.getOwnPropertyDescriptor(target, contextIn.name) : {}); + var _, done = false; + for (var i = decorators.length - 1; i >= 0; i--) { + var context2 = {}; + for (var p in contextIn) context2[p] = p === "access" ? {} : contextIn[p]; + for (var p in contextIn.access) context2.access[p] = contextIn.access[p]; + context2.addInitializer = function(f) { + if (done) throw new TypeError("Cannot add initializers after decoration has completed"); + extraInitializers.push(accept(f || null)); }; - HttpProxyAgent.protocols = ["http", "https"]; - exports2.HttpProxyAgent = HttpProxyAgent; - function omit2(obj, ...keys) { - const ret = {}; - let key; - for (key in obj) { - if (!keys.includes(key)) { - ret[key] = obj[key]; - } - } - return ret; + var result = (0, decorators[i])(kind === "accessor" ? 
{ get: descriptor.get, set: descriptor.set } : descriptor[key], context2); + if (kind === "accessor") { + if (result === void 0) continue; + if (result === null || typeof result !== "object") throw new TypeError("Object expected"); + if (_ = accept(result.get)) descriptor.get = _; + if (_ = accept(result.set)) descriptor.set = _; + if (_ = accept(result.init)) initializers.unshift(_); + } else if (_ = accept(result)) { + if (kind === "field") initializers.unshift(_); + else descriptor[key] = _; } } -}); - -// node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/proxyPolicy.js -var require_proxyPolicy = __commonJS({ - "node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/proxyPolicy.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.globalNoProxyList = exports2.proxyPolicyName = void 0; - exports2.loadNoProxy = loadNoProxy; - exports2.getDefaultProxySettings = getDefaultProxySettings; - exports2.proxyPolicy = proxyPolicy; - var https_proxy_agent_1 = require_dist2(); - var http_proxy_agent_1 = require_dist3(); - var log_js_1 = require_log2(); - var HTTPS_PROXY = "HTTPS_PROXY"; - var HTTP_PROXY = "HTTP_PROXY"; - var ALL_PROXY = "ALL_PROXY"; - var NO_PROXY = "NO_PROXY"; - exports2.proxyPolicyName = "proxyPolicy"; - exports2.globalNoProxyList = []; - var noProxyListLoaded = false; - var globalBypassedMap = /* @__PURE__ */ new Map(); - function getEnvironmentValue(name) { - if (process.env[name]) { - return process.env[name]; - } else if (process.env[name.toLowerCase()]) { - return process.env[name.toLowerCase()]; + if (target) Object.defineProperty(target, contextIn.name, descriptor); + done = true; +} +function __runInitializers(thisArg, initializers, value) { + var useValue = arguments.length > 2; + for (var i = 0; i < initializers.length; i++) { + value = useValue ? initializers[i].call(thisArg, value) : initializers[i].call(thisArg); + } + return useValue ? value : void 0; +} +function __propKey(x) { + return typeof x === "symbol" ? x : "".concat(x); +} +function __setFunctionName(f, name, prefix) { + if (typeof name === "symbol") name = name.description ? "[".concat(name.description, "]") : ""; + return Object.defineProperty(f, "name", { configurable: true, value: prefix ? "".concat(prefix, " ", name) : name }); +} +function __metadata(metadataKey, metadataValue) { + if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(metadataKey, metadataValue); +} +function __awaiter(thisArg, _arguments, P, generator) { + function adopt(value) { + return value instanceof P ? 
value : new P(function(resolve6) { + resolve6(value); + }); + } + return new (P || (P = Promise))(function(resolve6, reject) { + function fulfilled(value) { + try { + step(generator.next(value)); + } catch (e) { + reject(e); } - return void 0; } - function loadEnvironmentProxyValue() { - if (!process) { - return void 0; + function rejected(value) { + try { + step(generator["throw"](value)); + } catch (e) { + reject(e); } - const httpsProxy = getEnvironmentValue(HTTPS_PROXY); - const allProxy = getEnvironmentValue(ALL_PROXY); - const httpProxy = getEnvironmentValue(HTTP_PROXY); - return httpsProxy || allProxy || httpProxy; } - function isBypassed(uri, noProxyList, bypassedMap) { - if (noProxyList.length === 0) { - return false; - } - const host = new URL(uri).hostname; - if (bypassedMap?.has(host)) { - return bypassedMap.get(host); - } - let isBypassedFlag = false; - for (const pattern of noProxyList) { - if (pattern[0] === ".") { - if (host.endsWith(pattern)) { - isBypassedFlag = true; - } else { - if (host.length === pattern.length - 1 && host === pattern.slice(1)) { - isBypassedFlag = true; - } + function step(result) { + result.done ? resolve6(result.value) : adopt(result.value).then(fulfilled, rejected); + } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +} +function __generator(thisArg, body) { + var _ = { label: 0, sent: function() { + if (t[0] & 1) throw t[1]; + return t[1]; + }, trys: [], ops: [] }, f, y, t, g = Object.create((typeof Iterator === "function" ? Iterator : Object).prototype); + return g.next = verb(0), g["throw"] = verb(1), g["return"] = verb(2), typeof Symbol === "function" && (g[Symbol.iterator] = function() { + return this; + }), g; + function verb(n) { + return function(v) { + return step([n, v]); + }; + } + function step(op) { + if (f) throw new TypeError("Generator is already executing."); + while (g && (g = 0, op[0] && (_ = 0)), _) try { + if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t; + if (y = 0, t) op = [op[0] & 2, t.value]; + switch (op[0]) { + case 0: + case 1: + t = op; + break; + case 4: + _.label++; + return { value: op[1], done: false }; + case 5: + _.label++; + y = op[1]; + op = [0]; + continue; + case 7: + op = _.ops.pop(); + _.trys.pop(); + continue; + default: + if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { + _ = 0; + continue; } - } else { - if (host === pattern) { - isBypassedFlag = true; + if (op[0] === 3 && (!t || op[1] > t[0] && op[1] < t[3])) { + _.label = op[1]; + break; } - } + if (op[0] === 6 && _.label < t[1]) { + _.label = t[1]; + t = op; + break; + } + if (t && _.label < t[2]) { + _.label = t[2]; + _.ops.push(op); + break; + } + if (t[2]) _.ops.pop(); + _.trys.pop(); + continue; } - bypassedMap?.set(host, isBypassedFlag); - return isBypassedFlag; + op = body.call(thisArg, _); + } catch (e) { + op = [6, e]; + y = 0; + } finally { + f = t = 0; } - function loadNoProxy() { - const noProxy = getEnvironmentValue(NO_PROXY); - noProxyListLoaded = true; - if (noProxy) { - return noProxy.split(",").map((item) => item.trim()).filter((item) => item.length); - } - return []; + if (op[0] & 5) throw op[1]; + return { value: op[0] ? 
op[1] : void 0, done: true }; + } +} +function __exportStar(m, o) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(o, p)) __createBinding(o, m, p); +} +function __values2(o) { + var s = typeof Symbol === "function" && Symbol.iterator, m = s && o[s], i = 0; + if (m) return m.call(o); + if (o && typeof o.length === "number") return { + next: function() { + if (o && i >= o.length) o = void 0; + return { value: o && o[i++], done: !o }; } - function getDefaultProxySettings(proxyUrl) { - if (!proxyUrl) { - proxyUrl = loadEnvironmentProxyValue(); - if (!proxyUrl) { - return void 0; - } - } - const parsedUrl = new URL(proxyUrl); - const schema2 = parsedUrl.protocol ? parsedUrl.protocol + "//" : ""; - return { - host: schema2 + parsedUrl.hostname, - port: Number.parseInt(parsedUrl.port || "80"), - username: parsedUrl.username, - password: parsedUrl.password + }; + throw new TypeError(s ? "Object is not iterable." : "Symbol.iterator is not defined."); +} +function __read(o, n) { + var m = typeof Symbol === "function" && o[Symbol.iterator]; + if (!m) return o; + var i = m.call(o), r, ar = [], e; + try { + while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value); + } catch (error3) { + e = { error: error3 }; + } finally { + try { + if (r && !r.done && (m = i["return"])) m.call(i); + } finally { + if (e) throw e.error; + } + } + return ar; +} +function __spread() { + for (var ar = [], i = 0; i < arguments.length; i++) + ar = ar.concat(__read(arguments[i])); + return ar; +} +function __spreadArrays() { + for (var s = 0, i = 0, il = arguments.length; i < il; i++) s += arguments[i].length; + for (var r = Array(s), k = 0, i = 0; i < il; i++) + for (var a = arguments[i], j = 0, jl = a.length; j < jl; j++, k++) + r[k] = a[j]; + return r; +} +function __spreadArray(to, from, pack) { + if (pack || arguments.length === 2) for (var i = 0, l = from.length, ar; i < l; i++) { + if (ar || !(i in from)) { + if (!ar) ar = Array.prototype.slice.call(from, 0, i); + ar[i] = from[i]; + } + } + return to.concat(ar || Array.prototype.slice.call(from)); +} +function __await(v) { + return this instanceof __await ? (this.v = v, this) : new __await(v); +} +function __asyncGenerator(thisArg, _arguments, generator) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var g = generator.apply(thisArg, _arguments || []), i, q = []; + return i = Object.create((typeof AsyncIterator === "function" ? AsyncIterator : Object).prototype), verb("next"), verb("throw"), verb("return", awaitReturn), i[Symbol.asyncIterator] = function() { + return this; + }, i; + function awaitReturn(f) { + return function(v) { + return Promise.resolve(v).then(f, reject); + }; + } + function verb(n, f) { + if (g[n]) { + i[n] = function(v) { + return new Promise(function(a, b) { + q.push([n, v, a, b]) > 1 || resume(n, v); + }); }; + if (f) i[n] = f(i[n]); } - function getDefaultProxySettingsInternal() { - const envProxy = loadEnvironmentProxyValue(); - return envProxy ? new URL(envProxy) : void 0; + } + function resume(n, v) { + try { + step(g[n](v)); + } catch (e) { + settle(q[0][3], e); } - function getUrlFromProxySettings(settings) { - let parsedProxyUrl; + } + function step(r) { + r.value instanceof __await ? 
Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); + } + function fulfill(value) { + resume("next", value); + } + function reject(value) { + resume("throw", value); + } + function settle(f, v) { + if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); + } +} +function __asyncDelegator(o) { + var i, p; + return i = {}, verb("next"), verb("throw", function(e) { + throw e; + }), verb("return"), i[Symbol.iterator] = function() { + return this; + }, i; + function verb(n, f) { + i[n] = o[n] ? function(v) { + return (p = !p) ? { value: __await(o[n](v)), done: false } : f ? f(v) : v; + } : f; + } +} +function __asyncValues(o) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var m = o[Symbol.asyncIterator], i; + return m ? m.call(o) : (o = typeof __values2 === "function" ? __values2(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function() { + return this; + }, i); + function verb(n) { + i[n] = o[n] && function(v) { + return new Promise(function(resolve6, reject) { + v = o[n](v), settle(resolve6, reject, v.done, v.value); + }); + }; + } + function settle(resolve6, reject, d, v) { + Promise.resolve(v).then(function(v2) { + resolve6({ value: v2, done: d }); + }, reject); + } +} +function __makeTemplateObject(cooked, raw) { + if (Object.defineProperty) { + Object.defineProperty(cooked, "raw", { value: raw }); + } else { + cooked.raw = raw; + } + return cooked; +} +function __importStar(mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) { + for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); + } + __setModuleDefault(result, mod); + return result; +} +function __importDefault(mod) { + return mod && mod.__esModule ? mod : { default: mod }; +} +function __classPrivateFieldGet(receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); +} +function __classPrivateFieldSet(receiver, state, value, kind, f) { + if (kind === "m") throw new TypeError("Private method is not writable"); + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); + return kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value), value; +} +function __classPrivateFieldIn(state, receiver) { + if (receiver === null || typeof receiver !== "object" && typeof receiver !== "function") throw new TypeError("Cannot use 'in' operator on non-object"); + return typeof state === "function" ? 
receiver === state : state.has(receiver); +} +function __addDisposableResource(env, value, async) { + if (value !== null && value !== void 0) { + if (typeof value !== "object" && typeof value !== "function") throw new TypeError("Object expected."); + var dispose, inner; + if (async) { + if (!Symbol.asyncDispose) throw new TypeError("Symbol.asyncDispose is not defined."); + dispose = value[Symbol.asyncDispose]; + } + if (dispose === void 0) { + if (!Symbol.dispose) throw new TypeError("Symbol.dispose is not defined."); + dispose = value[Symbol.dispose]; + if (async) inner = dispose; + } + if (typeof dispose !== "function") throw new TypeError("Object not disposable."); + if (inner) dispose = function() { try { - parsedProxyUrl = new URL(settings.host); - } catch { - throw new Error(`Expecting a valid host string in proxy settings, but found "${settings.host}".`); - } - parsedProxyUrl.port = String(settings.port); - if (settings.username) { - parsedProxyUrl.username = settings.username; + inner.call(this); + } catch (e) { + return Promise.reject(e); } - if (settings.password) { - parsedProxyUrl.password = settings.password; + }; + env.stack.push({ value, dispose, async }); + } else if (async) { + env.stack.push({ async: true }); + } + return value; +} +function __disposeResources(env) { + function fail(e) { + env.error = env.hasError ? new _SuppressedError(e, env.error, "An error was suppressed during disposal.") : e; + env.hasError = true; + } + var r, s = 0; + function next() { + while (r = env.stack.pop()) { + try { + if (!r.async && s === 1) return s = 0, env.stack.push(r), Promise.resolve().then(next); + if (r.dispose) { + var result = r.dispose.call(r.value); + if (r.async) return s |= 2, Promise.resolve(result).then(next, function(e) { + fail(e); + return next(); + }); + } else s |= 1; + } catch (e) { + fail(e); } - return parsedProxyUrl; } - function setProxyAgentOnRequest(request2, cachedAgents, proxyUrl) { - if (request2.agent) { - return; - } - const url2 = new URL(request2.url); - const isInsecure = url2.protocol !== "https:"; - if (request2.tlsSettings) { - log_js_1.logger.warning("TLS settings are not supported in combination with custom Proxy, certificates provided to the client will be ignored."); - } - const headers = request2.headers.toJSON(); - if (isInsecure) { - if (!cachedAgents.httpProxyAgent) { - cachedAgents.httpProxyAgent = new http_proxy_agent_1.HttpProxyAgent(proxyUrl, { headers }); - } - request2.agent = cachedAgents.httpProxyAgent; - } else { - if (!cachedAgents.httpsProxyAgent) { - cachedAgents.httpsProxyAgent = new https_proxy_agent_1.HttpsProxyAgent(proxyUrl, { headers }); + if (s === 1) return env.hasError ? Promise.reject(env.error) : Promise.resolve(); + if (env.hasError) throw env.error; + } + return next(); +} +function __rewriteRelativeImportExtension(path13, preserveJsx) { + if (typeof path13 === "string" && /^\.\.?\//.test(path13)) { + return path13.replace(/\.(tsx)$|((?:\.d)?)((?:\.[^./]+?)?)\.([cm]?)ts$/i, function(m, tsx, d, ext, cm) { + return tsx ? preserveJsx ? ".jsx" : ".js" : d && (!ext || !cm) ? m : d + ext + "." 
+ cm.toLowerCase() + "js"; + }); + } + return path13; +} +var extendStatics, __assign, __createBinding, __setModuleDefault, ownKeys, _SuppressedError, tslib_es6_default; +var init_tslib_es6 = __esm({ + "node_modules/tslib/tslib.es6.mjs"() { + extendStatics = function(d, b) { + extendStatics = Object.setPrototypeOf || { __proto__: [] } instanceof Array && function(d2, b2) { + d2.__proto__ = b2; + } || function(d2, b2) { + for (var p in b2) if (Object.prototype.hasOwnProperty.call(b2, p)) d2[p] = b2[p]; + }; + return extendStatics(d, b); + }; + __assign = function() { + __assign = Object.assign || function __assign2(t) { + for (var s, i = 1, n = arguments.length; i < n; i++) { + s = arguments[i]; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p]; } - request2.agent = cachedAgents.httpsProxyAgent; - } - } - function proxyPolicy(proxySettings, options) { - if (!noProxyListLoaded) { - exports2.globalNoProxyList.push(...loadNoProxy()); + return t; + }; + return __assign.apply(this, arguments); + }; + __createBinding = Object.create ? (function(o, m, k, k2) { + if (k2 === void 0) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { + return m[k]; + } }; } - const defaultProxy = proxySettings ? getUrlFromProxySettings(proxySettings) : getDefaultProxySettingsInternal(); - const cachedAgents = {}; - return { - name: exports2.proxyPolicyName, - async sendRequest(request2, next) { - if (!request2.proxySettings && defaultProxy && !isBypassed(request2.url, options?.customNoProxyList ?? exports2.globalNoProxyList, options?.customNoProxyList ? void 0 : globalBypassedMap)) { - setProxyAgentOnRequest(request2, cachedAgents, defaultProxy); - } else if (request2.proxySettings) { - setProxyAgentOnRequest(request2, cachedAgents, getUrlFromProxySettings(request2.proxySettings)); - } - return next(request2); - } + Object.defineProperty(o, k2, desc); + }) : (function(o, m, k, k2) { + if (k2 === void 0) k2 = k; + o[k2] = m[k]; + }); + __setModuleDefault = Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); + }) : function(o, v) { + o["default"] = v; + }; + ownKeys = function(o) { + ownKeys = Object.getOwnPropertyNames || function(o2) { + var ar = []; + for (var k in o2) if (Object.prototype.hasOwnProperty.call(o2, k)) ar[ar.length] = k; + return ar; }; - } + return ownKeys(o); + }; + _SuppressedError = typeof SuppressedError === "function" ? 
SuppressedError : function(error3, suppressed, message) { + var e = new Error(message); + return e.name = "SuppressedError", e.error = error3, e.suppressed = suppressed, e; + }; + tslib_es6_default = { + __extends, + __assign, + __rest, + __decorate, + __param, + __esDecorate, + __runInitializers, + __propKey, + __setFunctionName, + __metadata, + __awaiter, + __generator, + __createBinding, + __exportStar, + __values: __values2, + __read, + __spread, + __spreadArrays, + __spreadArray, + __await, + __asyncGenerator, + __asyncDelegator, + __asyncValues, + __makeTemplateObject, + __importStar, + __importDefault, + __classPrivateFieldGet, + __classPrivateFieldSet, + __classPrivateFieldIn, + __addDisposableResource, + __disposeResources, + __rewriteRelativeImportExtension + }; } }); -// node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/agentPolicy.js -var require_agentPolicy = __commonJS({ - "node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/agentPolicy.js"(exports2) { +// node_modules/@typespec/ts-http-runtime/dist/commonjs/abort-controller/AbortError.js +var require_AbortError = __commonJS({ + "node_modules/@typespec/ts-http-runtime/dist/commonjs/abort-controller/AbortError.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.agentPolicyName = void 0; - exports2.agentPolicy = agentPolicy; - exports2.agentPolicyName = "agentPolicy"; - function agentPolicy(agent) { - return { - name: exports2.agentPolicyName, - sendRequest: async (req, next) => { - if (!req.agent) { - req.agent = agent; - } - return next(req); - } - }; - } + exports2.AbortError = void 0; + var AbortError = class extends Error { + constructor(message) { + super(message); + this.name = "AbortError"; + } + }; + exports2.AbortError = AbortError; } }); -// node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/tlsPolicy.js -var require_tlsPolicy = __commonJS({ - "node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/tlsPolicy.js"(exports2) { +// node_modules/@typespec/ts-http-runtime/dist/commonjs/logger/log.js +var require_log = __commonJS({ + "node_modules/@typespec/ts-http-runtime/dist/commonjs/logger/log.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.tlsPolicyName = void 0; - exports2.tlsPolicy = tlsPolicy; - exports2.tlsPolicyName = "tlsPolicy"; - function tlsPolicy(tlsSettings) { - return { - name: exports2.tlsPolicyName, - sendRequest: async (req, next) => { - if (!req.tlsSettings) { - req.tlsSettings = tlsSettings; - } - return next(req); - } - }; + exports2.log = log; + var tslib_1 = (init_tslib_es6(), __toCommonJS(tslib_es6_exports)); + var node_os_1 = require("node:os"); + var node_util_1 = tslib_1.__importDefault(require("node:util")); + var node_process_1 = tslib_1.__importDefault(require("node:process")); + function log(message, ...args) { + node_process_1.default.stderr.write(`${node_util_1.default.format(message, ...args)}${node_os_1.EOL}`); } } }); -// node_modules/@typespec/ts-http-runtime/dist/commonjs/util/typeGuards.js -var require_typeGuards = __commonJS({ - "node_modules/@typespec/ts-http-runtime/dist/commonjs/util/typeGuards.js"(exports2) { +// node_modules/@typespec/ts-http-runtime/dist/commonjs/logger/debug.js +var require_debug2 = __commonJS({ + "node_modules/@typespec/ts-http-runtime/dist/commonjs/logger/debug.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.isNodeReadableStream = isNodeReadableStream; - 
exports2.isWebReadableStream = isWebReadableStream; - exports2.isBinaryBody = isBinaryBody; - exports2.isReadableStream = isReadableStream; - exports2.isBlob = isBlob; - function isNodeReadableStream(x) { - return Boolean(x && typeof x["pipe"] === "function"); - } - function isWebReadableStream(x) { - return Boolean(x && typeof x.getReader === "function" && typeof x.tee === "function"); - } - function isBinaryBody(body) { - return body !== void 0 && (body instanceof Uint8Array || isReadableStream(body) || typeof body === "function" || body instanceof Blob); - } - function isReadableStream(x) { - return isNodeReadableStream(x) || isWebReadableStream(x); - } - function isBlob(x) { - return typeof x.stream === "function"; + var log_js_1 = require_log(); + var debugEnvVariable = typeof process !== "undefined" && process.env && process.env.DEBUG || void 0; + var enabledString; + var enabledNamespaces = []; + var skippedNamespaces = []; + var debuggers = []; + if (debugEnvVariable) { + enable(debugEnvVariable); } - } -}); - -// node_modules/@typespec/ts-http-runtime/dist/commonjs/util/concat.js -var require_concat = __commonJS({ - "node_modules/@typespec/ts-http-runtime/dist/commonjs/util/concat.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.concat = concat; - var stream_1 = require("stream"); - var typeGuards_js_1 = require_typeGuards(); - async function* streamAsyncIterator() { - const reader = this.getReader(); - try { - while (true) { - const { done, value } = await reader.read(); - if (done) { - return; - } - yield value; + var debugObj = Object.assign((namespace) => { + return createDebugger(namespace); + }, { + enable, + enabled, + disable, + log: log_js_1.log + }); + function enable(namespaces) { + enabledString = namespaces; + enabledNamespaces = []; + skippedNamespaces = []; + const namespaceList = namespaces.split(",").map((ns) => ns.trim()); + for (const ns of namespaceList) { + if (ns.startsWith("-")) { + skippedNamespaces.push(ns.substring(1)); + } else { + enabledNamespaces.push(ns); } - } finally { - reader.releaseLock(); + } + for (const instance of debuggers) { + instance.enabled = enabled(instance.namespace); } } - function makeAsyncIterable(webStream) { - if (!webStream[Symbol.asyncIterator]) { - webStream[Symbol.asyncIterator] = streamAsyncIterator.bind(webStream); + function enabled(namespace) { + if (namespace.endsWith("*")) { + return true; } - if (!webStream.values) { - webStream.values = streamAsyncIterator.bind(webStream); + for (const skipped of skippedNamespaces) { + if (namespaceMatches(namespace, skipped)) { + return false; + } } - } - function ensureNodeStream(stream2) { - if (stream2 instanceof ReadableStream) { - makeAsyncIterable(stream2); - return stream_1.Readable.fromWeb(stream2); - } else { - return stream2; + for (const enabledNamespace of enabledNamespaces) { + if (namespaceMatches(namespace, enabledNamespace)) { + return true; + } } + return false; } - function toStream(source) { - if (source instanceof Uint8Array) { - return stream_1.Readable.from(Buffer.from(source)); - } else if ((0, typeGuards_js_1.isBlob)(source)) { - return ensureNodeStream(source.stream()); - } else { - return ensureNodeStream(source); + function namespaceMatches(namespace, patternToMatch) { + if (patternToMatch.indexOf("*") === -1) { + return namespace === patternToMatch; } - } - async function concat(sources) { - return function() { - const streams = sources.map((x) => typeof x === "function" ? 
x() : x).map(toStream); - return stream_1.Readable.from((async function* () { - for (const stream2 of streams) { - for await (const chunk of stream2) { - yield chunk; + let pattern = patternToMatch; + if (patternToMatch.indexOf("**") !== -1) { + const patternParts = []; + let lastCharacter = ""; + for (const character of patternToMatch) { + if (character === "*" && lastCharacter === "*") { + continue; + } else { + lastCharacter = character; + patternParts.push(character); + } + } + pattern = patternParts.join(""); + } + let namespaceIndex = 0; + let patternIndex = 0; + const patternLength = pattern.length; + const namespaceLength = namespace.length; + let lastWildcard = -1; + let lastWildcardNamespace = -1; + while (namespaceIndex < namespaceLength && patternIndex < patternLength) { + if (pattern[patternIndex] === "*") { + lastWildcard = patternIndex; + patternIndex++; + if (patternIndex === patternLength) { + return true; + } + while (namespace[namespaceIndex] !== pattern[patternIndex]) { + namespaceIndex++; + if (namespaceIndex === namespaceLength) { + return false; } } - })()); - }; + lastWildcardNamespace = namespaceIndex; + namespaceIndex++; + patternIndex++; + continue; + } else if (pattern[patternIndex] === namespace[namespaceIndex]) { + patternIndex++; + namespaceIndex++; + } else if (lastWildcard >= 0) { + patternIndex = lastWildcard + 1; + namespaceIndex = lastWildcardNamespace + 1; + if (namespaceIndex === namespaceLength) { + return false; + } + while (namespace[namespaceIndex] !== pattern[patternIndex]) { + namespaceIndex++; + if (namespaceIndex === namespaceLength) { + return false; + } + } + lastWildcardNamespace = namespaceIndex; + namespaceIndex++; + patternIndex++; + continue; + } else { + return false; + } + } + const namespaceDone = namespaceIndex === namespace.length; + const patternDone = patternIndex === pattern.length; + const trailingWildCard = patternIndex === pattern.length - 1 && pattern[patternIndex] === "*"; + return namespaceDone && (patternDone || trailingWildCard); + } + function disable() { + const result = enabledString || ""; + enable(""); + return result; + } + function createDebugger(namespace) { + const newDebugger = Object.assign(debug6, { + enabled: enabled(namespace), + destroy, + log: debugObj.log, + namespace, + extend: extend3 + }); + function debug6(...args) { + if (!newDebugger.enabled) { + return; + } + if (args.length > 0) { + args[0] = `${namespace} ${args[0]}`; + } + newDebugger.log(...args); + } + debuggers.push(newDebugger); + return newDebugger; + } + function destroy() { + const index = debuggers.indexOf(this); + if (index >= 0) { + debuggers.splice(index, 1); + return true; + } + return false; + } + function extend3(namespace) { + const newDebugger = createDebugger(`${this.namespace}:${namespace}`); + newDebugger.log = this.log; + return newDebugger; } + exports2.default = debugObj; } }); -// node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/multipartPolicy.js -var require_multipartPolicy = __commonJS({ - "node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/multipartPolicy.js"(exports2) { +// node_modules/@typespec/ts-http-runtime/dist/commonjs/logger/logger.js +var require_logger = __commonJS({ + "node_modules/@typespec/ts-http-runtime/dist/commonjs/logger/logger.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.multipartPolicyName = void 0; - exports2.multipartPolicy = multipartPolicy; - var bytesEncoding_js_1 = require_bytesEncoding(); - var 
typeGuards_js_1 = require_typeGuards(); - var uuidUtils_js_1 = require_uuidUtils(); - var concat_js_1 = require_concat(); - function generateBoundary() { - return `----AzSDKFormBoundary${(0, uuidUtils_js_1.randomUUID)()}`; + exports2.TypeSpecRuntimeLogger = void 0; + exports2.createLoggerContext = createLoggerContext; + exports2.setLogLevel = setLogLevel; + exports2.getLogLevel = getLogLevel; + exports2.createClientLogger = createClientLogger; + var tslib_1 = (init_tslib_es6(), __toCommonJS(tslib_es6_exports)); + var debug_js_1 = tslib_1.__importDefault(require_debug2()); + var TYPESPEC_RUNTIME_LOG_LEVELS = ["verbose", "info", "warning", "error"]; + var levelMap = { + verbose: 400, + info: 300, + warning: 200, + error: 100 + }; + function patchLogMethod(parent, child) { + child.log = (...args) => { + parent.log(...args); + }; } - function encodeHeaders(headers) { - let result = ""; - for (const [key, value] of headers) { - result += `${key}: ${value}\r -`; - } - return result; + function isTypeSpecRuntimeLogLevel(level) { + return TYPESPEC_RUNTIME_LOG_LEVELS.includes(level); } - function getLength(source) { - if (source instanceof Uint8Array) { - return source.byteLength; - } else if ((0, typeGuards_js_1.isBlob)(source)) { - return source.size === -1 ? void 0 : source.size; - } else { - return void 0; + function createLoggerContext(options) { + const registeredLoggers = /* @__PURE__ */ new Set(); + const logLevelFromEnv = typeof process !== "undefined" && process.env && process.env[options.logLevelEnvVarName] || void 0; + let logLevel; + const clientLogger = (0, debug_js_1.default)(options.namespace); + clientLogger.log = (...args) => { + debug_js_1.default.log(...args); + }; + function contextSetLogLevel(level) { + if (level && !isTypeSpecRuntimeLogLevel(level)) { + throw new Error(`Unknown log level '${level}'. Acceptable values: ${TYPESPEC_RUNTIME_LOG_LEVELS.join(",")}`); + } + logLevel = level; + const enabledNamespaces = []; + for (const logger of registeredLoggers) { + if (shouldEnable(logger)) { + enabledNamespaces.push(logger.namespace); + } + } + debug_js_1.default.enable(enabledNamespaces.join(",")); } - } - function getTotalLength(sources) { - let total = 0; - for (const source of sources) { - const partLength = getLength(source); - if (partLength === void 0) { - return void 0; + if (logLevelFromEnv) { + if (isTypeSpecRuntimeLogLevel(logLevelFromEnv)) { + contextSetLogLevel(logLevelFromEnv); } else { - total += partLength; + console.error(`${options.logLevelEnvVarName} set to unknown log level '${logLevelFromEnv}'; logging is not enabled. 
Acceptable values: ${TYPESPEC_RUNTIME_LOG_LEVELS.join(", ")}.`); } } - return total; - } - async function buildRequestBody(request2, parts, boundary) { - const sources = [ - (0, bytesEncoding_js_1.stringToUint8Array)(`--${boundary}`, "utf-8"), - ...parts.flatMap((part) => [ - (0, bytesEncoding_js_1.stringToUint8Array)("\r\n", "utf-8"), - (0, bytesEncoding_js_1.stringToUint8Array)(encodeHeaders(part.headers), "utf-8"), - (0, bytesEncoding_js_1.stringToUint8Array)("\r\n", "utf-8"), - part.body, - (0, bytesEncoding_js_1.stringToUint8Array)(`\r ---${boundary}`, "utf-8") - ]), - (0, bytesEncoding_js_1.stringToUint8Array)("--\r\n\r\n", "utf-8") - ]; - const contentLength = getTotalLength(sources); - if (contentLength) { - request2.headers.set("Content-Length", contentLength); + function shouldEnable(logger) { + return Boolean(logLevel && levelMap[logger.level] <= levelMap[logLevel]); } - request2.body = await (0, concat_js_1.concat)(sources); - } - exports2.multipartPolicyName = "multipartPolicy"; - var maxBoundaryLength = 70; - var validBoundaryCharacters = new Set(`abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789'()+,-./:=?`); - function assertValidBoundary(boundary) { - if (boundary.length > maxBoundaryLength) { - throw new Error(`Multipart boundary "${boundary}" exceeds maximum length of 70 characters`); + function createLogger2(parent, level) { + const logger = Object.assign(parent.extend(level), { + level + }); + patchLogMethod(parent, logger); + if (shouldEnable(logger)) { + const enabledNamespaces = debug_js_1.default.disable(); + debug_js_1.default.enable(enabledNamespaces + "," + logger.namespace); + } + registeredLoggers.add(logger); + return logger; } - if (Array.from(boundary).some((x) => !validBoundaryCharacters.has(x))) { - throw new Error(`Multipart boundary "${boundary}" contains invalid characters`); + function contextGetLogLevel() { + return logLevel; + } + function contextCreateClientLogger(namespace) { + const clientRootLogger = clientLogger.extend(namespace); + patchLogMethod(clientLogger, clientRootLogger); + return { + error: createLogger2(clientRootLogger, "error"), + warning: createLogger2(clientRootLogger, "warning"), + info: createLogger2(clientRootLogger, "info"), + verbose: createLogger2(clientRootLogger, "verbose") + }; } - } - function multipartPolicy() { return { - name: exports2.multipartPolicyName, - async sendRequest(request2, next) { - if (!request2.multipartBody) { - return next(request2); - } - if (request2.body) { - throw new Error("multipartBody and regular body cannot be set at the same time"); - } - let boundary = request2.multipartBody.boundary; - const contentTypeHeader = request2.headers.get("Content-Type") ?? 
"multipart/mixed"; - const parsedHeader = contentTypeHeader.match(/^(multipart\/[^ ;]+)(?:; *boundary=(.+))?$/); - if (!parsedHeader) { - throw new Error(`Got multipart request body, but content-type header was not multipart: ${contentTypeHeader}`); - } - const [, contentType, parsedBoundary] = parsedHeader; - if (parsedBoundary && boundary && parsedBoundary !== boundary) { - throw new Error(`Multipart boundary was specified as ${parsedBoundary} in the header, but got ${boundary} in the request body`); - } - boundary ??= parsedBoundary; - if (boundary) { - assertValidBoundary(boundary); - } else { - boundary = generateBoundary(); - } - request2.headers.set("Content-Type", `${contentType}; boundary=${boundary}`); - await buildRequestBody(request2, request2.multipartBody.parts, boundary); - request2.multipartBody = void 0; - return next(request2); - } + setLogLevel: contextSetLogLevel, + getLogLevel: contextGetLogLevel, + createClientLogger: contextCreateClientLogger, + logger: clientLogger }; } + var context2 = createLoggerContext({ + logLevelEnvVarName: "TYPESPEC_RUNTIME_LOG_LEVEL", + namespace: "typeSpecRuntime" + }); + exports2.TypeSpecRuntimeLogger = context2.logger; + function setLogLevel(logLevel) { + context2.setLogLevel(logLevel); + } + function getLogLevel() { + return context2.getLogLevel(); + } + function createClientLogger(namespace) { + return context2.createClientLogger(namespace); + } } }); -// node_modules/@typespec/ts-http-runtime/dist/commonjs/createPipelineFromOptions.js -var require_createPipelineFromOptions = __commonJS({ - "node_modules/@typespec/ts-http-runtime/dist/commonjs/createPipelineFromOptions.js"(exports2) { +// node_modules/@typespec/ts-http-runtime/dist/commonjs/httpHeaders.js +var require_httpHeaders = __commonJS({ + "node_modules/@typespec/ts-http-runtime/dist/commonjs/httpHeaders.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.createPipelineFromOptions = createPipelineFromOptions; - var logPolicy_js_1 = require_logPolicy(); - var pipeline_js_1 = require_pipeline(); - var redirectPolicy_js_1 = require_redirectPolicy(); - var userAgentPolicy_js_1 = require_userAgentPolicy(); - var decompressResponsePolicy_js_1 = require_decompressResponsePolicy(); - var defaultRetryPolicy_js_1 = require_defaultRetryPolicy(); - var formDataPolicy_js_1 = require_formDataPolicy(); - var checkEnvironment_js_1 = require_checkEnvironment(); - var proxyPolicy_js_1 = require_proxyPolicy(); - var agentPolicy_js_1 = require_agentPolicy(); - var tlsPolicy_js_1 = require_tlsPolicy(); - var multipartPolicy_js_1 = require_multipartPolicy(); - function createPipelineFromOptions(options) { - const pipeline = (0, pipeline_js_1.createEmptyPipeline)(); - if (checkEnvironment_js_1.isNodeLike) { - if (options.agent) { - pipeline.addPolicy((0, agentPolicy_js_1.agentPolicy)(options.agent)); + exports2.createHttpHeaders = createHttpHeaders; + function normalizeName(name) { + return name.toLowerCase(); + } + function* headerIterator(map2) { + for (const entry of map2.values()) { + yield [entry.name, entry.value]; + } + } + var HttpHeadersImpl = class { + _headersMap; + constructor(rawHeaders) { + this._headersMap = /* @__PURE__ */ new Map(); + if (rawHeaders) { + for (const headerName of Object.keys(rawHeaders)) { + this.set(headerName, rawHeaders[headerName]); + } } - if (options.tlsOptions) { - pipeline.addPolicy((0, tlsPolicy_js_1.tlsPolicy)(options.tlsOptions)); + } + /** + * Set a header in this collection with the provided name and 
value. The name is + * case-insensitive. + * @param name - The name of the header to set. This value is case-insensitive. + * @param value - The value of the header to set. + */ + set(name, value) { + this._headersMap.set(normalizeName(name), { name, value: String(value).trim() }); + } + /** + * Get the header value for the provided header name, or undefined if no header exists in this + * collection with the provided name. + * @param name - The name of the header. This value is case-insensitive. + */ + get(name) { + return this._headersMap.get(normalizeName(name))?.value; + } + /** + * Get whether or not this header collection contains a header entry for the provided header name. + * @param name - The name of the header to set. This value is case-insensitive. + */ + has(name) { + return this._headersMap.has(normalizeName(name)); + } + /** + * Remove the header with the provided headerName. + * @param name - The name of the header to remove. + */ + delete(name) { + this._headersMap.delete(normalizeName(name)); + } + /** + * Get the JSON object representation of this HTTP header collection. + */ + toJSON(options = {}) { + const result = {}; + if (options.preserveCase) { + for (const entry of this._headersMap.values()) { + result[entry.name] = entry.value; + } + } else { + for (const [normalizedName, entry] of this._headersMap) { + result[normalizedName] = entry.value; + } } - pipeline.addPolicy((0, proxyPolicy_js_1.proxyPolicy)(options.proxyOptions)); - pipeline.addPolicy((0, decompressResponsePolicy_js_1.decompressResponsePolicy)()); + return result; } - pipeline.addPolicy((0, formDataPolicy_js_1.formDataPolicy)(), { beforePolicies: [multipartPolicy_js_1.multipartPolicyName] }); - pipeline.addPolicy((0, userAgentPolicy_js_1.userAgentPolicy)(options.userAgentOptions)); - pipeline.addPolicy((0, multipartPolicy_js_1.multipartPolicy)(), { afterPhase: "Deserialize" }); - pipeline.addPolicy((0, defaultRetryPolicy_js_1.defaultRetryPolicy)(options.retryOptions), { phase: "Retry" }); - if (checkEnvironment_js_1.isNodeLike) { - pipeline.addPolicy((0, redirectPolicy_js_1.redirectPolicy)(options.redirectOptions), { afterPhase: "Retry" }); + /** + * Get the string representation of this HTTP header collection. + */ + toString() { + return JSON.stringify(this.toJSON({ preserveCase: true })); } - pipeline.addPolicy((0, logPolicy_js_1.logPolicy)(options.loggingOptions), { afterPhase: "Sign" }); - return pipeline; + /** + * Iterate over tuples of header [name, value] pairs. 
+ */ + [Symbol.iterator]() { + return headerIterator(this._headersMap); + } + }; + function createHttpHeaders(rawHeaders) { + return new HttpHeadersImpl(rawHeaders); } } }); -// node_modules/@typespec/ts-http-runtime/dist/commonjs/client/apiVersionPolicy.js -var require_apiVersionPolicy = __commonJS({ - "node_modules/@typespec/ts-http-runtime/dist/commonjs/client/apiVersionPolicy.js"(exports2) { +// node_modules/@typespec/ts-http-runtime/dist/commonjs/auth/schemes.js +var require_schemes = __commonJS({ + "node_modules/@typespec/ts-http-runtime/dist/commonjs/auth/schemes.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.apiVersionPolicyName = void 0; - exports2.apiVersionPolicy = apiVersionPolicy; - exports2.apiVersionPolicyName = "ApiVersionPolicy"; - function apiVersionPolicy(options) { - return { - name: exports2.apiVersionPolicyName, - sendRequest: (req, next) => { - const url2 = new URL(req.url); - if (!url2.searchParams.get("api-version") && options.apiVersion) { - req.url = `${req.url}${Array.from(url2.searchParams.keys()).length > 0 ? "&" : "?"}api-version=${options.apiVersion}`; - } - return next(req); - } - }; - } } }); -// node_modules/@typespec/ts-http-runtime/dist/commonjs/auth/credentials.js -var require_credentials = __commonJS({ - "node_modules/@typespec/ts-http-runtime/dist/commonjs/auth/credentials.js"(exports2) { +// node_modules/@typespec/ts-http-runtime/dist/commonjs/auth/oauth2Flows.js +var require_oauth2Flows = __commonJS({ + "node_modules/@typespec/ts-http-runtime/dist/commonjs/auth/oauth2Flows.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.isOAuth2TokenCredential = isOAuth2TokenCredential; - exports2.isBearerTokenCredential = isBearerTokenCredential; - exports2.isBasicCredential = isBasicCredential; - exports2.isApiKeyCredential = isApiKeyCredential; - function isOAuth2TokenCredential(credential) { - return "getOAuth2Token" in credential; - } - function isBearerTokenCredential(credential) { - return "getBearerToken" in credential; - } - function isBasicCredential(credential) { - return "username" in credential && "password" in credential; - } - function isApiKeyCredential(credential) { - return "key" in credential; - } } }); -// node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/auth/checkInsecureConnection.js -var require_checkInsecureConnection = __commonJS({ - "node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/auth/checkInsecureConnection.js"(exports2) { +// node_modules/@typespec/ts-http-runtime/dist/commonjs/util/uuidUtils.js +var require_uuidUtils = __commonJS({ + "node_modules/@typespec/ts-http-runtime/dist/commonjs/util/uuidUtils.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.ensureSecureConnection = ensureSecureConnection; - var log_js_1 = require_log2(); - var insecureConnectionWarningEmmitted = false; - function allowInsecureConnection(request2, options) { - if (options.allowInsecureConnection && request2.allowInsecureConnection) { - const url2 = new URL(request2.url); - if (url2.hostname === "localhost" || url2.hostname === "127.0.0.1") { - return true; - } - } - return false; - } - function emitInsecureConnectionWarning() { - const warning10 = "Sending token over insecure transport. 
Assume any token issued is compromised."; - log_js_1.logger.warning(warning10); - if (typeof process?.emitWarning === "function" && !insecureConnectionWarningEmmitted) { - insecureConnectionWarningEmmitted = true; - process.emitWarning(warning10); - } - } - function ensureSecureConnection(request2, options) { - if (!request2.url.toLowerCase().startsWith("https://")) { - if (allowInsecureConnection(request2, options)) { - emitInsecureConnectionWarning(); - } else { - throw new Error("Authentication is not permitted for non-TLS protected (non-https) URLs when allowInsecureConnection is false."); - } - } + exports2.randomUUID = randomUUID2; + function randomUUID2() { + return crypto.randomUUID(); } } }); -// node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/auth/apiKeyAuthenticationPolicy.js -var require_apiKeyAuthenticationPolicy = __commonJS({ - "node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/auth/apiKeyAuthenticationPolicy.js"(exports2) { +// node_modules/@typespec/ts-http-runtime/dist/commonjs/pipelineRequest.js +var require_pipelineRequest = __commonJS({ + "node_modules/@typespec/ts-http-runtime/dist/commonjs/pipelineRequest.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.apiKeyAuthenticationPolicyName = void 0; - exports2.apiKeyAuthenticationPolicy = apiKeyAuthenticationPolicy; - var checkInsecureConnection_js_1 = require_checkInsecureConnection(); - exports2.apiKeyAuthenticationPolicyName = "apiKeyAuthenticationPolicy"; - function apiKeyAuthenticationPolicy(options) { - return { - name: exports2.apiKeyAuthenticationPolicyName, - async sendRequest(request2, next) { - (0, checkInsecureConnection_js_1.ensureSecureConnection)(request2, options); - const scheme = (request2.authSchemes ?? options.authSchemes)?.find((x) => x.kind === "apiKey"); - if (!scheme) { - return next(request2); - } - if (scheme.apiKeyLocation !== "header") { - throw new Error(`Unsupported API key location: ${scheme.apiKeyLocation}`); - } - request2.headers.set(scheme.name, options.credential.key); - return next(request2); - } - }; - } - } -}); - -// node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/auth/basicAuthenticationPolicy.js -var require_basicAuthenticationPolicy = __commonJS({ - "node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/auth/basicAuthenticationPolicy.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.basicAuthenticationPolicyName = void 0; - exports2.basicAuthenticationPolicy = basicAuthenticationPolicy; - var bytesEncoding_js_1 = require_bytesEncoding(); - var checkInsecureConnection_js_1 = require_checkInsecureConnection(); - exports2.basicAuthenticationPolicyName = "bearerAuthenticationPolicy"; - function basicAuthenticationPolicy(options) { - return { - name: exports2.basicAuthenticationPolicyName, - async sendRequest(request2, next) { - (0, checkInsecureConnection_js_1.ensureSecureConnection)(request2, options); - const scheme = (request2.authSchemes ?? 
options.authSchemes)?.find((x) => x.kind === "http" && x.scheme === "basic"); - if (!scheme) { - return next(request2); - } - const { username, password } = options.credential; - const headerValue = (0, bytesEncoding_js_1.uint8ArrayToString)((0, bytesEncoding_js_1.stringToUint8Array)(`${username}:${password}`, "utf-8"), "base64"); - request2.headers.set("Authorization", `Basic ${headerValue}`); - return next(request2); - } - }; + exports2.createPipelineRequest = createPipelineRequest; + var httpHeaders_js_1 = require_httpHeaders(); + var uuidUtils_js_1 = require_uuidUtils(); + var PipelineRequestImpl = class { + url; + method; + headers; + timeout; + withCredentials; + body; + multipartBody; + formData; + streamResponseStatusCodes; + enableBrowserStreams; + proxySettings; + disableKeepAlive; + abortSignal; + requestId; + allowInsecureConnection; + onUploadProgress; + onDownloadProgress; + requestOverrides; + authSchemes; + constructor(options) { + this.url = options.url; + this.body = options.body; + this.headers = options.headers ?? (0, httpHeaders_js_1.createHttpHeaders)(); + this.method = options.method ?? "GET"; + this.timeout = options.timeout ?? 0; + this.multipartBody = options.multipartBody; + this.formData = options.formData; + this.disableKeepAlive = options.disableKeepAlive ?? false; + this.proxySettings = options.proxySettings; + this.streamResponseStatusCodes = options.streamResponseStatusCodes; + this.withCredentials = options.withCredentials ?? false; + this.abortSignal = options.abortSignal; + this.onUploadProgress = options.onUploadProgress; + this.onDownloadProgress = options.onDownloadProgress; + this.requestId = options.requestId || (0, uuidUtils_js_1.randomUUID)(); + this.allowInsecureConnection = options.allowInsecureConnection ?? false; + this.enableBrowserStreams = options.enableBrowserStreams ?? false; + this.requestOverrides = options.requestOverrides; + this.authSchemes = options.authSchemes; + } + }; + function createPipelineRequest(options) { + return new PipelineRequestImpl(options); } } }); -// node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/auth/bearerAuthenticationPolicy.js -var require_bearerAuthenticationPolicy = __commonJS({ - "node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/auth/bearerAuthenticationPolicy.js"(exports2) { +// node_modules/@typespec/ts-http-runtime/dist/commonjs/pipeline.js +var require_pipeline = __commonJS({ + "node_modules/@typespec/ts-http-runtime/dist/commonjs/pipeline.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.bearerAuthenticationPolicyName = void 0; - exports2.bearerAuthenticationPolicy = bearerAuthenticationPolicy; - var checkInsecureConnection_js_1 = require_checkInsecureConnection(); - exports2.bearerAuthenticationPolicyName = "bearerAuthenticationPolicy"; - function bearerAuthenticationPolicy(options) { - return { - name: exports2.bearerAuthenticationPolicyName, - async sendRequest(request2, next) { - (0, checkInsecureConnection_js_1.ensureSecureConnection)(request2, options); - const scheme = (request2.authSchemes ?? options.authSchemes)?.find((x) => x.kind === "http" && x.scheme === "bearer"); - if (!scheme) { - return next(request2); + exports2.createEmptyPipeline = createEmptyPipeline; + var ValidPhaseNames = /* @__PURE__ */ new Set(["Deserialize", "Serialize", "Retry", "Sign"]); + var HttpPipeline = class _HttpPipeline { + _policies = []; + _orderedPolicies; + constructor(policies) { + this._policies = policies?.slice(0) ?? 
[]; + this._orderedPolicies = void 0; + } + addPolicy(policy, options = {}) { + if (options.phase && options.afterPhase) { + throw new Error("Policies inside a phase cannot specify afterPhase."); + } + if (options.phase && !ValidPhaseNames.has(options.phase)) { + throw new Error(`Invalid phase name: ${options.phase}`); + } + if (options.afterPhase && !ValidPhaseNames.has(options.afterPhase)) { + throw new Error(`Invalid afterPhase name: ${options.afterPhase}`); + } + this._policies.push({ + policy, + options + }); + this._orderedPolicies = void 0; + } + removePolicy(options) { + const removedPolicies = []; + this._policies = this._policies.filter((policyDescriptor) => { + if (options.name && policyDescriptor.policy.name === options.name || options.phase && policyDescriptor.options.phase === options.phase) { + removedPolicies.push(policyDescriptor.policy); + return false; + } else { + return true; } - const token = await options.credential.getBearerToken({ - abortSignal: request2.abortSignal - }); - request2.headers.set("Authorization", `Bearer ${token}`); - return next(request2); + }); + this._orderedPolicies = void 0; + return removedPolicies; + } + sendRequest(httpClient, request2) { + const policies = this.getOrderedPolicies(); + const pipeline = policies.reduceRight((next, policy) => { + return (req) => { + return policy.sendRequest(req, next); + }; + }, (req) => httpClient.sendRequest(req)); + return pipeline(request2); + } + getOrderedPolicies() { + if (!this._orderedPolicies) { + this._orderedPolicies = this.orderPolicies(); } - }; + return this._orderedPolicies; + } + clone() { + return new _HttpPipeline(this._policies); + } + static create() { + return new _HttpPipeline(); + } + orderPolicies() { + const result = []; + const policyMap = /* @__PURE__ */ new Map(); + function createPhase(name) { + return { + name, + policies: /* @__PURE__ */ new Set(), + hasRun: false, + hasAfterPolicies: false + }; + } + const serializePhase = createPhase("Serialize"); + const noPhase = createPhase("None"); + const deserializePhase = createPhase("Deserialize"); + const retryPhase = createPhase("Retry"); + const signPhase = createPhase("Sign"); + const orderedPhases = [serializePhase, noPhase, deserializePhase, retryPhase, signPhase]; + function getPhase(phase) { + if (phase === "Retry") { + return retryPhase; + } else if (phase === "Serialize") { + return serializePhase; + } else if (phase === "Deserialize") { + return deserializePhase; + } else if (phase === "Sign") { + return signPhase; + } else { + return noPhase; + } + } + for (const descriptor of this._policies) { + const policy = descriptor.policy; + const options = descriptor.options; + const policyName = policy.name; + if (policyMap.has(policyName)) { + throw new Error("Duplicate policy names not allowed in pipeline"); + } + const node = { + policy, + dependsOn: /* @__PURE__ */ new Set(), + dependants: /* @__PURE__ */ new Set() + }; + if (options.afterPhase) { + node.afterPhase = getPhase(options.afterPhase); + node.afterPhase.hasAfterPolicies = true; + } + policyMap.set(policyName, node); + const phase = getPhase(options.phase); + phase.policies.add(node); + } + for (const descriptor of this._policies) { + const { policy, options } = descriptor; + const policyName = policy.name; + const node = policyMap.get(policyName); + if (!node) { + throw new Error(`Missing node for policy ${policyName}`); + } + if (options.afterPolicies) { + for (const afterPolicyName of options.afterPolicies) { + const afterNode = policyMap.get(afterPolicyName); + if 
(afterNode) { + node.dependsOn.add(afterNode); + afterNode.dependants.add(node); + } + } + } + if (options.beforePolicies) { + for (const beforePolicyName of options.beforePolicies) { + const beforeNode = policyMap.get(beforePolicyName); + if (beforeNode) { + beforeNode.dependsOn.add(node); + node.dependants.add(beforeNode); + } + } + } + } + function walkPhase(phase) { + phase.hasRun = true; + for (const node of phase.policies) { + if (node.afterPhase && (!node.afterPhase.hasRun || node.afterPhase.policies.size)) { + continue; + } + if (node.dependsOn.size === 0) { + result.push(node.policy); + for (const dependant of node.dependants) { + dependant.dependsOn.delete(node); + } + policyMap.delete(node.policy.name); + phase.policies.delete(node); + } + } + } + function walkPhases() { + for (const phase of orderedPhases) { + walkPhase(phase); + if (phase.policies.size > 0 && phase !== noPhase) { + if (!noPhase.hasRun) { + walkPhase(noPhase); + } + return; + } + if (phase.hasAfterPolicies) { + walkPhase(noPhase); + } + } + } + let iteration = 0; + while (policyMap.size > 0) { + iteration++; + const initialResultLength = result.length; + walkPhases(); + if (result.length <= initialResultLength && iteration > 1) { + throw new Error("Cannot satisfy policy dependencies due to requirements cycle."); + } + } + return result; + } + }; + function createEmptyPipeline() { + return HttpPipeline.create(); } } }); -// node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/auth/oauth2AuthenticationPolicy.js -var require_oauth2AuthenticationPolicy = __commonJS({ - "node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/auth/oauth2AuthenticationPolicy.js"(exports2) { +// node_modules/@typespec/ts-http-runtime/dist/commonjs/util/object.js +var require_object = __commonJS({ + "node_modules/@typespec/ts-http-runtime/dist/commonjs/util/object.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.oauth2AuthenticationPolicyName = void 0; - exports2.oauth2AuthenticationPolicy = oauth2AuthenticationPolicy; - var checkInsecureConnection_js_1 = require_checkInsecureConnection(); - exports2.oauth2AuthenticationPolicyName = "oauth2AuthenticationPolicy"; - function oauth2AuthenticationPolicy(options) { - return { - name: exports2.oauth2AuthenticationPolicyName, - async sendRequest(request2, next) { - (0, checkInsecureConnection_js_1.ensureSecureConnection)(request2, options); - const scheme = (request2.authSchemes ?? 
options.authSchemes)?.find((x) => x.kind === "oauth2"); - if (!scheme) { - return next(request2); - } - const token = await options.credential.getOAuth2Token(scheme.flows, { - abortSignal: request2.abortSignal - }); - request2.headers.set("Authorization", `Bearer ${token}`); - return next(request2); - } - }; + exports2.isObject = isObject2; + function isObject2(input) { + return typeof input === "object" && input !== null && !Array.isArray(input) && !(input instanceof RegExp) && !(input instanceof Date); } } }); -// node_modules/@typespec/ts-http-runtime/dist/commonjs/client/clientHelpers.js -var require_clientHelpers = __commonJS({ - "node_modules/@typespec/ts-http-runtime/dist/commonjs/client/clientHelpers.js"(exports2) { +// node_modules/@typespec/ts-http-runtime/dist/commonjs/util/error.js +var require_error = __commonJS({ + "node_modules/@typespec/ts-http-runtime/dist/commonjs/util/error.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.createDefaultPipeline = createDefaultPipeline; - exports2.getCachedDefaultHttpsClient = getCachedDefaultHttpsClient; - var defaultHttpClient_js_1 = require_defaultHttpClient(); - var createPipelineFromOptions_js_1 = require_createPipelineFromOptions(); - var apiVersionPolicy_js_1 = require_apiVersionPolicy(); - var credentials_js_1 = require_credentials(); - var apiKeyAuthenticationPolicy_js_1 = require_apiKeyAuthenticationPolicy(); - var basicAuthenticationPolicy_js_1 = require_basicAuthenticationPolicy(); - var bearerAuthenticationPolicy_js_1 = require_bearerAuthenticationPolicy(); - var oauth2AuthenticationPolicy_js_1 = require_oauth2AuthenticationPolicy(); - var cachedHttpClient; - function createDefaultPipeline(options = {}) { - const pipeline = (0, createPipelineFromOptions_js_1.createPipelineFromOptions)(options); - pipeline.addPolicy((0, apiVersionPolicy_js_1.apiVersionPolicy)(options)); - const { credential, authSchemes, allowInsecureConnection } = options; - if (credential) { - if ((0, credentials_js_1.isApiKeyCredential)(credential)) { - pipeline.addPolicy((0, apiKeyAuthenticationPolicy_js_1.apiKeyAuthenticationPolicy)({ authSchemes, credential, allowInsecureConnection })); - } else if ((0, credentials_js_1.isBasicCredential)(credential)) { - pipeline.addPolicy((0, basicAuthenticationPolicy_js_1.basicAuthenticationPolicy)({ authSchemes, credential, allowInsecureConnection })); - } else if ((0, credentials_js_1.isBearerTokenCredential)(credential)) { - pipeline.addPolicy((0, bearerAuthenticationPolicy_js_1.bearerAuthenticationPolicy)({ authSchemes, credential, allowInsecureConnection })); - } else if ((0, credentials_js_1.isOAuth2TokenCredential)(credential)) { - pipeline.addPolicy((0, oauth2AuthenticationPolicy_js_1.oauth2AuthenticationPolicy)({ authSchemes, credential, allowInsecureConnection })); - } - } - return pipeline; - } - function getCachedDefaultHttpsClient() { - if (!cachedHttpClient) { - cachedHttpClient = (0, defaultHttpClient_js_1.createDefaultHttpClient)(); + exports2.isError = isError; + var object_js_1 = require_object(); + function isError(e) { + if ((0, object_js_1.isObject)(e)) { + const hasName = typeof e.name === "string"; + const hasMessage = typeof e.message === "string"; + return hasName && hasMessage; } - return cachedHttpClient; + return false; } } }); -// node_modules/@typespec/ts-http-runtime/dist/commonjs/client/multipart.js -var require_multipart = __commonJS({ - "node_modules/@typespec/ts-http-runtime/dist/commonjs/client/multipart.js"(exports2) { +// 
node_modules/@typespec/ts-http-runtime/dist/commonjs/util/inspect.js +var require_inspect = __commonJS({ + "node_modules/@typespec/ts-http-runtime/dist/commonjs/util/inspect.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.buildBodyPart = buildBodyPart; - exports2.buildMultipartBody = buildMultipartBody; - var restError_js_1 = require_restError(); - var httpHeaders_js_1 = require_httpHeaders(); - var bytesEncoding_js_1 = require_bytesEncoding(); - var typeGuards_js_1 = require_typeGuards(); - function getHeaderValue(descriptor, headerName) { - if (descriptor.headers) { - const actualHeaderName = Object.keys(descriptor.headers).find((x) => x.toLowerCase() === headerName.toLowerCase()); - if (actualHeaderName) { - return descriptor.headers[actualHeaderName]; - } - } - return void 0; - } - function getPartContentType(descriptor) { - const contentTypeHeader = getHeaderValue(descriptor, "content-type"); - if (contentTypeHeader) { - return contentTypeHeader; - } - if (descriptor.contentType === null) { - return void 0; - } - if (descriptor.contentType) { - return descriptor.contentType; - } - const { body } = descriptor; - if (body === null || body === void 0) { - return void 0; - } - if (typeof body === "string" || typeof body === "number" || typeof body === "boolean") { - return "text/plain; charset=UTF-8"; - } - if (body instanceof Blob) { - return body.type || "application/octet-stream"; - } - if ((0, typeGuards_js_1.isBinaryBody)(body)) { - return "application/octet-stream"; - } - return "application/json"; - } - function escapeDispositionField(value) { - return JSON.stringify(value); - } - function getContentDisposition(descriptor) { - const contentDispositionHeader = getHeaderValue(descriptor, "content-disposition"); - if (contentDispositionHeader) { - return contentDispositionHeader; - } - if (descriptor.dispositionType === void 0 && descriptor.name === void 0 && descriptor.filename === void 0) { - return void 0; - } - const dispositionType = descriptor.dispositionType ?? "form-data"; - let disposition = dispositionType; - if (descriptor.name) { - disposition += `; name=${escapeDispositionField(descriptor.name)}`; - } - let filename = void 0; - if (descriptor.filename) { - filename = descriptor.filename; - } else if (typeof File !== "undefined" && descriptor.body instanceof File) { - const filenameFromFile = descriptor.body.name; - if (filenameFromFile !== "") { - filename = filenameFromFile; - } - } - if (filename) { - disposition += `; filename=${escapeDispositionField(filename)}`; - } - return disposition; - } - function normalizeBody(body, contentType) { - if (body === void 0) { - return new Uint8Array([]); - } - if ((0, typeGuards_js_1.isBinaryBody)(body)) { - return body; - } - if (typeof body === "string" || typeof body === "number" || typeof body === "boolean") { - return (0, bytesEncoding_js_1.stringToUint8Array)(String(body), "utf-8"); - } - if (contentType && /application\/(.+\+)?json(;.+)?/i.test(String(contentType))) { - return (0, bytesEncoding_js_1.stringToUint8Array)(JSON.stringify(body), "utf-8"); - } - throw new restError_js_1.RestError(`Unsupported body/content-type combination: ${body}, ${contentType}`); - } - function buildBodyPart(descriptor) { - const contentType = getPartContentType(descriptor); - const contentDisposition = getContentDisposition(descriptor); - const headers = (0, httpHeaders_js_1.createHttpHeaders)(descriptor.headers ?? 
{}); - if (contentType) { - headers.set("content-type", contentType); - } - if (contentDisposition) { - headers.set("content-disposition", contentDisposition); - } - const body = normalizeBody(descriptor.body, contentType); - return { - headers, - body - }; - } - function buildMultipartBody(parts) { - return { parts: parts.map(buildBodyPart) }; - } + exports2.custom = void 0; + var node_util_1 = require("node:util"); + exports2.custom = node_util_1.inspect.custom; } }); -// node_modules/@typespec/ts-http-runtime/dist/commonjs/client/sendRequest.js -var require_sendRequest = __commonJS({ - "node_modules/@typespec/ts-http-runtime/dist/commonjs/client/sendRequest.js"(exports2) { +// node_modules/@typespec/ts-http-runtime/dist/commonjs/util/sanitizer.js +var require_sanitizer = __commonJS({ + "node_modules/@typespec/ts-http-runtime/dist/commonjs/util/sanitizer.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.sendRequest = sendRequest; - var restError_js_1 = require_restError(); - var httpHeaders_js_1 = require_httpHeaders(); - var pipelineRequest_js_1 = require_pipelineRequest(); - var clientHelpers_js_1 = require_clientHelpers(); - var typeGuards_js_1 = require_typeGuards(); - var multipart_js_1 = require_multipart(); - async function sendRequest(method, url2, pipeline, options = {}, customHttpClient) { - const httpClient = customHttpClient ?? (0, clientHelpers_js_1.getCachedDefaultHttpsClient)(); - const request2 = buildPipelineRequest(method, url2, options); - try { - const response = await pipeline.sendRequest(httpClient, request2); - const headers = response.headers.toJSON(); - const stream2 = response.readableStreamBody ?? response.browserStreamBody; - const parsedBody = options.responseAsStream || stream2 !== void 0 ? void 0 : getResponseBody(response); - const body = stream2 ?? parsedBody; - if (options?.onResponse) { - options.onResponse({ ...response, request: request2, rawHeaders: headers, parsedBody }); - } - return { - request: request2, - headers, - status: `${response.status}`, - body - }; - } catch (e) { - if ((0, restError_js_1.isRestError)(e) && e.response && options.onResponse) { - const { response } = e; - const rawHeaders = response.headers.toJSON(); - options?.onResponse({ ...response, request: request2, rawHeaders }, e); - } - throw e; - } - } - function getRequestContentType(options = {}) { - return options.contentType ?? options.headers?.["content-type"] ?? getContentType(options.body); - } - function getContentType(body) { - if (ArrayBuffer.isView(body)) { - return "application/octet-stream"; - } - if (typeof body === "string") { - try { - JSON.parse(body); - return "application/json"; - } catch (error3) { - return void 0; - } - } - return "application/json"; - } - function buildPipelineRequest(method, url2, options = {}) { - const requestContentType = getRequestContentType(options); - const { body, multipartBody } = getRequestBody(options.body, requestContentType); - const hasContent = body !== void 0 || multipartBody !== void 0; - const headers = (0, httpHeaders_js_1.createHttpHeaders)({ - ...options.headers ? options.headers : {}, - accept: options.accept ?? options.headers?.accept ?? 
"application/json", - ...hasContent && requestContentType && { - "content-type": requestContentType - } - }); - return (0, pipelineRequest_js_1.createPipelineRequest)({ - url: url2, - method, - body, - multipartBody, - headers, - allowInsecureConnection: options.allowInsecureConnection, - abortSignal: options.abortSignal, - onUploadProgress: options.onUploadProgress, - onDownloadProgress: options.onDownloadProgress, - timeout: options.timeout, - enableBrowserStreams: true, - streamResponseStatusCodes: options.responseAsStream ? /* @__PURE__ */ new Set([Number.POSITIVE_INFINITY]) : void 0 - }); - } - function getRequestBody(body, contentType = "") { - if (body === void 0) { - return { body: void 0 }; - } - if (typeof FormData !== "undefined" && body instanceof FormData) { - return { body }; - } - if ((0, typeGuards_js_1.isReadableStream)(body)) { - return { body }; - } - if (ArrayBuffer.isView(body)) { - return { body: body instanceof Uint8Array ? body : JSON.stringify(body) }; + exports2.Sanitizer = void 0; + var object_js_1 = require_object(); + var RedactedString = "REDACTED"; + var defaultAllowedHeaderNames = [ + "x-ms-client-request-id", + "x-ms-return-client-request-id", + "x-ms-useragent", + "x-ms-correlation-request-id", + "x-ms-request-id", + "client-request-id", + "ms-cv", + "return-client-request-id", + "traceparent", + "Access-Control-Allow-Credentials", + "Access-Control-Allow-Headers", + "Access-Control-Allow-Methods", + "Access-Control-Allow-Origin", + "Access-Control-Expose-Headers", + "Access-Control-Max-Age", + "Access-Control-Request-Headers", + "Access-Control-Request-Method", + "Origin", + "Accept", + "Accept-Encoding", + "Cache-Control", + "Connection", + "Content-Length", + "Content-Type", + "Date", + "ETag", + "Expires", + "If-Match", + "If-Modified-Since", + "If-None-Match", + "If-Unmodified-Since", + "Last-Modified", + "Pragma", + "Request-Id", + "Retry-After", + "Server", + "Transfer-Encoding", + "User-Agent", + "WWW-Authenticate" + ]; + var defaultAllowedQueryParameters = ["api-version"]; + var Sanitizer = class { + allowedHeaderNames; + allowedQueryParameters; + constructor({ additionalAllowedHeaderNames: allowedHeaderNames = [], additionalAllowedQueryParameters: allowedQueryParameters = [] } = {}) { + allowedHeaderNames = defaultAllowedHeaderNames.concat(allowedHeaderNames); + allowedQueryParameters = defaultAllowedQueryParameters.concat(allowedQueryParameters); + this.allowedHeaderNames = new Set(allowedHeaderNames.map((n) => n.toLowerCase())); + this.allowedQueryParameters = new Set(allowedQueryParameters.map((p) => p.toLowerCase())); } - const firstType = contentType.split(";")[0]; - switch (firstType) { - case "application/json": - return { body: JSON.stringify(body) }; - case "multipart/form-data": - if (Array.isArray(body)) { - return { multipartBody: (0, multipart_js_1.buildMultipartBody)(body) }; + /** + * Sanitizes an object for logging. 
+ * @param obj - The object to sanitize + * @returns - The sanitized object as a string + */ + sanitize(obj) { + const seen = /* @__PURE__ */ new Set(); + return JSON.stringify(obj, (key, value) => { + if (value instanceof Error) { + return { + ...value, + name: value.name, + message: value.message + }; } - return { body: JSON.stringify(body) }; - case "text/plain": - return { body: String(body) }; - default: - if (typeof body === "string") { - return { body }; + if (key === "headers") { + return this.sanitizeHeaders(value); + } else if (key === "url") { + return this.sanitizeUrl(value); + } else if (key === "query") { + return this.sanitizeQuery(value); + } else if (key === "body") { + return void 0; + } else if (key === "response") { + return void 0; + } else if (key === "operationSpec") { + return void 0; + } else if (Array.isArray(value) || (0, object_js_1.isObject)(value)) { + if (seen.has(value)) { + return "[Circular]"; + } + seen.add(value); } - return { body: JSON.stringify(body) }; - } - } - function getResponseBody(response) { - const contentType = response.headers.get("content-type") ?? ""; - const firstType = contentType.split(";")[0]; - const bodyToParse = response.bodyAsText ?? ""; - if (firstType === "text/plain") { - return String(bodyToParse); + return value; + }, 2); } - try { - return bodyToParse ? JSON.parse(bodyToParse) : void 0; - } catch (error3) { - if (firstType === "application/json") { - throw createParseError(response, error3); + /** + * Sanitizes a URL for logging. + * @param value - The URL to sanitize + * @returns - The sanitized URL as a string + */ + sanitizeUrl(value) { + if (typeof value !== "string" || value === null || value === "") { + return value; } - return String(bodyToParse); - } - } - function createParseError(response, err) { - const msg = `Error "${err}" occurred while parsing the response body - ${response.bodyAsText}.`; - const errCode = err.code ?? 
restError_js_1.RestError.PARSE_ERROR; - return new restError_js_1.RestError(msg, { - code: errCode, - statusCode: response.status, - request: response.request, - response - }); - } - } -}); - -// node_modules/@typespec/ts-http-runtime/dist/commonjs/client/urlHelpers.js -var require_urlHelpers = __commonJS({ - "node_modules/@typespec/ts-http-runtime/dist/commonjs/client/urlHelpers.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.buildRequestUrl = buildRequestUrl; - exports2.buildBaseUrl = buildBaseUrl; - exports2.replaceAll = replaceAll; - function isQueryParameterWithOptions(x) { - const value = x.value; - return value !== void 0 && value.toString !== void 0 && typeof value.toString === "function"; - } - function buildRequestUrl(endpoint2, routePath, pathParameters, options = {}) { - if (routePath.startsWith("https://") || routePath.startsWith("http://")) { - return routePath; - } - endpoint2 = buildBaseUrl(endpoint2, options); - routePath = buildRoutePath(routePath, pathParameters, options); - const requestUrl = appendQueryParams(`${endpoint2}/${routePath}`, options); - const url2 = new URL(requestUrl); - return url2.toString().replace(/([^:]\/)\/+/g, "$1"); - } - function getQueryParamValue(key, allowReserved, style, param) { - let separator; - if (style === "pipeDelimited") { - separator = "|"; - } else if (style === "spaceDelimited") { - separator = "%20"; - } else { - separator = ","; - } - let paramValues; - if (Array.isArray(param)) { - paramValues = param; - } else if (typeof param === "object" && param.toString === Object.prototype.toString) { - paramValues = Object.entries(param).flat(); - } else { - paramValues = [param]; - } - const value = paramValues.map((p) => { - if (p === null || p === void 0) { - return ""; + const url2 = new URL(value); + if (!url2.search) { + return value; } - if (!p.toString || typeof p.toString !== "function") { - throw new Error(`Query parameters must be able to be represented as string, ${key} can't`); + for (const [key] of url2.searchParams) { + if (!this.allowedQueryParameters.has(key.toLowerCase())) { + url2.searchParams.set(key, RedactedString); + } } - const rawValue = p.toISOString !== void 0 ? p.toISOString() : p.toString(); - return allowReserved ? rawValue : encodeURIComponent(rawValue); - }).join(separator); - return `${allowReserved ? key : encodeURIComponent(key)}=${value}`; - } - function appendQueryParams(url2, options = {}) { - if (!options.queryParameters) { - return url2; + return url2.toString(); } - const parsedUrl = new URL(url2); - const queryParams = options.queryParameters; - const paramStrings = []; - for (const key of Object.keys(queryParams)) { - const param = queryParams[key]; - if (param === void 0 || param === null) { - continue; - } - const hasMetadata = isQueryParameterWithOptions(param); - const rawValue = hasMetadata ? param.value : param; - const explode = hasMetadata ? param.explode ?? false : false; - const style = hasMetadata && param.style ? param.style : "form"; - if (explode) { - if (Array.isArray(rawValue)) { - for (const item of rawValue) { - paramStrings.push(getQueryParamValue(key, options.skipUrlEncoding ?? false, style, item)); - } - } else if (typeof rawValue === "object") { - for (const [actualKey, value] of Object.entries(rawValue)) { - paramStrings.push(getQueryParamValue(actualKey, options.skipUrlEncoding ?? 
false, style, value)); - } + sanitizeHeaders(obj) { + const sanitized = {}; + for (const key of Object.keys(obj)) { + if (this.allowedHeaderNames.has(key.toLowerCase())) { + sanitized[key] = obj[key]; } else { - throw new Error("explode can only be set to true for objects and arrays"); + sanitized[key] = RedactedString; } - } else { - paramStrings.push(getQueryParamValue(key, options.skipUrlEncoding ?? false, style, rawValue)); } + return sanitized; } - if (parsedUrl.search !== "") { - parsedUrl.search += "&"; - } - parsedUrl.search += paramStrings.join("&"); - return parsedUrl.toString(); - } - function buildBaseUrl(endpoint2, options) { - if (!options.pathParameters) { - return endpoint2; - } - const pathParams = options.pathParameters; - for (const [key, param] of Object.entries(pathParams)) { - if (param === void 0 || param === null) { - throw new Error(`Path parameters ${key} must not be undefined or null`); - } - if (!param.toString || typeof param.toString !== "function") { - throw new Error(`Path parameters must be able to be represented as string, ${key} can't`); + sanitizeQuery(value) { + if (typeof value !== "object" || value === null) { + return value; } - let value = param.toISOString !== void 0 ? param.toISOString() : String(param); - if (!options.skipUrlEncoding) { - value = encodeURIComponent(param); + const sanitized = {}; + for (const k of Object.keys(value)) { + if (this.allowedQueryParameters.has(k.toLowerCase())) { + sanitized[k] = value[k]; + } else { + sanitized[k] = RedactedString; + } } - endpoint2 = replaceAll(endpoint2, `{${key}}`, value) ?? ""; + return sanitized; } - return endpoint2; - } - function buildRoutePath(routePath, pathParameters, options = {}) { - for (const pathParam of pathParameters) { - const allowReserved = typeof pathParam === "object" && (pathParam.allowReserved ?? false); - let value = typeof pathParam === "object" ? pathParam.value : pathParam; - if (!options.skipUrlEncoding && !allowReserved) { - value = encodeURIComponent(value); - } - routePath = routePath.replace(/\{[\w-]+\}/, String(value)); + }; + exports2.Sanitizer = Sanitizer; + } +}); + +// node_modules/@typespec/ts-http-runtime/dist/commonjs/restError.js +var require_restError = __commonJS({ + "node_modules/@typespec/ts-http-runtime/dist/commonjs/restError.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.RestError = void 0; + exports2.isRestError = isRestError; + var error_js_1 = require_error(); + var inspect_js_1 = require_inspect(); + var sanitizer_js_1 = require_sanitizer(); + var errorSanitizer = new sanitizer_js_1.Sanitizer(); + var RestError = class _RestError extends Error { + /** + * Something went wrong when making the request. + * This means the actual request failed for some reason, + * such as a DNS issue or the connection being lost. + */ + static REQUEST_SEND_ERROR = "REQUEST_SEND_ERROR"; + /** + * This means that parsing the response from the server failed. + * It may have been malformed. + */ + static PARSE_ERROR = "PARSE_ERROR"; + /** + * The code of the error itself (use statics on RestError if possible.) + */ + code; + /** + * The HTTP status code of the request (if applicable.) + */ + statusCode; + /** + * The request that was made. + * This property is non-enumerable. + */ + request; + /** + * The response received (if any.) + * This property is non-enumerable. + */ + response; + /** + * Bonus property set by the throw site. 
+ */ + details; + constructor(message, options = {}) { + super(message); + this.name = "RestError"; + this.code = options.code; + this.statusCode = options.statusCode; + Object.defineProperty(this, "request", { value: options.request, enumerable: false }); + Object.defineProperty(this, "response", { value: options.response, enumerable: false }); + const agent = this.request?.agent ? { + maxFreeSockets: this.request.agent.maxFreeSockets, + maxSockets: this.request.agent.maxSockets + } : void 0; + Object.defineProperty(this, inspect_js_1.custom, { + value: () => { + return `RestError: ${this.message} + ${errorSanitizer.sanitize({ + ...this, + request: { ...this.request, agent }, + response: this.response + })}`; + }, + enumerable: false + }); + Object.setPrototypeOf(this, _RestError.prototype); } - return routePath; - } - function replaceAll(value, searchValue, replaceValue) { - return !value || !searchValue ? value : value.split(searchValue).join(replaceValue || ""); + }; + exports2.RestError = RestError; + function isRestError(e) { + if (e instanceof RestError) { + return true; + } + return (0, error_js_1.isError)(e) && e.name === "RestError"; } } }); -// node_modules/@typespec/ts-http-runtime/dist/commonjs/client/getClient.js -var require_getClient = __commonJS({ - "node_modules/@typespec/ts-http-runtime/dist/commonjs/client/getClient.js"(exports2) { +// node_modules/@typespec/ts-http-runtime/dist/commonjs/util/bytesEncoding.js +var require_bytesEncoding = __commonJS({ + "node_modules/@typespec/ts-http-runtime/dist/commonjs/util/bytesEncoding.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.getClient = getClient; - var clientHelpers_js_1 = require_clientHelpers(); - var sendRequest_js_1 = require_sendRequest(); - var urlHelpers_js_1 = require_urlHelpers(); - var checkEnvironment_js_1 = require_checkEnvironment(); - function getClient(endpoint2, clientOptions = {}) { - const pipeline = clientOptions.pipeline ?? (0, clientHelpers_js_1.createDefaultPipeline)(clientOptions); - if (clientOptions.additionalPolicies?.length) { - for (const { policy, position } of clientOptions.additionalPolicies) { - const afterPhase = position === "perRetry" ? "Sign" : void 0; - pipeline.addPolicy(policy, { - afterPhase - }); - } - } - const { allowInsecureConnection, httpClient } = clientOptions; - const endpointUrl = clientOptions.endpoint ?? 
endpoint2; - const client = (path13, ...args) => { - const getUrl = (requestOptions) => (0, urlHelpers_js_1.buildRequestUrl)(endpointUrl, path13, args, { allowInsecureConnection, ...requestOptions }); - return { - get: (requestOptions = {}) => { - return buildOperation("GET", getUrl(requestOptions), pipeline, requestOptions, allowInsecureConnection, httpClient); - }, - post: (requestOptions = {}) => { - return buildOperation("POST", getUrl(requestOptions), pipeline, requestOptions, allowInsecureConnection, httpClient); - }, - put: (requestOptions = {}) => { - return buildOperation("PUT", getUrl(requestOptions), pipeline, requestOptions, allowInsecureConnection, httpClient); - }, - patch: (requestOptions = {}) => { - return buildOperation("PATCH", getUrl(requestOptions), pipeline, requestOptions, allowInsecureConnection, httpClient); - }, - delete: (requestOptions = {}) => { - return buildOperation("DELETE", getUrl(requestOptions), pipeline, requestOptions, allowInsecureConnection, httpClient); - }, - head: (requestOptions = {}) => { - return buildOperation("HEAD", getUrl(requestOptions), pipeline, requestOptions, allowInsecureConnection, httpClient); - }, - options: (requestOptions = {}) => { - return buildOperation("OPTIONS", getUrl(requestOptions), pipeline, requestOptions, allowInsecureConnection, httpClient); - }, - trace: (requestOptions = {}) => { - return buildOperation("TRACE", getUrl(requestOptions), pipeline, requestOptions, allowInsecureConnection, httpClient); - } - }; - }; - return { - path: client, - pathUnchecked: client, - pipeline - }; + exports2.uint8ArrayToString = uint8ArrayToString; + exports2.stringToUint8Array = stringToUint8Array; + function uint8ArrayToString(bytes, format) { + return Buffer.from(bytes).toString(format); } - function buildOperation(method, url2, pipeline, options, allowInsecureConnection, httpClient) { - allowInsecureConnection = options.allowInsecureConnection ?? allowInsecureConnection; - return { - then: function(onFulfilled, onrejected) { - return (0, sendRequest_js_1.sendRequest)(method, url2, pipeline, { ...options, allowInsecureConnection }, httpClient).then(onFulfilled, onrejected); - }, - async asBrowserStream() { - if (checkEnvironment_js_1.isNodeLike) { - throw new Error("`asBrowserStream` is supported only in the browser environment. Use `asNodeStream` instead to obtain the response body stream. If you require a Web stream of the response in Node, consider using `Readable.toWeb` on the result of `asNodeStream`."); - } else { - return (0, sendRequest_js_1.sendRequest)(method, url2, pipeline, { ...options, allowInsecureConnection, responseAsStream: true }, httpClient); - } - }, - async asNodeStream() { - if (checkEnvironment_js_1.isNodeLike) { - return (0, sendRequest_js_1.sendRequest)(method, url2, pipeline, { ...options, allowInsecureConnection, responseAsStream: true }, httpClient); - } else { - throw new Error("`isNodeStream` is not supported in the browser environment. 
Use `asBrowserStream` to obtain the response body stream."); - } - } - }; + function stringToUint8Array(value, format) { + return Buffer.from(value, format); } } }); -// node_modules/@typespec/ts-http-runtime/dist/commonjs/client/operationOptionHelpers.js -var require_operationOptionHelpers = __commonJS({ - "node_modules/@typespec/ts-http-runtime/dist/commonjs/client/operationOptionHelpers.js"(exports2) { +// node_modules/@typespec/ts-http-runtime/dist/commonjs/log.js +var require_log2 = __commonJS({ + "node_modules/@typespec/ts-http-runtime/dist/commonjs/log.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.operationOptionsToRequestParameters = operationOptionsToRequestParameters; - function operationOptionsToRequestParameters(options) { - return { - allowInsecureConnection: options.requestOptions?.allowInsecureConnection, - timeout: options.requestOptions?.timeout, - skipUrlEncoding: options.requestOptions?.skipUrlEncoding, - abortSignal: options.abortSignal, - onUploadProgress: options.requestOptions?.onUploadProgress, - onDownloadProgress: options.requestOptions?.onDownloadProgress, - headers: { ...options.requestOptions?.headers }, - onResponse: options.onResponse - }; - } + exports2.logger = void 0; + var logger_js_1 = require_logger(); + exports2.logger = (0, logger_js_1.createClientLogger)("ts-http-runtime"); } }); -// node_modules/@typespec/ts-http-runtime/dist/commonjs/client/restError.js -var require_restError2 = __commonJS({ - "node_modules/@typespec/ts-http-runtime/dist/commonjs/client/restError.js"(exports2) { +// node_modules/@typespec/ts-http-runtime/dist/commonjs/nodeHttpClient.js +var require_nodeHttpClient = __commonJS({ + "node_modules/@typespec/ts-http-runtime/dist/commonjs/nodeHttpClient.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.createRestError = createRestError; - var restError_js_1 = require_restError(); - var httpHeaders_js_1 = require_httpHeaders(); - function createRestError(messageOrResponse, response) { - const resp = typeof messageOrResponse === "string" ? response : messageOrResponse; - const internalError = resp.body?.error ?? resp.body; - const message = typeof messageOrResponse === "string" ? messageOrResponse : internalError?.message ?? 
`Unexpected status code: ${resp.status}`; - return new restError_js_1.RestError(message, { - statusCode: statusCodeToNumber(resp.status), - code: internalError?.code, - request: resp.request, - response: toPipelineResponse(resp) + exports2.getBodyLength = getBodyLength; + exports2.createNodeHttpClient = createNodeHttpClient; + var tslib_1 = (init_tslib_es6(), __toCommonJS(tslib_es6_exports)); + var node_http_1 = tslib_1.__importDefault(require("node:http")); + var node_https_1 = tslib_1.__importDefault(require("node:https")); + var node_zlib_1 = tslib_1.__importDefault(require("node:zlib")); + var node_stream_1 = require("node:stream"); + var AbortError_js_1 = require_AbortError(); + var httpHeaders_js_1 = require_httpHeaders(); + var restError_js_1 = require_restError(); + var log_js_1 = require_log2(); + var sanitizer_js_1 = require_sanitizer(); + var DEFAULT_TLS_SETTINGS = {}; + function isReadableStream(body) { + return body && typeof body.pipe === "function"; + } + function isStreamComplete(stream2) { + if (stream2.readable === false) { + return Promise.resolve(); + } + return new Promise((resolve6) => { + const handler2 = () => { + resolve6(); + stream2.removeListener("close", handler2); + stream2.removeListener("end", handler2); + stream2.removeListener("error", handler2); + }; + stream2.on("close", handler2); + stream2.on("end", handler2); + stream2.on("error", handler2); }); } - function toPipelineResponse(response) { - return { - headers: (0, httpHeaders_js_1.createHttpHeaders)(response.headers), - request: response.request, - status: statusCodeToNumber(response.status) ?? -1 - }; + function isArrayBuffer(body) { + return body && typeof body.byteLength === "number"; } - function statusCodeToNumber(statusCode) { - const status = Number.parseInt(statusCode); - return Number.isNaN(status) ? void 0 : status; + var ReportTransform = class extends node_stream_1.Transform { + loadedBytes = 0; + progressCallback; + // eslint-disable-next-line @typescript-eslint/no-unsafe-function-type + _transform(chunk, _encoding, callback) { + this.push(chunk); + this.loadedBytes += chunk.length; + try { + this.progressCallback({ loadedBytes: this.loadedBytes }); + callback(); + } catch (e) { + callback(e); + } + } + constructor(progressCallback) { + super(); + this.progressCallback = progressCallback; + } + }; + var NodeHttpClient = class { + cachedHttpAgent; + cachedHttpsAgents = /* @__PURE__ */ new WeakMap(); + /** + * Makes a request over an underlying transport layer and returns the response. + * @param request - The request to be made. + */ + async sendRequest(request2) { + const abortController = new AbortController(); + let abortListener; + if (request2.abortSignal) { + if (request2.abortSignal.aborted) { + throw new AbortError_js_1.AbortError("The operation was aborted. Request has already been canceled."); + } + abortListener = (event) => { + if (event.type === "abort") { + abortController.abort(); + } + }; + request2.abortSignal.addEventListener("abort", abortListener); + } + let timeoutId; + if (request2.timeout > 0) { + timeoutId = setTimeout(() => { + const sanitizer = new sanitizer_js_1.Sanitizer(); + log_js_1.logger.info(`request to '${sanitizer.sanitizeUrl(request2.url)}' timed out. canceling...`); + abortController.abort(); + }, request2.timeout); + } + const acceptEncoding = request2.headers.get("Accept-Encoding"); + const shouldDecompress = acceptEncoding?.includes("gzip") || acceptEncoding?.includes("deflate"); + let body = typeof request2.body === "function" ? 
request2.body() : request2.body; + if (body && !request2.headers.has("Content-Length")) { + const bodyLength = getBodyLength(body); + if (bodyLength !== null) { + request2.headers.set("Content-Length", bodyLength); + } + } + let responseStream; + try { + if (body && request2.onUploadProgress) { + const onUploadProgress = request2.onUploadProgress; + const uploadReportStream = new ReportTransform(onUploadProgress); + uploadReportStream.on("error", (e) => { + log_js_1.logger.error("Error in upload progress", e); + }); + if (isReadableStream(body)) { + body.pipe(uploadReportStream); + } else { + uploadReportStream.end(body); + } + body = uploadReportStream; + } + const res = await this.makeRequest(request2, abortController, body); + if (timeoutId !== void 0) { + clearTimeout(timeoutId); + } + const headers = getResponseHeaders(res); + const status = res.statusCode ?? 0; + const response = { + status, + headers, + request: request2 + }; + if (request2.method === "HEAD") { + res.resume(); + return response; + } + responseStream = shouldDecompress ? getDecodedResponseStream(res, headers) : res; + const onDownloadProgress = request2.onDownloadProgress; + if (onDownloadProgress) { + const downloadReportStream = new ReportTransform(onDownloadProgress); + downloadReportStream.on("error", (e) => { + log_js_1.logger.error("Error in download progress", e); + }); + responseStream.pipe(downloadReportStream); + responseStream = downloadReportStream; + } + if ( + // Value of POSITIVE_INFINITY in streamResponseStatusCodes is considered as any status code + request2.streamResponseStatusCodes?.has(Number.POSITIVE_INFINITY) || request2.streamResponseStatusCodes?.has(response.status) + ) { + response.readableStreamBody = responseStream; + } else { + response.bodyAsText = await streamToText(responseStream); + } + return response; + } finally { + if (request2.abortSignal && abortListener) { + let uploadStreamDone = Promise.resolve(); + if (isReadableStream(body)) { + uploadStreamDone = isStreamComplete(body); + } + let downloadStreamDone = Promise.resolve(); + if (isReadableStream(responseStream)) { + downloadStreamDone = isStreamComplete(responseStream); + } + Promise.all([uploadStreamDone, downloadStreamDone]).then(() => { + if (abortListener) { + request2.abortSignal?.removeEventListener("abort", abortListener); + } + }).catch((e) => { + log_js_1.logger.warning("Error when cleaning up abortListener on httpRequest", e); + }); + } + } + } + makeRequest(request2, abortController, body) { + const url2 = new URL(request2.url); + const isInsecure = url2.protocol !== "https:"; + if (isInsecure && !request2.allowInsecureConnection) { + throw new Error(`Cannot connect to ${request2.url} while allowInsecureConnection is false.`); + } + const agent = request2.agent ?? this.getOrCreateAgent(request2, isInsecure); + const options = { + agent, + hostname: url2.hostname, + path: `${url2.pathname}${url2.search}`, + port: url2.port, + method: request2.method, + headers: request2.headers.toJSON({ preserveCase: true }), + ...request2.requestOverrides + }; + return new Promise((resolve6, reject) => { + const req = isInsecure ? node_http_1.default.request(options, resolve6) : node_https_1.default.request(options, resolve6); + req.once("error", (err) => { + reject(new restError_js_1.RestError(err.message, { code: err.code ?? 
restError_js_1.RestError.REQUEST_SEND_ERROR, request: request2 })); + }); + abortController.signal.addEventListener("abort", () => { + const abortError = new AbortError_js_1.AbortError("The operation was aborted. Rejecting from abort signal callback while making request."); + req.destroy(abortError); + reject(abortError); + }); + if (body && isReadableStream(body)) { + body.pipe(req); + } else if (body) { + if (typeof body === "string" || Buffer.isBuffer(body)) { + req.end(body); + } else if (isArrayBuffer(body)) { + req.end(ArrayBuffer.isView(body) ? Buffer.from(body.buffer) : Buffer.from(body)); + } else { + log_js_1.logger.error("Unrecognized body type", body); + reject(new restError_js_1.RestError("Unrecognized body type")); + } + } else { + req.end(); + } + }); + } + getOrCreateAgent(request2, isInsecure) { + const disableKeepAlive = request2.disableKeepAlive; + if (isInsecure) { + if (disableKeepAlive) { + return node_http_1.default.globalAgent; + } + if (!this.cachedHttpAgent) { + this.cachedHttpAgent = new node_http_1.default.Agent({ keepAlive: true }); + } + return this.cachedHttpAgent; + } else { + if (disableKeepAlive && !request2.tlsSettings) { + return node_https_1.default.globalAgent; + } + const tlsSettings = request2.tlsSettings ?? DEFAULT_TLS_SETTINGS; + let agent = this.cachedHttpsAgents.get(tlsSettings); + if (agent && agent.options.keepAlive === !disableKeepAlive) { + return agent; + } + log_js_1.logger.info("No cached TLS Agent exist, creating a new Agent"); + agent = new node_https_1.default.Agent({ + // keepAlive is true if disableKeepAlive is false. + keepAlive: !disableKeepAlive, + // Since we are spreading, if no tslSettings were provided, nothing is added to the agent options. + ...tlsSettings + }); + this.cachedHttpsAgents.set(tlsSettings, agent); + return agent; + } + } + }; + function getResponseHeaders(res) { + const headers = (0, httpHeaders_js_1.createHttpHeaders)(); + for (const header of Object.keys(res.headers)) { + const value = res.headers[header]; + if (Array.isArray(value)) { + if (value.length > 0) { + headers.set(header, value[0]); + } + } else if (value) { + headers.set(header, value); + } + } + return headers; + } + function getDecodedResponseStream(stream2, headers) { + const contentEncoding = headers.get("Content-Encoding"); + if (contentEncoding === "gzip") { + const unzip = node_zlib_1.default.createGunzip(); + stream2.pipe(unzip); + return unzip; + } else if (contentEncoding === "deflate") { + const inflate = node_zlib_1.default.createInflate(); + stream2.pipe(inflate); + return inflate; + } + return stream2; + } + function streamToText(stream2) { + return new Promise((resolve6, reject) => { + const buffer = []; + stream2.on("data", (chunk) => { + if (Buffer.isBuffer(chunk)) { + buffer.push(chunk); + } else { + buffer.push(Buffer.from(chunk)); + } + }); + stream2.on("end", () => { + resolve6(Buffer.concat(buffer).toString("utf8")); + }); + stream2.on("error", (e) => { + if (e && e?.name === "AbortError") { + reject(e); + } else { + reject(new restError_js_1.RestError(`Error reading response as text: ${e.message}`, { + code: restError_js_1.RestError.PARSE_ERROR + })); + } + }); + }); + } + function getBodyLength(body) { + if (!body) { + return 0; + } else if (Buffer.isBuffer(body)) { + return body.length; + } else if (isReadableStream(body)) { + return null; + } else if (isArrayBuffer(body)) { + return body.byteLength; + } else if (typeof body === "string") { + return Buffer.from(body).length; + } else { + return null; + } + } + function 
createNodeHttpClient() { + return new NodeHttpClient(); } } }); -// node_modules/@typespec/ts-http-runtime/dist/commonjs/index.js -var require_commonjs = __commonJS({ - "node_modules/@typespec/ts-http-runtime/dist/commonjs/index.js"(exports2) { +// node_modules/@typespec/ts-http-runtime/dist/commonjs/defaultHttpClient.js +var require_defaultHttpClient = __commonJS({ + "node_modules/@typespec/ts-http-runtime/dist/commonjs/defaultHttpClient.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.createRestError = exports2.operationOptionsToRequestParameters = exports2.getClient = exports2.createDefaultHttpClient = exports2.uint8ArrayToString = exports2.stringToUint8Array = exports2.isRestError = exports2.RestError = exports2.createEmptyPipeline = exports2.createPipelineRequest = exports2.createHttpHeaders = exports2.TypeSpecRuntimeLogger = exports2.setLogLevel = exports2.getLogLevel = exports2.createClientLogger = exports2.AbortError = void 0; - var tslib_1 = (init_tslib_es6(), __toCommonJS(tslib_es6_exports)); - var AbortError_js_1 = require_AbortError(); - Object.defineProperty(exports2, "AbortError", { enumerable: true, get: function() { - return AbortError_js_1.AbortError; - } }); - var logger_js_1 = require_logger(); - Object.defineProperty(exports2, "createClientLogger", { enumerable: true, get: function() { - return logger_js_1.createClientLogger; - } }); - Object.defineProperty(exports2, "getLogLevel", { enumerable: true, get: function() { - return logger_js_1.getLogLevel; - } }); - Object.defineProperty(exports2, "setLogLevel", { enumerable: true, get: function() { - return logger_js_1.setLogLevel; - } }); - Object.defineProperty(exports2, "TypeSpecRuntimeLogger", { enumerable: true, get: function() { - return logger_js_1.TypeSpecRuntimeLogger; - } }); - var httpHeaders_js_1 = require_httpHeaders(); - Object.defineProperty(exports2, "createHttpHeaders", { enumerable: true, get: function() { - return httpHeaders_js_1.createHttpHeaders; - } }); - tslib_1.__exportStar(require_schemes(), exports2); - tslib_1.__exportStar(require_oauth2Flows(), exports2); - var pipelineRequest_js_1 = require_pipelineRequest(); - Object.defineProperty(exports2, "createPipelineRequest", { enumerable: true, get: function() { - return pipelineRequest_js_1.createPipelineRequest; - } }); - var pipeline_js_1 = require_pipeline(); - Object.defineProperty(exports2, "createEmptyPipeline", { enumerable: true, get: function() { - return pipeline_js_1.createEmptyPipeline; - } }); - var restError_js_1 = require_restError(); - Object.defineProperty(exports2, "RestError", { enumerable: true, get: function() { - return restError_js_1.RestError; - } }); - Object.defineProperty(exports2, "isRestError", { enumerable: true, get: function() { - return restError_js_1.isRestError; - } }); - var bytesEncoding_js_1 = require_bytesEncoding(); - Object.defineProperty(exports2, "stringToUint8Array", { enumerable: true, get: function() { - return bytesEncoding_js_1.stringToUint8Array; - } }); - Object.defineProperty(exports2, "uint8ArrayToString", { enumerable: true, get: function() { - return bytesEncoding_js_1.uint8ArrayToString; - } }); - var defaultHttpClient_js_1 = require_defaultHttpClient(); - Object.defineProperty(exports2, "createDefaultHttpClient", { enumerable: true, get: function() { - return defaultHttpClient_js_1.createDefaultHttpClient; - } }); - var getClient_js_1 = require_getClient(); - Object.defineProperty(exports2, "getClient", { enumerable: true, get: function() { 
- return getClient_js_1.getClient; - } }); - var operationOptionHelpers_js_1 = require_operationOptionHelpers(); - Object.defineProperty(exports2, "operationOptionsToRequestParameters", { enumerable: true, get: function() { - return operationOptionHelpers_js_1.operationOptionsToRequestParameters; - } }); - var restError_js_2 = require_restError2(); - Object.defineProperty(exports2, "createRestError", { enumerable: true, get: function() { - return restError_js_2.createRestError; - } }); + exports2.createDefaultHttpClient = createDefaultHttpClient; + var nodeHttpClient_js_1 = require_nodeHttpClient(); + function createDefaultHttpClient() { + return (0, nodeHttpClient_js_1.createNodeHttpClient)(); + } } }); -// node_modules/@azure/core-rest-pipeline/dist/commonjs/pipeline.js -var require_pipeline2 = __commonJS({ - "node_modules/@azure/core-rest-pipeline/dist/commonjs/pipeline.js"(exports2) { +// node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/logPolicy.js +var require_logPolicy = __commonJS({ + "node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/logPolicy.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.createEmptyPipeline = createEmptyPipeline; - var ts_http_runtime_1 = require_commonjs(); - function createEmptyPipeline() { - return (0, ts_http_runtime_1.createEmptyPipeline)(); + exports2.logPolicyName = void 0; + exports2.logPolicy = logPolicy; + var log_js_1 = require_log2(); + var sanitizer_js_1 = require_sanitizer(); + exports2.logPolicyName = "logPolicy"; + function logPolicy(options = {}) { + const logger = options.logger ?? log_js_1.logger.info; + const sanitizer = new sanitizer_js_1.Sanitizer({ + additionalAllowedHeaderNames: options.additionalAllowedHeaderNames, + additionalAllowedQueryParameters: options.additionalAllowedQueryParameters + }); + return { + name: exports2.logPolicyName, + async sendRequest(request2, next) { + if (!logger.enabled) { + return next(request2); + } + logger(`Request: ${sanitizer.sanitize(request2)}`); + const response = await next(request2); + logger(`Response status code: ${response.status}`); + logger(`Headers: ${sanitizer.sanitize(response.headers)}`); + return response; + } + }; } } }); -// node_modules/@typespec/ts-http-runtime/dist/commonjs/logger/internal.js -var require_internal = __commonJS({ - "node_modules/@typespec/ts-http-runtime/dist/commonjs/logger/internal.js"(exports2) { +// node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/redirectPolicy.js +var require_redirectPolicy = __commonJS({ + "node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/redirectPolicy.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.createLoggerContext = void 0; - var logger_js_1 = require_logger(); - Object.defineProperty(exports2, "createLoggerContext", { enumerable: true, get: function() { - return logger_js_1.createLoggerContext; - } }); + exports2.redirectPolicyName = void 0; + exports2.redirectPolicy = redirectPolicy; + exports2.redirectPolicyName = "redirectPolicy"; + var allowedRedirect = ["GET", "HEAD"]; + function redirectPolicy(options = {}) { + const { maxRetries = 20 } = options; + return { + name: exports2.redirectPolicyName, + async sendRequest(request2, next) { + const response = await next(request2); + return handleRedirect(next, response, maxRetries); + } + }; + } + async function handleRedirect(next, response, maxRetries, currentRetries = 0) { + const { request: request2, status, headers } = 
response; + const locationHeader = headers.get("location"); + if (locationHeader && (status === 300 || status === 301 && allowedRedirect.includes(request2.method) || status === 302 && allowedRedirect.includes(request2.method) || status === 303 && request2.method === "POST" || status === 307) && currentRetries < maxRetries) { + const url2 = new URL(locationHeader, request2.url); + request2.url = url2.toString(); + if (status === 303) { + request2.method = "GET"; + request2.headers.delete("Content-Length"); + delete request2.body; + } + request2.headers.delete("Authorization"); + const res = await next(request2); + return handleRedirect(next, res, maxRetries, currentRetries + 1); + } + return response; + } } }); -// node_modules/@azure/logger/dist/commonjs/index.js -var require_commonjs2 = __commonJS({ - "node_modules/@azure/logger/dist/commonjs/index.js"(exports2) { +// node_modules/@typespec/ts-http-runtime/dist/commonjs/util/userAgentPlatform.js +var require_userAgentPlatform = __commonJS({ + "node_modules/@typespec/ts-http-runtime/dist/commonjs/util/userAgentPlatform.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.AzureLogger = void 0; - exports2.setLogLevel = setLogLevel; - exports2.getLogLevel = getLogLevel; - exports2.createClientLogger = createClientLogger; - var logger_1 = require_internal(); - var context2 = (0, logger_1.createLoggerContext)({ - logLevelEnvVarName: "AZURE_LOG_LEVEL", - namespace: "azure" - }); - exports2.AzureLogger = context2.logger; - function setLogLevel(level) { - context2.setLogLevel(level); - } - function getLogLevel() { - return context2.getLogLevel(); + exports2.getHeaderName = getHeaderName; + exports2.setPlatformSpecificData = setPlatformSpecificData; + var tslib_1 = (init_tslib_es6(), __toCommonJS(tslib_es6_exports)); + var node_os_1 = tslib_1.__importDefault(require("node:os")); + var node_process_1 = tslib_1.__importDefault(require("node:process")); + function getHeaderName() { + return "User-Agent"; } - function createClientLogger(namespace) { - return context2.createClientLogger(namespace); + async function setPlatformSpecificData(map2) { + if (node_process_1.default && node_process_1.default.versions) { + const osInfo = `${node_os_1.default.type()} ${node_os_1.default.release()}; ${node_os_1.default.arch()}`; + const versions = node_process_1.default.versions; + if (versions.bun) { + map2.set("Bun", `${versions.bun} (${osInfo})`); + } else if (versions.deno) { + map2.set("Deno", `${versions.deno} (${osInfo})`); + } else if (versions.node) { + map2.set("Node", `${versions.node} (${osInfo})`); + } + } } } }); -// node_modules/@azure/core-rest-pipeline/dist/commonjs/log.js -var require_log3 = __commonJS({ - "node_modules/@azure/core-rest-pipeline/dist/commonjs/log.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.logger = void 0; - var logger_1 = require_commonjs2(); - exports2.logger = (0, logger_1.createClientLogger)("core-rest-pipeline"); - } -}); - -// node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/exponentialRetryPolicy.js -var require_exponentialRetryPolicy = __commonJS({ - "node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/exponentialRetryPolicy.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.exponentialRetryPolicyName = void 0; - exports2.exponentialRetryPolicy = exponentialRetryPolicy; - var exponentialRetryStrategy_js_1 = 
require_exponentialRetryStrategy(); - var retryPolicy_js_1 = require_retryPolicy(); - var constants_js_1 = require_constants13(); - exports2.exponentialRetryPolicyName = "exponentialRetryPolicy"; - function exponentialRetryPolicy(options = {}) { - return (0, retryPolicy_js_1.retryPolicy)([ - (0, exponentialRetryStrategy_js_1.exponentialRetryStrategy)({ - ...options, - ignoreSystemErrors: true - }) - ], { - maxRetries: options.maxRetries ?? constants_js_1.DEFAULT_RETRY_POLICY_COUNT - }); - } - } -}); - -// node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/systemErrorRetryPolicy.js -var require_systemErrorRetryPolicy = __commonJS({ - "node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/systemErrorRetryPolicy.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.systemErrorRetryPolicyName = void 0; - exports2.systemErrorRetryPolicy = systemErrorRetryPolicy; - var exponentialRetryStrategy_js_1 = require_exponentialRetryStrategy(); - var retryPolicy_js_1 = require_retryPolicy(); - var constants_js_1 = require_constants13(); - exports2.systemErrorRetryPolicyName = "systemErrorRetryPolicy"; - function systemErrorRetryPolicy(options = {}) { - return { - name: exports2.systemErrorRetryPolicyName, - sendRequest: (0, retryPolicy_js_1.retryPolicy)([ - (0, exponentialRetryStrategy_js_1.exponentialRetryStrategy)({ - ...options, - ignoreHttpStatusCodes: true - }) - ], { - maxRetries: options.maxRetries ?? constants_js_1.DEFAULT_RETRY_POLICY_COUNT - }).sendRequest - }; - } - } -}); - -// node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/throttlingRetryPolicy.js -var require_throttlingRetryPolicy = __commonJS({ - "node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/throttlingRetryPolicy.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.throttlingRetryPolicyName = void 0; - exports2.throttlingRetryPolicy = throttlingRetryPolicy; - var throttlingRetryStrategy_js_1 = require_throttlingRetryStrategy(); - var retryPolicy_js_1 = require_retryPolicy(); - var constants_js_1 = require_constants13(); - exports2.throttlingRetryPolicyName = "throttlingRetryPolicy"; - function throttlingRetryPolicy(options = {}) { - return { - name: exports2.throttlingRetryPolicyName, - sendRequest: (0, retryPolicy_js_1.retryPolicy)([(0, throttlingRetryStrategy_js_1.throttlingRetryStrategy)()], { - maxRetries: options.maxRetries ?? 
constants_js_1.DEFAULT_RETRY_POLICY_COUNT - }).sendRequest - }; - } - } -}); - -// node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/internal.js -var require_internal2 = __commonJS({ - "node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/internal.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.userAgentPolicyName = exports2.userAgentPolicy = exports2.tlsPolicyName = exports2.tlsPolicy = exports2.redirectPolicyName = exports2.redirectPolicy = exports2.getDefaultProxySettings = exports2.proxyPolicyName = exports2.proxyPolicy = exports2.multipartPolicyName = exports2.multipartPolicy = exports2.logPolicyName = exports2.logPolicy = exports2.formDataPolicyName = exports2.formDataPolicy = exports2.throttlingRetryPolicyName = exports2.throttlingRetryPolicy = exports2.systemErrorRetryPolicyName = exports2.systemErrorRetryPolicy = exports2.retryPolicy = exports2.exponentialRetryPolicyName = exports2.exponentialRetryPolicy = exports2.defaultRetryPolicyName = exports2.defaultRetryPolicy = exports2.decompressResponsePolicyName = exports2.decompressResponsePolicy = exports2.agentPolicyName = exports2.agentPolicy = void 0; - var agentPolicy_js_1 = require_agentPolicy(); - Object.defineProperty(exports2, "agentPolicy", { enumerable: true, get: function() { - return agentPolicy_js_1.agentPolicy; - } }); - Object.defineProperty(exports2, "agentPolicyName", { enumerable: true, get: function() { - return agentPolicy_js_1.agentPolicyName; - } }); - var decompressResponsePolicy_js_1 = require_decompressResponsePolicy(); - Object.defineProperty(exports2, "decompressResponsePolicy", { enumerable: true, get: function() { - return decompressResponsePolicy_js_1.decompressResponsePolicy; - } }); - Object.defineProperty(exports2, "decompressResponsePolicyName", { enumerable: true, get: function() { - return decompressResponsePolicy_js_1.decompressResponsePolicyName; - } }); - var defaultRetryPolicy_js_1 = require_defaultRetryPolicy(); - Object.defineProperty(exports2, "defaultRetryPolicy", { enumerable: true, get: function() { - return defaultRetryPolicy_js_1.defaultRetryPolicy; - } }); - Object.defineProperty(exports2, "defaultRetryPolicyName", { enumerable: true, get: function() { - return defaultRetryPolicy_js_1.defaultRetryPolicyName; - } }); - var exponentialRetryPolicy_js_1 = require_exponentialRetryPolicy(); - Object.defineProperty(exports2, "exponentialRetryPolicy", { enumerable: true, get: function() { - return exponentialRetryPolicy_js_1.exponentialRetryPolicy; - } }); - Object.defineProperty(exports2, "exponentialRetryPolicyName", { enumerable: true, get: function() { - return exponentialRetryPolicy_js_1.exponentialRetryPolicyName; - } }); - var retryPolicy_js_1 = require_retryPolicy(); - Object.defineProperty(exports2, "retryPolicy", { enumerable: true, get: function() { - return retryPolicy_js_1.retryPolicy; - } }); - var systemErrorRetryPolicy_js_1 = require_systemErrorRetryPolicy(); - Object.defineProperty(exports2, "systemErrorRetryPolicy", { enumerable: true, get: function() { - return systemErrorRetryPolicy_js_1.systemErrorRetryPolicy; - } }); - Object.defineProperty(exports2, "systemErrorRetryPolicyName", { enumerable: true, get: function() { - return systemErrorRetryPolicy_js_1.systemErrorRetryPolicyName; - } }); - var throttlingRetryPolicy_js_1 = require_throttlingRetryPolicy(); - Object.defineProperty(exports2, "throttlingRetryPolicy", { enumerable: true, get: function() { - return 
throttlingRetryPolicy_js_1.throttlingRetryPolicy; - } }); - Object.defineProperty(exports2, "throttlingRetryPolicyName", { enumerable: true, get: function() { - return throttlingRetryPolicy_js_1.throttlingRetryPolicyName; - } }); - var formDataPolicy_js_1 = require_formDataPolicy(); - Object.defineProperty(exports2, "formDataPolicy", { enumerable: true, get: function() { - return formDataPolicy_js_1.formDataPolicy; - } }); - Object.defineProperty(exports2, "formDataPolicyName", { enumerable: true, get: function() { - return formDataPolicy_js_1.formDataPolicyName; - } }); - var logPolicy_js_1 = require_logPolicy(); - Object.defineProperty(exports2, "logPolicy", { enumerable: true, get: function() { - return logPolicy_js_1.logPolicy; - } }); - Object.defineProperty(exports2, "logPolicyName", { enumerable: true, get: function() { - return logPolicy_js_1.logPolicyName; - } }); - var multipartPolicy_js_1 = require_multipartPolicy(); - Object.defineProperty(exports2, "multipartPolicy", { enumerable: true, get: function() { - return multipartPolicy_js_1.multipartPolicy; - } }); - Object.defineProperty(exports2, "multipartPolicyName", { enumerable: true, get: function() { - return multipartPolicy_js_1.multipartPolicyName; - } }); - var proxyPolicy_js_1 = require_proxyPolicy(); - Object.defineProperty(exports2, "proxyPolicy", { enumerable: true, get: function() { - return proxyPolicy_js_1.proxyPolicy; - } }); - Object.defineProperty(exports2, "proxyPolicyName", { enumerable: true, get: function() { - return proxyPolicy_js_1.proxyPolicyName; - } }); - Object.defineProperty(exports2, "getDefaultProxySettings", { enumerable: true, get: function() { - return proxyPolicy_js_1.getDefaultProxySettings; - } }); - var redirectPolicy_js_1 = require_redirectPolicy(); - Object.defineProperty(exports2, "redirectPolicy", { enumerable: true, get: function() { - return redirectPolicy_js_1.redirectPolicy; - } }); - Object.defineProperty(exports2, "redirectPolicyName", { enumerable: true, get: function() { - return redirectPolicy_js_1.redirectPolicyName; - } }); - var tlsPolicy_js_1 = require_tlsPolicy(); - Object.defineProperty(exports2, "tlsPolicy", { enumerable: true, get: function() { - return tlsPolicy_js_1.tlsPolicy; - } }); - Object.defineProperty(exports2, "tlsPolicyName", { enumerable: true, get: function() { - return tlsPolicy_js_1.tlsPolicyName; - } }); - var userAgentPolicy_js_1 = require_userAgentPolicy(); - Object.defineProperty(exports2, "userAgentPolicy", { enumerable: true, get: function() { - return userAgentPolicy_js_1.userAgentPolicy; - } }); - Object.defineProperty(exports2, "userAgentPolicyName", { enumerable: true, get: function() { - return userAgentPolicy_js_1.userAgentPolicyName; - } }); - } -}); - -// node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/logPolicy.js -var require_logPolicy2 = __commonJS({ - "node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/logPolicy.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.logPolicyName = void 0; - exports2.logPolicy = logPolicy; - var log_js_1 = require_log3(); - var policies_1 = require_internal2(); - exports2.logPolicyName = policies_1.logPolicyName; - function logPolicy(options = {}) { - return (0, policies_1.logPolicy)({ - logger: log_js_1.logger.info, - ...options - }); - } - } -}); - -// node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/redirectPolicy.js -var require_redirectPolicy2 = __commonJS({ - 
"node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/redirectPolicy.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.redirectPolicyName = void 0; - exports2.redirectPolicy = redirectPolicy; - var policies_1 = require_internal2(); - exports2.redirectPolicyName = policies_1.redirectPolicyName; - function redirectPolicy(options = {}) { - return (0, policies_1.redirectPolicy)(options); - } - } -}); - -// node_modules/@azure/core-rest-pipeline/dist/commonjs/util/userAgentPlatform.js -var require_userAgentPlatform2 = __commonJS({ - "node_modules/@azure/core-rest-pipeline/dist/commonjs/util/userAgentPlatform.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.getHeaderName = getHeaderName; - exports2.setPlatformSpecificData = setPlatformSpecificData; - var tslib_1 = (init_tslib_es6(), __toCommonJS(tslib_es6_exports)); - var node_os_1 = tslib_1.__importDefault(require("node:os")); - var node_process_1 = tslib_1.__importDefault(require("node:process")); - function getHeaderName() { - return "User-Agent"; - } - async function setPlatformSpecificData(map2) { - if (node_process_1.default && node_process_1.default.versions) { - const osInfo = `${node_os_1.default.type()} ${node_os_1.default.release()}; ${node_os_1.default.arch()}`; - const versions = node_process_1.default.versions; - if (versions.bun) { - map2.set("Bun", `${versions.bun} (${osInfo})`); - } else if (versions.deno) { - map2.set("Deno", `${versions.deno} (${osInfo})`); - } else if (versions.node) { - map2.set("Node", `${versions.node} (${osInfo})`); - } - } - } - } -}); - -// node_modules/@azure/core-rest-pipeline/dist/commonjs/constants.js -var require_constants14 = __commonJS({ - "node_modules/@azure/core-rest-pipeline/dist/commonjs/constants.js"(exports2) { +// node_modules/@typespec/ts-http-runtime/dist/commonjs/constants.js +var require_constants13 = __commonJS({ + "node_modules/@typespec/ts-http-runtime/dist/commonjs/constants.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.DEFAULT_RETRY_POLICY_COUNT = exports2.SDK_VERSION = void 0; - exports2.SDK_VERSION = "1.22.2"; + exports2.SDK_VERSION = "0.3.2"; exports2.DEFAULT_RETRY_POLICY_COUNT = 3; } }); -// node_modules/@azure/core-rest-pipeline/dist/commonjs/util/userAgent.js -var require_userAgent2 = __commonJS({ - "node_modules/@azure/core-rest-pipeline/dist/commonjs/util/userAgent.js"(exports2) { +// node_modules/@typespec/ts-http-runtime/dist/commonjs/util/userAgent.js +var require_userAgent = __commonJS({ + "node_modules/@typespec/ts-http-runtime/dist/commonjs/util/userAgent.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getUserAgentHeaderName = getUserAgentHeaderName; exports2.getUserAgentValue = getUserAgentValue; - var userAgentPlatform_js_1 = require_userAgentPlatform2(); - var constants_js_1 = require_constants14(); + var userAgentPlatform_js_1 = require_userAgentPlatform(); + var constants_js_1 = require_constants13(); function getUserAgentString(telemetryInfo) { const parts = []; for (const [key, value] of telemetryInfo) { @@ -56178,7 +54043,7 @@ var require_userAgent2 = __commonJS({ } async function getUserAgentValue(prefix) { const runtimeInfo = /* @__PURE__ */ new Map(); - runtimeInfo.set("core-rest-pipeline", constants_js_1.SDK_VERSION); + runtimeInfo.set("ts-http-runtime", constants_js_1.SDK_VERSION); await (0, 
userAgentPlatform_js_1.setPlatformSpecificData)(runtimeInfo); const defaultAgent = getUserAgentString(runtimeInfo); const userAgentValue = prefix ? `${prefix} ${defaultAgent}` : defaultAgent; @@ -56187,14 +54052,14 @@ var require_userAgent2 = __commonJS({ } }); -// node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/userAgentPolicy.js -var require_userAgentPolicy2 = __commonJS({ - "node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/userAgentPolicy.js"(exports2) { +// node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/userAgentPolicy.js +var require_userAgentPolicy = __commonJS({ + "node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/userAgentPolicy.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.userAgentPolicyName = void 0; exports2.userAgentPolicy = userAgentPolicy; - var userAgent_js_1 = require_userAgent2(); + var userAgent_js_1 = require_userAgent(); var UserAgentHeaderName = (0, userAgent_js_1.getUserAgentHeaderName)(); exports2.userAgentPolicyName = "userAgentPolicy"; function userAgentPolicy(options = {}) { @@ -56212,15993 +54077,13871 @@ var require_userAgentPolicy2 = __commonJS({ } }); -// node_modules/@typespec/ts-http-runtime/dist/commonjs/util/sha256.js -var require_sha256 = __commonJS({ - "node_modules/@typespec/ts-http-runtime/dist/commonjs/util/sha256.js"(exports2) { +// node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/decompressResponsePolicy.js +var require_decompressResponsePolicy = __commonJS({ + "node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/decompressResponsePolicy.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.computeSha256Hmac = computeSha256Hmac; - exports2.computeSha256Hash = computeSha256Hash; - var node_crypto_1 = require("node:crypto"); - async function computeSha256Hmac(key, stringToSign, encoding) { - const decodedKey = Buffer.from(key, "base64"); - return (0, node_crypto_1.createHmac)("sha256", decodedKey).update(stringToSign).digest(encoding); - } - async function computeSha256Hash(content, encoding) { - return (0, node_crypto_1.createHash)("sha256").update(content).digest(encoding); + exports2.decompressResponsePolicyName = void 0; + exports2.decompressResponsePolicy = decompressResponsePolicy; + exports2.decompressResponsePolicyName = "decompressResponsePolicy"; + function decompressResponsePolicy() { + return { + name: exports2.decompressResponsePolicyName, + async sendRequest(request2, next) { + if (request2.method !== "HEAD") { + request2.headers.set("Accept-Encoding", "gzip,deflate"); + } + return next(request2); + } + }; } } }); -// node_modules/@typespec/ts-http-runtime/dist/commonjs/util/internal.js -var require_internal3 = __commonJS({ - "node_modules/@typespec/ts-http-runtime/dist/commonjs/util/internal.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.Sanitizer = exports2.uint8ArrayToString = exports2.stringToUint8Array = exports2.isWebWorker = exports2.isReactNative = exports2.isDeno = exports2.isNodeRuntime = exports2.isNodeLike = exports2.isBun = exports2.isBrowser = exports2.randomUUID = exports2.computeSha256Hmac = exports2.computeSha256Hash = exports2.isError = exports2.isObject = exports2.getRandomIntegerInclusive = exports2.calculateRetryDelay = void 0; - var delay_js_1 = require_delay(); - Object.defineProperty(exports2, "calculateRetryDelay", { enumerable: true, get: function() { - return 
delay_js_1.calculateRetryDelay; - } }); - var random_js_1 = require_random(); - Object.defineProperty(exports2, "getRandomIntegerInclusive", { enumerable: true, get: function() { - return random_js_1.getRandomIntegerInclusive; - } }); - var object_js_1 = require_object(); - Object.defineProperty(exports2, "isObject", { enumerable: true, get: function() { - return object_js_1.isObject; - } }); - var error_js_1 = require_error(); - Object.defineProperty(exports2, "isError", { enumerable: true, get: function() { - return error_js_1.isError; - } }); - var sha256_js_1 = require_sha256(); - Object.defineProperty(exports2, "computeSha256Hash", { enumerable: true, get: function() { - return sha256_js_1.computeSha256Hash; - } }); - Object.defineProperty(exports2, "computeSha256Hmac", { enumerable: true, get: function() { - return sha256_js_1.computeSha256Hmac; - } }); - var uuidUtils_js_1 = require_uuidUtils(); - Object.defineProperty(exports2, "randomUUID", { enumerable: true, get: function() { - return uuidUtils_js_1.randomUUID; - } }); - var checkEnvironment_js_1 = require_checkEnvironment(); - Object.defineProperty(exports2, "isBrowser", { enumerable: true, get: function() { - return checkEnvironment_js_1.isBrowser; - } }); - Object.defineProperty(exports2, "isBun", { enumerable: true, get: function() { - return checkEnvironment_js_1.isBun; - } }); - Object.defineProperty(exports2, "isNodeLike", { enumerable: true, get: function() { - return checkEnvironment_js_1.isNodeLike; - } }); - Object.defineProperty(exports2, "isNodeRuntime", { enumerable: true, get: function() { - return checkEnvironment_js_1.isNodeRuntime; - } }); - Object.defineProperty(exports2, "isDeno", { enumerable: true, get: function() { - return checkEnvironment_js_1.isDeno; - } }); - Object.defineProperty(exports2, "isReactNative", { enumerable: true, get: function() { - return checkEnvironment_js_1.isReactNative; - } }); - Object.defineProperty(exports2, "isWebWorker", { enumerable: true, get: function() { - return checkEnvironment_js_1.isWebWorker; - } }); - var bytesEncoding_js_1 = require_bytesEncoding(); - Object.defineProperty(exports2, "stringToUint8Array", { enumerable: true, get: function() { - return bytesEncoding_js_1.stringToUint8Array; - } }); - Object.defineProperty(exports2, "uint8ArrayToString", { enumerable: true, get: function() { - return bytesEncoding_js_1.uint8ArrayToString; - } }); - var sanitizer_js_1 = require_sanitizer(); - Object.defineProperty(exports2, "Sanitizer", { enumerable: true, get: function() { - return sanitizer_js_1.Sanitizer; - } }); - } -}); - -// node_modules/@azure/core-util/dist/commonjs/aborterUtils.js -var require_aborterUtils = __commonJS({ - "node_modules/@azure/core-util/dist/commonjs/aborterUtils.js"(exports2) { +// node_modules/@typespec/ts-http-runtime/dist/commonjs/util/random.js +var require_random = __commonJS({ + "node_modules/@typespec/ts-http-runtime/dist/commonjs/util/random.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.cancelablePromiseRace = cancelablePromiseRace; - async function cancelablePromiseRace(abortablePromiseBuilders, options) { - const aborter = new AbortController(); - function abortHandler() { - aborter.abort(); - } - options?.abortSignal?.addEventListener("abort", abortHandler); - try { - return await Promise.race(abortablePromiseBuilders.map((p) => p({ abortSignal: aborter.signal }))); - } finally { - aborter.abort(); - options?.abortSignal?.removeEventListener("abort", abortHandler); - } + 
exports2.getRandomIntegerInclusive = getRandomIntegerInclusive; + function getRandomIntegerInclusive(min, max) { + min = Math.ceil(min); + max = Math.floor(max); + const offset = Math.floor(Math.random() * (max - min + 1)); + return offset + min; } } }); -// node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/commonjs/AbortError.js -var require_AbortError2 = __commonJS({ - "node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/commonjs/AbortError.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.AbortError = void 0; - var AbortError = class extends Error { - constructor(message) { - super(message); - this.name = "AbortError"; - } - }; - exports2.AbortError = AbortError; - } -}); - -// node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/commonjs/index.js -var require_commonjs3 = __commonJS({ - "node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/commonjs/index.js"(exports2) { +// node_modules/@typespec/ts-http-runtime/dist/commonjs/util/delay.js +var require_delay = __commonJS({ + "node_modules/@typespec/ts-http-runtime/dist/commonjs/util/delay.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.AbortError = void 0; - var AbortError_js_1 = require_AbortError2(); - Object.defineProperty(exports2, "AbortError", { enumerable: true, get: function() { - return AbortError_js_1.AbortError; - } }); + exports2.calculateRetryDelay = calculateRetryDelay; + var random_js_1 = require_random(); + function calculateRetryDelay(retryAttempt, config) { + const exponentialDelay = config.retryDelayInMs * Math.pow(2, retryAttempt); + const clampedDelay = Math.min(config.maxRetryDelayInMs, exponentialDelay); + const retryAfterInMs = clampedDelay / 2 + (0, random_js_1.getRandomIntegerInclusive)(0, clampedDelay / 2); + return { retryAfterInMs }; + } } }); -// node_modules/@azure/core-util/dist/commonjs/createAbortablePromise.js -var require_createAbortablePromise = __commonJS({ - "node_modules/@azure/core-util/dist/commonjs/createAbortablePromise.js"(exports2) { +// node_modules/@typespec/ts-http-runtime/dist/commonjs/util/helpers.js +var require_helpers2 = __commonJS({ + "node_modules/@typespec/ts-http-runtime/dist/commonjs/util/helpers.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.createAbortablePromise = createAbortablePromise; - var abort_controller_1 = require_commonjs3(); - function createAbortablePromise(buildPromise, options) { - const { cleanupBeforeAbort, abortSignal, abortErrorMsg } = options ?? {}; + exports2.delay = delay2; + exports2.parseHeaderValueAsNumber = parseHeaderValueAsNumber; + var AbortError_js_1 = require_AbortError(); + var StandardAbortMessage = "The operation was aborted."; + function delay2(delayInMs, value, options) { return new Promise((resolve6, reject) => { - function rejectOnAbort() { - reject(new abort_controller_1.AbortError(abortErrorMsg ?? "The operation was aborted.")); - } - function removeListeners() { - abortSignal?.removeEventListener("abort", onAbort); - } - function onAbort() { - cleanupBeforeAbort?.(); + let timer = void 0; + let onAborted = void 0; + const rejectOnAbort = () => { + return reject(new AbortError_js_1.AbortError(options?.abortErrorMsg ? 
options?.abortErrorMsg : StandardAbortMessage)); + }; + const removeListeners = () => { + if (options?.abortSignal && onAborted) { + options.abortSignal.removeEventListener("abort", onAborted); + } + }; + onAborted = () => { + if (timer) { + clearTimeout(timer); + } removeListeners(); - rejectOnAbort(); - } - if (abortSignal?.aborted) { + return rejectOnAbort(); + }; + if (options?.abortSignal && options.abortSignal.aborted) { return rejectOnAbort(); } - try { - buildPromise((x) => { - removeListeners(); - resolve6(x); - }, (x) => { - removeListeners(); - reject(x); - }); - } catch (err) { - reject(err); + timer = setTimeout(() => { + removeListeners(); + resolve6(value); + }, delayInMs); + if (options?.abortSignal) { + options.abortSignal.addEventListener("abort", onAborted); } - abortSignal?.addEventListener("abort", onAbort); }); } + function parseHeaderValueAsNumber(response, headerName) { + const value = response.headers.get(headerName); + if (!value) + return; + const valueAsNum = Number(value); + if (Number.isNaN(valueAsNum)) + return; + return valueAsNum; + } } }); -// node_modules/@azure/core-util/dist/commonjs/delay.js -var require_delay2 = __commonJS({ - "node_modules/@azure/core-util/dist/commonjs/delay.js"(exports2) { +// node_modules/@typespec/ts-http-runtime/dist/commonjs/retryStrategies/throttlingRetryStrategy.js +var require_throttlingRetryStrategy = __commonJS({ + "node_modules/@typespec/ts-http-runtime/dist/commonjs/retryStrategies/throttlingRetryStrategy.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.delay = delay2; - exports2.calculateRetryDelay = calculateRetryDelay; - var createAbortablePromise_js_1 = require_createAbortablePromise(); - var util_1 = require_internal3(); - var StandardAbortMessage = "The delay was aborted."; - function delay2(timeInMs, options) { - let token; - const { abortSignal, abortErrorMsg } = options ?? {}; - return (0, createAbortablePromise_js_1.createAbortablePromise)((resolve6) => { - token = setTimeout(resolve6, timeInMs); - }, { - cleanupBeforeAbort: () => clearTimeout(token), - abortSignal, - abortErrorMsg: abortErrorMsg ?? 
StandardAbortMessage - }); - } - function calculateRetryDelay(retryAttempt, config) { - const exponentialDelay = config.retryDelayInMs * Math.pow(2, retryAttempt); - const clampedDelay = Math.min(config.maxRetryDelayInMs, exponentialDelay); - const retryAfterInMs = clampedDelay / 2 + (0, util_1.getRandomIntegerInclusive)(0, clampedDelay / 2); - return { retryAfterInMs }; - } - } -}); - -// node_modules/@azure/core-util/dist/commonjs/error.js -var require_error2 = __commonJS({ - "node_modules/@azure/core-util/dist/commonjs/error.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.getErrorMessage = getErrorMessage2; - var util_1 = require_internal3(); - function getErrorMessage2(e) { - if ((0, util_1.isError)(e)) { - return e.message; - } else { - let stringified; - try { - if (typeof e === "object" && e) { - stringified = JSON.stringify(e); - } else { - stringified = String(e); + exports2.isThrottlingRetryResponse = isThrottlingRetryResponse; + exports2.throttlingRetryStrategy = throttlingRetryStrategy; + var helpers_js_1 = require_helpers2(); + var RetryAfterHeader = "Retry-After"; + var AllRetryAfterHeaders = ["retry-after-ms", "x-ms-retry-after-ms", RetryAfterHeader]; + function getRetryAfterInMs(response) { + if (!(response && [429, 503].includes(response.status))) + return void 0; + try { + for (const header of AllRetryAfterHeaders) { + const retryAfterValue = (0, helpers_js_1.parseHeaderValueAsNumber)(response, header); + if (retryAfterValue === 0 || retryAfterValue) { + const multiplyingFactor = header === RetryAfterHeader ? 1e3 : 1; + return retryAfterValue * multiplyingFactor; } - } catch (err) { - stringified = "[unable to stringify input]"; } - return `Unknown error ${stringified}`; + const retryAfterHeader = response.headers.get(RetryAfterHeader); + if (!retryAfterHeader) + return; + const date = Date.parse(retryAfterHeader); + const diff = date - Date.now(); + return Number.isFinite(diff) ? 
Math.max(0, diff) : void 0; + } catch { + return void 0; } } - } -}); - -// node_modules/@azure/core-util/dist/commonjs/typeGuards.js -var require_typeGuards2 = __commonJS({ - "node_modules/@azure/core-util/dist/commonjs/typeGuards.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.isDefined = isDefined3; - exports2.isObjectWithProperties = isObjectWithProperties; - exports2.objectHasProperty = objectHasProperty; - function isDefined3(thing) { - return typeof thing !== "undefined" && thing !== null; + function isThrottlingRetryResponse(response) { + return Number.isFinite(getRetryAfterInMs(response)); } - function isObjectWithProperties(thing, properties) { - if (!isDefined3(thing) || typeof thing !== "object") { - return false; - } - for (const property of properties) { - if (!objectHasProperty(thing, property)) { - return false; + function throttlingRetryStrategy() { + return { + name: "throttlingRetryStrategy", + retry({ response }) { + const retryAfterInMs = getRetryAfterInMs(response); + if (!Number.isFinite(retryAfterInMs)) { + return { skipStrategy: true }; + } + return { + retryAfterInMs + }; } - } - return true; - } - function objectHasProperty(thing, property) { - return isDefined3(thing) && typeof thing === "object" && property in thing; - } - } -}); - -// node_modules/@azure/core-util/dist/commonjs/index.js -var require_commonjs4 = __commonJS({ - "node_modules/@azure/core-util/dist/commonjs/index.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.isWebWorker = exports2.isReactNative = exports2.isNodeRuntime = exports2.isNodeLike = exports2.isNode = exports2.isDeno = exports2.isBun = exports2.isBrowser = exports2.objectHasProperty = exports2.isObjectWithProperties = exports2.isDefined = exports2.getErrorMessage = exports2.delay = exports2.createAbortablePromise = exports2.cancelablePromiseRace = void 0; - exports2.calculateRetryDelay = calculateRetryDelay; - exports2.computeSha256Hash = computeSha256Hash; - exports2.computeSha256Hmac = computeSha256Hmac; - exports2.getRandomIntegerInclusive = getRandomIntegerInclusive; - exports2.isError = isError; - exports2.isObject = isObject2; - exports2.randomUUID = randomUUID2; - exports2.uint8ArrayToString = uint8ArrayToString; - exports2.stringToUint8Array = stringToUint8Array; - var tslib_1 = (init_tslib_es6(), __toCommonJS(tslib_es6_exports)); - var tspRuntime = tslib_1.__importStar(require_internal3()); - var aborterUtils_js_1 = require_aborterUtils(); - Object.defineProperty(exports2, "cancelablePromiseRace", { enumerable: true, get: function() { - return aborterUtils_js_1.cancelablePromiseRace; - } }); - var createAbortablePromise_js_1 = require_createAbortablePromise(); - Object.defineProperty(exports2, "createAbortablePromise", { enumerable: true, get: function() { - return createAbortablePromise_js_1.createAbortablePromise; - } }); - var delay_js_1 = require_delay2(); - Object.defineProperty(exports2, "delay", { enumerable: true, get: function() { - return delay_js_1.delay; - } }); - var error_js_1 = require_error2(); - Object.defineProperty(exports2, "getErrorMessage", { enumerable: true, get: function() { - return error_js_1.getErrorMessage; - } }); - var typeGuards_js_1 = require_typeGuards2(); - Object.defineProperty(exports2, "isDefined", { enumerable: true, get: function() { - return typeGuards_js_1.isDefined; - } }); - Object.defineProperty(exports2, "isObjectWithProperties", { enumerable: true, get: 
function() { - return typeGuards_js_1.isObjectWithProperties; - } }); - Object.defineProperty(exports2, "objectHasProperty", { enumerable: true, get: function() { - return typeGuards_js_1.objectHasProperty; - } }); - function calculateRetryDelay(retryAttempt, config) { - return tspRuntime.calculateRetryDelay(retryAttempt, config); - } - function computeSha256Hash(content, encoding) { - return tspRuntime.computeSha256Hash(content, encoding); - } - function computeSha256Hmac(key, stringToSign, encoding) { - return tspRuntime.computeSha256Hmac(key, stringToSign, encoding); - } - function getRandomIntegerInclusive(min, max) { - return tspRuntime.getRandomIntegerInclusive(min, max); - } - function isError(e) { - return tspRuntime.isError(e); - } - function isObject2(input) { - return tspRuntime.isObject(input); - } - function randomUUID2() { - return tspRuntime.randomUUID(); - } - exports2.isBrowser = tspRuntime.isBrowser; - exports2.isBun = tspRuntime.isBun; - exports2.isDeno = tspRuntime.isDeno; - exports2.isNode = tspRuntime.isNodeLike; - exports2.isNodeLike = tspRuntime.isNodeLike; - exports2.isNodeRuntime = tspRuntime.isNodeRuntime; - exports2.isReactNative = tspRuntime.isReactNative; - exports2.isWebWorker = tspRuntime.isWebWorker; - function uint8ArrayToString(bytes, format) { - return tspRuntime.uint8ArrayToString(bytes, format); - } - function stringToUint8Array(value, format) { - return tspRuntime.stringToUint8Array(value, format); + }; } } }); -// node_modules/@azure/core-rest-pipeline/dist/commonjs/util/file.js -var require_file3 = __commonJS({ - "node_modules/@azure/core-rest-pipeline/dist/commonjs/util/file.js"(exports2) { +// node_modules/@typespec/ts-http-runtime/dist/commonjs/retryStrategies/exponentialRetryStrategy.js +var require_exponentialRetryStrategy = __commonJS({ + "node_modules/@typespec/ts-http-runtime/dist/commonjs/retryStrategies/exponentialRetryStrategy.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.hasRawContent = hasRawContent; - exports2.getRawContent = getRawContent; - exports2.createFileFromStream = createFileFromStream; - exports2.createFile = createFile; - var core_util_1 = require_commonjs4(); - function isNodeReadableStream(x) { - return Boolean(x && typeof x["pipe"] === "function"); - } - var unimplementedMethods = { - arrayBuffer: () => { - throw new Error("Not implemented"); - }, - bytes: () => { - throw new Error("Not implemented"); - }, - slice: () => { - throw new Error("Not implemented"); - }, - text: () => { - throw new Error("Not implemented"); - } - }; - var rawContent = /* @__PURE__ */ Symbol("rawContent"); - function hasRawContent(x) { - return typeof x[rawContent] === "function"; - } - function getRawContent(blob) { - if (hasRawContent(blob)) { - return blob[rawContent](); - } else { - return blob; - } - } - function createFileFromStream(stream2, name, options = {}) { + exports2.exponentialRetryStrategy = exponentialRetryStrategy; + exports2.isExponentialRetryResponse = isExponentialRetryResponse; + exports2.isSystemError = isSystemError; + var delay_js_1 = require_delay(); + var throttlingRetryStrategy_js_1 = require_throttlingRetryStrategy(); + var DEFAULT_CLIENT_RETRY_INTERVAL = 1e3; + var DEFAULT_CLIENT_MAX_RETRY_INTERVAL = 1e3 * 64; + function exponentialRetryStrategy(options = {}) { + const retryInterval = options.retryDelayInMs ?? DEFAULT_CLIENT_RETRY_INTERVAL; + const maxRetryInterval = options.maxRetryDelayInMs ?? 
DEFAULT_CLIENT_MAX_RETRY_INTERVAL; return { - ...unimplementedMethods, - type: options.type ?? "", - lastModified: options.lastModified ?? (/* @__PURE__ */ new Date()).getTime(), - webkitRelativePath: options.webkitRelativePath ?? "", - size: options.size ?? -1, - name, - stream: () => { - const s = stream2(); - if (isNodeReadableStream(s)) { - throw new Error("Not supported: a Node stream was provided as input to createFileFromStream."); + name: "exponentialRetryStrategy", + retry({ retryCount, response, responseError }) { + const matchedSystemError = isSystemError(responseError); + const ignoreSystemErrors = matchedSystemError && options.ignoreSystemErrors; + const isExponential = isExponentialRetryResponse(response); + const ignoreExponentialResponse = isExponential && options.ignoreHttpStatusCodes; + const unknownResponse = response && ((0, throttlingRetryStrategy_js_1.isThrottlingRetryResponse)(response) || !isExponential); + if (unknownResponse || ignoreExponentialResponse || ignoreSystemErrors) { + return { skipStrategy: true }; } - return s; - }, - [rawContent]: stream2 + if (responseError && !matchedSystemError && !isExponential) { + return { errorToThrow: responseError }; + } + return (0, delay_js_1.calculateRetryDelay)(retryCount, { + retryDelayInMs: retryInterval, + maxRetryDelayInMs: maxRetryInterval + }); + } }; } - function createFile(content, name, options = {}) { - if (core_util_1.isNodeLike) { - return { - ...unimplementedMethods, - type: options.type ?? "", - lastModified: options.lastModified ?? (/* @__PURE__ */ new Date()).getTime(), - webkitRelativePath: options.webkitRelativePath ?? "", - size: content.byteLength, - name, - arrayBuffer: async () => content.buffer, - stream: () => new Blob([toArrayBuffer(content)]).stream(), - [rawContent]: () => content - }; - } else { - return new File([toArrayBuffer(content)], name, options); - } + function isExponentialRetryResponse(response) { + return Boolean(response && response.status !== void 0 && (response.status >= 500 || response.status === 408) && response.status !== 501 && response.status !== 505); } - function toArrayBuffer(source) { - if ("resize" in source.buffer) { - return source; + function isSystemError(err) { + if (!err) { + return false; } - return source.map((x) => x); + return err.code === "ETIMEDOUT" || err.code === "ESOCKETTIMEDOUT" || err.code === "ECONNREFUSED" || err.code === "ECONNRESET" || err.code === "ENOENT" || err.code === "ENOTFOUND"; } } }); -// node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/multipartPolicy.js -var require_multipartPolicy2 = __commonJS({ - "node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/multipartPolicy.js"(exports2) { +// node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/retryPolicy.js +var require_retryPolicy = __commonJS({ + "node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/retryPolicy.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.multipartPolicyName = void 0; - exports2.multipartPolicy = multipartPolicy; - var policies_1 = require_internal2(); - var file_js_1 = require_file3(); - exports2.multipartPolicyName = policies_1.multipartPolicyName; - function multipartPolicy() { - const tspPolicy = (0, policies_1.multipartPolicy)(); + exports2.retryPolicy = retryPolicy; + var helpers_js_1 = require_helpers2(); + var AbortError_js_1 = require_AbortError(); + var logger_js_1 = require_logger(); + var constants_js_1 = require_constants13(); + var retryPolicyLogger = (0, 
logger_js_1.createClientLogger)("ts-http-runtime retryPolicy"); + var retryPolicyName = "retryPolicy"; + function retryPolicy(strategies, options = { maxRetries: constants_js_1.DEFAULT_RETRY_POLICY_COUNT }) { + const logger = options.logger || retryPolicyLogger; return { - name: exports2.multipartPolicyName, - sendRequest: async (request2, next) => { - if (request2.multipartBody) { - for (const part of request2.multipartBody.parts) { - if ((0, file_js_1.hasRawContent)(part.body)) { - part.body = (0, file_js_1.getRawContent)(part.body); + name: retryPolicyName, + async sendRequest(request2, next) { + let response; + let responseError; + let retryCount = -1; + retryRequest: while (true) { + retryCount += 1; + response = void 0; + responseError = void 0; + try { + logger.info(`Retry ${retryCount}: Attempting to send request`, request2.requestId); + response = await next(request2); + logger.info(`Retry ${retryCount}: Received a response from request`, request2.requestId); + } catch (e) { + logger.error(`Retry ${retryCount}: Received an error from request`, request2.requestId); + responseError = e; + if (!e || responseError.name !== "RestError") { + throw e; + } + response = responseError.response; + } + if (request2.abortSignal?.aborted) { + logger.error(`Retry ${retryCount}: Request aborted.`); + const abortError = new AbortError_js_1.AbortError(); + throw abortError; + } + if (retryCount >= (options.maxRetries ?? constants_js_1.DEFAULT_RETRY_POLICY_COUNT)) { + logger.info(`Retry ${retryCount}: Maximum retries reached. Returning the last received response, or throwing the last received error.`); + if (responseError) { + throw responseError; + } else if (response) { + return response; + } else { + throw new Error("Maximum retries reached with no response or error to throw"); } } + logger.info(`Retry ${retryCount}: Processing ${strategies.length} retry strategies.`); + strategiesLoop: for (const strategy of strategies) { + const strategyLogger = strategy.logger || logger; + strategyLogger.info(`Retry ${retryCount}: Processing retry strategy ${strategy.name}.`); + const modifiers = strategy.retry({ + retryCount, + response, + responseError + }); + if (modifiers.skipStrategy) { + strategyLogger.info(`Retry ${retryCount}: Skipped.`); + continue strategiesLoop; + } + const { errorToThrow, retryAfterInMs, redirectTo } = modifiers; + if (errorToThrow) { + strategyLogger.error(`Retry ${retryCount}: Retry strategy ${strategy.name} throws error:`, errorToThrow); + throw errorToThrow; + } + if (retryAfterInMs || retryAfterInMs === 0) { + strategyLogger.info(`Retry ${retryCount}: Retry strategy ${strategy.name} retries after ${retryAfterInMs}`); + await (0, helpers_js_1.delay)(retryAfterInMs, void 0, { abortSignal: request2.abortSignal }); + continue retryRequest; + } + if (redirectTo) { + strategyLogger.info(`Retry ${retryCount}: Retry strategy ${strategy.name} redirects to ${redirectTo}`); + request2.url = redirectTo; + continue retryRequest; + } + } + if (responseError) { + logger.info(`None of the retry strategies could work with the received error. Throwing it.`); + throw responseError; + } + if (response) { + logger.info(`None of the retry strategies could work with the received response. 
Returning it.`); + return response; + } } - return tspPolicy.sendRequest(request2, next); } }; } } }); -// node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/decompressResponsePolicy.js -var require_decompressResponsePolicy2 = __commonJS({ - "node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/decompressResponsePolicy.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.decompressResponsePolicyName = void 0; - exports2.decompressResponsePolicy = decompressResponsePolicy; - var policies_1 = require_internal2(); - exports2.decompressResponsePolicyName = policies_1.decompressResponsePolicyName; - function decompressResponsePolicy() { - return (0, policies_1.decompressResponsePolicy)(); - } - } -}); - -// node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/defaultRetryPolicy.js -var require_defaultRetryPolicy2 = __commonJS({ - "node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/defaultRetryPolicy.js"(exports2) { +// node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/defaultRetryPolicy.js +var require_defaultRetryPolicy = __commonJS({ + "node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/defaultRetryPolicy.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.defaultRetryPolicyName = void 0; exports2.defaultRetryPolicy = defaultRetryPolicy; - var policies_1 = require_internal2(); - exports2.defaultRetryPolicyName = policies_1.defaultRetryPolicyName; + var exponentialRetryStrategy_js_1 = require_exponentialRetryStrategy(); + var throttlingRetryStrategy_js_1 = require_throttlingRetryStrategy(); + var retryPolicy_js_1 = require_retryPolicy(); + var constants_js_1 = require_constants13(); + exports2.defaultRetryPolicyName = "defaultRetryPolicy"; function defaultRetryPolicy(options = {}) { - return (0, policies_1.defaultRetryPolicy)(options); + return { + name: exports2.defaultRetryPolicyName, + sendRequest: (0, retryPolicy_js_1.retryPolicy)([(0, throttlingRetryStrategy_js_1.throttlingRetryStrategy)(), (0, exponentialRetryStrategy_js_1.exponentialRetryStrategy)(options)], { + maxRetries: options.maxRetries ?? 
constants_js_1.DEFAULT_RETRY_POLICY_COUNT + }).sendRequest + }; } } }); -// node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/formDataPolicy.js -var require_formDataPolicy2 = __commonJS({ - "node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/formDataPolicy.js"(exports2) { +// node_modules/@typespec/ts-http-runtime/dist/commonjs/util/checkEnvironment.js +var require_checkEnvironment = __commonJS({ + "node_modules/@typespec/ts-http-runtime/dist/commonjs/util/checkEnvironment.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.formDataPolicyName = void 0; - exports2.formDataPolicy = formDataPolicy; - var policies_1 = require_internal2(); - exports2.formDataPolicyName = policies_1.formDataPolicyName; - function formDataPolicy() { - return (0, policies_1.formDataPolicy)(); - } + exports2.isReactNative = exports2.isNodeRuntime = exports2.isNodeLike = exports2.isBun = exports2.isDeno = exports2.isWebWorker = exports2.isBrowser = void 0; + exports2.isBrowser = typeof window !== "undefined" && typeof window.document !== "undefined"; + exports2.isWebWorker = typeof self === "object" && typeof self?.importScripts === "function" && (self.constructor?.name === "DedicatedWorkerGlobalScope" || self.constructor?.name === "ServiceWorkerGlobalScope" || self.constructor?.name === "SharedWorkerGlobalScope"); + exports2.isDeno = typeof Deno !== "undefined" && typeof Deno.version !== "undefined" && typeof Deno.version.deno !== "undefined"; + exports2.isBun = typeof Bun !== "undefined" && typeof Bun.version !== "undefined"; + exports2.isNodeLike = typeof globalThis.process !== "undefined" && Boolean(globalThis.process.version) && Boolean(globalThis.process.versions?.node); + exports2.isNodeRuntime = exports2.isNodeLike && !exports2.isBun && !exports2.isDeno; + exports2.isReactNative = typeof navigator !== "undefined" && navigator?.product === "ReactNative"; } }); -// node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/proxyPolicy.js -var require_proxyPolicy2 = __commonJS({ - "node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/proxyPolicy.js"(exports2) { +// node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/formDataPolicy.js +var require_formDataPolicy = __commonJS({ + "node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/formDataPolicy.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.proxyPolicyName = void 0; - exports2.getDefaultProxySettings = getDefaultProxySettings; - exports2.proxyPolicy = proxyPolicy; - var policies_1 = require_internal2(); - exports2.proxyPolicyName = policies_1.proxyPolicyName; - function getDefaultProxySettings(proxyUrl) { - return (0, policies_1.getDefaultProxySettings)(proxyUrl); - } - function proxyPolicy(proxySettings, options) { - return (0, policies_1.proxyPolicy)(proxySettings, options); + exports2.formDataPolicyName = void 0; + exports2.formDataPolicy = formDataPolicy; + var bytesEncoding_js_1 = require_bytesEncoding(); + var checkEnvironment_js_1 = require_checkEnvironment(); + var httpHeaders_js_1 = require_httpHeaders(); + exports2.formDataPolicyName = "formDataPolicy"; + function formDataToFormDataMap(formData) { + const formDataMap = {}; + for (const [key, value] of formData.entries()) { + formDataMap[key] ??= []; + formDataMap[key].push(value); + } + return formDataMap; } - } -}); - -// node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/setClientRequestIdPolicy.js -var 
require_setClientRequestIdPolicy = __commonJS({ - "node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/setClientRequestIdPolicy.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.setClientRequestIdPolicyName = void 0; - exports2.setClientRequestIdPolicy = setClientRequestIdPolicy; - exports2.setClientRequestIdPolicyName = "setClientRequestIdPolicy"; - function setClientRequestIdPolicy(requestIdHeaderName = "x-ms-client-request-id") { + function formDataPolicy() { return { - name: exports2.setClientRequestIdPolicyName, + name: exports2.formDataPolicyName, async sendRequest(request2, next) { - if (!request2.headers.has(requestIdHeaderName)) { - request2.headers.set(requestIdHeaderName, request2.requestId); + if (checkEnvironment_js_1.isNodeLike && typeof FormData !== "undefined" && request2.body instanceof FormData) { + request2.formData = formDataToFormDataMap(request2.body); + request2.body = void 0; + } + if (request2.formData) { + const contentType = request2.headers.get("Content-Type"); + if (contentType && contentType.indexOf("application/x-www-form-urlencoded") !== -1) { + request2.body = wwwFormUrlEncode(request2.formData); + } else { + await prepareFormData(request2.formData, request2); + } + request2.formData = void 0; } return next(request2); } }; } - } -}); - -// node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/agentPolicy.js -var require_agentPolicy2 = __commonJS({ - "node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/agentPolicy.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.agentPolicyName = void 0; - exports2.agentPolicy = agentPolicy; - var policies_1 = require_internal2(); - exports2.agentPolicyName = policies_1.agentPolicyName; - function agentPolicy(agent) { - return (0, policies_1.agentPolicy)(agent); + function wwwFormUrlEncode(formData) { + const urlSearchParams = new URLSearchParams(); + for (const [key, value] of Object.entries(formData)) { + if (Array.isArray(value)) { + for (const subValue of value) { + urlSearchParams.append(key, subValue.toString()); + } + } else { + urlSearchParams.append(key, value.toString()); + } + } + return urlSearchParams.toString(); } - } -}); - -// node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/tlsPolicy.js -var require_tlsPolicy2 = __commonJS({ - "node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/tlsPolicy.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.tlsPolicyName = void 0; - exports2.tlsPolicy = tlsPolicy; - var policies_1 = require_internal2(); - exports2.tlsPolicyName = policies_1.tlsPolicyName; - function tlsPolicy(tlsSettings) { - return (0, policies_1.tlsPolicy)(tlsSettings); + async function prepareFormData(formData, request2) { + const contentType = request2.headers.get("Content-Type"); + if (contentType && !contentType.startsWith("multipart/form-data")) { + return; + } + request2.headers.set("Content-Type", contentType ?? "multipart/form-data"); + const parts = []; + for (const [fieldName, values] of Object.entries(formData)) { + for (const value of Array.isArray(values) ? 
values : [values]) { + if (typeof value === "string") { + parts.push({ + headers: (0, httpHeaders_js_1.createHttpHeaders)({ + "Content-Disposition": `form-data; name="${fieldName}"` + }), + body: (0, bytesEncoding_js_1.stringToUint8Array)(value, "utf-8") + }); + } else if (value === void 0 || value === null || typeof value !== "object") { + throw new Error(`Unexpected value for key ${fieldName}: ${value}. Value should be serialized to string first.`); + } else { + const fileName = value.name || "blob"; + const headers = (0, httpHeaders_js_1.createHttpHeaders)(); + headers.set("Content-Disposition", `form-data; name="${fieldName}"; filename="${fileName}"`); + headers.set("Content-Type", value.type || "application/octet-stream"); + parts.push({ + headers, + body: value + }); + } + } + } + request2.multipartBody = { parts }; } } }); -// node_modules/@azure/core-tracing/dist/commonjs/tracingContext.js -var require_tracingContext = __commonJS({ - "node_modules/@azure/core-tracing/dist/commonjs/tracingContext.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.TracingContextImpl = exports2.knownContextKeys = void 0; - exports2.createTracingContext = createTracingContext; - exports2.knownContextKeys = { - span: /* @__PURE__ */ Symbol.for("@azure/core-tracing span"), - namespace: /* @__PURE__ */ Symbol.for("@azure/core-tracing namespace") +// node_modules/ms/index.js +var require_ms = __commonJS({ + "node_modules/ms/index.js"(exports2, module2) { + var s = 1e3; + var m = s * 60; + var h = m * 60; + var d = h * 24; + var w = d * 7; + var y = d * 365.25; + module2.exports = function(val, options) { + options = options || {}; + var type2 = typeof val; + if (type2 === "string" && val.length > 0) { + return parse2(val); + } else if (type2 === "number" && isFinite(val)) { + return options.long ? fmtLong(val) : fmtShort(val); + } + throw new Error( + "val is not a non-empty string or a valid number. val=" + JSON.stringify(val) + ); }; - function createTracingContext(options = {}) { - let context2 = new TracingContextImpl(options.parentContext); - if (options.span) { - context2 = context2.setValue(exports2.knownContextKeys.span, options.span); + function parse2(str2) { + str2 = String(str2); + if (str2.length > 100) { + return; } - if (options.namespace) { - context2 = context2.setValue(exports2.knownContextKeys.namespace, options.namespace); + var match = /^(-?(?:\d+)?\.?\d+) *(milliseconds?|msecs?|ms|seconds?|secs?|s|minutes?|mins?|m|hours?|hrs?|h|days?|d|weeks?|w|years?|yrs?|y)?$/i.exec( + str2 + ); + if (!match) { + return; + } + var n = parseFloat(match[1]); + var type2 = (match[2] || "ms").toLowerCase(); + switch (type2) { + case "years": + case "year": + case "yrs": + case "yr": + case "y": + return n * y; + case "weeks": + case "week": + case "w": + return n * w; + case "days": + case "day": + case "d": + return n * d; + case "hours": + case "hour": + case "hrs": + case "hr": + case "h": + return n * h; + case "minutes": + case "minute": + case "mins": + case "min": + case "m": + return n * m; + case "seconds": + case "second": + case "secs": + case "sec": + case "s": + return n * s; + case "milliseconds": + case "millisecond": + case "msecs": + case "msec": + case "ms": + return n; + default: + return void 0; } - return context2; } - var TracingContextImpl = class _TracingContextImpl { - _contextMap; - constructor(initialContext) { - this._contextMap = initialContext instanceof _TracingContextImpl ? 
new Map(initialContext._contextMap) : /* @__PURE__ */ new Map(); + function fmtShort(ms) { + var msAbs = Math.abs(ms); + if (msAbs >= d) { + return Math.round(ms / d) + "d"; } - setValue(key, value) { - const newContext = new _TracingContextImpl(this); - newContext._contextMap.set(key, value); - return newContext; + if (msAbs >= h) { + return Math.round(ms / h) + "h"; } - getValue(key) { - return this._contextMap.get(key); + if (msAbs >= m) { + return Math.round(ms / m) + "m"; } - deleteValue(key) { - const newContext = new _TracingContextImpl(this); - newContext._contextMap.delete(key); - return newContext; + if (msAbs >= s) { + return Math.round(ms / s) + "s"; } - }; - exports2.TracingContextImpl = TracingContextImpl; - } -}); - -// node_modules/@azure/core-tracing/dist/commonjs/state.js -var require_state = __commonJS({ - "node_modules/@azure/core-tracing/dist/commonjs/state.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.state = void 0; - exports2.state = { - instrumenterImplementation: void 0 - }; - } -}); - -// node_modules/@azure/core-tracing/dist/commonjs/instrumenter.js -var require_instrumenter = __commonJS({ - "node_modules/@azure/core-tracing/dist/commonjs/instrumenter.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.createDefaultTracingSpan = createDefaultTracingSpan; - exports2.createDefaultInstrumenter = createDefaultInstrumenter; - exports2.useInstrumenter = useInstrumenter; - exports2.getInstrumenter = getInstrumenter; - var tracingContext_js_1 = require_tracingContext(); - var state_js_1 = require_state(); - function createDefaultTracingSpan() { - return { - end: () => { - }, - isRecording: () => false, - recordException: () => { - }, - setAttribute: () => { - }, - setStatus: () => { - }, - addEvent: () => { - } - }; - } - function createDefaultInstrumenter() { - return { - createRequestHeaders: () => { - return {}; - }, - parseTraceparentHeader: () => { - return void 0; - }, - startSpan: (_name, spanOptions) => { - return { - span: createDefaultTracingSpan(), - tracingContext: (0, tracingContext_js_1.createTracingContext)({ parentContext: spanOptions.tracingContext }) - }; - }, - withContext(_context, callback, ...callbackArgs) { - return callback(...callbackArgs); - } - }; - } - function useInstrumenter(instrumenter) { - state_js_1.state.instrumenterImplementation = instrumenter; + return ms + "ms"; } - function getInstrumenter() { - if (!state_js_1.state.instrumenterImplementation) { - state_js_1.state.instrumenterImplementation = createDefaultInstrumenter(); + function fmtLong(ms) { + var msAbs = Math.abs(ms); + if (msAbs >= d) { + return plural(ms, msAbs, d, "day"); } - return state_js_1.state.instrumenterImplementation; + if (msAbs >= h) { + return plural(ms, msAbs, h, "hour"); + } + if (msAbs >= m) { + return plural(ms, msAbs, m, "minute"); + } + if (msAbs >= s) { + return plural(ms, msAbs, s, "second"); + } + return ms + " ms"; + } + function plural(ms, msAbs, n, name) { + var isPlural = msAbs >= n * 1.5; + return Math.round(ms / n) + " " + name + (isPlural ? 
"s" : ""); } } }); -// node_modules/@azure/core-tracing/dist/commonjs/tracingClient.js -var require_tracingClient = __commonJS({ - "node_modules/@azure/core-tracing/dist/commonjs/tracingClient.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.createTracingClient = createTracingClient; - var instrumenter_js_1 = require_instrumenter(); - var tracingContext_js_1 = require_tracingContext(); - function createTracingClient(options) { - const { namespace, packageName, packageVersion } = options; - function startSpan(name, operationOptions, spanOptions) { - const startSpanResult = (0, instrumenter_js_1.getInstrumenter)().startSpan(name, { - ...spanOptions, - packageName, - packageVersion, - tracingContext: operationOptions?.tracingOptions?.tracingContext - }); - let tracingContext = startSpanResult.tracingContext; - const span = startSpanResult.span; - if (!tracingContext.getValue(tracingContext_js_1.knownContextKeys.namespace)) { - tracingContext = tracingContext.setValue(tracingContext_js_1.knownContextKeys.namespace, namespace); +// node_modules/debug/src/common.js +var require_common = __commonJS({ + "node_modules/debug/src/common.js"(exports2, module2) { + function setup(env) { + createDebug.debug = createDebug; + createDebug.default = createDebug; + createDebug.coerce = coerce3; + createDebug.disable = disable; + createDebug.enable = enable; + createDebug.enabled = enabled; + createDebug.humanize = require_ms(); + createDebug.destroy = destroy; + Object.keys(env).forEach((key) => { + createDebug[key] = env[key]; + }); + createDebug.names = []; + createDebug.skips = []; + createDebug.formatters = {}; + function selectColor(namespace) { + let hash2 = 0; + for (let i = 0; i < namespace.length; i++) { + hash2 = (hash2 << 5) - hash2 + namespace.charCodeAt(i); + hash2 |= 0; } - span.setAttribute("az.namespace", tracingContext.getValue(tracingContext_js_1.knownContextKeys.namespace)); - const updatedOptions = Object.assign({}, operationOptions, { - tracingOptions: { ...operationOptions?.tracingOptions, tracingContext } + return createDebug.colors[Math.abs(hash2) % createDebug.colors.length]; + } + createDebug.selectColor = selectColor; + function createDebug(namespace) { + let prevTime; + let enableOverride = null; + let namespacesCache; + let enabledCache; + function debug6(...args) { + if (!debug6.enabled) { + return; + } + const self2 = debug6; + const curr = Number(/* @__PURE__ */ new Date()); + const ms = curr - (prevTime || curr); + self2.diff = ms; + self2.prev = prevTime; + self2.curr = curr; + prevTime = curr; + args[0] = createDebug.coerce(args[0]); + if (typeof args[0] !== "string") { + args.unshift("%O"); + } + let index = 0; + args[0] = args[0].replace(/%([a-zA-Z%])/g, (match, format) => { + if (match === "%%") { + return "%"; + } + index++; + const formatter = createDebug.formatters[format]; + if (typeof formatter === "function") { + const val = args[index]; + match = formatter.call(self2, val); + args.splice(index, 1); + index--; + } + return match; + }); + createDebug.formatArgs.call(self2, args); + const logFn = self2.log || createDebug.log; + logFn.apply(self2, args); + } + debug6.namespace = namespace; + debug6.useColors = createDebug.useColors(); + debug6.color = createDebug.selectColor(namespace); + debug6.extend = extend3; + debug6.destroy = createDebug.destroy; + Object.defineProperty(debug6, "enabled", { + enumerable: true, + configurable: false, + get: () => { + if (enableOverride !== null) { + return 
enableOverride; + } + if (namespacesCache !== createDebug.namespaces) { + namespacesCache = createDebug.namespaces; + enabledCache = createDebug.enabled(namespace); + } + return enabledCache; + }, + set: (v) => { + enableOverride = v; + } }); - return { - span, - updatedOptions - }; + if (typeof createDebug.init === "function") { + createDebug.init(debug6); + } + return debug6; } - async function withSpan(name, operationOptions, callback, spanOptions) { - const { span, updatedOptions } = startSpan(name, operationOptions, spanOptions); - try { - const result = await withContext(updatedOptions.tracingOptions.tracingContext, () => Promise.resolve(callback(updatedOptions, span))); - span.setStatus({ status: "success" }); - return result; - } catch (err) { - span.setStatus({ status: "error", error: err }); - throw err; - } finally { - span.end(); + function extend3(namespace, delimiter) { + const newDebug = createDebug(this.namespace + (typeof delimiter === "undefined" ? ":" : delimiter) + namespace); + newDebug.log = this.log; + return newDebug; + } + function enable(namespaces) { + createDebug.save(namespaces); + createDebug.namespaces = namespaces; + createDebug.names = []; + createDebug.skips = []; + const split = (typeof namespaces === "string" ? namespaces : "").trim().replace(/\s+/g, ",").split(",").filter(Boolean); + for (const ns of split) { + if (ns[0] === "-") { + createDebug.skips.push(ns.slice(1)); + } else { + createDebug.names.push(ns); + } } } - function withContext(context2, callback, ...callbackArgs) { - return (0, instrumenter_js_1.getInstrumenter)().withContext(context2, callback, ...callbackArgs); + function matchesTemplate(search, template) { + let searchIndex = 0; + let templateIndex = 0; + let starIndex = -1; + let matchIndex = 0; + while (searchIndex < search.length) { + if (templateIndex < template.length && (template[templateIndex] === search[searchIndex] || template[templateIndex] === "*")) { + if (template[templateIndex] === "*") { + starIndex = templateIndex; + matchIndex = searchIndex; + templateIndex++; + } else { + searchIndex++; + templateIndex++; + } + } else if (starIndex !== -1) { + templateIndex = starIndex + 1; + matchIndex++; + searchIndex = matchIndex; + } else { + return false; + } + } + while (templateIndex < template.length && template[templateIndex] === "*") { + templateIndex++; + } + return templateIndex === template.length; } - function parseTraceparentHeader(traceparentHeader) { - return (0, instrumenter_js_1.getInstrumenter)().parseTraceparentHeader(traceparentHeader); + function disable() { + const namespaces = [ + ...createDebug.names, + ...createDebug.skips.map((namespace) => "-" + namespace) + ].join(","); + createDebug.enable(""); + return namespaces; } - function createRequestHeaders(tracingContext) { - return (0, instrumenter_js_1.getInstrumenter)().createRequestHeaders(tracingContext); + function enabled(name) { + for (const skip of createDebug.skips) { + if (matchesTemplate(name, skip)) { + return false; + } + } + for (const ns of createDebug.names) { + if (matchesTemplate(name, ns)) { + return true; + } + } + return false; } - return { - startSpan, - withSpan, - withContext, - parseTraceparentHeader, - createRequestHeaders - }; + function coerce3(val) { + if (val instanceof Error) { + return val.stack || val.message; + } + return val; + } + function destroy() { + console.warn("Instance method `debug.destroy()` is deprecated and no longer does anything. 
It will be removed in the next major version of `debug`."); + } + createDebug.enable(createDebug.load()); + return createDebug; } + module2.exports = setup; } }); -// node_modules/@azure/core-tracing/dist/commonjs/index.js -var require_commonjs5 = __commonJS({ - "node_modules/@azure/core-tracing/dist/commonjs/index.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.createTracingClient = exports2.useInstrumenter = void 0; - var instrumenter_js_1 = require_instrumenter(); - Object.defineProperty(exports2, "useInstrumenter", { enumerable: true, get: function() { - return instrumenter_js_1.useInstrumenter; - } }); - var tracingClient_js_1 = require_tracingClient(); - Object.defineProperty(exports2, "createTracingClient", { enumerable: true, get: function() { - return tracingClient_js_1.createTracingClient; - } }); - } -}); - -// node_modules/@azure/core-rest-pipeline/dist/commonjs/restError.js -var require_restError3 = __commonJS({ - "node_modules/@azure/core-rest-pipeline/dist/commonjs/restError.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.RestError = void 0; - exports2.isRestError = isRestError; - var ts_http_runtime_1 = require_commonjs(); - exports2.RestError = ts_http_runtime_1.RestError; - function isRestError(e) { - return (0, ts_http_runtime_1.isRestError)(e); - } - } -}); - -// node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/tracingPolicy.js -var require_tracingPolicy = __commonJS({ - "node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/tracingPolicy.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.tracingPolicyName = void 0; - exports2.tracingPolicy = tracingPolicy; - var core_tracing_1 = require_commonjs5(); - var constants_js_1 = require_constants14(); - var userAgent_js_1 = require_userAgent2(); - var log_js_1 = require_log3(); - var core_util_1 = require_commonjs4(); - var restError_js_1 = require_restError3(); - var util_1 = require_internal3(); - exports2.tracingPolicyName = "tracingPolicy"; - function tracingPolicy(options = {}) { - const userAgentPromise = (0, userAgent_js_1.getUserAgentValue)(options.userAgentPrefix); - const sanitizer = new util_1.Sanitizer({ - additionalAllowedQueryParameters: options.additionalAllowedQueryParameters - }); - const tracingClient = tryCreateTracingClient(); - return { - name: exports2.tracingPolicyName, - async sendRequest(request2, next) { - if (!tracingClient) { - return next(request2); - } - const userAgent2 = await userAgentPromise; - const spanAttributes = { - "http.url": sanitizer.sanitizeUrl(request2.url), - "http.method": request2.method, - "http.user_agent": userAgent2, - requestId: request2.requestId - }; - if (userAgent2) { - spanAttributes["http.user_agent"] = userAgent2; - } - const { span, tracingContext } = tryCreateSpan(tracingClient, request2, spanAttributes) ?? 
{}; - if (!span || !tracingContext) { - return next(request2); - } - try { - const response = await tracingClient.withContext(tracingContext, next, request2); - tryProcessResponse(span, response); - return response; - } catch (err) { - tryProcessError(span, err); - throw err; - } +// node_modules/debug/src/browser.js +var require_browser = __commonJS({ + "node_modules/debug/src/browser.js"(exports2, module2) { + exports2.formatArgs = formatArgs; + exports2.save = save; + exports2.load = load2; + exports2.useColors = useColors; + exports2.storage = localstorage(); + exports2.destroy = /* @__PURE__ */ (() => { + let warned = false; + return () => { + if (!warned) { + warned = true; + console.warn("Instance method `debug.destroy()` is deprecated and no longer does anything. It will be removed in the next major version of `debug`."); } }; + })(); + exports2.colors = [ + "#0000CC", + "#0000FF", + "#0033CC", + "#0033FF", + "#0066CC", + "#0066FF", + "#0099CC", + "#0099FF", + "#00CC00", + "#00CC33", + "#00CC66", + "#00CC99", + "#00CCCC", + "#00CCFF", + "#3300CC", + "#3300FF", + "#3333CC", + "#3333FF", + "#3366CC", + "#3366FF", + "#3399CC", + "#3399FF", + "#33CC00", + "#33CC33", + "#33CC66", + "#33CC99", + "#33CCCC", + "#33CCFF", + "#6600CC", + "#6600FF", + "#6633CC", + "#6633FF", + "#66CC00", + "#66CC33", + "#9900CC", + "#9900FF", + "#9933CC", + "#9933FF", + "#99CC00", + "#99CC33", + "#CC0000", + "#CC0033", + "#CC0066", + "#CC0099", + "#CC00CC", + "#CC00FF", + "#CC3300", + "#CC3333", + "#CC3366", + "#CC3399", + "#CC33CC", + "#CC33FF", + "#CC6600", + "#CC6633", + "#CC9900", + "#CC9933", + "#CCCC00", + "#CCCC33", + "#FF0000", + "#FF0033", + "#FF0066", + "#FF0099", + "#FF00CC", + "#FF00FF", + "#FF3300", + "#FF3333", + "#FF3366", + "#FF3399", + "#FF33CC", + "#FF33FF", + "#FF6600", + "#FF6633", + "#FF9900", + "#FF9933", + "#FFCC00", + "#FFCC33" + ]; + function useColors() { + if (typeof window !== "undefined" && window.process && (window.process.type === "renderer" || window.process.__nwjs)) { + return true; + } + if (typeof navigator !== "undefined" && navigator.userAgent && navigator.userAgent.toLowerCase().match(/(edge|trident)\/(\d+)/)) { + return false; + } + let m; + return typeof document !== "undefined" && document.documentElement && document.documentElement.style && document.documentElement.style.WebkitAppearance || // Is firebug? http://stackoverflow.com/a/398120/376773 + typeof window !== "undefined" && window.console && (window.console.firebug || window.console.exception && window.console.table) || // Is firefox >= v31? + // https://developer.mozilla.org/en-US/docs/Tools/Web_Console#Styling_messages + typeof navigator !== "undefined" && navigator.userAgent && (m = navigator.userAgent.toLowerCase().match(/firefox\/(\d+)/)) && parseInt(m[1], 10) >= 31 || // Double check webkit in userAgent just in case we are in a worker + typeof navigator !== "undefined" && navigator.userAgent && navigator.userAgent.toLowerCase().match(/applewebkit\/(\d+)/); } - function tryCreateTracingClient() { - try { - return (0, core_tracing_1.createTracingClient)({ - namespace: "", - packageName: "@azure/core-rest-pipeline", - packageVersion: constants_js_1.SDK_VERSION - }); - } catch (e) { - log_js_1.logger.warning(`Error when creating the TracingClient: ${(0, core_util_1.getErrorMessage)(e)}`); - return void 0; + function formatArgs(args) { + args[0] = (this.useColors ? "%c" : "") + this.namespace + (this.useColors ? " %c" : " ") + args[0] + (this.useColors ? 
"%c " : " ") + "+" + module2.exports.humanize(this.diff); + if (!this.useColors) { + return; } + const c = "color: " + this.color; + args.splice(1, 0, c, "color: inherit"); + let index = 0; + let lastC = 0; + args[0].replace(/%[a-zA-Z%]/g, (match) => { + if (match === "%%") { + return; + } + index++; + if (match === "%c") { + lastC = index; + } + }); + args.splice(lastC, 0, c); } - function tryCreateSpan(tracingClient, request2, spanAttributes) { + exports2.log = console.debug || console.log || (() => { + }); + function save(namespaces) { try { - const { span, updatedOptions } = tracingClient.startSpan(`HTTP ${request2.method}`, { tracingOptions: request2.tracingOptions }, { - spanKind: "client", - spanAttributes - }); - if (!span.isRecording()) { - span.end(); - return void 0; - } - const headers = tracingClient.createRequestHeaders(updatedOptions.tracingOptions.tracingContext); - for (const [key, value] of Object.entries(headers)) { - request2.headers.set(key, value); + if (namespaces) { + exports2.storage.setItem("debug", namespaces); + } else { + exports2.storage.removeItem("debug"); } - return { span, tracingContext: updatedOptions.tracingOptions.tracingContext }; - } catch (e) { - log_js_1.logger.warning(`Skipping creating a tracing span due to an error: ${(0, core_util_1.getErrorMessage)(e)}`); - return void 0; + } catch (error3) { } } - function tryProcessError(span, error3) { + function load2() { + let r; try { - span.setStatus({ - status: "error", - error: (0, core_util_1.isError)(error3) ? error3 : void 0 - }); - if ((0, restError_js_1.isRestError)(error3) && error3.statusCode) { - span.setAttribute("http.status_code", error3.statusCode); - } - span.end(); - } catch (e) { - log_js_1.logger.warning(`Skipping tracing span processing due to an error: ${(0, core_util_1.getErrorMessage)(e)}`); + r = exports2.storage.getItem("debug") || exports2.storage.getItem("DEBUG"); + } catch (error3) { + } + if (!r && typeof process !== "undefined" && "env" in process) { + r = process.env.DEBUG; } + return r; } - function tryProcessResponse(span, response) { + function localstorage() { try { - span.setAttribute("http.status_code", response.status); - const serviceRequestId = response.headers.get("x-ms-request-id"); - if (serviceRequestId) { - span.setAttribute("serviceRequestId", serviceRequestId); - } - if (response.status >= 400) { - span.setStatus({ - status: "error" - }); - } - span.end(); - } catch (e) { - log_js_1.logger.warning(`Skipping tracing span processing due to an error: ${(0, core_util_1.getErrorMessage)(e)}`); + return localStorage; + } catch (error3) { } } + module2.exports = require_common()(exports2); + var { formatters } = module2.exports; + formatters.j = function(v) { + try { + return JSON.stringify(v); + } catch (error3) { + return "[UnexpectedJSONParseError]: " + error3.message; + } + }; } }); -// node_modules/@azure/core-rest-pipeline/dist/commonjs/util/wrapAbortSignal.js -var require_wrapAbortSignal = __commonJS({ - "node_modules/@azure/core-rest-pipeline/dist/commonjs/util/wrapAbortSignal.js"(exports2) { +// node_modules/has-flag/index.js +var require_has_flag = __commonJS({ + "node_modules/has-flag/index.js"(exports2, module2) { "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.wrapAbortSignalLike = wrapAbortSignalLike; - function wrapAbortSignalLike(abortSignalLike) { - if (abortSignalLike instanceof AbortSignal) { - return { abortSignal: abortSignalLike }; - } - if (abortSignalLike.aborted) { - return { abortSignal: 
AbortSignal.abort(abortSignalLike.reason) }; - } - const controller = new AbortController(); - let needsCleanup = true; - function cleanup() { - if (needsCleanup) { - abortSignalLike.removeEventListener("abort", listener); - needsCleanup = false; - } - } - function listener() { - controller.abort(abortSignalLike.reason); - cleanup(); - } - abortSignalLike.addEventListener("abort", listener); - return { abortSignal: controller.signal, cleanup }; - } + module2.exports = (flag, argv = process.argv) => { + const prefix = flag.startsWith("-") ? "" : flag.length === 1 ? "-" : "--"; + const position = argv.indexOf(prefix + flag); + const terminatorPosition = argv.indexOf("--"); + return position !== -1 && (terminatorPosition === -1 || position < terminatorPosition); + }; } }); -// node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/wrapAbortSignalLikePolicy.js -var require_wrapAbortSignalLikePolicy = __commonJS({ - "node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/wrapAbortSignalLikePolicy.js"(exports2) { +// node_modules/supports-color/index.js +var require_supports_color = __commonJS({ + "node_modules/supports-color/index.js"(exports2, module2) { "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.wrapAbortSignalLikePolicyName = void 0; - exports2.wrapAbortSignalLikePolicy = wrapAbortSignalLikePolicy; - var wrapAbortSignal_js_1 = require_wrapAbortSignal(); - exports2.wrapAbortSignalLikePolicyName = "wrapAbortSignalLikePolicy"; - function wrapAbortSignalLikePolicy() { + var os3 = require("os"); + var tty = require("tty"); + var hasFlag = require_has_flag(); + var { env } = process; + var forceColor; + if (hasFlag("no-color") || hasFlag("no-colors") || hasFlag("color=false") || hasFlag("color=never")) { + forceColor = 0; + } else if (hasFlag("color") || hasFlag("colors") || hasFlag("color=true") || hasFlag("color=always")) { + forceColor = 1; + } + if ("FORCE_COLOR" in env) { + if (env.FORCE_COLOR === "true") { + forceColor = 1; + } else if (env.FORCE_COLOR === "false") { + forceColor = 0; + } else { + forceColor = env.FORCE_COLOR.length === 0 ? 
1 : Math.min(parseInt(env.FORCE_COLOR, 10), 3); + } + } + function translateLevel(level) { + if (level === 0) { + return false; + } return { - name: exports2.wrapAbortSignalLikePolicyName, - sendRequest: async (request2, next) => { - if (!request2.abortSignal) { - return next(request2); - } - const { abortSignal, cleanup } = (0, wrapAbortSignal_js_1.wrapAbortSignalLike)(request2.abortSignal); - request2.abortSignal = abortSignal; - try { - return await next(request2); - } finally { - cleanup?.(); - } - } + level, + hasBasic: true, + has256: level >= 2, + has16m: level >= 3 }; } - } -}); - -// node_modules/@azure/core-rest-pipeline/dist/commonjs/createPipelineFromOptions.js -var require_createPipelineFromOptions2 = __commonJS({ - "node_modules/@azure/core-rest-pipeline/dist/commonjs/createPipelineFromOptions.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.createPipelineFromOptions = createPipelineFromOptions; - var logPolicy_js_1 = require_logPolicy2(); - var pipeline_js_1 = require_pipeline2(); - var redirectPolicy_js_1 = require_redirectPolicy2(); - var userAgentPolicy_js_1 = require_userAgentPolicy2(); - var multipartPolicy_js_1 = require_multipartPolicy2(); - var decompressResponsePolicy_js_1 = require_decompressResponsePolicy2(); - var defaultRetryPolicy_js_1 = require_defaultRetryPolicy2(); - var formDataPolicy_js_1 = require_formDataPolicy2(); - var core_util_1 = require_commonjs4(); - var proxyPolicy_js_1 = require_proxyPolicy2(); - var setClientRequestIdPolicy_js_1 = require_setClientRequestIdPolicy(); - var agentPolicy_js_1 = require_agentPolicy2(); - var tlsPolicy_js_1 = require_tlsPolicy2(); - var tracingPolicy_js_1 = require_tracingPolicy(); - var wrapAbortSignalLikePolicy_js_1 = require_wrapAbortSignalLikePolicy(); - function createPipelineFromOptions(options) { - const pipeline = (0, pipeline_js_1.createEmptyPipeline)(); - if (core_util_1.isNodeLike) { - if (options.agent) { - pipeline.addPolicy((0, agentPolicy_js_1.agentPolicy)(options.agent)); + function supportsColor(haveStream, streamIsTTY) { + if (forceColor === 0) { + return 0; + } + if (hasFlag("color=16m") || hasFlag("color=full") || hasFlag("color=truecolor")) { + return 3; + } + if (hasFlag("color=256")) { + return 2; + } + if (haveStream && !streamIsTTY && forceColor === void 0) { + return 0; + } + const min = forceColor || 0; + if (env.TERM === "dumb") { + return min; + } + if (process.platform === "win32") { + const osRelease = os3.release().split("."); + if (Number(osRelease[0]) >= 10 && Number(osRelease[2]) >= 10586) { + return Number(osRelease[2]) >= 14931 ? 
3 : 2; } - if (options.tlsOptions) { - pipeline.addPolicy((0, tlsPolicy_js_1.tlsPolicy)(options.tlsOptions)); + return 1; + } + if ("CI" in env) { + if (["TRAVIS", "CIRCLECI", "APPVEYOR", "GITLAB_CI", "GITHUB_ACTIONS", "BUILDKITE"].some((sign) => sign in env) || env.CI_NAME === "codeship") { + return 1; } - pipeline.addPolicy((0, proxyPolicy_js_1.proxyPolicy)(options.proxyOptions)); - pipeline.addPolicy((0, decompressResponsePolicy_js_1.decompressResponsePolicy)()); + return min; } - pipeline.addPolicy((0, wrapAbortSignalLikePolicy_js_1.wrapAbortSignalLikePolicy)()); - pipeline.addPolicy((0, formDataPolicy_js_1.formDataPolicy)(), { beforePolicies: [multipartPolicy_js_1.multipartPolicyName] }); - pipeline.addPolicy((0, userAgentPolicy_js_1.userAgentPolicy)(options.userAgentOptions)); - pipeline.addPolicy((0, setClientRequestIdPolicy_js_1.setClientRequestIdPolicy)(options.telemetryOptions?.clientRequestIdHeaderName)); - pipeline.addPolicy((0, multipartPolicy_js_1.multipartPolicy)(), { afterPhase: "Deserialize" }); - pipeline.addPolicy((0, defaultRetryPolicy_js_1.defaultRetryPolicy)(options.retryOptions), { phase: "Retry" }); - pipeline.addPolicy((0, tracingPolicy_js_1.tracingPolicy)({ ...options.userAgentOptions, ...options.loggingOptions }), { - afterPhase: "Retry" - }); - if (core_util_1.isNodeLike) { - pipeline.addPolicy((0, redirectPolicy_js_1.redirectPolicy)(options.redirectOptions), { afterPhase: "Retry" }); + if ("TEAMCITY_VERSION" in env) { + return /^(9\.(0*[1-9]\d*)\.|\d{2,}\.)/.test(env.TEAMCITY_VERSION) ? 1 : 0; } - pipeline.addPolicy((0, logPolicy_js_1.logPolicy)(options.loggingOptions), { afterPhase: "Sign" }); - return pipeline; - } - } -}); - -// node_modules/@azure/core-rest-pipeline/dist/commonjs/defaultHttpClient.js -var require_defaultHttpClient2 = __commonJS({ - "node_modules/@azure/core-rest-pipeline/dist/commonjs/defaultHttpClient.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.createDefaultHttpClient = createDefaultHttpClient; - var ts_http_runtime_1 = require_commonjs(); - var wrapAbortSignal_js_1 = require_wrapAbortSignal(); - function createDefaultHttpClient() { - const client = (0, ts_http_runtime_1.createDefaultHttpClient)(); - return { - async sendRequest(request2) { - const { abortSignal, cleanup } = request2.abortSignal ? (0, wrapAbortSignal_js_1.wrapAbortSignalLike)(request2.abortSignal) : {}; - try { - request2.abortSignal = abortSignal; - return await client.sendRequest(request2); - } finally { - cleanup?.(); - } + if (env.COLORTERM === "truecolor") { + return 3; + } + if ("TERM_PROGRAM" in env) { + const version = parseInt((env.TERM_PROGRAM_VERSION || "").split(".")[0], 10); + switch (env.TERM_PROGRAM) { + case "iTerm.app": + return version >= 3 ? 
3 : 2; + case "Apple_Terminal": + return 2; } - }; - } - } -}); - -// node_modules/@azure/core-rest-pipeline/dist/commonjs/httpHeaders.js -var require_httpHeaders2 = __commonJS({ - "node_modules/@azure/core-rest-pipeline/dist/commonjs/httpHeaders.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.createHttpHeaders = createHttpHeaders; - var ts_http_runtime_1 = require_commonjs(); - function createHttpHeaders(rawHeaders) { - return (0, ts_http_runtime_1.createHttpHeaders)(rawHeaders); - } - } -}); - -// node_modules/@azure/core-rest-pipeline/dist/commonjs/pipelineRequest.js -var require_pipelineRequest2 = __commonJS({ - "node_modules/@azure/core-rest-pipeline/dist/commonjs/pipelineRequest.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.createPipelineRequest = createPipelineRequest; - var ts_http_runtime_1 = require_commonjs(); - function createPipelineRequest(options) { - return (0, ts_http_runtime_1.createPipelineRequest)(options); + } + if (/-256(color)?$/i.test(env.TERM)) { + return 2; + } + if (/^screen|^xterm|^vt100|^vt220|^rxvt|color|ansi|cygwin|linux/i.test(env.TERM)) { + return 1; + } + if ("COLORTERM" in env) { + return 1; + } + return min; } - } -}); - -// node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/exponentialRetryPolicy.js -var require_exponentialRetryPolicy2 = __commonJS({ - "node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/exponentialRetryPolicy.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.exponentialRetryPolicyName = void 0; - exports2.exponentialRetryPolicy = exponentialRetryPolicy; - var policies_1 = require_internal2(); - exports2.exponentialRetryPolicyName = policies_1.exponentialRetryPolicyName; - function exponentialRetryPolicy(options = {}) { - return (0, policies_1.exponentialRetryPolicy)(options); + function getSupportLevel(stream2) { + const level = supportsColor(stream2, stream2 && stream2.isTTY); + return translateLevel(level); } + module2.exports = { + supportsColor: getSupportLevel, + stdout: translateLevel(supportsColor(true, tty.isatty(1))), + stderr: translateLevel(supportsColor(true, tty.isatty(2))) + }; } }); -// node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/systemErrorRetryPolicy.js -var require_systemErrorRetryPolicy2 = __commonJS({ - "node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/systemErrorRetryPolicy.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.systemErrorRetryPolicyName = void 0; - exports2.systemErrorRetryPolicy = systemErrorRetryPolicy; - var policies_1 = require_internal2(); - exports2.systemErrorRetryPolicyName = policies_1.systemErrorRetryPolicyName; - function systemErrorRetryPolicy(options = {}) { - return (0, policies_1.systemErrorRetryPolicy)(options); +// node_modules/debug/src/node.js +var require_node = __commonJS({ + "node_modules/debug/src/node.js"(exports2, module2) { + var tty = require("tty"); + var util = require("util"); + exports2.init = init; + exports2.log = log; + exports2.formatArgs = formatArgs; + exports2.save = save; + exports2.load = load2; + exports2.useColors = useColors; + exports2.destroy = util.deprecate( + () => { + }, + "Instance method `debug.destroy()` is deprecated and no longer does anything. It will be removed in the next major version of `debug`." 
+ ); + exports2.colors = [6, 2, 3, 4, 5, 1]; + try { + const supportsColor = require_supports_color(); + if (supportsColor && (supportsColor.stderr || supportsColor).level >= 2) { + exports2.colors = [ + 20, + 21, + 26, + 27, + 32, + 33, + 38, + 39, + 40, + 41, + 42, + 43, + 44, + 45, + 56, + 57, + 62, + 63, + 68, + 69, + 74, + 75, + 76, + 77, + 78, + 79, + 80, + 81, + 92, + 93, + 98, + 99, + 112, + 113, + 128, + 129, + 134, + 135, + 148, + 149, + 160, + 161, + 162, + 163, + 164, + 165, + 166, + 167, + 168, + 169, + 170, + 171, + 172, + 173, + 178, + 179, + 184, + 185, + 196, + 197, + 198, + 199, + 200, + 201, + 202, + 203, + 204, + 205, + 206, + 207, + 208, + 209, + 214, + 215, + 220, + 221 + ]; + } + } catch (error3) { + } + exports2.inspectOpts = Object.keys(process.env).filter((key) => { + return /^debug_/i.test(key); + }).reduce((obj, key) => { + const prop = key.substring(6).toLowerCase().replace(/_([a-z])/g, (_, k) => { + return k.toUpperCase(); + }); + let val = process.env[key]; + if (/^(yes|on|true|enabled)$/i.test(val)) { + val = true; + } else if (/^(no|off|false|disabled)$/i.test(val)) { + val = false; + } else if (val === "null") { + val = null; + } else { + val = Number(val); + } + obj[prop] = val; + return obj; + }, {}); + function useColors() { + return "colors" in exports2.inspectOpts ? Boolean(exports2.inspectOpts.colors) : tty.isatty(process.stderr.fd); + } + function formatArgs(args) { + const { namespace: name, useColors: useColors2 } = this; + if (useColors2) { + const c = this.color; + const colorCode = "\x1B[3" + (c < 8 ? c : "8;5;" + c); + const prefix = ` ${colorCode};1m${name} \x1B[0m`; + args[0] = prefix + args[0].split("\n").join("\n" + prefix); + args.push(colorCode + "m+" + module2.exports.humanize(this.diff) + "\x1B[0m"); + } else { + args[0] = getDate() + name + " " + args[0]; + } + } + function getDate() { + if (exports2.inspectOpts.hideDate) { + return ""; + } + return (/* @__PURE__ */ new Date()).toISOString() + " "; + } + function log(...args) { + return process.stderr.write(util.formatWithOptions(exports2.inspectOpts, ...args) + "\n"); + } + function save(namespaces) { + if (namespaces) { + process.env.DEBUG = namespaces; + } else { + delete process.env.DEBUG; + } + } + function load2() { + return process.env.DEBUG; + } + function init(debug6) { + debug6.inspectOpts = {}; + const keys = Object.keys(exports2.inspectOpts); + for (let i = 0; i < keys.length; i++) { + debug6.inspectOpts[keys[i]] = exports2.inspectOpts[keys[i]]; + } } + module2.exports = require_common()(exports2); + var { formatters } = module2.exports; + formatters.o = function(v) { + this.inspectOpts.colors = this.useColors; + return util.inspect(v, this.inspectOpts).split("\n").map((str2) => str2.trim()).join(" "); + }; + formatters.O = function(v) { + this.inspectOpts.colors = this.useColors; + return util.inspect(v, this.inspectOpts); + }; } }); -// node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/throttlingRetryPolicy.js -var require_throttlingRetryPolicy2 = __commonJS({ - "node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/throttlingRetryPolicy.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.throttlingRetryPolicyName = void 0; - exports2.throttlingRetryPolicy = throttlingRetryPolicy; - var policies_1 = require_internal2(); - exports2.throttlingRetryPolicyName = policies_1.throttlingRetryPolicyName; - function throttlingRetryPolicy(options = {}) { - return (0, 
policies_1.throttlingRetryPolicy)(options); +// node_modules/debug/src/index.js +var require_src = __commonJS({ + "node_modules/debug/src/index.js"(exports2, module2) { + if (typeof process === "undefined" || process.type === "renderer" || process.browser === true || process.__nwjs) { + module2.exports = require_browser(); + } else { + module2.exports = require_node(); } } }); -// node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/retryPolicy.js -var require_retryPolicy2 = __commonJS({ - "node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/retryPolicy.js"(exports2) { +// node_modules/agent-base/dist/helpers.js +var require_helpers3 = __commonJS({ + "node_modules/agent-base/dist/helpers.js"(exports2) { "use strict"; + var __createBinding2 = exports2 && exports2.__createBinding || (Object.create ? (function(o, m, k, k2) { + if (k2 === void 0) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { + return m[k]; + } }; + } + Object.defineProperty(o, k2, desc); + }) : (function(o, m, k, k2) { + if (k2 === void 0) k2 = k; + o[k2] = m[k]; + })); + var __setModuleDefault2 = exports2 && exports2.__setModuleDefault || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); + }) : function(o, v) { + o["default"] = v; + }); + var __importStar2 = exports2 && exports2.__importStar || function(mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) { + for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding2(result, mod, k); + } + __setModuleDefault2(result, mod); + return result; + }; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.retryPolicy = retryPolicy; - var logger_1 = require_commonjs2(); - var constants_js_1 = require_constants14(); - var policies_1 = require_internal2(); - var retryPolicyLogger = (0, logger_1.createClientLogger)("core-rest-pipeline retryPolicy"); - function retryPolicy(strategies, options = { maxRetries: constants_js_1.DEFAULT_RETRY_POLICY_COUNT }) { - return (0, policies_1.retryPolicy)(strategies, { - logger: retryPolicyLogger, - ...options + exports2.req = exports2.json = exports2.toBuffer = void 0; + var http = __importStar2(require("http")); + var https2 = __importStar2(require("https")); + async function toBuffer(stream2) { + let length = 0; + const chunks = []; + for await (const chunk of stream2) { + length += chunk.length; + chunks.push(chunk); + } + return Buffer.concat(chunks, length); + } + exports2.toBuffer = toBuffer; + async function json2(stream2) { + const buf = await toBuffer(stream2); + const str2 = buf.toString("utf8"); + try { + return JSON.parse(str2); + } catch (_err) { + const err = _err; + err.message += ` (input: ${str2})`; + throw err; + } + } + exports2.json = json2; + function req(url2, opts = {}) { + const href = typeof url2 === "string" ? url2 : url2.href; + const req2 = (href.startsWith("https:") ? 
https2 : http).request(url2, opts); + const promise = new Promise((resolve6, reject) => { + req2.once("response", resolve6).once("error", reject).end(); }); + req2.then = promise.then.bind(promise); + return req2; } + exports2.req = req; } }); -// node_modules/@azure/core-rest-pipeline/dist/commonjs/util/tokenCycler.js -var require_tokenCycler = __commonJS({ - "node_modules/@azure/core-rest-pipeline/dist/commonjs/util/tokenCycler.js"(exports2) { +// node_modules/agent-base/dist/index.js +var require_dist = __commonJS({ + "node_modules/agent-base/dist/index.js"(exports2) { "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.DEFAULT_CYCLER_OPTIONS = void 0; - exports2.createTokenCycler = createTokenCycler; - var core_util_1 = require_commonjs4(); - exports2.DEFAULT_CYCLER_OPTIONS = { - forcedRefreshWindowInMs: 1e3, - // Force waiting for a refresh 1s before the token expires - retryIntervalInMs: 3e3, - // Allow refresh attempts every 3s - refreshWindowInMs: 1e3 * 60 * 2 - // Start refreshing 2m before expiry + var __createBinding2 = exports2 && exports2.__createBinding || (Object.create ? (function(o, m, k, k2) { + if (k2 === void 0) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { + return m[k]; + } }; + } + Object.defineProperty(o, k2, desc); + }) : (function(o, m, k, k2) { + if (k2 === void 0) k2 = k; + o[k2] = m[k]; + })); + var __setModuleDefault2 = exports2 && exports2.__setModuleDefault || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); + }) : function(o, v) { + o["default"] = v; + }); + var __importStar2 = exports2 && exports2.__importStar || function(mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) { + for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding2(result, mod, k); + } + __setModuleDefault2(result, mod); + return result; }; - async function beginRefresh(getAccessToken, retryIntervalInMs, refreshTimeout) { - async function tryGetAccessToken() { - if (Date.now() < refreshTimeout) { - try { - return await getAccessToken(); - } catch { - return null; + var __exportStar2 = exports2 && exports2.__exportStar || function(m, exports3) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports3, p)) __createBinding2(exports3, m, p); + }; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.Agent = void 0; + var net = __importStar2(require("net")); + var http = __importStar2(require("http")); + var https_1 = require("https"); + __exportStar2(require_helpers3(), exports2); + var INTERNAL = /* @__PURE__ */ Symbol("AgentBaseInternalState"); + var Agent = class extends http.Agent { + constructor(opts) { + super(opts); + this[INTERNAL] = {}; + } + /** + * Determine whether this is an `http` or `https` request. 
+ */ + isSecureEndpoint(options) { + if (options) { + if (typeof options.secureEndpoint === "boolean") { + return options.secureEndpoint; } - } else { - const finalToken = await getAccessToken(); - if (finalToken === null) { - throw new Error("Failed to refresh access token."); + if (typeof options.protocol === "string") { + return options.protocol === "https:"; } - return finalToken; } + const { stack } = new Error(); + if (typeof stack !== "string") + return false; + return stack.split("\n").some((l) => l.indexOf("(https.js:") !== -1 || l.indexOf("node:https:") !== -1); } - let token = await tryGetAccessToken(); - while (token === null) { - await (0, core_util_1.delay)(retryIntervalInMs); - token = await tryGetAccessToken(); + // In order to support async signatures in `connect()` and Node's native + // connection pooling in `http.Agent`, the array of sockets for each origin + // has to be updated synchronously. This is so the length of the array is + // accurate when `addRequest()` is next called. We achieve this by creating a + // fake socket and adding it to `sockets[origin]` and incrementing + // `totalSocketCount`. + incrementSockets(name) { + if (this.maxSockets === Infinity && this.maxTotalSockets === Infinity) { + return null; + } + if (!this.sockets[name]) { + this.sockets[name] = []; + } + const fakeSocket = new net.Socket({ writable: false }); + this.sockets[name].push(fakeSocket); + this.totalSocketCount++; + return fakeSocket; } - return token; - } - function createTokenCycler(credential, tokenCyclerOptions) { - let refreshWorker = null; - let token = null; - let tenantId; - const options = { - ...exports2.DEFAULT_CYCLER_OPTIONS, - ...tokenCyclerOptions - }; - const cycler = { - /** - * Produces true if a refresh job is currently in progress. - */ - get isRefreshing() { - return refreshWorker !== null; - }, - /** - * Produces true if the cycler SHOULD refresh (we are within the refresh - * window and not already refreshing) - */ - get shouldRefresh() { - if (cycler.isRefreshing) { - return false; - } - if (token?.refreshAfterTimestamp && token.refreshAfterTimestamp < Date.now()) { - return true; + decrementSockets(name, socket) { + if (!this.sockets[name] || socket === null) { + return; + } + const sockets = this.sockets[name]; + const index = sockets.indexOf(socket); + if (index !== -1) { + sockets.splice(index, 1); + this.totalSocketCount--; + if (sockets.length === 0) { + delete this.sockets[name]; } - return (token?.expiresOnTimestamp ?? 0) - options.refreshWindowInMs < Date.now(); - }, - /** - * Produces true if the cycler MUST refresh (null or nearly-expired - * token). - */ - get mustRefresh() { - return token === null || token.expiresOnTimestamp - options.forcedRefreshWindowInMs < Date.now(); } - }; - function refresh(scopes, getTokenOptions) { - if (!cycler.isRefreshing) { - const tryGetAccessToken = () => credential.getToken(scopes, getTokenOptions); - refreshWorker = beginRefresh( - tryGetAccessToken, - options.retryIntervalInMs, - // If we don't have a token, then we should timeout immediately - token?.expiresOnTimestamp ?? Date.now() - ).then((_token) => { - refreshWorker = null; - token = _token; - tenantId = getTokenOptions.tenantId; - return token; - }).catch((reason) => { - refreshWorker = null; - token = null; - tenantId = void 0; - throw reason; - }); + } + // In order to properly update the socket pool, we need to call `getName()` on + // the core `https.Agent` if it is a secureEndpoint. 
+ getName(options) { + const secureEndpoint = typeof options.secureEndpoint === "boolean" ? options.secureEndpoint : this.isSecureEndpoint(options); + if (secureEndpoint) { + return https_1.Agent.prototype.getName.call(this, options); } - return refreshWorker; + return super.getName(options); } - return async (scopes, tokenOptions) => { - const hasClaimChallenge = Boolean(tokenOptions.claims); - const tenantIdChanged = tenantId !== tokenOptions.tenantId; - if (hasClaimChallenge) { - token = null; + createSocket(req, options, cb) { + const connectOpts = { + ...options, + secureEndpoint: this.isSecureEndpoint(options) + }; + const name = this.getName(connectOpts); + const fakeSocket = this.incrementSockets(name); + Promise.resolve().then(() => this.connect(req, connectOpts)).then((socket) => { + this.decrementSockets(name, fakeSocket); + if (socket instanceof http.Agent) { + try { + return socket.addRequest(req, connectOpts); + } catch (err) { + return cb(err); + } + } + this[INTERNAL].currentSocket = socket; + super.createSocket(req, options, cb); + }, (err) => { + this.decrementSockets(name, fakeSocket); + cb(err); + }); + } + createConnection() { + const socket = this[INTERNAL].currentSocket; + this[INTERNAL].currentSocket = void 0; + if (!socket) { + throw new Error("No socket was returned in the `connect()` function"); } - const mustRefresh = tenantIdChanged || hasClaimChallenge || cycler.mustRefresh; - if (mustRefresh) { - return refresh(scopes, tokenOptions); + return socket; + } + get defaultPort() { + return this[INTERNAL].defaultPort ?? (this.protocol === "https:" ? 443 : 80); + } + set defaultPort(v) { + if (this[INTERNAL]) { + this[INTERNAL].defaultPort = v; } - if (cycler.shouldRefresh) { - refresh(scopes, tokenOptions); + } + get protocol() { + return this[INTERNAL].protocol ?? (this.isSecureEndpoint() ? "https:" : "http:"); + } + set protocol(v) { + if (this[INTERNAL]) { + this[INTERNAL].protocol = v; } - return token; - }; - } + } + }; + exports2.Agent = Agent; } }); -// node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/bearerTokenAuthenticationPolicy.js -var require_bearerTokenAuthenticationPolicy = __commonJS({ - "node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/bearerTokenAuthenticationPolicy.js"(exports2) { - "use strict"; +// node_modules/https-proxy-agent/dist/parse-proxy-response.js +var require_parse_proxy_response = __commonJS({ + "node_modules/https-proxy-agent/dist/parse-proxy-response.js"(exports2) { + "use strict"; + var __importDefault2 = exports2 && exports2.__importDefault || function(mod) { + return mod && mod.__esModule ? 
mod : { "default": mod }; + }; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.bearerTokenAuthenticationPolicyName = void 0; - exports2.bearerTokenAuthenticationPolicy = bearerTokenAuthenticationPolicy; - exports2.parseChallenges = parseChallenges; - var tokenCycler_js_1 = require_tokenCycler(); - var log_js_1 = require_log3(); - var restError_js_1 = require_restError3(); - exports2.bearerTokenAuthenticationPolicyName = "bearerTokenAuthenticationPolicy"; - async function trySendRequest(request2, next) { - try { - return [await next(request2), void 0]; - } catch (e) { - if ((0, restError_js_1.isRestError)(e) && e.response) { - return [e.response, e]; - } else { - throw e; + exports2.parseProxyResponse = void 0; + var debug_1 = __importDefault2(require_src()); + var debug6 = (0, debug_1.default)("https-proxy-agent:parse-proxy-response"); + function parseProxyResponse(socket) { + return new Promise((resolve6, reject) => { + let buffersLength = 0; + const buffers = []; + function read() { + const b = socket.read(); + if (b) + ondata(b); + else + socket.once("readable", read); } - } - } - async function defaultAuthorizeRequest(options) { - const { scopes, getAccessToken, request: request2 } = options; - const getTokenOptions = { - abortSignal: request2.abortSignal, - tracingOptions: request2.tracingOptions, - enableCae: true - }; - const accessToken = await getAccessToken(scopes, getTokenOptions); - if (accessToken) { - options.request.headers.set("Authorization", `Bearer ${accessToken.token}`); - } - } - function isChallengeResponse(response) { - return response.status === 401 && response.headers.has("WWW-Authenticate"); - } - async function authorizeRequestOnCaeChallenge(onChallengeOptions, caeClaims) { - const { scopes } = onChallengeOptions; - const accessToken = await onChallengeOptions.getAccessToken(scopes, { - enableCae: true, - claims: caeClaims - }); - if (!accessToken) { - return false; - } - onChallengeOptions.request.headers.set("Authorization", `${accessToken.tokenType ?? "Bearer"} ${accessToken.token}`); - return true; - } - function bearerTokenAuthenticationPolicy(options) { - const { credential, scopes, challengeCallbacks } = options; - const logger = options.logger || log_js_1.logger; - const callbacks = { - authorizeRequest: challengeCallbacks?.authorizeRequest?.bind(challengeCallbacks) ?? defaultAuthorizeRequest, - authorizeRequestOnChallenge: challengeCallbacks?.authorizeRequestOnChallenge?.bind(challengeCallbacks) - }; - const getAccessToken = credential ? (0, tokenCycler_js_1.createTokenCycler)( - credential - /* , options */ - ) : () => Promise.resolve(null); - return { - name: exports2.bearerTokenAuthenticationPolicyName, - /** - * If there's no challenge parameter: - * - It will try to retrieve the token using the cache, or the credential's getToken. - * - Then it will try the next policy with or without the retrieved token. - * - * It uses the challenge parameters to: - * - Skip a first attempt to get the token from the credential if there's no cached token, - * since it expects the token to be retrievable only after the challenge. - * - Prepare the outgoing request if the `prepareRequest` method has been provided. - * - Send an initial request to receive the challenge if it fails. - * - Process a challenge if the response contains it. - * - Retrieve a token with the challenge information, then re-send the request. 
- */ - async sendRequest(request2, next) { - if (!request2.url.toLowerCase().startsWith("https://")) { - throw new Error("Bearer token authentication is not permitted for non-TLS protected (non-https) URLs."); + function cleanup() { + socket.removeListener("end", onend); + socket.removeListener("error", onerror); + socket.removeListener("readable", read); + } + function onend() { + cleanup(); + debug6("onend"); + reject(new Error("Proxy connection ended before receiving CONNECT response")); + } + function onerror(err) { + cleanup(); + debug6("onerror %o", err); + reject(err); + } + function ondata(b) { + buffers.push(b); + buffersLength += b.length; + const buffered = Buffer.concat(buffers, buffersLength); + const endOfHeaders = buffered.indexOf("\r\n\r\n"); + if (endOfHeaders === -1) { + debug6("have not received end of HTTP headers yet..."); + read(); + return; } - await callbacks.authorizeRequest({ - scopes: Array.isArray(scopes) ? scopes : [scopes], - request: request2, - getAccessToken, - logger - }); - let response; - let error3; - let shouldSendRequest; - [response, error3] = await trySendRequest(request2, next); - if (isChallengeResponse(response)) { - let claims = getCaeChallengeClaims(response.headers.get("WWW-Authenticate")); - if (claims) { - let parsedClaim; - try { - parsedClaim = atob(claims); - } catch (e) { - logger.warning(`The WWW-Authenticate header contains "claims" that cannot be parsed. Unable to perform the Continuous Access Evaluation authentication flow. Unparsable claims: ${claims}`); - return response; - } - shouldSendRequest = await authorizeRequestOnCaeChallenge({ - scopes: Array.isArray(scopes) ? scopes : [scopes], - response, - request: request2, - getAccessToken, - logger - }, parsedClaim); - if (shouldSendRequest) { - [response, error3] = await trySendRequest(request2, next); - } - } else if (callbacks.authorizeRequestOnChallenge) { - shouldSendRequest = await callbacks.authorizeRequestOnChallenge({ - scopes: Array.isArray(scopes) ? scopes : [scopes], - request: request2, - response, - getAccessToken, - logger - }); - if (shouldSendRequest) { - [response, error3] = await trySendRequest(request2, next); - } - if (isChallengeResponse(response)) { - claims = getCaeChallengeClaims(response.headers.get("WWW-Authenticate")); - if (claims) { - let parsedClaim; - try { - parsedClaim = atob(claims); - } catch (e) { - logger.warning(`The WWW-Authenticate header contains "claims" that cannot be parsed. Unable to perform the Continuous Access Evaluation authentication flow. Unparsable claims: ${claims}`); - return response; - } - shouldSendRequest = await authorizeRequestOnCaeChallenge({ - scopes: Array.isArray(scopes) ? 
scopes : [scopes], - response, - request: request2, - getAccessToken, - logger - }, parsedClaim); - if (shouldSendRequest) { - [response, error3] = await trySendRequest(request2, next); - } - } - } - } + const headerParts = buffered.slice(0, endOfHeaders).toString("ascii").split("\r\n"); + const firstLine = headerParts.shift(); + if (!firstLine) { + socket.destroy(); + return reject(new Error("No header received from proxy CONNECT response")); } - if (error3) { - throw error3; - } else { - return response; + const firstLineParts = firstLine.split(" "); + const statusCode = +firstLineParts[1]; + const statusText = firstLineParts.slice(2).join(" "); + const headers = {}; + for (const header of headerParts) { + if (!header) + continue; + const firstColon = header.indexOf(":"); + if (firstColon === -1) { + socket.destroy(); + return reject(new Error(`Invalid header from proxy CONNECT response: "${header}"`)); + } + const key = header.slice(0, firstColon).toLowerCase(); + const value = header.slice(firstColon + 1).trimStart(); + const current = headers[key]; + if (typeof current === "string") { + headers[key] = [current, value]; + } else if (Array.isArray(current)) { + current.push(value); + } else { + headers[key] = value; + } } + debug6("got proxy server response: %o %o", firstLine, headers); + cleanup(); + resolve6({ + connect: { + statusCode, + statusText, + headers + }, + buffered + }); } - }; - } - function parseChallenges(challenges) { - const challengeRegex = /(\w+)\s+((?:\w+=(?:"[^"]*"|[^,]*),?\s*)+)/g; - const paramRegex = /(\w+)="([^"]*)"/g; - const parsedChallenges = []; - let match; - while ((match = challengeRegex.exec(challenges)) !== null) { - const scheme = match[1]; - const paramsString = match[2]; - const params = {}; - let paramMatch; - while ((paramMatch = paramRegex.exec(paramsString)) !== null) { - params[paramMatch[1]] = paramMatch[2]; - } - parsedChallenges.push({ scheme, params }); - } - return parsedChallenges; - } - function getCaeChallengeClaims(challenges) { - if (!challenges) { - return; - } - const parsedChallenges = parseChallenges(challenges); - return parsedChallenges.find((x) => x.scheme === "Bearer" && x.params.claims && x.params.error === "insufficient_claims")?.params.claims; + socket.on("error", onerror); + socket.on("end", onend); + read(); + }); } + exports2.parseProxyResponse = parseProxyResponse; } }); -// node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/ndJsonPolicy.js -var require_ndJsonPolicy = __commonJS({ - "node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/ndJsonPolicy.js"(exports2) { +// node_modules/https-proxy-agent/dist/index.js +var require_dist2 = __commonJS({ + "node_modules/https-proxy-agent/dist/index.js"(exports2) { "use strict"; + var __createBinding2 = exports2 && exports2.__createBinding || (Object.create ? (function(o, m, k, k2) { + if (k2 === void 0) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { + return m[k]; + } }; + } + Object.defineProperty(o, k2, desc); + }) : (function(o, m, k, k2) { + if (k2 === void 0) k2 = k; + o[k2] = m[k]; + })); + var __setModuleDefault2 = exports2 && exports2.__setModuleDefault || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); + }) : function(o, v) { + o["default"] = v; + }); + var __importStar2 = exports2 && exports2.__importStar || function(mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) { + for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding2(result, mod, k); + } + __setModuleDefault2(result, mod); + return result; + }; + var __importDefault2 = exports2 && exports2.__importDefault || function(mod) { + return mod && mod.__esModule ? mod : { "default": mod }; + }; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.ndJsonPolicyName = void 0; - exports2.ndJsonPolicy = ndJsonPolicy; - exports2.ndJsonPolicyName = "ndJsonPolicy"; - function ndJsonPolicy() { - return { - name: exports2.ndJsonPolicyName, - async sendRequest(request2, next) { - if (typeof request2.body === "string" && request2.body.startsWith("[")) { - const body = JSON.parse(request2.body); - if (Array.isArray(body)) { - request2.body = body.map((item) => JSON.stringify(item) + "\n").join(""); - } + exports2.HttpsProxyAgent = void 0; + var net = __importStar2(require("net")); + var tls = __importStar2(require("tls")); + var assert_1 = __importDefault2(require("assert")); + var debug_1 = __importDefault2(require_src()); + var agent_base_1 = require_dist(); + var url_1 = require("url"); + var parse_proxy_response_1 = require_parse_proxy_response(); + var debug6 = (0, debug_1.default)("https-proxy-agent"); + var setServernameFromNonIpHost = (options) => { + if (options.servername === void 0 && options.host && !net.isIP(options.host)) { + return { + ...options, + servername: options.host + }; + } + return options; + }; + var HttpsProxyAgent = class extends agent_base_1.Agent { + constructor(proxy, opts) { + super(opts); + this.options = { path: void 0 }; + this.proxy = typeof proxy === "string" ? new url_1.URL(proxy) : proxy; + this.proxyHeaders = opts?.headers ?? {}; + debug6("Creating new HttpsProxyAgent instance: %o", this.proxy.href); + const host = (this.proxy.hostname || this.proxy.host).replace(/^\[|\]$/g, ""); + const port = this.proxy.port ? parseInt(this.proxy.port, 10) : this.proxy.protocol === "https:" ? 443 : 80; + this.connectOpts = { + // Attempt to negotiate http/1.1 for proxy servers that support http/2 + ALPNProtocols: ["http/1.1"], + ...opts ? omit2(opts, "headers") : null, + host, + port + }; + } + /** + * Called when the node-core HTTP client library is creating a + * new HTTP request. + */ + async connect(req, opts) { + const { proxy } = this; + if (!opts.host) { + throw new TypeError('No "host" provided'); + } + let socket; + if (proxy.protocol === "https:") { + debug6("Creating `tls.Socket`: %o", this.connectOpts); + socket = tls.connect(setServernameFromNonIpHost(this.connectOpts)); + } else { + debug6("Creating `net.Socket`: %o", this.connectOpts); + socket = net.connect(this.connectOpts); + } + const headers = typeof this.proxyHeaders === "function" ? this.proxyHeaders() : { ...this.proxyHeaders }; + const host = net.isIPv6(opts.host) ? 
`[${opts.host}]` : opts.host; + let payload = `CONNECT ${host}:${opts.port} HTTP/1.1\r +`; + if (proxy.username || proxy.password) { + const auth2 = `${decodeURIComponent(proxy.username)}:${decodeURIComponent(proxy.password)}`; + headers["Proxy-Authorization"] = `Basic ${Buffer.from(auth2).toString("base64")}`; + } + headers.Host = `${host}:${opts.port}`; + if (!headers["Proxy-Connection"]) { + headers["Proxy-Connection"] = this.keepAlive ? "Keep-Alive" : "close"; + } + for (const name of Object.keys(headers)) { + payload += `${name}: ${headers[name]}\r +`; + } + const proxyResponsePromise = (0, parse_proxy_response_1.parseProxyResponse)(socket); + socket.write(`${payload}\r +`); + const { connect, buffered } = await proxyResponsePromise; + req.emit("proxyConnect", connect); + this.emit("proxyConnect", connect, req); + if (connect.statusCode === 200) { + req.once("socket", resume); + if (opts.secureEndpoint) { + debug6("Upgrading socket connection to TLS"); + return tls.connect({ + ...omit2(setServernameFromNonIpHost(opts), "host", "path", "port"), + socket + }); } - return next(request2); + return socket; } - }; - } - } -}); - -// node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/auxiliaryAuthenticationHeaderPolicy.js -var require_auxiliaryAuthenticationHeaderPolicy = __commonJS({ - "node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/auxiliaryAuthenticationHeaderPolicy.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.auxiliaryAuthenticationHeaderPolicyName = void 0; - exports2.auxiliaryAuthenticationHeaderPolicy = auxiliaryAuthenticationHeaderPolicy; - var tokenCycler_js_1 = require_tokenCycler(); - var log_js_1 = require_log3(); - exports2.auxiliaryAuthenticationHeaderPolicyName = "auxiliaryAuthenticationHeaderPolicy"; - var AUTHORIZATION_AUXILIARY_HEADER = "x-ms-authorization-auxiliary"; - async function sendAuthorizeRequest(options) { - const { scopes, getAccessToken, request: request2 } = options; - const getTokenOptions = { - abortSignal: request2.abortSignal, - tracingOptions: request2.tracingOptions - }; - return (await getAccessToken(scopes, getTokenOptions))?.token ?? 
""; + socket.destroy(); + const fakeSocket = new net.Socket({ writable: false }); + fakeSocket.readable = true; + req.once("socket", (s) => { + debug6("Replaying proxy buffer for failed request"); + (0, assert_1.default)(s.listenerCount("data") > 0); + s.push(buffered); + s.push(null); + }); + return fakeSocket; + } + }; + HttpsProxyAgent.protocols = ["http", "https"]; + exports2.HttpsProxyAgent = HttpsProxyAgent; + function resume(socket) { + socket.resume(); } - function auxiliaryAuthenticationHeaderPolicy(options) { - const { credentials, scopes } = options; - const logger = options.logger || log_js_1.logger; - const tokenCyclerMap = /* @__PURE__ */ new WeakMap(); - return { - name: exports2.auxiliaryAuthenticationHeaderPolicyName, - async sendRequest(request2, next) { - if (!request2.url.toLowerCase().startsWith("https://")) { - throw new Error("Bearer token authentication for auxiliary header is not permitted for non-TLS protected (non-https) URLs."); - } - if (!credentials || credentials.length === 0) { - logger.info(`${exports2.auxiliaryAuthenticationHeaderPolicyName} header will not be set due to empty credentials.`); - return next(request2); - } - const tokenPromises = []; - for (const credential of credentials) { - let getAccessToken = tokenCyclerMap.get(credential); - if (!getAccessToken) { - getAccessToken = (0, tokenCycler_js_1.createTokenCycler)(credential); - tokenCyclerMap.set(credential, getAccessToken); - } - tokenPromises.push(sendAuthorizeRequest({ - scopes: Array.isArray(scopes) ? scopes : [scopes], - request: request2, - getAccessToken, - logger - })); - } - const auxiliaryTokens = (await Promise.all(tokenPromises)).filter((token) => Boolean(token)); - if (auxiliaryTokens.length === 0) { - logger.warning(`None of the auxiliary tokens are valid. ${AUTHORIZATION_AUXILIARY_HEADER} header will not be set.`); - return next(request2); - } - request2.headers.set(AUTHORIZATION_AUXILIARY_HEADER, auxiliaryTokens.map((token) => `Bearer ${token}`).join(", ")); - return next(request2); + function omit2(obj, ...keys) { + const ret = {}; + let key; + for (key in obj) { + if (!keys.includes(key)) { + ret[key] = obj[key]; } - }; + } + return ret; } } }); -// node_modules/@azure/core-rest-pipeline/dist/commonjs/index.js -var require_commonjs6 = __commonJS({ - "node_modules/@azure/core-rest-pipeline/dist/commonjs/index.js"(exports2) { +// node_modules/http-proxy-agent/dist/index.js +var require_dist3 = __commonJS({ + "node_modules/http-proxy-agent/dist/index.js"(exports2) { "use strict"; + var __createBinding2 = exports2 && exports2.__createBinding || (Object.create ? (function(o, m, k, k2) { + if (k2 === void 0) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { + return m[k]; + } }; + } + Object.defineProperty(o, k2, desc); + }) : (function(o, m, k, k2) { + if (k2 === void 0) k2 = k; + o[k2] = m[k]; + })); + var __setModuleDefault2 = exports2 && exports2.__setModuleDefault || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); + }) : function(o, v) { + o["default"] = v; + }); + var __importStar2 = exports2 && exports2.__importStar || function(mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) { + for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding2(result, mod, k); + } + __setModuleDefault2(result, mod); + return result; + }; + var __importDefault2 = exports2 && exports2.__importDefault || function(mod) { + return mod && mod.__esModule ? mod : { "default": mod }; + }; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.createFileFromStream = exports2.createFile = exports2.agentPolicyName = exports2.agentPolicy = exports2.auxiliaryAuthenticationHeaderPolicyName = exports2.auxiliaryAuthenticationHeaderPolicy = exports2.ndJsonPolicyName = exports2.ndJsonPolicy = exports2.bearerTokenAuthenticationPolicyName = exports2.bearerTokenAuthenticationPolicy = exports2.formDataPolicyName = exports2.formDataPolicy = exports2.tlsPolicyName = exports2.tlsPolicy = exports2.userAgentPolicyName = exports2.userAgentPolicy = exports2.defaultRetryPolicy = exports2.tracingPolicyName = exports2.tracingPolicy = exports2.retryPolicy = exports2.throttlingRetryPolicyName = exports2.throttlingRetryPolicy = exports2.systemErrorRetryPolicyName = exports2.systemErrorRetryPolicy = exports2.redirectPolicyName = exports2.redirectPolicy = exports2.getDefaultProxySettings = exports2.proxyPolicyName = exports2.proxyPolicy = exports2.multipartPolicyName = exports2.multipartPolicy = exports2.logPolicyName = exports2.logPolicy = exports2.setClientRequestIdPolicyName = exports2.setClientRequestIdPolicy = exports2.exponentialRetryPolicyName = exports2.exponentialRetryPolicy = exports2.decompressResponsePolicyName = exports2.decompressResponsePolicy = exports2.isRestError = exports2.RestError = exports2.createPipelineRequest = exports2.createHttpHeaders = exports2.createDefaultHttpClient = exports2.createPipelineFromOptions = exports2.createEmptyPipeline = void 0; - var pipeline_js_1 = require_pipeline2(); - Object.defineProperty(exports2, "createEmptyPipeline", { enumerable: true, get: function() { - return pipeline_js_1.createEmptyPipeline; - } }); - var createPipelineFromOptions_js_1 = require_createPipelineFromOptions2(); - Object.defineProperty(exports2, "createPipelineFromOptions", { enumerable: true, get: function() { - return createPipelineFromOptions_js_1.createPipelineFromOptions; - } }); - var defaultHttpClient_js_1 = require_defaultHttpClient2(); - Object.defineProperty(exports2, "createDefaultHttpClient", { enumerable: true, get: function() { - return defaultHttpClient_js_1.createDefaultHttpClient; - } }); - var httpHeaders_js_1 = require_httpHeaders2(); - Object.defineProperty(exports2, "createHttpHeaders", { enumerable: true, get: function() { - return httpHeaders_js_1.createHttpHeaders; - } }); - var pipelineRequest_js_1 = require_pipelineRequest2(); - Object.defineProperty(exports2, "createPipelineRequest", { enumerable: true, get: function() { - return pipelineRequest_js_1.createPipelineRequest; - } }); - var restError_js_1 = require_restError3(); - Object.defineProperty(exports2, "RestError", { enumerable: true, get: function() { - return restError_js_1.RestError; - } }); - Object.defineProperty(exports2, "isRestError", { enumerable: true, get: function() { - return restError_js_1.isRestError; - } }); - var decompressResponsePolicy_js_1 = 
require_decompressResponsePolicy2(); - Object.defineProperty(exports2, "decompressResponsePolicy", { enumerable: true, get: function() { - return decompressResponsePolicy_js_1.decompressResponsePolicy; - } }); - Object.defineProperty(exports2, "decompressResponsePolicyName", { enumerable: true, get: function() { - return decompressResponsePolicy_js_1.decompressResponsePolicyName; - } }); - var exponentialRetryPolicy_js_1 = require_exponentialRetryPolicy2(); - Object.defineProperty(exports2, "exponentialRetryPolicy", { enumerable: true, get: function() { - return exponentialRetryPolicy_js_1.exponentialRetryPolicy; - } }); - Object.defineProperty(exports2, "exponentialRetryPolicyName", { enumerable: true, get: function() { - return exponentialRetryPolicy_js_1.exponentialRetryPolicyName; - } }); - var setClientRequestIdPolicy_js_1 = require_setClientRequestIdPolicy(); - Object.defineProperty(exports2, "setClientRequestIdPolicy", { enumerable: true, get: function() { - return setClientRequestIdPolicy_js_1.setClientRequestIdPolicy; - } }); - Object.defineProperty(exports2, "setClientRequestIdPolicyName", { enumerable: true, get: function() { - return setClientRequestIdPolicy_js_1.setClientRequestIdPolicyName; - } }); - var logPolicy_js_1 = require_logPolicy2(); - Object.defineProperty(exports2, "logPolicy", { enumerable: true, get: function() { - return logPolicy_js_1.logPolicy; - } }); - Object.defineProperty(exports2, "logPolicyName", { enumerable: true, get: function() { - return logPolicy_js_1.logPolicyName; - } }); - var multipartPolicy_js_1 = require_multipartPolicy2(); - Object.defineProperty(exports2, "multipartPolicy", { enumerable: true, get: function() { - return multipartPolicy_js_1.multipartPolicy; - } }); - Object.defineProperty(exports2, "multipartPolicyName", { enumerable: true, get: function() { - return multipartPolicy_js_1.multipartPolicyName; - } }); - var proxyPolicy_js_1 = require_proxyPolicy2(); - Object.defineProperty(exports2, "proxyPolicy", { enumerable: true, get: function() { - return proxyPolicy_js_1.proxyPolicy; - } }); - Object.defineProperty(exports2, "proxyPolicyName", { enumerable: true, get: function() { - return proxyPolicy_js_1.proxyPolicyName; - } }); - Object.defineProperty(exports2, "getDefaultProxySettings", { enumerable: true, get: function() { - return proxyPolicy_js_1.getDefaultProxySettings; - } }); - var redirectPolicy_js_1 = require_redirectPolicy2(); - Object.defineProperty(exports2, "redirectPolicy", { enumerable: true, get: function() { - return redirectPolicy_js_1.redirectPolicy; - } }); - Object.defineProperty(exports2, "redirectPolicyName", { enumerable: true, get: function() { - return redirectPolicy_js_1.redirectPolicyName; - } }); - var systemErrorRetryPolicy_js_1 = require_systemErrorRetryPolicy2(); - Object.defineProperty(exports2, "systemErrorRetryPolicy", { enumerable: true, get: function() { - return systemErrorRetryPolicy_js_1.systemErrorRetryPolicy; - } }); - Object.defineProperty(exports2, "systemErrorRetryPolicyName", { enumerable: true, get: function() { - return systemErrorRetryPolicy_js_1.systemErrorRetryPolicyName; - } }); - var throttlingRetryPolicy_js_1 = require_throttlingRetryPolicy2(); - Object.defineProperty(exports2, "throttlingRetryPolicy", { enumerable: true, get: function() { - return throttlingRetryPolicy_js_1.throttlingRetryPolicy; - } }); - Object.defineProperty(exports2, "throttlingRetryPolicyName", { enumerable: true, get: function() { - return throttlingRetryPolicy_js_1.throttlingRetryPolicyName; - } }); - var 
retryPolicy_js_1 = require_retryPolicy2(); - Object.defineProperty(exports2, "retryPolicy", { enumerable: true, get: function() { - return retryPolicy_js_1.retryPolicy; - } }); - var tracingPolicy_js_1 = require_tracingPolicy(); - Object.defineProperty(exports2, "tracingPolicy", { enumerable: true, get: function() { - return tracingPolicy_js_1.tracingPolicy; - } }); - Object.defineProperty(exports2, "tracingPolicyName", { enumerable: true, get: function() { - return tracingPolicy_js_1.tracingPolicyName; - } }); - var defaultRetryPolicy_js_1 = require_defaultRetryPolicy2(); - Object.defineProperty(exports2, "defaultRetryPolicy", { enumerable: true, get: function() { - return defaultRetryPolicy_js_1.defaultRetryPolicy; - } }); - var userAgentPolicy_js_1 = require_userAgentPolicy2(); - Object.defineProperty(exports2, "userAgentPolicy", { enumerable: true, get: function() { - return userAgentPolicy_js_1.userAgentPolicy; - } }); - Object.defineProperty(exports2, "userAgentPolicyName", { enumerable: true, get: function() { - return userAgentPolicy_js_1.userAgentPolicyName; - } }); - var tlsPolicy_js_1 = require_tlsPolicy2(); - Object.defineProperty(exports2, "tlsPolicy", { enumerable: true, get: function() { - return tlsPolicy_js_1.tlsPolicy; - } }); - Object.defineProperty(exports2, "tlsPolicyName", { enumerable: true, get: function() { - return tlsPolicy_js_1.tlsPolicyName; - } }); - var formDataPolicy_js_1 = require_formDataPolicy2(); - Object.defineProperty(exports2, "formDataPolicy", { enumerable: true, get: function() { - return formDataPolicy_js_1.formDataPolicy; - } }); - Object.defineProperty(exports2, "formDataPolicyName", { enumerable: true, get: function() { - return formDataPolicy_js_1.formDataPolicyName; - } }); - var bearerTokenAuthenticationPolicy_js_1 = require_bearerTokenAuthenticationPolicy(); - Object.defineProperty(exports2, "bearerTokenAuthenticationPolicy", { enumerable: true, get: function() { - return bearerTokenAuthenticationPolicy_js_1.bearerTokenAuthenticationPolicy; - } }); - Object.defineProperty(exports2, "bearerTokenAuthenticationPolicyName", { enumerable: true, get: function() { - return bearerTokenAuthenticationPolicy_js_1.bearerTokenAuthenticationPolicyName; - } }); - var ndJsonPolicy_js_1 = require_ndJsonPolicy(); - Object.defineProperty(exports2, "ndJsonPolicy", { enumerable: true, get: function() { - return ndJsonPolicy_js_1.ndJsonPolicy; - } }); - Object.defineProperty(exports2, "ndJsonPolicyName", { enumerable: true, get: function() { - return ndJsonPolicy_js_1.ndJsonPolicyName; - } }); - var auxiliaryAuthenticationHeaderPolicy_js_1 = require_auxiliaryAuthenticationHeaderPolicy(); - Object.defineProperty(exports2, "auxiliaryAuthenticationHeaderPolicy", { enumerable: true, get: function() { - return auxiliaryAuthenticationHeaderPolicy_js_1.auxiliaryAuthenticationHeaderPolicy; - } }); - Object.defineProperty(exports2, "auxiliaryAuthenticationHeaderPolicyName", { enumerable: true, get: function() { - return auxiliaryAuthenticationHeaderPolicy_js_1.auxiliaryAuthenticationHeaderPolicyName; - } }); - var agentPolicy_js_1 = require_agentPolicy2(); - Object.defineProperty(exports2, "agentPolicy", { enumerable: true, get: function() { - return agentPolicy_js_1.agentPolicy; - } }); - Object.defineProperty(exports2, "agentPolicyName", { enumerable: true, get: function() { - return agentPolicy_js_1.agentPolicyName; - } }); - var file_js_1 = require_file3(); - Object.defineProperty(exports2, "createFile", { enumerable: true, get: function() { - return 
file_js_1.createFile; - } }); - Object.defineProperty(exports2, "createFileFromStream", { enumerable: true, get: function() { - return file_js_1.createFileFromStream; - } }); - } -}); - -// node_modules/@azure/core-auth/dist/commonjs/azureKeyCredential.js -var require_azureKeyCredential = __commonJS({ - "node_modules/@azure/core-auth/dist/commonjs/azureKeyCredential.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.AzureKeyCredential = void 0; - var AzureKeyCredential = class { - _key; - /** - * The value of the key to be used in authentication - */ - get key() { - return this._key; + exports2.HttpProxyAgent = void 0; + var net = __importStar2(require("net")); + var tls = __importStar2(require("tls")); + var debug_1 = __importDefault2(require_src()); + var events_1 = require("events"); + var agent_base_1 = require_dist(); + var url_1 = require("url"); + var debug6 = (0, debug_1.default)("http-proxy-agent"); + var HttpProxyAgent = class extends agent_base_1.Agent { + constructor(proxy, opts) { + super(opts); + this.proxy = typeof proxy === "string" ? new url_1.URL(proxy) : proxy; + this.proxyHeaders = opts?.headers ?? {}; + debug6("Creating new HttpProxyAgent instance: %o", this.proxy.href); + const host = (this.proxy.hostname || this.proxy.host).replace(/^\[|\]$/g, ""); + const port = this.proxy.port ? parseInt(this.proxy.port, 10) : this.proxy.protocol === "https:" ? 443 : 80; + this.connectOpts = { + ...opts ? omit2(opts, "headers") : null, + host, + port + }; } - /** - * Create an instance of an AzureKeyCredential for use - * with a service client. - * - * @param key - The initial value of the key to use in authentication - */ - constructor(key) { - if (!key) { - throw new Error("key must be a non-empty string"); + addRequest(req, opts) { + req._header = null; + this.setRequestProps(req, opts); + super.addRequest(req, opts); + } + setRequestProps(req, opts) { + const { proxy } = this; + const protocol = opts.secureEndpoint ? "https:" : "http:"; + const hostname = req.getHeader("host") || "localhost"; + const base = `${protocol}//${hostname}`; + const url2 = new url_1.URL(req.path, base); + if (opts.port !== 80) { + url2.port = String(opts.port); + } + req.path = String(url2); + const headers = typeof this.proxyHeaders === "function" ? this.proxyHeaders() : { ...this.proxyHeaders }; + if (proxy.username || proxy.password) { + const auth2 = `${decodeURIComponent(proxy.username)}:${decodeURIComponent(proxy.password)}`; + headers["Proxy-Authorization"] = `Basic ${Buffer.from(auth2).toString("base64")}`; + } + if (!headers["Proxy-Connection"]) { + headers["Proxy-Connection"] = this.keepAlive ? "Keep-Alive" : "close"; + } + for (const name of Object.keys(headers)) { + const value = headers[name]; + if (value) { + req.setHeader(name, value); + } } - this._key = key; } - /** - * Change the value of the key. - * - * Updates will take effect upon the next request after - * updating the key value. 
- * - * @param newKey - The new key value to be used - */ - update(newKey) { - this._key = newKey; + async connect(req, opts) { + req._header = null; + if (!req.path.includes("://")) { + this.setRequestProps(req, opts); + } + let first; + let endOfHeaders; + debug6("Regenerating stored HTTP header string for request"); + req._implicitHeader(); + if (req.outputData && req.outputData.length > 0) { + debug6("Patching connection write() output buffer with updated header"); + first = req.outputData[0].data; + endOfHeaders = first.indexOf("\r\n\r\n") + 4; + req.outputData[0].data = req._header + first.substring(endOfHeaders); + debug6("Output buffer: %o", req.outputData[0].data); + } + let socket; + if (this.proxy.protocol === "https:") { + debug6("Creating `tls.Socket`: %o", this.connectOpts); + socket = tls.connect(this.connectOpts); + } else { + debug6("Creating `net.Socket`: %o", this.connectOpts); + socket = net.connect(this.connectOpts); + } + await (0, events_1.once)(socket, "connect"); + return socket; } }; - exports2.AzureKeyCredential = AzureKeyCredential; - } -}); - -// node_modules/@azure/core-auth/dist/commonjs/keyCredential.js -var require_keyCredential = __commonJS({ - "node_modules/@azure/core-auth/dist/commonjs/keyCredential.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.isKeyCredential = isKeyCredential; - var core_util_1 = require_commonjs4(); - function isKeyCredential(credential) { - return (0, core_util_1.isObjectWithProperties)(credential, ["key"]) && typeof credential.key === "string"; + HttpProxyAgent.protocols = ["http", "https"]; + exports2.HttpProxyAgent = HttpProxyAgent; + function omit2(obj, ...keys) { + const ret = {}; + let key; + for (key in obj) { + if (!keys.includes(key)) { + ret[key] = obj[key]; + } + } + return ret; } } }); -// node_modules/@azure/core-auth/dist/commonjs/azureNamedKeyCredential.js -var require_azureNamedKeyCredential = __commonJS({ - "node_modules/@azure/core-auth/dist/commonjs/azureNamedKeyCredential.js"(exports2) { +// node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/proxyPolicy.js +var require_proxyPolicy = __commonJS({ + "node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/proxyPolicy.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.AzureNamedKeyCredential = void 0; - exports2.isNamedKeyCredential = isNamedKeyCredential; - var core_util_1 = require_commonjs4(); - var AzureNamedKeyCredential = class { - _key; - _name; - /** - * The value of the key to be used in authentication. - */ - get key() { - return this._key; + exports2.globalNoProxyList = exports2.proxyPolicyName = void 0; + exports2.loadNoProxy = loadNoProxy; + exports2.getDefaultProxySettings = getDefaultProxySettings; + exports2.proxyPolicy = proxyPolicy; + var https_proxy_agent_1 = require_dist2(); + var http_proxy_agent_1 = require_dist3(); + var log_js_1 = require_log2(); + var HTTPS_PROXY = "HTTPS_PROXY"; + var HTTP_PROXY = "HTTP_PROXY"; + var ALL_PROXY = "ALL_PROXY"; + var NO_PROXY = "NO_PROXY"; + exports2.proxyPolicyName = "proxyPolicy"; + exports2.globalNoProxyList = []; + var noProxyListLoaded = false; + var globalBypassedMap = /* @__PURE__ */ new Map(); + function getEnvironmentValue(name) { + if (process.env[name]) { + return process.env[name]; + } else if (process.env[name.toLowerCase()]) { + return process.env[name.toLowerCase()]; } - /** - * The value of the name to be used in authentication. 
- */ - get name() { - return this._name; + return void 0; + } + function loadEnvironmentProxyValue() { + if (!process) { + return void 0; } - /** - * Create an instance of an AzureNamedKeyCredential for use - * with a service client. - * - * @param name - The initial value of the name to use in authentication. - * @param key - The initial value of the key to use in authentication. - */ - constructor(name, key) { - if (!name || !key) { - throw new TypeError("name and key must be non-empty strings"); - } - this._name = name; - this._key = key; + const httpsProxy = getEnvironmentValue(HTTPS_PROXY); + const allProxy = getEnvironmentValue(ALL_PROXY); + const httpProxy = getEnvironmentValue(HTTP_PROXY); + return httpsProxy || allProxy || httpProxy; + } + function isBypassed(uri, noProxyList, bypassedMap) { + if (noProxyList.length === 0) { + return false; } - /** - * Change the value of the key. - * - * Updates will take effect upon the next request after - * updating the key value. - * - * @param newName - The new name value to be used. - * @param newKey - The new key value to be used. - */ - update(newName, newKey) { - if (!newName || !newKey) { - throw new TypeError("newName and newKey must be non-empty strings"); + const host = new URL(uri).hostname; + if (bypassedMap?.has(host)) { + return bypassedMap.get(host); + } + let isBypassedFlag = false; + for (const pattern of noProxyList) { + if (pattern[0] === ".") { + if (host.endsWith(pattern)) { + isBypassedFlag = true; + } else { + if (host.length === pattern.length - 1 && host === pattern.slice(1)) { + isBypassedFlag = true; + } + } + } else { + if (host === pattern) { + isBypassedFlag = true; + } } - this._name = newName; - this._key = newKey; } - }; - exports2.AzureNamedKeyCredential = AzureNamedKeyCredential; - function isNamedKeyCredential(credential) { - return (0, core_util_1.isObjectWithProperties)(credential, ["name", "key"]) && typeof credential.key === "string" && typeof credential.name === "string"; + bypassedMap?.set(host, isBypassedFlag); + return isBypassedFlag; } - } -}); - -// node_modules/@azure/core-auth/dist/commonjs/azureSASCredential.js -var require_azureSASCredential = __commonJS({ - "node_modules/@azure/core-auth/dist/commonjs/azureSASCredential.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.AzureSASCredential = void 0; - exports2.isSASCredential = isSASCredential; - var core_util_1 = require_commonjs4(); - var AzureSASCredential = class { - _signature; - /** - * The value of the shared access signature to be used in authentication - */ - get signature() { - return this._signature; - } - /** - * Create an instance of an AzureSASCredential for use - * with a service client. - * - * @param signature - The initial value of the shared access signature to use in authentication - */ - constructor(signature) { - if (!signature) { - throw new Error("shared access signature must be a non-empty string"); - } - this._signature = signature; + function loadNoProxy() { + const noProxy = getEnvironmentValue(NO_PROXY); + noProxyListLoaded = true; + if (noProxy) { + return noProxy.split(",").map((item) => item.trim()).filter((item) => item.length); } - /** - * Change the value of the signature. - * - * Updates will take effect upon the next request after - * updating the signature value. 
- * - * @param newSignature - The new shared access signature value to be used - */ - update(newSignature) { - if (!newSignature) { - throw new Error("shared access signature must be a non-empty string"); + return []; + } + function getDefaultProxySettings(proxyUrl) { + if (!proxyUrl) { + proxyUrl = loadEnvironmentProxyValue(); + if (!proxyUrl) { + return void 0; } - this._signature = newSignature; } - }; - exports2.AzureSASCredential = AzureSASCredential; - function isSASCredential(credential) { - return (0, core_util_1.isObjectWithProperties)(credential, ["signature"]) && typeof credential.signature === "string"; - } - } -}); - -// node_modules/@azure/core-auth/dist/commonjs/tokenCredential.js -var require_tokenCredential = __commonJS({ - "node_modules/@azure/core-auth/dist/commonjs/tokenCredential.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.isBearerToken = isBearerToken; - exports2.isPopToken = isPopToken; - exports2.isTokenCredential = isTokenCredential; - function isBearerToken(accessToken) { - return !accessToken.tokenType || accessToken.tokenType === "Bearer"; + const parsedUrl = new URL(proxyUrl); + const schema2 = parsedUrl.protocol ? parsedUrl.protocol + "//" : ""; + return { + host: schema2 + parsedUrl.hostname, + port: Number.parseInt(parsedUrl.port || "80"), + username: parsedUrl.username, + password: parsedUrl.password + }; } - function isPopToken(accessToken) { - return accessToken.tokenType === "pop"; + function getDefaultProxySettingsInternal() { + const envProxy = loadEnvironmentProxyValue(); + return envProxy ? new URL(envProxy) : void 0; } - function isTokenCredential(credential) { - const castCredential = credential; - return castCredential && typeof castCredential.getToken === "function" && (castCredential.signRequest === void 0 || castCredential.getToken.length > 0); + function getUrlFromProxySettings(settings) { + let parsedProxyUrl; + try { + parsedProxyUrl = new URL(settings.host); + } catch { + throw new Error(`Expecting a valid host string in proxy settings, but found "${settings.host}".`); + } + parsedProxyUrl.port = String(settings.port); + if (settings.username) { + parsedProxyUrl.username = settings.username; + } + if (settings.password) { + parsedProxyUrl.password = settings.password; + } + return parsedProxyUrl; + } + function setProxyAgentOnRequest(request2, cachedAgents, proxyUrl) { + if (request2.agent) { + return; + } + const url2 = new URL(request2.url); + const isInsecure = url2.protocol !== "https:"; + if (request2.tlsSettings) { + log_js_1.logger.warning("TLS settings are not supported in combination with custom Proxy, certificates provided to the client will be ignored."); + } + const headers = request2.headers.toJSON(); + if (isInsecure) { + if (!cachedAgents.httpProxyAgent) { + cachedAgents.httpProxyAgent = new http_proxy_agent_1.HttpProxyAgent(proxyUrl, { headers }); + } + request2.agent = cachedAgents.httpProxyAgent; + } else { + if (!cachedAgents.httpsProxyAgent) { + cachedAgents.httpsProxyAgent = new https_proxy_agent_1.HttpsProxyAgent(proxyUrl, { headers }); + } + request2.agent = cachedAgents.httpsProxyAgent; + } + } + function proxyPolicy(proxySettings, options) { + if (!noProxyListLoaded) { + exports2.globalNoProxyList.push(...loadNoProxy()); + } + const defaultProxy = proxySettings ? 
getUrlFromProxySettings(proxySettings) : getDefaultProxySettingsInternal(); + const cachedAgents = {}; + return { + name: exports2.proxyPolicyName, + async sendRequest(request2, next) { + if (!request2.proxySettings && defaultProxy && !isBypassed(request2.url, options?.customNoProxyList ?? exports2.globalNoProxyList, options?.customNoProxyList ? void 0 : globalBypassedMap)) { + setProxyAgentOnRequest(request2, cachedAgents, defaultProxy); + } else if (request2.proxySettings) { + setProxyAgentOnRequest(request2, cachedAgents, getUrlFromProxySettings(request2.proxySettings)); + } + return next(request2); + } + }; } } }); -// node_modules/@azure/core-auth/dist/commonjs/index.js -var require_commonjs7 = __commonJS({ - "node_modules/@azure/core-auth/dist/commonjs/index.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.isTokenCredential = exports2.isSASCredential = exports2.AzureSASCredential = exports2.isNamedKeyCredential = exports2.AzureNamedKeyCredential = exports2.isKeyCredential = exports2.AzureKeyCredential = void 0; - var azureKeyCredential_js_1 = require_azureKeyCredential(); - Object.defineProperty(exports2, "AzureKeyCredential", { enumerable: true, get: function() { - return azureKeyCredential_js_1.AzureKeyCredential; - } }); - var keyCredential_js_1 = require_keyCredential(); - Object.defineProperty(exports2, "isKeyCredential", { enumerable: true, get: function() { - return keyCredential_js_1.isKeyCredential; - } }); - var azureNamedKeyCredential_js_1 = require_azureNamedKeyCredential(); - Object.defineProperty(exports2, "AzureNamedKeyCredential", { enumerable: true, get: function() { - return azureNamedKeyCredential_js_1.AzureNamedKeyCredential; - } }); - Object.defineProperty(exports2, "isNamedKeyCredential", { enumerable: true, get: function() { - return azureNamedKeyCredential_js_1.isNamedKeyCredential; - } }); - var azureSASCredential_js_1 = require_azureSASCredential(); - Object.defineProperty(exports2, "AzureSASCredential", { enumerable: true, get: function() { - return azureSASCredential_js_1.AzureSASCredential; - } }); - Object.defineProperty(exports2, "isSASCredential", { enumerable: true, get: function() { - return azureSASCredential_js_1.isSASCredential; - } }); - var tokenCredential_js_1 = require_tokenCredential(); - Object.defineProperty(exports2, "isTokenCredential", { enumerable: true, get: function() { - return tokenCredential_js_1.isTokenCredential; - } }); - } -}); - -// node_modules/@azure/core-http-compat/dist/commonjs/policies/disableKeepAlivePolicy.js -var require_disableKeepAlivePolicy = __commonJS({ - "node_modules/@azure/core-http-compat/dist/commonjs/policies/disableKeepAlivePolicy.js"(exports2) { +// node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/agentPolicy.js +var require_agentPolicy = __commonJS({ + "node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/agentPolicy.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.disableKeepAlivePolicyName = void 0; - exports2.createDisableKeepAlivePolicy = createDisableKeepAlivePolicy; - exports2.pipelineContainsDisableKeepAlivePolicy = pipelineContainsDisableKeepAlivePolicy; - exports2.disableKeepAlivePolicyName = "DisableKeepAlivePolicy"; - function createDisableKeepAlivePolicy() { + exports2.agentPolicyName = void 0; + exports2.agentPolicy = agentPolicy; + exports2.agentPolicyName = "agentPolicy"; + function agentPolicy(agent) { return { - name: 
exports2.disableKeepAlivePolicyName, - async sendRequest(request2, next) { - request2.disableKeepAlive = true; - return next(request2); + name: exports2.agentPolicyName, + sendRequest: async (req, next) => { + if (!req.agent) { + req.agent = agent; + } + return next(req); } }; } - function pipelineContainsDisableKeepAlivePolicy(pipeline) { - return pipeline.getOrderedPolicies().some((policy) => policy.name === exports2.disableKeepAlivePolicyName); + } +}); + +// node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/tlsPolicy.js +var require_tlsPolicy = __commonJS({ + "node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/tlsPolicy.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.tlsPolicyName = void 0; + exports2.tlsPolicy = tlsPolicy; + exports2.tlsPolicyName = "tlsPolicy"; + function tlsPolicy(tlsSettings) { + return { + name: exports2.tlsPolicyName, + sendRequest: async (req, next) => { + if (!req.tlsSettings) { + req.tlsSettings = tlsSettings; + } + return next(req); + } + }; } } }); -// node_modules/@azure/core-client/dist/commonjs/base64.js -var require_base64 = __commonJS({ - "node_modules/@azure/core-client/dist/commonjs/base64.js"(exports2) { +// node_modules/@typespec/ts-http-runtime/dist/commonjs/util/typeGuards.js +var require_typeGuards = __commonJS({ + "node_modules/@typespec/ts-http-runtime/dist/commonjs/util/typeGuards.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.encodeString = encodeString; - exports2.encodeByteArray = encodeByteArray; - exports2.decodeString = decodeString; - exports2.decodeStringToString = decodeStringToString; - function encodeString(value) { - return Buffer.from(value).toString("base64"); + exports2.isNodeReadableStream = isNodeReadableStream; + exports2.isWebReadableStream = isWebReadableStream; + exports2.isBinaryBody = isBinaryBody; + exports2.isReadableStream = isReadableStream; + exports2.isBlob = isBlob; + function isNodeReadableStream(x) { + return Boolean(x && typeof x["pipe"] === "function"); } - function encodeByteArray(value) { - const bufferValue = value instanceof Buffer ? 
value : Buffer.from(value.buffer); - return bufferValue.toString("base64"); + function isWebReadableStream(x) { + return Boolean(x && typeof x.getReader === "function" && typeof x.tee === "function"); } - function decodeString(value) { - return Buffer.from(value, "base64"); + function isBinaryBody(body) { + return body !== void 0 && (body instanceof Uint8Array || isReadableStream(body) || typeof body === "function" || body instanceof Blob); } - function decodeStringToString(value) { - return Buffer.from(value, "base64").toString(); + function isReadableStream(x) { + return isNodeReadableStream(x) || isWebReadableStream(x); + } + function isBlob(x) { + return typeof x.stream === "function"; } } }); -// node_modules/@azure/core-client/dist/commonjs/interfaces.js -var require_interfaces = __commonJS({ - "node_modules/@azure/core-client/dist/commonjs/interfaces.js"(exports2) { +// node_modules/@typespec/ts-http-runtime/dist/commonjs/util/concat.js +var require_concat = __commonJS({ + "node_modules/@typespec/ts-http-runtime/dist/commonjs/util/concat.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.XML_CHARKEY = exports2.XML_ATTRKEY = void 0; - exports2.XML_ATTRKEY = "$"; - exports2.XML_CHARKEY = "_"; + exports2.concat = concat; + var stream_1 = require("stream"); + var typeGuards_js_1 = require_typeGuards(); + async function* streamAsyncIterator() { + const reader = this.getReader(); + try { + while (true) { + const { done, value } = await reader.read(); + if (done) { + return; + } + yield value; + } + } finally { + reader.releaseLock(); + } + } + function makeAsyncIterable(webStream) { + if (!webStream[Symbol.asyncIterator]) { + webStream[Symbol.asyncIterator] = streamAsyncIterator.bind(webStream); + } + if (!webStream.values) { + webStream.values = streamAsyncIterator.bind(webStream); + } + } + function ensureNodeStream(stream2) { + if (stream2 instanceof ReadableStream) { + makeAsyncIterable(stream2); + return stream_1.Readable.fromWeb(stream2); + } else { + return stream2; + } + } + function toStream(source) { + if (source instanceof Uint8Array) { + return stream_1.Readable.from(Buffer.from(source)); + } else if ((0, typeGuards_js_1.isBlob)(source)) { + return ensureNodeStream(source.stream()); + } else { + return ensureNodeStream(source); + } + } + async function concat(sources) { + return function() { + const streams = sources.map((x) => typeof x === "function" ? 
x() : x).map(toStream); + return stream_1.Readable.from((async function* () { + for (const stream2 of streams) { + for await (const chunk of stream2) { + yield chunk; + } + } + })()); + }; + } } }); -// node_modules/@azure/core-client/dist/commonjs/utils.js -var require_utils6 = __commonJS({ - "node_modules/@azure/core-client/dist/commonjs/utils.js"(exports2) { +// node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/multipartPolicy.js +var require_multipartPolicy = __commonJS({ + "node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/multipartPolicy.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.isPrimitiveBody = isPrimitiveBody; - exports2.isDuration = isDuration; - exports2.isValidUuid = isValidUuid; - exports2.flattenResponse = flattenResponse; - function isPrimitiveBody(value, mapperTypeName) { - return mapperTypeName !== "Composite" && mapperTypeName !== "Dictionary" && (typeof value === "string" || typeof value === "number" || typeof value === "boolean" || mapperTypeName?.match(/^(Date|DateTime|DateTimeRfc1123|UnixTime|ByteArray|Base64Url)$/i) !== null || value === void 0 || value === null); - } - var validateISODuration = /^(-|\+)?P(?:([-+]?[0-9,.]*)Y)?(?:([-+]?[0-9,.]*)M)?(?:([-+]?[0-9,.]*)W)?(?:([-+]?[0-9,.]*)D)?(?:T(?:([-+]?[0-9,.]*)H)?(?:([-+]?[0-9,.]*)M)?(?:([-+]?[0-9,.]*)S)?)?$/; - function isDuration(value) { - return validateISODuration.test(value); + exports2.multipartPolicyName = void 0; + exports2.multipartPolicy = multipartPolicy; + var bytesEncoding_js_1 = require_bytesEncoding(); + var typeGuards_js_1 = require_typeGuards(); + var uuidUtils_js_1 = require_uuidUtils(); + var concat_js_1 = require_concat(); + function generateBoundary() { + return `----AzSDKFormBoundary${(0, uuidUtils_js_1.randomUUID)()}`; } - var validUuidRegex = /^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$/i; - function isValidUuid(uuid) { - return validUuidRegex.test(uuid); + function encodeHeaders(headers) { + let result = ""; + for (const [key, value] of headers) { + result += `${key}: ${value}\r +`; + } + return result; } - function handleNullableResponseAndWrappableBody(responseObject) { - const combinedHeadersAndBody = { - ...responseObject.headers, - ...responseObject.body - }; - if (responseObject.hasNullableType && Object.getOwnPropertyNames(combinedHeadersAndBody).length === 0) { - return responseObject.shouldWrapBody ? { body: null } : null; + function getLength(source) { + if (source instanceof Uint8Array) { + return source.byteLength; + } else if ((0, typeGuards_js_1.isBlob)(source)) { + return source.size === -1 ? void 0 : source.size; } else { - return responseObject.shouldWrapBody ? 
{ - ...responseObject.headers, - body: responseObject.body - } : combinedHeadersAndBody; + return void 0; } } - function flattenResponse(fullResponse, responseSpec) { - const parsedHeaders = fullResponse.parsedHeaders; - if (fullResponse.request.method === "HEAD") { - return { - ...parsedHeaders, - body: fullResponse.parsedBody - }; + function getTotalLength(sources) { + let total = 0; + for (const source of sources) { + const partLength = getLength(source); + if (partLength === void 0) { + return void 0; + } else { + total += partLength; + } } - const bodyMapper = responseSpec && responseSpec.bodyMapper; - const isNullable = Boolean(bodyMapper?.nullable); - const expectedBodyTypeName = bodyMapper?.type.name; - if (expectedBodyTypeName === "Stream") { - return { - ...parsedHeaders, - blobBody: fullResponse.blobBody, - readableStreamBody: fullResponse.readableStreamBody - }; + return total; + } + async function buildRequestBody(request2, parts, boundary) { + const sources = [ + (0, bytesEncoding_js_1.stringToUint8Array)(`--${boundary}`, "utf-8"), + ...parts.flatMap((part) => [ + (0, bytesEncoding_js_1.stringToUint8Array)("\r\n", "utf-8"), + (0, bytesEncoding_js_1.stringToUint8Array)(encodeHeaders(part.headers), "utf-8"), + (0, bytesEncoding_js_1.stringToUint8Array)("\r\n", "utf-8"), + part.body, + (0, bytesEncoding_js_1.stringToUint8Array)(`\r +--${boundary}`, "utf-8") + ]), + (0, bytesEncoding_js_1.stringToUint8Array)("--\r\n\r\n", "utf-8") + ]; + const contentLength = getTotalLength(sources); + if (contentLength) { + request2.headers.set("Content-Length", contentLength); } - const modelProperties = expectedBodyTypeName === "Composite" && bodyMapper.type.modelProperties || {}; - const isPageableResponse = Object.keys(modelProperties).some((k) => modelProperties[k].serializedName === ""); - if (expectedBodyTypeName === "Sequence" || isPageableResponse) { - const arrayResponse = fullResponse.parsedBody ?? []; - for (const key of Object.keys(modelProperties)) { - if (modelProperties[key].serializedName) { - arrayResponse[key] = fullResponse.parsedBody?.[key]; + request2.body = await (0, concat_js_1.concat)(sources); + } + exports2.multipartPolicyName = "multipartPolicy"; + var maxBoundaryLength = 70; + var validBoundaryCharacters = new Set(`abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789'()+,-./:=?`); + function assertValidBoundary(boundary) { + if (boundary.length > maxBoundaryLength) { + throw new Error(`Multipart boundary "${boundary}" exceeds maximum length of 70 characters`); + } + if (Array.from(boundary).some((x) => !validBoundaryCharacters.has(x))) { + throw new Error(`Multipart boundary "${boundary}" contains invalid characters`); + } + } + function multipartPolicy() { + return { + name: exports2.multipartPolicyName, + async sendRequest(request2, next) { + if (!request2.multipartBody) { + return next(request2); } - } - if (parsedHeaders) { - for (const key of Object.keys(parsedHeaders)) { - arrayResponse[key] = parsedHeaders[key]; + if (request2.body) { + throw new Error("multipartBody and regular body cannot be set at the same time"); + } + let boundary = request2.multipartBody.boundary; + const contentTypeHeader = request2.headers.get("Content-Type") ?? 
"multipart/mixed"; + const parsedHeader = contentTypeHeader.match(/^(multipart\/[^ ;]+)(?:; *boundary=(.+))?$/); + if (!parsedHeader) { + throw new Error(`Got multipart request body, but content-type header was not multipart: ${contentTypeHeader}`); + } + const [, contentType, parsedBoundary] = parsedHeader; + if (parsedBoundary && boundary && parsedBoundary !== boundary) { + throw new Error(`Multipart boundary was specified as ${parsedBoundary} in the header, but got ${boundary} in the request body`); + } + boundary ??= parsedBoundary; + if (boundary) { + assertValidBoundary(boundary); + } else { + boundary = generateBoundary(); } + request2.headers.set("Content-Type", `${contentType}; boundary=${boundary}`); + await buildRequestBody(request2, request2.multipartBody.parts, boundary); + request2.multipartBody = void 0; + return next(request2); } - return isNullable && !fullResponse.parsedBody && !parsedHeaders && Object.getOwnPropertyNames(modelProperties).length === 0 ? null : arrayResponse; - } - return handleNullableResponseAndWrappableBody({ - body: fullResponse.parsedBody, - headers: parsedHeaders, - hasNullableType: isNullable, - shouldWrapBody: isPrimitiveBody(fullResponse.parsedBody, expectedBodyTypeName) - }); + }; } } }); -// node_modules/@azure/core-client/dist/commonjs/serializer.js -var require_serializer = __commonJS({ - "node_modules/@azure/core-client/dist/commonjs/serializer.js"(exports2) { +// node_modules/@typespec/ts-http-runtime/dist/commonjs/createPipelineFromOptions.js +var require_createPipelineFromOptions = __commonJS({ + "node_modules/@typespec/ts-http-runtime/dist/commonjs/createPipelineFromOptions.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.MapperTypeNames = void 0; - exports2.createSerializer = createSerializer; - var tslib_1 = (init_tslib_es6(), __toCommonJS(tslib_es6_exports)); - var base64 = tslib_1.__importStar(require_base64()); - var interfaces_js_1 = require_interfaces(); - var utils_js_1 = require_utils6(); - var SerializerImpl = class { - modelMappers; - isXML; - constructor(modelMappers = {}, isXML = false) { - this.modelMappers = modelMappers; - this.isXML = isXML; - } - /** - * @deprecated Removing the constraints validation on client side. 
- */ - validateConstraints(mapper, value, objectName) { - const failValidation = (constraintName, constraintValue) => { - throw new Error(`"${objectName}" with value "${value}" should satisfy the constraint "${constraintName}": ${constraintValue}.`); - }; - if (mapper.constraints && value !== void 0 && value !== null) { - const { ExclusiveMaximum, ExclusiveMinimum, InclusiveMaximum, InclusiveMinimum, MaxItems, MaxLength, MinItems, MinLength, MultipleOf, Pattern, UniqueItems } = mapper.constraints; - if (ExclusiveMaximum !== void 0 && value >= ExclusiveMaximum) { - failValidation("ExclusiveMaximum", ExclusiveMaximum); - } - if (ExclusiveMinimum !== void 0 && value <= ExclusiveMinimum) { - failValidation("ExclusiveMinimum", ExclusiveMinimum); - } - if (InclusiveMaximum !== void 0 && value > InclusiveMaximum) { - failValidation("InclusiveMaximum", InclusiveMaximum); - } - if (InclusiveMinimum !== void 0 && value < InclusiveMinimum) { - failValidation("InclusiveMinimum", InclusiveMinimum); - } - if (MaxItems !== void 0 && value.length > MaxItems) { - failValidation("MaxItems", MaxItems); - } - if (MaxLength !== void 0 && value.length > MaxLength) { - failValidation("MaxLength", MaxLength); - } - if (MinItems !== void 0 && value.length < MinItems) { - failValidation("MinItems", MinItems); - } - if (MinLength !== void 0 && value.length < MinLength) { - failValidation("MinLength", MinLength); - } - if (MultipleOf !== void 0 && value % MultipleOf !== 0) { - failValidation("MultipleOf", MultipleOf); - } - if (Pattern) { - const pattern = typeof Pattern === "string" ? new RegExp(Pattern) : Pattern; - if (typeof value !== "string" || value.match(pattern) === null) { - failValidation("Pattern", Pattern); - } - } - if (UniqueItems && value.some((item, i, ar) => ar.indexOf(item) !== i)) { - failValidation("UniqueItems", UniqueItems); - } + exports2.createPipelineFromOptions = createPipelineFromOptions; + var logPolicy_js_1 = require_logPolicy(); + var pipeline_js_1 = require_pipeline(); + var redirectPolicy_js_1 = require_redirectPolicy(); + var userAgentPolicy_js_1 = require_userAgentPolicy(); + var decompressResponsePolicy_js_1 = require_decompressResponsePolicy(); + var defaultRetryPolicy_js_1 = require_defaultRetryPolicy(); + var formDataPolicy_js_1 = require_formDataPolicy(); + var checkEnvironment_js_1 = require_checkEnvironment(); + var proxyPolicy_js_1 = require_proxyPolicy(); + var agentPolicy_js_1 = require_agentPolicy(); + var tlsPolicy_js_1 = require_tlsPolicy(); + var multipartPolicy_js_1 = require_multipartPolicy(); + function createPipelineFromOptions(options) { + const pipeline = (0, pipeline_js_1.createEmptyPipeline)(); + if (checkEnvironment_js_1.isNodeLike) { + if (options.agent) { + pipeline.addPolicy((0, agentPolicy_js_1.agentPolicy)(options.agent)); + } + if (options.tlsOptions) { + pipeline.addPolicy((0, tlsPolicy_js_1.tlsPolicy)(options.tlsOptions)); } + pipeline.addPolicy((0, proxyPolicy_js_1.proxyPolicy)(options.proxyOptions)); + pipeline.addPolicy((0, decompressResponsePolicy_js_1.decompressResponsePolicy)()); } - /** - * Serialize the given object based on its metadata defined in the mapper - * - * @param mapper - The mapper which defines the metadata of the serializable object - * - * @param object - A valid Javascript object to be serialized - * - * @param objectName - Name of the serialized object - * - * @param options - additional options to serialization - * - * @returns A valid serialized Javascript object - */ - serialize(mapper, object, objectName, options = { xml: 
{} }) { - const updatedOptions = { - xml: { - rootName: options.xml.rootName ?? "", - includeRoot: options.xml.includeRoot ?? false, - xmlCharKey: options.xml.xmlCharKey ?? interfaces_js_1.XML_CHARKEY + pipeline.addPolicy((0, formDataPolicy_js_1.formDataPolicy)(), { beforePolicies: [multipartPolicy_js_1.multipartPolicyName] }); + pipeline.addPolicy((0, userAgentPolicy_js_1.userAgentPolicy)(options.userAgentOptions)); + pipeline.addPolicy((0, multipartPolicy_js_1.multipartPolicy)(), { afterPhase: "Deserialize" }); + pipeline.addPolicy((0, defaultRetryPolicy_js_1.defaultRetryPolicy)(options.retryOptions), { phase: "Retry" }); + if (checkEnvironment_js_1.isNodeLike) { + pipeline.addPolicy((0, redirectPolicy_js_1.redirectPolicy)(options.redirectOptions), { afterPhase: "Retry" }); + } + pipeline.addPolicy((0, logPolicy_js_1.logPolicy)(options.loggingOptions), { afterPhase: "Sign" }); + return pipeline; + } + } +}); + +// node_modules/@typespec/ts-http-runtime/dist/commonjs/client/apiVersionPolicy.js +var require_apiVersionPolicy = __commonJS({ + "node_modules/@typespec/ts-http-runtime/dist/commonjs/client/apiVersionPolicy.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.apiVersionPolicyName = void 0; + exports2.apiVersionPolicy = apiVersionPolicy; + exports2.apiVersionPolicyName = "ApiVersionPolicy"; + function apiVersionPolicy(options) { + return { + name: exports2.apiVersionPolicyName, + sendRequest: (req, next) => { + const url2 = new URL(req.url); + if (!url2.searchParams.get("api-version") && options.apiVersion) { + req.url = `${req.url}${Array.from(url2.searchParams.keys()).length > 0 ? "&" : "?"}api-version=${options.apiVersion}`; } - }; - let payload = {}; - const mapperType = mapper.type.name; - if (!objectName) { - objectName = mapper.serializedName; - } - if (mapperType.match(/^Sequence$/i) !== null) { - payload = []; - } - if (mapper.isConstant) { - object = mapper.defaultValue; + return next(req); } - const { required, nullable } = mapper; - if (required && nullable && object === void 0) { - throw new Error(`${objectName} cannot be undefined.`); - } - if (required && !nullable && (object === void 0 || object === null)) { - throw new Error(`${objectName} cannot be null or undefined.`); - } - if (!required && nullable === false && object === null) { - throw new Error(`${objectName} cannot be null.`); + }; + } + } +}); + +// node_modules/@typespec/ts-http-runtime/dist/commonjs/auth/credentials.js +var require_credentials = __commonJS({ + "node_modules/@typespec/ts-http-runtime/dist/commonjs/auth/credentials.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.isOAuth2TokenCredential = isOAuth2TokenCredential; + exports2.isBearerTokenCredential = isBearerTokenCredential; + exports2.isBasicCredential = isBasicCredential; + exports2.isApiKeyCredential = isApiKeyCredential; + function isOAuth2TokenCredential(credential) { + return "getOAuth2Token" in credential; + } + function isBearerTokenCredential(credential) { + return "getBearerToken" in credential; + } + function isBasicCredential(credential) { + return "username" in credential && "password" in credential; + } + function isApiKeyCredential(credential) { + return "key" in credential; + } + } +}); + +// node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/auth/checkInsecureConnection.js +var require_checkInsecureConnection = __commonJS({ + 
"node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/auth/checkInsecureConnection.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.ensureSecureConnection = ensureSecureConnection; + var log_js_1 = require_log2(); + var insecureConnectionWarningEmmitted = false; + function allowInsecureConnection(request2, options) { + if (options.allowInsecureConnection && request2.allowInsecureConnection) { + const url2 = new URL(request2.url); + if (url2.hostname === "localhost" || url2.hostname === "127.0.0.1") { + return true; } - if (object === void 0 || object === null) { - payload = object; + } + return false; + } + function emitInsecureConnectionWarning() { + const warning10 = "Sending token over insecure transport. Assume any token issued is compromised."; + log_js_1.logger.warning(warning10); + if (typeof process?.emitWarning === "function" && !insecureConnectionWarningEmmitted) { + insecureConnectionWarningEmmitted = true; + process.emitWarning(warning10); + } + } + function ensureSecureConnection(request2, options) { + if (!request2.url.toLowerCase().startsWith("https://")) { + if (allowInsecureConnection(request2, options)) { + emitInsecureConnectionWarning(); } else { - if (mapperType.match(/^any$/i) !== null) { - payload = object; - } else if (mapperType.match(/^(Number|String|Boolean|Object|Stream|Uuid)$/i) !== null) { - payload = serializeBasicTypes(mapperType, objectName, object); - } else if (mapperType.match(/^Enum$/i) !== null) { - const enumMapper = mapper; - payload = serializeEnumType(objectName, enumMapper.type.allowedValues, object); - } else if (mapperType.match(/^(Date|DateTime|TimeSpan|DateTimeRfc1123|UnixTime)$/i) !== null) { - payload = serializeDateTypes(mapperType, object, objectName); - } else if (mapperType.match(/^ByteArray$/i) !== null) { - payload = serializeByteArrayType(objectName, object); - } else if (mapperType.match(/^Base64Url$/i) !== null) { - payload = serializeBase64UrlType(objectName, object); - } else if (mapperType.match(/^Sequence$/i) !== null) { - payload = serializeSequenceType(this, mapper, object, objectName, Boolean(this.isXML), updatedOptions); - } else if (mapperType.match(/^Dictionary$/i) !== null) { - payload = serializeDictionaryType(this, mapper, object, objectName, Boolean(this.isXML), updatedOptions); - } else if (mapperType.match(/^Composite$/i) !== null) { - payload = serializeCompositeType(this, mapper, object, objectName, Boolean(this.isXML), updatedOptions); - } + throw new Error("Authentication is not permitted for non-TLS protected (non-https) URLs when allowInsecureConnection is false."); } - return payload; } - /** - * Deserialize the given object based on its metadata defined in the mapper - * - * @param mapper - The mapper which defines the metadata of the serializable object - * - * @param responseBody - A valid Javascript entity to be deserialized - * - * @param objectName - Name of the deserialized object - * - * @param options - Controls behavior of XML parser and builder. - * - * @returns A valid deserialized Javascript object - */ - deserialize(mapper, responseBody, objectName, options = { xml: {} }) { - const updatedOptions = { - xml: { - rootName: options.xml.rootName ?? "", - includeRoot: options.xml.includeRoot ?? false, - xmlCharKey: options.xml.xmlCharKey ?? interfaces_js_1.XML_CHARKEY - }, - ignoreUnknownProperties: options.ignoreUnknownProperties ?? 
false - }; - if (responseBody === void 0 || responseBody === null) { - if (this.isXML && mapper.type.name === "Sequence" && !mapper.xmlIsWrapped) { - responseBody = []; + } + } +}); + +// node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/auth/apiKeyAuthenticationPolicy.js +var require_apiKeyAuthenticationPolicy = __commonJS({ + "node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/auth/apiKeyAuthenticationPolicy.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.apiKeyAuthenticationPolicyName = void 0; + exports2.apiKeyAuthenticationPolicy = apiKeyAuthenticationPolicy; + var checkInsecureConnection_js_1 = require_checkInsecureConnection(); + exports2.apiKeyAuthenticationPolicyName = "apiKeyAuthenticationPolicy"; + function apiKeyAuthenticationPolicy(options) { + return { + name: exports2.apiKeyAuthenticationPolicyName, + async sendRequest(request2, next) { + (0, checkInsecureConnection_js_1.ensureSecureConnection)(request2, options); + const scheme = (request2.authSchemes ?? options.authSchemes)?.find((x) => x.kind === "apiKey"); + if (!scheme) { + return next(request2); } - if (mapper.defaultValue !== void 0) { - responseBody = mapper.defaultValue; + if (scheme.apiKeyLocation !== "header") { + throw new Error(`Unsupported API key location: ${scheme.apiKeyLocation}`); } - return responseBody; + request2.headers.set(scheme.name, options.credential.key); + return next(request2); } - let payload; - const mapperType = mapper.type.name; - if (!objectName) { - objectName = mapper.serializedName; + }; + } + } +}); + +// node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/auth/basicAuthenticationPolicy.js +var require_basicAuthenticationPolicy = __commonJS({ + "node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/auth/basicAuthenticationPolicy.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.basicAuthenticationPolicyName = void 0; + exports2.basicAuthenticationPolicy = basicAuthenticationPolicy; + var bytesEncoding_js_1 = require_bytesEncoding(); + var checkInsecureConnection_js_1 = require_checkInsecureConnection(); + exports2.basicAuthenticationPolicyName = "bearerAuthenticationPolicy"; + function basicAuthenticationPolicy(options) { + return { + name: exports2.basicAuthenticationPolicyName, + async sendRequest(request2, next) { + (0, checkInsecureConnection_js_1.ensureSecureConnection)(request2, options); + const scheme = (request2.authSchemes ?? 
options.authSchemes)?.find((x) => x.kind === "http" && x.scheme === "basic"); + if (!scheme) { + return next(request2); + } + const { username, password } = options.credential; + const headerValue = (0, bytesEncoding_js_1.uint8ArrayToString)((0, bytesEncoding_js_1.stringToUint8Array)(`${username}:${password}`, "utf-8"), "base64"); + request2.headers.set("Authorization", `Basic ${headerValue}`); + return next(request2); } - if (mapperType.match(/^Composite$/i) !== null) { - payload = deserializeCompositeType(this, mapper, responseBody, objectName, updatedOptions); - } else { - if (this.isXML) { - const xmlCharKey = updatedOptions.xml.xmlCharKey; - if (responseBody[interfaces_js_1.XML_ATTRKEY] !== void 0 && responseBody[xmlCharKey] !== void 0) { - responseBody = responseBody[xmlCharKey]; - } + }; + } + } +}); + +// node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/auth/bearerAuthenticationPolicy.js +var require_bearerAuthenticationPolicy = __commonJS({ + "node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/auth/bearerAuthenticationPolicy.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.bearerAuthenticationPolicyName = void 0; + exports2.bearerAuthenticationPolicy = bearerAuthenticationPolicy; + var checkInsecureConnection_js_1 = require_checkInsecureConnection(); + exports2.bearerAuthenticationPolicyName = "bearerAuthenticationPolicy"; + function bearerAuthenticationPolicy(options) { + return { + name: exports2.bearerAuthenticationPolicyName, + async sendRequest(request2, next) { + (0, checkInsecureConnection_js_1.ensureSecureConnection)(request2, options); + const scheme = (request2.authSchemes ?? options.authSchemes)?.find((x) => x.kind === "http" && x.scheme === "bearer"); + if (!scheme) { + return next(request2); } - if (mapperType.match(/^Number$/i) !== null) { - payload = parseFloat(responseBody); - if (isNaN(payload)) { - payload = responseBody; - } - } else if (mapperType.match(/^Boolean$/i) !== null) { - if (responseBody === "true") { - payload = true; - } else if (responseBody === "false") { - payload = false; - } else { - payload = responseBody; - } - } else if (mapperType.match(/^(String|Enum|Object|Stream|Uuid|TimeSpan|any)$/i) !== null) { - payload = responseBody; - } else if (mapperType.match(/^(Date|DateTime|DateTimeRfc1123)$/i) !== null) { - payload = new Date(responseBody); - } else if (mapperType.match(/^UnixTime$/i) !== null) { - payload = unixTimeToDate(responseBody); - } else if (mapperType.match(/^ByteArray$/i) !== null) { - payload = base64.decodeString(responseBody); - } else if (mapperType.match(/^Base64Url$/i) !== null) { - payload = base64UrlToByteArray(responseBody); - } else if (mapperType.match(/^Sequence$/i) !== null) { - payload = deserializeSequenceType(this, mapper, responseBody, objectName, updatedOptions); - } else if (mapperType.match(/^Dictionary$/i) !== null) { - payload = deserializeDictionaryType(this, mapper, responseBody, objectName, updatedOptions); + const token = await options.credential.getBearerToken({ + abortSignal: request2.abortSignal + }); + request2.headers.set("Authorization", `Bearer ${token}`); + return next(request2); + } + }; + } + } +}); + +// node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/auth/oauth2AuthenticationPolicy.js +var require_oauth2AuthenticationPolicy = __commonJS({ + "node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/auth/oauth2AuthenticationPolicy.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, 
"__esModule", { value: true }); + exports2.oauth2AuthenticationPolicyName = void 0; + exports2.oauth2AuthenticationPolicy = oauth2AuthenticationPolicy; + var checkInsecureConnection_js_1 = require_checkInsecureConnection(); + exports2.oauth2AuthenticationPolicyName = "oauth2AuthenticationPolicy"; + function oauth2AuthenticationPolicy(options) { + return { + name: exports2.oauth2AuthenticationPolicyName, + async sendRequest(request2, next) { + (0, checkInsecureConnection_js_1.ensureSecureConnection)(request2, options); + const scheme = (request2.authSchemes ?? options.authSchemes)?.find((x) => x.kind === "oauth2"); + if (!scheme) { + return next(request2); } + const token = await options.credential.getOAuth2Token(scheme.flows, { + abortSignal: request2.abortSignal + }); + request2.headers.set("Authorization", `Bearer ${token}`); + return next(request2); } - if (mapper.isConstant) { - payload = mapper.defaultValue; + }; + } + } +}); + +// node_modules/@typespec/ts-http-runtime/dist/commonjs/client/clientHelpers.js +var require_clientHelpers = __commonJS({ + "node_modules/@typespec/ts-http-runtime/dist/commonjs/client/clientHelpers.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.createDefaultPipeline = createDefaultPipeline; + exports2.getCachedDefaultHttpsClient = getCachedDefaultHttpsClient; + var defaultHttpClient_js_1 = require_defaultHttpClient(); + var createPipelineFromOptions_js_1 = require_createPipelineFromOptions(); + var apiVersionPolicy_js_1 = require_apiVersionPolicy(); + var credentials_js_1 = require_credentials(); + var apiKeyAuthenticationPolicy_js_1 = require_apiKeyAuthenticationPolicy(); + var basicAuthenticationPolicy_js_1 = require_basicAuthenticationPolicy(); + var bearerAuthenticationPolicy_js_1 = require_bearerAuthenticationPolicy(); + var oauth2AuthenticationPolicy_js_1 = require_oauth2AuthenticationPolicy(); + var cachedHttpClient; + function createDefaultPipeline(options = {}) { + const pipeline = (0, createPipelineFromOptions_js_1.createPipelineFromOptions)(options); + pipeline.addPolicy((0, apiVersionPolicy_js_1.apiVersionPolicy)(options)); + const { credential, authSchemes, allowInsecureConnection } = options; + if (credential) { + if ((0, credentials_js_1.isApiKeyCredential)(credential)) { + pipeline.addPolicy((0, apiKeyAuthenticationPolicy_js_1.apiKeyAuthenticationPolicy)({ authSchemes, credential, allowInsecureConnection })); + } else if ((0, credentials_js_1.isBasicCredential)(credential)) { + pipeline.addPolicy((0, basicAuthenticationPolicy_js_1.basicAuthenticationPolicy)({ authSchemes, credential, allowInsecureConnection })); + } else if ((0, credentials_js_1.isBearerTokenCredential)(credential)) { + pipeline.addPolicy((0, bearerAuthenticationPolicy_js_1.bearerAuthenticationPolicy)({ authSchemes, credential, allowInsecureConnection })); + } else if ((0, credentials_js_1.isOAuth2TokenCredential)(credential)) { + pipeline.addPolicy((0, oauth2AuthenticationPolicy_js_1.oauth2AuthenticationPolicy)({ authSchemes, credential, allowInsecureConnection })); } - return payload; } - }; - function createSerializer(modelMappers = {}, isXML = false) { - return new SerializerImpl(modelMappers, isXML); + return pipeline; } - function trimEnd(str2, ch) { - let len = str2.length; - while (len - 1 >= 0 && str2[len - 1] === ch) { - --len; + function getCachedDefaultHttpsClient() { + if (!cachedHttpClient) { + cachedHttpClient = (0, defaultHttpClient_js_1.createDefaultHttpClient)(); } - return str2.substr(0, len); + 
return cachedHttpClient; } - function bufferToBase64Url(buffer) { - if (!buffer) { - return void 0; - } - if (!(buffer instanceof Uint8Array)) { - throw new Error(`Please provide an input of type Uint8Array for converting to Base64Url.`); + } +}); + +// node_modules/@typespec/ts-http-runtime/dist/commonjs/client/multipart.js +var require_multipart = __commonJS({ + "node_modules/@typespec/ts-http-runtime/dist/commonjs/client/multipart.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.buildBodyPart = buildBodyPart; + exports2.buildMultipartBody = buildMultipartBody; + var restError_js_1 = require_restError(); + var httpHeaders_js_1 = require_httpHeaders(); + var bytesEncoding_js_1 = require_bytesEncoding(); + var typeGuards_js_1 = require_typeGuards(); + function getHeaderValue(descriptor, headerName) { + if (descriptor.headers) { + const actualHeaderName = Object.keys(descriptor.headers).find((x) => x.toLowerCase() === headerName.toLowerCase()); + if (actualHeaderName) { + return descriptor.headers[actualHeaderName]; + } } - const str2 = base64.encodeByteArray(buffer); - return trimEnd(str2, "=").replace(/\+/g, "-").replace(/\//g, "_"); + return void 0; } - function base64UrlToByteArray(str2) { - if (!str2) { - return void 0; + function getPartContentType(descriptor) { + const contentTypeHeader = getHeaderValue(descriptor, "content-type"); + if (contentTypeHeader) { + return contentTypeHeader; } - if (str2 && typeof str2.valueOf() !== "string") { - throw new Error("Please provide an input of type string for converting to Uint8Array"); + if (descriptor.contentType === null) { + return void 0; } - str2 = str2.replace(/-/g, "+").replace(/_/g, "/"); - return base64.decodeString(str2); - } - function splitSerializeName(prop) { - const classes = []; - let partialclass = ""; - if (prop) { - const subwords = prop.split("."); - for (const item of subwords) { - if (item.charAt(item.length - 1) === "\\") { - partialclass += item.substr(0, item.length - 1) + "."; - } else { - partialclass += item; - classes.push(partialclass); - partialclass = ""; - } - } + if (descriptor.contentType) { + return descriptor.contentType; } - return classes; - } - function dateToUnixTime(d) { - if (!d) { + const { body } = descriptor; + if (body === null || body === void 0) { return void 0; } - if (typeof d.valueOf() === "string") { - d = new Date(d); + if (typeof body === "string" || typeof body === "number" || typeof body === "boolean") { + return "text/plain; charset=UTF-8"; } - return Math.floor(d.getTime() / 1e3); - } - function unixTimeToDate(n) { - if (!n) { - return void 0; + if (body instanceof Blob) { + return body.type || "application/octet-stream"; } - return new Date(n * 1e3); - } - function serializeBasicTypes(typeName, objectName, value) { - if (value !== null && value !== void 0) { - if (typeName.match(/^Number$/i) !== null) { - if (typeof value !== "number") { - throw new Error(`${objectName} with value ${value} must be of type number.`); - } - } else if (typeName.match(/^String$/i) !== null) { - if (typeof value.valueOf() !== "string") { - throw new Error(`${objectName} with value "${value}" must be of type string.`); - } - } else if (typeName.match(/^Uuid$/i) !== null) { - if (!(typeof value.valueOf() === "string" && (0, utils_js_1.isValidUuid)(value))) { - throw new Error(`${objectName} with value "${value}" must be of type string and a valid uuid.`); - } - } else if (typeName.match(/^Boolean$/i) !== null) { - if (typeof value !== "boolean") { 
- throw new Error(`${objectName} with value ${value} must be of type boolean.`); - } - } else if (typeName.match(/^Stream$/i) !== null) { - const objectType = typeof value; - if (objectType !== "string" && typeof value.pipe !== "function" && // NodeJS.ReadableStream - typeof value.tee !== "function" && // browser ReadableStream - !(value instanceof ArrayBuffer) && !ArrayBuffer.isView(value) && // File objects count as a type of Blob, so we want to use instanceof explicitly - !((typeof Blob === "function" || typeof Blob === "object") && value instanceof Blob) && objectType !== "function") { - throw new Error(`${objectName} must be a string, Blob, ArrayBuffer, ArrayBufferView, ReadableStream, or () => ReadableStream.`); - } - } + if ((0, typeGuards_js_1.isBinaryBody)(body)) { + return "application/octet-stream"; } - return value; + return "application/json"; } - function serializeEnumType(objectName, allowedValues, value) { - if (!allowedValues) { - throw new Error(`Please provide a set of allowedValues to validate ${objectName} as an Enum Type.`); + function escapeDispositionField(value) { + return JSON.stringify(value); + } + function getContentDisposition(descriptor) { + const contentDispositionHeader = getHeaderValue(descriptor, "content-disposition"); + if (contentDispositionHeader) { + return contentDispositionHeader; } - const isPresent = allowedValues.some((item) => { - if (typeof item.valueOf() === "string") { - return item.toLowerCase() === value.toLowerCase(); - } - return item === value; - }); - if (!isPresent) { - throw new Error(`${value} is not a valid value for ${objectName}. The valid values are: ${JSON.stringify(allowedValues)}.`); + if (descriptor.dispositionType === void 0 && descriptor.name === void 0 && descriptor.filename === void 0) { + return void 0; } - return value; - } - function serializeByteArrayType(objectName, value) { - if (value !== void 0 && value !== null) { - if (!(value instanceof Uint8Array)) { - throw new Error(`${objectName} must be of type Uint8Array.`); - } - value = base64.encodeByteArray(value); + const dispositionType = descriptor.dispositionType ?? "form-data"; + let disposition = dispositionType; + if (descriptor.name) { + disposition += `; name=${escapeDispositionField(descriptor.name)}`; } - return value; - } - function serializeBase64UrlType(objectName, value) { - if (value !== void 0 && value !== null) { - if (!(value instanceof Uint8Array)) { - throw new Error(`${objectName} must be of type Uint8Array.`); + let filename = void 0; + if (descriptor.filename) { + filename = descriptor.filename; + } else if (typeof File !== "undefined" && descriptor.body instanceof File) { + const filenameFromFile = descriptor.body.name; + if (filenameFromFile !== "") { + filename = filenameFromFile; } - value = bufferToBase64Url(value); } - return value; - } - function serializeDateTypes(typeName, value, objectName) { - if (value !== void 0 && value !== null) { - if (typeName.match(/^Date$/i) !== null) { - if (!(value instanceof Date || typeof value.valueOf() === "string" && !isNaN(Date.parse(value)))) { - throw new Error(`${objectName} must be an instanceof Date or a string in ISO8601 format.`); - } - value = value instanceof Date ? 
value.toISOString().substring(0, 10) : new Date(value).toISOString().substring(0, 10); - } else if (typeName.match(/^DateTime$/i) !== null) { - if (!(value instanceof Date || typeof value.valueOf() === "string" && !isNaN(Date.parse(value)))) { - throw new Error(`${objectName} must be an instanceof Date or a string in ISO8601 format.`); - } - value = value instanceof Date ? value.toISOString() : new Date(value).toISOString(); - } else if (typeName.match(/^DateTimeRfc1123$/i) !== null) { - if (!(value instanceof Date || typeof value.valueOf() === "string" && !isNaN(Date.parse(value)))) { - throw new Error(`${objectName} must be an instanceof Date or a string in RFC-1123 format.`); - } - value = value instanceof Date ? value.toUTCString() : new Date(value).toUTCString(); - } else if (typeName.match(/^UnixTime$/i) !== null) { - if (!(value instanceof Date || typeof value.valueOf() === "string" && !isNaN(Date.parse(value)))) { - throw new Error(`${objectName} must be an instanceof Date or a string in RFC-1123/ISO8601 format for it to be serialized in UnixTime/Epoch format.`); - } - value = dateToUnixTime(value); - } else if (typeName.match(/^TimeSpan$/i) !== null) { - if (!(0, utils_js_1.isDuration)(value)) { - throw new Error(`${objectName} must be a string in ISO 8601 format. Instead was "${value}".`); - } - } + if (filename) { + disposition += `; filename=${escapeDispositionField(filename)}`; } - return value; + return disposition; } - function serializeSequenceType(serializer, mapper, object, objectName, isXml, options) { - if (!Array.isArray(object)) { - throw new Error(`${objectName} must be of type Array.`); + function normalizeBody(body, contentType) { + if (body === void 0) { + return new Uint8Array([]); } - let elementType = mapper.type.element; - if (!elementType || typeof elementType !== "object") { - throw new Error(`element" metadata for an Array must be defined in the mapper and it must of type "object" in ${objectName}.`); + if ((0, typeGuards_js_1.isBinaryBody)(body)) { + return body; } - if (elementType.type.name === "Composite" && elementType.type.className) { - elementType = serializer.modelMappers[elementType.type.className] ?? elementType; + if (typeof body === "string" || typeof body === "number" || typeof body === "boolean") { + return (0, bytesEncoding_js_1.stringToUint8Array)(String(body), "utf-8"); } - const tempArray = []; - for (let i = 0; i < object.length; i++) { - const serializedValue = serializer.serialize(elementType, object[i], objectName, options); - if (isXml && elementType.xmlNamespace) { - const xmlnsKey = elementType.xmlNamespacePrefix ? 
`xmlns:${elementType.xmlNamespacePrefix}` : "xmlns"; - if (elementType.type.name === "Composite") { - tempArray[i] = { ...serializedValue }; - tempArray[i][interfaces_js_1.XML_ATTRKEY] = { [xmlnsKey]: elementType.xmlNamespace }; - } else { - tempArray[i] = {}; - tempArray[i][options.xml.xmlCharKey] = serializedValue; - tempArray[i][interfaces_js_1.XML_ATTRKEY] = { [xmlnsKey]: elementType.xmlNamespace }; - } - } else { - tempArray[i] = serializedValue; - } + if (contentType && /application\/(.+\+)?json(;.+)?/i.test(String(contentType))) { + return (0, bytesEncoding_js_1.stringToUint8Array)(JSON.stringify(body), "utf-8"); } - return tempArray; + throw new restError_js_1.RestError(`Unsupported body/content-type combination: ${body}, ${contentType}`); } - function serializeDictionaryType(serializer, mapper, object, objectName, isXml, options) { - if (typeof object !== "object") { - throw new Error(`${objectName} must be of type object.`); - } - const valueType = mapper.type.value; - if (!valueType || typeof valueType !== "object") { - throw new Error(`"value" metadata for a Dictionary must be defined in the mapper and it must of type "object" in ${objectName}.`); - } - const tempDictionary = {}; - for (const key of Object.keys(object)) { - const serializedValue = serializer.serialize(valueType, object[key], objectName, options); - tempDictionary[key] = getXmlObjectValue(valueType, serializedValue, isXml, options); + function buildBodyPart(descriptor) { + const contentType = getPartContentType(descriptor); + const contentDisposition = getContentDisposition(descriptor); + const headers = (0, httpHeaders_js_1.createHttpHeaders)(descriptor.headers ?? {}); + if (contentType) { + headers.set("content-type", contentType); } - if (isXml && mapper.xmlNamespace) { - const xmlnsKey = mapper.xmlNamespacePrefix ? 
`xmlns:${mapper.xmlNamespacePrefix}` : "xmlns"; - const result = tempDictionary; - result[interfaces_js_1.XML_ATTRKEY] = { [xmlnsKey]: mapper.xmlNamespace }; - return result; + if (contentDisposition) { + headers.set("content-disposition", contentDisposition); } - return tempDictionary; + const body = normalizeBody(descriptor.body, contentType); + return { + headers, + body + }; } - function resolveAdditionalProperties(serializer, mapper, objectName) { - const additionalProperties = mapper.type.additionalProperties; - if (!additionalProperties && mapper.type.className) { - const modelMapper = resolveReferencedMapper(serializer, mapper, objectName); - return modelMapper?.type.additionalProperties; - } - return additionalProperties; + function buildMultipartBody(parts) { + return { parts: parts.map(buildBodyPart) }; } - function resolveReferencedMapper(serializer, mapper, objectName) { - const className = mapper.type.className; - if (!className) { - throw new Error(`Class name for model "${objectName}" is not provided in the mapper "${JSON.stringify(mapper, void 0, 2)}".`); + } +}); + +// node_modules/@typespec/ts-http-runtime/dist/commonjs/client/sendRequest.js +var require_sendRequest = __commonJS({ + "node_modules/@typespec/ts-http-runtime/dist/commonjs/client/sendRequest.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.sendRequest = sendRequest; + var restError_js_1 = require_restError(); + var httpHeaders_js_1 = require_httpHeaders(); + var pipelineRequest_js_1 = require_pipelineRequest(); + var clientHelpers_js_1 = require_clientHelpers(); + var typeGuards_js_1 = require_typeGuards(); + var multipart_js_1 = require_multipart(); + async function sendRequest(method, url2, pipeline, options = {}, customHttpClient) { + const httpClient = customHttpClient ?? (0, clientHelpers_js_1.getCachedDefaultHttpsClient)(); + const request2 = buildPipelineRequest(method, url2, options); + try { + const response = await pipeline.sendRequest(httpClient, request2); + const headers = response.headers.toJSON(); + const stream2 = response.readableStreamBody ?? response.browserStreamBody; + const parsedBody = options.responseAsStream || stream2 !== void 0 ? void 0 : getResponseBody(response); + const body = stream2 ?? parsedBody; + if (options?.onResponse) { + options.onResponse({ ...response, request: request2, rawHeaders: headers, parsedBody }); + } + return { + request: request2, + headers, + status: `${response.status}`, + body + }; + } catch (e) { + if ((0, restError_js_1.isRestError)(e) && e.response && options.onResponse) { + const { response } = e; + const rawHeaders = response.headers.toJSON(); + options?.onResponse({ ...response, request: request2, rawHeaders }, e); + } + throw e; } - return serializer.modelMappers[className]; } - function resolveModelProperties(serializer, mapper, objectName) { - let modelProps = mapper.type.modelProperties; - if (!modelProps) { - const modelMapper = resolveReferencedMapper(serializer, mapper, objectName); - if (!modelMapper) { - throw new Error(`mapper() cannot be null or undefined for model "${mapper.type.className}".`); - } - modelProps = modelMapper?.type.modelProperties; - if (!modelProps) { - throw new Error(`modelProperties cannot be null or undefined in the mapper "${JSON.stringify(modelMapper)}" of type "${mapper.type.className}" for object "${objectName}".`); + function getRequestContentType(options = {}) { + return options.contentType ?? options.headers?.["content-type"] ?? 
getContentType(options.body); + } + function getContentType(body) { + if (ArrayBuffer.isView(body)) { + return "application/octet-stream"; + } + if (typeof body === "string") { + try { + JSON.parse(body); + return "application/json"; + } catch (error3) { + return void 0; } } - return modelProps; + return "application/json"; } - function serializeCompositeType(serializer, mapper, object, objectName, isXml, options) { - if (getPolymorphicDiscriminatorRecursively(serializer, mapper)) { - mapper = getPolymorphicMapper(serializer, mapper, object, "clientName"); + function buildPipelineRequest(method, url2, options = {}) { + const requestContentType = getRequestContentType(options); + const { body, multipartBody } = getRequestBody(options.body, requestContentType); + const hasContent = body !== void 0 || multipartBody !== void 0; + const headers = (0, httpHeaders_js_1.createHttpHeaders)({ + ...options.headers ? options.headers : {}, + accept: options.accept ?? options.headers?.accept ?? "application/json", + ...hasContent && requestContentType && { + "content-type": requestContentType + } + }); + return (0, pipelineRequest_js_1.createPipelineRequest)({ + url: url2, + method, + body, + multipartBody, + headers, + allowInsecureConnection: options.allowInsecureConnection, + abortSignal: options.abortSignal, + onUploadProgress: options.onUploadProgress, + onDownloadProgress: options.onDownloadProgress, + timeout: options.timeout, + enableBrowserStreams: true, + streamResponseStatusCodes: options.responseAsStream ? /* @__PURE__ */ new Set([Number.POSITIVE_INFINITY]) : void 0 + }); + } + function getRequestBody(body, contentType = "") { + if (body === void 0) { + return { body: void 0 }; } - if (object !== void 0 && object !== null) { - const payload = {}; - const modelProps = resolveModelProperties(serializer, mapper, objectName); - for (const key of Object.keys(modelProps)) { - const propertyMapper = modelProps[key]; - if (propertyMapper.readOnly) { - continue; - } - let propName; - let parentObject = payload; - if (serializer.isXML) { - if (propertyMapper.xmlIsWrapped) { - propName = propertyMapper.xmlName; - } else { - propName = propertyMapper.xmlElementName || propertyMapper.xmlName; - } - } else { - const paths = splitSerializeName(propertyMapper.serializedName); - propName = paths.pop(); - for (const pathName of paths) { - const childObject = parentObject[pathName]; - if ((childObject === void 0 || childObject === null) && (object[key] !== void 0 && object[key] !== null || propertyMapper.defaultValue !== void 0)) { - parentObject[pathName] = {}; - } - parentObject = parentObject[pathName]; - } - } - if (parentObject !== void 0 && parentObject !== null) { - if (isXml && mapper.xmlNamespace) { - const xmlnsKey = mapper.xmlNamespacePrefix ? `xmlns:${mapper.xmlNamespacePrefix}` : "xmlns"; - parentObject[interfaces_js_1.XML_ATTRKEY] = { - ...parentObject[interfaces_js_1.XML_ATTRKEY], - [xmlnsKey]: mapper.xmlNamespace - }; - } - const propertyObjectName = propertyMapper.serializedName !== "" ? objectName + "." 
+ propertyMapper.serializedName : objectName; - let toSerialize = object[key]; - const polymorphicDiscriminator = getPolymorphicDiscriminatorRecursively(serializer, mapper); - if (polymorphicDiscriminator && polymorphicDiscriminator.clientName === key && (toSerialize === void 0 || toSerialize === null)) { - toSerialize = mapper.serializedName; - } - const serializedValue = serializer.serialize(propertyMapper, toSerialize, propertyObjectName, options); - if (serializedValue !== void 0 && propName !== void 0 && propName !== null) { - const value = getXmlObjectValue(propertyMapper, serializedValue, isXml, options); - if (isXml && propertyMapper.xmlIsAttribute) { - parentObject[interfaces_js_1.XML_ATTRKEY] = parentObject[interfaces_js_1.XML_ATTRKEY] || {}; - parentObject[interfaces_js_1.XML_ATTRKEY][propName] = serializedValue; - } else if (isXml && propertyMapper.xmlIsWrapped) { - parentObject[propName] = { [propertyMapper.xmlElementName]: value }; - } else { - parentObject[propName] = value; - } - } + if (typeof FormData !== "undefined" && body instanceof FormData) { + return { body }; + } + if ((0, typeGuards_js_1.isReadableStream)(body)) { + return { body }; + } + if (ArrayBuffer.isView(body)) { + return { body: body instanceof Uint8Array ? body : JSON.stringify(body) }; + } + const firstType = contentType.split(";")[0]; + switch (firstType) { + case "application/json": + return { body: JSON.stringify(body) }; + case "multipart/form-data": + if (Array.isArray(body)) { + return { multipartBody: (0, multipart_js_1.buildMultipartBody)(body) }; } - } - const additionalPropertiesMapper = resolveAdditionalProperties(serializer, mapper, objectName); - if (additionalPropertiesMapper) { - const propNames = Object.keys(modelProps); - for (const clientPropName in object) { - const isAdditionalProperty = propNames.every((pn) => pn !== clientPropName); - if (isAdditionalProperty) { - payload[clientPropName] = serializer.serialize(additionalPropertiesMapper, object[clientPropName], objectName + '["' + clientPropName + '"]', options); - } + return { body: JSON.stringify(body) }; + case "text/plain": + return { body: String(body) }; + default: + if (typeof body === "string") { + return { body }; } - } - return payload; + return { body: JSON.stringify(body) }; } - return object; } - function getXmlObjectValue(propertyMapper, serializedValue, isXml, options) { - if (!isXml || !propertyMapper.xmlNamespace) { - return serializedValue; + function getResponseBody(response) { + const contentType = response.headers.get("content-type") ?? ""; + const firstType = contentType.split(";")[0]; + const bodyToParse = response.bodyAsText ?? ""; + if (firstType === "text/plain") { + return String(bodyToParse); } - const xmlnsKey = propertyMapper.xmlNamespacePrefix ? `xmlns:${propertyMapper.xmlNamespacePrefix}` : "xmlns"; - const xmlNamespace = { [xmlnsKey]: propertyMapper.xmlNamespace }; - if (["Composite"].includes(propertyMapper.type.name)) { - if (serializedValue[interfaces_js_1.XML_ATTRKEY]) { - return serializedValue; - } else { - const result2 = { ...serializedValue }; - result2[interfaces_js_1.XML_ATTRKEY] = xmlNamespace; - return result2; + try { + return bodyToParse ? 
JSON.parse(bodyToParse) : void 0; + } catch (error3) { + if (firstType === "application/json") { + throw createParseError(response, error3); } + return String(bodyToParse); } - const result = {}; - result[options.xml.xmlCharKey] = serializedValue; - result[interfaces_js_1.XML_ATTRKEY] = xmlNamespace; - return result; } - function isSpecialXmlProperty(propertyName, options) { - return [interfaces_js_1.XML_ATTRKEY, options.xml.xmlCharKey].includes(propertyName); + function createParseError(response, err) { + const msg = `Error "${err}" occurred while parsing the response body - ${response.bodyAsText}.`; + const errCode = err.code ?? restError_js_1.RestError.PARSE_ERROR; + return new restError_js_1.RestError(msg, { + code: errCode, + statusCode: response.status, + request: response.request, + response + }); } - function deserializeCompositeType(serializer, mapper, responseBody, objectName, options) { - const xmlCharKey = options.xml.xmlCharKey ?? interfaces_js_1.XML_CHARKEY; - if (getPolymorphicDiscriminatorRecursively(serializer, mapper)) { - mapper = getPolymorphicMapper(serializer, mapper, responseBody, "serializedName"); + } +}); + +// node_modules/@typespec/ts-http-runtime/dist/commonjs/client/urlHelpers.js +var require_urlHelpers = __commonJS({ + "node_modules/@typespec/ts-http-runtime/dist/commonjs/client/urlHelpers.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.buildRequestUrl = buildRequestUrl; + exports2.buildBaseUrl = buildBaseUrl; + exports2.replaceAll = replaceAll; + function isQueryParameterWithOptions(x) { + const value = x.value; + return value !== void 0 && value.toString !== void 0 && typeof value.toString === "function"; + } + function buildRequestUrl(endpoint2, routePath, pathParameters, options = {}) { + if (routePath.startsWith("https://") || routePath.startsWith("http://")) { + return routePath; } - const modelProps = resolveModelProperties(serializer, mapper, objectName); - let instance = {}; - const handledPropertyNames = []; - for (const key of Object.keys(modelProps)) { - const propertyMapper = modelProps[key]; - const paths = splitSerializeName(modelProps[key].serializedName); - handledPropertyNames.push(paths[0]); - const { serializedName, xmlName, xmlElementName } = propertyMapper; - let propertyObjectName = objectName; - if (serializedName !== "" && serializedName !== void 0) { - propertyObjectName = objectName + "." 
+ serializedName; + endpoint2 = buildBaseUrl(endpoint2, options); + routePath = buildRoutePath(routePath, pathParameters, options); + const requestUrl = appendQueryParams(`${endpoint2}/${routePath}`, options); + const url2 = new URL(requestUrl); + return url2.toString().replace(/([^:]\/)\/+/g, "$1"); + } + function getQueryParamValue(key, allowReserved, style, param) { + let separator; + if (style === "pipeDelimited") { + separator = "|"; + } else if (style === "spaceDelimited") { + separator = "%20"; + } else { + separator = ","; + } + let paramValues; + if (Array.isArray(param)) { + paramValues = param; + } else if (typeof param === "object" && param.toString === Object.prototype.toString) { + paramValues = Object.entries(param).flat(); + } else { + paramValues = [param]; + } + const value = paramValues.map((p) => { + if (p === null || p === void 0) { + return ""; } - const headerCollectionPrefix = propertyMapper.headerCollectionPrefix; - if (headerCollectionPrefix) { - const dictionary = {}; - for (const headerKey of Object.keys(responseBody)) { - if (headerKey.startsWith(headerCollectionPrefix)) { - dictionary[headerKey.substring(headerCollectionPrefix.length)] = serializer.deserialize(propertyMapper.type.value, responseBody[headerKey], propertyObjectName, options); + if (!p.toString || typeof p.toString !== "function") { + throw new Error(`Query parameters must be able to be represented as string, ${key} can't`); + } + const rawValue = p.toISOString !== void 0 ? p.toISOString() : p.toString(); + return allowReserved ? rawValue : encodeURIComponent(rawValue); + }).join(separator); + return `${allowReserved ? key : encodeURIComponent(key)}=${value}`; + } + function appendQueryParams(url2, options = {}) { + if (!options.queryParameters) { + return url2; + } + const parsedUrl = new URL(url2); + const queryParams = options.queryParameters; + const paramStrings = []; + for (const key of Object.keys(queryParams)) { + const param = queryParams[key]; + if (param === void 0 || param === null) { + continue; + } + const hasMetadata = isQueryParameterWithOptions(param); + const rawValue = hasMetadata ? param.value : param; + const explode = hasMetadata ? param.explode ?? false : false; + const style = hasMetadata && param.style ? param.style : "form"; + if (explode) { + if (Array.isArray(rawValue)) { + for (const item of rawValue) { + paramStrings.push(getQueryParamValue(key, options.skipUrlEncoding ?? false, style, item)); } - handledPropertyNames.push(headerKey); - } - instance[key] = dictionary; - } else if (serializer.isXML) { - if (propertyMapper.xmlIsAttribute && responseBody[interfaces_js_1.XML_ATTRKEY]) { - instance[key] = serializer.deserialize(propertyMapper, responseBody[interfaces_js_1.XML_ATTRKEY][xmlName], propertyObjectName, options); - } else if (propertyMapper.xmlIsMsText) { - if (responseBody[xmlCharKey] !== void 0) { - instance[key] = responseBody[xmlCharKey]; - } else if (typeof responseBody === "string") { - instance[key] = responseBody; + } else if (typeof rawValue === "object") { + for (const [actualKey, value] of Object.entries(rawValue)) { + paramStrings.push(getQueryParamValue(actualKey, options.skipUrlEncoding ?? false, style, value)); } } else { - const propertyName = xmlElementName || xmlName || serializedName; - if (propertyMapper.xmlIsWrapped) { - const wrapped = responseBody[xmlName]; - const elementList = wrapped?.[xmlElementName] ?? 
[]; - instance[key] = serializer.deserialize(propertyMapper, elementList, propertyObjectName, options); - handledPropertyNames.push(xmlName); - } else { - const property = responseBody[propertyName]; - instance[key] = serializer.deserialize(propertyMapper, property, propertyObjectName, options); - handledPropertyNames.push(propertyName); - } + throw new Error("explode can only be set to true for objects and arrays"); } } else { - let propertyInstance; - let res = responseBody; - let steps = 0; - for (const item of paths) { - if (!res) - break; - steps++; - res = res[item]; - } - if (res === null && steps < paths.length) { - res = void 0; - } - propertyInstance = res; - const polymorphicDiscriminator = mapper.type.polymorphicDiscriminator; - if (polymorphicDiscriminator && key === polymorphicDiscriminator.clientName && (propertyInstance === void 0 || propertyInstance === null)) { - propertyInstance = mapper.serializedName; - } - let serializedValue; - if (Array.isArray(responseBody[key]) && modelProps[key].serializedName === "") { - propertyInstance = responseBody[key]; - const arrayInstance = serializer.deserialize(propertyMapper, propertyInstance, propertyObjectName, options); - for (const [k, v] of Object.entries(instance)) { - if (!Object.prototype.hasOwnProperty.call(arrayInstance, k)) { - arrayInstance[k] = v; - } - } - instance = arrayInstance; - } else if (propertyInstance !== void 0 || propertyMapper.defaultValue !== void 0) { - serializedValue = serializer.deserialize(propertyMapper, propertyInstance, propertyObjectName, options); - instance[key] = serializedValue; - } - } - } - const additionalPropertiesMapper = mapper.type.additionalProperties; - if (additionalPropertiesMapper) { - const isAdditionalProperty = (responsePropName) => { - for (const clientPropName in modelProps) { - const paths = splitSerializeName(modelProps[clientPropName].serializedName); - if (paths[0] === responsePropName) { - return false; - } - } - return true; - }; - for (const responsePropName in responseBody) { - if (isAdditionalProperty(responsePropName)) { - instance[responsePropName] = serializer.deserialize(additionalPropertiesMapper, responseBody[responsePropName], objectName + '["' + responsePropName + '"]', options); - } - } - } else if (responseBody && !options.ignoreUnknownProperties) { - for (const key of Object.keys(responseBody)) { - if (instance[key] === void 0 && !handledPropertyNames.includes(key) && !isSpecialXmlProperty(key, options)) { - instance[key] = responseBody[key]; - } + paramStrings.push(getQueryParamValue(key, options.skipUrlEncoding ?? 
false, style, rawValue)); } } - return instance; - } - function deserializeDictionaryType(serializer, mapper, responseBody, objectName, options) { - const value = mapper.type.value; - if (!value || typeof value !== "object") { - throw new Error(`"value" metadata for a Dictionary must be defined in the mapper and it must of type "object" in ${objectName}`); - } - if (responseBody) { - const tempDictionary = {}; - for (const key of Object.keys(responseBody)) { - tempDictionary[key] = serializer.deserialize(value, responseBody[key], objectName, options); - } - return tempDictionary; + if (parsedUrl.search !== "") { + parsedUrl.search += "&"; } - return responseBody; + parsedUrl.search += paramStrings.join("&"); + return parsedUrl.toString(); } - function deserializeSequenceType(serializer, mapper, responseBody, objectName, options) { - let element = mapper.type.element; - if (!element || typeof element !== "object") { - throw new Error(`element" metadata for an Array must be defined in the mapper and it must of type "object" in ${objectName}`); + function buildBaseUrl(endpoint2, options) { + if (!options.pathParameters) { + return endpoint2; } - if (responseBody) { - if (!Array.isArray(responseBody)) { - responseBody = [responseBody]; + const pathParams = options.pathParameters; + for (const [key, param] of Object.entries(pathParams)) { + if (param === void 0 || param === null) { + throw new Error(`Path parameters ${key} must not be undefined or null`); } - if (element.type.name === "Composite" && element.type.className) { - element = serializer.modelMappers[element.type.className] ?? element; + if (!param.toString || typeof param.toString !== "function") { + throw new Error(`Path parameters must be able to be represented as string, ${key} can't`); } - const tempArray = []; - for (let i = 0; i < responseBody.length; i++) { - tempArray[i] = serializer.deserialize(element, responseBody[i], `${objectName}[${i}]`, options); + let value = param.toISOString !== void 0 ? param.toISOString() : String(param); + if (!options.skipUrlEncoding) { + value = encodeURIComponent(param); } - return tempArray; + endpoint2 = replaceAll(endpoint2, `{${key}}`, value) ?? ""; } - return responseBody; - } - function getIndexDiscriminator(discriminators, discriminatorValue, typeName) { - const typeNamesToCheck = [typeName]; - while (typeNamesToCheck.length) { - const currentName = typeNamesToCheck.shift(); - const indexDiscriminator = discriminatorValue === currentName ? discriminatorValue : currentName + "." + discriminatorValue; - if (Object.prototype.hasOwnProperty.call(discriminators, indexDiscriminator)) { - return discriminators[indexDiscriminator]; - } else { - for (const [name, mapper] of Object.entries(discriminators)) { - if (name.startsWith(currentName + ".") && mapper.type.uberParent === currentName && mapper.type.className) { - typeNamesToCheck.push(mapper.type.className); - } - } - } - } - return void 0; + return endpoint2; } - function getPolymorphicMapper(serializer, mapper, object, polymorphicPropertyName) { - const polymorphicDiscriminator = getPolymorphicDiscriminatorRecursively(serializer, mapper); - if (polymorphicDiscriminator) { - let discriminatorName = polymorphicDiscriminator[polymorphicPropertyName]; - if (discriminatorName) { - if (polymorphicPropertyName === "serializedName") { - discriminatorName = discriminatorName.replace(/\\/gi, ""); - } - const discriminatorValue = object[discriminatorName]; - const typeName = mapper.type.uberParent ?? 
mapper.type.className; - if (typeof discriminatorValue === "string" && typeName) { - const polymorphicMapper = getIndexDiscriminator(serializer.modelMappers.discriminators, discriminatorValue, typeName); - if (polymorphicMapper) { - mapper = polymorphicMapper; - } - } + function buildRoutePath(routePath, pathParameters, options = {}) { + for (const pathParam of pathParameters) { + const allowReserved = typeof pathParam === "object" && (pathParam.allowReserved ?? false); + let value = typeof pathParam === "object" ? pathParam.value : pathParam; + if (!options.skipUrlEncoding && !allowReserved) { + value = encodeURIComponent(value); } + routePath = routePath.replace(/\{[\w-]+\}/, String(value)); } - return mapper; - } - function getPolymorphicDiscriminatorRecursively(serializer, mapper) { - return mapper.type.polymorphicDiscriminator || getPolymorphicDiscriminatorSafely(serializer, mapper.type.uberParent) || getPolymorphicDiscriminatorSafely(serializer, mapper.type.className); + return routePath; } - function getPolymorphicDiscriminatorSafely(serializer, typeName) { - return typeName && serializer.modelMappers[typeName] && serializer.modelMappers[typeName].type.polymorphicDiscriminator; + function replaceAll(value, searchValue, replaceValue) { + return !value || !searchValue ? value : value.split(searchValue).join(replaceValue || ""); } - exports2.MapperTypeNames = { - Base64Url: "Base64Url", - Boolean: "Boolean", - ByteArray: "ByteArray", - Composite: "Composite", - Date: "Date", - DateTime: "DateTime", - DateTimeRfc1123: "DateTimeRfc1123", - Dictionary: "Dictionary", - Enum: "Enum", - Number: "Number", - Object: "Object", - Sequence: "Sequence", - String: "String", - Stream: "Stream", - TimeSpan: "TimeSpan", - UnixTime: "UnixTime" - }; - } -}); - -// node_modules/@azure/core-client/dist/commonjs/state.js -var require_state2 = __commonJS({ - "node_modules/@azure/core-client/dist/commonjs/state.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.state = void 0; - exports2.state = { - operationRequestMap: /* @__PURE__ */ new WeakMap() - }; } }); -// node_modules/@azure/core-client/dist/commonjs/operationHelpers.js -var require_operationHelpers = __commonJS({ - "node_modules/@azure/core-client/dist/commonjs/operationHelpers.js"(exports2) { +// node_modules/@typespec/ts-http-runtime/dist/commonjs/client/getClient.js +var require_getClient = __commonJS({ + "node_modules/@typespec/ts-http-runtime/dist/commonjs/client/getClient.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.getOperationArgumentValueFromParameter = getOperationArgumentValueFromParameter; - exports2.getOperationRequestInfo = getOperationRequestInfo; - var state_js_1 = require_state2(); - function getOperationArgumentValueFromParameter(operationArguments, parameter, fallbackObject) { - let parameterPath = parameter.parameterPath; - const parameterMapper = parameter.mapper; - let value; - if (typeof parameterPath === "string") { - parameterPath = [parameterPath]; + exports2.getClient = getClient; + var clientHelpers_js_1 = require_clientHelpers(); + var sendRequest_js_1 = require_sendRequest(); + var urlHelpers_js_1 = require_urlHelpers(); + var checkEnvironment_js_1 = require_checkEnvironment(); + function getClient(endpoint2, clientOptions = {}) { + const pipeline = clientOptions.pipeline ?? 
(0, clientHelpers_js_1.createDefaultPipeline)(clientOptions); + if (clientOptions.additionalPolicies?.length) { + for (const { policy, position } of clientOptions.additionalPolicies) { + const afterPhase = position === "perRetry" ? "Sign" : void 0; + pipeline.addPolicy(policy, { + afterPhase + }); + } } - if (Array.isArray(parameterPath)) { - if (parameterPath.length > 0) { - if (parameterMapper.isConstant) { - value = parameterMapper.defaultValue; + const { allowInsecureConnection, httpClient } = clientOptions; + const endpointUrl = clientOptions.endpoint ?? endpoint2; + const client = (path13, ...args) => { + const getUrl = (requestOptions) => (0, urlHelpers_js_1.buildRequestUrl)(endpointUrl, path13, args, { allowInsecureConnection, ...requestOptions }); + return { + get: (requestOptions = {}) => { + return buildOperation("GET", getUrl(requestOptions), pipeline, requestOptions, allowInsecureConnection, httpClient); + }, + post: (requestOptions = {}) => { + return buildOperation("POST", getUrl(requestOptions), pipeline, requestOptions, allowInsecureConnection, httpClient); + }, + put: (requestOptions = {}) => { + return buildOperation("PUT", getUrl(requestOptions), pipeline, requestOptions, allowInsecureConnection, httpClient); + }, + patch: (requestOptions = {}) => { + return buildOperation("PATCH", getUrl(requestOptions), pipeline, requestOptions, allowInsecureConnection, httpClient); + }, + delete: (requestOptions = {}) => { + return buildOperation("DELETE", getUrl(requestOptions), pipeline, requestOptions, allowInsecureConnection, httpClient); + }, + head: (requestOptions = {}) => { + return buildOperation("HEAD", getUrl(requestOptions), pipeline, requestOptions, allowInsecureConnection, httpClient); + }, + options: (requestOptions = {}) => { + return buildOperation("OPTIONS", getUrl(requestOptions), pipeline, requestOptions, allowInsecureConnection, httpClient); + }, + trace: (requestOptions = {}) => { + return buildOperation("TRACE", getUrl(requestOptions), pipeline, requestOptions, allowInsecureConnection, httpClient); + } + }; + }; + return { + path: client, + pathUnchecked: client, + pipeline + }; + } + function buildOperation(method, url2, pipeline, options, allowInsecureConnection, httpClient) { + allowInsecureConnection = options.allowInsecureConnection ?? allowInsecureConnection; + return { + then: function(onFulfilled, onrejected) { + return (0, sendRequest_js_1.sendRequest)(method, url2, pipeline, { ...options, allowInsecureConnection }, httpClient).then(onFulfilled, onrejected); + }, + async asBrowserStream() { + if (checkEnvironment_js_1.isNodeLike) { + throw new Error("`asBrowserStream` is supported only in the browser environment. Use `asNodeStream` instead to obtain the response body stream. If you require a Web stream of the response in Node, consider using `Readable.toWeb` on the result of `asNodeStream`."); } else { - let propertySearchResult = getPropertyFromParameterPath(operationArguments, parameterPath); - if (!propertySearchResult.propertyFound && fallbackObject) { - propertySearchResult = getPropertyFromParameterPath(fallbackObject, parameterPath); - } - let useDefaultValue = false; - if (!propertySearchResult.propertyFound) { - useDefaultValue = parameterMapper.required || parameterPath[0] === "options" && parameterPath.length === 2; - } - value = useDefaultValue ? 
parameterMapper.defaultValue : propertySearchResult.propertyValue; + return (0, sendRequest_js_1.sendRequest)(method, url2, pipeline, { ...options, allowInsecureConnection, responseAsStream: true }, httpClient); } - } - } else { - if (parameterMapper.required) { - value = {}; - } - for (const propertyName in parameterPath) { - const propertyMapper = parameterMapper.type.modelProperties[propertyName]; - const propertyPath = parameterPath[propertyName]; - const propertyValue = getOperationArgumentValueFromParameter(operationArguments, { - parameterPath: propertyPath, - mapper: propertyMapper - }, fallbackObject); - if (propertyValue !== void 0) { - if (!value) { - value = {}; - } - value[propertyName] = propertyValue; + }, + async asNodeStream() { + if (checkEnvironment_js_1.isNodeLike) { + return (0, sendRequest_js_1.sendRequest)(method, url2, pipeline, { ...options, allowInsecureConnection, responseAsStream: true }, httpClient); + } else { + throw new Error("`isNodeStream` is not supported in the browser environment. Use `asBrowserStream` to obtain the response body stream."); } } - } - return value; - } - function getPropertyFromParameterPath(parent, parameterPath) { - const result = { propertyFound: false }; - let i = 0; - for (; i < parameterPath.length; ++i) { - const parameterPathPart = parameterPath[i]; - if (parent && parameterPathPart in parent) { - parent = parent[parameterPathPart]; - } else { - break; - } - } - if (i === parameterPath.length) { - result.propertyValue = parent; - result.propertyFound = true; - } - return result; - } - var originalRequestSymbol = /* @__PURE__ */ Symbol.for("@azure/core-client original request"); - function hasOriginalRequest(request2) { - return originalRequestSymbol in request2; - } - function getOperationRequestInfo(request2) { - if (hasOriginalRequest(request2)) { - return getOperationRequestInfo(request2[originalRequestSymbol]); - } - let info6 = state_js_1.state.operationRequestMap.get(request2); - if (!info6) { - info6 = {}; - state_js_1.state.operationRequestMap.set(request2, info6); - } - return info6; + }; } } }); -// node_modules/@azure/core-client/dist/commonjs/deserializationPolicy.js -var require_deserializationPolicy = __commonJS({ - "node_modules/@azure/core-client/dist/commonjs/deserializationPolicy.js"(exports2) { +// node_modules/@typespec/ts-http-runtime/dist/commonjs/client/operationOptionHelpers.js +var require_operationOptionHelpers = __commonJS({ + "node_modules/@typespec/ts-http-runtime/dist/commonjs/client/operationOptionHelpers.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.deserializationPolicyName = void 0; - exports2.deserializationPolicy = deserializationPolicy; - var interfaces_js_1 = require_interfaces(); - var core_rest_pipeline_1 = require_commonjs6(); - var serializer_js_1 = require_serializer(); - var operationHelpers_js_1 = require_operationHelpers(); - var defaultJsonContentTypes = ["application/json", "text/json"]; - var defaultXmlContentTypes = ["application/xml", "application/atom+xml"]; - exports2.deserializationPolicyName = "deserializationPolicy"; - function deserializationPolicy(options = {}) { - const jsonContentTypes = options.expectedContentTypes?.json ?? defaultJsonContentTypes; - const xmlContentTypes = options.expectedContentTypes?.xml ?? defaultXmlContentTypes; - const parseXML = options.parseXML; - const serializerOptions = options.serializerOptions; - const updatedOptions = { - xml: { - rootName: serializerOptions?.xml.rootName ?? 
"", - includeRoot: serializerOptions?.xml.includeRoot ?? false, - xmlCharKey: serializerOptions?.xml.xmlCharKey ?? interfaces_js_1.XML_CHARKEY - } - }; + exports2.operationOptionsToRequestParameters = operationOptionsToRequestParameters; + function operationOptionsToRequestParameters(options) { return { - name: exports2.deserializationPolicyName, - async sendRequest(request2, next) { - const response = await next(request2); - return deserializeResponseBody(jsonContentTypes, xmlContentTypes, response, updatedOptions, parseXML); - } + allowInsecureConnection: options.requestOptions?.allowInsecureConnection, + timeout: options.requestOptions?.timeout, + skipUrlEncoding: options.requestOptions?.skipUrlEncoding, + abortSignal: options.abortSignal, + onUploadProgress: options.requestOptions?.onUploadProgress, + onDownloadProgress: options.requestOptions?.onDownloadProgress, + headers: { ...options.requestOptions?.headers }, + onResponse: options.onResponse }; } - function getOperationResponseMap(parsedResponse) { - let result; - const request2 = parsedResponse.request; - const operationInfo = (0, operationHelpers_js_1.getOperationRequestInfo)(request2); - const operationSpec = operationInfo?.operationSpec; - if (operationSpec) { - if (!operationInfo?.operationResponseGetter) { - result = operationSpec.responses[parsedResponse.status]; - } else { - result = operationInfo?.operationResponseGetter(operationSpec, parsedResponse); - } - } - return result; - } - function shouldDeserializeResponse(parsedResponse) { - const request2 = parsedResponse.request; - const operationInfo = (0, operationHelpers_js_1.getOperationRequestInfo)(request2); - const shouldDeserialize = operationInfo?.shouldDeserialize; - let result; - if (shouldDeserialize === void 0) { - result = true; - } else if (typeof shouldDeserialize === "boolean") { - result = shouldDeserialize; - } else { - result = shouldDeserialize(parsedResponse); - } - return result; - } - async function deserializeResponseBody(jsonContentTypes, xmlContentTypes, response, options, parseXML) { - const parsedResponse = await parse2(jsonContentTypes, xmlContentTypes, response, options, parseXML); - if (!shouldDeserializeResponse(parsedResponse)) { - return parsedResponse; - } - const operationInfo = (0, operationHelpers_js_1.getOperationRequestInfo)(parsedResponse.request); - const operationSpec = operationInfo?.operationSpec; - if (!operationSpec || !operationSpec.responses) { - return parsedResponse; - } - const responseSpec = getOperationResponseMap(parsedResponse); - const { error: error3, shouldReturnResponse } = handleErrorResponse(parsedResponse, operationSpec, responseSpec, options); - if (error3) { - throw error3; - } else if (shouldReturnResponse) { - return parsedResponse; - } - if (responseSpec) { - if (responseSpec.bodyMapper) { - let valueToDeserialize = parsedResponse.parsedBody; - if (operationSpec.isXML && responseSpec.bodyMapper.type.name === serializer_js_1.MapperTypeNames.Sequence) { - valueToDeserialize = typeof valueToDeserialize === "object" ? 
valueToDeserialize[responseSpec.bodyMapper.xmlElementName] : []; - } - try { - parsedResponse.parsedBody = operationSpec.serializer.deserialize(responseSpec.bodyMapper, valueToDeserialize, "operationRes.parsedBody", options); - } catch (deserializeError) { - const restError = new core_rest_pipeline_1.RestError(`Error ${deserializeError} occurred in deserializing the responseBody - ${parsedResponse.bodyAsText}`, { - statusCode: parsedResponse.status, - request: parsedResponse.request, - response: parsedResponse - }); - throw restError; - } - } else if (operationSpec.httpMethod === "HEAD") { - parsedResponse.parsedBody = response.status >= 200 && response.status < 300; - } - if (responseSpec.headersMapper) { - parsedResponse.parsedHeaders = operationSpec.serializer.deserialize(responseSpec.headersMapper, parsedResponse.headers.toJSON(), "operationRes.parsedHeaders", { xml: {}, ignoreUnknownProperties: true }); - } - } - return parsedResponse; - } - function isOperationSpecEmpty(operationSpec) { - const expectedStatusCodes = Object.keys(operationSpec.responses); - return expectedStatusCodes.length === 0 || expectedStatusCodes.length === 1 && expectedStatusCodes[0] === "default"; - } - function handleErrorResponse(parsedResponse, operationSpec, responseSpec, options) { - const isSuccessByStatus = 200 <= parsedResponse.status && parsedResponse.status < 300; - const isExpectedStatusCode = isOperationSpecEmpty(operationSpec) ? isSuccessByStatus : !!responseSpec; - if (isExpectedStatusCode) { - if (responseSpec) { - if (!responseSpec.isError) { - return { error: null, shouldReturnResponse: false }; - } - } else { - return { error: null, shouldReturnResponse: false }; - } - } - const errorResponseSpec = responseSpec ?? operationSpec.responses.default; - const initialErrorMessage = parsedResponse.request.streamResponseStatusCodes?.has(parsedResponse.status) ? 
`Unexpected status code: ${parsedResponse.status}` : parsedResponse.bodyAsText; - const error3 = new core_rest_pipeline_1.RestError(initialErrorMessage, { - statusCode: parsedResponse.status, - request: parsedResponse.request, - response: parsedResponse - }); - if (!errorResponseSpec && !(parsedResponse.parsedBody?.error?.code && parsedResponse.parsedBody?.error?.message)) { - throw error3; - } - const defaultBodyMapper = errorResponseSpec?.bodyMapper; - const defaultHeadersMapper = errorResponseSpec?.headersMapper; - try { - if (parsedResponse.parsedBody) { - const parsedBody = parsedResponse.parsedBody; - let deserializedError; - if (defaultBodyMapper) { - let valueToDeserialize = parsedBody; - if (operationSpec.isXML && defaultBodyMapper.type.name === serializer_js_1.MapperTypeNames.Sequence) { - valueToDeserialize = []; - const elementName = defaultBodyMapper.xmlElementName; - if (typeof parsedBody === "object" && elementName) { - valueToDeserialize = parsedBody[elementName]; - } - } - deserializedError = operationSpec.serializer.deserialize(defaultBodyMapper, valueToDeserialize, "error.response.parsedBody", options); - } - const internalError = parsedBody.error || deserializedError || parsedBody; - error3.code = internalError.code; - if (internalError.message) { - error3.message = internalError.message; - } - if (defaultBodyMapper) { - error3.response.parsedBody = deserializedError; - } - } - if (parsedResponse.headers && defaultHeadersMapper) { - error3.response.parsedHeaders = operationSpec.serializer.deserialize(defaultHeadersMapper, parsedResponse.headers.toJSON(), "operationRes.parsedHeaders"); - } - } catch (defaultError) { - error3.message = `Error "${defaultError.message}" occurred in deserializing the responseBody - "${parsedResponse.bodyAsText}" for the default response.`; - } - return { error: error3, shouldReturnResponse: false }; - } - async function parse2(jsonContentTypes, xmlContentTypes, operationResponse, opts, parseXML) { - if (!operationResponse.request.streamResponseStatusCodes?.has(operationResponse.status) && operationResponse.bodyAsText) { - const text = operationResponse.bodyAsText; - const contentType = operationResponse.headers.get("Content-Type") || ""; - const contentComponents = !contentType ? 
[] : contentType.split(";").map((component) => component.toLowerCase()); - try { - if (contentComponents.length === 0 || contentComponents.some((component) => jsonContentTypes.indexOf(component) !== -1)) { - operationResponse.parsedBody = JSON.parse(text); - return operationResponse; - } else if (contentComponents.some((component) => xmlContentTypes.indexOf(component) !== -1)) { - if (!parseXML) { - throw new Error("Parsing XML not supported."); - } - const body = await parseXML(text, opts.xml); - operationResponse.parsedBody = body; - return operationResponse; - } - } catch (err) { - const msg = `Error "${err}" occurred while parsing the response body - ${operationResponse.bodyAsText}.`; - const errCode = err.code || core_rest_pipeline_1.RestError.PARSE_ERROR; - const e = new core_rest_pipeline_1.RestError(msg, { - code: errCode, - statusCode: operationResponse.status, - request: operationResponse.request, - response: operationResponse - }); - throw e; - } - } - return operationResponse; - } } }); -// node_modules/@azure/core-client/dist/commonjs/interfaceHelpers.js -var require_interfaceHelpers = __commonJS({ - "node_modules/@azure/core-client/dist/commonjs/interfaceHelpers.js"(exports2) { +// node_modules/@typespec/ts-http-runtime/dist/commonjs/client/restError.js +var require_restError2 = __commonJS({ + "node_modules/@typespec/ts-http-runtime/dist/commonjs/client/restError.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.getStreamingResponseStatusCodes = getStreamingResponseStatusCodes; - exports2.getPathStringFromParameter = getPathStringFromParameter; - var serializer_js_1 = require_serializer(); - function getStreamingResponseStatusCodes(operationSpec) { - const result = /* @__PURE__ */ new Set(); - for (const statusCode in operationSpec.responses) { - const operationResponse = operationSpec.responses[statusCode]; - if (operationResponse.bodyMapper && operationResponse.bodyMapper.type.name === serializer_js_1.MapperTypeNames.Stream) { - result.add(Number(statusCode)); - } - } - return result; + exports2.createRestError = createRestError; + var restError_js_1 = require_restError(); + var httpHeaders_js_1 = require_httpHeaders(); + function createRestError(messageOrResponse, response) { + const resp = typeof messageOrResponse === "string" ? response : messageOrResponse; + const internalError = resp.body?.error ?? resp.body; + const message = typeof messageOrResponse === "string" ? messageOrResponse : internalError?.message ?? `Unexpected status code: ${resp.status}`; + return new restError_js_1.RestError(message, { + statusCode: statusCodeToNumber(resp.status), + code: internalError?.code, + request: resp.request, + response: toPipelineResponse(resp) + }); } - function getPathStringFromParameter(parameter) { - const { parameterPath, mapper } = parameter; - let result; - if (typeof parameterPath === "string") { - result = parameterPath; - } else if (Array.isArray(parameterPath)) { - result = parameterPath.join("."); - } else { - result = mapper.serializedName; - } - return result; + function toPipelineResponse(response) { + return { + headers: (0, httpHeaders_js_1.createHttpHeaders)(response.headers), + request: response.request, + status: statusCodeToNumber(response.status) ?? -1 + }; + } + function statusCodeToNumber(statusCode) { + const status = Number.parseInt(statusCode); + return Number.isNaN(status) ? 
void 0 : status; } } }); -// node_modules/@azure/core-client/dist/commonjs/serializationPolicy.js -var require_serializationPolicy = __commonJS({ - "node_modules/@azure/core-client/dist/commonjs/serializationPolicy.js"(exports2) { +// node_modules/@typespec/ts-http-runtime/dist/commonjs/index.js +var require_commonjs = __commonJS({ + "node_modules/@typespec/ts-http-runtime/dist/commonjs/index.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.serializationPolicyName = void 0; - exports2.serializationPolicy = serializationPolicy; - exports2.serializeHeaders = serializeHeaders; - exports2.serializeRequestBody = serializeRequestBody; - var interfaces_js_1 = require_interfaces(); - var operationHelpers_js_1 = require_operationHelpers(); - var serializer_js_1 = require_serializer(); - var interfaceHelpers_js_1 = require_interfaceHelpers(); - exports2.serializationPolicyName = "serializationPolicy"; - function serializationPolicy(options = {}) { - const stringifyXML = options.stringifyXML; - return { - name: exports2.serializationPolicyName, - async sendRequest(request2, next) { - const operationInfo = (0, operationHelpers_js_1.getOperationRequestInfo)(request2); - const operationSpec = operationInfo?.operationSpec; - const operationArguments = operationInfo?.operationArguments; - if (operationSpec && operationArguments) { - serializeHeaders(request2, operationArguments, operationSpec); - serializeRequestBody(request2, operationArguments, operationSpec, stringifyXML); - } - return next(request2); - } - }; - } - function serializeHeaders(request2, operationArguments, operationSpec) { - if (operationSpec.headerParameters) { - for (const headerParameter of operationSpec.headerParameters) { - let headerValue = (0, operationHelpers_js_1.getOperationArgumentValueFromParameter)(operationArguments, headerParameter); - if (headerValue !== null && headerValue !== void 0 || headerParameter.mapper.required) { - headerValue = operationSpec.serializer.serialize(headerParameter.mapper, headerValue, (0, interfaceHelpers_js_1.getPathStringFromParameter)(headerParameter)); - const headerCollectionPrefix = headerParameter.mapper.headerCollectionPrefix; - if (headerCollectionPrefix) { - for (const key of Object.keys(headerValue)) { - request2.headers.set(headerCollectionPrefix + key, headerValue[key]); - } - } else { - request2.headers.set(headerParameter.mapper.serializedName || (0, interfaceHelpers_js_1.getPathStringFromParameter)(headerParameter), headerValue); - } - } - } - } - const customHeaders = operationArguments.options?.requestOptions?.customHeaders; - if (customHeaders) { - for (const customHeaderName of Object.keys(customHeaders)) { - request2.headers.set(customHeaderName, customHeaders[customHeaderName]); - } - } - } - function serializeRequestBody(request2, operationArguments, operationSpec, stringifyXML = function() { - throw new Error("XML serialization unsupported!"); - }) { - const serializerOptions = operationArguments.options?.serializerOptions; - const updatedOptions = { - xml: { - rootName: serializerOptions?.xml.rootName ?? "", - includeRoot: serializerOptions?.xml.includeRoot ?? false, - xmlCharKey: serializerOptions?.xml.xmlCharKey ?? 
interfaces_js_1.XML_CHARKEY - } - }; - const xmlCharKey = updatedOptions.xml.xmlCharKey; - if (operationSpec.requestBody && operationSpec.requestBody.mapper) { - request2.body = (0, operationHelpers_js_1.getOperationArgumentValueFromParameter)(operationArguments, operationSpec.requestBody); - const bodyMapper = operationSpec.requestBody.mapper; - const { required, serializedName, xmlName, xmlElementName, xmlNamespace, xmlNamespacePrefix, nullable } = bodyMapper; - const typeName = bodyMapper.type.name; - try { - if (request2.body !== void 0 && request2.body !== null || nullable && request2.body === null || required) { - const requestBodyParameterPathString = (0, interfaceHelpers_js_1.getPathStringFromParameter)(operationSpec.requestBody); - request2.body = operationSpec.serializer.serialize(bodyMapper, request2.body, requestBodyParameterPathString, updatedOptions); - const isStream = typeName === serializer_js_1.MapperTypeNames.Stream; - if (operationSpec.isXML) { - const xmlnsKey = xmlNamespacePrefix ? `xmlns:${xmlNamespacePrefix}` : "xmlns"; - const value = getXmlValueWithNamespace(xmlNamespace, xmlnsKey, typeName, request2.body, updatedOptions); - if (typeName === serializer_js_1.MapperTypeNames.Sequence) { - request2.body = stringifyXML(prepareXMLRootList(value, xmlElementName || xmlName || serializedName, xmlnsKey, xmlNamespace), { rootName: xmlName || serializedName, xmlCharKey }); - } else if (!isStream) { - request2.body = stringifyXML(value, { - rootName: xmlName || serializedName, - xmlCharKey - }); - } - } else if (typeName === serializer_js_1.MapperTypeNames.String && (operationSpec.contentType?.match("text/plain") || operationSpec.mediaType === "text")) { - return; - } else if (!isStream) { - request2.body = JSON.stringify(request2.body); - } - } - } catch (error3) { - throw new Error(`Error "${error3.message}" occurred in serializing the payload - ${JSON.stringify(serializedName, void 0, " ")}.`); - } - } else if (operationSpec.formDataParameters && operationSpec.formDataParameters.length > 0) { - request2.formData = {}; - for (const formDataParameter of operationSpec.formDataParameters) { - const formDataParameterValue = (0, operationHelpers_js_1.getOperationArgumentValueFromParameter)(operationArguments, formDataParameter); - if (formDataParameterValue !== void 0 && formDataParameterValue !== null) { - const formDataParameterPropertyName = formDataParameter.mapper.serializedName || (0, interfaceHelpers_js_1.getPathStringFromParameter)(formDataParameter); - request2.formData[formDataParameterPropertyName] = operationSpec.serializer.serialize(formDataParameter.mapper, formDataParameterValue, (0, interfaceHelpers_js_1.getPathStringFromParameter)(formDataParameter), updatedOptions); - } - } - } - } - function getXmlValueWithNamespace(xmlNamespace, xmlnsKey, typeName, serializedValue, options) { - if (xmlNamespace && !["Composite", "Sequence", "Dictionary"].includes(typeName)) { - const result = {}; - result[options.xml.xmlCharKey] = serializedValue; - result[interfaces_js_1.XML_ATTRKEY] = { [xmlnsKey]: xmlNamespace }; - return result; - } - return serializedValue; - } - function prepareXMLRootList(obj, elementName, xmlNamespaceKey, xmlNamespace) { - if (!Array.isArray(obj)) { - obj = [obj]; - } - if (!xmlNamespaceKey || !xmlNamespace) { - return { [elementName]: obj }; - } - const result = { [elementName]: obj }; - result[interfaces_js_1.XML_ATTRKEY] = { [xmlNamespaceKey]: xmlNamespace }; - return result; - } + exports2.createRestError = 
exports2.operationOptionsToRequestParameters = exports2.getClient = exports2.createDefaultHttpClient = exports2.uint8ArrayToString = exports2.stringToUint8Array = exports2.isRestError = exports2.RestError = exports2.createEmptyPipeline = exports2.createPipelineRequest = exports2.createHttpHeaders = exports2.TypeSpecRuntimeLogger = exports2.setLogLevel = exports2.getLogLevel = exports2.createClientLogger = exports2.AbortError = void 0; + var tslib_1 = (init_tslib_es6(), __toCommonJS(tslib_es6_exports)); + var AbortError_js_1 = require_AbortError(); + Object.defineProperty(exports2, "AbortError", { enumerable: true, get: function() { + return AbortError_js_1.AbortError; + } }); + var logger_js_1 = require_logger(); + Object.defineProperty(exports2, "createClientLogger", { enumerable: true, get: function() { + return logger_js_1.createClientLogger; + } }); + Object.defineProperty(exports2, "getLogLevel", { enumerable: true, get: function() { + return logger_js_1.getLogLevel; + } }); + Object.defineProperty(exports2, "setLogLevel", { enumerable: true, get: function() { + return logger_js_1.setLogLevel; + } }); + Object.defineProperty(exports2, "TypeSpecRuntimeLogger", { enumerable: true, get: function() { + return logger_js_1.TypeSpecRuntimeLogger; + } }); + var httpHeaders_js_1 = require_httpHeaders(); + Object.defineProperty(exports2, "createHttpHeaders", { enumerable: true, get: function() { + return httpHeaders_js_1.createHttpHeaders; + } }); + tslib_1.__exportStar(require_schemes(), exports2); + tslib_1.__exportStar(require_oauth2Flows(), exports2); + var pipelineRequest_js_1 = require_pipelineRequest(); + Object.defineProperty(exports2, "createPipelineRequest", { enumerable: true, get: function() { + return pipelineRequest_js_1.createPipelineRequest; + } }); + var pipeline_js_1 = require_pipeline(); + Object.defineProperty(exports2, "createEmptyPipeline", { enumerable: true, get: function() { + return pipeline_js_1.createEmptyPipeline; + } }); + var restError_js_1 = require_restError(); + Object.defineProperty(exports2, "RestError", { enumerable: true, get: function() { + return restError_js_1.RestError; + } }); + Object.defineProperty(exports2, "isRestError", { enumerable: true, get: function() { + return restError_js_1.isRestError; + } }); + var bytesEncoding_js_1 = require_bytesEncoding(); + Object.defineProperty(exports2, "stringToUint8Array", { enumerable: true, get: function() { + return bytesEncoding_js_1.stringToUint8Array; + } }); + Object.defineProperty(exports2, "uint8ArrayToString", { enumerable: true, get: function() { + return bytesEncoding_js_1.uint8ArrayToString; + } }); + var defaultHttpClient_js_1 = require_defaultHttpClient(); + Object.defineProperty(exports2, "createDefaultHttpClient", { enumerable: true, get: function() { + return defaultHttpClient_js_1.createDefaultHttpClient; + } }); + var getClient_js_1 = require_getClient(); + Object.defineProperty(exports2, "getClient", { enumerable: true, get: function() { + return getClient_js_1.getClient; + } }); + var operationOptionHelpers_js_1 = require_operationOptionHelpers(); + Object.defineProperty(exports2, "operationOptionsToRequestParameters", { enumerable: true, get: function() { + return operationOptionHelpers_js_1.operationOptionsToRequestParameters; + } }); + var restError_js_2 = require_restError2(); + Object.defineProperty(exports2, "createRestError", { enumerable: true, get: function() { + return restError_js_2.createRestError; + } }); } }); -// node_modules/@azure/core-client/dist/commonjs/pipeline.js -var 
require_pipeline3 = __commonJS({ - "node_modules/@azure/core-client/dist/commonjs/pipeline.js"(exports2) { +// node_modules/@azure/core-rest-pipeline/dist/commonjs/pipeline.js +var require_pipeline2 = __commonJS({ + "node_modules/@azure/core-rest-pipeline/dist/commonjs/pipeline.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.createClientPipeline = createClientPipeline; - var deserializationPolicy_js_1 = require_deserializationPolicy(); - var core_rest_pipeline_1 = require_commonjs6(); - var serializationPolicy_js_1 = require_serializationPolicy(); - function createClientPipeline(options = {}) { - const pipeline = (0, core_rest_pipeline_1.createPipelineFromOptions)(options ?? {}); - if (options.credentialOptions) { - pipeline.addPolicy((0, core_rest_pipeline_1.bearerTokenAuthenticationPolicy)({ - credential: options.credentialOptions.credential, - scopes: options.credentialOptions.credentialScopes - })); - } - pipeline.addPolicy((0, serializationPolicy_js_1.serializationPolicy)(options.serializationOptions), { phase: "Serialize" }); - pipeline.addPolicy((0, deserializationPolicy_js_1.deserializationPolicy)(options.deserializationOptions), { - phase: "Deserialize" - }); - return pipeline; + exports2.createEmptyPipeline = createEmptyPipeline; + var ts_http_runtime_1 = require_commonjs(); + function createEmptyPipeline() { + return (0, ts_http_runtime_1.createEmptyPipeline)(); } } }); -// node_modules/@azure/core-client/dist/commonjs/httpClientCache.js -var require_httpClientCache = __commonJS({ - "node_modules/@azure/core-client/dist/commonjs/httpClientCache.js"(exports2) { +// node_modules/@typespec/ts-http-runtime/dist/commonjs/logger/internal.js +var require_internal = __commonJS({ + "node_modules/@typespec/ts-http-runtime/dist/commonjs/logger/internal.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.getCachedDefaultHttpClient = getCachedDefaultHttpClient; - var core_rest_pipeline_1 = require_commonjs6(); - var cachedHttpClient; - function getCachedDefaultHttpClient() { - if (!cachedHttpClient) { - cachedHttpClient = (0, core_rest_pipeline_1.createDefaultHttpClient)(); - } - return cachedHttpClient; - } + exports2.createLoggerContext = void 0; + var logger_js_1 = require_logger(); + Object.defineProperty(exports2, "createLoggerContext", { enumerable: true, get: function() { + return logger_js_1.createLoggerContext; + } }); } }); -// node_modules/@azure/core-client/dist/commonjs/urlHelpers.js -var require_urlHelpers2 = __commonJS({ - "node_modules/@azure/core-client/dist/commonjs/urlHelpers.js"(exports2) { +// node_modules/@azure/logger/dist/commonjs/index.js +var require_commonjs2 = __commonJS({ + "node_modules/@azure/logger/dist/commonjs/index.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.getRequestUrl = getRequestUrl; - exports2.appendQueryParams = appendQueryParams; - var operationHelpers_js_1 = require_operationHelpers(); - var interfaceHelpers_js_1 = require_interfaceHelpers(); - var CollectionFormatToDelimiterMap = { - CSV: ",", - SSV: " ", - Multi: "Multi", - TSV: " ", - Pipes: "|" - }; - function getRequestUrl(baseUri, operationSpec, operationArguments, fallbackObject) { - const urlReplacements = calculateUrlReplacements(operationSpec, operationArguments, fallbackObject); - let isAbsolutePath = false; - let requestUrl = replaceAll(baseUri, urlReplacements); - if (operationSpec.path) { - let path13 = 
replaceAll(operationSpec.path, urlReplacements); - if (operationSpec.path === "/{nextLink}" && path13.startsWith("/")) { - path13 = path13.substring(1); - } - if (isAbsoluteUrl(path13)) { - requestUrl = path13; - isAbsolutePath = true; - } else { - requestUrl = appendPath(requestUrl, path13); - } - } - const { queryParams, sequenceParams } = calculateQueryParameters(operationSpec, operationArguments, fallbackObject); - requestUrl = appendQueryParams(requestUrl, queryParams, sequenceParams, isAbsolutePath); - return requestUrl; - } - function replaceAll(input, replacements) { - let result = input; - for (const [searchValue, replaceValue] of replacements) { - result = result.split(searchValue).join(replaceValue); - } - return result; + exports2.AzureLogger = void 0; + exports2.setLogLevel = setLogLevel; + exports2.getLogLevel = getLogLevel; + exports2.createClientLogger = createClientLogger; + var logger_1 = require_internal(); + var context2 = (0, logger_1.createLoggerContext)({ + logLevelEnvVarName: "AZURE_LOG_LEVEL", + namespace: "azure" + }); + exports2.AzureLogger = context2.logger; + function setLogLevel(level) { + context2.setLogLevel(level); } - function calculateUrlReplacements(operationSpec, operationArguments, fallbackObject) { - const result = /* @__PURE__ */ new Map(); - if (operationSpec.urlParameters?.length) { - for (const urlParameter of operationSpec.urlParameters) { - let urlParameterValue = (0, operationHelpers_js_1.getOperationArgumentValueFromParameter)(operationArguments, urlParameter, fallbackObject); - const parameterPathString = (0, interfaceHelpers_js_1.getPathStringFromParameter)(urlParameter); - urlParameterValue = operationSpec.serializer.serialize(urlParameter.mapper, urlParameterValue, parameterPathString); - if (!urlParameter.skipEncoding) { - urlParameterValue = encodeURIComponent(urlParameterValue); - } - result.set(`{${urlParameter.mapper.serializedName || parameterPathString}}`, urlParameterValue); - } - } - return result; + function getLogLevel() { + return context2.getLogLevel(); } - function isAbsoluteUrl(url2) { - return url2.includes("://"); + function createClientLogger(namespace) { + return context2.createClientLogger(namespace); } - function appendPath(url2, pathToAppend) { - if (!pathToAppend) { - return url2; - } - const parsedUrl = new URL(url2); - let newPath = parsedUrl.pathname; - if (!newPath.endsWith("/")) { - newPath = `${newPath}/`; - } - if (pathToAppend.startsWith("/")) { - pathToAppend = pathToAppend.substring(1); - } - const searchStart = pathToAppend.indexOf("?"); - if (searchStart !== -1) { - const path13 = pathToAppend.substring(0, searchStart); - const search = pathToAppend.substring(searchStart + 1); - newPath = newPath + path13; - if (search) { - parsedUrl.search = parsedUrl.search ? 
`${parsedUrl.search}&${search}` : search; - } - } else { - newPath = newPath + pathToAppend; - } - parsedUrl.pathname = newPath; - return parsedUrl.toString(); + } +}); + +// node_modules/@azure/core-rest-pipeline/dist/commonjs/log.js +var require_log3 = __commonJS({ + "node_modules/@azure/core-rest-pipeline/dist/commonjs/log.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.logger = void 0; + var logger_1 = require_commonjs2(); + exports2.logger = (0, logger_1.createClientLogger)("core-rest-pipeline"); + } +}); + +// node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/exponentialRetryPolicy.js +var require_exponentialRetryPolicy = __commonJS({ + "node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/exponentialRetryPolicy.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.exponentialRetryPolicyName = void 0; + exports2.exponentialRetryPolicy = exponentialRetryPolicy; + var exponentialRetryStrategy_js_1 = require_exponentialRetryStrategy(); + var retryPolicy_js_1 = require_retryPolicy(); + var constants_js_1 = require_constants13(); + exports2.exponentialRetryPolicyName = "exponentialRetryPolicy"; + function exponentialRetryPolicy(options = {}) { + return (0, retryPolicy_js_1.retryPolicy)([ + (0, exponentialRetryStrategy_js_1.exponentialRetryStrategy)({ + ...options, + ignoreSystemErrors: true + }) + ], { + maxRetries: options.maxRetries ?? constants_js_1.DEFAULT_RETRY_POLICY_COUNT + }); } - function calculateQueryParameters(operationSpec, operationArguments, fallbackObject) { - const result = /* @__PURE__ */ new Map(); - const sequenceParams = /* @__PURE__ */ new Set(); - if (operationSpec.queryParameters?.length) { - for (const queryParameter of operationSpec.queryParameters) { - if (queryParameter.mapper.type.name === "Sequence" && queryParameter.mapper.serializedName) { - sequenceParams.add(queryParameter.mapper.serializedName); - } - let queryParameterValue = (0, operationHelpers_js_1.getOperationArgumentValueFromParameter)(operationArguments, queryParameter, fallbackObject); - if (queryParameterValue !== void 0 && queryParameterValue !== null || queryParameter.mapper.required) { - queryParameterValue = operationSpec.serializer.serialize(queryParameter.mapper, queryParameterValue, (0, interfaceHelpers_js_1.getPathStringFromParameter)(queryParameter)); - const delimiter = queryParameter.collectionFormat ? 
CollectionFormatToDelimiterMap[queryParameter.collectionFormat] : ""; - if (Array.isArray(queryParameterValue)) { - queryParameterValue = queryParameterValue.map((item) => { - if (item === null || item === void 0) { - return ""; - } - return item; - }); - } - if (queryParameter.collectionFormat === "Multi" && queryParameterValue.length === 0) { - continue; - } else if (Array.isArray(queryParameterValue) && (queryParameter.collectionFormat === "SSV" || queryParameter.collectionFormat === "TSV")) { - queryParameterValue = queryParameterValue.join(delimiter); - } - if (!queryParameter.skipEncoding) { - if (Array.isArray(queryParameterValue)) { - queryParameterValue = queryParameterValue.map((item) => { - return encodeURIComponent(item); - }); - } else { - queryParameterValue = encodeURIComponent(queryParameterValue); - } - } - if (Array.isArray(queryParameterValue) && (queryParameter.collectionFormat === "CSV" || queryParameter.collectionFormat === "Pipes")) { - queryParameterValue = queryParameterValue.join(delimiter); - } - result.set(queryParameter.mapper.serializedName || (0, interfaceHelpers_js_1.getPathStringFromParameter)(queryParameter), queryParameterValue); - } - } - } + } +}); + +// node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/systemErrorRetryPolicy.js +var require_systemErrorRetryPolicy = __commonJS({ + "node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/systemErrorRetryPolicy.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.systemErrorRetryPolicyName = void 0; + exports2.systemErrorRetryPolicy = systemErrorRetryPolicy; + var exponentialRetryStrategy_js_1 = require_exponentialRetryStrategy(); + var retryPolicy_js_1 = require_retryPolicy(); + var constants_js_1 = require_constants13(); + exports2.systemErrorRetryPolicyName = "systemErrorRetryPolicy"; + function systemErrorRetryPolicy(options = {}) { return { - queryParams: result, - sequenceParams + name: exports2.systemErrorRetryPolicyName, + sendRequest: (0, retryPolicy_js_1.retryPolicy)([ + (0, exponentialRetryStrategy_js_1.exponentialRetryStrategy)({ + ...options, + ignoreHttpStatusCodes: true + }) + ], { + maxRetries: options.maxRetries ?? 
constants_js_1.DEFAULT_RETRY_POLICY_COUNT + }).sendRequest }; } - function simpleParseQueryParams(queryString) { - const result = /* @__PURE__ */ new Map(); - if (!queryString || queryString[0] !== "?") { - return result; - } - queryString = queryString.slice(1); - const pairs2 = queryString.split("&"); - for (const pair of pairs2) { - const [name, value] = pair.split("=", 2); - const existingValue = result.get(name); - if (existingValue) { - if (Array.isArray(existingValue)) { - existingValue.push(value); - } else { - result.set(name, [existingValue, value]); - } - } else { - result.set(name, value); - } - } - return result; - } - function appendQueryParams(url2, queryParams, sequenceParams, noOverwrite = false) { - if (queryParams.size === 0) { - return url2; - } - const parsedUrl = new URL(url2); - const combinedParams = simpleParseQueryParams(parsedUrl.search); - for (const [name, value] of queryParams) { - const existingValue = combinedParams.get(name); - if (Array.isArray(existingValue)) { - if (Array.isArray(value)) { - existingValue.push(...value); - const valueSet = new Set(existingValue); - combinedParams.set(name, Array.from(valueSet)); - } else { - existingValue.push(value); - } - } else if (existingValue) { - if (Array.isArray(value)) { - value.unshift(existingValue); - } else if (sequenceParams.has(name)) { - combinedParams.set(name, [existingValue, value]); - } - if (!noOverwrite) { - combinedParams.set(name, value); - } - } else { - combinedParams.set(name, value); - } - } - const searchPieces = []; - for (const [name, value] of combinedParams) { - if (typeof value === "string") { - searchPieces.push(`${name}=${value}`); - } else if (Array.isArray(value)) { - for (const subValue of value) { - searchPieces.push(`${name}=${subValue}`); - } - } else { - searchPieces.push(`${name}=${value}`); - } - } - parsedUrl.search = searchPieces.length ? `?${searchPieces.join("&")}` : ""; - return parsedUrl.toString(); - } } }); -// node_modules/@azure/core-client/dist/commonjs/log.js -var require_log4 = __commonJS({ - "node_modules/@azure/core-client/dist/commonjs/log.js"(exports2) { +// node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/throttlingRetryPolicy.js +var require_throttlingRetryPolicy = __commonJS({ + "node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/throttlingRetryPolicy.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.logger = void 0; - var logger_1 = require_commonjs2(); - exports2.logger = (0, logger_1.createClientLogger)("core-client"); + exports2.throttlingRetryPolicyName = void 0; + exports2.throttlingRetryPolicy = throttlingRetryPolicy; + var throttlingRetryStrategy_js_1 = require_throttlingRetryStrategy(); + var retryPolicy_js_1 = require_retryPolicy(); + var constants_js_1 = require_constants13(); + exports2.throttlingRetryPolicyName = "throttlingRetryPolicy"; + function throttlingRetryPolicy(options = {}) { + return { + name: exports2.throttlingRetryPolicyName, + sendRequest: (0, retryPolicy_js_1.retryPolicy)([(0, throttlingRetryStrategy_js_1.throttlingRetryStrategy)()], { + maxRetries: options.maxRetries ?? 
constants_js_1.DEFAULT_RETRY_POLICY_COUNT + }).sendRequest + }; + } } }); -// node_modules/@azure/core-client/dist/commonjs/serviceClient.js -var require_serviceClient = __commonJS({ - "node_modules/@azure/core-client/dist/commonjs/serviceClient.js"(exports2) { +// node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/internal.js +var require_internal2 = __commonJS({ + "node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/internal.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.ServiceClient = void 0; - var core_rest_pipeline_1 = require_commonjs6(); - var pipeline_js_1 = require_pipeline3(); - var utils_js_1 = require_utils6(); - var httpClientCache_js_1 = require_httpClientCache(); - var operationHelpers_js_1 = require_operationHelpers(); - var urlHelpers_js_1 = require_urlHelpers2(); - var interfaceHelpers_js_1 = require_interfaceHelpers(); - var log_js_1 = require_log4(); - var ServiceClient = class { - /** - * If specified, this is the base URI that requests will be made against for this ServiceClient. - * If it is not specified, then all OperationSpecs must contain a baseUrl property. - */ - _endpoint; - /** - * The default request content type for the service. - * Used if no requestContentType is present on an OperationSpec. - */ - _requestContentType; - /** - * Set to true if the request is sent over HTTP instead of HTTPS - */ - _allowInsecureConnection; - /** - * The HTTP client that will be used to send requests. - */ - _httpClient; - /** - * The pipeline used by this client to make requests - */ - pipeline; - /** - * The ServiceClient constructor - * @param options - The service client options that govern the behavior of the client. - */ - constructor(options = {}) { - this._requestContentType = options.requestContentType; - this._endpoint = options.endpoint ?? options.baseUri; - if (options.baseUri) { - log_js_1.logger.warning("The baseUri option for SDK Clients has been deprecated, please use endpoint instead."); - } - this._allowInsecureConnection = options.allowInsecureConnection; - this._httpClient = options.httpClient || (0, httpClientCache_js_1.getCachedDefaultHttpClient)(); - this.pipeline = options.pipeline || createDefaultPipeline(options); - if (options.additionalPolicies?.length) { - for (const { policy, position } of options.additionalPolicies) { - const afterPhase = position === "perRetry" ? "Sign" : void 0; - this.pipeline.addPolicy(policy, { - afterPhase - }); - } - } - } - /** - * Send the provided httpRequest. - */ - async sendRequest(request2) { - return this.pipeline.sendRequest(this._httpClient, request2); - } - /** - * Send an HTTP request that is populated using the provided OperationSpec. - * @typeParam T - The typed result of the request, based on the OperationSpec. - * @param operationArguments - The arguments that the HTTP request's templated values will be populated from. - * @param operationSpec - The OperationSpec to use to populate the httpRequest. 
- */ - async sendOperationRequest(operationArguments, operationSpec) { - const endpoint2 = operationSpec.baseUrl || this._endpoint; - if (!endpoint2) { - throw new Error("If operationSpec.baseUrl is not specified, then the ServiceClient must have a endpoint string property that contains the base URL to use."); - } - const url2 = (0, urlHelpers_js_1.getRequestUrl)(endpoint2, operationSpec, operationArguments, this); - const request2 = (0, core_rest_pipeline_1.createPipelineRequest)({ - url: url2 - }); - request2.method = operationSpec.httpMethod; - const operationInfo = (0, operationHelpers_js_1.getOperationRequestInfo)(request2); - operationInfo.operationSpec = operationSpec; - operationInfo.operationArguments = operationArguments; - const contentType = operationSpec.contentType || this._requestContentType; - if (contentType && operationSpec.requestBody) { - request2.headers.set("Content-Type", contentType); - } - const options = operationArguments.options; - if (options) { - const requestOptions = options.requestOptions; - if (requestOptions) { - if (requestOptions.timeout) { - request2.timeout = requestOptions.timeout; - } - if (requestOptions.onUploadProgress) { - request2.onUploadProgress = requestOptions.onUploadProgress; - } - if (requestOptions.onDownloadProgress) { - request2.onDownloadProgress = requestOptions.onDownloadProgress; - } - if (requestOptions.shouldDeserialize !== void 0) { - operationInfo.shouldDeserialize = requestOptions.shouldDeserialize; - } - if (requestOptions.allowInsecureConnection) { - request2.allowInsecureConnection = true; - } - } - if (options.abortSignal) { - request2.abortSignal = options.abortSignal; - } - if (options.tracingOptions) { - request2.tracingOptions = options.tracingOptions; - } - } - if (this._allowInsecureConnection) { - request2.allowInsecureConnection = true; - } - if (request2.streamResponseStatusCodes === void 0) { - request2.streamResponseStatusCodes = (0, interfaceHelpers_js_1.getStreamingResponseStatusCodes)(operationSpec); - } - try { - const rawResponse = await this.sendRequest(request2); - const flatResponse = (0, utils_js_1.flattenResponse)(rawResponse, operationSpec.responses[rawResponse.status]); - if (options?.onResponse) { - options.onResponse(rawResponse, flatResponse); - } - return flatResponse; - } catch (error3) { - if (typeof error3 === "object" && error3?.response) { - const rawResponse = error3.response; - const flatResponse = (0, utils_js_1.flattenResponse)(rawResponse, operationSpec.responses[error3.statusCode] || operationSpec.responses["default"]); - error3.details = flatResponse; - if (options?.onResponse) { - options.onResponse(rawResponse, flatResponse, error3); - } - } - throw error3; - } - } - }; - exports2.ServiceClient = ServiceClient; - function createDefaultPipeline(options) { - const credentialScopes = getCredentialScopes(options); - const credentialOptions = options.credential && credentialScopes ? { credentialScopes, credential: options.credential } : void 0; - return (0, pipeline_js_1.createClientPipeline)({ - ...options, - credentialOptions - }); - } - function getCredentialScopes(options) { - if (options.credentialScopes) { - return options.credentialScopes; - } - if (options.endpoint) { - return `${options.endpoint}/.default`; - } - if (options.baseUri) { - return `${options.baseUri}/.default`; - } - if (options.credential && !options.credentialScopes) { - throw new Error(`When using credentials, the ServiceClientOptions must contain either a endpoint or a credentialScopes. 
Unable to create a bearerTokenAuthenticationPolicy`); - } - return void 0; - } + exports2.userAgentPolicyName = exports2.userAgentPolicy = exports2.tlsPolicyName = exports2.tlsPolicy = exports2.redirectPolicyName = exports2.redirectPolicy = exports2.getDefaultProxySettings = exports2.proxyPolicyName = exports2.proxyPolicy = exports2.multipartPolicyName = exports2.multipartPolicy = exports2.logPolicyName = exports2.logPolicy = exports2.formDataPolicyName = exports2.formDataPolicy = exports2.throttlingRetryPolicyName = exports2.throttlingRetryPolicy = exports2.systemErrorRetryPolicyName = exports2.systemErrorRetryPolicy = exports2.retryPolicy = exports2.exponentialRetryPolicyName = exports2.exponentialRetryPolicy = exports2.defaultRetryPolicyName = exports2.defaultRetryPolicy = exports2.decompressResponsePolicyName = exports2.decompressResponsePolicy = exports2.agentPolicyName = exports2.agentPolicy = void 0; + var agentPolicy_js_1 = require_agentPolicy(); + Object.defineProperty(exports2, "agentPolicy", { enumerable: true, get: function() { + return agentPolicy_js_1.agentPolicy; + } }); + Object.defineProperty(exports2, "agentPolicyName", { enumerable: true, get: function() { + return agentPolicy_js_1.agentPolicyName; + } }); + var decompressResponsePolicy_js_1 = require_decompressResponsePolicy(); + Object.defineProperty(exports2, "decompressResponsePolicy", { enumerable: true, get: function() { + return decompressResponsePolicy_js_1.decompressResponsePolicy; + } }); + Object.defineProperty(exports2, "decompressResponsePolicyName", { enumerable: true, get: function() { + return decompressResponsePolicy_js_1.decompressResponsePolicyName; + } }); + var defaultRetryPolicy_js_1 = require_defaultRetryPolicy(); + Object.defineProperty(exports2, "defaultRetryPolicy", { enumerable: true, get: function() { + return defaultRetryPolicy_js_1.defaultRetryPolicy; + } }); + Object.defineProperty(exports2, "defaultRetryPolicyName", { enumerable: true, get: function() { + return defaultRetryPolicy_js_1.defaultRetryPolicyName; + } }); + var exponentialRetryPolicy_js_1 = require_exponentialRetryPolicy(); + Object.defineProperty(exports2, "exponentialRetryPolicy", { enumerable: true, get: function() { + return exponentialRetryPolicy_js_1.exponentialRetryPolicy; + } }); + Object.defineProperty(exports2, "exponentialRetryPolicyName", { enumerable: true, get: function() { + return exponentialRetryPolicy_js_1.exponentialRetryPolicyName; + } }); + var retryPolicy_js_1 = require_retryPolicy(); + Object.defineProperty(exports2, "retryPolicy", { enumerable: true, get: function() { + return retryPolicy_js_1.retryPolicy; + } }); + var systemErrorRetryPolicy_js_1 = require_systemErrorRetryPolicy(); + Object.defineProperty(exports2, "systemErrorRetryPolicy", { enumerable: true, get: function() { + return systemErrorRetryPolicy_js_1.systemErrorRetryPolicy; + } }); + Object.defineProperty(exports2, "systemErrorRetryPolicyName", { enumerable: true, get: function() { + return systemErrorRetryPolicy_js_1.systemErrorRetryPolicyName; + } }); + var throttlingRetryPolicy_js_1 = require_throttlingRetryPolicy(); + Object.defineProperty(exports2, "throttlingRetryPolicy", { enumerable: true, get: function() { + return throttlingRetryPolicy_js_1.throttlingRetryPolicy; + } }); + Object.defineProperty(exports2, "throttlingRetryPolicyName", { enumerable: true, get: function() { + return throttlingRetryPolicy_js_1.throttlingRetryPolicyName; + } }); + var formDataPolicy_js_1 = require_formDataPolicy(); + Object.defineProperty(exports2, 
"formDataPolicy", { enumerable: true, get: function() { + return formDataPolicy_js_1.formDataPolicy; + } }); + Object.defineProperty(exports2, "formDataPolicyName", { enumerable: true, get: function() { + return formDataPolicy_js_1.formDataPolicyName; + } }); + var logPolicy_js_1 = require_logPolicy(); + Object.defineProperty(exports2, "logPolicy", { enumerable: true, get: function() { + return logPolicy_js_1.logPolicy; + } }); + Object.defineProperty(exports2, "logPolicyName", { enumerable: true, get: function() { + return logPolicy_js_1.logPolicyName; + } }); + var multipartPolicy_js_1 = require_multipartPolicy(); + Object.defineProperty(exports2, "multipartPolicy", { enumerable: true, get: function() { + return multipartPolicy_js_1.multipartPolicy; + } }); + Object.defineProperty(exports2, "multipartPolicyName", { enumerable: true, get: function() { + return multipartPolicy_js_1.multipartPolicyName; + } }); + var proxyPolicy_js_1 = require_proxyPolicy(); + Object.defineProperty(exports2, "proxyPolicy", { enumerable: true, get: function() { + return proxyPolicy_js_1.proxyPolicy; + } }); + Object.defineProperty(exports2, "proxyPolicyName", { enumerable: true, get: function() { + return proxyPolicy_js_1.proxyPolicyName; + } }); + Object.defineProperty(exports2, "getDefaultProxySettings", { enumerable: true, get: function() { + return proxyPolicy_js_1.getDefaultProxySettings; + } }); + var redirectPolicy_js_1 = require_redirectPolicy(); + Object.defineProperty(exports2, "redirectPolicy", { enumerable: true, get: function() { + return redirectPolicy_js_1.redirectPolicy; + } }); + Object.defineProperty(exports2, "redirectPolicyName", { enumerable: true, get: function() { + return redirectPolicy_js_1.redirectPolicyName; + } }); + var tlsPolicy_js_1 = require_tlsPolicy(); + Object.defineProperty(exports2, "tlsPolicy", { enumerable: true, get: function() { + return tlsPolicy_js_1.tlsPolicy; + } }); + Object.defineProperty(exports2, "tlsPolicyName", { enumerable: true, get: function() { + return tlsPolicy_js_1.tlsPolicyName; + } }); + var userAgentPolicy_js_1 = require_userAgentPolicy(); + Object.defineProperty(exports2, "userAgentPolicy", { enumerable: true, get: function() { + return userAgentPolicy_js_1.userAgentPolicy; + } }); + Object.defineProperty(exports2, "userAgentPolicyName", { enumerable: true, get: function() { + return userAgentPolicy_js_1.userAgentPolicyName; + } }); } }); -// node_modules/@azure/core-client/dist/commonjs/authorizeRequestOnClaimChallenge.js -var require_authorizeRequestOnClaimChallenge = __commonJS({ - "node_modules/@azure/core-client/dist/commonjs/authorizeRequestOnClaimChallenge.js"(exports2) { +// node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/logPolicy.js +var require_logPolicy2 = __commonJS({ + "node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/logPolicy.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.parseCAEChallenge = parseCAEChallenge; - exports2.authorizeRequestOnClaimChallenge = authorizeRequestOnClaimChallenge; - var log_js_1 = require_log4(); - var base64_js_1 = require_base64(); - function parseCAEChallenge(challenges) { - const bearerChallenges = `, ${challenges.trim()}`.split(", Bearer ").filter((x) => x); - return bearerChallenges.map((challenge) => { - const challengeParts = `${challenge.trim()}, `.split('", ').filter((x) => x); - const keyValuePairs = challengeParts.map((keyValue) => (([key, value]) => ({ [key]: value }))(keyValue.trim().split('="'))); - return 
keyValuePairs.reduce((a, b) => ({ ...a, ...b }), {}); + exports2.logPolicyName = void 0; + exports2.logPolicy = logPolicy; + var log_js_1 = require_log3(); + var policies_1 = require_internal2(); + exports2.logPolicyName = policies_1.logPolicyName; + function logPolicy(options = {}) { + return (0, policies_1.logPolicy)({ + logger: log_js_1.logger.info, + ...options }); } - async function authorizeRequestOnClaimChallenge(onChallengeOptions) { - const { scopes, response } = onChallengeOptions; - const logger = onChallengeOptions.logger || log_js_1.logger; - const challenge = response.headers.get("WWW-Authenticate"); - if (!challenge) { - logger.info(`The WWW-Authenticate header was missing. Failed to perform the Continuous Access Evaluation authentication flow.`); - return false; - } - const challenges = parseCAEChallenge(challenge) || []; - const parsedChallenge = challenges.find((x) => x.claims); - if (!parsedChallenge) { - logger.info(`The WWW-Authenticate header was missing the necessary "claims" to perform the Continuous Access Evaluation authentication flow.`); - return false; - } - const accessToken = await onChallengeOptions.getAccessToken(parsedChallenge.scope ? [parsedChallenge.scope] : scopes, { - claims: (0, base64_js_1.decodeStringToString)(parsedChallenge.claims) - }); - if (!accessToken) { - return false; - } - onChallengeOptions.request.headers.set("Authorization", `${accessToken.tokenType ?? "Bearer"} ${accessToken.token}`); - return true; + } +}); + +// node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/redirectPolicy.js +var require_redirectPolicy2 = __commonJS({ + "node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/redirectPolicy.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.redirectPolicyName = void 0; + exports2.redirectPolicy = redirectPolicy; + var policies_1 = require_internal2(); + exports2.redirectPolicyName = policies_1.redirectPolicyName; + function redirectPolicy(options = {}) { + return (0, policies_1.redirectPolicy)(options); } } }); -// node_modules/@azure/core-client/dist/commonjs/authorizeRequestOnTenantChallenge.js -var require_authorizeRequestOnTenantChallenge = __commonJS({ - "node_modules/@azure/core-client/dist/commonjs/authorizeRequestOnTenantChallenge.js"(exports2) { +// node_modules/@azure/core-rest-pipeline/dist/commonjs/util/userAgentPlatform.js +var require_userAgentPlatform2 = __commonJS({ + "node_modules/@azure/core-rest-pipeline/dist/commonjs/util/userAgentPlatform.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.authorizeRequestOnTenantChallenge = void 0; - var Constants = { - DefaultScope: "/.default", - /** - * Defines constants for use with HTTP headers. - */ - HeaderConstants: { - /** - * The Authorization header. 
- */ - AUTHORIZATION: "authorization" - } - }; - function isUuid(text) { - return /^[0-9a-fA-F]{8}\b-[0-9a-fA-F]{4}\b-[0-9a-fA-F]{4}\b-[0-9a-fA-F]{4}\b-[0-9a-fA-F]{12}$/.test(text); + exports2.getHeaderName = getHeaderName; + exports2.setPlatformSpecificData = setPlatformSpecificData; + var tslib_1 = (init_tslib_es6(), __toCommonJS(tslib_es6_exports)); + var node_os_1 = tslib_1.__importDefault(require("node:os")); + var node_process_1 = tslib_1.__importDefault(require("node:process")); + function getHeaderName() { + return "User-Agent"; } - var authorizeRequestOnTenantChallenge = async (challengeOptions) => { - const requestOptions = requestToOptions(challengeOptions.request); - const challenge = getChallenge(challengeOptions.response); - if (challenge) { - const challengeInfo = parseChallenge(challenge); - const challengeScopes = buildScopes(challengeOptions, challengeInfo); - const tenantId = extractTenantId(challengeInfo); - if (!tenantId) { - return false; - } - const accessToken = await challengeOptions.getAccessToken(challengeScopes, { - ...requestOptions, - tenantId - }); - if (!accessToken) { - return false; + async function setPlatformSpecificData(map2) { + if (node_process_1.default && node_process_1.default.versions) { + const osInfo = `${node_os_1.default.type()} ${node_os_1.default.release()}; ${node_os_1.default.arch()}`; + const versions = node_process_1.default.versions; + if (versions.bun) { + map2.set("Bun", `${versions.bun} (${osInfo})`); + } else if (versions.deno) { + map2.set("Deno", `${versions.deno} (${osInfo})`); + } else if (versions.node) { + map2.set("Node", `${versions.node} (${osInfo})`); } - challengeOptions.request.headers.set(Constants.HeaderConstants.AUTHORIZATION, `${accessToken.tokenType ?? "Bearer"} ${accessToken.token}`); - return true; } - return false; - }; - exports2.authorizeRequestOnTenantChallenge = authorizeRequestOnTenantChallenge; - function extractTenantId(challengeInfo) { - const parsedAuthUri = new URL(challengeInfo.authorization_uri); - const pathSegments = parsedAuthUri.pathname.split("/"); - const tenantId = pathSegments[1]; - if (tenantId && isUuid(tenantId)) { - return tenantId; - } - return void 0; } - function buildScopes(challengeOptions, challengeInfo) { - if (!challengeInfo.resource_id) { - return challengeOptions.scopes; - } - const challengeScopes = new URL(challengeInfo.resource_id); - challengeScopes.pathname = Constants.DefaultScope; - let scope = challengeScopes.toString(); - if (scope === "https://disk.azure.com/.default") { - scope = "https://disk.azure.com//.default"; + } +}); + +// node_modules/@azure/core-rest-pipeline/dist/commonjs/constants.js +var require_constants14 = __commonJS({ + "node_modules/@azure/core-rest-pipeline/dist/commonjs/constants.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.DEFAULT_RETRY_POLICY_COUNT = exports2.SDK_VERSION = void 0; + exports2.SDK_VERSION = "1.22.2"; + exports2.DEFAULT_RETRY_POLICY_COUNT = 3; + } +}); + +// node_modules/@azure/core-rest-pipeline/dist/commonjs/util/userAgent.js +var require_userAgent2 = __commonJS({ + "node_modules/@azure/core-rest-pipeline/dist/commonjs/util/userAgent.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.getUserAgentHeaderName = getUserAgentHeaderName; + exports2.getUserAgentValue = getUserAgentValue; + var userAgentPlatform_js_1 = require_userAgentPlatform2(); + var constants_js_1 = require_constants14(); + function 
getUserAgentString(telemetryInfo) { + const parts = []; + for (const [key, value] of telemetryInfo) { + const token = value ? `${key}/${value}` : key; + parts.push(token); } - return [scope]; + return parts.join(" "); } - function getChallenge(response) { - const challenge = response.headers.get("WWW-Authenticate"); - if (response.status === 401 && challenge) { - return challenge; - } - return; + function getUserAgentHeaderName() { + return (0, userAgentPlatform_js_1.getHeaderName)(); } - function parseChallenge(challenge) { - const bearerChallenge = challenge.slice("Bearer ".length); - const challengeParts = `${bearerChallenge.trim()} `.split(" ").filter((x) => x); - const keyValuePairs = challengeParts.map((keyValue) => (([key, value]) => ({ [key]: value }))(keyValue.trim().split("="))); - return keyValuePairs.reduce((a, b) => ({ ...a, ...b }), {}); + async function getUserAgentValue(prefix) { + const runtimeInfo = /* @__PURE__ */ new Map(); + runtimeInfo.set("core-rest-pipeline", constants_js_1.SDK_VERSION); + await (0, userAgentPlatform_js_1.setPlatformSpecificData)(runtimeInfo); + const defaultAgent = getUserAgentString(runtimeInfo); + const userAgentValue = prefix ? `${prefix} ${defaultAgent}` : defaultAgent; + return userAgentValue; } - function requestToOptions(request2) { + } +}); + +// node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/userAgentPolicy.js +var require_userAgentPolicy2 = __commonJS({ + "node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/userAgentPolicy.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.userAgentPolicyName = void 0; + exports2.userAgentPolicy = userAgentPolicy; + var userAgent_js_1 = require_userAgent2(); + var UserAgentHeaderName = (0, userAgent_js_1.getUserAgentHeaderName)(); + exports2.userAgentPolicyName = "userAgentPolicy"; + function userAgentPolicy(options = {}) { + const userAgentValue = (0, userAgent_js_1.getUserAgentValue)(options.userAgentPrefix); return { - abortSignal: request2.abortSignal, - requestOptions: { - timeout: request2.timeout - }, - tracingOptions: request2.tracingOptions + name: exports2.userAgentPolicyName, + async sendRequest(request2, next) { + if (!request2.headers.has(UserAgentHeaderName)) { + request2.headers.set(UserAgentHeaderName, await userAgentValue); + } + return next(request2); + } }; } } }); -// node_modules/@azure/core-client/dist/commonjs/index.js -var require_commonjs8 = __commonJS({ - "node_modules/@azure/core-client/dist/commonjs/index.js"(exports2) { +// node_modules/@typespec/ts-http-runtime/dist/commonjs/util/sha256.js +var require_sha256 = __commonJS({ + "node_modules/@typespec/ts-http-runtime/dist/commonjs/util/sha256.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.authorizeRequestOnTenantChallenge = exports2.authorizeRequestOnClaimChallenge = exports2.serializationPolicyName = exports2.serializationPolicy = exports2.deserializationPolicyName = exports2.deserializationPolicy = exports2.XML_CHARKEY = exports2.XML_ATTRKEY = exports2.createClientPipeline = exports2.ServiceClient = exports2.MapperTypeNames = exports2.createSerializer = void 0; - var serializer_js_1 = require_serializer(); - Object.defineProperty(exports2, "createSerializer", { enumerable: true, get: function() { - return serializer_js_1.createSerializer; + exports2.computeSha256Hmac = computeSha256Hmac; + exports2.computeSha256Hash = computeSha256Hash; + var node_crypto_1 = require("node:crypto"); + 
async function computeSha256Hmac(key, stringToSign, encoding) { + const decodedKey = Buffer.from(key, "base64"); + return (0, node_crypto_1.createHmac)("sha256", decodedKey).update(stringToSign).digest(encoding); + } + async function computeSha256Hash(content, encoding) { + return (0, node_crypto_1.createHash)("sha256").update(content).digest(encoding); + } + } +}); + +// node_modules/@typespec/ts-http-runtime/dist/commonjs/util/internal.js +var require_internal3 = __commonJS({ + "node_modules/@typespec/ts-http-runtime/dist/commonjs/util/internal.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.Sanitizer = exports2.uint8ArrayToString = exports2.stringToUint8Array = exports2.isWebWorker = exports2.isReactNative = exports2.isDeno = exports2.isNodeRuntime = exports2.isNodeLike = exports2.isBun = exports2.isBrowser = exports2.randomUUID = exports2.computeSha256Hmac = exports2.computeSha256Hash = exports2.isError = exports2.isObject = exports2.getRandomIntegerInclusive = exports2.calculateRetryDelay = void 0; + var delay_js_1 = require_delay(); + Object.defineProperty(exports2, "calculateRetryDelay", { enumerable: true, get: function() { + return delay_js_1.calculateRetryDelay; } }); - Object.defineProperty(exports2, "MapperTypeNames", { enumerable: true, get: function() { - return serializer_js_1.MapperTypeNames; + var random_js_1 = require_random(); + Object.defineProperty(exports2, "getRandomIntegerInclusive", { enumerable: true, get: function() { + return random_js_1.getRandomIntegerInclusive; } }); - var serviceClient_js_1 = require_serviceClient(); - Object.defineProperty(exports2, "ServiceClient", { enumerable: true, get: function() { - return serviceClient_js_1.ServiceClient; + var object_js_1 = require_object(); + Object.defineProperty(exports2, "isObject", { enumerable: true, get: function() { + return object_js_1.isObject; } }); - var pipeline_js_1 = require_pipeline3(); - Object.defineProperty(exports2, "createClientPipeline", { enumerable: true, get: function() { - return pipeline_js_1.createClientPipeline; + var error_js_1 = require_error(); + Object.defineProperty(exports2, "isError", { enumerable: true, get: function() { + return error_js_1.isError; } }); - var interfaces_js_1 = require_interfaces(); - Object.defineProperty(exports2, "XML_ATTRKEY", { enumerable: true, get: function() { - return interfaces_js_1.XML_ATTRKEY; + var sha256_js_1 = require_sha256(); + Object.defineProperty(exports2, "computeSha256Hash", { enumerable: true, get: function() { + return sha256_js_1.computeSha256Hash; } }); - Object.defineProperty(exports2, "XML_CHARKEY", { enumerable: true, get: function() { - return interfaces_js_1.XML_CHARKEY; + Object.defineProperty(exports2, "computeSha256Hmac", { enumerable: true, get: function() { + return sha256_js_1.computeSha256Hmac; } }); - var deserializationPolicy_js_1 = require_deserializationPolicy(); - Object.defineProperty(exports2, "deserializationPolicy", { enumerable: true, get: function() { - return deserializationPolicy_js_1.deserializationPolicy; + var uuidUtils_js_1 = require_uuidUtils(); + Object.defineProperty(exports2, "randomUUID", { enumerable: true, get: function() { + return uuidUtils_js_1.randomUUID; } }); - Object.defineProperty(exports2, "deserializationPolicyName", { enumerable: true, get: function() { - return deserializationPolicy_js_1.deserializationPolicyName; + var checkEnvironment_js_1 = require_checkEnvironment(); + Object.defineProperty(exports2, "isBrowser", { 
enumerable: true, get: function() { + return checkEnvironment_js_1.isBrowser; } }); - var serializationPolicy_js_1 = require_serializationPolicy(); - Object.defineProperty(exports2, "serializationPolicy", { enumerable: true, get: function() { - return serializationPolicy_js_1.serializationPolicy; + Object.defineProperty(exports2, "isBun", { enumerable: true, get: function() { + return checkEnvironment_js_1.isBun; } }); - Object.defineProperty(exports2, "serializationPolicyName", { enumerable: true, get: function() { - return serializationPolicy_js_1.serializationPolicyName; + Object.defineProperty(exports2, "isNodeLike", { enumerable: true, get: function() { + return checkEnvironment_js_1.isNodeLike; } }); - var authorizeRequestOnClaimChallenge_js_1 = require_authorizeRequestOnClaimChallenge(); - Object.defineProperty(exports2, "authorizeRequestOnClaimChallenge", { enumerable: true, get: function() { - return authorizeRequestOnClaimChallenge_js_1.authorizeRequestOnClaimChallenge; + Object.defineProperty(exports2, "isNodeRuntime", { enumerable: true, get: function() { + return checkEnvironment_js_1.isNodeRuntime; } }); - var authorizeRequestOnTenantChallenge_js_1 = require_authorizeRequestOnTenantChallenge(); - Object.defineProperty(exports2, "authorizeRequestOnTenantChallenge", { enumerable: true, get: function() { - return authorizeRequestOnTenantChallenge_js_1.authorizeRequestOnTenantChallenge; + Object.defineProperty(exports2, "isDeno", { enumerable: true, get: function() { + return checkEnvironment_js_1.isDeno; + } }); + Object.defineProperty(exports2, "isReactNative", { enumerable: true, get: function() { + return checkEnvironment_js_1.isReactNative; + } }); + Object.defineProperty(exports2, "isWebWorker", { enumerable: true, get: function() { + return checkEnvironment_js_1.isWebWorker; + } }); + var bytesEncoding_js_1 = require_bytesEncoding(); + Object.defineProperty(exports2, "stringToUint8Array", { enumerable: true, get: function() { + return bytesEncoding_js_1.stringToUint8Array; + } }); + Object.defineProperty(exports2, "uint8ArrayToString", { enumerable: true, get: function() { + return bytesEncoding_js_1.uint8ArrayToString; + } }); + var sanitizer_js_1 = require_sanitizer(); + Object.defineProperty(exports2, "Sanitizer", { enumerable: true, get: function() { + return sanitizer_js_1.Sanitizer; } }); } }); -// node_modules/@azure/core-http-compat/dist/commonjs/util.js -var require_util17 = __commonJS({ - "node_modules/@azure/core-http-compat/dist/commonjs/util.js"(exports2) { +// node_modules/@azure/core-util/dist/commonjs/aborterUtils.js +var require_aborterUtils = __commonJS({ + "node_modules/@azure/core-util/dist/commonjs/aborterUtils.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.HttpHeaders = void 0; - exports2.toPipelineRequest = toPipelineRequest; - exports2.toWebResourceLike = toWebResourceLike; - exports2.toHttpHeadersLike = toHttpHeadersLike; - var core_rest_pipeline_1 = require_commonjs6(); - var originalRequestSymbol = /* @__PURE__ */ Symbol("Original PipelineRequest"); - var originalClientRequestSymbol = /* @__PURE__ */ Symbol.for("@azure/core-client original request"); - function toPipelineRequest(webResource, options = {}) { - const compatWebResource = webResource; - const request2 = compatWebResource[originalRequestSymbol]; - const headers = (0, core_rest_pipeline_1.createHttpHeaders)(webResource.headers.toJson({ preserveCase: true })); - if (request2) { - request2.headers = headers; - return request2; - } 
else { - const newRequest = (0, core_rest_pipeline_1.createPipelineRequest)({ - url: webResource.url, - method: webResource.method, - headers, - withCredentials: webResource.withCredentials, - timeout: webResource.timeout, - requestId: webResource.requestId, - abortSignal: webResource.abortSignal, - body: webResource.body, - formData: webResource.formData, - disableKeepAlive: !!webResource.keepAlive, - onDownloadProgress: webResource.onDownloadProgress, - onUploadProgress: webResource.onUploadProgress, - proxySettings: webResource.proxySettings, - streamResponseStatusCodes: webResource.streamResponseStatusCodes, - agent: webResource.agent, - requestOverrides: webResource.requestOverrides - }); - if (options.originalRequest) { - newRequest[originalClientRequestSymbol] = options.originalRequest; - } - return newRequest; + exports2.cancelablePromiseRace = cancelablePromiseRace; + async function cancelablePromiseRace(abortablePromiseBuilders, options) { + const aborter = new AbortController(); + function abortHandler() { + aborter.abort(); } - } - function toWebResourceLike(request2, options) { - const originalRequest = options?.originalRequest ?? request2; - const webResource = { - url: request2.url, - method: request2.method, - headers: toHttpHeadersLike(request2.headers), - withCredentials: request2.withCredentials, - timeout: request2.timeout, - requestId: request2.headers.get("x-ms-client-request-id") || request2.requestId, - abortSignal: request2.abortSignal, - body: request2.body, - formData: request2.formData, - keepAlive: !!request2.disableKeepAlive, - onDownloadProgress: request2.onDownloadProgress, - onUploadProgress: request2.onUploadProgress, - proxySettings: request2.proxySettings, - streamResponseStatusCodes: request2.streamResponseStatusCodes, - agent: request2.agent, - requestOverrides: request2.requestOverrides, - clone() { - throw new Error("Cannot clone a non-proxied WebResourceLike"); - }, - prepare() { - throw new Error("WebResourceLike.prepare() is not supported by @azure/core-http-compat"); - }, - validateRequestProperties() { - } - }; - if (options?.createProxy) { - return new Proxy(webResource, { - get(target, prop, receiver) { - if (prop === originalRequestSymbol) { - return request2; - } else if (prop === "clone") { - return () => { - return toWebResourceLike(toPipelineRequest(webResource, { originalRequest }), { - createProxy: true, - originalRequest - }); - }; - } - return Reflect.get(target, prop, receiver); - }, - set(target, prop, value, receiver) { - if (prop === "keepAlive") { - request2.disableKeepAlive = !value; - } - const passThroughProps = [ - "url", - "method", - "withCredentials", - "timeout", - "requestId", - "abortSignal", - "body", - "formData", - "onDownloadProgress", - "onUploadProgress", - "proxySettings", - "streamResponseStatusCodes", - "agent", - "requestOverrides" - ]; - if (typeof prop === "string" && passThroughProps.includes(prop)) { - request2[prop] = value; - } - return Reflect.set(target, prop, value, receiver); - } - }); - } else { - return webResource; + options?.abortSignal?.addEventListener("abort", abortHandler); + try { + return await Promise.race(abortablePromiseBuilders.map((p) => p({ abortSignal: aborter.signal }))); + } finally { + aborter.abort(); + options?.abortSignal?.removeEventListener("abort", abortHandler); } } - function toHttpHeadersLike(headers) { - return new HttpHeaders(headers.toJSON({ preserveCase: true })); - } - function getHeaderKey(headerName) { - return headerName.toLowerCase(); - } - var HttpHeaders = 
class _HttpHeaders { - _headersMap; - constructor(rawHeaders) { - this._headersMap = {}; - if (rawHeaders) { - for (const headerName in rawHeaders) { - this.set(headerName, rawHeaders[headerName]); - } - } - } - /** - * Set a header in this collection with the provided name and value. The name is - * case-insensitive. - * @param headerName - The name of the header to set. This value is case-insensitive. - * @param headerValue - The value of the header to set. - */ - set(headerName, headerValue) { - this._headersMap[getHeaderKey(headerName)] = { - name: headerName, - value: headerValue.toString() - }; - } - /** - * Get the header value for the provided header name, or undefined if no header exists in this - * collection with the provided name. - * @param headerName - The name of the header. - */ - get(headerName) { - const header = this._headersMap[getHeaderKey(headerName)]; - return !header ? void 0 : header.value; - } - /** - * Get whether or not this header collection contains a header entry for the provided header name. - */ - contains(headerName) { - return !!this._headersMap[getHeaderKey(headerName)]; - } - /** - * Remove the header with the provided headerName. Return whether or not the header existed and - * was removed. - * @param headerName - The name of the header to remove. - */ - remove(headerName) { - const result = this.contains(headerName); - delete this._headersMap[getHeaderKey(headerName)]; - return result; - } - /** - * Get the headers that are contained this collection as an object. - */ - rawHeaders() { - return this.toJson({ preserveCase: true }); - } - /** - * Get the headers that are contained in this collection as an array. - */ - headersArray() { - const headers = []; - for (const headerKey in this._headersMap) { - headers.push(this._headersMap[headerKey]); - } - return headers; - } - /** - * Get the header names that are contained in this collection. - */ - headerNames() { - const headerNames = []; - const headers = this.headersArray(); - for (let i = 0; i < headers.length; ++i) { - headerNames.push(headers[i].name); - } - return headerNames; - } - /** - * Get the header values that are contained in this collection. - */ - headerValues() { - const headerValues = []; - const headers = this.headersArray(); - for (let i = 0; i < headers.length; ++i) { - headerValues.push(headers[i].value); - } - return headerValues; - } - /** - * Get the JSON object representation of this HTTP header collection. - */ - toJson(options = {}) { - const result = {}; - if (options.preserveCase) { - for (const headerKey in this._headersMap) { - const header = this._headersMap[headerKey]; - result[header.name] = header.value; - } - } else { - for (const headerKey in this._headersMap) { - const header = this._headersMap[headerKey]; - result[getHeaderKey(header.name)] = header.value; - } - } - return result; - } - /** - * Get the string representation of this HTTP header collection. - */ - toString() { - return JSON.stringify(this.toJson({ preserveCase: true })); - } - /** - * Create a deep clone/copy of this HttpHeaders collection. 
- */ - clone() { - const resultPreservingCasing = {}; - for (const headerKey in this._headersMap) { - const header = this._headersMap[headerKey]; - resultPreservingCasing[header.name] = header.value; - } - return new _HttpHeaders(resultPreservingCasing); + } +}); + +// node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/commonjs/AbortError.js +var require_AbortError2 = __commonJS({ + "node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/commonjs/AbortError.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.AbortError = void 0; + var AbortError = class extends Error { + constructor(message) { + super(message); + this.name = "AbortError"; } }; - exports2.HttpHeaders = HttpHeaders; + exports2.AbortError = AbortError; } }); -// node_modules/@azure/core-http-compat/dist/commonjs/response.js -var require_response3 = __commonJS({ - "node_modules/@azure/core-http-compat/dist/commonjs/response.js"(exports2) { +// node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/commonjs/index.js +var require_commonjs3 = __commonJS({ + "node_modules/@azure/core-util/node_modules/@azure/abort-controller/dist/commonjs/index.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.toCompatResponse = toCompatResponse; - exports2.toPipelineResponse = toPipelineResponse; - var core_rest_pipeline_1 = require_commonjs6(); - var util_js_1 = require_util17(); - var originalResponse = /* @__PURE__ */ Symbol("Original FullOperationResponse"); - function toCompatResponse(response, options) { - let request2 = (0, util_js_1.toWebResourceLike)(response.request); - let headers = (0, util_js_1.toHttpHeadersLike)(response.headers); - if (options?.createProxy) { - return new Proxy(response, { - get(target, prop, receiver) { - if (prop === "headers") { - return headers; - } else if (prop === "request") { - return request2; - } else if (prop === originalResponse) { - return response; - } - return Reflect.get(target, prop, receiver); - }, - set(target, prop, value, receiver) { - if (prop === "headers") { - headers = value; - } else if (prop === "request") { - request2 = value; - } - return Reflect.set(target, prop, value, receiver); - } - }); - } else { - return { - ...response, - request: request2, - headers - }; - } - } - function toPipelineResponse(compatResponse) { - const extendedCompatResponse = compatResponse; - const response = extendedCompatResponse[originalResponse]; - const headers = (0, core_rest_pipeline_1.createHttpHeaders)(compatResponse.headers.toJson({ preserveCase: true })); - if (response) { - response.headers = headers; - return response; - } else { - return { - ...compatResponse, - headers, - request: (0, util_js_1.toPipelineRequest)(compatResponse.request) - }; - } - } + exports2.AbortError = void 0; + var AbortError_js_1 = require_AbortError2(); + Object.defineProperty(exports2, "AbortError", { enumerable: true, get: function() { + return AbortError_js_1.AbortError; + } }); } }); -// node_modules/@azure/core-http-compat/dist/commonjs/extendedClient.js -var require_extendedClient = __commonJS({ - "node_modules/@azure/core-http-compat/dist/commonjs/extendedClient.js"(exports2) { +// node_modules/@azure/core-util/dist/commonjs/createAbortablePromise.js +var require_createAbortablePromise = __commonJS({ + "node_modules/@azure/core-util/dist/commonjs/createAbortablePromise.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: 
true }); - exports2.ExtendedServiceClient = void 0; - var disableKeepAlivePolicy_js_1 = require_disableKeepAlivePolicy(); - var core_rest_pipeline_1 = require_commonjs6(); - var core_client_1 = require_commonjs8(); - var response_js_1 = require_response3(); - var ExtendedServiceClient = class extends core_client_1.ServiceClient { - constructor(options) { - super(options); - if (options.keepAliveOptions?.enable === false && !(0, disableKeepAlivePolicy_js_1.pipelineContainsDisableKeepAlivePolicy)(this.pipeline)) { - this.pipeline.addPolicy((0, disableKeepAlivePolicy_js_1.createDisableKeepAlivePolicy)()); + exports2.createAbortablePromise = createAbortablePromise; + var abort_controller_1 = require_commonjs3(); + function createAbortablePromise(buildPromise, options) { + const { cleanupBeforeAbort, abortSignal, abortErrorMsg } = options ?? {}; + return new Promise((resolve6, reject) => { + function rejectOnAbort() { + reject(new abort_controller_1.AbortError(abortErrorMsg ?? "The operation was aborted.")); } - if (options.redirectOptions?.handleRedirects === false) { - this.pipeline.removePolicy({ - name: core_rest_pipeline_1.redirectPolicyName - }); + function removeListeners() { + abortSignal?.removeEventListener("abort", onAbort); } - } - /** - * Compatible send operation request function. - * - * @param operationArguments - Operation arguments - * @param operationSpec - Operation Spec - * @returns - */ - async sendOperationRequest(operationArguments, operationSpec) { - const userProvidedCallBack = operationArguments?.options?.onResponse; - let lastResponse; - function onResponse(rawResponse, flatResponse, error3) { - lastResponse = rawResponse; - if (userProvidedCallBack) { - userProvidedCallBack(rawResponse, flatResponse, error3); - } + function onAbort() { + cleanupBeforeAbort?.(); + removeListeners(); + rejectOnAbort(); } - operationArguments.options = { - ...operationArguments.options, - onResponse - }; - const result = await super.sendOperationRequest(operationArguments, operationSpec); - if (lastResponse) { - Object.defineProperty(result, "_response", { - value: (0, response_js_1.toCompatResponse)(lastResponse) + if (abortSignal?.aborted) { + return rejectOnAbort(); + } + try { + buildPromise((x) => { + removeListeners(); + resolve6(x); + }, (x) => { + removeListeners(); + reject(x); }); + } catch (err) { + reject(err); } - return result; - } - }; - exports2.ExtendedServiceClient = ExtendedServiceClient; + abortSignal?.addEventListener("abort", onAbort); + }); + } } }); -// node_modules/@azure/core-http-compat/dist/commonjs/policies/requestPolicyFactoryPolicy.js -var require_requestPolicyFactoryPolicy = __commonJS({ - "node_modules/@azure/core-http-compat/dist/commonjs/policies/requestPolicyFactoryPolicy.js"(exports2) { +// node_modules/@azure/core-util/dist/commonjs/delay.js +var require_delay2 = __commonJS({ + "node_modules/@azure/core-util/dist/commonjs/delay.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.requestPolicyFactoryPolicyName = exports2.HttpPipelineLogLevel = void 0; - exports2.createRequestPolicyFactoryPolicy = createRequestPolicyFactoryPolicy; - var util_js_1 = require_util17(); - var response_js_1 = require_response3(); - var HttpPipelineLogLevel; - (function(HttpPipelineLogLevel2) { - HttpPipelineLogLevel2[HttpPipelineLogLevel2["ERROR"] = 1] = "ERROR"; - HttpPipelineLogLevel2[HttpPipelineLogLevel2["INFO"] = 3] = "INFO"; - HttpPipelineLogLevel2[HttpPipelineLogLevel2["OFF"] = 0] = "OFF"; - 
HttpPipelineLogLevel2[HttpPipelineLogLevel2["WARNING"] = 2] = "WARNING"; - })(HttpPipelineLogLevel || (exports2.HttpPipelineLogLevel = HttpPipelineLogLevel = {})); - var mockRequestPolicyOptions = { - log(_logLevel, _message) { - }, - shouldLog(_logLevel) { - return false; - } - }; - exports2.requestPolicyFactoryPolicyName = "RequestPolicyFactoryPolicy"; - function createRequestPolicyFactoryPolicy(factories) { - const orderedFactories = factories.slice().reverse(); - return { - name: exports2.requestPolicyFactoryPolicyName, - async sendRequest(request2, next) { - let httpPipeline = { - async sendRequest(httpRequest) { - const response2 = await next((0, util_js_1.toPipelineRequest)(httpRequest)); - return (0, response_js_1.toCompatResponse)(response2, { createProxy: true }); - } - }; - for (const factory of orderedFactories) { - httpPipeline = factory.create(httpPipeline, mockRequestPolicyOptions); - } - const webResourceLike = (0, util_js_1.toWebResourceLike)(request2, { createProxy: true }); - const response = await httpPipeline.sendRequest(webResourceLike); - return (0, response_js_1.toPipelineResponse)(response); - } - }; - } - } -}); + exports2.delay = delay2; + exports2.calculateRetryDelay = calculateRetryDelay; + var createAbortablePromise_js_1 = require_createAbortablePromise(); + var util_1 = require_internal3(); + var StandardAbortMessage = "The delay was aborted."; + function delay2(timeInMs, options) { + let token; + const { abortSignal, abortErrorMsg } = options ?? {}; + return (0, createAbortablePromise_js_1.createAbortablePromise)((resolve6) => { + token = setTimeout(resolve6, timeInMs); + }, { + cleanupBeforeAbort: () => clearTimeout(token), + abortSignal, + abortErrorMsg: abortErrorMsg ?? StandardAbortMessage + }); + } + function calculateRetryDelay(retryAttempt, config) { + const exponentialDelay = config.retryDelayInMs * Math.pow(2, retryAttempt); + const clampedDelay = Math.min(config.maxRetryDelayInMs, exponentialDelay); + const retryAfterInMs = clampedDelay / 2 + (0, util_1.getRandomIntegerInclusive)(0, clampedDelay / 2); + return { retryAfterInMs }; + } + } +}); -// node_modules/@azure/core-http-compat/dist/commonjs/httpClientAdapter.js -var require_httpClientAdapter = __commonJS({ - "node_modules/@azure/core-http-compat/dist/commonjs/httpClientAdapter.js"(exports2) { +// node_modules/@azure/core-util/dist/commonjs/error.js +var require_error2 = __commonJS({ + "node_modules/@azure/core-util/dist/commonjs/error.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.convertHttpClient = convertHttpClient; - var response_js_1 = require_response3(); - var util_js_1 = require_util17(); - function convertHttpClient(requestPolicyClient) { - return { - sendRequest: async (request2) => { - const response = await requestPolicyClient.sendRequest((0, util_js_1.toWebResourceLike)(request2, { createProxy: true })); - return (0, response_js_1.toPipelineResponse)(response); + exports2.getErrorMessage = getErrorMessage2; + var util_1 = require_internal3(); + function getErrorMessage2(e) { + if ((0, util_1.isError)(e)) { + return e.message; + } else { + let stringified; + try { + if (typeof e === "object" && e) { + stringified = JSON.stringify(e); + } else { + stringified = String(e); + } + } catch (err) { + stringified = "[unable to stringify input]"; } - }; + return `Unknown error ${stringified}`; + } } } }); -// node_modules/@azure/core-http-compat/dist/commonjs/index.js -var require_commonjs9 = __commonJS({ - 
"node_modules/@azure/core-http-compat/dist/commonjs/index.js"(exports2) { +// node_modules/@azure/core-util/dist/commonjs/typeGuards.js +var require_typeGuards2 = __commonJS({ + "node_modules/@azure/core-util/dist/commonjs/typeGuards.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.toHttpHeadersLike = exports2.convertHttpClient = exports2.disableKeepAlivePolicyName = exports2.HttpPipelineLogLevel = exports2.createRequestPolicyFactoryPolicy = exports2.requestPolicyFactoryPolicyName = exports2.ExtendedServiceClient = void 0; - var extendedClient_js_1 = require_extendedClient(); - Object.defineProperty(exports2, "ExtendedServiceClient", { enumerable: true, get: function() { - return extendedClient_js_1.ExtendedServiceClient; + exports2.isDefined = isDefined3; + exports2.isObjectWithProperties = isObjectWithProperties; + exports2.objectHasProperty = objectHasProperty; + function isDefined3(thing) { + return typeof thing !== "undefined" && thing !== null; + } + function isObjectWithProperties(thing, properties) { + if (!isDefined3(thing) || typeof thing !== "object") { + return false; + } + for (const property of properties) { + if (!objectHasProperty(thing, property)) { + return false; + } + } + return true; + } + function objectHasProperty(thing, property) { + return isDefined3(thing) && typeof thing === "object" && property in thing; + } + } +}); + +// node_modules/@azure/core-util/dist/commonjs/index.js +var require_commonjs4 = __commonJS({ + "node_modules/@azure/core-util/dist/commonjs/index.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.isWebWorker = exports2.isReactNative = exports2.isNodeRuntime = exports2.isNodeLike = exports2.isNode = exports2.isDeno = exports2.isBun = exports2.isBrowser = exports2.objectHasProperty = exports2.isObjectWithProperties = exports2.isDefined = exports2.getErrorMessage = exports2.delay = exports2.createAbortablePromise = exports2.cancelablePromiseRace = void 0; + exports2.calculateRetryDelay = calculateRetryDelay; + exports2.computeSha256Hash = computeSha256Hash; + exports2.computeSha256Hmac = computeSha256Hmac; + exports2.getRandomIntegerInclusive = getRandomIntegerInclusive; + exports2.isError = isError; + exports2.isObject = isObject2; + exports2.randomUUID = randomUUID2; + exports2.uint8ArrayToString = uint8ArrayToString; + exports2.stringToUint8Array = stringToUint8Array; + var tslib_1 = (init_tslib_es6(), __toCommonJS(tslib_es6_exports)); + var tspRuntime = tslib_1.__importStar(require_internal3()); + var aborterUtils_js_1 = require_aborterUtils(); + Object.defineProperty(exports2, "cancelablePromiseRace", { enumerable: true, get: function() { + return aborterUtils_js_1.cancelablePromiseRace; } }); - var requestPolicyFactoryPolicy_js_1 = require_requestPolicyFactoryPolicy(); - Object.defineProperty(exports2, "requestPolicyFactoryPolicyName", { enumerable: true, get: function() { - return requestPolicyFactoryPolicy_js_1.requestPolicyFactoryPolicyName; + var createAbortablePromise_js_1 = require_createAbortablePromise(); + Object.defineProperty(exports2, "createAbortablePromise", { enumerable: true, get: function() { + return createAbortablePromise_js_1.createAbortablePromise; } }); - Object.defineProperty(exports2, "createRequestPolicyFactoryPolicy", { enumerable: true, get: function() { - return requestPolicyFactoryPolicy_js_1.createRequestPolicyFactoryPolicy; + var delay_js_1 = require_delay2(); + Object.defineProperty(exports2, 
"delay", { enumerable: true, get: function() { + return delay_js_1.delay; } }); - Object.defineProperty(exports2, "HttpPipelineLogLevel", { enumerable: true, get: function() { - return requestPolicyFactoryPolicy_js_1.HttpPipelineLogLevel; + var error_js_1 = require_error2(); + Object.defineProperty(exports2, "getErrorMessage", { enumerable: true, get: function() { + return error_js_1.getErrorMessage; } }); - var disableKeepAlivePolicy_js_1 = require_disableKeepAlivePolicy(); - Object.defineProperty(exports2, "disableKeepAlivePolicyName", { enumerable: true, get: function() { - return disableKeepAlivePolicy_js_1.disableKeepAlivePolicyName; + var typeGuards_js_1 = require_typeGuards2(); + Object.defineProperty(exports2, "isDefined", { enumerable: true, get: function() { + return typeGuards_js_1.isDefined; } }); - var httpClientAdapter_js_1 = require_httpClientAdapter(); - Object.defineProperty(exports2, "convertHttpClient", { enumerable: true, get: function() { - return httpClientAdapter_js_1.convertHttpClient; + Object.defineProperty(exports2, "isObjectWithProperties", { enumerable: true, get: function() { + return typeGuards_js_1.isObjectWithProperties; } }); - var util_js_1 = require_util17(); - Object.defineProperty(exports2, "toHttpHeadersLike", { enumerable: true, get: function() { - return util_js_1.toHttpHeadersLike; + Object.defineProperty(exports2, "objectHasProperty", { enumerable: true, get: function() { + return typeGuards_js_1.objectHasProperty; } }); + function calculateRetryDelay(retryAttempt, config) { + return tspRuntime.calculateRetryDelay(retryAttempt, config); + } + function computeSha256Hash(content, encoding) { + return tspRuntime.computeSha256Hash(content, encoding); + } + function computeSha256Hmac(key, stringToSign, encoding) { + return tspRuntime.computeSha256Hmac(key, stringToSign, encoding); + } + function getRandomIntegerInclusive(min, max) { + return tspRuntime.getRandomIntegerInclusive(min, max); + } + function isError(e) { + return tspRuntime.isError(e); + } + function isObject2(input) { + return tspRuntime.isObject(input); + } + function randomUUID2() { + return tspRuntime.randomUUID(); + } + exports2.isBrowser = tspRuntime.isBrowser; + exports2.isBun = tspRuntime.isBun; + exports2.isDeno = tspRuntime.isDeno; + exports2.isNode = tspRuntime.isNodeLike; + exports2.isNodeLike = tspRuntime.isNodeLike; + exports2.isNodeRuntime = tspRuntime.isNodeRuntime; + exports2.isReactNative = tspRuntime.isReactNative; + exports2.isWebWorker = tspRuntime.isWebWorker; + function uint8ArrayToString(bytes, format) { + return tspRuntime.uint8ArrayToString(bytes, format); + } + function stringToUint8Array(value, format) { + return tspRuntime.stringToUint8Array(value, format); + } } }); -// node_modules/fast-xml-parser/lib/fxp.cjs -var require_fxp = __commonJS({ - "node_modules/fast-xml-parser/lib/fxp.cjs"(exports2, module2) { - (() => { - "use strict"; - var t = { d: (e2, n2) => { - for (var i2 in n2) t.o(n2, i2) && !t.o(e2, i2) && Object.defineProperty(e2, i2, { enumerable: true, get: n2[i2] }); - }, o: (t2, e2) => Object.prototype.hasOwnProperty.call(t2, e2), r: (t2) => { - "undefined" != typeof Symbol && Symbol.toStringTag && Object.defineProperty(t2, Symbol.toStringTag, { value: "Module" }), Object.defineProperty(t2, "__esModule", { value: true }); - } }, e = {}; - t.r(e), t.d(e, { XMLBuilder: () => dt, XMLParser: () => it, XMLValidator: () => gt }); - const n = 
":A-Za-z_\\u00C0-\\u00D6\\u00D8-\\u00F6\\u00F8-\\u02FF\\u0370-\\u037D\\u037F-\\u1FFF\\u200C-\\u200D\\u2070-\\u218F\\u2C00-\\u2FEF\\u3001-\\uD7FF\\uF900-\\uFDCF\\uFDF0-\\uFFFD", i = new RegExp("^[" + n + "][" + n + "\\-.\\d\\u00B7\\u0300-\\u036F\\u203F-\\u2040]*$"); - function s(t2, e2) { - const n2 = []; - let i2 = e2.exec(t2); - for (; i2; ) { - const s2 = []; - s2.startIndex = e2.lastIndex - i2[0].length; - const r2 = i2.length; - for (let t3 = 0; t3 < r2; t3++) s2.push(i2[t3]); - n2.push(s2), i2 = e2.exec(t2); - } - return n2; - } - const r = function(t2) { - return !(null == i.exec(t2)); - }, o = { allowBooleanAttributes: false, unpairedTags: [] }; - function a(t2, e2) { - e2 = Object.assign({}, o, e2); - const n2 = []; - let i2 = false, s2 = false; - "\uFEFF" === t2[0] && (t2 = t2.substr(1)); - for (let o2 = 0; o2 < t2.length; o2++) if ("<" === t2[o2] && "?" === t2[o2 + 1]) { - if (o2 += 2, o2 = u(t2, o2), o2.err) return o2; - } else { - if ("<" !== t2[o2]) { - if (l(t2[o2])) continue; - return m("InvalidChar", "char '" + t2[o2] + "' is not expected.", b(t2, o2)); - } - { - let a2 = o2; - if (o2++, "!" === t2[o2]) { - o2 = h(t2, o2); - continue; - } - { - let d2 = false; - "/" === t2[o2] && (d2 = true, o2++); - let p2 = ""; - for (; o2 < t2.length && ">" !== t2[o2] && " " !== t2[o2] && " " !== t2[o2] && "\n" !== t2[o2] && "\r" !== t2[o2]; o2++) p2 += t2[o2]; - if (p2 = p2.trim(), "/" === p2[p2.length - 1] && (p2 = p2.substring(0, p2.length - 1), o2--), !r(p2)) { - let e3; - return e3 = 0 === p2.trim().length ? "Invalid space after '<'." : "Tag '" + p2 + "' is an invalid name.", m("InvalidTag", e3, b(t2, o2)); - } - const c2 = f(t2, o2); - if (false === c2) return m("InvalidAttr", "Attributes for '" + p2 + "' have open quote.", b(t2, o2)); - let E2 = c2.value; - if (o2 = c2.index, "/" === E2[E2.length - 1]) { - const n3 = o2 - E2.length; - E2 = E2.substring(0, E2.length - 1); - const s3 = g(E2, e2); - if (true !== s3) return m(s3.err.code, s3.err.msg, b(t2, n3 + s3.err.line)); - i2 = true; - } else if (d2) { - if (!c2.tagClosed) return m("InvalidTag", "Closing tag '" + p2 + "' doesn't have proper closing.", b(t2, o2)); - if (E2.trim().length > 0) return m("InvalidTag", "Closing tag '" + p2 + "' can't have attributes or invalid starting.", b(t2, a2)); - if (0 === n2.length) return m("InvalidTag", "Closing tag '" + p2 + "' has not been opened.", b(t2, a2)); - { - const e3 = n2.pop(); - if (p2 !== e3.tagName) { - let n3 = b(t2, e3.tagStartPos); - return m("InvalidTag", "Expected closing tag '" + e3.tagName + "' (opened in line " + n3.line + ", col " + n3.col + ") instead of closing tag '" + p2 + "'.", b(t2, a2)); - } - 0 == n2.length && (s2 = true); - } - } else { - const r2 = g(E2, e2); - if (true !== r2) return m(r2.err.code, r2.err.msg, b(t2, o2 - E2.length + r2.err.line)); - if (true === s2) return m("InvalidXml", "Multiple possible root nodes found.", b(t2, o2)); - -1 !== e2.unpairedTags.indexOf(p2) || n2.push({ tagName: p2, tagStartPos: a2 }), i2 = true; - } - for (o2++; o2 < t2.length; o2++) if ("<" === t2[o2]) { - if ("!" === t2[o2 + 1]) { - o2++, o2 = h(t2, o2); - continue; - } - if ("?" !== t2[o2 + 1]) break; - if (o2 = u(t2, ++o2), o2.err) return o2; - } else if ("&" === t2[o2]) { - const e3 = x(t2, o2); - if (-1 == e3) return m("InvalidChar", "char '&' is not expected.", b(t2, o2)); - o2 = e3; - } else if (true === s2 && !l(t2[o2])) return m("InvalidXml", "Extra text at the end", b(t2, o2)); - "<" === t2[o2] && o2--; - } - } - } - return i2 ? 1 == n2.length ? 
m("InvalidTag", "Unclosed tag '" + n2[0].tagName + "'.", b(t2, n2[0].tagStartPos)) : !(n2.length > 0) || m("InvalidXml", "Invalid '" + JSON.stringify(n2.map(((t3) => t3.tagName)), null, 4).replace(/\r?\n/g, "") + "' found.", { line: 1, col: 1 }) : m("InvalidXml", "Start tag expected.", 1); - } - function l(t2) { - return " " === t2 || " " === t2 || "\n" === t2 || "\r" === t2; - } - function u(t2, e2) { - const n2 = e2; - for (; e2 < t2.length; e2++) if ("?" != t2[e2] && " " != t2[e2]) ; - else { - const i2 = t2.substr(n2, e2 - n2); - if (e2 > 5 && "xml" === i2) return m("InvalidXml", "XML declaration allowed only at the start of the document.", b(t2, e2)); - if ("?" == t2[e2] && ">" == t2[e2 + 1]) { - e2++; - break; - } - } - return e2; +// node_modules/@azure/core-rest-pipeline/dist/commonjs/util/file.js +var require_file3 = __commonJS({ + "node_modules/@azure/core-rest-pipeline/dist/commonjs/util/file.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.hasRawContent = hasRawContent; + exports2.getRawContent = getRawContent; + exports2.createFileFromStream = createFileFromStream; + exports2.createFile = createFile; + var core_util_1 = require_commonjs4(); + function isNodeReadableStream(x) { + return Boolean(x && typeof x["pipe"] === "function"); + } + var unimplementedMethods = { + arrayBuffer: () => { + throw new Error("Not implemented"); + }, + bytes: () => { + throw new Error("Not implemented"); + }, + slice: () => { + throw new Error("Not implemented"); + }, + text: () => { + throw new Error("Not implemented"); } - function h(t2, e2) { - if (t2.length > e2 + 5 && "-" === t2[e2 + 1] && "-" === t2[e2 + 2]) { - for (e2 += 3; e2 < t2.length; e2++) if ("-" === t2[e2] && "-" === t2[e2 + 1] && ">" === t2[e2 + 2]) { - e2 += 2; - break; - } - } else if (t2.length > e2 + 8 && "D" === t2[e2 + 1] && "O" === t2[e2 + 2] && "C" === t2[e2 + 3] && "T" === t2[e2 + 4] && "Y" === t2[e2 + 5] && "P" === t2[e2 + 6] && "E" === t2[e2 + 7]) { - let n2 = 1; - for (e2 += 8; e2 < t2.length; e2++) if ("<" === t2[e2]) n2++; - else if (">" === t2[e2] && (n2--, 0 === n2)) break; - } else if (t2.length > e2 + 9 && "[" === t2[e2 + 1] && "C" === t2[e2 + 2] && "D" === t2[e2 + 3] && "A" === t2[e2 + 4] && "T" === t2[e2 + 5] && "A" === t2[e2 + 6] && "[" === t2[e2 + 7]) { - for (e2 += 8; e2 < t2.length; e2++) if ("]" === t2[e2] && "]" === t2[e2 + 1] && ">" === t2[e2 + 2]) { - e2 += 2; - break; - } - } - return e2; + }; + var rawContent = /* @__PURE__ */ Symbol("rawContent"); + function hasRawContent(x) { + return typeof x[rawContent] === "function"; + } + function getRawContent(blob) { + if (hasRawContent(blob)) { + return blob[rawContent](); + } else { + return blob; } - const d = '"', p = "'"; - function f(t2, e2) { - let n2 = "", i2 = "", s2 = false; - for (; e2 < t2.length; e2++) { - if (t2[e2] === d || t2[e2] === p) "" === i2 ? i2 = t2[e2] : i2 !== t2[e2] || (i2 = ""); - else if (">" === t2[e2] && "" === i2) { - s2 = true; - break; + } + function createFileFromStream(stream2, name, options = {}) { + return { + ...unimplementedMethods, + type: options.type ?? "", + lastModified: options.lastModified ?? (/* @__PURE__ */ new Date()).getTime(), + webkitRelativePath: options.webkitRelativePath ?? "", + size: options.size ?? 
-1, + name, + stream: () => { + const s = stream2(); + if (isNodeReadableStream(s)) { + throw new Error("Not supported: a Node stream was provided as input to createFileFromStream."); } - n2 += t2[e2]; - } - return "" === i2 && { value: n2, index: e2, tagClosed: s2 }; + return s; + }, + [rawContent]: stream2 + }; + } + function createFile(content, name, options = {}) { + if (core_util_1.isNodeLike) { + return { + ...unimplementedMethods, + type: options.type ?? "", + lastModified: options.lastModified ?? (/* @__PURE__ */ new Date()).getTime(), + webkitRelativePath: options.webkitRelativePath ?? "", + size: content.byteLength, + name, + arrayBuffer: async () => content.buffer, + stream: () => new Blob([toArrayBuffer(content)]).stream(), + [rawContent]: () => content + }; + } else { + return new File([toArrayBuffer(content)], name, options); } - const c = new RegExp(`(\\s*)([^\\s=]+)(\\s*=)?(\\s*(['"])(([\\s\\S])*?)\\5)?`, "g"); - function g(t2, e2) { - const n2 = s(t2, c), i2 = {}; - for (let t3 = 0; t3 < n2.length; t3++) { - if (0 === n2[t3][1].length) return m("InvalidAttr", "Attribute '" + n2[t3][2] + "' has no space in starting.", N(n2[t3])); - if (void 0 !== n2[t3][3] && void 0 === n2[t3][4]) return m("InvalidAttr", "Attribute '" + n2[t3][2] + "' is without value.", N(n2[t3])); - if (void 0 === n2[t3][3] && !e2.allowBooleanAttributes) return m("InvalidAttr", "boolean attribute '" + n2[t3][2] + "' is not allowed.", N(n2[t3])); - const s2 = n2[t3][2]; - if (!E(s2)) return m("InvalidAttr", "Attribute '" + s2 + "' is an invalid name.", N(n2[t3])); - if (i2.hasOwnProperty(s2)) return m("InvalidAttr", "Attribute '" + s2 + "' is repeated.", N(n2[t3])); - i2[s2] = 1; - } - return true; + } + function toArrayBuffer(source) { + if ("resize" in source.buffer) { + return source; } - function x(t2, e2) { - if (";" === t2[++e2]) return -1; - if ("#" === t2[e2]) return (function(t3, e3) { - let n3 = /\d/; - for ("x" === t3[e3] && (e3++, n3 = /[\da-fA-F]/); e3 < t3.length; e3++) { - if (";" === t3[e3]) return e3; - if (!t3[e3].match(n3)) break; + return source.map((x) => x); + } + } +}); + +// node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/multipartPolicy.js +var require_multipartPolicy2 = __commonJS({ + "node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/multipartPolicy.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.multipartPolicyName = void 0; + exports2.multipartPolicy = multipartPolicy; + var policies_1 = require_internal2(); + var file_js_1 = require_file3(); + exports2.multipartPolicyName = policies_1.multipartPolicyName; + function multipartPolicy() { + const tspPolicy = (0, policies_1.multipartPolicy)(); + return { + name: exports2.multipartPolicyName, + sendRequest: async (request2, next) => { + if (request2.multipartBody) { + for (const part of request2.multipartBody.parts) { + if ((0, file_js_1.hasRawContent)(part.body)) { + part.body = (0, file_js_1.getRawContent)(part.body); + } + } } - return -1; - })(t2, ++e2); - let n2 = 0; - for (; e2 < t2.length; e2++, n2++) if (!(t2[e2].match(/\w/) && n2 < 20)) { - if (";" === t2[e2]) break; - return -1; + return tspPolicy.sendRequest(request2, next); } - return e2; - } - function m(t2, e2, n2) { - return { err: { code: t2, msg: e2, line: n2.line || n2, col: n2.col } }; - } - function E(t2) { - return r(t2); - } - function b(t2, e2) { - const n2 = t2.substring(0, e2).split(/\r?\n/); - return { line: n2.length, col: n2[n2.length - 1].length + 1 }; - } - function 
N(t2) { - return t2.startIndex + t2[1].length; - } - const y = { preserveOrder: false, attributeNamePrefix: "@_", attributesGroupName: false, textNodeName: "#text", ignoreAttributes: true, removeNSPrefix: false, allowBooleanAttributes: false, parseTagValue: true, parseAttributeValue: false, trimValues: true, cdataPropName: false, numberParseOptions: { hex: true, leadingZeros: true, eNotation: true }, tagValueProcessor: function(t2, e2) { - return e2; - }, attributeValueProcessor: function(t2, e2) { - return e2; - }, stopNodes: [], alwaysCreateTextNode: false, isArray: () => false, commentPropName: false, unpairedTags: [], processEntities: true, htmlEntities: false, ignoreDeclaration: false, ignorePiTags: false, transformTagName: false, transformAttributeName: false, updateTag: function(t2, e2, n2) { - return t2; - }, captureMetaData: false }; - function T(t2) { - return "boolean" == typeof t2 ? { enabled: t2, maxEntitySize: 1e4, maxExpansionDepth: 10, maxTotalExpansions: 1e3, maxExpandedLength: 1e5, allowedTags: null, tagFilter: null } : "object" == typeof t2 && null !== t2 ? { enabled: false !== t2.enabled, maxEntitySize: t2.maxEntitySize ?? 1e4, maxExpansionDepth: t2.maxExpansionDepth ?? 10, maxTotalExpansions: t2.maxTotalExpansions ?? 1e3, maxExpandedLength: t2.maxExpandedLength ?? 1e5, allowedTags: t2.allowedTags ?? null, tagFilter: t2.tagFilter ?? null } : T(true); - } - const w = function(t2) { - const e2 = Object.assign({}, y, t2); - return e2.processEntities = T(e2.processEntities), e2; }; - let v; - v = "function" != typeof Symbol ? "@@xmlMetadata" : /* @__PURE__ */ Symbol("XML Node Metadata"); - class I { - constructor(t2) { - this.tagname = t2, this.child = [], this[":@"] = {}; - } - add(t2, e2) { - "__proto__" === t2 && (t2 = "#__proto__"), this.child.push({ [t2]: e2 }); - } - addChild(t2, e2) { - "__proto__" === t2.tagname && (t2.tagname = "#__proto__"), t2[":@"] && Object.keys(t2[":@"]).length > 0 ? this.child.push({ [t2.tagname]: t2.child, ":@": t2[":@"] }) : this.child.push({ [t2.tagname]: t2.child }), void 0 !== e2 && (this.child[this.child.length - 1][v] = { startIndex: e2 }); - } - static getMetaDataSymbol() { - return v; - } - } - class O { - constructor(t2) { - this.suppressValidationErr = !t2, this.options = t2; - } - readDocType(t2, e2) { - const n2 = {}; - if ("O" !== t2[e2 + 3] || "C" !== t2[e2 + 4] || "T" !== t2[e2 + 5] || "Y" !== t2[e2 + 6] || "P" !== t2[e2 + 7] || "E" !== t2[e2 + 8]) throw new Error("Invalid Tag instead of DOCTYPE"); - { - e2 += 9; - let i2 = 1, s2 = false, r2 = false, o2 = ""; - for (; e2 < t2.length; e2++) if ("<" !== t2[e2] || r2) if (">" === t2[e2]) { - if (r2 ? "-" === t2[e2 - 1] && "-" === t2[e2 - 2] && (r2 = false, i2--) : i2--, 0 === i2) break; - } else "[" === t2[e2] ? 
s2 = true : o2 += t2[e2]; - else { - if (s2 && A(t2, "!ENTITY", e2)) { - let i3, s3; - if (e2 += 7, [i3, s3, e2] = this.readEntityExp(t2, e2 + 1, this.suppressValidationErr), -1 === s3.indexOf("&")) { - const t3 = i3.replace(/[.\-+*:]/g, "\\."); - n2[i3] = { regx: RegExp(`&${t3};`, "g"), val: s3 }; - } - } else if (s2 && A(t2, "!ELEMENT", e2)) { - e2 += 8; - const { index: n3 } = this.readElementExp(t2, e2 + 1); - e2 = n3; - } else if (s2 && A(t2, "!ATTLIST", e2)) e2 += 8; - else if (s2 && A(t2, "!NOTATION", e2)) { - e2 += 9; - const { index: n3 } = this.readNotationExp(t2, e2 + 1, this.suppressValidationErr); - e2 = n3; - } else { - if (!A(t2, "!--", e2)) throw new Error("Invalid DOCTYPE"); - r2 = true; - } - i2++, o2 = ""; - } - if (0 !== i2) throw new Error("Unclosed DOCTYPE"); - } - return { entities: n2, i: e2 }; - } - readEntityExp(t2, e2) { - e2 = P(t2, e2); - let n2 = ""; - for (; e2 < t2.length && !/\s/.test(t2[e2]) && '"' !== t2[e2] && "'" !== t2[e2]; ) n2 += t2[e2], e2++; - if (S(n2), e2 = P(t2, e2), !this.suppressValidationErr) { - if ("SYSTEM" === t2.substring(e2, e2 + 6).toUpperCase()) throw new Error("External entities are not supported"); - if ("%" === t2[e2]) throw new Error("Parameter entities are not supported"); - } - let i2 = ""; - if ([e2, i2] = this.readIdentifierVal(t2, e2, "entity"), false !== this.options.enabled && this.options.maxEntitySize && i2.length > this.options.maxEntitySize) throw new Error(`Entity "${n2}" size (${i2.length}) exceeds maximum allowed size (${this.options.maxEntitySize})`); - return [n2, i2, --e2]; - } - readNotationExp(t2, e2) { - e2 = P(t2, e2); - let n2 = ""; - for (; e2 < t2.length && !/\s/.test(t2[e2]); ) n2 += t2[e2], e2++; - !this.suppressValidationErr && S(n2), e2 = P(t2, e2); - const i2 = t2.substring(e2, e2 + 6).toUpperCase(); - if (!this.suppressValidationErr && "SYSTEM" !== i2 && "PUBLIC" !== i2) throw new Error(`Expected SYSTEM or PUBLIC, found "${i2}"`); - e2 += i2.length, e2 = P(t2, e2); - let s2 = null, r2 = null; - if ("PUBLIC" === i2) [e2, s2] = this.readIdentifierVal(t2, e2, "publicIdentifier"), '"' !== t2[e2 = P(t2, e2)] && "'" !== t2[e2] || ([e2, r2] = this.readIdentifierVal(t2, e2, "systemIdentifier")); - else if ("SYSTEM" === i2 && ([e2, r2] = this.readIdentifierVal(t2, e2, "systemIdentifier"), !this.suppressValidationErr && !r2)) throw new Error("Missing mandatory system identifier for SYSTEM notation"); - return { notationName: n2, publicIdentifier: s2, systemIdentifier: r2, index: --e2 }; - } - readIdentifierVal(t2, e2, n2) { - let i2 = ""; - const s2 = t2[e2]; - if ('"' !== s2 && "'" !== s2) throw new Error(`Expected quoted string, found "${s2}"`); - for (e2++; e2 < t2.length && t2[e2] !== s2; ) i2 += t2[e2], e2++; - if (t2[e2] !== s2) throw new Error(`Unterminated ${n2} value`); - return [++e2, i2]; - } - readElementExp(t2, e2) { - e2 = P(t2, e2); - let n2 = ""; - for (; e2 < t2.length && !/\s/.test(t2[e2]); ) n2 += t2[e2], e2++; - if (!this.suppressValidationErr && !r(n2)) throw new Error(`Invalid element name: "${n2}"`); - let i2 = ""; - if ("E" === t2[e2 = P(t2, e2)] && A(t2, "MPTY", e2)) e2 += 4; - else if ("A" === t2[e2] && A(t2, "NY", e2)) e2 += 2; - else if ("(" === t2[e2]) { - for (e2++; e2 < t2.length && ")" !== t2[e2]; ) i2 += t2[e2], e2++; - if (")" !== t2[e2]) throw new Error("Unterminated content model"); - } else if (!this.suppressValidationErr) throw new Error(`Invalid Element Expression, found "${t2[e2]}"`); - return { elementName: n2, contentModel: i2.trim(), index: e2 }; - } - 
readAttlistExp(t2, e2) { - e2 = P(t2, e2); - let n2 = ""; - for (; e2 < t2.length && !/\s/.test(t2[e2]); ) n2 += t2[e2], e2++; - S(n2), e2 = P(t2, e2); - let i2 = ""; - for (; e2 < t2.length && !/\s/.test(t2[e2]); ) i2 += t2[e2], e2++; - if (!S(i2)) throw new Error(`Invalid attribute name: "${i2}"`); - e2 = P(t2, e2); - let s2 = ""; - if ("NOTATION" === t2.substring(e2, e2 + 8).toUpperCase()) { - if (s2 = "NOTATION", "(" !== t2[e2 = P(t2, e2 += 8)]) throw new Error(`Expected '(', found "${t2[e2]}"`); - e2++; - let n3 = []; - for (; e2 < t2.length && ")" !== t2[e2]; ) { - let i3 = ""; - for (; e2 < t2.length && "|" !== t2[e2] && ")" !== t2[e2]; ) i3 += t2[e2], e2++; - if (i3 = i3.trim(), !S(i3)) throw new Error(`Invalid notation name: "${i3}"`); - n3.push(i3), "|" === t2[e2] && (e2++, e2 = P(t2, e2)); - } - if (")" !== t2[e2]) throw new Error("Unterminated list of notations"); - e2++, s2 += " (" + n3.join("|") + ")"; - } else { - for (; e2 < t2.length && !/\s/.test(t2[e2]); ) s2 += t2[e2], e2++; - const n3 = ["CDATA", "ID", "IDREF", "IDREFS", "ENTITY", "ENTITIES", "NMTOKEN", "NMTOKENS"]; - if (!this.suppressValidationErr && !n3.includes(s2.toUpperCase())) throw new Error(`Invalid attribute type: "${s2}"`); + } + } +}); + +// node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/decompressResponsePolicy.js +var require_decompressResponsePolicy2 = __commonJS({ + "node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/decompressResponsePolicy.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.decompressResponsePolicyName = void 0; + exports2.decompressResponsePolicy = decompressResponsePolicy; + var policies_1 = require_internal2(); + exports2.decompressResponsePolicyName = policies_1.decompressResponsePolicyName; + function decompressResponsePolicy() { + return (0, policies_1.decompressResponsePolicy)(); + } + } +}); + +// node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/defaultRetryPolicy.js +var require_defaultRetryPolicy2 = __commonJS({ + "node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/defaultRetryPolicy.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.defaultRetryPolicyName = void 0; + exports2.defaultRetryPolicy = defaultRetryPolicy; + var policies_1 = require_internal2(); + exports2.defaultRetryPolicyName = policies_1.defaultRetryPolicyName; + function defaultRetryPolicy(options = {}) { + return (0, policies_1.defaultRetryPolicy)(options); + } + } +}); + +// node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/formDataPolicy.js +var require_formDataPolicy2 = __commonJS({ + "node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/formDataPolicy.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.formDataPolicyName = void 0; + exports2.formDataPolicy = formDataPolicy; + var policies_1 = require_internal2(); + exports2.formDataPolicyName = policies_1.formDataPolicyName; + function formDataPolicy() { + return (0, policies_1.formDataPolicy)(); + } + } +}); + +// node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/proxyPolicy.js +var require_proxyPolicy2 = __commonJS({ + "node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/proxyPolicy.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.proxyPolicyName = void 0; + exports2.getDefaultProxySettings = getDefaultProxySettings; + 
exports2.proxyPolicy = proxyPolicy; + var policies_1 = require_internal2(); + exports2.proxyPolicyName = policies_1.proxyPolicyName; + function getDefaultProxySettings(proxyUrl) { + return (0, policies_1.getDefaultProxySettings)(proxyUrl); + } + function proxyPolicy(proxySettings, options) { + return (0, policies_1.proxyPolicy)(proxySettings, options); + } + } +}); + +// node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/setClientRequestIdPolicy.js +var require_setClientRequestIdPolicy = __commonJS({ + "node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/setClientRequestIdPolicy.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.setClientRequestIdPolicyName = void 0; + exports2.setClientRequestIdPolicy = setClientRequestIdPolicy; + exports2.setClientRequestIdPolicyName = "setClientRequestIdPolicy"; + function setClientRequestIdPolicy(requestIdHeaderName = "x-ms-client-request-id") { + return { + name: exports2.setClientRequestIdPolicyName, + async sendRequest(request2, next) { + if (!request2.headers.has(requestIdHeaderName)) { + request2.headers.set(requestIdHeaderName, request2.requestId); } - e2 = P(t2, e2); - let r2 = ""; - return "#REQUIRED" === t2.substring(e2, e2 + 8).toUpperCase() ? (r2 = "#REQUIRED", e2 += 8) : "#IMPLIED" === t2.substring(e2, e2 + 7).toUpperCase() ? (r2 = "#IMPLIED", e2 += 7) : [e2, r2] = this.readIdentifierVal(t2, e2, "ATTLIST"), { elementName: n2, attributeName: i2, attributeType: s2, defaultValue: r2, index: e2 }; + return next(request2); } - } - const P = (t2, e2) => { - for (; e2 < t2.length && /\s/.test(t2[e2]); ) e2++; - return e2; }; - function A(t2, e2, n2) { - for (let i2 = 0; i2 < e2.length; i2++) if (e2[i2] !== t2[n2 + i2 + 1]) return false; - return true; + } + } +}); + +// node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/agentPolicy.js +var require_agentPolicy2 = __commonJS({ + "node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/agentPolicy.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.agentPolicyName = void 0; + exports2.agentPolicy = agentPolicy; + var policies_1 = require_internal2(); + exports2.agentPolicyName = policies_1.agentPolicyName; + function agentPolicy(agent) { + return (0, policies_1.agentPolicy)(agent); + } + } +}); + +// node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/tlsPolicy.js +var require_tlsPolicy2 = __commonJS({ + "node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/tlsPolicy.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.tlsPolicyName = void 0; + exports2.tlsPolicy = tlsPolicy; + var policies_1 = require_internal2(); + exports2.tlsPolicyName = policies_1.tlsPolicyName; + function tlsPolicy(tlsSettings) { + return (0, policies_1.tlsPolicy)(tlsSettings); + } + } +}); + +// node_modules/@azure/core-tracing/dist/commonjs/tracingContext.js +var require_tracingContext = __commonJS({ + "node_modules/@azure/core-tracing/dist/commonjs/tracingContext.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.TracingContextImpl = exports2.knownContextKeys = void 0; + exports2.createTracingContext = createTracingContext; + exports2.knownContextKeys = { + span: /* @__PURE__ */ Symbol.for("@azure/core-tracing span"), + namespace: /* @__PURE__ */ Symbol.for("@azure/core-tracing namespace") + }; + function createTracingContext(options = {}) { + 
let context2 = new TracingContextImpl(options.parentContext); + if (options.span) { + context2 = context2.setValue(exports2.knownContextKeys.span, options.span); } - function S(t2) { - if (r(t2)) return t2; - throw new Error(`Invalid entity name ${t2}`); + if (options.namespace) { + context2 = context2.setValue(exports2.knownContextKeys.namespace, options.namespace); } - const C = /^[-+]?0x[a-fA-F0-9]+$/, $ = /^([\-\+])?(0*)([0-9]*(\.[0-9]*)?)$/, V = { hex: true, leadingZeros: true, decimalPoint: ".", eNotation: true }; - const D = /^([-+])?(0*)(\d*(\.\d*)?[eE][-\+]?\d+)$/; - function L(t2) { - return "function" == typeof t2 ? t2 : Array.isArray(t2) ? (e2) => { - for (const n2 of t2) { - if ("string" == typeof n2 && e2 === n2) return true; - if (n2 instanceof RegExp && n2.test(e2)) return true; - } - } : () => false; + return context2; + } + var TracingContextImpl = class _TracingContextImpl { + _contextMap; + constructor(initialContext) { + this._contextMap = initialContext instanceof _TracingContextImpl ? new Map(initialContext._contextMap) : /* @__PURE__ */ new Map(); } - class F { - constructor(t2) { - if (this.options = t2, this.currentNode = null, this.tagsNodeStack = [], this.docTypeEntities = {}, this.lastEntities = { apos: { regex: /&(apos|#39|#x27);/g, val: "'" }, gt: { regex: /&(gt|#62|#x3E);/g, val: ">" }, lt: { regex: /&(lt|#60|#x3C);/g, val: "<" }, quot: { regex: /&(quot|#34|#x22);/g, val: '"' } }, this.ampEntity = { regex: /&(amp|#38|#x26);/g, val: "&" }, this.htmlEntities = { space: { regex: /&(nbsp|#160);/g, val: " " }, cent: { regex: /&(cent|#162);/g, val: "\xA2" }, pound: { regex: /&(pound|#163);/g, val: "\xA3" }, yen: { regex: /&(yen|#165);/g, val: "\xA5" }, euro: { regex: /&(euro|#8364);/g, val: "\u20AC" }, copyright: { regex: /&(copy|#169);/g, val: "\xA9" }, reg: { regex: /&(reg|#174);/g, val: "\xAE" }, inr: { regex: /&(inr|#8377);/g, val: "\u20B9" }, num_dec: { regex: /&#([0-9]{1,7});/g, val: (t3, e2) => K(e2, 10, "&#") }, num_hex: { regex: /&#x([0-9a-fA-F]{1,6});/g, val: (t3, e2) => K(e2, 16, "&#x") } }, this.addExternalEntities = j, this.parseXml = B, this.parseTextData = M, this.resolveNameSpace = _, this.buildAttributesMap = U, this.isItStopNode = X, this.replaceEntitiesValue = Y, this.readStopNodeData = q, this.saveTextToParentTag = G, this.addChild = R, this.ignoreAttributesFn = L(this.options.ignoreAttributes), this.entityExpansionCount = 0, this.currentExpandedLength = 0, this.options.stopNodes && this.options.stopNodes.length > 0) { - this.stopNodesExact = /* @__PURE__ */ new Set(), this.stopNodesWildcard = /* @__PURE__ */ new Set(); - for (let t3 = 0; t3 < this.options.stopNodes.length; t3++) { - const e2 = this.options.stopNodes[t3]; - "string" == typeof e2 && (e2.startsWith("*.") ? 
this.stopNodesWildcard.add(e2.substring(2)) : this.stopNodesExact.add(e2)); - } - } - } + setValue(key, value) { + const newContext = new _TracingContextImpl(this); + newContext._contextMap.set(key, value); + return newContext; } - function j(t2) { - const e2 = Object.keys(t2); - for (let n2 = 0; n2 < e2.length; n2++) { - const i2 = e2[n2], s2 = i2.replace(/[.\-+*:]/g, "\\."); - this.lastEntities[i2] = { regex: new RegExp("&" + s2 + ";", "g"), val: t2[i2] }; - } + getValue(key) { + return this._contextMap.get(key); } - function M(t2, e2, n2, i2, s2, r2, o2) { - if (void 0 !== t2 && (this.options.trimValues && !i2 && (t2 = t2.trim()), t2.length > 0)) { - o2 || (t2 = this.replaceEntitiesValue(t2, e2, n2)); - const i3 = this.options.tagValueProcessor(e2, t2, n2, s2, r2); - return null == i3 ? t2 : typeof i3 != typeof t2 || i3 !== t2 ? i3 : this.options.trimValues || t2.trim() === t2 ? Z(t2, this.options.parseTagValue, this.options.numberParseOptions) : t2; - } + deleteValue(key) { + const newContext = new _TracingContextImpl(this); + newContext._contextMap.delete(key); + return newContext; } - function _(t2) { - if (this.options.removeNSPrefix) { - const e2 = t2.split(":"), n2 = "/" === t2.charAt(0) ? "/" : ""; - if ("xmlns" === e2[0]) return ""; - 2 === e2.length && (t2 = n2 + e2[1]); + }; + exports2.TracingContextImpl = TracingContextImpl; + } +}); + +// node_modules/@azure/core-tracing/dist/commonjs/state.js +var require_state = __commonJS({ + "node_modules/@azure/core-tracing/dist/commonjs/state.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.state = void 0; + exports2.state = { + instrumenterImplementation: void 0 + }; + } +}); + +// node_modules/@azure/core-tracing/dist/commonjs/instrumenter.js +var require_instrumenter = __commonJS({ + "node_modules/@azure/core-tracing/dist/commonjs/instrumenter.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.createDefaultTracingSpan = createDefaultTracingSpan; + exports2.createDefaultInstrumenter = createDefaultInstrumenter; + exports2.useInstrumenter = useInstrumenter; + exports2.getInstrumenter = getInstrumenter; + var tracingContext_js_1 = require_tracingContext(); + var state_js_1 = require_state(); + function createDefaultTracingSpan() { + return { + end: () => { + }, + isRecording: () => false, + recordException: () => { + }, + setAttribute: () => { + }, + setStatus: () => { + }, + addEvent: () => { } - return t2; + }; + } + function createDefaultInstrumenter() { + return { + createRequestHeaders: () => { + return {}; + }, + parseTraceparentHeader: () => { + return void 0; + }, + startSpan: (_name, spanOptions) => { + return { + span: createDefaultTracingSpan(), + tracingContext: (0, tracingContext_js_1.createTracingContext)({ parentContext: spanOptions.tracingContext }) + }; + }, + withContext(_context, callback, ...callbackArgs) { + return callback(...callbackArgs); + } + }; + } + function useInstrumenter(instrumenter) { + state_js_1.state.instrumenterImplementation = instrumenter; + } + function getInstrumenter() { + if (!state_js_1.state.instrumenterImplementation) { + state_js_1.state.instrumenterImplementation = createDefaultInstrumenter(); } - const k = new RegExp(`([^\\s=]+)\\s*(=\\s*(['"])([\\s\\S]*?)\\3)?`, "gm"); - function U(t2, e2, n2) { - if (true !== this.options.ignoreAttributes && "string" == typeof t2) { - const i2 = s(t2, k), r2 = i2.length, o2 = {}; - for (let t3 = 0; t3 < r2; t3++) { - const s2 = 
this.resolveNameSpace(i2[t3][1]); - if (this.ignoreAttributesFn(s2, e2)) continue; - let r3 = i2[t3][4], a2 = this.options.attributeNamePrefix + s2; - if (s2.length) if (this.options.transformAttributeName && (a2 = this.options.transformAttributeName(a2)), "__proto__" === a2 && (a2 = "#__proto__"), void 0 !== r3) { - this.options.trimValues && (r3 = r3.trim()), r3 = this.replaceEntitiesValue(r3, n2, e2); - const t4 = this.options.attributeValueProcessor(s2, r3, e2); - o2[a2] = null == t4 ? r3 : typeof t4 != typeof r3 || t4 !== r3 ? t4 : Z(r3, this.options.parseAttributeValue, this.options.numberParseOptions); - } else this.options.allowBooleanAttributes && (o2[a2] = true); - } - if (!Object.keys(o2).length) return; - if (this.options.attributesGroupName) { - const t3 = {}; - return t3[this.options.attributesGroupName] = o2, t3; - } - return o2; + return state_js_1.state.instrumenterImplementation; + } + } +}); + +// node_modules/@azure/core-tracing/dist/commonjs/tracingClient.js +var require_tracingClient = __commonJS({ + "node_modules/@azure/core-tracing/dist/commonjs/tracingClient.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.createTracingClient = createTracingClient; + var instrumenter_js_1 = require_instrumenter(); + var tracingContext_js_1 = require_tracingContext(); + function createTracingClient(options) { + const { namespace, packageName, packageVersion } = options; + function startSpan(name, operationOptions, spanOptions) { + const startSpanResult = (0, instrumenter_js_1.getInstrumenter)().startSpan(name, { + ...spanOptions, + packageName, + packageVersion, + tracingContext: operationOptions?.tracingOptions?.tracingContext + }); + let tracingContext = startSpanResult.tracingContext; + const span = startSpanResult.span; + if (!tracingContext.getValue(tracingContext_js_1.knownContextKeys.namespace)) { + tracingContext = tracingContext.setValue(tracingContext_js_1.knownContextKeys.namespace, namespace); } + span.setAttribute("az.namespace", tracingContext.getValue(tracingContext_js_1.knownContextKeys.namespace)); + const updatedOptions = Object.assign({}, operationOptions, { + tracingOptions: { ...operationOptions?.tracingOptions, tracingContext } + }); + return { + span, + updatedOptions + }; } - const B = function(t2) { - t2 = t2.replace(/\r\n?/g, "\n"); - const e2 = new I("!xml"); - let n2 = e2, i2 = "", s2 = ""; - this.entityExpansionCount = 0, this.currentExpandedLength = 0; - const r2 = new O(this.options.processEntities); - for (let o2 = 0; o2 < t2.length; o2++) if ("<" === t2[o2]) if ("/" === t2[o2 + 1]) { - const e3 = z(t2, ">", o2, "Closing Tag is not closed."); - let r3 = t2.substring(o2 + 2, e3).trim(); - if (this.options.removeNSPrefix) { - const t3 = r3.indexOf(":"); - -1 !== t3 && (r3 = r3.substr(t3 + 1)); - } - this.options.transformTagName && (r3 = this.options.transformTagName(r3)), n2 && (i2 = this.saveTextToParentTag(i2, n2, s2)); - const a2 = s2.substring(s2.lastIndexOf(".") + 1); - if (r3 && -1 !== this.options.unpairedTags.indexOf(r3)) throw new Error(`Unpaired tag can not be used as closing tag: `); - let l2 = 0; - a2 && -1 !== this.options.unpairedTags.indexOf(a2) ? (l2 = s2.lastIndexOf(".", s2.lastIndexOf(".") - 1), this.tagsNodeStack.pop()) : l2 = s2.lastIndexOf("."), s2 = s2.substring(0, l2), n2 = this.tagsNodeStack.pop(), i2 = "", o2 = e3; - } else if ("?" 
=== t2[o2 + 1]) { - let e3 = W(t2, o2, false, "?>"); - if (!e3) throw new Error("Pi Tag is not closed."); - if (i2 = this.saveTextToParentTag(i2, n2, s2), this.options.ignoreDeclaration && "?xml" === e3.tagName || this.options.ignorePiTags) ; - else { - const t3 = new I(e3.tagName); - t3.add(this.options.textNodeName, ""), e3.tagName !== e3.tagExp && e3.attrExpPresent && (t3[":@"] = this.buildAttributesMap(e3.tagExp, s2, e3.tagName)), this.addChild(n2, t3, s2, o2); - } - o2 = e3.closeIndex + 1; - } else if ("!--" === t2.substr(o2 + 1, 3)) { - const e3 = z(t2, "-->", o2 + 4, "Comment is not closed."); - if (this.options.commentPropName) { - const r3 = t2.substring(o2 + 4, e3 - 2); - i2 = this.saveTextToParentTag(i2, n2, s2), n2.add(this.options.commentPropName, [{ [this.options.textNodeName]: r3 }]); - } - o2 = e3; - } else if ("!D" === t2.substr(o2 + 1, 2)) { - const e3 = r2.readDocType(t2, o2); - this.docTypeEntities = e3.entities, o2 = e3.i; - } else if ("![" === t2.substr(o2 + 1, 2)) { - const e3 = z(t2, "]]>", o2, "CDATA is not closed.") - 2, r3 = t2.substring(o2 + 9, e3); - i2 = this.saveTextToParentTag(i2, n2, s2); - let a2 = this.parseTextData(r3, n2.tagname, s2, true, false, true, true); - null == a2 && (a2 = ""), this.options.cdataPropName ? n2.add(this.options.cdataPropName, [{ [this.options.textNodeName]: r3 }]) : n2.add(this.options.textNodeName, a2), o2 = e3 + 2; - } else { - let r3 = W(t2, o2, this.options.removeNSPrefix), a2 = r3.tagName; - const l2 = r3.rawTagName; - let u2 = r3.tagExp, h2 = r3.attrExpPresent, d2 = r3.closeIndex; - if (this.options.transformTagName) { - const t3 = this.options.transformTagName(a2); - u2 === a2 && (u2 = t3), a2 = t3; - } - n2 && i2 && "!xml" !== n2.tagname && (i2 = this.saveTextToParentTag(i2, n2, s2, false)); - const p2 = n2; - p2 && -1 !== this.options.unpairedTags.indexOf(p2.tagname) && (n2 = this.tagsNodeStack.pop(), s2 = s2.substring(0, s2.lastIndexOf("."))), a2 !== e2.tagname && (s2 += s2 ? "." + a2 : a2); - const f2 = o2; - if (this.isItStopNode(this.stopNodesExact, this.stopNodesWildcard, s2, a2)) { - let e3 = ""; - if (u2.length > 0 && u2.lastIndexOf("/") === u2.length - 1) "/" === a2[a2.length - 1] ? (a2 = a2.substr(0, a2.length - 1), s2 = s2.substr(0, s2.length - 1), u2 = a2) : u2 = u2.substr(0, u2.length - 1), o2 = r3.closeIndex; - else if (-1 !== this.options.unpairedTags.indexOf(a2)) o2 = r3.closeIndex; - else { - const n3 = this.readStopNodeData(t2, l2, d2 + 1); - if (!n3) throw new Error(`Unexpected end of ${l2}`); - o2 = n3.i, e3 = n3.tagContent; - } - const i3 = new I(a2); - a2 !== u2 && h2 && (i3[":@"] = this.buildAttributesMap(u2, s2, a2)), e3 && (e3 = this.parseTextData(e3, a2, s2, true, h2, true, true)), s2 = s2.substr(0, s2.lastIndexOf(".")), i3.add(this.options.textNodeName, e3), this.addChild(n2, i3, s2, f2); - } else { - if (u2.length > 0 && u2.lastIndexOf("/") === u2.length - 1) { - if ("/" === a2[a2.length - 1] ? 
(a2 = a2.substr(0, a2.length - 1), s2 = s2.substr(0, s2.length - 1), u2 = a2) : u2 = u2.substr(0, u2.length - 1), this.options.transformTagName) { - const t4 = this.options.transformTagName(a2); - u2 === a2 && (u2 = t4), a2 = t4; - } - const t3 = new I(a2); - a2 !== u2 && h2 && (t3[":@"] = this.buildAttributesMap(u2, s2, a2)), this.addChild(n2, t3, s2, f2), s2 = s2.substr(0, s2.lastIndexOf(".")); - } else { - const t3 = new I(a2); - this.tagsNodeStack.push(n2), a2 !== u2 && h2 && (t3[":@"] = this.buildAttributesMap(u2, s2, a2)), this.addChild(n2, t3, s2, f2), n2 = t3; - } - i2 = "", o2 = d2; - } + async function withSpan(name, operationOptions, callback, spanOptions) { + const { span, updatedOptions } = startSpan(name, operationOptions, spanOptions); + try { + const result = await withContext(updatedOptions.tracingOptions.tracingContext, () => Promise.resolve(callback(updatedOptions, span))); + span.setStatus({ status: "success" }); + return result; + } catch (err) { + span.setStatus({ status: "error", error: err }); + throw err; + } finally { + span.end(); } - else i2 += t2[o2]; - return e2.child; - }; - function R(t2, e2, n2, i2) { - this.options.captureMetaData || (i2 = void 0); - const s2 = this.options.updateTag(e2.tagname, n2, e2[":@"]); - false === s2 || ("string" == typeof s2 ? (e2.tagname = s2, t2.addChild(e2, i2)) : t2.addChild(e2, i2)); } - const Y = function(t2, e2, n2) { - if (-1 === t2.indexOf("&")) return t2; - const i2 = this.options.processEntities; - if (!i2.enabled) return t2; - if (i2.allowedTags && !i2.allowedTags.includes(e2)) return t2; - if (i2.tagFilter && !i2.tagFilter(e2, n2)) return t2; - for (let e3 in this.docTypeEntities) { - const n3 = this.docTypeEntities[e3], s2 = t2.match(n3.regx); - if (s2) { - if (this.entityExpansionCount += s2.length, i2.maxTotalExpansions && this.entityExpansionCount > i2.maxTotalExpansions) throw new Error(`Entity expansion limit exceeded: ${this.entityExpansionCount} > ${i2.maxTotalExpansions}`); - const e4 = t2.length; - if (t2 = t2.replace(n3.regx, n3.val), i2.maxExpandedLength && (this.currentExpandedLength += t2.length - e4, this.currentExpandedLength > i2.maxExpandedLength)) throw new Error(`Total expanded content size exceeded: ${this.currentExpandedLength} > ${i2.maxExpandedLength}`); - } - } - if (-1 === t2.indexOf("&")) return t2; - for (let e3 in this.lastEntities) { - const n3 = this.lastEntities[e3]; - t2 = t2.replace(n3.regex, n3.val); - } - if (-1 === t2.indexOf("&")) return t2; - if (this.options.htmlEntities) for (let e3 in this.htmlEntities) { - const n3 = this.htmlEntities[e3]; - t2 = t2.replace(n3.regex, n3.val); - } - return t2.replace(this.ampEntity.regex, this.ampEntity.val); - }; - function G(t2, e2, n2, i2) { - return t2 && (void 0 === i2 && (i2 = 0 === e2.child.length), void 0 !== (t2 = this.parseTextData(t2, e2.tagname, n2, false, !!e2[":@"] && 0 !== Object.keys(e2[":@"]).length, i2)) && "" !== t2 && e2.add(this.options.textNodeName, t2), t2 = ""), t2; + function withContext(context2, callback, ...callbackArgs) { + return (0, instrumenter_js_1.getInstrumenter)().withContext(context2, callback, ...callbackArgs); } - function X(t2, e2, n2, i2) { - return !(!e2 || !e2.has(i2)) || !(!t2 || !t2.has(n2)); + function parseTraceparentHeader(traceparentHeader) { + return (0, instrumenter_js_1.getInstrumenter)().parseTraceparentHeader(traceparentHeader); } - function z(t2, e2, n2, i2) { - const s2 = t2.indexOf(e2, n2); - if (-1 === s2) throw new Error(i2); - return s2 + e2.length - 1; + function 
createRequestHeaders(tracingContext) { + return (0, instrumenter_js_1.getInstrumenter)().createRequestHeaders(tracingContext); } - function W(t2, e2, n2, i2 = ">") { - const s2 = (function(t3, e3, n3 = ">") { - let i3, s3 = ""; - for (let r3 = e3; r3 < t3.length; r3++) { - let e4 = t3[r3]; - if (i3) e4 === i3 && (i3 = ""); - else if ('"' === e4 || "'" === e4) i3 = e4; - else if (e4 === n3[0]) { - if (!n3[1]) return { data: s3, index: r3 }; - if (t3[r3 + 1] === n3[1]) return { data: s3, index: r3 }; - } else " " === e4 && (e4 = " "); - s3 += e4; + return { + startSpan, + withSpan, + withContext, + parseTraceparentHeader, + createRequestHeaders + }; + } + } +}); + +// node_modules/@azure/core-tracing/dist/commonjs/index.js +var require_commonjs5 = __commonJS({ + "node_modules/@azure/core-tracing/dist/commonjs/index.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.createTracingClient = exports2.useInstrumenter = void 0; + var instrumenter_js_1 = require_instrumenter(); + Object.defineProperty(exports2, "useInstrumenter", { enumerable: true, get: function() { + return instrumenter_js_1.useInstrumenter; + } }); + var tracingClient_js_1 = require_tracingClient(); + Object.defineProperty(exports2, "createTracingClient", { enumerable: true, get: function() { + return tracingClient_js_1.createTracingClient; + } }); + } +}); + +// node_modules/@azure/core-rest-pipeline/dist/commonjs/restError.js +var require_restError3 = __commonJS({ + "node_modules/@azure/core-rest-pipeline/dist/commonjs/restError.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.RestError = void 0; + exports2.isRestError = isRestError; + var ts_http_runtime_1 = require_commonjs(); + exports2.RestError = ts_http_runtime_1.RestError; + function isRestError(e) { + return (0, ts_http_runtime_1.isRestError)(e); + } + } +}); + +// node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/tracingPolicy.js +var require_tracingPolicy = __commonJS({ + "node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/tracingPolicy.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.tracingPolicyName = void 0; + exports2.tracingPolicy = tracingPolicy; + var core_tracing_1 = require_commonjs5(); + var constants_js_1 = require_constants14(); + var userAgent_js_1 = require_userAgent2(); + var log_js_1 = require_log3(); + var core_util_1 = require_commonjs4(); + var restError_js_1 = require_restError3(); + var util_1 = require_internal3(); + exports2.tracingPolicyName = "tracingPolicy"; + function tracingPolicy(options = {}) { + const userAgentPromise = (0, userAgent_js_1.getUserAgentValue)(options.userAgentPrefix); + const sanitizer = new util_1.Sanitizer({ + additionalAllowedQueryParameters: options.additionalAllowedQueryParameters + }); + const tracingClient = tryCreateTracingClient(); + return { + name: exports2.tracingPolicyName, + async sendRequest(request2, next) { + if (!tracingClient) { + return next(request2); + } + const userAgent2 = await userAgentPromise; + const spanAttributes = { + "http.url": sanitizer.sanitizeUrl(request2.url), + "http.method": request2.method, + "http.user_agent": userAgent2, + requestId: request2.requestId + }; + if (userAgent2) { + spanAttributes["http.user_agent"] = userAgent2; + } + const { span, tracingContext } = tryCreateSpan(tracingClient, request2, spanAttributes) ?? 
{}; + if (!span || !tracingContext) { + return next(request2); + } + try { + const response = await tracingClient.withContext(tracingContext, next, request2); + tryProcessResponse(span, response); + return response; + } catch (err) { + tryProcessError(span, err); + throw err; } - })(t2, e2 + 1, i2); - if (!s2) return; - let r2 = s2.data; - const o2 = s2.index, a2 = r2.search(/\s/); - let l2 = r2, u2 = true; - -1 !== a2 && (l2 = r2.substring(0, a2), r2 = r2.substring(a2 + 1).trimStart()); - const h2 = l2; - if (n2) { - const t3 = l2.indexOf(":"); - -1 !== t3 && (l2 = l2.substr(t3 + 1), u2 = l2 !== s2.data.substr(t3 + 1)); } - return { tagName: l2, tagExp: r2, closeIndex: o2, attrExpPresent: u2, rawTagName: h2 }; + }; + } + function tryCreateTracingClient() { + try { + return (0, core_tracing_1.createTracingClient)({ + namespace: "", + packageName: "@azure/core-rest-pipeline", + packageVersion: constants_js_1.SDK_VERSION + }); + } catch (e) { + log_js_1.logger.warning(`Error when creating the TracingClient: ${(0, core_util_1.getErrorMessage)(e)}`); + return void 0; } - function q(t2, e2, n2) { - const i2 = n2; - let s2 = 1; - for (; n2 < t2.length; n2++) if ("<" === t2[n2]) if ("/" === t2[n2 + 1]) { - const r2 = z(t2, ">", n2, `${e2} is not closed`); - if (t2.substring(n2 + 2, r2).trim() === e2 && (s2--, 0 === s2)) return { tagContent: t2.substring(i2, n2), i: r2 }; - n2 = r2; - } else if ("?" === t2[n2 + 1]) n2 = z(t2, "?>", n2 + 1, "StopNode is not closed."); - else if ("!--" === t2.substr(n2 + 1, 3)) n2 = z(t2, "-->", n2 + 3, "StopNode is not closed."); - else if ("![" === t2.substr(n2 + 1, 2)) n2 = z(t2, "]]>", n2, "StopNode is not closed.") - 2; - else { - const i3 = W(t2, n2, ">"); - i3 && ((i3 && i3.tagName) === e2 && "/" !== i3.tagExp[i3.tagExp.length - 1] && s2++, n2 = i3.closeIndex); + } + function tryCreateSpan(tracingClient, request2, spanAttributes) { + try { + const { span, updatedOptions } = tracingClient.startSpan(`HTTP ${request2.method}`, { tracingOptions: request2.tracingOptions }, { + spanKind: "client", + spanAttributes + }); + if (!span.isRecording()) { + span.end(); + return void 0; } - } - function Z(t2, e2, n2) { - if (e2 && "string" == typeof t2) { - const e3 = t2.trim(); - return "true" === e3 || "false" !== e3 && (function(t3, e4 = {}) { - if (e4 = Object.assign({}, V, e4), !t3 || "string" != typeof t3) return t3; - let n3 = t3.trim(); - if (void 0 !== e4.skipLike && e4.skipLike.test(n3)) return t3; - if ("0" === t3) return 0; - if (e4.hex && C.test(n3)) return (function(t4) { - if (parseInt) return parseInt(t4, 16); - if (Number.parseInt) return Number.parseInt(t4, 16); - if (window && window.parseInt) return window.parseInt(t4, 16); - throw new Error("parseInt, Number.parseInt, window.parseInt are not supported"); - })(n3); - if (-1 !== n3.search(/.+[eE].+/)) return (function(t4, e5, n4) { - if (!n4.eNotation) return t4; - const i3 = e5.match(D); - if (i3) { - let s2 = i3[1] || ""; - const r2 = -1 === i3[3].indexOf("e") ? "E" : "e", o2 = i3[2], a2 = s2 ? t4[o2.length + 1] === r2 : t4[o2.length] === r2; - return o2.length > 1 && a2 ? t4 : 1 !== o2.length || !i3[3].startsWith(`.${r2}`) && i3[3][0] !== r2 ? n4.leadingZeros && !a2 ? (e5 = (i3[1] || "") + i3[3], Number(e5)) : t4 : Number(e5); - } - return t4; - })(t3, n3, e4); - { - const s2 = $.exec(n3); - if (s2) { - const r2 = s2[1] || "", o2 = s2[2]; - let a2 = (i2 = s2[3]) && -1 !== i2.indexOf(".") ? ("." === (i2 = i2.replace(/0+$/, "")) ? i2 = "0" : "." === i2[0] ? i2 = "0" + i2 : "." 
=== i2[i2.length - 1] && (i2 = i2.substring(0, i2.length - 1)), i2) : i2; - const l2 = r2 ? "." === t3[o2.length + 1] : "." === t3[o2.length]; - if (!e4.leadingZeros && (o2.length > 1 || 1 === o2.length && !l2)) return t3; - { - const i3 = Number(n3), s3 = String(i3); - if (0 === i3 || -0 === i3) return i3; - if (-1 !== s3.search(/[eE]/)) return e4.eNotation ? i3 : t3; - if (-1 !== n3.indexOf(".")) return "0" === s3 || s3 === a2 || s3 === `${r2}${a2}` ? i3 : t3; - let l3 = o2 ? a2 : n3; - return o2 ? l3 === s3 || r2 + l3 === s3 ? i3 : t3 : l3 === s3 || l3 === r2 + s3 ? i3 : t3; - } - } - return t3; - } - var i2; - })(t2, n2); + const headers = tracingClient.createRequestHeaders(updatedOptions.tracingOptions.tracingContext); + for (const [key, value] of Object.entries(headers)) { + request2.headers.set(key, value); } - return void 0 !== t2 ? t2 : ""; - } - function K(t2, e2, n2) { - const i2 = Number.parseInt(t2, e2); - return i2 >= 0 && i2 <= 1114111 ? String.fromCodePoint(i2) : n2 + t2 + ";"; - } - const Q = I.getMetaDataSymbol(); - function J(t2, e2) { - return H(t2, e2); + return { span, tracingContext: updatedOptions.tracingOptions.tracingContext }; + } catch (e) { + log_js_1.logger.warning(`Skipping creating a tracing span due to an error: ${(0, core_util_1.getErrorMessage)(e)}`); + return void 0; } - function H(t2, e2, n2) { - let i2; - const s2 = {}; - for (let r2 = 0; r2 < t2.length; r2++) { - const o2 = t2[r2], a2 = tt(o2); - let l2 = ""; - if (l2 = void 0 === n2 ? a2 : n2 + "." + a2, a2 === e2.textNodeName) void 0 === i2 ? i2 = o2[a2] : i2 += "" + o2[a2]; - else { - if (void 0 === a2) continue; - if (o2[a2]) { - let t3 = H(o2[a2], e2, l2); - const n3 = nt(t3, e2); - void 0 !== o2[Q] && (t3[Q] = o2[Q]), o2[":@"] ? et(t3, o2[":@"], l2, e2) : 1 !== Object.keys(t3).length || void 0 === t3[e2.textNodeName] || e2.alwaysCreateTextNode ? 0 === Object.keys(t3).length && (e2.alwaysCreateTextNode ? t3[e2.textNodeName] = "" : t3 = "") : t3 = t3[e2.textNodeName], void 0 !== s2[a2] && s2.hasOwnProperty(a2) ? (Array.isArray(s2[a2]) || (s2[a2] = [s2[a2]]), s2[a2].push(t3)) : e2.isArray(a2, l2, n3) ? s2[a2] = [t3] : s2[a2] = t3; - } - } + } + function tryProcessError(span, error3) { + try { + span.setStatus({ + status: "error", + error: (0, core_util_1.isError)(error3) ? error3 : void 0 + }); + if ((0, restError_js_1.isRestError)(error3) && error3.statusCode) { + span.setAttribute("http.status_code", error3.statusCode); } - return "string" == typeof i2 ? i2.length > 0 && (s2[e2.textNodeName] = i2) : void 0 !== i2 && (s2[e2.textNodeName] = i2), s2; + span.end(); + } catch (e) { + log_js_1.logger.warning(`Skipping tracing span processing due to an error: ${(0, core_util_1.getErrorMessage)(e)}`); } - function tt(t2) { - const e2 = Object.keys(t2); - for (let t3 = 0; t3 < e2.length; t3++) { - const n2 = e2[t3]; - if (":@" !== n2) return n2; + } + function tryProcessResponse(span, response) { + try { + span.setAttribute("http.status_code", response.status); + const serviceRequestId = response.headers.get("x-ms-request-id"); + if (serviceRequestId) { + span.setAttribute("serviceRequestId", serviceRequestId); } - } - function et(t2, e2, n2, i2) { - if (e2) { - const s2 = Object.keys(e2), r2 = s2.length; - for (let o2 = 0; o2 < r2; o2++) { - const r3 = s2[o2]; - i2.isArray(r3, n2 + "." + r3, true, true) ? 
t2[r3] = [e2[r3]] : t2[r3] = e2[r3]; - } + if (response.status >= 400) { + span.setStatus({ + status: "error" + }); } + span.end(); + } catch (e) { + log_js_1.logger.warning(`Skipping tracing span processing due to an error: ${(0, core_util_1.getErrorMessage)(e)}`); } - function nt(t2, e2) { - const { textNodeName: n2 } = e2, i2 = Object.keys(t2).length; - return 0 === i2 || !(1 !== i2 || !t2[n2] && "boolean" != typeof t2[n2] && 0 !== t2[n2]); + } + } +}); + +// node_modules/@azure/core-rest-pipeline/dist/commonjs/util/wrapAbortSignal.js +var require_wrapAbortSignal = __commonJS({ + "node_modules/@azure/core-rest-pipeline/dist/commonjs/util/wrapAbortSignal.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.wrapAbortSignalLike = wrapAbortSignalLike; + function wrapAbortSignalLike(abortSignalLike) { + if (abortSignalLike instanceof AbortSignal) { + return { abortSignal: abortSignalLike }; } - class it { - constructor(t2) { - this.externalEntities = {}, this.options = w(t2); - } - parse(t2, e2) { - if ("string" != typeof t2 && t2.toString) t2 = t2.toString(); - else if ("string" != typeof t2) throw new Error("XML data is accepted in String or Bytes[] form."); - if (e2) { - true === e2 && (e2 = {}); - const n3 = a(t2, e2); - if (true !== n3) throw Error(`${n3.err.msg}:${n3.err.line}:${n3.err.col}`); - } - const n2 = new F(this.options); - n2.addExternalEntities(this.externalEntities); - const i2 = n2.parseXml(t2); - return this.options.preserveOrder || void 0 === i2 ? i2 : J(i2, this.options); - } - addEntity(t2, e2) { - if (-1 !== e2.indexOf("&")) throw new Error("Entity value can't have '&'"); - if (-1 !== t2.indexOf("&") || -1 !== t2.indexOf(";")) throw new Error("An entity must be set without '&' and ';'. Eg. use '#xD' for ' '"); - if ("&" === e2) throw new Error("An entity with value '&' is not permitted"); - this.externalEntities[t2] = e2; - } - static getMetaDataSymbol() { - return I.getMetaDataSymbol(); + if (abortSignalLike.aborted) { + return { abortSignal: AbortSignal.abort(abortSignalLike.reason) }; + } + const controller = new AbortController(); + let needsCleanup = true; + function cleanup() { + if (needsCleanup) { + abortSignalLike.removeEventListener("abort", listener); + needsCleanup = false; } } - function st(t2, e2) { - let n2 = ""; - return e2.format && e2.indentBy.length > 0 && (n2 = "\n"), rt(t2, e2, "", n2); + function listener() { + controller.abort(abortSignalLike.reason); + cleanup(); } - function rt(t2, e2, n2, i2) { - let s2 = "", r2 = false; - for (let o2 = 0; o2 < t2.length; o2++) { - const a2 = t2[o2], l2 = ot(a2); - if (void 0 === l2) continue; - let u2 = ""; - if (u2 = 0 === n2.length ? 
l2 : `${n2}.${l2}`, l2 === e2.textNodeName) { - let t3 = a2[l2]; - lt(u2, e2) || (t3 = e2.tagValueProcessor(l2, t3), t3 = ut(t3, e2)), r2 && (s2 += i2), s2 += t3, r2 = false; - continue; - } - if (l2 === e2.cdataPropName) { - r2 && (s2 += i2), s2 += ``, r2 = false; - continue; - } - if (l2 === e2.commentPropName) { - s2 += i2 + ``, r2 = true; - continue; + abortSignalLike.addEventListener("abort", listener); + return { abortSignal: controller.signal, cleanup }; + } + } +}); + +// node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/wrapAbortSignalLikePolicy.js +var require_wrapAbortSignalLikePolicy = __commonJS({ + "node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/wrapAbortSignalLikePolicy.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.wrapAbortSignalLikePolicyName = void 0; + exports2.wrapAbortSignalLikePolicy = wrapAbortSignalLikePolicy; + var wrapAbortSignal_js_1 = require_wrapAbortSignal(); + exports2.wrapAbortSignalLikePolicyName = "wrapAbortSignalLikePolicy"; + function wrapAbortSignalLikePolicy() { + return { + name: exports2.wrapAbortSignalLikePolicyName, + sendRequest: async (request2, next) => { + if (!request2.abortSignal) { + return next(request2); } - if ("?" === l2[0]) { - const t3 = at(a2[":@"], e2), n3 = "?xml" === l2 ? "" : i2; - let o3 = a2[l2][0][e2.textNodeName]; - o3 = 0 !== o3.length ? " " + o3 : "", s2 += n3 + `<${l2}${o3}${t3}?>`, r2 = true; - continue; + const { abortSignal, cleanup } = (0, wrapAbortSignal_js_1.wrapAbortSignalLike)(request2.abortSignal); + request2.abortSignal = abortSignal; + try { + return await next(request2); + } finally { + cleanup?.(); } - let h2 = i2; - "" !== h2 && (h2 += e2.indentBy); - const d2 = i2 + `<${l2}${at(a2[":@"], e2)}`, p2 = rt(a2[l2], e2, u2, h2); - -1 !== e2.unpairedTags.indexOf(l2) ? e2.suppressUnpairedNode ? s2 += d2 + ">" : s2 += d2 + "/>" : p2 && 0 !== p2.length || !e2.suppressEmptyNode ? p2 && p2.endsWith(">") ? 
s2 += d2 + `>${p2}${i2}` : (s2 += d2 + ">", p2 && "" !== i2 && (p2.includes("/>") || p2.includes("`) : s2 += d2 + "/>", r2 = true; } - return s2; - } - function ot(t2) { - const e2 = Object.keys(t2); - for (let n2 = 0; n2 < e2.length; n2++) { - const i2 = e2[n2]; - if (t2.hasOwnProperty(i2) && ":@" !== i2) return i2; + }; + } + } +}); + +// node_modules/@azure/core-rest-pipeline/dist/commonjs/createPipelineFromOptions.js +var require_createPipelineFromOptions2 = __commonJS({ + "node_modules/@azure/core-rest-pipeline/dist/commonjs/createPipelineFromOptions.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.createPipelineFromOptions = createPipelineFromOptions; + var logPolicy_js_1 = require_logPolicy2(); + var pipeline_js_1 = require_pipeline2(); + var redirectPolicy_js_1 = require_redirectPolicy2(); + var userAgentPolicy_js_1 = require_userAgentPolicy2(); + var multipartPolicy_js_1 = require_multipartPolicy2(); + var decompressResponsePolicy_js_1 = require_decompressResponsePolicy2(); + var defaultRetryPolicy_js_1 = require_defaultRetryPolicy2(); + var formDataPolicy_js_1 = require_formDataPolicy2(); + var core_util_1 = require_commonjs4(); + var proxyPolicy_js_1 = require_proxyPolicy2(); + var setClientRequestIdPolicy_js_1 = require_setClientRequestIdPolicy(); + var agentPolicy_js_1 = require_agentPolicy2(); + var tlsPolicy_js_1 = require_tlsPolicy2(); + var tracingPolicy_js_1 = require_tracingPolicy(); + var wrapAbortSignalLikePolicy_js_1 = require_wrapAbortSignalLikePolicy(); + function createPipelineFromOptions(options) { + const pipeline = (0, pipeline_js_1.createEmptyPipeline)(); + if (core_util_1.isNodeLike) { + if (options.agent) { + pipeline.addPolicy((0, agentPolicy_js_1.agentPolicy)(options.agent)); } - } - function at(t2, e2) { - let n2 = ""; - if (t2 && !e2.ignoreAttributes) for (let i2 in t2) { - if (!t2.hasOwnProperty(i2)) continue; - let s2 = e2.attributeValueProcessor(i2, t2[i2]); - s2 = ut(s2, e2), true === s2 && e2.suppressBooleanAttributes ? n2 += ` ${i2.substr(e2.attributeNamePrefix.length)}` : n2 += ` ${i2.substr(e2.attributeNamePrefix.length)}="${s2}"`; + if (options.tlsOptions) { + pipeline.addPolicy((0, tlsPolicy_js_1.tlsPolicy)(options.tlsOptions)); } - return n2; + pipeline.addPolicy((0, proxyPolicy_js_1.proxyPolicy)(options.proxyOptions)); + pipeline.addPolicy((0, decompressResponsePolicy_js_1.decompressResponsePolicy)()); } - function lt(t2, e2) { - let n2 = (t2 = t2.substr(0, t2.length - e2.textNodeName.length - 1)).substr(t2.lastIndexOf(".") + 1); - for (let i2 in e2.stopNodes) if (e2.stopNodes[i2] === t2 || e2.stopNodes[i2] === "*." 
+ n2) return true; - return false; + pipeline.addPolicy((0, wrapAbortSignalLikePolicy_js_1.wrapAbortSignalLikePolicy)()); + pipeline.addPolicy((0, formDataPolicy_js_1.formDataPolicy)(), { beforePolicies: [multipartPolicy_js_1.multipartPolicyName] }); + pipeline.addPolicy((0, userAgentPolicy_js_1.userAgentPolicy)(options.userAgentOptions)); + pipeline.addPolicy((0, setClientRequestIdPolicy_js_1.setClientRequestIdPolicy)(options.telemetryOptions?.clientRequestIdHeaderName)); + pipeline.addPolicy((0, multipartPolicy_js_1.multipartPolicy)(), { afterPhase: "Deserialize" }); + pipeline.addPolicy((0, defaultRetryPolicy_js_1.defaultRetryPolicy)(options.retryOptions), { phase: "Retry" }); + pipeline.addPolicy((0, tracingPolicy_js_1.tracingPolicy)({ ...options.userAgentOptions, ...options.loggingOptions }), { + afterPhase: "Retry" + }); + if (core_util_1.isNodeLike) { + pipeline.addPolicy((0, redirectPolicy_js_1.redirectPolicy)(options.redirectOptions), { afterPhase: "Retry" }); } - function ut(t2, e2) { - if (t2 && t2.length > 0 && e2.processEntities) for (let n2 = 0; n2 < e2.entities.length; n2++) { - const i2 = e2.entities[n2]; - t2 = t2.replace(i2.regex, i2.val); + pipeline.addPolicy((0, logPolicy_js_1.logPolicy)(options.loggingOptions), { afterPhase: "Sign" }); + return pipeline; + } + } +}); + +// node_modules/@azure/core-rest-pipeline/dist/commonjs/defaultHttpClient.js +var require_defaultHttpClient2 = __commonJS({ + "node_modules/@azure/core-rest-pipeline/dist/commonjs/defaultHttpClient.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.createDefaultHttpClient = createDefaultHttpClient; + var ts_http_runtime_1 = require_commonjs(); + var wrapAbortSignal_js_1 = require_wrapAbortSignal(); + function createDefaultHttpClient() { + const client = (0, ts_http_runtime_1.createDefaultHttpClient)(); + return { + async sendRequest(request2) { + const { abortSignal, cleanup } = request2.abortSignal ? (0, wrapAbortSignal_js_1.wrapAbortSignalLike)(request2.abortSignal) : {}; + try { + request2.abortSignal = abortSignal; + return await client.sendRequest(request2); + } finally { + cleanup?.(); + } } - return t2; - } - const ht = { attributeNamePrefix: "@_", attributesGroupName: false, textNodeName: "#text", ignoreAttributes: true, cdataPropName: false, format: false, indentBy: " ", suppressEmptyNode: false, suppressUnpairedNode: true, suppressBooleanAttributes: true, tagValueProcessor: function(t2, e2) { - return e2; - }, attributeValueProcessor: function(t2, e2) { - return e2; - }, preserveOrder: false, commentPropName: false, unpairedTags: [], entities: [{ regex: new RegExp("&", "g"), val: "&" }, { regex: new RegExp(">", "g"), val: ">" }, { regex: new RegExp("<", "g"), val: "<" }, { regex: new RegExp("'", "g"), val: "'" }, { regex: new RegExp('"', "g"), val: """ }], processEntities: true, stopNodes: [], oneListGroup: false }; - function dt(t2) { - this.options = Object.assign({}, ht, t2), true === this.options.ignoreAttributes || this.options.attributesGroupName ? this.isAttribute = function() { - return false; - } : (this.ignoreAttributesFn = L(this.options.ignoreAttributes), this.attrPrefixLen = this.options.attributeNamePrefix.length, this.isAttribute = ct), this.processTextOrObjNode = pt, this.options.format ? 
(this.indentate = ft, this.tagEndChar = ">\n", this.newLine = "\n") : (this.indentate = function() { - return ""; - }, this.tagEndChar = ">", this.newLine = ""); - } - function pt(t2, e2, n2, i2) { - const s2 = this.j2x(t2, n2 + 1, i2.concat(e2)); - return void 0 !== t2[this.options.textNodeName] && 1 === Object.keys(t2).length ? this.buildTextValNode(t2[this.options.textNodeName], e2, s2.attrStr, n2) : this.buildObjectNode(s2.val, e2, s2.attrStr, n2); - } - function ft(t2) { - return this.options.indentBy.repeat(t2); - } - function ct(t2) { - return !(!t2.startsWith(this.options.attributeNamePrefix) || t2 === this.options.textNodeName) && t2.substr(this.attrPrefixLen); - } - dt.prototype.build = function(t2) { - return this.options.preserveOrder ? st(t2, this.options) : (Array.isArray(t2) && this.options.arrayNodeName && this.options.arrayNodeName.length > 1 && (t2 = { [this.options.arrayNodeName]: t2 }), this.j2x(t2, 0, []).val); - }, dt.prototype.j2x = function(t2, e2, n2) { - let i2 = "", s2 = ""; - const r2 = n2.join("."); - for (let o2 in t2) if (Object.prototype.hasOwnProperty.call(t2, o2)) if (void 0 === t2[o2]) this.isAttribute(o2) && (s2 += ""); - else if (null === t2[o2]) this.isAttribute(o2) || o2 === this.options.cdataPropName ? s2 += "" : "?" === o2[0] ? s2 += this.indentate(e2) + "<" + o2 + "?" + this.tagEndChar : s2 += this.indentate(e2) + "<" + o2 + "/" + this.tagEndChar; - else if (t2[o2] instanceof Date) s2 += this.buildTextValNode(t2[o2], o2, "", e2); - else if ("object" != typeof t2[o2]) { - const n3 = this.isAttribute(o2); - if (n3 && !this.ignoreAttributesFn(n3, r2)) i2 += this.buildAttrPairStr(n3, "" + t2[o2]); - else if (!n3) if (o2 === this.options.textNodeName) { - let e3 = this.options.tagValueProcessor(o2, "" + t2[o2]); - s2 += this.replaceEntitiesValue(e3); - } else s2 += this.buildTextValNode(t2[o2], o2, "", e2); - } else if (Array.isArray(t2[o2])) { - const i3 = t2[o2].length; - let r3 = "", a2 = ""; - for (let l2 = 0; l2 < i3; l2++) { - const i4 = t2[o2][l2]; - if (void 0 === i4) ; - else if (null === i4) "?" === o2[0] ? s2 += this.indentate(e2) + "<" + o2 + "?" + this.tagEndChar : s2 += this.indentate(e2) + "<" + o2 + "/" + this.tagEndChar; - else if ("object" == typeof i4) if (this.options.oneListGroup) { - const t3 = this.j2x(i4, e2 + 1, n2.concat(o2)); - r3 += t3.val, this.options.attributesGroupName && i4.hasOwnProperty(this.options.attributesGroupName) && (a2 += t3.attrStr); - } else r3 += this.processTextOrObjNode(i4, o2, e2, n2); - else if (this.options.oneListGroup) { - let t3 = this.options.tagValueProcessor(o2, i4); - t3 = this.replaceEntitiesValue(t3), r3 += t3; - } else r3 += this.buildTextValNode(i4, o2, "", e2); - } - this.options.oneListGroup && (r3 = this.buildObjectNode(r3, o2, a2, e2)), s2 += r3; - } else if (this.options.attributesGroupName && o2 === this.options.attributesGroupName) { - const e3 = Object.keys(t2[o2]), n3 = e3.length; - for (let s3 = 0; s3 < n3; s3++) i2 += this.buildAttrPairStr(e3[s3], "" + t2[o2][e3[s3]]); - } else s2 += this.processTextOrObjNode(t2[o2], o2, e2, n2); - return { attrStr: i2, val: s2 }; - }, dt.prototype.buildAttrPairStr = function(t2, e2) { - return e2 = this.options.attributeValueProcessor(t2, "" + e2), e2 = this.replaceEntitiesValue(e2), this.options.suppressBooleanAttributes && "true" === e2 ? " " + t2 : " " + t2 + '="' + e2 + '"'; - }, dt.prototype.buildObjectNode = function(t2, e2, n2, i2) { - if ("" === t2) return "?" === e2[0] ? this.indentate(i2) + "<" + e2 + n2 + "?" 
+ this.tagEndChar : this.indentate(i2) + "<" + e2 + n2 + this.closeTag(e2) + this.tagEndChar; - { - let s2 = "` + this.newLine : this.indentate(i2) + "<" + e2 + n2 + r2 + this.tagEndChar + t2 + this.indentate(i2) + s2 : this.indentate(i2) + "<" + e2 + n2 + r2 + ">" + t2 + s2; - } - }, dt.prototype.closeTag = function(t2) { - let e2 = ""; - return -1 !== this.options.unpairedTags.indexOf(t2) ? this.options.suppressUnpairedNode || (e2 = "/") : e2 = this.options.suppressEmptyNode ? "/" : `>` + this.newLine; - if (false !== this.options.commentPropName && e2 === this.options.commentPropName) return this.indentate(i2) + `` + this.newLine; - if ("?" === e2[0]) return this.indentate(i2) + "<" + e2 + n2 + "?" + this.tagEndChar; - { - let s2 = this.options.tagValueProcessor(e2, t2); - return s2 = this.replaceEntitiesValue(s2), "" === s2 ? this.indentate(i2) + "<" + e2 + n2 + this.closeTag(e2) + this.tagEndChar : this.indentate(i2) + "<" + e2 + n2 + ">" + s2 + " 0 && this.options.processEntities) for (let e2 = 0; e2 < this.options.entities.length; e2++) { - const n2 = this.options.entities[e2]; - t2 = t2.replace(n2.regex, n2.val); - } - return t2; }; - const gt = { validate: a }; - module2.exports = e; - })(); + } } }); -// node_modules/@azure/core-xml/dist/commonjs/xml.common.js -var require_xml_common = __commonJS({ - "node_modules/@azure/core-xml/dist/commonjs/xml.common.js"(exports2) { +// node_modules/@azure/core-rest-pipeline/dist/commonjs/httpHeaders.js +var require_httpHeaders2 = __commonJS({ + "node_modules/@azure/core-rest-pipeline/dist/commonjs/httpHeaders.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.XML_CHARKEY = exports2.XML_ATTRKEY = void 0; - exports2.XML_ATTRKEY = "$"; - exports2.XML_CHARKEY = "_"; + exports2.createHttpHeaders = createHttpHeaders; + var ts_http_runtime_1 = require_commonjs(); + function createHttpHeaders(rawHeaders) { + return (0, ts_http_runtime_1.createHttpHeaders)(rawHeaders); + } } }); -// node_modules/@azure/core-xml/dist/commonjs/xml.js -var require_xml = __commonJS({ - "node_modules/@azure/core-xml/dist/commonjs/xml.js"(exports2) { +// node_modules/@azure/core-rest-pipeline/dist/commonjs/pipelineRequest.js +var require_pipelineRequest2 = __commonJS({ + "node_modules/@azure/core-rest-pipeline/dist/commonjs/pipelineRequest.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.stringifyXML = stringifyXML; - exports2.parseXML = parseXML; - var fast_xml_parser_1 = require_fxp(); - var xml_common_js_1 = require_xml_common(); - function getCommonOptions(options) { - var _a; - return { - attributesGroupName: xml_common_js_1.XML_ATTRKEY, - textNodeName: (_a = options.xmlCharKey) !== null && _a !== void 0 ? _a : xml_common_js_1.XML_CHARKEY, - ignoreAttributes: false, - suppressBooleanAttributes: false - }; - } - function getSerializerOptions(options = {}) { - var _a, _b; - return Object.assign(Object.assign({}, getCommonOptions(options)), { attributeNamePrefix: "@_", format: true, suppressEmptyNode: true, indentBy: "", rootNodeName: (_a = options.rootName) !== null && _a !== void 0 ? _a : "root", cdataPropName: (_b = options.cdataPropName) !== null && _b !== void 0 ? 
_b : "__cdata" }); - } - function getParserOptions(options = {}) { - return Object.assign(Object.assign({}, getCommonOptions(options)), { parseAttributeValue: false, parseTagValue: false, attributeNamePrefix: "", stopNodes: options.stopNodes, processEntities: true, trimValues: false }); - } - function stringifyXML(obj, opts = {}) { - const parserOptions = getSerializerOptions(opts); - const j2x = new fast_xml_parser_1.XMLBuilder(parserOptions); - const node = { [parserOptions.rootNodeName]: obj }; - const xmlData = j2x.build(node); - return `${xmlData}`.replace(/\n/g, ""); - } - async function parseXML(str2, opts = {}) { - if (!str2) { - throw new Error("Document is empty"); - } - const validation = fast_xml_parser_1.XMLValidator.validate(str2); - if (validation !== true) { - throw validation; - } - const parser = new fast_xml_parser_1.XMLParser(getParserOptions(opts)); - const parsedXml = parser.parse(str2); - if (parsedXml["?xml"]) { - delete parsedXml["?xml"]; - } - if (!opts.includeRoot) { - for (const key of Object.keys(parsedXml)) { - const value = parsedXml[key]; - return typeof value === "object" ? Object.assign({}, value) : value; - } - } - return parsedXml; + exports2.createPipelineRequest = createPipelineRequest; + var ts_http_runtime_1 = require_commonjs(); + function createPipelineRequest(options) { + return (0, ts_http_runtime_1.createPipelineRequest)(options); } } }); -// node_modules/@azure/core-xml/dist/commonjs/index.js -var require_commonjs10 = __commonJS({ - "node_modules/@azure/core-xml/dist/commonjs/index.js"(exports2) { +// node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/exponentialRetryPolicy.js +var require_exponentialRetryPolicy2 = __commonJS({ + "node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/exponentialRetryPolicy.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.XML_CHARKEY = exports2.XML_ATTRKEY = exports2.parseXML = exports2.stringifyXML = void 0; - var xml_js_1 = require_xml(); - Object.defineProperty(exports2, "stringifyXML", { enumerable: true, get: function() { - return xml_js_1.stringifyXML; - } }); - Object.defineProperty(exports2, "parseXML", { enumerable: true, get: function() { - return xml_js_1.parseXML; - } }); - var xml_common_js_1 = require_xml_common(); - Object.defineProperty(exports2, "XML_ATTRKEY", { enumerable: true, get: function() { - return xml_common_js_1.XML_ATTRKEY; - } }); - Object.defineProperty(exports2, "XML_CHARKEY", { enumerable: true, get: function() { - return xml_common_js_1.XML_CHARKEY; - } }); + exports2.exponentialRetryPolicyName = void 0; + exports2.exponentialRetryPolicy = exponentialRetryPolicy; + var policies_1 = require_internal2(); + exports2.exponentialRetryPolicyName = policies_1.exponentialRetryPolicyName; + function exponentialRetryPolicy(options = {}) { + return (0, policies_1.exponentialRetryPolicy)(options); + } } }); -// node_modules/@azure/storage-blob/dist/commonjs/log.js -var require_log5 = __commonJS({ - "node_modules/@azure/storage-blob/dist/commonjs/log.js"(exports2) { +// node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/systemErrorRetryPolicy.js +var require_systemErrorRetryPolicy2 = __commonJS({ + "node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/systemErrorRetryPolicy.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.logger = void 0; - var logger_1 = require_commonjs2(); - exports2.logger = (0, logger_1.createClientLogger)("storage-blob"); 
+ exports2.systemErrorRetryPolicyName = void 0; + exports2.systemErrorRetryPolicy = systemErrorRetryPolicy; + var policies_1 = require_internal2(); + exports2.systemErrorRetryPolicyName = policies_1.systemErrorRetryPolicyName; + function systemErrorRetryPolicy(options = {}) { + return (0, policies_1.systemErrorRetryPolicy)(options); + } } }); -// node_modules/@azure/storage-blob/node_modules/@azure/abort-controller/dist/commonjs/AbortError.js -var require_AbortError3 = __commonJS({ - "node_modules/@azure/storage-blob/node_modules/@azure/abort-controller/dist/commonjs/AbortError.js"(exports2) { +// node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/throttlingRetryPolicy.js +var require_throttlingRetryPolicy2 = __commonJS({ + "node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/throttlingRetryPolicy.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.AbortError = void 0; - var AbortError = class extends Error { - constructor(message) { - super(message); - this.name = "AbortError"; - } - }; - exports2.AbortError = AbortError; + exports2.throttlingRetryPolicyName = void 0; + exports2.throttlingRetryPolicy = throttlingRetryPolicy; + var policies_1 = require_internal2(); + exports2.throttlingRetryPolicyName = policies_1.throttlingRetryPolicyName; + function throttlingRetryPolicy(options = {}) { + return (0, policies_1.throttlingRetryPolicy)(options); + } } }); -// node_modules/@azure/storage-blob/node_modules/@azure/abort-controller/dist/commonjs/index.js -var require_commonjs11 = __commonJS({ - "node_modules/@azure/storage-blob/node_modules/@azure/abort-controller/dist/commonjs/index.js"(exports2) { +// node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/retryPolicy.js +var require_retryPolicy2 = __commonJS({ + "node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/retryPolicy.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.AbortError = void 0; - var AbortError_js_1 = require_AbortError3(); - Object.defineProperty(exports2, "AbortError", { enumerable: true, get: function() { - return AbortError_js_1.AbortError; - } }); + exports2.retryPolicy = retryPolicy; + var logger_1 = require_commonjs2(); + var constants_js_1 = require_constants14(); + var policies_1 = require_internal2(); + var retryPolicyLogger = (0, logger_1.createClientLogger)("core-rest-pipeline retryPolicy"); + function retryPolicy(strategies, options = { maxRetries: constants_js_1.DEFAULT_RETRY_POLICY_COUNT }) { + return (0, policies_1.retryPolicy)(strategies, { + logger: retryPolicyLogger, + ...options + }); + } } }); -// node_modules/@azure/storage-blob/dist/commonjs/policies/RequestPolicy.js -var require_RequestPolicy = __commonJS({ - "node_modules/@azure/storage-blob/dist/commonjs/policies/RequestPolicy.js"(exports2) { +// node_modules/@azure/core-rest-pipeline/dist/commonjs/util/tokenCycler.js +var require_tokenCycler = __commonJS({ + "node_modules/@azure/core-rest-pipeline/dist/commonjs/util/tokenCycler.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.BaseRequestPolicy = void 0; - var BaseRequestPolicy = class { - _nextPolicy; - _options; - /** - * The main method to implement that manipulates a request/response. 
- */ - constructor(_nextPolicy, _options) { - this._nextPolicy = _nextPolicy; - this._options = _options; + exports2.DEFAULT_CYCLER_OPTIONS = void 0; + exports2.createTokenCycler = createTokenCycler; + var core_util_1 = require_commonjs4(); + exports2.DEFAULT_CYCLER_OPTIONS = { + forcedRefreshWindowInMs: 1e3, + // Force waiting for a refresh 1s before the token expires + retryIntervalInMs: 3e3, + // Allow refresh attempts every 3s + refreshWindowInMs: 1e3 * 60 * 2 + // Start refreshing 2m before expiry + }; + async function beginRefresh(getAccessToken, retryIntervalInMs, refreshTimeout) { + async function tryGetAccessToken() { + if (Date.now() < refreshTimeout) { + try { + return await getAccessToken(); + } catch { + return null; + } + } else { + const finalToken = await getAccessToken(); + if (finalToken === null) { + throw new Error("Failed to refresh access token."); + } + return finalToken; + } } - /** - * Get whether or not a log with the provided log level should be logged. - * @param logLevel - The log level of the log that will be logged. - * @returns Whether or not a log with the provided log level should be logged. - */ - shouldLog(logLevel) { - return this._options.shouldLog(logLevel); + let token = await tryGetAccessToken(); + while (token === null) { + await (0, core_util_1.delay)(retryIntervalInMs); + token = await tryGetAccessToken(); } - /** - * Attempt to log the provided message to the provided logger. If no logger was provided or if - * the log level does not meat the logger's threshold, then nothing will be logged. - * @param logLevel - The log level of this log. - * @param message - The message of this log. - */ - log(logLevel, message) { - this._options.log(logLevel, message); + return token; + } + function createTokenCycler(credential, tokenCyclerOptions) { + let refreshWorker = null; + let token = null; + let tenantId; + const options = { + ...exports2.DEFAULT_CYCLER_OPTIONS, + ...tokenCyclerOptions + }; + const cycler = { + /** + * Produces true if a refresh job is currently in progress. + */ + get isRefreshing() { + return refreshWorker !== null; + }, + /** + * Produces true if the cycler SHOULD refresh (we are within the refresh + * window and not already refreshing) + */ + get shouldRefresh() { + if (cycler.isRefreshing) { + return false; + } + if (token?.refreshAfterTimestamp && token.refreshAfterTimestamp < Date.now()) { + return true; + } + return (token?.expiresOnTimestamp ?? 0) - options.refreshWindowInMs < Date.now(); + }, + /** + * Produces true if the cycler MUST refresh (null or nearly-expired + * token). + */ + get mustRefresh() { + return token === null || token.expiresOnTimestamp - options.forcedRefreshWindowInMs < Date.now(); + } + }; + function refresh(scopes, getTokenOptions) { + if (!cycler.isRefreshing) { + const tryGetAccessToken = () => credential.getToken(scopes, getTokenOptions); + refreshWorker = beginRefresh( + tryGetAccessToken, + options.retryIntervalInMs, + // If we don't have a token, then we should timeout immediately + token?.expiresOnTimestamp ?? 
Date.now() + ).then((_token) => { + refreshWorker = null; + token = _token; + tenantId = getTokenOptions.tenantId; + return token; + }).catch((reason) => { + refreshWorker = null; + token = null; + tenantId = void 0; + throw reason; + }); + } + return refreshWorker; } - }; - exports2.BaseRequestPolicy = BaseRequestPolicy; + return async (scopes, tokenOptions) => { + const hasClaimChallenge = Boolean(tokenOptions.claims); + const tenantIdChanged = tenantId !== tokenOptions.tenantId; + if (hasClaimChallenge) { + token = null; + } + const mustRefresh = tenantIdChanged || hasClaimChallenge || cycler.mustRefresh; + if (mustRefresh) { + return refresh(scopes, tokenOptions); + } + if (cycler.shouldRefresh) { + refresh(scopes, tokenOptions); + } + return token; + }; + } } }); -// node_modules/@azure/storage-blob/dist/commonjs/utils/constants.js -var require_constants15 = __commonJS({ - "node_modules/@azure/storage-blob/dist/commonjs/utils/constants.js"(exports2) { +// node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/bearerTokenAuthenticationPolicy.js +var require_bearerTokenAuthenticationPolicy = __commonJS({ + "node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/bearerTokenAuthenticationPolicy.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.PathStylePorts = exports2.BlobDoesNotUseCustomerSpecifiedEncryption = exports2.BlobUsesCustomerSpecifiedEncryptionMsg = exports2.StorageBlobLoggingAllowedQueryParameters = exports2.StorageBlobLoggingAllowedHeaderNames = exports2.DevelopmentConnectionString = exports2.EncryptionAlgorithmAES25 = exports2.HTTP_VERSION_1_1 = exports2.HTTP_LINE_ENDING = exports2.BATCH_MAX_PAYLOAD_IN_BYTES = exports2.BATCH_MAX_REQUEST = exports2.SIZE_1_MB = exports2.ETagAny = exports2.ETagNone = exports2.HeaderConstants = exports2.HTTPURLConnection = exports2.URLConstants = exports2.StorageOAuthScopes = exports2.REQUEST_TIMEOUT = exports2.DEFAULT_MAX_DOWNLOAD_RETRY_REQUESTS = exports2.DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES = exports2.DEFAULT_BLOCK_BUFFER_SIZE_BYTES = exports2.BLOCK_BLOB_MAX_BLOCKS = exports2.BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES = exports2.BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES = exports2.SERVICE_VERSION = exports2.SDK_VERSION = void 0; - exports2.SDK_VERSION = "12.29.1"; - exports2.SERVICE_VERSION = "2025-11-05"; - exports2.BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES = 256 * 1024 * 1024; - exports2.BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES = 4e3 * 1024 * 1024; - exports2.BLOCK_BLOB_MAX_BLOCKS = 5e4; - exports2.DEFAULT_BLOCK_BUFFER_SIZE_BYTES = 8 * 1024 * 1024; - exports2.DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES = 4 * 1024 * 1024; - exports2.DEFAULT_MAX_DOWNLOAD_RETRY_REQUESTS = 5; - exports2.REQUEST_TIMEOUT = 100 * 1e3; - exports2.StorageOAuthScopes = "https://storage.azure.com/.default"; - exports2.URLConstants = { - Parameters: { - FORCE_BROWSER_NO_CACHE: "_", - SIGNATURE: "sig", - SNAPSHOT: "snapshot", - VERSIONID: "versionid", - TIMEOUT: "timeout" + exports2.bearerTokenAuthenticationPolicyName = void 0; + exports2.bearerTokenAuthenticationPolicy = bearerTokenAuthenticationPolicy; + exports2.parseChallenges = parseChallenges; + var tokenCycler_js_1 = require_tokenCycler(); + var log_js_1 = require_log3(); + var restError_js_1 = require_restError3(); + exports2.bearerTokenAuthenticationPolicyName = "bearerTokenAuthenticationPolicy"; + async function trySendRequest(request2, next) { + try { + return [await next(request2), void 0]; + } catch (e) { + if ((0, restError_js_1.isRestError)(e) && e.response) { + return [e.response, 
e]; + } else { + throw e; + } } - }; - exports2.HTTPURLConnection = { - HTTP_ACCEPTED: 202, - HTTP_CONFLICT: 409, - HTTP_NOT_FOUND: 404, - HTTP_PRECON_FAILED: 412, - HTTP_RANGE_NOT_SATISFIABLE: 416 - }; - exports2.HeaderConstants = { - AUTHORIZATION: "Authorization", - AUTHORIZATION_SCHEME: "Bearer", - CONTENT_ENCODING: "Content-Encoding", - CONTENT_ID: "Content-ID", - CONTENT_LANGUAGE: "Content-Language", - CONTENT_LENGTH: "Content-Length", - CONTENT_MD5: "Content-Md5", - CONTENT_TRANSFER_ENCODING: "Content-Transfer-Encoding", - CONTENT_TYPE: "Content-Type", - COOKIE: "Cookie", - DATE: "date", - IF_MATCH: "if-match", - IF_MODIFIED_SINCE: "if-modified-since", - IF_NONE_MATCH: "if-none-match", - IF_UNMODIFIED_SINCE: "if-unmodified-since", - PREFIX_FOR_STORAGE: "x-ms-", - RANGE: "Range", - USER_AGENT: "User-Agent", - X_MS_CLIENT_REQUEST_ID: "x-ms-client-request-id", - X_MS_COPY_SOURCE: "x-ms-copy-source", - X_MS_DATE: "x-ms-date", - X_MS_ERROR_CODE: "x-ms-error-code", - X_MS_VERSION: "x-ms-version", - X_MS_CopySourceErrorCode: "x-ms-copy-source-error-code" - }; - exports2.ETagNone = ""; - exports2.ETagAny = "*"; - exports2.SIZE_1_MB = 1 * 1024 * 1024; - exports2.BATCH_MAX_REQUEST = 256; - exports2.BATCH_MAX_PAYLOAD_IN_BYTES = 4 * exports2.SIZE_1_MB; - exports2.HTTP_LINE_ENDING = "\r\n"; - exports2.HTTP_VERSION_1_1 = "HTTP/1.1"; - exports2.EncryptionAlgorithmAES25 = "AES256"; - exports2.DevelopmentConnectionString = `DefaultEndpointsProtocol=http;AccountName=devstoreaccount1;AccountKey=Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==;BlobEndpoint=http://127.0.0.1:10000/devstoreaccount1;`; - exports2.StorageBlobLoggingAllowedHeaderNames = [ - "Access-Control-Allow-Origin", - "Cache-Control", - "Content-Length", - "Content-Type", - "Date", - "Request-Id", - "traceparent", - "Transfer-Encoding", - "User-Agent", - "x-ms-client-request-id", - "x-ms-date", - "x-ms-error-code", - "x-ms-request-id", - "x-ms-return-client-request-id", - "x-ms-version", - "Accept-Ranges", - "Content-Disposition", - "Content-Encoding", - "Content-Language", - "Content-MD5", - "Content-Range", - "ETag", - "Last-Modified", - "Server", - "Vary", - "x-ms-content-crc64", - "x-ms-copy-action", - "x-ms-copy-completion-time", - "x-ms-copy-id", - "x-ms-copy-progress", - "x-ms-copy-status", - "x-ms-has-immutability-policy", - "x-ms-has-legal-hold", - "x-ms-lease-state", - "x-ms-lease-status", - "x-ms-range", - "x-ms-request-server-encrypted", - "x-ms-server-encrypted", - "x-ms-snapshot", - "x-ms-source-range", - "If-Match", - "If-Modified-Since", - "If-None-Match", - "If-Unmodified-Since", - "x-ms-access-tier", - "x-ms-access-tier-change-time", - "x-ms-access-tier-inferred", - "x-ms-account-kind", - "x-ms-archive-status", - "x-ms-blob-append-offset", - "x-ms-blob-cache-control", - "x-ms-blob-committed-block-count", - "x-ms-blob-condition-appendpos", - "x-ms-blob-condition-maxsize", - "x-ms-blob-content-disposition", - "x-ms-blob-content-encoding", - "x-ms-blob-content-language", - "x-ms-blob-content-length", - "x-ms-blob-content-md5", - "x-ms-blob-content-type", - "x-ms-blob-public-access", - "x-ms-blob-sequence-number", - "x-ms-blob-type", - "x-ms-copy-destination-snapshot", - "x-ms-creation-time", - "x-ms-default-encryption-scope", - "x-ms-delete-snapshots", - "x-ms-delete-type-permanent", - "x-ms-deny-encryption-scope-override", - "x-ms-encryption-algorithm", - "x-ms-if-sequence-number-eq", - "x-ms-if-sequence-number-le", - "x-ms-if-sequence-number-lt", - "x-ms-incremental-copy", - 
"x-ms-lease-action", - "x-ms-lease-break-period", - "x-ms-lease-duration", - "x-ms-lease-id", - "x-ms-lease-time", - "x-ms-page-write", - "x-ms-proposed-lease-id", - "x-ms-range-get-content-md5", - "x-ms-rehydrate-priority", - "x-ms-sequence-number-action", - "x-ms-sku-name", - "x-ms-source-content-md5", - "x-ms-source-if-match", - "x-ms-source-if-modified-since", - "x-ms-source-if-none-match", - "x-ms-source-if-unmodified-since", - "x-ms-tag-count", - "x-ms-encryption-key-sha256", - "x-ms-copy-source-error-code", - "x-ms-copy-source-status-code", - "x-ms-if-tags", - "x-ms-source-if-tags" - ]; - exports2.StorageBlobLoggingAllowedQueryParameters = [ - "comp", - "maxresults", - "rscc", - "rscd", - "rsce", - "rscl", - "rsct", - "se", - "si", - "sip", - "sp", - "spr", - "sr", - "srt", - "ss", - "st", - "sv", - "include", - "marker", - "prefix", - "copyid", - "restype", - "blockid", - "blocklisttype", - "delimiter", - "prevsnapshot", - "ske", - "skoid", - "sks", - "skt", - "sktid", - "skv", - "snapshot" - ]; - exports2.BlobUsesCustomerSpecifiedEncryptionMsg = "BlobUsesCustomerSpecifiedEncryption"; - exports2.BlobDoesNotUseCustomerSpecifiedEncryption = "BlobDoesNotUseCustomerSpecifiedEncryption"; - exports2.PathStylePorts = [ - "10000", - "10001", - "10002", - "10003", - "10004", - "10100", - "10101", - "10102", - "10103", - "10104", - "11000", - "11001", - "11002", - "11003", - "11004", - "11100", - "11101", - "11102", - "11103", - "11104" - ]; - } -}); - -// node_modules/@azure/storage-blob/dist/commonjs/utils/utils.common.js -var require_utils_common = __commonJS({ - "node_modules/@azure/storage-blob/dist/commonjs/utils/utils.common.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.escapeURLPath = escapeURLPath; - exports2.getValueInConnString = getValueInConnString; - exports2.extractConnectionStringParts = extractConnectionStringParts; - exports2.appendToURLPath = appendToURLPath; - exports2.setURLParameter = setURLParameter; - exports2.getURLParameter = getURLParameter; - exports2.setURLHost = setURLHost; - exports2.getURLPath = getURLPath; - exports2.getURLScheme = getURLScheme; - exports2.getURLPathAndQuery = getURLPathAndQuery; - exports2.getURLQueries = getURLQueries; - exports2.appendToURLQuery = appendToURLQuery; - exports2.truncatedISO8061Date = truncatedISO8061Date; - exports2.base64encode = base64encode; - exports2.base64decode = base64decode; - exports2.generateBlockID = generateBlockID; - exports2.delay = delay2; - exports2.padStart = padStart2; - exports2.sanitizeURL = sanitizeURL; - exports2.sanitizeHeaders = sanitizeHeaders; - exports2.iEqual = iEqual; - exports2.getAccountNameFromUrl = getAccountNameFromUrl; - exports2.isIpEndpointStyle = isIpEndpointStyle; - exports2.toBlobTagsString = toBlobTagsString; - exports2.toBlobTags = toBlobTags; - exports2.toTags = toTags; - exports2.toQuerySerialization = toQuerySerialization; - exports2.parseObjectReplicationRecord = parseObjectReplicationRecord; - exports2.attachCredential = attachCredential; - exports2.httpAuthorizationToString = httpAuthorizationToString; - exports2.BlobNameToString = BlobNameToString; - exports2.ConvertInternalResponseOfListBlobFlat = ConvertInternalResponseOfListBlobFlat; - exports2.ConvertInternalResponseOfListBlobHierarchy = ConvertInternalResponseOfListBlobHierarchy; - exports2.ExtractPageRangeInfoItems = ExtractPageRangeInfoItems; - exports2.EscapePath = EscapePath; - exports2.assertResponse = assertResponse; - var core_rest_pipeline_1 = 
require_commonjs6(); - var core_util_1 = require_commonjs4(); - var constants_js_1 = require_constants15(); - function escapeURLPath(url2) { - const urlParsed = new URL(url2); - let path13 = urlParsed.pathname; - path13 = path13 || "/"; - path13 = escape(path13); - urlParsed.pathname = path13; - return urlParsed.toString(); } - function getProxyUriFromDevConnString(connectionString) { - let proxyUri = ""; - if (connectionString.search("DevelopmentStorageProxyUri=") !== -1) { - const matchCredentials = connectionString.split(";"); - for (const element of matchCredentials) { - if (element.trim().startsWith("DevelopmentStorageProxyUri=")) { - proxyUri = element.trim().match("DevelopmentStorageProxyUri=(.*)")[1]; - } - } + async function defaultAuthorizeRequest(options) { + const { scopes, getAccessToken, request: request2 } = options; + const getTokenOptions = { + abortSignal: request2.abortSignal, + tracingOptions: request2.tracingOptions, + enableCae: true + }; + const accessToken = await getAccessToken(scopes, getTokenOptions); + if (accessToken) { + options.request.headers.set("Authorization", `Bearer ${accessToken.token}`); } - return proxyUri; } - function getValueInConnString(connectionString, argument) { - const elements = connectionString.split(";"); - for (const element of elements) { - if (element.trim().startsWith(argument)) { - return element.trim().match(argument + "=(.*)")[1]; - } - } - return ""; + function isChallengeResponse(response) { + return response.status === 401 && response.headers.has("WWW-Authenticate"); } - function extractConnectionStringParts(connectionString) { - let proxyUri = ""; - if (connectionString.startsWith("UseDevelopmentStorage=true")) { - proxyUri = getProxyUriFromDevConnString(connectionString); - connectionString = constants_js_1.DevelopmentConnectionString; + async function authorizeRequestOnCaeChallenge(onChallengeOptions, caeClaims) { + const { scopes } = onChallengeOptions; + const accessToken = await onChallengeOptions.getAccessToken(scopes, { + enableCae: true, + claims: caeClaims + }); + if (!accessToken) { + return false; } - let blobEndpoint = getValueInConnString(connectionString, "BlobEndpoint"); - blobEndpoint = blobEndpoint.endsWith("/") ? blobEndpoint.slice(0, -1) : blobEndpoint; - if (connectionString.search("DefaultEndpointsProtocol=") !== -1 && connectionString.search("AccountKey=") !== -1) { - let defaultEndpointsProtocol = ""; - let accountName = ""; - let accountKey = Buffer.from("accountKey", "base64"); - let endpointSuffix = ""; - accountName = getValueInConnString(connectionString, "AccountName"); - accountKey = Buffer.from(getValueInConnString(connectionString, "AccountKey"), "base64"); - if (!blobEndpoint) { - defaultEndpointsProtocol = getValueInConnString(connectionString, "DefaultEndpointsProtocol"); - const protocol = defaultEndpointsProtocol.toLowerCase(); - if (protocol !== "https" && protocol !== "http") { - throw new Error("Invalid DefaultEndpointsProtocol in the provided Connection String. Expecting 'https' or 'http'"); + onChallengeOptions.request.headers.set("Authorization", `${accessToken.tokenType ?? "Bearer"} ${accessToken.token}`); + return true; + } + function bearerTokenAuthenticationPolicy(options) { + const { credential, scopes, challengeCallbacks } = options; + const logger = options.logger || log_js_1.logger; + const callbacks = { + authorizeRequest: challengeCallbacks?.authorizeRequest?.bind(challengeCallbacks) ?? 
defaultAuthorizeRequest, + authorizeRequestOnChallenge: challengeCallbacks?.authorizeRequestOnChallenge?.bind(challengeCallbacks) + }; + const getAccessToken = credential ? (0, tokenCycler_js_1.createTokenCycler)( + credential + /* , options */ + ) : () => Promise.resolve(null); + return { + name: exports2.bearerTokenAuthenticationPolicyName, + /** + * If there's no challenge parameter: + * - It will try to retrieve the token using the cache, or the credential's getToken. + * - Then it will try the next policy with or without the retrieved token. + * + * It uses the challenge parameters to: + * - Skip a first attempt to get the token from the credential if there's no cached token, + * since it expects the token to be retrievable only after the challenge. + * - Prepare the outgoing request if the `prepareRequest` method has been provided. + * - Send an initial request to receive the challenge if it fails. + * - Process a challenge if the response contains it. + * - Retrieve a token with the challenge information, then re-send the request. + */ + async sendRequest(request2, next) { + if (!request2.url.toLowerCase().startsWith("https://")) { + throw new Error("Bearer token authentication is not permitted for non-TLS protected (non-https) URLs."); } - endpointSuffix = getValueInConnString(connectionString, "EndpointSuffix"); - if (!endpointSuffix) { - throw new Error("Invalid EndpointSuffix in the provided Connection String"); + await callbacks.authorizeRequest({ + scopes: Array.isArray(scopes) ? scopes : [scopes], + request: request2, + getAccessToken, + logger + }); + let response; + let error3; + let shouldSendRequest; + [response, error3] = await trySendRequest(request2, next); + if (isChallengeResponse(response)) { + let claims = getCaeChallengeClaims(response.headers.get("WWW-Authenticate")); + if (claims) { + let parsedClaim; + try { + parsedClaim = atob(claims); + } catch (e) { + logger.warning(`The WWW-Authenticate header contains "claims" that cannot be parsed. Unable to perform the Continuous Access Evaluation authentication flow. Unparsable claims: ${claims}`); + return response; + } + shouldSendRequest = await authorizeRequestOnCaeChallenge({ + scopes: Array.isArray(scopes) ? scopes : [scopes], + response, + request: request2, + getAccessToken, + logger + }, parsedClaim); + if (shouldSendRequest) { + [response, error3] = await trySendRequest(request2, next); + } + } else if (callbacks.authorizeRequestOnChallenge) { + shouldSendRequest = await callbacks.authorizeRequestOnChallenge({ + scopes: Array.isArray(scopes) ? scopes : [scopes], + request: request2, + response, + getAccessToken, + logger + }); + if (shouldSendRequest) { + [response, error3] = await trySendRequest(request2, next); + } + if (isChallengeResponse(response)) { + claims = getCaeChallengeClaims(response.headers.get("WWW-Authenticate")); + if (claims) { + let parsedClaim; + try { + parsedClaim = atob(claims); + } catch (e) { + logger.warning(`The WWW-Authenticate header contains "claims" that cannot be parsed. Unable to perform the Continuous Access Evaluation authentication flow. Unparsable claims: ${claims}`); + return response; + } + shouldSendRequest = await authorizeRequestOnCaeChallenge({ + scopes: Array.isArray(scopes) ? 
scopes : [scopes], + response, + request: request2, + getAccessToken, + logger + }, parsedClaim); + if (shouldSendRequest) { + [response, error3] = await trySendRequest(request2, next); + } + } + } + } + } + if (error3) { + throw error3; + } else { + return response; } - blobEndpoint = `${defaultEndpointsProtocol}://${accountName}.blob.${endpointSuffix}`; - } - if (!accountName) { - throw new Error("Invalid AccountName in the provided Connection String"); - } else if (accountKey.length === 0) { - throw new Error("Invalid AccountKey in the provided Connection String"); - } - return { - kind: "AccountConnString", - url: blobEndpoint, - accountName, - accountKey, - proxyUri - }; - } else { - let accountSas = getValueInConnString(connectionString, "SharedAccessSignature"); - let accountName = getValueInConnString(connectionString, "AccountName"); - if (!accountName) { - accountName = getAccountNameFromUrl(blobEndpoint); - } - if (!blobEndpoint) { - throw new Error("Invalid BlobEndpoint in the provided SAS Connection String"); - } else if (!accountSas) { - throw new Error("Invalid SharedAccessSignature in the provided SAS Connection String"); } - if (accountSas.startsWith("?")) { - accountSas = accountSas.substring(1); + }; + } + function parseChallenges(challenges) { + const challengeRegex = /(\w+)\s+((?:\w+=(?:"[^"]*"|[^,]*),?\s*)+)/g; + const paramRegex = /(\w+)="([^"]*)"/g; + const parsedChallenges = []; + let match; + while ((match = challengeRegex.exec(challenges)) !== null) { + const scheme = match[1]; + const paramsString = match[2]; + const params = {}; + let paramMatch; + while ((paramMatch = paramRegex.exec(paramsString)) !== null) { + params[paramMatch[1]] = paramMatch[2]; } - return { kind: "SASConnString", url: blobEndpoint, accountName, accountSas }; + parsedChallenges.push({ scheme, params }); } + return parsedChallenges; } - function escape(text) { - return encodeURIComponent(text).replace(/%2F/g, "/").replace(/'/g, "%27").replace(/\+/g, "%20").replace(/%25/g, "%"); - } - function appendToURLPath(url2, name) { - const urlParsed = new URL(url2); - let path13 = urlParsed.pathname; - path13 = path13 ? path13.endsWith("/") ? `${path13}${name}` : `${path13}/${name}` : name; - urlParsed.pathname = path13; - return urlParsed.toString(); + function getCaeChallengeClaims(challenges) { + if (!challenges) { + return; + } + const parsedChallenges = parseChallenges(challenges); + return parsedChallenges.find((x) => x.scheme === "Bearer" && x.params.claims && x.params.error === "insufficient_claims")?.params.claims; } - function setURLParameter(url2, name, value) { - const urlParsed = new URL(url2); - const encodedName = encodeURIComponent(name); - const encodedValue = value ? encodeURIComponent(value) : void 0; - const searchString = urlParsed.search === "" ? "?" 
: urlParsed.search; - const searchPieces = []; - for (const pair of searchString.slice(1).split("&")) { - if (pair) { - const [key] = pair.split("=", 2); - if (key !== encodedName) { - searchPieces.push(pair); + } +}); + +// node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/ndJsonPolicy.js +var require_ndJsonPolicy = __commonJS({ + "node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/ndJsonPolicy.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.ndJsonPolicyName = void 0; + exports2.ndJsonPolicy = ndJsonPolicy; + exports2.ndJsonPolicyName = "ndJsonPolicy"; + function ndJsonPolicy() { + return { + name: exports2.ndJsonPolicyName, + async sendRequest(request2, next) { + if (typeof request2.body === "string" && request2.body.startsWith("[")) { + const body = JSON.parse(request2.body); + if (Array.isArray(body)) { + request2.body = body.map((item) => JSON.stringify(item) + "\n").join(""); + } } + return next(request2); } - } - if (encodedValue) { - searchPieces.push(`${encodedName}=${encodedValue}`); - } - urlParsed.search = searchPieces.length ? `?${searchPieces.join("&")}` : ""; - return urlParsed.toString(); - } - function getURLParameter(url2, name) { - const urlParsed = new URL(url2); - return urlParsed.searchParams.get(name) ?? void 0; - } - function setURLHost(url2, host) { - const urlParsed = new URL(url2); - urlParsed.hostname = host; - return urlParsed.toString(); - } - function getURLPath(url2) { - try { - const urlParsed = new URL(url2); - return urlParsed.pathname; - } catch (e) { - return void 0; - } + }; } - function getURLScheme(url2) { - try { - const urlParsed = new URL(url2); - return urlParsed.protocol.endsWith(":") ? urlParsed.protocol.slice(0, -1) : urlParsed.protocol; - } catch (e) { - return void 0; - } + } +}); + +// node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/auxiliaryAuthenticationHeaderPolicy.js +var require_auxiliaryAuthenticationHeaderPolicy = __commonJS({ + "node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/auxiliaryAuthenticationHeaderPolicy.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.auxiliaryAuthenticationHeaderPolicyName = void 0; + exports2.auxiliaryAuthenticationHeaderPolicy = auxiliaryAuthenticationHeaderPolicy; + var tokenCycler_js_1 = require_tokenCycler(); + var log_js_1 = require_log3(); + exports2.auxiliaryAuthenticationHeaderPolicyName = "auxiliaryAuthenticationHeaderPolicy"; + var AUTHORIZATION_AUXILIARY_HEADER = "x-ms-authorization-auxiliary"; + async function sendAuthorizeRequest(options) { + const { scopes, getAccessToken, request: request2 } = options; + const getTokenOptions = { + abortSignal: request2.abortSignal, + tracingOptions: request2.tracingOptions + }; + return (await getAccessToken(scopes, getTokenOptions))?.token ?? ""; } - function getURLPathAndQuery(url2) { - const urlParsed = new URL(url2); - const pathString = urlParsed.pathname; - if (!pathString) { - throw new RangeError("Invalid url without valid path."); - } - let queryString = urlParsed.search || ""; - queryString = queryString.trim(); - if (queryString !== "") { - queryString = queryString.startsWith("?") ? 
queryString : `?${queryString}`; - } - return `${pathString}${queryString}`; + function auxiliaryAuthenticationHeaderPolicy(options) { + const { credentials, scopes } = options; + const logger = options.logger || log_js_1.logger; + const tokenCyclerMap = /* @__PURE__ */ new WeakMap(); + return { + name: exports2.auxiliaryAuthenticationHeaderPolicyName, + async sendRequest(request2, next) { + if (!request2.url.toLowerCase().startsWith("https://")) { + throw new Error("Bearer token authentication for auxiliary header is not permitted for non-TLS protected (non-https) URLs."); + } + if (!credentials || credentials.length === 0) { + logger.info(`${exports2.auxiliaryAuthenticationHeaderPolicyName} header will not be set due to empty credentials.`); + return next(request2); + } + const tokenPromises = []; + for (const credential of credentials) { + let getAccessToken = tokenCyclerMap.get(credential); + if (!getAccessToken) { + getAccessToken = (0, tokenCycler_js_1.createTokenCycler)(credential); + tokenCyclerMap.set(credential, getAccessToken); + } + tokenPromises.push(sendAuthorizeRequest({ + scopes: Array.isArray(scopes) ? scopes : [scopes], + request: request2, + getAccessToken, + logger + })); + } + const auxiliaryTokens = (await Promise.all(tokenPromises)).filter((token) => Boolean(token)); + if (auxiliaryTokens.length === 0) { + logger.warning(`None of the auxiliary tokens are valid. ${AUTHORIZATION_AUXILIARY_HEADER} header will not be set.`); + return next(request2); + } + request2.headers.set(AUTHORIZATION_AUXILIARY_HEADER, auxiliaryTokens.map((token) => `Bearer ${token}`).join(", ")); + return next(request2); + } + }; } - function getURLQueries(url2) { - let queryString = new URL(url2).search; - if (!queryString) { - return {}; + } +}); + +// node_modules/@azure/core-rest-pipeline/dist/commonjs/index.js +var require_commonjs6 = __commonJS({ + "node_modules/@azure/core-rest-pipeline/dist/commonjs/index.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.createFileFromStream = exports2.createFile = exports2.agentPolicyName = exports2.agentPolicy = exports2.auxiliaryAuthenticationHeaderPolicyName = exports2.auxiliaryAuthenticationHeaderPolicy = exports2.ndJsonPolicyName = exports2.ndJsonPolicy = exports2.bearerTokenAuthenticationPolicyName = exports2.bearerTokenAuthenticationPolicy = exports2.formDataPolicyName = exports2.formDataPolicy = exports2.tlsPolicyName = exports2.tlsPolicy = exports2.userAgentPolicyName = exports2.userAgentPolicy = exports2.defaultRetryPolicy = exports2.tracingPolicyName = exports2.tracingPolicy = exports2.retryPolicy = exports2.throttlingRetryPolicyName = exports2.throttlingRetryPolicy = exports2.systemErrorRetryPolicyName = exports2.systemErrorRetryPolicy = exports2.redirectPolicyName = exports2.redirectPolicy = exports2.getDefaultProxySettings = exports2.proxyPolicyName = exports2.proxyPolicy = exports2.multipartPolicyName = exports2.multipartPolicy = exports2.logPolicyName = exports2.logPolicy = exports2.setClientRequestIdPolicyName = exports2.setClientRequestIdPolicy = exports2.exponentialRetryPolicyName = exports2.exponentialRetryPolicy = exports2.decompressResponsePolicyName = exports2.decompressResponsePolicy = exports2.isRestError = exports2.RestError = exports2.createPipelineRequest = exports2.createHttpHeaders = exports2.createDefaultHttpClient = exports2.createPipelineFromOptions = exports2.createEmptyPipeline = void 0; + var pipeline_js_1 = require_pipeline2(); + Object.defineProperty(exports2, 
"createEmptyPipeline", { enumerable: true, get: function() { + return pipeline_js_1.createEmptyPipeline; + } }); + var createPipelineFromOptions_js_1 = require_createPipelineFromOptions2(); + Object.defineProperty(exports2, "createPipelineFromOptions", { enumerable: true, get: function() { + return createPipelineFromOptions_js_1.createPipelineFromOptions; + } }); + var defaultHttpClient_js_1 = require_defaultHttpClient2(); + Object.defineProperty(exports2, "createDefaultHttpClient", { enumerable: true, get: function() { + return defaultHttpClient_js_1.createDefaultHttpClient; + } }); + var httpHeaders_js_1 = require_httpHeaders2(); + Object.defineProperty(exports2, "createHttpHeaders", { enumerable: true, get: function() { + return httpHeaders_js_1.createHttpHeaders; + } }); + var pipelineRequest_js_1 = require_pipelineRequest2(); + Object.defineProperty(exports2, "createPipelineRequest", { enumerable: true, get: function() { + return pipelineRequest_js_1.createPipelineRequest; + } }); + var restError_js_1 = require_restError3(); + Object.defineProperty(exports2, "RestError", { enumerable: true, get: function() { + return restError_js_1.RestError; + } }); + Object.defineProperty(exports2, "isRestError", { enumerable: true, get: function() { + return restError_js_1.isRestError; + } }); + var decompressResponsePolicy_js_1 = require_decompressResponsePolicy2(); + Object.defineProperty(exports2, "decompressResponsePolicy", { enumerable: true, get: function() { + return decompressResponsePolicy_js_1.decompressResponsePolicy; + } }); + Object.defineProperty(exports2, "decompressResponsePolicyName", { enumerable: true, get: function() { + return decompressResponsePolicy_js_1.decompressResponsePolicyName; + } }); + var exponentialRetryPolicy_js_1 = require_exponentialRetryPolicy2(); + Object.defineProperty(exports2, "exponentialRetryPolicy", { enumerable: true, get: function() { + return exponentialRetryPolicy_js_1.exponentialRetryPolicy; + } }); + Object.defineProperty(exports2, "exponentialRetryPolicyName", { enumerable: true, get: function() { + return exponentialRetryPolicy_js_1.exponentialRetryPolicyName; + } }); + var setClientRequestIdPolicy_js_1 = require_setClientRequestIdPolicy(); + Object.defineProperty(exports2, "setClientRequestIdPolicy", { enumerable: true, get: function() { + return setClientRequestIdPolicy_js_1.setClientRequestIdPolicy; + } }); + Object.defineProperty(exports2, "setClientRequestIdPolicyName", { enumerable: true, get: function() { + return setClientRequestIdPolicy_js_1.setClientRequestIdPolicyName; + } }); + var logPolicy_js_1 = require_logPolicy2(); + Object.defineProperty(exports2, "logPolicy", { enumerable: true, get: function() { + return logPolicy_js_1.logPolicy; + } }); + Object.defineProperty(exports2, "logPolicyName", { enumerable: true, get: function() { + return logPolicy_js_1.logPolicyName; + } }); + var multipartPolicy_js_1 = require_multipartPolicy2(); + Object.defineProperty(exports2, "multipartPolicy", { enumerable: true, get: function() { + return multipartPolicy_js_1.multipartPolicy; + } }); + Object.defineProperty(exports2, "multipartPolicyName", { enumerable: true, get: function() { + return multipartPolicy_js_1.multipartPolicyName; + } }); + var proxyPolicy_js_1 = require_proxyPolicy2(); + Object.defineProperty(exports2, "proxyPolicy", { enumerable: true, get: function() { + return proxyPolicy_js_1.proxyPolicy; + } }); + Object.defineProperty(exports2, "proxyPolicyName", { enumerable: true, get: function() { + return 
proxyPolicy_js_1.proxyPolicyName; + } }); + Object.defineProperty(exports2, "getDefaultProxySettings", { enumerable: true, get: function() { + return proxyPolicy_js_1.getDefaultProxySettings; + } }); + var redirectPolicy_js_1 = require_redirectPolicy2(); + Object.defineProperty(exports2, "redirectPolicy", { enumerable: true, get: function() { + return redirectPolicy_js_1.redirectPolicy; + } }); + Object.defineProperty(exports2, "redirectPolicyName", { enumerable: true, get: function() { + return redirectPolicy_js_1.redirectPolicyName; + } }); + var systemErrorRetryPolicy_js_1 = require_systemErrorRetryPolicy2(); + Object.defineProperty(exports2, "systemErrorRetryPolicy", { enumerable: true, get: function() { + return systemErrorRetryPolicy_js_1.systemErrorRetryPolicy; + } }); + Object.defineProperty(exports2, "systemErrorRetryPolicyName", { enumerable: true, get: function() { + return systemErrorRetryPolicy_js_1.systemErrorRetryPolicyName; + } }); + var throttlingRetryPolicy_js_1 = require_throttlingRetryPolicy2(); + Object.defineProperty(exports2, "throttlingRetryPolicy", { enumerable: true, get: function() { + return throttlingRetryPolicy_js_1.throttlingRetryPolicy; + } }); + Object.defineProperty(exports2, "throttlingRetryPolicyName", { enumerable: true, get: function() { + return throttlingRetryPolicy_js_1.throttlingRetryPolicyName; + } }); + var retryPolicy_js_1 = require_retryPolicy2(); + Object.defineProperty(exports2, "retryPolicy", { enumerable: true, get: function() { + return retryPolicy_js_1.retryPolicy; + } }); + var tracingPolicy_js_1 = require_tracingPolicy(); + Object.defineProperty(exports2, "tracingPolicy", { enumerable: true, get: function() { + return tracingPolicy_js_1.tracingPolicy; + } }); + Object.defineProperty(exports2, "tracingPolicyName", { enumerable: true, get: function() { + return tracingPolicy_js_1.tracingPolicyName; + } }); + var defaultRetryPolicy_js_1 = require_defaultRetryPolicy2(); + Object.defineProperty(exports2, "defaultRetryPolicy", { enumerable: true, get: function() { + return defaultRetryPolicy_js_1.defaultRetryPolicy; + } }); + var userAgentPolicy_js_1 = require_userAgentPolicy2(); + Object.defineProperty(exports2, "userAgentPolicy", { enumerable: true, get: function() { + return userAgentPolicy_js_1.userAgentPolicy; + } }); + Object.defineProperty(exports2, "userAgentPolicyName", { enumerable: true, get: function() { + return userAgentPolicy_js_1.userAgentPolicyName; + } }); + var tlsPolicy_js_1 = require_tlsPolicy2(); + Object.defineProperty(exports2, "tlsPolicy", { enumerable: true, get: function() { + return tlsPolicy_js_1.tlsPolicy; + } }); + Object.defineProperty(exports2, "tlsPolicyName", { enumerable: true, get: function() { + return tlsPolicy_js_1.tlsPolicyName; + } }); + var formDataPolicy_js_1 = require_formDataPolicy2(); + Object.defineProperty(exports2, "formDataPolicy", { enumerable: true, get: function() { + return formDataPolicy_js_1.formDataPolicy; + } }); + Object.defineProperty(exports2, "formDataPolicyName", { enumerable: true, get: function() { + return formDataPolicy_js_1.formDataPolicyName; + } }); + var bearerTokenAuthenticationPolicy_js_1 = require_bearerTokenAuthenticationPolicy(); + Object.defineProperty(exports2, "bearerTokenAuthenticationPolicy", { enumerable: true, get: function() { + return bearerTokenAuthenticationPolicy_js_1.bearerTokenAuthenticationPolicy; + } }); + Object.defineProperty(exports2, "bearerTokenAuthenticationPolicyName", { enumerable: true, get: function() { + return 
bearerTokenAuthenticationPolicy_js_1.bearerTokenAuthenticationPolicyName; + } }); + var ndJsonPolicy_js_1 = require_ndJsonPolicy(); + Object.defineProperty(exports2, "ndJsonPolicy", { enumerable: true, get: function() { + return ndJsonPolicy_js_1.ndJsonPolicy; + } }); + Object.defineProperty(exports2, "ndJsonPolicyName", { enumerable: true, get: function() { + return ndJsonPolicy_js_1.ndJsonPolicyName; + } }); + var auxiliaryAuthenticationHeaderPolicy_js_1 = require_auxiliaryAuthenticationHeaderPolicy(); + Object.defineProperty(exports2, "auxiliaryAuthenticationHeaderPolicy", { enumerable: true, get: function() { + return auxiliaryAuthenticationHeaderPolicy_js_1.auxiliaryAuthenticationHeaderPolicy; + } }); + Object.defineProperty(exports2, "auxiliaryAuthenticationHeaderPolicyName", { enumerable: true, get: function() { + return auxiliaryAuthenticationHeaderPolicy_js_1.auxiliaryAuthenticationHeaderPolicyName; + } }); + var agentPolicy_js_1 = require_agentPolicy2(); + Object.defineProperty(exports2, "agentPolicy", { enumerable: true, get: function() { + return agentPolicy_js_1.agentPolicy; + } }); + Object.defineProperty(exports2, "agentPolicyName", { enumerable: true, get: function() { + return agentPolicy_js_1.agentPolicyName; + } }); + var file_js_1 = require_file3(); + Object.defineProperty(exports2, "createFile", { enumerable: true, get: function() { + return file_js_1.createFile; + } }); + Object.defineProperty(exports2, "createFileFromStream", { enumerable: true, get: function() { + return file_js_1.createFileFromStream; + } }); + } +}); + +// node_modules/@azure/core-auth/dist/commonjs/azureKeyCredential.js +var require_azureKeyCredential = __commonJS({ + "node_modules/@azure/core-auth/dist/commonjs/azureKeyCredential.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.AzureKeyCredential = void 0; + var AzureKeyCredential = class { + _key; + /** + * The value of the key to be used in authentication + */ + get key() { + return this._key; } - queryString = queryString.trim(); - queryString = queryString.startsWith("?") ? queryString.substring(1) : queryString; - let querySubStrings = queryString.split("&"); - querySubStrings = querySubStrings.filter((value) => { - const indexOfEqual = value.indexOf("="); - const lastIndexOfEqual = value.lastIndexOf("="); - return indexOfEqual > 0 && indexOfEqual === lastIndexOfEqual && lastIndexOfEqual < value.length - 1; - }); - const queries = {}; - for (const querySubString of querySubStrings) { - const splitResults = querySubString.split("="); - const key = splitResults[0]; - const value = splitResults[1]; - queries[key] = value; + /** + * Create an instance of an AzureKeyCredential for use + * with a service client. + * + * @param key - The initial value of the key to use in authentication + */ + constructor(key) { + if (!key) { + throw new Error("key must be a non-empty string"); + } + this._key = key; } - return queries; - } - function appendToURLQuery(url2, queryParts) { - const urlParsed = new URL(url2); - let query = urlParsed.search; - if (query) { - query += "&" + queryParts; - } else { - query = queryParts; + /** + * Change the value of the key. + * + * Updates will take effect upon the next request after + * updating the key value. 
+ * + * @param newKey - The new key value to be used + */ + update(newKey) { + this._key = newKey; } - urlParsed.search = query; - return urlParsed.toString(); - } - function truncatedISO8061Date(date, withMilliseconds = true) { - const dateString = date.toISOString(); - return withMilliseconds ? dateString.substring(0, dateString.length - 1) + "0000Z" : dateString.substring(0, dateString.length - 5) + "Z"; - } - function base64encode(content) { - return !core_util_1.isNodeLike ? btoa(content) : Buffer.from(content).toString("base64"); - } - function base64decode(encodedString) { - return !core_util_1.isNodeLike ? atob(encodedString) : Buffer.from(encodedString, "base64").toString(); + }; + exports2.AzureKeyCredential = AzureKeyCredential; + } +}); + +// node_modules/@azure/core-auth/dist/commonjs/keyCredential.js +var require_keyCredential = __commonJS({ + "node_modules/@azure/core-auth/dist/commonjs/keyCredential.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.isKeyCredential = isKeyCredential; + var core_util_1 = require_commonjs4(); + function isKeyCredential(credential) { + return (0, core_util_1.isObjectWithProperties)(credential, ["key"]) && typeof credential.key === "string"; } - function generateBlockID(blockIDPrefix, blockIndex) { - const maxSourceStringLength = 48; - const maxBlockIndexLength = 6; - const maxAllowedBlockIDPrefixLength = maxSourceStringLength - maxBlockIndexLength; - if (blockIDPrefix.length > maxAllowedBlockIDPrefixLength) { - blockIDPrefix = blockIDPrefix.slice(0, maxAllowedBlockIDPrefixLength); + } +}); + +// node_modules/@azure/core-auth/dist/commonjs/azureNamedKeyCredential.js +var require_azureNamedKeyCredential = __commonJS({ + "node_modules/@azure/core-auth/dist/commonjs/azureNamedKeyCredential.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.AzureNamedKeyCredential = void 0; + exports2.isNamedKeyCredential = isNamedKeyCredential; + var core_util_1 = require_commonjs4(); + var AzureNamedKeyCredential = class { + _key; + _name; + /** + * The value of the key to be used in authentication. + */ + get key() { + return this._key; } - const res = blockIDPrefix + padStart2(blockIndex.toString(), maxSourceStringLength - blockIDPrefix.length, "0"); - return base64encode(res); - } - async function delay2(timeInMs, aborter, abortError) { - return new Promise((resolve6, reject) => { - let timeout; - const abortHandler = () => { - if (timeout !== void 0) { - clearTimeout(timeout); - } - reject(abortError); - }; - const resolveHandler = () => { - if (aborter !== void 0) { - aborter.removeEventListener("abort", abortHandler); - } - resolve6(); - }; - timeout = setTimeout(resolveHandler, timeInMs); - if (aborter !== void 0) { - aborter.addEventListener("abort", abortHandler); + /** + * The value of the name to be used in authentication. + */ + get name() { + return this._name; + } + /** + * Create an instance of an AzureNamedKeyCredential for use + * with a service client. + * + * @param name - The initial value of the name to use in authentication. + * @param key - The initial value of the key to use in authentication. 
+ */ + constructor(name, key) { + if (!name || !key) { + throw new TypeError("name and key must be non-empty strings"); } - }); - } - function padStart2(currentString, targetLength, padString = " ") { - if (String.prototype.padStart) { - return currentString.padStart(targetLength, padString); + this._name = name; + this._key = key; } - padString = padString || " "; - if (currentString.length > targetLength) { - return currentString; - } else { - targetLength = targetLength - currentString.length; - if (targetLength > padString.length) { - padString += padString.repeat(targetLength / padString.length); + /** + * Change the value of the key. + * + * Updates will take effect upon the next request after + * updating the key value. + * + * @param newName - The new name value to be used. + * @param newKey - The new key value to be used. + */ + update(newName, newKey) { + if (!newName || !newKey) { + throw new TypeError("newName and newKey must be non-empty strings"); } - return padString.slice(0, targetLength) + currentString; + this._name = newName; + this._key = newKey; } + }; + exports2.AzureNamedKeyCredential = AzureNamedKeyCredential; + function isNamedKeyCredential(credential) { + return (0, core_util_1.isObjectWithProperties)(credential, ["name", "key"]) && typeof credential.key === "string" && typeof credential.name === "string"; } - function sanitizeURL(url2) { - let safeURL = url2; - if (getURLParameter(safeURL, constants_js_1.URLConstants.Parameters.SIGNATURE)) { - safeURL = setURLParameter(safeURL, constants_js_1.URLConstants.Parameters.SIGNATURE, "*****"); + } +}); + +// node_modules/@azure/core-auth/dist/commonjs/azureSASCredential.js +var require_azureSASCredential = __commonJS({ + "node_modules/@azure/core-auth/dist/commonjs/azureSASCredential.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.AzureSASCredential = void 0; + exports2.isSASCredential = isSASCredential; + var core_util_1 = require_commonjs4(); + var AzureSASCredential = class { + _signature; + /** + * The value of the shared access signature to be used in authentication + */ + get signature() { + return this._signature; } - return safeURL; - } - function sanitizeHeaders(originalHeader) { - const headers = (0, core_rest_pipeline_1.createHttpHeaders)(); - for (const [name, value] of originalHeader) { - if (name.toLowerCase() === constants_js_1.HeaderConstants.AUTHORIZATION.toLowerCase()) { - headers.set(name, "*****"); - } else if (name.toLowerCase() === constants_js_1.HeaderConstants.X_MS_COPY_SOURCE) { - headers.set(name, sanitizeURL(value)); - } else { - headers.set(name, value); + /** + * Create an instance of an AzureSASCredential for use + * with a service client. + * + * @param signature - The initial value of the shared access signature to use in authentication + */ + constructor(signature) { + if (!signature) { + throw new Error("shared access signature must be a non-empty string"); } + this._signature = signature; } - return headers; - } - function iEqual(str1, str2) { - return str1.toLocaleLowerCase() === str2.toLocaleLowerCase(); - } - function getAccountNameFromUrl(url2) { - const parsedUrl = new URL(url2); - let accountName; - try { - if (parsedUrl.hostname.split(".")[1] === "blob") { - accountName = parsedUrl.hostname.split(".")[0]; - } else if (isIpEndpointStyle(parsedUrl)) { - accountName = parsedUrl.pathname.split("/")[1]; - } else { - accountName = ""; + /** + * Change the value of the signature. 
+ * + * Updates will take effect upon the next request after + * updating the signature value. + * + * @param newSignature - The new shared access signature value to be used + */ + update(newSignature) { + if (!newSignature) { + throw new Error("shared access signature must be a non-empty string"); } - return accountName; - } catch (error3) { - throw new Error("Unable to extract accountName with provided information."); + this._signature = newSignature; } + }; + exports2.AzureSASCredential = AzureSASCredential; + function isSASCredential(credential) { + return (0, core_util_1.isObjectWithProperties)(credential, ["signature"]) && typeof credential.signature === "string"; } - function isIpEndpointStyle(parsedUrl) { - const host = parsedUrl.host; - return /^.*:.*:.*$|^(localhost|host.docker.internal)(:[0-9]+)?$|^(\d|[1-9]\d|1\d\d|2[0-4]\d|25[0-5])(\.(\d|[1-9]\d|1\d\d|2[0-4]\d|25[0-5])){3}(:[0-9]+)?$/.test(host) || Boolean(parsedUrl.port) && constants_js_1.PathStylePorts.includes(parsedUrl.port); - } - function toBlobTagsString(tags) { - if (tags === void 0) { - return void 0; - } - const tagPairs = []; - for (const key in tags) { - if (Object.prototype.hasOwnProperty.call(tags, key)) { - const value = tags[key]; - tagPairs.push(`${encodeURIComponent(key)}=${encodeURIComponent(value)}`); - } - } - return tagPairs.join("&"); + } +}); + +// node_modules/@azure/core-auth/dist/commonjs/tokenCredential.js +var require_tokenCredential = __commonJS({ + "node_modules/@azure/core-auth/dist/commonjs/tokenCredential.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.isBearerToken = isBearerToken; + exports2.isPopToken = isPopToken; + exports2.isTokenCredential = isTokenCredential; + function isBearerToken(accessToken) { + return !accessToken.tokenType || accessToken.tokenType === "Bearer"; } - function toBlobTags(tags) { - if (tags === void 0) { - return void 0; - } - const res = { - blobTagSet: [] - }; - for (const key in tags) { - if (Object.prototype.hasOwnProperty.call(tags, key)) { - const value = tags[key]; - res.blobTagSet.push({ - key, - value - }); - } - } - return res; + function isPopToken(accessToken) { + return accessToken.tokenType === "pop"; } - function toTags(tags) { - if (tags === void 0) { - return void 0; - } - const res = {}; - for (const blobTag of tags.blobTagSet) { - res[blobTag.key] = blobTag.value; - } - return res; + function isTokenCredential(credential) { + const castCredential = credential; + return castCredential && typeof castCredential.getToken === "function" && (castCredential.signRequest === void 0 || castCredential.getToken.length > 0); } - function toQuerySerialization(textConfiguration) { - if (textConfiguration === void 0) { - return void 0; - } - switch (textConfiguration.kind) { - case "csv": - return { - format: { - type: "delimited", - delimitedTextConfiguration: { - columnSeparator: textConfiguration.columnSeparator || ",", - fieldQuote: textConfiguration.fieldQuote || "", - recordSeparator: textConfiguration.recordSeparator, - escapeChar: textConfiguration.escapeCharacter || "", - headersPresent: textConfiguration.hasHeaders || false - } - } - }; - case "json": - return { - format: { - type: "json", - jsonTextConfiguration: { - recordSeparator: textConfiguration.recordSeparator - } - } - }; - case "arrow": - return { - format: { - type: "arrow", - arrowConfiguration: { - schema: textConfiguration.schema - } - } - }; - case "parquet": - return { - format: { - type: "parquet" - } - }; - default: - throw 
Error("Invalid BlobQueryTextConfiguration."); - } - } - function parseObjectReplicationRecord(objectReplicationRecord) { - if (!objectReplicationRecord) { - return void 0; - } - if ("policy-id" in objectReplicationRecord) { - return void 0; - } - const orProperties = []; - for (const key in objectReplicationRecord) { - const ids = key.split("_"); - const policyPrefix = "or-"; - if (ids[0].startsWith(policyPrefix)) { - ids[0] = ids[0].substring(policyPrefix.length); - } - const rule = { - ruleId: ids[1], - replicationStatus: objectReplicationRecord[key] - }; - const policyIndex = orProperties.findIndex((policy) => policy.policyId === ids[0]); - if (policyIndex > -1) { - orProperties[policyIndex].rules.push(rule); - } else { - orProperties.push({ - policyId: ids[0], - rules: [rule] - }); - } - } - return orProperties; - } - function attachCredential(thing, credential) { - thing.credential = credential; - return thing; - } - function httpAuthorizationToString(httpAuthorization) { - return httpAuthorization ? httpAuthorization.scheme + " " + httpAuthorization.value : void 0; - } - function BlobNameToString(name) { - if (name.encoded) { - return decodeURIComponent(name.content); - } else { - return name.content; - } - } - function ConvertInternalResponseOfListBlobFlat(internalResponse) { + } +}); + +// node_modules/@azure/core-auth/dist/commonjs/index.js +var require_commonjs7 = __commonJS({ + "node_modules/@azure/core-auth/dist/commonjs/index.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.isTokenCredential = exports2.isSASCredential = exports2.AzureSASCredential = exports2.isNamedKeyCredential = exports2.AzureNamedKeyCredential = exports2.isKeyCredential = exports2.AzureKeyCredential = void 0; + var azureKeyCredential_js_1 = require_azureKeyCredential(); + Object.defineProperty(exports2, "AzureKeyCredential", { enumerable: true, get: function() { + return azureKeyCredential_js_1.AzureKeyCredential; + } }); + var keyCredential_js_1 = require_keyCredential(); + Object.defineProperty(exports2, "isKeyCredential", { enumerable: true, get: function() { + return keyCredential_js_1.isKeyCredential; + } }); + var azureNamedKeyCredential_js_1 = require_azureNamedKeyCredential(); + Object.defineProperty(exports2, "AzureNamedKeyCredential", { enumerable: true, get: function() { + return azureNamedKeyCredential_js_1.AzureNamedKeyCredential; + } }); + Object.defineProperty(exports2, "isNamedKeyCredential", { enumerable: true, get: function() { + return azureNamedKeyCredential_js_1.isNamedKeyCredential; + } }); + var azureSASCredential_js_1 = require_azureSASCredential(); + Object.defineProperty(exports2, "AzureSASCredential", { enumerable: true, get: function() { + return azureSASCredential_js_1.AzureSASCredential; + } }); + Object.defineProperty(exports2, "isSASCredential", { enumerable: true, get: function() { + return azureSASCredential_js_1.isSASCredential; + } }); + var tokenCredential_js_1 = require_tokenCredential(); + Object.defineProperty(exports2, "isTokenCredential", { enumerable: true, get: function() { + return tokenCredential_js_1.isTokenCredential; + } }); + } +}); + +// node_modules/@azure/core-http-compat/dist/commonjs/policies/disableKeepAlivePolicy.js +var require_disableKeepAlivePolicy = __commonJS({ + "node_modules/@azure/core-http-compat/dist/commonjs/policies/disableKeepAlivePolicy.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.disableKeepAlivePolicyName = void 
0; + exports2.createDisableKeepAlivePolicy = createDisableKeepAlivePolicy; + exports2.pipelineContainsDisableKeepAlivePolicy = pipelineContainsDisableKeepAlivePolicy; + exports2.disableKeepAlivePolicyName = "DisableKeepAlivePolicy"; + function createDisableKeepAlivePolicy() { return { - ...internalResponse, - segment: { - blobItems: internalResponse.segment.blobItems.map((blobItemInteral) => { - const blobItem = { - ...blobItemInteral, - name: BlobNameToString(blobItemInteral.name) - }; - return blobItem; - }) + name: exports2.disableKeepAlivePolicyName, + async sendRequest(request2, next) { + request2.disableKeepAlive = true; + return next(request2); } }; } - function ConvertInternalResponseOfListBlobHierarchy(internalResponse) { - return { - ...internalResponse, - segment: { - blobPrefixes: internalResponse.segment.blobPrefixes?.map((blobPrefixInternal) => { - const blobPrefix = { - ...blobPrefixInternal, - name: BlobNameToString(blobPrefixInternal.name) - }; - return blobPrefix; - }), - blobItems: internalResponse.segment.blobItems.map((blobItemInteral) => { - const blobItem = { - ...blobItemInteral, - name: BlobNameToString(blobItemInteral.name) - }; - return blobItem; - }) - } - }; + function pipelineContainsDisableKeepAlivePolicy(pipeline) { + return pipeline.getOrderedPolicies().some((policy) => policy.name === exports2.disableKeepAlivePolicyName); } - function* ExtractPageRangeInfoItems(getPageRangesSegment) { - let pageRange = []; - let clearRange = []; - if (getPageRangesSegment.pageRange) - pageRange = getPageRangesSegment.pageRange; - if (getPageRangesSegment.clearRange) - clearRange = getPageRangesSegment.clearRange; - let pageRangeIndex = 0; - let clearRangeIndex = 0; - while (pageRangeIndex < pageRange.length && clearRangeIndex < clearRange.length) { - if (pageRange[pageRangeIndex].start < clearRange[clearRangeIndex].start) { - yield { - start: pageRange[pageRangeIndex].start, - end: pageRange[pageRangeIndex].end, - isClear: false - }; - ++pageRangeIndex; - } else { - yield { - start: clearRange[clearRangeIndex].start, - end: clearRange[clearRangeIndex].end, - isClear: true - }; - ++clearRangeIndex; - } - } - for (; pageRangeIndex < pageRange.length; ++pageRangeIndex) { - yield { - start: pageRange[pageRangeIndex].start, - end: pageRange[pageRangeIndex].end, - isClear: false - }; - } - for (; clearRangeIndex < clearRange.length; ++clearRangeIndex) { - yield { - start: clearRange[clearRangeIndex].start, - end: clearRange[clearRangeIndex].end, - isClear: true - }; - } + } +}); + +// node_modules/@azure/core-client/dist/commonjs/base64.js +var require_base64 = __commonJS({ + "node_modules/@azure/core-client/dist/commonjs/base64.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.encodeString = encodeString; + exports2.encodeByteArray = encodeByteArray; + exports2.decodeString = decodeString; + exports2.decodeStringToString = decodeStringToString; + function encodeString(value) { + return Buffer.from(value).toString("base64"); } - function EscapePath(blobName) { - const split = blobName.split("/"); - for (let i = 0; i < split.length; i++) { - split[i] = encodeURIComponent(split[i]); - } - return split.join("/"); + function encodeByteArray(value) { + const bufferValue = value instanceof Buffer ? 
value : Buffer.from(value.buffer); + return bufferValue.toString("base64"); } - function assertResponse(response) { - if (`_response` in response) { - return response; - } - throw new TypeError(`Unexpected response object ${response}`); + function decodeString(value) { + return Buffer.from(value, "base64"); + } + function decodeStringToString(value) { + return Buffer.from(value, "base64").toString(); } } }); -// node_modules/@azure/storage-blob/dist/commonjs/policies/StorageRetryPolicyType.js -var require_StorageRetryPolicyType = __commonJS({ - "node_modules/@azure/storage-blob/dist/commonjs/policies/StorageRetryPolicyType.js"(exports2) { +// node_modules/@azure/core-client/dist/commonjs/interfaces.js +var require_interfaces = __commonJS({ + "node_modules/@azure/core-client/dist/commonjs/interfaces.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.StorageRetryPolicyType = void 0; - var StorageRetryPolicyType; - (function(StorageRetryPolicyType2) { - StorageRetryPolicyType2[StorageRetryPolicyType2["EXPONENTIAL"] = 0] = "EXPONENTIAL"; - StorageRetryPolicyType2[StorageRetryPolicyType2["FIXED"] = 1] = "FIXED"; - })(StorageRetryPolicyType || (exports2.StorageRetryPolicyType = StorageRetryPolicyType = {})); + exports2.XML_CHARKEY = exports2.XML_ATTRKEY = void 0; + exports2.XML_ATTRKEY = "$"; + exports2.XML_CHARKEY = "_"; } }); -// node_modules/@azure/storage-blob/dist/commonjs/policies/StorageRetryPolicy.js -var require_StorageRetryPolicy = __commonJS({ - "node_modules/@azure/storage-blob/dist/commonjs/policies/StorageRetryPolicy.js"(exports2) { +// node_modules/@azure/core-client/dist/commonjs/utils.js +var require_utils6 = __commonJS({ + "node_modules/@azure/core-client/dist/commonjs/utils.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.StorageRetryPolicy = void 0; - exports2.NewRetryPolicyFactory = NewRetryPolicyFactory; - var abort_controller_1 = require_commonjs11(); - var RequestPolicy_js_1 = require_RequestPolicy(); - var constants_js_1 = require_constants15(); - var utils_common_js_1 = require_utils_common(); - var log_js_1 = require_log5(); - var StorageRetryPolicyType_js_1 = require_StorageRetryPolicyType(); - function NewRetryPolicyFactory(retryOptions) { - return { - create: (nextPolicy, options) => { - return new StorageRetryPolicy(nextPolicy, options, retryOptions); - } + exports2.isPrimitiveBody = isPrimitiveBody; + exports2.isDuration = isDuration; + exports2.isValidUuid = isValidUuid; + exports2.flattenResponse = flattenResponse; + function isPrimitiveBody(value, mapperTypeName) { + return mapperTypeName !== "Composite" && mapperTypeName !== "Dictionary" && (typeof value === "string" || typeof value === "number" || typeof value === "boolean" || mapperTypeName?.match(/^(Date|DateTime|DateTimeRfc1123|UnixTime|ByteArray|Base64Url)$/i) !== null || value === void 0 || value === null); + } + var validateISODuration = /^(-|\+)?P(?:([-+]?[0-9,.]*)Y)?(?:([-+]?[0-9,.]*)M)?(?:([-+]?[0-9,.]*)W)?(?:([-+]?[0-9,.]*)D)?(?:T(?:([-+]?[0-9,.]*)H)?(?:([-+]?[0-9,.]*)M)?(?:([-+]?[0-9,.]*)S)?)?$/; + function isDuration(value) { + return validateISODuration.test(value); + } + var validUuidRegex = /^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$/i; + function isValidUuid(uuid) { + return validUuidRegex.test(uuid); + } + function handleNullableResponseAndWrappableBody(responseObject) { + const combinedHeadersAndBody = { + ...responseObject.headers, + 
...responseObject.body }; + if (responseObject.hasNullableType && Object.getOwnPropertyNames(combinedHeadersAndBody).length === 0) { + return responseObject.shouldWrapBody ? { body: null } : null; + } else { + return responseObject.shouldWrapBody ? { + ...responseObject.headers, + body: responseObject.body + } : combinedHeadersAndBody; + } } - var DEFAULT_RETRY_OPTIONS = { - maxRetryDelayInMs: 120 * 1e3, - maxTries: 4, - retryDelayInMs: 4 * 1e3, - retryPolicyType: StorageRetryPolicyType_js_1.StorageRetryPolicyType.EXPONENTIAL, - secondaryHost: "", - tryTimeoutInMs: void 0 - // Use server side default timeout strategy - }; - var RETRY_ABORT_ERROR = new abort_controller_1.AbortError("The operation was aborted."); - var StorageRetryPolicy = class extends RequestPolicy_js_1.BaseRequestPolicy { - /** - * RetryOptions. - */ - retryOptions; - /** - * Creates an instance of RetryPolicy. - * - * @param nextPolicy - - * @param options - - * @param retryOptions - - */ - constructor(nextPolicy, options, retryOptions = DEFAULT_RETRY_OPTIONS) { - super(nextPolicy, options); - this.retryOptions = { - retryPolicyType: retryOptions.retryPolicyType ? retryOptions.retryPolicyType : DEFAULT_RETRY_OPTIONS.retryPolicyType, - maxTries: retryOptions.maxTries && retryOptions.maxTries >= 1 ? Math.floor(retryOptions.maxTries) : DEFAULT_RETRY_OPTIONS.maxTries, - tryTimeoutInMs: retryOptions.tryTimeoutInMs && retryOptions.tryTimeoutInMs >= 0 ? retryOptions.tryTimeoutInMs : DEFAULT_RETRY_OPTIONS.tryTimeoutInMs, - retryDelayInMs: retryOptions.retryDelayInMs && retryOptions.retryDelayInMs >= 0 ? Math.min(retryOptions.retryDelayInMs, retryOptions.maxRetryDelayInMs ? retryOptions.maxRetryDelayInMs : DEFAULT_RETRY_OPTIONS.maxRetryDelayInMs) : DEFAULT_RETRY_OPTIONS.retryDelayInMs, - maxRetryDelayInMs: retryOptions.maxRetryDelayInMs && retryOptions.maxRetryDelayInMs >= 0 ? retryOptions.maxRetryDelayInMs : DEFAULT_RETRY_OPTIONS.maxRetryDelayInMs, - secondaryHost: retryOptions.secondaryHost ? retryOptions.secondaryHost : DEFAULT_RETRY_OPTIONS.secondaryHost + function flattenResponse(fullResponse, responseSpec) { + const parsedHeaders = fullResponse.parsedHeaders; + if (fullResponse.request.method === "HEAD") { + return { + ...parsedHeaders, + body: fullResponse.parsedBody }; } - /** - * Sends request. - * - * @param request - - */ - async sendRequest(request2) { - return this.attemptSendRequest(request2, false, 1); + const bodyMapper = responseSpec && responseSpec.bodyMapper; + const isNullable = Boolean(bodyMapper?.nullable); + const expectedBodyTypeName = bodyMapper?.type.name; + if (expectedBodyTypeName === "Stream") { + return { + ...parsedHeaders, + blobBody: fullResponse.blobBody, + readableStreamBody: fullResponse.readableStreamBody + }; } - /** - * Decide and perform next retry. Won't mutate request parameter. - * - * @param request - - * @param secondaryHas404 - If attempt was against the secondary & it returned a StatusNotFound (404), then - * the resource was not found. This may be due to replication delay. So, in this - * case, we'll never try the secondary again for this operation. - * @param attempt - How many retries has been attempted to performed, starting from 1, which includes - * the attempt will be performed by this method call. 
- */ - async attemptSendRequest(request2, secondaryHas404, attempt) { - const newRequest = request2.clone(); - const isPrimaryRetry = secondaryHas404 || !this.retryOptions.secondaryHost || !(request2.method === "GET" || request2.method === "HEAD" || request2.method === "OPTIONS") || attempt % 2 === 1; - if (!isPrimaryRetry) { - newRequest.url = (0, utils_common_js_1.setURLHost)(newRequest.url, this.retryOptions.secondaryHost); - } - if (this.retryOptions.tryTimeoutInMs) { - newRequest.url = (0, utils_common_js_1.setURLParameter)(newRequest.url, constants_js_1.URLConstants.Parameters.TIMEOUT, Math.floor(this.retryOptions.tryTimeoutInMs / 1e3).toString()); - } - let response; - try { - log_js_1.logger.info(`RetryPolicy: =====> Try=${attempt} ${isPrimaryRetry ? "Primary" : "Secondary"}`); - response = await this._nextPolicy.sendRequest(newRequest); - if (!this.shouldRetry(isPrimaryRetry, attempt, response)) { - return response; + const modelProperties = expectedBodyTypeName === "Composite" && bodyMapper.type.modelProperties || {}; + const isPageableResponse = Object.keys(modelProperties).some((k) => modelProperties[k].serializedName === ""); + if (expectedBodyTypeName === "Sequence" || isPageableResponse) { + const arrayResponse = fullResponse.parsedBody ?? []; + for (const key of Object.keys(modelProperties)) { + if (modelProperties[key].serializedName) { + arrayResponse[key] = fullResponse.parsedBody?.[key]; } - secondaryHas404 = secondaryHas404 || !isPrimaryRetry && response.status === 404; - } catch (err) { - log_js_1.logger.error(`RetryPolicy: Caught error, message: ${err.message}, code: ${err.code}`); - if (!this.shouldRetry(isPrimaryRetry, attempt, response, err)) { - throw err; + } + if (parsedHeaders) { + for (const key of Object.keys(parsedHeaders)) { + arrayResponse[key] = parsedHeaders[key]; } } - await this.delay(isPrimaryRetry, attempt, request2.abortSignal); - return this.attemptSendRequest(request2, secondaryHas404, ++attempt); + return isNullable && !fullResponse.parsedBody && !parsedHeaders && Object.getOwnPropertyNames(modelProperties).length === 0 ? null : arrayResponse; + } + return handleNullableResponseAndWrappableBody({ + body: fullResponse.parsedBody, + headers: parsedHeaders, + hasNullableType: isNullable, + shouldWrapBody: isPrimitiveBody(fullResponse.parsedBody, expectedBodyTypeName) + }); + } + } +}); + +// node_modules/@azure/core-client/dist/commonjs/serializer.js +var require_serializer = __commonJS({ + "node_modules/@azure/core-client/dist/commonjs/serializer.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.MapperTypeNames = void 0; + exports2.createSerializer = createSerializer; + var tslib_1 = (init_tslib_es6(), __toCommonJS(tslib_es6_exports)); + var base64 = tslib_1.__importStar(require_base64()); + var interfaces_js_1 = require_interfaces(); + var utils_js_1 = require_utils6(); + var SerializerImpl = class { + modelMappers; + isXML; + constructor(modelMappers = {}, isXML = false) { + this.modelMappers = modelMappers; + this.isXML = isXML; } /** - * Decide whether to retry according to last HTTP response and retry counters. - * - * @param isPrimaryRetry - - * @param attempt - - * @param response - - * @param err - + * @deprecated Removing the constraints validation on client side. 
*/ - shouldRetry(isPrimaryRetry, attempt, response, err) { - if (attempt >= this.retryOptions.maxTries) { - log_js_1.logger.info(`RetryPolicy: Attempt(s) ${attempt} >= maxTries ${this.retryOptions.maxTries}, no further try.`); - return false; - } - const retriableErrors = [ - "ETIMEDOUT", - "ESOCKETTIMEDOUT", - "ECONNREFUSED", - "ECONNRESET", - "ENOENT", - "ENOTFOUND", - "TIMEOUT", - "EPIPE", - "REQUEST_SEND_ERROR" - // For default xhr based http client provided in ms-rest-js - ]; - if (err) { - for (const retriableError of retriableErrors) { - if (err.name.toUpperCase().includes(retriableError) || err.message.toUpperCase().includes(retriableError) || err.code && err.code.toString().toUpperCase() === retriableError) { - log_js_1.logger.info(`RetryPolicy: Network error ${retriableError} found, will retry.`); - return true; - } + validateConstraints(mapper, value, objectName) { + const failValidation = (constraintName, constraintValue) => { + throw new Error(`"${objectName}" with value "${value}" should satisfy the constraint "${constraintName}": ${constraintValue}.`); + }; + if (mapper.constraints && value !== void 0 && value !== null) { + const { ExclusiveMaximum, ExclusiveMinimum, InclusiveMaximum, InclusiveMinimum, MaxItems, MaxLength, MinItems, MinLength, MultipleOf, Pattern, UniqueItems } = mapper.constraints; + if (ExclusiveMaximum !== void 0 && value >= ExclusiveMaximum) { + failValidation("ExclusiveMaximum", ExclusiveMaximum); } - } - if (response || err) { - const statusCode = response ? response.status : err ? err.statusCode : 0; - if (!isPrimaryRetry && statusCode === 404) { - log_js_1.logger.info(`RetryPolicy: Secondary access with 404, will retry.`); - return true; + if (ExclusiveMinimum !== void 0 && value <= ExclusiveMinimum) { + failValidation("ExclusiveMinimum", ExclusiveMinimum); } - if (statusCode === 503 || statusCode === 500) { - log_js_1.logger.info(`RetryPolicy: Will retry for status code ${statusCode}.`); - return true; + if (InclusiveMaximum !== void 0 && value > InclusiveMaximum) { + failValidation("InclusiveMaximum", InclusiveMaximum); } - } - if (response) { - if (response?.status >= 400) { - const copySourceError = response.headers.get(constants_js_1.HeaderConstants.X_MS_CopySourceErrorCode); - if (copySourceError !== void 0) { - switch (copySourceError) { - case "InternalError": - case "OperationTimedOut": - case "ServerBusy": - return true; - } + if (InclusiveMinimum !== void 0 && value < InclusiveMinimum) { + failValidation("InclusiveMinimum", InclusiveMinimum); + } + if (MaxItems !== void 0 && value.length > MaxItems) { + failValidation("MaxItems", MaxItems); + } + if (MaxLength !== void 0 && value.length > MaxLength) { + failValidation("MaxLength", MaxLength); + } + if (MinItems !== void 0 && value.length < MinItems) { + failValidation("MinItems", MinItems); + } + if (MinLength !== void 0 && value.length < MinLength) { + failValidation("MinLength", MinLength); + } + if (MultipleOf !== void 0 && value % MultipleOf !== 0) { + failValidation("MultipleOf", MultipleOf); + } + if (Pattern) { + const pattern = typeof Pattern === "string" ? 
new RegExp(Pattern) : Pattern; + if (typeof value !== "string" || value.match(pattern) === null) { + failValidation("Pattern", Pattern); } } + if (UniqueItems && value.some((item, i, ar) => ar.indexOf(item) !== i)) { + failValidation("UniqueItems", UniqueItems); + } } - if (err?.code === "PARSE_ERROR" && err?.message.startsWith(`Error "Error: Unclosed root tag`)) { - log_js_1.logger.info("RetryPolicy: Incomplete XML response likely due to service timeout, will retry."); - return true; - } - return false; } /** - * Delay a calculated time between retries. + * Serialize the given object based on its metadata defined in the mapper * - * @param isPrimaryRetry - - * @param attempt - - * @param abortSignal - - */ - async delay(isPrimaryRetry, attempt, abortSignal) { - let delayTimeInMs = 0; - if (isPrimaryRetry) { - switch (this.retryOptions.retryPolicyType) { - case StorageRetryPolicyType_js_1.StorageRetryPolicyType.EXPONENTIAL: - delayTimeInMs = Math.min((Math.pow(2, attempt - 1) - 1) * this.retryOptions.retryDelayInMs, this.retryOptions.maxRetryDelayInMs); - break; - case StorageRetryPolicyType_js_1.StorageRetryPolicyType.FIXED: - delayTimeInMs = this.retryOptions.retryDelayInMs; - break; + * @param mapper - The mapper which defines the metadata of the serializable object + * + * @param object - A valid Javascript object to be serialized + * + * @param objectName - Name of the serialized object + * + * @param options - additional options to serialization + * + * @returns A valid serialized Javascript object + */ + serialize(mapper, object, objectName, options = { xml: {} }) { + const updatedOptions = { + xml: { + rootName: options.xml.rootName ?? "", + includeRoot: options.xml.includeRoot ?? false, + xmlCharKey: options.xml.xmlCharKey ?? interfaces_js_1.XML_CHARKEY } + }; + let payload = {}; + const mapperType = mapper.type.name; + if (!objectName) { + objectName = mapper.serializedName; + } + if (mapperType.match(/^Sequence$/i) !== null) { + payload = []; + } + if (mapper.isConstant) { + object = mapper.defaultValue; + } + const { required, nullable } = mapper; + if (required && nullable && object === void 0) { + throw new Error(`${objectName} cannot be undefined.`); + } + if (required && !nullable && (object === void 0 || object === null)) { + throw new Error(`${objectName} cannot be null or undefined.`); + } + if (!required && nullable === false && object === null) { + throw new Error(`${objectName} cannot be null.`); + } + if (object === void 0 || object === null) { + payload = object; } else { - delayTimeInMs = Math.random() * 1e3; + if (mapperType.match(/^any$/i) !== null) { + payload = object; + } else if (mapperType.match(/^(Number|String|Boolean|Object|Stream|Uuid)$/i) !== null) { + payload = serializeBasicTypes(mapperType, objectName, object); + } else if (mapperType.match(/^Enum$/i) !== null) { + const enumMapper = mapper; + payload = serializeEnumType(objectName, enumMapper.type.allowedValues, object); + } else if (mapperType.match(/^(Date|DateTime|TimeSpan|DateTimeRfc1123|UnixTime)$/i) !== null) { + payload = serializeDateTypes(mapperType, object, objectName); + } else if (mapperType.match(/^ByteArray$/i) !== null) { + payload = serializeByteArrayType(objectName, object); + } else if (mapperType.match(/^Base64Url$/i) !== null) { + payload = serializeBase64UrlType(objectName, object); + } else if (mapperType.match(/^Sequence$/i) !== null) { + payload = serializeSequenceType(this, mapper, object, objectName, Boolean(this.isXML), updatedOptions); + } else if 
(mapperType.match(/^Dictionary$/i) !== null) { + payload = serializeDictionaryType(this, mapper, object, objectName, Boolean(this.isXML), updatedOptions); + } else if (mapperType.match(/^Composite$/i) !== null) { + payload = serializeCompositeType(this, mapper, object, objectName, Boolean(this.isXML), updatedOptions); + } } - log_js_1.logger.info(`RetryPolicy: Delay for ${delayTimeInMs}ms`); - return (0, utils_common_js_1.delay)(delayTimeInMs, abortSignal, RETRY_ABORT_ERROR); - } - }; - exports2.StorageRetryPolicy = StorageRetryPolicy; - } -}); - -// node_modules/@azure/storage-blob/dist/commonjs/StorageRetryPolicyFactory.js -var require_StorageRetryPolicyFactory = __commonJS({ - "node_modules/@azure/storage-blob/dist/commonjs/StorageRetryPolicyFactory.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.StorageRetryPolicyFactory = exports2.StorageRetryPolicy = exports2.StorageRetryPolicyType = void 0; - var StorageRetryPolicy_js_1 = require_StorageRetryPolicy(); - Object.defineProperty(exports2, "StorageRetryPolicy", { enumerable: true, get: function() { - return StorageRetryPolicy_js_1.StorageRetryPolicy; - } }); - var StorageRetryPolicyType_js_1 = require_StorageRetryPolicyType(); - Object.defineProperty(exports2, "StorageRetryPolicyType", { enumerable: true, get: function() { - return StorageRetryPolicyType_js_1.StorageRetryPolicyType; - } }); - var StorageRetryPolicyFactory = class { - retryOptions; - /** - * Creates an instance of StorageRetryPolicyFactory. - * @param retryOptions - - */ - constructor(retryOptions) { - this.retryOptions = retryOptions; + return payload; } /** - * Creates a StorageRetryPolicy object. + * Deserialize the given object based on its metadata defined in the mapper * - * @param nextPolicy - - * @param options - - */ - create(nextPolicy, options) { - return new StorageRetryPolicy_js_1.StorageRetryPolicy(nextPolicy, options, this.retryOptions); - } - }; - exports2.StorageRetryPolicyFactory = StorageRetryPolicyFactory; - } -}); - -// node_modules/@azure/storage-blob/dist/commonjs/policies/CredentialPolicy.js -var require_CredentialPolicy = __commonJS({ - "node_modules/@azure/storage-blob/dist/commonjs/policies/CredentialPolicy.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.CredentialPolicy = void 0; - var RequestPolicy_js_1 = require_RequestPolicy(); - var CredentialPolicy = class extends RequestPolicy_js_1.BaseRequestPolicy { - /** - * Sends out request. + * @param mapper - The mapper which defines the metadata of the serializable object * - * @param request - - */ - sendRequest(request2) { - return this._nextPolicy.sendRequest(this.signRequest(request2)); - } - /** - * Child classes must implement this method with request signing. This method - * will be executed in {@link sendRequest}. + * @param responseBody - A valid Javascript entity to be deserialized * - * @param request - + * @param objectName - Name of the deserialized object + * + * @param options - Controls behavior of XML parser and builder. + * + * @returns A valid deserialized Javascript object */ - signRequest(request2) { - return request2; + deserialize(mapper, responseBody, objectName, options = { xml: {} }) { + const updatedOptions = { + xml: { + rootName: options.xml.rootName ?? "", + includeRoot: options.xml.includeRoot ?? false, + xmlCharKey: options.xml.xmlCharKey ?? interfaces_js_1.XML_CHARKEY + }, + ignoreUnknownProperties: options.ignoreUnknownProperties ?? 
false + }; + if (responseBody === void 0 || responseBody === null) { + if (this.isXML && mapper.type.name === "Sequence" && !mapper.xmlIsWrapped) { + responseBody = []; + } + if (mapper.defaultValue !== void 0) { + responseBody = mapper.defaultValue; + } + return responseBody; + } + let payload; + const mapperType = mapper.type.name; + if (!objectName) { + objectName = mapper.serializedName; + } + if (mapperType.match(/^Composite$/i) !== null) { + payload = deserializeCompositeType(this, mapper, responseBody, objectName, updatedOptions); + } else { + if (this.isXML) { + const xmlCharKey = updatedOptions.xml.xmlCharKey; + if (responseBody[interfaces_js_1.XML_ATTRKEY] !== void 0 && responseBody[xmlCharKey] !== void 0) { + responseBody = responseBody[xmlCharKey]; + } + } + if (mapperType.match(/^Number$/i) !== null) { + payload = parseFloat(responseBody); + if (isNaN(payload)) { + payload = responseBody; + } + } else if (mapperType.match(/^Boolean$/i) !== null) { + if (responseBody === "true") { + payload = true; + } else if (responseBody === "false") { + payload = false; + } else { + payload = responseBody; + } + } else if (mapperType.match(/^(String|Enum|Object|Stream|Uuid|TimeSpan|any)$/i) !== null) { + payload = responseBody; + } else if (mapperType.match(/^(Date|DateTime|DateTimeRfc1123)$/i) !== null) { + payload = new Date(responseBody); + } else if (mapperType.match(/^UnixTime$/i) !== null) { + payload = unixTimeToDate(responseBody); + } else if (mapperType.match(/^ByteArray$/i) !== null) { + payload = base64.decodeString(responseBody); + } else if (mapperType.match(/^Base64Url$/i) !== null) { + payload = base64UrlToByteArray(responseBody); + } else if (mapperType.match(/^Sequence$/i) !== null) { + payload = deserializeSequenceType(this, mapper, responseBody, objectName, updatedOptions); + } else if (mapperType.match(/^Dictionary$/i) !== null) { + payload = deserializeDictionaryType(this, mapper, responseBody, objectName, updatedOptions); + } + } + if (mapper.isConstant) { + payload = mapper.defaultValue; + } + return payload; } }; - exports2.CredentialPolicy = CredentialPolicy; - } -}); - -// node_modules/@azure/storage-blob/dist/commonjs/utils/SharedKeyComparator.js -var require_SharedKeyComparator = __commonJS({ - "node_modules/@azure/storage-blob/dist/commonjs/utils/SharedKeyComparator.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.compareHeader = compareHeader; - var table_lv0 = new Uint32Array([ - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 1820, - 0, - 1823, - 1825, - 1827, - 1829, - 0, - 0, - 0, - 1837, - 2051, - 0, - 0, - 1843, - 0, - 3331, - 3354, - 3356, - 3358, - 3360, - 3362, - 3364, - 3366, - 3368, - 3370, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 3586, - 3593, - 3594, - 3610, - 3617, - 3619, - 3621, - 3628, - 3634, - 3637, - 3638, - 3656, - 3665, - 3696, - 3708, - 3710, - 3721, - 3722, - 3729, - 3737, - 3743, - 3746, - 3748, - 3750, - 3751, - 3753, - 0, - 0, - 0, - 1859, - 1860, - 1864, - 3586, - 3593, - 3594, - 3610, - 3617, - 3619, - 3621, - 3628, - 3634, - 3637, - 3638, - 3656, - 3665, - 3696, - 3708, - 3710, - 3721, - 3722, - 3729, - 3737, - 3743, - 3746, - 3748, - 3750, - 3751, - 3753, - 0, - 1868, - 0, - 1872, - 0 - ]); - var table_lv2 = new Uint32Array([ - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 
0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0 - ]); - var table_lv4 = new Uint32Array([ - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 32786, - 0, - 0, - 0, - 0, - 0, - 33298, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0 - ]); - function compareHeader(lhs, rhs) { - if (isLessThan(lhs, rhs)) - return -1; - return 1; + function createSerializer(modelMappers = {}, isXML = false) { + return new SerializerImpl(modelMappers, isXML); } - function isLessThan(lhs, rhs) { - const tables = [table_lv0, table_lv2, table_lv4]; - let curr_level = 0; - let i = 0; - let j = 0; - while (curr_level < tables.length) { - if (curr_level === tables.length - 1 && i !== j) { - return i > j; + function trimEnd(str2, ch) { + let len = str2.length; + while (len - 1 >= 0 && str2[len - 1] === ch) { + --len; + } + return str2.substr(0, len); + } + function bufferToBase64Url(buffer) { + if (!buffer) { + return void 0; + } + if (!(buffer instanceof Uint8Array)) { + throw new Error(`Please provide an input of type Uint8Array for converting to Base64Url.`); + } + const str2 = base64.encodeByteArray(buffer); + return trimEnd(str2, "=").replace(/\+/g, "-").replace(/\//g, "_"); + } + function base64UrlToByteArray(str2) { + if (!str2) { + return void 0; + } + if (str2 && typeof str2.valueOf() !== "string") { + throw new Error("Please provide an input of type string for converting to Uint8Array"); + } + str2 = str2.replace(/-/g, "+").replace(/_/g, "/"); + return base64.decodeString(str2); + } + function splitSerializeName(prop) { + const classes = []; + let partialclass = ""; + if (prop) { + const subwords = prop.split("."); + for (const item of subwords) { + if (item.charAt(item.length - 1) === "\\") { + partialclass += item.substr(0, item.length - 1) + "."; + } else { + partialclass += item; + classes.push(partialclass); + partialclass = ""; + } } - const weight1 = i < lhs.length ? tables[curr_level][lhs[i].charCodeAt(0)] : 1; - const weight2 = j < rhs.length ? 
tables[curr_level][rhs[j].charCodeAt(0)] : 1; - if (weight1 === 1 && weight2 === 1) { - i = 0; - j = 0; - ++curr_level; - } else if (weight1 === weight2) { - ++i; - ++j; - } else if (weight1 === 0) { - ++i; - } else if (weight2 === 0) { - ++j; - } else { - return weight1 < weight2; + } + return classes; + } + function dateToUnixTime(d) { + if (!d) { + return void 0; + } + if (typeof d.valueOf() === "string") { + d = new Date(d); + } + return Math.floor(d.getTime() / 1e3); + } + function unixTimeToDate(n) { + if (!n) { + return void 0; + } + return new Date(n * 1e3); + } + function serializeBasicTypes(typeName, objectName, value) { + if (value !== null && value !== void 0) { + if (typeName.match(/^Number$/i) !== null) { + if (typeof value !== "number") { + throw new Error(`${objectName} with value ${value} must be of type number.`); + } + } else if (typeName.match(/^String$/i) !== null) { + if (typeof value.valueOf() !== "string") { + throw new Error(`${objectName} with value "${value}" must be of type string.`); + } + } else if (typeName.match(/^Uuid$/i) !== null) { + if (!(typeof value.valueOf() === "string" && (0, utils_js_1.isValidUuid)(value))) { + throw new Error(`${objectName} with value "${value}" must be of type string and a valid uuid.`); + } + } else if (typeName.match(/^Boolean$/i) !== null) { + if (typeof value !== "boolean") { + throw new Error(`${objectName} with value ${value} must be of type boolean.`); + } + } else if (typeName.match(/^Stream$/i) !== null) { + const objectType = typeof value; + if (objectType !== "string" && typeof value.pipe !== "function" && // NodeJS.ReadableStream + typeof value.tee !== "function" && // browser ReadableStream + !(value instanceof ArrayBuffer) && !ArrayBuffer.isView(value) && // File objects count as a type of Blob, so we want to use instanceof explicitly + !((typeof Blob === "function" || typeof Blob === "object") && value instanceof Blob) && objectType !== "function") { + throw new Error(`${objectName} must be a string, Blob, ArrayBuffer, ArrayBufferView, ReadableStream, or () => ReadableStream.`); + } } } - return false; + return value; } - } -}); - -// node_modules/@azure/storage-blob/dist/commonjs/policies/StorageSharedKeyCredentialPolicy.js -var require_StorageSharedKeyCredentialPolicy = __commonJS({ - "node_modules/@azure/storage-blob/dist/commonjs/policies/StorageSharedKeyCredentialPolicy.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.StorageSharedKeyCredentialPolicy = void 0; - var constants_js_1 = require_constants15(); - var utils_common_js_1 = require_utils_common(); - var CredentialPolicy_js_1 = require_CredentialPolicy(); - var SharedKeyComparator_js_1 = require_SharedKeyComparator(); - var StorageSharedKeyCredentialPolicy = class extends CredentialPolicy_js_1.CredentialPolicy { - /** - * Reference to StorageSharedKeyCredential which generates StorageSharedKeyCredentialPolicy - */ - factory; - /** - * Creates an instance of StorageSharedKeyCredentialPolicy. - * @param nextPolicy - - * @param options - - * @param factory - - */ - constructor(nextPolicy, options, factory) { - super(nextPolicy, options); - this.factory = factory; + function serializeEnumType(objectName, allowedValues, value) { + if (!allowedValues) { + throw new Error(`Please provide a set of allowedValues to validate ${objectName} as an Enum Type.`); } - /** - * Signs request. 
- * - * @param request - - */ - signRequest(request2) { - request2.headers.set(constants_js_1.HeaderConstants.X_MS_DATE, (/* @__PURE__ */ new Date()).toUTCString()); - if (request2.body && (typeof request2.body === "string" || request2.body !== void 0) && request2.body.length > 0) { - request2.headers.set(constants_js_1.HeaderConstants.CONTENT_LENGTH, Buffer.byteLength(request2.body)); + const isPresent = allowedValues.some((item) => { + if (typeof item.valueOf() === "string") { + return item.toLowerCase() === value.toLowerCase(); } - const stringToSign = [ - request2.method.toUpperCase(), - this.getHeaderValueToSign(request2, constants_js_1.HeaderConstants.CONTENT_LANGUAGE), - this.getHeaderValueToSign(request2, constants_js_1.HeaderConstants.CONTENT_ENCODING), - this.getHeaderValueToSign(request2, constants_js_1.HeaderConstants.CONTENT_LENGTH), - this.getHeaderValueToSign(request2, constants_js_1.HeaderConstants.CONTENT_MD5), - this.getHeaderValueToSign(request2, constants_js_1.HeaderConstants.CONTENT_TYPE), - this.getHeaderValueToSign(request2, constants_js_1.HeaderConstants.DATE), - this.getHeaderValueToSign(request2, constants_js_1.HeaderConstants.IF_MODIFIED_SINCE), - this.getHeaderValueToSign(request2, constants_js_1.HeaderConstants.IF_MATCH), - this.getHeaderValueToSign(request2, constants_js_1.HeaderConstants.IF_NONE_MATCH), - this.getHeaderValueToSign(request2, constants_js_1.HeaderConstants.IF_UNMODIFIED_SINCE), - this.getHeaderValueToSign(request2, constants_js_1.HeaderConstants.RANGE) - ].join("\n") + "\n" + this.getCanonicalizedHeadersString(request2) + this.getCanonicalizedResourceString(request2); - const signature = this.factory.computeHMACSHA256(stringToSign); - request2.headers.set(constants_js_1.HeaderConstants.AUTHORIZATION, `SharedKey ${this.factory.accountName}:${signature}`); - return request2; + return item === value; + }); + if (!isPresent) { + throw new Error(`${value} is not a valid value for ${objectName}. The valid values are: ${JSON.stringify(allowedValues)}.`); } - /** - * Retrieve header value according to shared key sign rules. - * @see https://learn.microsoft.com/rest/api/storageservices/authenticate-with-shared-key - * - * @param request - - * @param headerName - - */ - getHeaderValueToSign(request2, headerName) { - const value = request2.headers.get(headerName); - if (!value) { - return ""; + return value; + } + function serializeByteArrayType(objectName, value) { + if (value !== void 0 && value !== null) { + if (!(value instanceof Uint8Array)) { + throw new Error(`${objectName} must be of type Uint8Array.`); } - if (headerName === constants_js_1.HeaderConstants.CONTENT_LENGTH && value === "0") { - return ""; + value = base64.encodeByteArray(value); + } + return value; + } + function serializeBase64UrlType(objectName, value) { + if (value !== void 0 && value !== null) { + if (!(value instanceof Uint8Array)) { + throw new Error(`${objectName} must be of type Uint8Array.`); } - return value; + value = bufferToBase64Url(value); } - /** - * To construct the CanonicalizedHeaders portion of the signature string, follow these steps: - * 1. Retrieve all headers for the resource that begin with x-ms-, including the x-ms-date header. - * 2. Convert each HTTP header name to lowercase. - * 3. Sort the headers lexicographically by header name, in ascending order. - * Each header may appear only once in the string. - * 4. Replace any linear whitespace in the header value with a single space. - * 5. Trim any whitespace around the colon in the header. - * 6. 
Finally, append a new-line character to each canonicalized header in the resulting list. - * Construct the CanonicalizedHeaders string by concatenating all headers in this list into a single string. - * - * @param request - - */ - getCanonicalizedHeadersString(request2) { - let headersArray = request2.headers.headersArray().filter((value) => { - return value.name.toLowerCase().startsWith(constants_js_1.HeaderConstants.PREFIX_FOR_STORAGE); - }); - headersArray.sort((a, b) => { - return (0, SharedKeyComparator_js_1.compareHeader)(a.name.toLowerCase(), b.name.toLowerCase()); - }); - headersArray = headersArray.filter((value, index, array) => { - if (index > 0 && value.name.toLowerCase() === array[index - 1].name.toLowerCase()) { - return false; + return value; + } + function serializeDateTypes(typeName, value, objectName) { + if (value !== void 0 && value !== null) { + if (typeName.match(/^Date$/i) !== null) { + if (!(value instanceof Date || typeof value.valueOf() === "string" && !isNaN(Date.parse(value)))) { + throw new Error(`${objectName} must be an instanceof Date or a string in ISO8601 format.`); } - return true; - }); - let canonicalizedHeadersStringToSign = ""; - headersArray.forEach((header) => { - canonicalizedHeadersStringToSign += `${header.name.toLowerCase().trimRight()}:${header.value.trimLeft()} -`; - }); - return canonicalizedHeadersStringToSign; - } - /** - * Retrieves the webResource canonicalized resource string. - * - * @param request - - */ - getCanonicalizedResourceString(request2) { - const path13 = (0, utils_common_js_1.getURLPath)(request2.url) || "/"; - let canonicalizedResourceString = ""; - canonicalizedResourceString += `/${this.factory.accountName}${path13}`; - const queries = (0, utils_common_js_1.getURLQueries)(request2.url); - const lowercaseQueries = {}; - if (queries) { - const queryKeys = []; - for (const key in queries) { - if (Object.prototype.hasOwnProperty.call(queries, key)) { - const lowercaseKey = key.toLowerCase(); - lowercaseQueries[lowercaseKey] = queries[key]; - queryKeys.push(lowercaseKey); - } + value = value instanceof Date ? value.toISOString().substring(0, 10) : new Date(value).toISOString().substring(0, 10); + } else if (typeName.match(/^DateTime$/i) !== null) { + if (!(value instanceof Date || typeof value.valueOf() === "string" && !isNaN(Date.parse(value)))) { + throw new Error(`${objectName} must be an instanceof Date or a string in ISO8601 format.`); } - queryKeys.sort(); - for (const key of queryKeys) { - canonicalizedResourceString += ` -${key}:${decodeURIComponent(lowercaseQueries[key])}`; + value = value instanceof Date ? value.toISOString() : new Date(value).toISOString(); + } else if (typeName.match(/^DateTimeRfc1123$/i) !== null) { + if (!(value instanceof Date || typeof value.valueOf() === "string" && !isNaN(Date.parse(value)))) { + throw new Error(`${objectName} must be an instanceof Date or a string in RFC-1123 format.`); + } + value = value instanceof Date ? value.toUTCString() : new Date(value).toUTCString(); + } else if (typeName.match(/^UnixTime$/i) !== null) { + if (!(value instanceof Date || typeof value.valueOf() === "string" && !isNaN(Date.parse(value)))) { + throw new Error(`${objectName} must be an instanceof Date or a string in RFC-1123/ISO8601 format for it to be serialized in UnixTime/Epoch format.`); + } + value = dateToUnixTime(value); + } else if (typeName.match(/^TimeSpan$/i) !== null) { + if (!(0, utils_js_1.isDuration)(value)) { + throw new Error(`${objectName} must be a string in ISO 8601 format. 
Instead was "${value}".`); } } - return canonicalizedResourceString; } - }; - exports2.StorageSharedKeyCredentialPolicy = StorageSharedKeyCredentialPolicy; - } -}); - -// node_modules/@azure/storage-blob/dist/commonjs/credentials/Credential.js -var require_Credential = __commonJS({ - "node_modules/@azure/storage-blob/dist/commonjs/credentials/Credential.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.Credential = void 0; - var Credential = class { - /** - * Creates a RequestPolicy object. - * - * @param _nextPolicy - - * @param _options - - */ - create(_nextPolicy, _options) { - throw new Error("Method should be implemented in children classes."); + return value; + } + function serializeSequenceType(serializer, mapper, object, objectName, isXml, options) { + if (!Array.isArray(object)) { + throw new Error(`${objectName} must be of type Array.`); } - }; - exports2.Credential = Credential; - } -}); - -// node_modules/@azure/storage-blob/dist/commonjs/credentials/StorageSharedKeyCredential.js -var require_StorageSharedKeyCredential = __commonJS({ - "node_modules/@azure/storage-blob/dist/commonjs/credentials/StorageSharedKeyCredential.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.StorageSharedKeyCredential = void 0; - var node_crypto_1 = require("node:crypto"); - var StorageSharedKeyCredentialPolicy_js_1 = require_StorageSharedKeyCredentialPolicy(); - var Credential_js_1 = require_Credential(); - var StorageSharedKeyCredential = class extends Credential_js_1.Credential { - /** - * Azure Storage account name; readonly. - */ - accountName; - /** - * Azure Storage account key; readonly. - */ - accountKey; - /** - * Creates an instance of StorageSharedKeyCredential. - * @param accountName - - * @param accountKey - - */ - constructor(accountName, accountKey) { - super(); - this.accountName = accountName; - this.accountKey = Buffer.from(accountKey, "base64"); + let elementType = mapper.type.element; + if (!elementType || typeof elementType !== "object") { + throw new Error(`element" metadata for an Array must be defined in the mapper and it must of type "object" in ${objectName}.`); } - /** - * Creates a StorageSharedKeyCredentialPolicy object. - * - * @param nextPolicy - - * @param options - - */ - create(nextPolicy, options) { - return new StorageSharedKeyCredentialPolicy_js_1.StorageSharedKeyCredentialPolicy(nextPolicy, options, this); + if (elementType.type.name === "Composite" && elementType.type.className) { + elementType = serializer.modelMappers[elementType.type.className] ?? elementType; } - /** - * Generates a hash signature for an HTTP request or for a SAS. - * - * @param stringToSign - - */ - computeHMACSHA256(stringToSign) { - return (0, node_crypto_1.createHmac)("sha256", this.accountKey).update(stringToSign, "utf8").digest("base64"); + const tempArray = []; + for (let i = 0; i < object.length; i++) { + const serializedValue = serializer.serialize(elementType, object[i], objectName, options); + if (isXml && elementType.xmlNamespace) { + const xmlnsKey = elementType.xmlNamespacePrefix ? 
`xmlns:${elementType.xmlNamespacePrefix}` : "xmlns"; + if (elementType.type.name === "Composite") { + tempArray[i] = { ...serializedValue }; + tempArray[i][interfaces_js_1.XML_ATTRKEY] = { [xmlnsKey]: elementType.xmlNamespace }; + } else { + tempArray[i] = {}; + tempArray[i][options.xml.xmlCharKey] = serializedValue; + tempArray[i][interfaces_js_1.XML_ATTRKEY] = { [xmlnsKey]: elementType.xmlNamespace }; + } + } else { + tempArray[i] = serializedValue; + } } - }; - exports2.StorageSharedKeyCredential = StorageSharedKeyCredential; - } -}); - -// node_modules/@azure/storage-blob/dist/commonjs/policies/AnonymousCredentialPolicy.js -var require_AnonymousCredentialPolicy = __commonJS({ - "node_modules/@azure/storage-blob/dist/commonjs/policies/AnonymousCredentialPolicy.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.AnonymousCredentialPolicy = void 0; - var CredentialPolicy_js_1 = require_CredentialPolicy(); - var AnonymousCredentialPolicy = class extends CredentialPolicy_js_1.CredentialPolicy { - /** - * Creates an instance of AnonymousCredentialPolicy. - * @param nextPolicy - - * @param options - - */ - // The base class has a protected constructor. Adding a public one to enable constructing of this class. - /* eslint-disable-next-line @typescript-eslint/no-useless-constructor*/ - constructor(nextPolicy, options) { - super(nextPolicy, options); + return tempArray; + } + function serializeDictionaryType(serializer, mapper, object, objectName, isXml, options) { + if (typeof object !== "object") { + throw new Error(`${objectName} must be of type object.`); } - }; - exports2.AnonymousCredentialPolicy = AnonymousCredentialPolicy; - } -}); - -// node_modules/@azure/storage-blob/dist/commonjs/credentials/AnonymousCredential.js -var require_AnonymousCredential = __commonJS({ - "node_modules/@azure/storage-blob/dist/commonjs/credentials/AnonymousCredential.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.AnonymousCredential = void 0; - var AnonymousCredentialPolicy_js_1 = require_AnonymousCredentialPolicy(); - var Credential_js_1 = require_Credential(); - var AnonymousCredential = class extends Credential_js_1.Credential { - /** - * Creates an {@link AnonymousCredentialPolicy} object. - * - * @param nextPolicy - - * @param options - - */ - create(nextPolicy, options) { - return new AnonymousCredentialPolicy_js_1.AnonymousCredentialPolicy(nextPolicy, options); + const valueType = mapper.type.value; + if (!valueType || typeof valueType !== "object") { + throw new Error(`"value" metadata for a Dictionary must be defined in the mapper and it must of type "object" in ${objectName}.`); } - }; - exports2.AnonymousCredential = AnonymousCredential; - } -}); - -// node_modules/@azure/storage-common/dist/commonjs/BuffersStream.js -var require_BuffersStream = __commonJS({ - "node_modules/@azure/storage-common/dist/commonjs/BuffersStream.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.BuffersStream = void 0; - var node_stream_1 = require("node:stream"); - var BuffersStream = class extends node_stream_1.Readable { - buffers; - byteLength; - /** - * The offset of data to be read in the current buffer. - */ - byteOffsetInCurrentBuffer; - /** - * The index of buffer to be read in the array of buffers. - */ - bufferIndex; - /** - * The total length of data already read. 
- */ - pushedBytesLength; - /** - * Creates an instance of BuffersStream that will emit the data - * contained in the array of buffers. - * - * @param buffers - Array of buffers containing the data - * @param byteLength - The total length of data contained in the buffers - */ - constructor(buffers, byteLength, options) { - super(options); - this.buffers = buffers; - this.byteLength = byteLength; - this.byteOffsetInCurrentBuffer = 0; - this.bufferIndex = 0; - this.pushedBytesLength = 0; - let buffersLength = 0; - for (const buf of this.buffers) { - buffersLength += buf.byteLength; + const tempDictionary = {}; + for (const key of Object.keys(object)) { + const serializedValue = serializer.serialize(valueType, object[key], objectName, options); + tempDictionary[key] = getXmlObjectValue(valueType, serializedValue, isXml, options); + } + if (isXml && mapper.xmlNamespace) { + const xmlnsKey = mapper.xmlNamespacePrefix ? `xmlns:${mapper.xmlNamespacePrefix}` : "xmlns"; + const result = tempDictionary; + result[interfaces_js_1.XML_ATTRKEY] = { [xmlnsKey]: mapper.xmlNamespace }; + return result; + } + return tempDictionary; + } + function resolveAdditionalProperties(serializer, mapper, objectName) { + const additionalProperties = mapper.type.additionalProperties; + if (!additionalProperties && mapper.type.className) { + const modelMapper = resolveReferencedMapper(serializer, mapper, objectName); + return modelMapper?.type.additionalProperties; + } + return additionalProperties; + } + function resolveReferencedMapper(serializer, mapper, objectName) { + const className = mapper.type.className; + if (!className) { + throw new Error(`Class name for model "${objectName}" is not provided in the mapper "${JSON.stringify(mapper, void 0, 2)}".`); + } + return serializer.modelMappers[className]; + } + function resolveModelProperties(serializer, mapper, objectName) { + let modelProps = mapper.type.modelProperties; + if (!modelProps) { + const modelMapper = resolveReferencedMapper(serializer, mapper, objectName); + if (!modelMapper) { + throw new Error(`mapper() cannot be null or undefined for model "${mapper.type.className}".`); } - if (buffersLength < this.byteLength) { - throw new Error("Data size shouldn't be larger than the total length of buffers."); + modelProps = modelMapper?.type.modelProperties; + if (!modelProps) { + throw new Error(`modelProperties cannot be null or undefined in the mapper "${JSON.stringify(modelMapper)}" of type "${mapper.type.className}" for object "${objectName}".`); } } - /** - * Internal _read() that will be called when the stream wants to pull more data in. - * - * @param size - Optional. 
The size of data to be read - */ - _read(size) { - if (this.pushedBytesLength >= this.byteLength) { - this.push(null); + return modelProps; + } + function serializeCompositeType(serializer, mapper, object, objectName, isXml, options) { + if (getPolymorphicDiscriminatorRecursively(serializer, mapper)) { + mapper = getPolymorphicMapper(serializer, mapper, object, "clientName"); + } + if (object !== void 0 && object !== null) { + const payload = {}; + const modelProps = resolveModelProperties(serializer, mapper, objectName); + for (const key of Object.keys(modelProps)) { + const propertyMapper = modelProps[key]; + if (propertyMapper.readOnly) { + continue; + } + let propName; + let parentObject = payload; + if (serializer.isXML) { + if (propertyMapper.xmlIsWrapped) { + propName = propertyMapper.xmlName; + } else { + propName = propertyMapper.xmlElementName || propertyMapper.xmlName; + } + } else { + const paths = splitSerializeName(propertyMapper.serializedName); + propName = paths.pop(); + for (const pathName of paths) { + const childObject = parentObject[pathName]; + if ((childObject === void 0 || childObject === null) && (object[key] !== void 0 && object[key] !== null || propertyMapper.defaultValue !== void 0)) { + parentObject[pathName] = {}; + } + parentObject = parentObject[pathName]; + } + } + if (parentObject !== void 0 && parentObject !== null) { + if (isXml && mapper.xmlNamespace) { + const xmlnsKey = mapper.xmlNamespacePrefix ? `xmlns:${mapper.xmlNamespacePrefix}` : "xmlns"; + parentObject[interfaces_js_1.XML_ATTRKEY] = { + ...parentObject[interfaces_js_1.XML_ATTRKEY], + [xmlnsKey]: mapper.xmlNamespace + }; + } + const propertyObjectName = propertyMapper.serializedName !== "" ? objectName + "." + propertyMapper.serializedName : objectName; + let toSerialize = object[key]; + const polymorphicDiscriminator = getPolymorphicDiscriminatorRecursively(serializer, mapper); + if (polymorphicDiscriminator && polymorphicDiscriminator.clientName === key && (toSerialize === void 0 || toSerialize === null)) { + toSerialize = mapper.serializedName; + } + const serializedValue = serializer.serialize(propertyMapper, toSerialize, propertyObjectName, options); + if (serializedValue !== void 0 && propName !== void 0 && propName !== null) { + const value = getXmlObjectValue(propertyMapper, serializedValue, isXml, options); + if (isXml && propertyMapper.xmlIsAttribute) { + parentObject[interfaces_js_1.XML_ATTRKEY] = parentObject[interfaces_js_1.XML_ATTRKEY] || {}; + parentObject[interfaces_js_1.XML_ATTRKEY][propName] = serializedValue; + } else if (isXml && propertyMapper.xmlIsWrapped) { + parentObject[propName] = { [propertyMapper.xmlElementName]: value }; + } else { + parentObject[propName] = value; + } + } + } } - if (!size) { - size = this.readableHighWaterMark; + const additionalPropertiesMapper = resolveAdditionalProperties(serializer, mapper, objectName); + if (additionalPropertiesMapper) { + const propNames = Object.keys(modelProps); + for (const clientPropName in object) { + const isAdditionalProperty = propNames.every((pn) => pn !== clientPropName); + if (isAdditionalProperty) { + payload[clientPropName] = serializer.serialize(additionalPropertiesMapper, object[clientPropName], objectName + '["' + clientPropName + '"]', options); + } + } } - const outBuffers = []; - let i = 0; - while (i < size && this.pushedBytesLength < this.byteLength) { - const remainingDataInAllBuffers = this.byteLength - this.pushedBytesLength; - const remainingCapacityInThisBuffer = 
this.buffers[this.bufferIndex].byteLength - this.byteOffsetInCurrentBuffer; - const remaining = Math.min(remainingCapacityInThisBuffer, remainingDataInAllBuffers); - if (remaining > size - i) { - const end = this.byteOffsetInCurrentBuffer + size - i; - outBuffers.push(this.buffers[this.bufferIndex].slice(this.byteOffsetInCurrentBuffer, end)); - this.pushedBytesLength += size - i; - this.byteOffsetInCurrentBuffer = end; - i = size; - break; + return payload; + } + return object; + } + function getXmlObjectValue(propertyMapper, serializedValue, isXml, options) { + if (!isXml || !propertyMapper.xmlNamespace) { + return serializedValue; + } + const xmlnsKey = propertyMapper.xmlNamespacePrefix ? `xmlns:${propertyMapper.xmlNamespacePrefix}` : "xmlns"; + const xmlNamespace = { [xmlnsKey]: propertyMapper.xmlNamespace }; + if (["Composite"].includes(propertyMapper.type.name)) { + if (serializedValue[interfaces_js_1.XML_ATTRKEY]) { + return serializedValue; + } else { + const result2 = { ...serializedValue }; + result2[interfaces_js_1.XML_ATTRKEY] = xmlNamespace; + return result2; + } + } + const result = {}; + result[options.xml.xmlCharKey] = serializedValue; + result[interfaces_js_1.XML_ATTRKEY] = xmlNamespace; + return result; + } + function isSpecialXmlProperty(propertyName, options) { + return [interfaces_js_1.XML_ATTRKEY, options.xml.xmlCharKey].includes(propertyName); + } + function deserializeCompositeType(serializer, mapper, responseBody, objectName, options) { + const xmlCharKey = options.xml.xmlCharKey ?? interfaces_js_1.XML_CHARKEY; + if (getPolymorphicDiscriminatorRecursively(serializer, mapper)) { + mapper = getPolymorphicMapper(serializer, mapper, responseBody, "serializedName"); + } + const modelProps = resolveModelProperties(serializer, mapper, objectName); + let instance = {}; + const handledPropertyNames = []; + for (const key of Object.keys(modelProps)) { + const propertyMapper = modelProps[key]; + const paths = splitSerializeName(modelProps[key].serializedName); + handledPropertyNames.push(paths[0]); + const { serializedName, xmlName, xmlElementName } = propertyMapper; + let propertyObjectName = objectName; + if (serializedName !== "" && serializedName !== void 0) { + propertyObjectName = objectName + "." 
+ serializedName; + } + const headerCollectionPrefix = propertyMapper.headerCollectionPrefix; + if (headerCollectionPrefix) { + const dictionary = {}; + for (const headerKey of Object.keys(responseBody)) { + if (headerKey.startsWith(headerCollectionPrefix)) { + dictionary[headerKey.substring(headerCollectionPrefix.length)] = serializer.deserialize(propertyMapper.type.value, responseBody[headerKey], propertyObjectName, options); + } + handledPropertyNames.push(headerKey); + } + instance[key] = dictionary; + } else if (serializer.isXML) { + if (propertyMapper.xmlIsAttribute && responseBody[interfaces_js_1.XML_ATTRKEY]) { + instance[key] = serializer.deserialize(propertyMapper, responseBody[interfaces_js_1.XML_ATTRKEY][xmlName], propertyObjectName, options); + } else if (propertyMapper.xmlIsMsText) { + if (responseBody[xmlCharKey] !== void 0) { + instance[key] = responseBody[xmlCharKey]; + } else if (typeof responseBody === "string") { + instance[key] = responseBody; + } } else { - const end = this.byteOffsetInCurrentBuffer + remaining; - outBuffers.push(this.buffers[this.bufferIndex].slice(this.byteOffsetInCurrentBuffer, end)); - if (remaining === remainingCapacityInThisBuffer) { - this.byteOffsetInCurrentBuffer = 0; - this.bufferIndex++; + const propertyName = xmlElementName || xmlName || serializedName; + if (propertyMapper.xmlIsWrapped) { + const wrapped = responseBody[xmlName]; + const elementList = wrapped?.[xmlElementName] ?? []; + instance[key] = serializer.deserialize(propertyMapper, elementList, propertyObjectName, options); + handledPropertyNames.push(xmlName); } else { - this.byteOffsetInCurrentBuffer = end; + const property = responseBody[propertyName]; + instance[key] = serializer.deserialize(propertyMapper, property, propertyObjectName, options); + handledPropertyNames.push(propertyName); } - this.pushedBytesLength += remaining; - i += remaining; + } + } else { + let propertyInstance; + let res = responseBody; + let steps = 0; + for (const item of paths) { + if (!res) + break; + steps++; + res = res[item]; + } + if (res === null && steps < paths.length) { + res = void 0; + } + propertyInstance = res; + const polymorphicDiscriminator = mapper.type.polymorphicDiscriminator; + if (polymorphicDiscriminator && key === polymorphicDiscriminator.clientName && (propertyInstance === void 0 || propertyInstance === null)) { + propertyInstance = mapper.serializedName; + } + let serializedValue; + if (Array.isArray(responseBody[key]) && modelProps[key].serializedName === "") { + propertyInstance = responseBody[key]; + const arrayInstance = serializer.deserialize(propertyMapper, propertyInstance, propertyObjectName, options); + for (const [k, v] of Object.entries(instance)) { + if (!Object.prototype.hasOwnProperty.call(arrayInstance, k)) { + arrayInstance[k] = v; + } + } + instance = arrayInstance; + } else if (propertyInstance !== void 0 || propertyMapper.defaultValue !== void 0) { + serializedValue = serializer.deserialize(propertyMapper, propertyInstance, propertyObjectName, options); + instance[key] = serializedValue; } } - if (outBuffers.length > 1) { - this.push(Buffer.concat(outBuffers)); - } else if (outBuffers.length === 1) { - this.push(outBuffers[0]); + } + const additionalPropertiesMapper = mapper.type.additionalProperties; + if (additionalPropertiesMapper) { + const isAdditionalProperty = (responsePropName) => { + for (const clientPropName in modelProps) { + const paths = splitSerializeName(modelProps[clientPropName].serializedName); + if (paths[0] === responsePropName) { + 
return false; + } + } + return true; + }; + for (const responsePropName in responseBody) { + if (isAdditionalProperty(responsePropName)) { + instance[responsePropName] = serializer.deserialize(additionalPropertiesMapper, responseBody[responsePropName], objectName + '["' + responsePropName + '"]', options); + } + } + } else if (responseBody && !options.ignoreUnknownProperties) { + for (const key of Object.keys(responseBody)) { + if (instance[key] === void 0 && !handledPropertyNames.includes(key) && !isSpecialXmlProperty(key, options)) { + instance[key] = responseBody[key]; + } } } - }; - exports2.BuffersStream = BuffersStream; - } -}); - -// node_modules/@azure/storage-common/dist/commonjs/PooledBuffer.js -var require_PooledBuffer = __commonJS({ - "node_modules/@azure/storage-common/dist/commonjs/PooledBuffer.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.PooledBuffer = void 0; - var tslib_1 = (init_tslib_es6(), __toCommonJS(tslib_es6_exports)); - var BuffersStream_js_1 = require_BuffersStream(); - var node_buffer_1 = tslib_1.__importDefault(require("node:buffer")); - var maxBufferLength = node_buffer_1.default.constants.MAX_LENGTH; - var PooledBuffer = class { - /** - * Internal buffers used to keep the data. - * Each buffer has a length of the maxBufferLength except last one. - */ - buffers = []; - /** - * The total size of internal buffers. - */ - capacity; - /** - * The total size of data contained in internal buffers. - */ - _size; - /** - * The size of the data contained in the pooled buffers. - */ - get size() { - return this._size; + return instance; + } + function deserializeDictionaryType(serializer, mapper, responseBody, objectName, options) { + const value = mapper.type.value; + if (!value || typeof value !== "object") { + throw new Error(`"value" metadata for a Dictionary must be defined in the mapper and it must of type "object" in ${objectName}`); } - constructor(capacity, buffers, totalLength) { - this.capacity = capacity; - this._size = 0; - const bufferNum = Math.ceil(capacity / maxBufferLength); - for (let i = 0; i < bufferNum; i++) { - let len = i === bufferNum - 1 ? capacity % maxBufferLength : maxBufferLength; - if (len === 0) { - len = maxBufferLength; - } - this.buffers.push(Buffer.allocUnsafe(len)); + if (responseBody) { + const tempDictionary = {}; + for (const key of Object.keys(responseBody)) { + tempDictionary[key] = serializer.deserialize(value, responseBody[key], objectName, options); } - if (buffers) { - this.fill(buffers, totalLength); + return tempDictionary; + } + return responseBody; + } + function deserializeSequenceType(serializer, mapper, responseBody, objectName, options) { + let element = mapper.type.element; + if (!element || typeof element !== "object") { + throw new Error(`element" metadata for an Array must be defined in the mapper and it must of type "object" in ${objectName}`); + } + if (responseBody) { + if (!Array.isArray(responseBody)) { + responseBody = [responseBody]; + } + if (element.type.name === "Composite" && element.type.className) { + element = serializer.modelMappers[element.type.className] ?? element; + } + const tempArray = []; + for (let i = 0; i < responseBody.length; i++) { + tempArray[i] = serializer.deserialize(element, responseBody[i], `${objectName}[${i}]`, options); } + return tempArray; } - /** - * Fill the internal buffers with data in the input buffers serially - * with respect to the total length and the total capacity of the internal buffers. 
- * Data copied will be shift out of the input buffers. - * - * @param buffers - Input buffers containing the data to be filled in the pooled buffer - * @param totalLength - Total length of the data to be filled in. - * - */ - fill(buffers, totalLength) { - this._size = Math.min(this.capacity, totalLength); - let i = 0, j = 0, targetOffset = 0, sourceOffset = 0, totalCopiedNum = 0; - while (totalCopiedNum < this._size) { - const source = buffers[i]; - const target = this.buffers[j]; - const copiedNum = source.copy(target, targetOffset, sourceOffset); - totalCopiedNum += copiedNum; - sourceOffset += copiedNum; - targetOffset += copiedNum; - if (sourceOffset === source.length) { - i++; - sourceOffset = 0; - } - if (targetOffset === target.length) { - j++; - targetOffset = 0; + return responseBody; + } + function getIndexDiscriminator(discriminators, discriminatorValue, typeName) { + const typeNamesToCheck = [typeName]; + while (typeNamesToCheck.length) { + const currentName = typeNamesToCheck.shift(); + const indexDiscriminator = discriminatorValue === currentName ? discriminatorValue : currentName + "." + discriminatorValue; + if (Object.prototype.hasOwnProperty.call(discriminators, indexDiscriminator)) { + return discriminators[indexDiscriminator]; + } else { + for (const [name, mapper] of Object.entries(discriminators)) { + if (name.startsWith(currentName + ".") && mapper.type.uberParent === currentName && mapper.type.className) { + typeNamesToCheck.push(mapper.type.className); + } } } - buffers.splice(0, i); - if (buffers.length > 0) { - buffers[0] = buffers[0].slice(sourceOffset); - } } - /** - * Get the readable stream assembled from all the data in the internal buffers. - * - */ - getReadableStream() { - return new BuffersStream_js_1.BuffersStream(this.buffers, this.size); + return void 0; + } + function getPolymorphicMapper(serializer, mapper, object, polymorphicPropertyName) { + const polymorphicDiscriminator = getPolymorphicDiscriminatorRecursively(serializer, mapper); + if (polymorphicDiscriminator) { + let discriminatorName = polymorphicDiscriminator[polymorphicPropertyName]; + if (discriminatorName) { + if (polymorphicPropertyName === "serializedName") { + discriminatorName = discriminatorName.replace(/\\/gi, ""); + } + const discriminatorValue = object[discriminatorName]; + const typeName = mapper.type.uberParent ?? 
mapper.type.className; + if (typeof discriminatorValue === "string" && typeName) { + const polymorphicMapper = getIndexDiscriminator(serializer.modelMappers.discriminators, discriminatorValue, typeName); + if (polymorphicMapper) { + mapper = polymorphicMapper; + } + } + } } + return mapper; + } + function getPolymorphicDiscriminatorRecursively(serializer, mapper) { + return mapper.type.polymorphicDiscriminator || getPolymorphicDiscriminatorSafely(serializer, mapper.type.uberParent) || getPolymorphicDiscriminatorSafely(serializer, mapper.type.className); + } + function getPolymorphicDiscriminatorSafely(serializer, typeName) { + return typeName && serializer.modelMappers[typeName] && serializer.modelMappers[typeName].type.polymorphicDiscriminator; + } + exports2.MapperTypeNames = { + Base64Url: "Base64Url", + Boolean: "Boolean", + ByteArray: "ByteArray", + Composite: "Composite", + Date: "Date", + DateTime: "DateTime", + DateTimeRfc1123: "DateTimeRfc1123", + Dictionary: "Dictionary", + Enum: "Enum", + Number: "Number", + Object: "Object", + Sequence: "Sequence", + String: "String", + Stream: "Stream", + TimeSpan: "TimeSpan", + UnixTime: "UnixTime" }; - exports2.PooledBuffer = PooledBuffer; } }); -// node_modules/@azure/storage-common/dist/commonjs/BufferScheduler.js -var require_BufferScheduler = __commonJS({ - "node_modules/@azure/storage-common/dist/commonjs/BufferScheduler.js"(exports2) { +// node_modules/@azure/core-client/dist/commonjs/state.js +var require_state2 = __commonJS({ + "node_modules/@azure/core-client/dist/commonjs/state.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.BufferScheduler = void 0; - var events_1 = require("events"); - var PooledBuffer_js_1 = require_PooledBuffer(); - var BufferScheduler = class { - /** - * Size of buffers in incoming and outgoing queues. This class will try to align - * data read from Readable stream into buffer chunks with bufferSize defined. - */ - bufferSize; - /** - * How many buffers can be created or maintained. - */ - maxBuffers; - /** - * A Node.js Readable stream. - */ - readable; - /** - * OutgoingHandler is an async function triggered by BufferScheduler when there - * are available buffers in outgoing array. - */ - outgoingHandler; - /** - * An internal event emitter. - */ - emitter = new events_1.EventEmitter(); - /** - * Concurrency of executing outgoingHandlers. (0 lesser than concurrency lesser than or equal to maxBuffers) - */ - concurrency; - /** - * An internal offset marker to track data offset in bytes of next outgoingHandler. - */ - offset = 0; - /** - * An internal marker to track whether stream is end. - */ - isStreamEnd = false; - /** - * An internal marker to track whether stream or outgoingHandler returns error. - */ - isError = false; - /** - * How many handlers are executing. - */ - executingOutgoingHandlers = 0; - /** - * Encoding of the input Readable stream which has string data type instead of Buffer. - */ - encoding; - /** - * How many buffers have been allocated. - */ - numBuffers = 0; - /** - * Because this class doesn't know how much data every time stream pops, which - * is defined by highWaterMarker of the stream. So BufferScheduler will cache - * data received from the stream, when data in unresolvedDataArray exceeds the - * blockSize defined, it will try to concat a blockSize of buffer, fill into available - * buffers from incoming and push to outgoing array. - */ - unresolvedDataArray = []; - /** - * How much data consisted in unresolvedDataArray. 
- */ - unresolvedLength = 0; - /** - * The array includes all the available buffers can be used to fill data from stream. - */ - incoming = []; - /** - * The array (queue) includes all the buffers filled from stream data. - */ - outgoing = []; - /** - * Creates an instance of BufferScheduler. - * - * @param readable - A Node.js Readable stream - * @param bufferSize - Buffer size of every maintained buffer - * @param maxBuffers - How many buffers can be allocated - * @param outgoingHandler - An async function scheduled to be - * triggered when a buffer fully filled - * with stream data - * @param concurrency - Concurrency of executing outgoingHandlers (>0) - * @param encoding - [Optional] Encoding of Readable stream when it's a string stream - */ - constructor(readable, bufferSize, maxBuffers, outgoingHandler, concurrency, encoding) { - if (bufferSize <= 0) { - throw new RangeError(`bufferSize must be larger than 0, current is ${bufferSize}`); - } - if (maxBuffers <= 0) { - throw new RangeError(`maxBuffers must be larger than 0, current is ${maxBuffers}`); - } - if (concurrency <= 0) { - throw new RangeError(`concurrency must be larger than 0, current is ${concurrency}`); - } - this.bufferSize = bufferSize; - this.maxBuffers = maxBuffers; - this.readable = readable; - this.outgoingHandler = outgoingHandler; - this.concurrency = concurrency; - this.encoding = encoding; + exports2.state = void 0; + exports2.state = { + operationRequestMap: /* @__PURE__ */ new WeakMap() + }; + } +}); + +// node_modules/@azure/core-client/dist/commonjs/operationHelpers.js +var require_operationHelpers = __commonJS({ + "node_modules/@azure/core-client/dist/commonjs/operationHelpers.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.getOperationArgumentValueFromParameter = getOperationArgumentValueFromParameter; + exports2.getOperationRequestInfo = getOperationRequestInfo; + var state_js_1 = require_state2(); + function getOperationArgumentValueFromParameter(operationArguments, parameter, fallbackObject) { + let parameterPath = parameter.parameterPath; + const parameterMapper = parameter.mapper; + let value; + if (typeof parameterPath === "string") { + parameterPath = [parameterPath]; } - /** - * Start the scheduler, will return error when stream of any of the outgoingHandlers - * returns error. - * - */ - async do() { - return new Promise((resolve6, reject) => { - this.readable.on("data", (data) => { - data = typeof data === "string" ? 
Buffer.from(data, this.encoding) : data; - this.appendUnresolvedData(data); - if (!this.resolveData()) { - this.readable.pause(); + if (Array.isArray(parameterPath)) { + if (parameterPath.length > 0) { + if (parameterMapper.isConstant) { + value = parameterMapper.defaultValue; + } else { + let propertySearchResult = getPropertyFromParameterPath(operationArguments, parameterPath); + if (!propertySearchResult.propertyFound && fallbackObject) { + propertySearchResult = getPropertyFromParameterPath(fallbackObject, parameterPath); } - }); - this.readable.on("error", (err) => { - this.emitter.emit("error", err); - }); - this.readable.on("end", () => { - this.isStreamEnd = true; - this.emitter.emit("checkEnd"); - }); - this.emitter.on("error", (err) => { - this.isError = true; - this.readable.pause(); - reject(err); - }); - this.emitter.on("checkEnd", () => { - if (this.outgoing.length > 0) { - this.triggerOutgoingHandlers(); - return; + let useDefaultValue = false; + if (!propertySearchResult.propertyFound) { + useDefaultValue = parameterMapper.required || parameterPath[0] === "options" && parameterPath.length === 2; } - if (this.isStreamEnd && this.executingOutgoingHandlers === 0) { - if (this.unresolvedLength > 0 && this.unresolvedLength < this.bufferSize) { - const buffer = this.shiftBufferFromUnresolvedDataArray(); - this.outgoingHandler(() => buffer.getReadableStream(), buffer.size, this.offset).then(resolve6).catch(reject); - } else if (this.unresolvedLength >= this.bufferSize) { - return; - } else { - resolve6(); - } + value = useDefaultValue ? parameterMapper.defaultValue : propertySearchResult.propertyValue; + } + } + } else { + if (parameterMapper.required) { + value = {}; + } + for (const propertyName in parameterPath) { + const propertyMapper = parameterMapper.type.modelProperties[propertyName]; + const propertyPath = parameterPath[propertyName]; + const propertyValue = getOperationArgumentValueFromParameter(operationArguments, { + parameterPath: propertyPath, + mapper: propertyMapper + }, fallbackObject); + if (propertyValue !== void 0) { + if (!value) { + value = {}; } - }); - }); + value[propertyName] = propertyValue; + } + } } - /** - * Insert a new data into unresolved array. - * - * @param data - - */ - appendUnresolvedData(data) { - this.unresolvedDataArray.push(data); - this.unresolvedLength += data.length; + return value; + } + function getPropertyFromParameterPath(parent, parameterPath) { + const result = { propertyFound: false }; + let i = 0; + for (; i < parameterPath.length; ++i) { + const parameterPathPart = parameterPath[i]; + if (parent && parameterPathPart in parent) { + parent = parent[parameterPathPart]; + } else { + break; + } } - /** - * Try to shift a buffer with size in blockSize. The buffer returned may be less - * than blockSize when data in unresolvedDataArray is less than bufferSize. 
- * - */ - shiftBufferFromUnresolvedDataArray(buffer) { - if (!buffer) { - buffer = new PooledBuffer_js_1.PooledBuffer(this.bufferSize, this.unresolvedDataArray, this.unresolvedLength); + if (i === parameterPath.length) { + result.propertyValue = parent; + result.propertyFound = true; + } + return result; + } + var originalRequestSymbol = /* @__PURE__ */ Symbol.for("@azure/core-client original request"); + function hasOriginalRequest(request2) { + return originalRequestSymbol in request2; + } + function getOperationRequestInfo(request2) { + if (hasOriginalRequest(request2)) { + return getOperationRequestInfo(request2[originalRequestSymbol]); + } + let info6 = state_js_1.state.operationRequestMap.get(request2); + if (!info6) { + info6 = {}; + state_js_1.state.operationRequestMap.set(request2, info6); + } + return info6; + } + } +}); + +// node_modules/@azure/core-client/dist/commonjs/deserializationPolicy.js +var require_deserializationPolicy = __commonJS({ + "node_modules/@azure/core-client/dist/commonjs/deserializationPolicy.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.deserializationPolicyName = void 0; + exports2.deserializationPolicy = deserializationPolicy; + var interfaces_js_1 = require_interfaces(); + var core_rest_pipeline_1 = require_commonjs6(); + var serializer_js_1 = require_serializer(); + var operationHelpers_js_1 = require_operationHelpers(); + var defaultJsonContentTypes = ["application/json", "text/json"]; + var defaultXmlContentTypes = ["application/xml", "application/atom+xml"]; + exports2.deserializationPolicyName = "deserializationPolicy"; + function deserializationPolicy(options = {}) { + const jsonContentTypes = options.expectedContentTypes?.json ?? defaultJsonContentTypes; + const xmlContentTypes = options.expectedContentTypes?.xml ?? defaultXmlContentTypes; + const parseXML = options.parseXML; + const serializerOptions = options.serializerOptions; + const updatedOptions = { + xml: { + rootName: serializerOptions?.xml.rootName ?? "", + includeRoot: serializerOptions?.xml.includeRoot ?? false, + xmlCharKey: serializerOptions?.xml.xmlCharKey ?? interfaces_js_1.XML_CHARKEY + } + }; + return { + name: exports2.deserializationPolicyName, + async sendRequest(request2, next) { + const response = await next(request2); + return deserializeResponseBody(jsonContentTypes, xmlContentTypes, response, updatedOptions, parseXML); + } + }; + } + function getOperationResponseMap(parsedResponse) { + let result; + const request2 = parsedResponse.request; + const operationInfo = (0, operationHelpers_js_1.getOperationRequestInfo)(request2); + const operationSpec = operationInfo?.operationSpec; + if (operationSpec) { + if (!operationInfo?.operationResponseGetter) { + result = operationSpec.responses[parsedResponse.status]; } else { - buffer.fill(this.unresolvedDataArray, this.unresolvedLength); + result = operationInfo?.operationResponseGetter(operationSpec, parsedResponse); } - this.unresolvedLength -= buffer.size; - return buffer; } - /** - * Resolve data in unresolvedDataArray. For every buffer with size in blockSize - * shifted, it will try to get (or allocate a buffer) from incoming, and fill it, - * then push it into outgoing to be handled by outgoing handler. - * - * Return false when available buffers in incoming are not enough, else true. - * - * @returns Return false when buffers in incoming are not enough, else true. 
- */ - resolveData() { - while (this.unresolvedLength >= this.bufferSize) { - let buffer; - if (this.incoming.length > 0) { - buffer = this.incoming.shift(); - this.shiftBufferFromUnresolvedDataArray(buffer); - } else { - if (this.numBuffers < this.maxBuffers) { - buffer = this.shiftBufferFromUnresolvedDataArray(); - this.numBuffers++; - } else { - return false; - } + return result; + } + function shouldDeserializeResponse(parsedResponse) { + const request2 = parsedResponse.request; + const operationInfo = (0, operationHelpers_js_1.getOperationRequestInfo)(request2); + const shouldDeserialize = operationInfo?.shouldDeserialize; + let result; + if (shouldDeserialize === void 0) { + result = true; + } else if (typeof shouldDeserialize === "boolean") { + result = shouldDeserialize; + } else { + result = shouldDeserialize(parsedResponse); + } + return result; + } + async function deserializeResponseBody(jsonContentTypes, xmlContentTypes, response, options, parseXML) { + const parsedResponse = await parse2(jsonContentTypes, xmlContentTypes, response, options, parseXML); + if (!shouldDeserializeResponse(parsedResponse)) { + return parsedResponse; + } + const operationInfo = (0, operationHelpers_js_1.getOperationRequestInfo)(parsedResponse.request); + const operationSpec = operationInfo?.operationSpec; + if (!operationSpec || !operationSpec.responses) { + return parsedResponse; + } + const responseSpec = getOperationResponseMap(parsedResponse); + const { error: error3, shouldReturnResponse } = handleErrorResponse(parsedResponse, operationSpec, responseSpec, options); + if (error3) { + throw error3; + } else if (shouldReturnResponse) { + return parsedResponse; + } + if (responseSpec) { + if (responseSpec.bodyMapper) { + let valueToDeserialize = parsedResponse.parsedBody; + if (operationSpec.isXML && responseSpec.bodyMapper.type.name === serializer_js_1.MapperTypeNames.Sequence) { + valueToDeserialize = typeof valueToDeserialize === "object" ? valueToDeserialize[responseSpec.bodyMapper.xmlElementName] : []; } - this.outgoing.push(buffer); - this.triggerOutgoingHandlers(); + try { + parsedResponse.parsedBody = operationSpec.serializer.deserialize(responseSpec.bodyMapper, valueToDeserialize, "operationRes.parsedBody", options); + } catch (deserializeError) { + const restError = new core_rest_pipeline_1.RestError(`Error ${deserializeError} occurred in deserializing the responseBody - ${parsedResponse.bodyAsText}`, { + statusCode: parsedResponse.status, + request: parsedResponse.request, + response: parsedResponse + }); + throw restError; + } + } else if (operationSpec.httpMethod === "HEAD") { + parsedResponse.parsedBody = response.status >= 200 && response.status < 300; + } + if (responseSpec.headersMapper) { + parsedResponse.parsedHeaders = operationSpec.serializer.deserialize(responseSpec.headersMapper, parsedResponse.headers.toJSON(), "operationRes.parsedHeaders", { xml: {}, ignoreUnknownProperties: true }); } - return true; } - /** - * Try to trigger a outgoing handler for every buffer in outgoing. Stop when - * concurrency reaches. 
- */ - async triggerOutgoingHandlers() { - let buffer; - do { - if (this.executingOutgoingHandlers >= this.concurrency) { - return; + return parsedResponse; + } + function isOperationSpecEmpty(operationSpec) { + const expectedStatusCodes = Object.keys(operationSpec.responses); + return expectedStatusCodes.length === 0 || expectedStatusCodes.length === 1 && expectedStatusCodes[0] === "default"; + } + function handleErrorResponse(parsedResponse, operationSpec, responseSpec, options) { + const isSuccessByStatus = 200 <= parsedResponse.status && parsedResponse.status < 300; + const isExpectedStatusCode = isOperationSpecEmpty(operationSpec) ? isSuccessByStatus : !!responseSpec; + if (isExpectedStatusCode) { + if (responseSpec) { + if (!responseSpec.isError) { + return { error: null, shouldReturnResponse: false }; } - buffer = this.outgoing.shift(); - if (buffer) { - this.triggerOutgoingHandler(buffer); + } else { + return { error: null, shouldReturnResponse: false }; + } + } + const errorResponseSpec = responseSpec ?? operationSpec.responses.default; + const initialErrorMessage = parsedResponse.request.streamResponseStatusCodes?.has(parsedResponse.status) ? `Unexpected status code: ${parsedResponse.status}` : parsedResponse.bodyAsText; + const error3 = new core_rest_pipeline_1.RestError(initialErrorMessage, { + statusCode: parsedResponse.status, + request: parsedResponse.request, + response: parsedResponse + }); + if (!errorResponseSpec && !(parsedResponse.parsedBody?.error?.code && parsedResponse.parsedBody?.error?.message)) { + throw error3; + } + const defaultBodyMapper = errorResponseSpec?.bodyMapper; + const defaultHeadersMapper = errorResponseSpec?.headersMapper; + try { + if (parsedResponse.parsedBody) { + const parsedBody = parsedResponse.parsedBody; + let deserializedError; + if (defaultBodyMapper) { + let valueToDeserialize = parsedBody; + if (operationSpec.isXML && defaultBodyMapper.type.name === serializer_js_1.MapperTypeNames.Sequence) { + valueToDeserialize = []; + const elementName = defaultBodyMapper.xmlElementName; + if (typeof parsedBody === "object" && elementName) { + valueToDeserialize = parsedBody[elementName]; + } + } + deserializedError = operationSpec.serializer.deserialize(defaultBodyMapper, valueToDeserialize, "error.response.parsedBody", options); } - } while (buffer); + const internalError = parsedBody.error || deserializedError || parsedBody; + error3.code = internalError.code; + if (internalError.message) { + error3.message = internalError.message; + } + if (defaultBodyMapper) { + error3.response.parsedBody = deserializedError; + } + } + if (parsedResponse.headers && defaultHeadersMapper) { + error3.response.parsedHeaders = operationSpec.serializer.deserialize(defaultHeadersMapper, parsedResponse.headers.toJSON(), "operationRes.parsedHeaders"); + } + } catch (defaultError) { + error3.message = `Error "${defaultError.message}" occurred in deserializing the responseBody - "${parsedResponse.bodyAsText}" for the default response.`; } - /** - * Trigger a outgoing handler for a buffer shifted from outgoing. 
- * - * @param buffer - - */ - async triggerOutgoingHandler(buffer) { - const bufferLength = buffer.size; - this.executingOutgoingHandlers++; - this.offset += bufferLength; + return { error: error3, shouldReturnResponse: false }; + } + async function parse2(jsonContentTypes, xmlContentTypes, operationResponse, opts, parseXML) { + if (!operationResponse.request.streamResponseStatusCodes?.has(operationResponse.status) && operationResponse.bodyAsText) { + const text = operationResponse.bodyAsText; + const contentType = operationResponse.headers.get("Content-Type") || ""; + const contentComponents = !contentType ? [] : contentType.split(";").map((component) => component.toLowerCase()); try { - await this.outgoingHandler(() => buffer.getReadableStream(), bufferLength, this.offset - bufferLength); + if (contentComponents.length === 0 || contentComponents.some((component) => jsonContentTypes.indexOf(component) !== -1)) { + operationResponse.parsedBody = JSON.parse(text); + return operationResponse; + } else if (contentComponents.some((component) => xmlContentTypes.indexOf(component) !== -1)) { + if (!parseXML) { + throw new Error("Parsing XML not supported."); + } + const body = await parseXML(text, opts.xml); + operationResponse.parsedBody = body; + return operationResponse; + } } catch (err) { - this.emitter.emit("error", err); - return; - } - this.executingOutgoingHandlers--; - this.reuseBuffer(buffer); - this.emitter.emit("checkEnd"); - } - /** - * Return buffer used by outgoing handler into incoming. - * - * @param buffer - - */ - reuseBuffer(buffer) { - this.incoming.push(buffer); - if (!this.isError && this.resolveData() && !this.isStreamEnd) { - this.readable.resume(); + const msg = `Error "${err}" occurred while parsing the response body - ${operationResponse.bodyAsText}.`; + const errCode = err.code || core_rest_pipeline_1.RestError.PARSE_ERROR; + const e = new core_rest_pipeline_1.RestError(msg, { + code: errCode, + statusCode: operationResponse.status, + request: operationResponse.request, + response: operationResponse + }); + throw e; } } - }; - exports2.BufferScheduler = BufferScheduler; + return operationResponse; + } } }); -// node_modules/@azure/storage-common/dist/commonjs/cache.js -var require_cache3 = __commonJS({ - "node_modules/@azure/storage-common/dist/commonjs/cache.js"(exports2) { +// node_modules/@azure/core-client/dist/commonjs/interfaceHelpers.js +var require_interfaceHelpers = __commonJS({ + "node_modules/@azure/core-client/dist/commonjs/interfaceHelpers.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.getCachedDefaultHttpClient = getCachedDefaultHttpClient; - var core_rest_pipeline_1 = require_commonjs6(); - var _defaultHttpClient; - function getCachedDefaultHttpClient() { - if (!_defaultHttpClient) { - _defaultHttpClient = (0, core_rest_pipeline_1.createDefaultHttpClient)(); - } - return _defaultHttpClient; - } - } -}); - -// node_modules/@azure/storage-common/dist/commonjs/policies/RequestPolicy.js -var require_RequestPolicy2 = __commonJS({ - "node_modules/@azure/storage-common/dist/commonjs/policies/RequestPolicy.js"(exports2) { + exports2.getStreamingResponseStatusCodes = getStreamingResponseStatusCodes; + exports2.getPathStringFromParameter = getPathStringFromParameter; + var serializer_js_1 = require_serializer(); + function getStreamingResponseStatusCodes(operationSpec) { + const result = /* @__PURE__ */ new Set(); + for (const statusCode in operationSpec.responses) { + const operationResponse = 
operationSpec.responses[statusCode]; + if (operationResponse.bodyMapper && operationResponse.bodyMapper.type.name === serializer_js_1.MapperTypeNames.Stream) { + result.add(Number(statusCode)); + } + } + return result; + } + function getPathStringFromParameter(parameter) { + const { parameterPath, mapper } = parameter; + let result; + if (typeof parameterPath === "string") { + result = parameterPath; + } else if (Array.isArray(parameterPath)) { + result = parameterPath.join("."); + } else { + result = mapper.serializedName; + } + return result; + } + } +}); + +// node_modules/@azure/core-client/dist/commonjs/serializationPolicy.js +var require_serializationPolicy = __commonJS({ + "node_modules/@azure/core-client/dist/commonjs/serializationPolicy.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.BaseRequestPolicy = void 0; - var BaseRequestPolicy = class { - _nextPolicy; - _options; - /** - * The main method to implement that manipulates a request/response. - */ - constructor(_nextPolicy, _options) { - this._nextPolicy = _nextPolicy; - this._options = _options; + exports2.serializationPolicyName = void 0; + exports2.serializationPolicy = serializationPolicy; + exports2.serializeHeaders = serializeHeaders; + exports2.serializeRequestBody = serializeRequestBody; + var interfaces_js_1 = require_interfaces(); + var operationHelpers_js_1 = require_operationHelpers(); + var serializer_js_1 = require_serializer(); + var interfaceHelpers_js_1 = require_interfaceHelpers(); + exports2.serializationPolicyName = "serializationPolicy"; + function serializationPolicy(options = {}) { + const stringifyXML = options.stringifyXML; + return { + name: exports2.serializationPolicyName, + async sendRequest(request2, next) { + const operationInfo = (0, operationHelpers_js_1.getOperationRequestInfo)(request2); + const operationSpec = operationInfo?.operationSpec; + const operationArguments = operationInfo?.operationArguments; + if (operationSpec && operationArguments) { + serializeHeaders(request2, operationArguments, operationSpec); + serializeRequestBody(request2, operationArguments, operationSpec, stringifyXML); + } + return next(request2); + } + }; + } + function serializeHeaders(request2, operationArguments, operationSpec) { + if (operationSpec.headerParameters) { + for (const headerParameter of operationSpec.headerParameters) { + let headerValue = (0, operationHelpers_js_1.getOperationArgumentValueFromParameter)(operationArguments, headerParameter); + if (headerValue !== null && headerValue !== void 0 || headerParameter.mapper.required) { + headerValue = operationSpec.serializer.serialize(headerParameter.mapper, headerValue, (0, interfaceHelpers_js_1.getPathStringFromParameter)(headerParameter)); + const headerCollectionPrefix = headerParameter.mapper.headerCollectionPrefix; + if (headerCollectionPrefix) { + for (const key of Object.keys(headerValue)) { + request2.headers.set(headerCollectionPrefix + key, headerValue[key]); + } + } else { + request2.headers.set(headerParameter.mapper.serializedName || (0, interfaceHelpers_js_1.getPathStringFromParameter)(headerParameter), headerValue); + } + } + } } - /** - * Get whether or not a log with the provided log level should be logged. - * @param logLevel - The log level of the log that will be logged. - * @returns Whether or not a log with the provided log level should be logged. 
- */ - shouldLog(logLevel) { - return this._options.shouldLog(logLevel); + const customHeaders = operationArguments.options?.requestOptions?.customHeaders; + if (customHeaders) { + for (const customHeaderName of Object.keys(customHeaders)) { + request2.headers.set(customHeaderName, customHeaders[customHeaderName]); + } } - /** - * Attempt to log the provided message to the provided logger. If no logger was provided or if - * the log level does not meat the logger's threshold, then nothing will be logged. - * @param logLevel - The log level of this log. - * @param message - The message of this log. - */ - log(logLevel, message) { - this._options.log(logLevel, message); + } + function serializeRequestBody(request2, operationArguments, operationSpec, stringifyXML = function() { + throw new Error("XML serialization unsupported!"); + }) { + const serializerOptions = operationArguments.options?.serializerOptions; + const updatedOptions = { + xml: { + rootName: serializerOptions?.xml.rootName ?? "", + includeRoot: serializerOptions?.xml.includeRoot ?? false, + xmlCharKey: serializerOptions?.xml.xmlCharKey ?? interfaces_js_1.XML_CHARKEY + } + }; + const xmlCharKey = updatedOptions.xml.xmlCharKey; + if (operationSpec.requestBody && operationSpec.requestBody.mapper) { + request2.body = (0, operationHelpers_js_1.getOperationArgumentValueFromParameter)(operationArguments, operationSpec.requestBody); + const bodyMapper = operationSpec.requestBody.mapper; + const { required, serializedName, xmlName, xmlElementName, xmlNamespace, xmlNamespacePrefix, nullable } = bodyMapper; + const typeName = bodyMapper.type.name; + try { + if (request2.body !== void 0 && request2.body !== null || nullable && request2.body === null || required) { + const requestBodyParameterPathString = (0, interfaceHelpers_js_1.getPathStringFromParameter)(operationSpec.requestBody); + request2.body = operationSpec.serializer.serialize(bodyMapper, request2.body, requestBodyParameterPathString, updatedOptions); + const isStream = typeName === serializer_js_1.MapperTypeNames.Stream; + if (operationSpec.isXML) { + const xmlnsKey = xmlNamespacePrefix ? 
`xmlns:${xmlNamespacePrefix}` : "xmlns"; + const value = getXmlValueWithNamespace(xmlNamespace, xmlnsKey, typeName, request2.body, updatedOptions); + if (typeName === serializer_js_1.MapperTypeNames.Sequence) { + request2.body = stringifyXML(prepareXMLRootList(value, xmlElementName || xmlName || serializedName, xmlnsKey, xmlNamespace), { rootName: xmlName || serializedName, xmlCharKey }); + } else if (!isStream) { + request2.body = stringifyXML(value, { + rootName: xmlName || serializedName, + xmlCharKey + }); + } + } else if (typeName === serializer_js_1.MapperTypeNames.String && (operationSpec.contentType?.match("text/plain") || operationSpec.mediaType === "text")) { + return; + } else if (!isStream) { + request2.body = JSON.stringify(request2.body); + } + } + } catch (error3) { + throw new Error(`Error "${error3.message}" occurred in serializing the payload - ${JSON.stringify(serializedName, void 0, " ")}.`); + } + } else if (operationSpec.formDataParameters && operationSpec.formDataParameters.length > 0) { + request2.formData = {}; + for (const formDataParameter of operationSpec.formDataParameters) { + const formDataParameterValue = (0, operationHelpers_js_1.getOperationArgumentValueFromParameter)(operationArguments, formDataParameter); + if (formDataParameterValue !== void 0 && formDataParameterValue !== null) { + const formDataParameterPropertyName = formDataParameter.mapper.serializedName || (0, interfaceHelpers_js_1.getPathStringFromParameter)(formDataParameter); + request2.formData[formDataParameterPropertyName] = operationSpec.serializer.serialize(formDataParameter.mapper, formDataParameterValue, (0, interfaceHelpers_js_1.getPathStringFromParameter)(formDataParameter), updatedOptions); + } + } } - }; - exports2.BaseRequestPolicy = BaseRequestPolicy; + } + function getXmlValueWithNamespace(xmlNamespace, xmlnsKey, typeName, serializedValue, options) { + if (xmlNamespace && !["Composite", "Sequence", "Dictionary"].includes(typeName)) { + const result = {}; + result[options.xml.xmlCharKey] = serializedValue; + result[interfaces_js_1.XML_ATTRKEY] = { [xmlnsKey]: xmlNamespace }; + return result; + } + return serializedValue; + } + function prepareXMLRootList(obj, elementName, xmlNamespaceKey, xmlNamespace) { + if (!Array.isArray(obj)) { + obj = [obj]; + } + if (!xmlNamespaceKey || !xmlNamespace) { + return { [elementName]: obj }; + } + const result = { [elementName]: obj }; + result[interfaces_js_1.XML_ATTRKEY] = { [xmlNamespaceKey]: xmlNamespace }; + return result; + } } }); -// node_modules/@azure/storage-common/dist/commonjs/utils/constants.js -var require_constants16 = __commonJS({ - "node_modules/@azure/storage-common/dist/commonjs/utils/constants.js"(exports2) { +// node_modules/@azure/core-client/dist/commonjs/pipeline.js +var require_pipeline3 = __commonJS({ + "node_modules/@azure/core-client/dist/commonjs/pipeline.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.PathStylePorts = exports2.DevelopmentConnectionString = exports2.HeaderConstants = exports2.URLConstants = exports2.SDK_VERSION = void 0; - exports2.SDK_VERSION = "1.0.0"; - exports2.URLConstants = { - Parameters: { - FORCE_BROWSER_NO_CACHE: "_", - SIGNATURE: "sig", - SNAPSHOT: "snapshot", - VERSIONID: "versionid", - TIMEOUT: "timeout" + exports2.createClientPipeline = createClientPipeline; + var deserializationPolicy_js_1 = require_deserializationPolicy(); + var core_rest_pipeline_1 = require_commonjs6(); + var serializationPolicy_js_1 = 
require_serializationPolicy(); + function createClientPipeline(options = {}) { + const pipeline = (0, core_rest_pipeline_1.createPipelineFromOptions)(options ?? {}); + if (options.credentialOptions) { + pipeline.addPolicy((0, core_rest_pipeline_1.bearerTokenAuthenticationPolicy)({ + credential: options.credentialOptions.credential, + scopes: options.credentialOptions.credentialScopes + })); } - }; - exports2.HeaderConstants = { - AUTHORIZATION: "Authorization", - AUTHORIZATION_SCHEME: "Bearer", - CONTENT_ENCODING: "Content-Encoding", - CONTENT_ID: "Content-ID", - CONTENT_LANGUAGE: "Content-Language", - CONTENT_LENGTH: "Content-Length", - CONTENT_MD5: "Content-Md5", - CONTENT_TRANSFER_ENCODING: "Content-Transfer-Encoding", - CONTENT_TYPE: "Content-Type", - COOKIE: "Cookie", - DATE: "date", - IF_MATCH: "if-match", - IF_MODIFIED_SINCE: "if-modified-since", - IF_NONE_MATCH: "if-none-match", - IF_UNMODIFIED_SINCE: "if-unmodified-since", - PREFIX_FOR_STORAGE: "x-ms-", - RANGE: "Range", - USER_AGENT: "User-Agent", - X_MS_CLIENT_REQUEST_ID: "x-ms-client-request-id", - X_MS_COPY_SOURCE: "x-ms-copy-source", - X_MS_DATE: "x-ms-date", - X_MS_ERROR_CODE: "x-ms-error-code", - X_MS_VERSION: "x-ms-version", - X_MS_CopySourceErrorCode: "x-ms-copy-source-error-code" - }; - exports2.DevelopmentConnectionString = `DefaultEndpointsProtocol=http;AccountName=devstoreaccount1;AccountKey=Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==;BlobEndpoint=http://127.0.0.1:10000/devstoreaccount1;`; - exports2.PathStylePorts = [ - "10000", - "10001", - "10002", - "10003", - "10004", - "10100", - "10101", - "10102", - "10103", - "10104", - "11000", - "11001", - "11002", - "11003", - "11004", - "11100", - "11101", - "11102", - "11103", - "11104" - ]; + pipeline.addPolicy((0, serializationPolicy_js_1.serializationPolicy)(options.serializationOptions), { phase: "Serialize" }); + pipeline.addPolicy((0, deserializationPolicy_js_1.deserializationPolicy)(options.deserializationOptions), { + phase: "Deserialize" + }); + return pipeline; + } } }); -// node_modules/@azure/storage-common/dist/commonjs/utils/utils.common.js -var require_utils_common2 = __commonJS({ - "node_modules/@azure/storage-common/dist/commonjs/utils/utils.common.js"(exports2) { +// node_modules/@azure/core-client/dist/commonjs/httpClientCache.js +var require_httpClientCache = __commonJS({ + "node_modules/@azure/core-client/dist/commonjs/httpClientCache.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.escapeURLPath = escapeURLPath; - exports2.getValueInConnString = getValueInConnString; - exports2.extractConnectionStringParts = extractConnectionStringParts; - exports2.appendToURLPath = appendToURLPath; - exports2.setURLParameter = setURLParameter; - exports2.getURLParameter = getURLParameter; - exports2.setURLHost = setURLHost; - exports2.getURLPath = getURLPath; - exports2.getURLScheme = getURLScheme; - exports2.getURLPathAndQuery = getURLPathAndQuery; - exports2.getURLQueries = getURLQueries; - exports2.appendToURLQuery = appendToURLQuery; - exports2.truncatedISO8061Date = truncatedISO8061Date; - exports2.base64encode = base64encode; - exports2.base64decode = base64decode; - exports2.generateBlockID = generateBlockID; - exports2.delay = delay2; - exports2.padStart = padStart2; - exports2.sanitizeURL = sanitizeURL; - exports2.sanitizeHeaders = sanitizeHeaders; - exports2.iEqual = iEqual; - exports2.getAccountNameFromUrl = getAccountNameFromUrl; - 
exports2.isIpEndpointStyle = isIpEndpointStyle; - exports2.attachCredential = attachCredential; - exports2.httpAuthorizationToString = httpAuthorizationToString; - exports2.EscapePath = EscapePath; - exports2.assertResponse = assertResponse; + exports2.getCachedDefaultHttpClient = getCachedDefaultHttpClient; var core_rest_pipeline_1 = require_commonjs6(); - var core_util_1 = require_commonjs4(); - var constants_js_1 = require_constants16(); - function escapeURLPath(url2) { - const urlParsed = new URL(url2); - let path13 = urlParsed.pathname; - path13 = path13 || "/"; - path13 = escape(path13); - urlParsed.pathname = path13; - return urlParsed.toString(); - } - function getProxyUriFromDevConnString(connectionString) { - let proxyUri = ""; - if (connectionString.search("DevelopmentStorageProxyUri=") !== -1) { - const matchCredentials = connectionString.split(";"); - for (const element of matchCredentials) { - if (element.trim().startsWith("DevelopmentStorageProxyUri=")) { - proxyUri = element.trim().match("DevelopmentStorageProxyUri=(.*)")[1]; - } - } - } - return proxyUri; - } - function getValueInConnString(connectionString, argument) { - const elements = connectionString.split(";"); - for (const element of elements) { - if (element.trim().startsWith(argument)) { - return element.trim().match(argument + "=(.*)")[1]; - } + var cachedHttpClient; + function getCachedDefaultHttpClient() { + if (!cachedHttpClient) { + cachedHttpClient = (0, core_rest_pipeline_1.createDefaultHttpClient)(); } - return ""; + return cachedHttpClient; } - function extractConnectionStringParts(connectionString) { - let proxyUri = ""; - if (connectionString.startsWith("UseDevelopmentStorage=true")) { - proxyUri = getProxyUriFromDevConnString(connectionString); - connectionString = constants_js_1.DevelopmentConnectionString; - } - let blobEndpoint = getValueInConnString(connectionString, "BlobEndpoint"); - blobEndpoint = blobEndpoint.endsWith("/") ? blobEndpoint.slice(0, -1) : blobEndpoint; - if (connectionString.search("DefaultEndpointsProtocol=") !== -1 && connectionString.search("AccountKey=") !== -1) { - let defaultEndpointsProtocol = ""; - let accountName = ""; - let accountKey = Buffer.from("accountKey", "base64"); - let endpointSuffix = ""; - accountName = getValueInConnString(connectionString, "AccountName"); - accountKey = Buffer.from(getValueInConnString(connectionString, "AccountKey"), "base64"); - if (!blobEndpoint) { - defaultEndpointsProtocol = getValueInConnString(connectionString, "DefaultEndpointsProtocol"); - const protocol = defaultEndpointsProtocol.toLowerCase(); - if (protocol !== "https" && protocol !== "http") { - throw new Error("Invalid DefaultEndpointsProtocol in the provided Connection String. 
Expecting 'https' or 'http'"); - } - endpointSuffix = getValueInConnString(connectionString, "EndpointSuffix"); - if (!endpointSuffix) { - throw new Error("Invalid EndpointSuffix in the provided Connection String"); - } - blobEndpoint = `${defaultEndpointsProtocol}://${accountName}.blob.${endpointSuffix}`; - } - if (!accountName) { - throw new Error("Invalid AccountName in the provided Connection String"); - } else if (accountKey.length === 0) { - throw new Error("Invalid AccountKey in the provided Connection String"); - } - return { - kind: "AccountConnString", - url: blobEndpoint, - accountName, - accountKey, - proxyUri - }; - } else { - let accountSas = getValueInConnString(connectionString, "SharedAccessSignature"); - let accountName = getValueInConnString(connectionString, "AccountName"); - if (!accountName) { - accountName = getAccountNameFromUrl(blobEndpoint); - } - if (!blobEndpoint) { - throw new Error("Invalid BlobEndpoint in the provided SAS Connection String"); - } else if (!accountSas) { - throw new Error("Invalid SharedAccessSignature in the provided SAS Connection String"); + } +}); + +// node_modules/@azure/core-client/dist/commonjs/urlHelpers.js +var require_urlHelpers2 = __commonJS({ + "node_modules/@azure/core-client/dist/commonjs/urlHelpers.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.getRequestUrl = getRequestUrl; + exports2.appendQueryParams = appendQueryParams; + var operationHelpers_js_1 = require_operationHelpers(); + var interfaceHelpers_js_1 = require_interfaceHelpers(); + var CollectionFormatToDelimiterMap = { + CSV: ",", + SSV: " ", + Multi: "Multi", + TSV: " ", + Pipes: "|" + }; + function getRequestUrl(baseUri, operationSpec, operationArguments, fallbackObject) { + const urlReplacements = calculateUrlReplacements(operationSpec, operationArguments, fallbackObject); + let isAbsolutePath = false; + let requestUrl = replaceAll(baseUri, urlReplacements); + if (operationSpec.path) { + let path13 = replaceAll(operationSpec.path, urlReplacements); + if (operationSpec.path === "/{nextLink}" && path13.startsWith("/")) { + path13 = path13.substring(1); } - if (accountSas.startsWith("?")) { - accountSas = accountSas.substring(1); + if (isAbsoluteUrl(path13)) { + requestUrl = path13; + isAbsolutePath = true; + } else { + requestUrl = appendPath(requestUrl, path13); } - return { kind: "SASConnString", url: blobEndpoint, accountName, accountSas }; } + const { queryParams, sequenceParams } = calculateQueryParameters(operationSpec, operationArguments, fallbackObject); + requestUrl = appendQueryParams(requestUrl, queryParams, sequenceParams, isAbsolutePath); + return requestUrl; } - function escape(text) { - return encodeURIComponent(text).replace(/%2F/g, "/").replace(/'/g, "%27").replace(/\+/g, "%20").replace(/%25/g, "%"); - } - function appendToURLPath(url2, name) { - const urlParsed = new URL(url2); - let path13 = urlParsed.pathname; - path13 = path13 ? path13.endsWith("/") ? `${path13}${name}` : `${path13}/${name}` : name; - urlParsed.pathname = path13; - return urlParsed.toString(); + function replaceAll(input, replacements) { + let result = input; + for (const [searchValue, replaceValue] of replacements) { + result = result.split(searchValue).join(replaceValue); + } + return result; } - function setURLParameter(url2, name, value) { - const urlParsed = new URL(url2); - const encodedName = encodeURIComponent(name); - const encodedValue = value ? 
encodeURIComponent(value) : void 0; - const searchString = urlParsed.search === "" ? "?" : urlParsed.search; - const searchPieces = []; - for (const pair of searchString.slice(1).split("&")) { - if (pair) { - const [key] = pair.split("=", 2); - if (key !== encodedName) { - searchPieces.push(pair); + function calculateUrlReplacements(operationSpec, operationArguments, fallbackObject) { + const result = /* @__PURE__ */ new Map(); + if (operationSpec.urlParameters?.length) { + for (const urlParameter of operationSpec.urlParameters) { + let urlParameterValue = (0, operationHelpers_js_1.getOperationArgumentValueFromParameter)(operationArguments, urlParameter, fallbackObject); + const parameterPathString = (0, interfaceHelpers_js_1.getPathStringFromParameter)(urlParameter); + urlParameterValue = operationSpec.serializer.serialize(urlParameter.mapper, urlParameterValue, parameterPathString); + if (!urlParameter.skipEncoding) { + urlParameterValue = encodeURIComponent(urlParameterValue); } + result.set(`{${urlParameter.mapper.serializedName || parameterPathString}}`, urlParameterValue); } } - if (encodedValue) { - searchPieces.push(`${encodedName}=${encodedValue}`); - } - urlParsed.search = searchPieces.length ? `?${searchPieces.join("&")}` : ""; - return urlParsed.toString(); - } - function getURLParameter(url2, name) { - const urlParsed = new URL(url2); - return urlParsed.searchParams.get(name) ?? void 0; + return result; } - function setURLHost(url2, host) { - const urlParsed = new URL(url2); - urlParsed.hostname = host; - return urlParsed.toString(); + function isAbsoluteUrl(url2) { + return url2.includes("://"); } - function getURLPath(url2) { - try { - const urlParsed = new URL(url2); - return urlParsed.pathname; - } catch (e) { - return void 0; + function appendPath(url2, pathToAppend) { + if (!pathToAppend) { + return url2; } - } - function getURLScheme(url2) { - try { - const urlParsed = new URL(url2); - return urlParsed.protocol.endsWith(":") ? urlParsed.protocol.slice(0, -1) : urlParsed.protocol; - } catch (e) { - return void 0; + const parsedUrl = new URL(url2); + let newPath = parsedUrl.pathname; + if (!newPath.endsWith("/")) { + newPath = `${newPath}/`; } - } - function getURLPathAndQuery(url2) { - const urlParsed = new URL(url2); - const pathString = urlParsed.pathname; - if (!pathString) { - throw new RangeError("Invalid url without valid path."); + if (pathToAppend.startsWith("/")) { + pathToAppend = pathToAppend.substring(1); } - let queryString = urlParsed.search || ""; - queryString = queryString.trim(); - if (queryString !== "") { - queryString = queryString.startsWith("?") ? queryString : `?${queryString}`; + const searchStart = pathToAppend.indexOf("?"); + if (searchStart !== -1) { + const path13 = pathToAppend.substring(0, searchStart); + const search = pathToAppend.substring(searchStart + 1); + newPath = newPath + path13; + if (search) { + parsedUrl.search = parsedUrl.search ? `${parsedUrl.search}&${search}` : search; + } + } else { + newPath = newPath + pathToAppend; } - return `${pathString}${queryString}`; + parsedUrl.pathname = newPath; + return parsedUrl.toString(); } - function getURLQueries(url2) { - let queryString = new URL(url2).search; - if (!queryString) { - return {}; - } - queryString = queryString.trim(); - queryString = queryString.startsWith("?") ? 
queryString.substring(1) : queryString; - let querySubStrings = queryString.split("&"); - querySubStrings = querySubStrings.filter((value) => { - const indexOfEqual = value.indexOf("="); - const lastIndexOfEqual = value.lastIndexOf("="); - return indexOfEqual > 0 && indexOfEqual === lastIndexOfEqual && lastIndexOfEqual < value.length - 1; - }); - const queries = {}; - for (const querySubString of querySubStrings) { - const splitResults = querySubString.split("="); - const key = splitResults[0]; - const value = splitResults[1]; - queries[key] = value; - } - return queries; - } - function appendToURLQuery(url2, queryParts) { - const urlParsed = new URL(url2); - let query = urlParsed.search; - if (query) { - query += "&" + queryParts; - } else { - query = queryParts; - } - urlParsed.search = query; - return urlParsed.toString(); - } - function truncatedISO8061Date(date, withMilliseconds = true) { - const dateString = date.toISOString(); - return withMilliseconds ? dateString.substring(0, dateString.length - 1) + "0000Z" : dateString.substring(0, dateString.length - 5) + "Z"; - } - function base64encode(content) { - return !core_util_1.isNodeLike ? btoa(content) : Buffer.from(content).toString("base64"); - } - function base64decode(encodedString) { - return !core_util_1.isNodeLike ? atob(encodedString) : Buffer.from(encodedString, "base64").toString(); - } - function generateBlockID(blockIDPrefix, blockIndex) { - const maxSourceStringLength = 48; - const maxBlockIndexLength = 6; - const maxAllowedBlockIDPrefixLength = maxSourceStringLength - maxBlockIndexLength; - if (blockIDPrefix.length > maxAllowedBlockIDPrefixLength) { - blockIDPrefix = blockIDPrefix.slice(0, maxAllowedBlockIDPrefixLength); - } - const res = blockIDPrefix + padStart2(blockIndex.toString(), maxSourceStringLength - blockIDPrefix.length, "0"); - return base64encode(res); - } - async function delay2(timeInMs, aborter, abortError) { - return new Promise((resolve6, reject) => { - let timeout; - const abortHandler = () => { - if (timeout !== void 0) { - clearTimeout(timeout); + function calculateQueryParameters(operationSpec, operationArguments, fallbackObject) { + const result = /* @__PURE__ */ new Map(); + const sequenceParams = /* @__PURE__ */ new Set(); + if (operationSpec.queryParameters?.length) { + for (const queryParameter of operationSpec.queryParameters) { + if (queryParameter.mapper.type.name === "Sequence" && queryParameter.mapper.serializedName) { + sequenceParams.add(queryParameter.mapper.serializedName); } - reject(abortError); - }; - const resolveHandler = () => { - if (aborter !== void 0) { - aborter.removeEventListener("abort", abortHandler); + let queryParameterValue = (0, operationHelpers_js_1.getOperationArgumentValueFromParameter)(operationArguments, queryParameter, fallbackObject); + if (queryParameterValue !== void 0 && queryParameterValue !== null || queryParameter.mapper.required) { + queryParameterValue = operationSpec.serializer.serialize(queryParameter.mapper, queryParameterValue, (0, interfaceHelpers_js_1.getPathStringFromParameter)(queryParameter)); + const delimiter = queryParameter.collectionFormat ? 
CollectionFormatToDelimiterMap[queryParameter.collectionFormat] : ""; + if (Array.isArray(queryParameterValue)) { + queryParameterValue = queryParameterValue.map((item) => { + if (item === null || item === void 0) { + return ""; + } + return item; + }); + } + if (queryParameter.collectionFormat === "Multi" && queryParameterValue.length === 0) { + continue; + } else if (Array.isArray(queryParameterValue) && (queryParameter.collectionFormat === "SSV" || queryParameter.collectionFormat === "TSV")) { + queryParameterValue = queryParameterValue.join(delimiter); + } + if (!queryParameter.skipEncoding) { + if (Array.isArray(queryParameterValue)) { + queryParameterValue = queryParameterValue.map((item) => { + return encodeURIComponent(item); + }); + } else { + queryParameterValue = encodeURIComponent(queryParameterValue); + } + } + if (Array.isArray(queryParameterValue) && (queryParameter.collectionFormat === "CSV" || queryParameter.collectionFormat === "Pipes")) { + queryParameterValue = queryParameterValue.join(delimiter); + } + result.set(queryParameter.mapper.serializedName || (0, interfaceHelpers_js_1.getPathStringFromParameter)(queryParameter), queryParameterValue); } - resolve6(); - }; - timeout = setTimeout(resolveHandler, timeInMs); - if (aborter !== void 0) { - aborter.addEventListener("abort", abortHandler); - } - }); - } - function padStart2(currentString, targetLength, padString = " ") { - if (String.prototype.padStart) { - return currentString.padStart(targetLength, padString); - } - padString = padString || " "; - if (currentString.length > targetLength) { - return currentString; - } else { - targetLength = targetLength - currentString.length; - if (targetLength > padString.length) { - padString += padString.repeat(targetLength / padString.length); } - return padString.slice(0, targetLength) + currentString; } + return { + queryParams: result, + sequenceParams + }; } - function sanitizeURL(url2) { - let safeURL = url2; - if (getURLParameter(safeURL, constants_js_1.URLConstants.Parameters.SIGNATURE)) { - safeURL = setURLParameter(safeURL, constants_js_1.URLConstants.Parameters.SIGNATURE, "*****"); + function simpleParseQueryParams(queryString) { + const result = /* @__PURE__ */ new Map(); + if (!queryString || queryString[0] !== "?") { + return result; } - return safeURL; - } - function sanitizeHeaders(originalHeader) { - const headers = (0, core_rest_pipeline_1.createHttpHeaders)(); - for (const [name, value] of originalHeader) { - if (name.toLowerCase() === constants_js_1.HeaderConstants.AUTHORIZATION.toLowerCase()) { - headers.set(name, "*****"); - } else if (name.toLowerCase() === constants_js_1.HeaderConstants.X_MS_COPY_SOURCE) { - headers.set(name, sanitizeURL(value)); + queryString = queryString.slice(1); + const pairs2 = queryString.split("&"); + for (const pair of pairs2) { + const [name, value] = pair.split("=", 2); + const existingValue = result.get(name); + if (existingValue) { + if (Array.isArray(existingValue)) { + existingValue.push(value); + } else { + result.set(name, [existingValue, value]); + } } else { - headers.set(name, value); + result.set(name, value); } } - return headers; - } - function iEqual(str1, str2) { - return str1.toLocaleLowerCase() === str2.toLocaleLowerCase(); + return result; } - function getAccountNameFromUrl(url2) { + function appendQueryParams(url2, queryParams, sequenceParams, noOverwrite = false) { + if (queryParams.size === 0) { + return url2; + } const parsedUrl = new URL(url2); - let accountName; - try { - if 
(parsedUrl.hostname.split(".")[1] === "blob") { - accountName = parsedUrl.hostname.split(".")[0]; - } else if (isIpEndpointStyle(parsedUrl)) { - accountName = parsedUrl.pathname.split("/")[1]; + const combinedParams = simpleParseQueryParams(parsedUrl.search); + for (const [name, value] of queryParams) { + const existingValue = combinedParams.get(name); + if (Array.isArray(existingValue)) { + if (Array.isArray(value)) { + existingValue.push(...value); + const valueSet = new Set(existingValue); + combinedParams.set(name, Array.from(valueSet)); + } else { + existingValue.push(value); + } + } else if (existingValue) { + if (Array.isArray(value)) { + value.unshift(existingValue); + } else if (sequenceParams.has(name)) { + combinedParams.set(name, [existingValue, value]); + } + if (!noOverwrite) { + combinedParams.set(name, value); + } } else { - accountName = ""; + combinedParams.set(name, value); } - return accountName; - } catch (error3) { - throw new Error("Unable to extract accountName with provided information."); - } - } - function isIpEndpointStyle(parsedUrl) { - const host = parsedUrl.host; - return /^.*:.*:.*$|^(localhost|host.docker.internal)(:[0-9]+)?$|^(\d|[1-9]\d|1\d\d|2[0-4]\d|25[0-5])(\.(\d|[1-9]\d|1\d\d|2[0-4]\d|25[0-5])){3}(:[0-9]+)?$/.test(host) || Boolean(parsedUrl.port) && constants_js_1.PathStylePorts.includes(parsedUrl.port); - } - function attachCredential(thing, credential) { - thing.credential = credential; - return thing; - } - function httpAuthorizationToString(httpAuthorization) { - return httpAuthorization ? httpAuthorization.scheme + " " + httpAuthorization.value : void 0; - } - function EscapePath(blobName) { - const split = blobName.split("/"); - for (let i = 0; i < split.length; i++) { - split[i] = encodeURIComponent(split[i]); } - return split.join("/"); - } - function assertResponse(response) { - if (`_response` in response) { - return response; + const searchPieces = []; + for (const [name, value] of combinedParams) { + if (typeof value === "string") { + searchPieces.push(`${name}=${value}`); + } else if (Array.isArray(value)) { + for (const subValue of value) { + searchPieces.push(`${name}=${subValue}`); + } + } else { + searchPieces.push(`${name}=${value}`); + } } - throw new TypeError(`Unexpected response object ${response}`); + parsedUrl.search = searchPieces.length ? 
`?${searchPieces.join("&")}` : ""; + return parsedUrl.toString(); } } }); -// node_modules/@azure/storage-common/dist/commonjs/policies/StorageBrowserPolicy.js -var require_StorageBrowserPolicy = __commonJS({ - "node_modules/@azure/storage-common/dist/commonjs/policies/StorageBrowserPolicy.js"(exports2) { +// node_modules/@azure/core-client/dist/commonjs/log.js +var require_log4 = __commonJS({ + "node_modules/@azure/core-client/dist/commonjs/log.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.StorageBrowserPolicy = void 0; - var RequestPolicy_js_1 = require_RequestPolicy2(); - var core_util_1 = require_commonjs4(); - var constants_js_1 = require_constants16(); - var utils_common_js_1 = require_utils_common2(); - var StorageBrowserPolicy = class extends RequestPolicy_js_1.BaseRequestPolicy { + exports2.logger = void 0; + var logger_1 = require_commonjs2(); + exports2.logger = (0, logger_1.createClientLogger)("core-client"); + } +}); + +// node_modules/@azure/core-client/dist/commonjs/serviceClient.js +var require_serviceClient = __commonJS({ + "node_modules/@azure/core-client/dist/commonjs/serviceClient.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.ServiceClient = void 0; + var core_rest_pipeline_1 = require_commonjs6(); + var pipeline_js_1 = require_pipeline3(); + var utils_js_1 = require_utils6(); + var httpClientCache_js_1 = require_httpClientCache(); + var operationHelpers_js_1 = require_operationHelpers(); + var urlHelpers_js_1 = require_urlHelpers2(); + var interfaceHelpers_js_1 = require_interfaceHelpers(); + var log_js_1 = require_log4(); + var ServiceClient = class { /** - * Creates an instance of StorageBrowserPolicy. - * @param nextPolicy - - * @param options - + * If specified, this is the base URI that requests will be made against for this ServiceClient. + * If it is not specified, then all OperationSpecs must contain a baseUrl property. */ - // The base class has a protected constructor. Adding a public one to enable constructing of this class. - /* eslint-disable-next-line @typescript-eslint/no-useless-constructor*/ - constructor(nextPolicy, options) { - super(nextPolicy, options); + _endpoint; + /** + * The default request content type for the service. + * Used if no requestContentType is present on an OperationSpec. + */ + _requestContentType; + /** + * Set to true if the request is sent over HTTP instead of HTTPS + */ + _allowInsecureConnection; + /** + * The HTTP client that will be used to send requests. + */ + _httpClient; + /** + * The pipeline used by this client to make requests + */ + pipeline; + /** + * The ServiceClient constructor + * @param options - The service client options that govern the behavior of the client. + */ + constructor(options = {}) { + this._requestContentType = options.requestContentType; + this._endpoint = options.endpoint ?? options.baseUri; + if (options.baseUri) { + log_js_1.logger.warning("The baseUri option for SDK Clients has been deprecated, please use endpoint instead."); + } + this._allowInsecureConnection = options.allowInsecureConnection; + this._httpClient = options.httpClient || (0, httpClientCache_js_1.getCachedDefaultHttpClient)(); + this.pipeline = options.pipeline || createDefaultPipeline(options); + if (options.additionalPolicies?.length) { + for (const { policy, position } of options.additionalPolicies) { + const afterPhase = position === "perRetry" ? 
"Sign" : void 0; + this.pipeline.addPolicy(policy, { + afterPhase + }); + } + } } /** - * Sends out request. - * - * @param request - + * Send the provided httpRequest. */ async sendRequest(request2) { - if (core_util_1.isNodeLike) { - return this._nextPolicy.sendRequest(request2); + return this.pipeline.sendRequest(this._httpClient, request2); + } + /** + * Send an HTTP request that is populated using the provided OperationSpec. + * @typeParam T - The typed result of the request, based on the OperationSpec. + * @param operationArguments - The arguments that the HTTP request's templated values will be populated from. + * @param operationSpec - The OperationSpec to use to populate the httpRequest. + */ + async sendOperationRequest(operationArguments, operationSpec) { + const endpoint2 = operationSpec.baseUrl || this._endpoint; + if (!endpoint2) { + throw new Error("If operationSpec.baseUrl is not specified, then the ServiceClient must have a endpoint string property that contains the base URL to use."); } - if (request2.method.toUpperCase() === "GET" || request2.method.toUpperCase() === "HEAD") { - request2.url = (0, utils_common_js_1.setURLParameter)(request2.url, constants_js_1.URLConstants.Parameters.FORCE_BROWSER_NO_CACHE, (/* @__PURE__ */ new Date()).getTime().toString()); + const url2 = (0, urlHelpers_js_1.getRequestUrl)(endpoint2, operationSpec, operationArguments, this); + const request2 = (0, core_rest_pipeline_1.createPipelineRequest)({ + url: url2 + }); + request2.method = operationSpec.httpMethod; + const operationInfo = (0, operationHelpers_js_1.getOperationRequestInfo)(request2); + operationInfo.operationSpec = operationSpec; + operationInfo.operationArguments = operationArguments; + const contentType = operationSpec.contentType || this._requestContentType; + if (contentType && operationSpec.requestBody) { + request2.headers.set("Content-Type", contentType); + } + const options = operationArguments.options; + if (options) { + const requestOptions = options.requestOptions; + if (requestOptions) { + if (requestOptions.timeout) { + request2.timeout = requestOptions.timeout; + } + if (requestOptions.onUploadProgress) { + request2.onUploadProgress = requestOptions.onUploadProgress; + } + if (requestOptions.onDownloadProgress) { + request2.onDownloadProgress = requestOptions.onDownloadProgress; + } + if (requestOptions.shouldDeserialize !== void 0) { + operationInfo.shouldDeserialize = requestOptions.shouldDeserialize; + } + if (requestOptions.allowInsecureConnection) { + request2.allowInsecureConnection = true; + } + } + if (options.abortSignal) { + request2.abortSignal = options.abortSignal; + } + if (options.tracingOptions) { + request2.tracingOptions = options.tracingOptions; + } + } + if (this._allowInsecureConnection) { + request2.allowInsecureConnection = true; + } + if (request2.streamResponseStatusCodes === void 0) { + request2.streamResponseStatusCodes = (0, interfaceHelpers_js_1.getStreamingResponseStatusCodes)(operationSpec); + } + try { + const rawResponse = await this.sendRequest(request2); + const flatResponse = (0, utils_js_1.flattenResponse)(rawResponse, operationSpec.responses[rawResponse.status]); + if (options?.onResponse) { + options.onResponse(rawResponse, flatResponse); + } + return flatResponse; + } catch (error3) { + if (typeof error3 === "object" && error3?.response) { + const rawResponse = error3.response; + const flatResponse = (0, utils_js_1.flattenResponse)(rawResponse, operationSpec.responses[error3.statusCode] || operationSpec.responses["default"]); + 
error3.details = flatResponse; + if (options?.onResponse) { + options.onResponse(rawResponse, flatResponse, error3); + } + } + throw error3; } - request2.headers.remove(constants_js_1.HeaderConstants.COOKIE); - request2.headers.remove(constants_js_1.HeaderConstants.CONTENT_LENGTH); - return this._nextPolicy.sendRequest(request2); } }; - exports2.StorageBrowserPolicy = StorageBrowserPolicy; + exports2.ServiceClient = ServiceClient; + function createDefaultPipeline(options) { + const credentialScopes = getCredentialScopes(options); + const credentialOptions = options.credential && credentialScopes ? { credentialScopes, credential: options.credential } : void 0; + return (0, pipeline_js_1.createClientPipeline)({ + ...options, + credentialOptions + }); + } + function getCredentialScopes(options) { + if (options.credentialScopes) { + return options.credentialScopes; + } + if (options.endpoint) { + return `${options.endpoint}/.default`; + } + if (options.baseUri) { + return `${options.baseUri}/.default`; + } + if (options.credential && !options.credentialScopes) { + throw new Error(`When using credentials, the ServiceClientOptions must contain either a endpoint or a credentialScopes. Unable to create a bearerTokenAuthenticationPolicy`); + } + return void 0; + } } }); -// node_modules/@azure/storage-common/dist/commonjs/StorageBrowserPolicyFactory.js -var require_StorageBrowserPolicyFactory = __commonJS({ - "node_modules/@azure/storage-common/dist/commonjs/StorageBrowserPolicyFactory.js"(exports2) { +// node_modules/@azure/core-client/dist/commonjs/authorizeRequestOnClaimChallenge.js +var require_authorizeRequestOnClaimChallenge = __commonJS({ + "node_modules/@azure/core-client/dist/commonjs/authorizeRequestOnClaimChallenge.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.StorageBrowserPolicyFactory = exports2.StorageBrowserPolicy = void 0; - var StorageBrowserPolicy_js_1 = require_StorageBrowserPolicy(); - Object.defineProperty(exports2, "StorageBrowserPolicy", { enumerable: true, get: function() { - return StorageBrowserPolicy_js_1.StorageBrowserPolicy; - } }); - var StorageBrowserPolicyFactory = class { - /** - * Creates a StorageBrowserPolicyFactory object. - * - * @param nextPolicy - - * @param options - - */ - create(nextPolicy, options) { - return new StorageBrowserPolicy_js_1.StorageBrowserPolicy(nextPolicy, options); + exports2.parseCAEChallenge = parseCAEChallenge; + exports2.authorizeRequestOnClaimChallenge = authorizeRequestOnClaimChallenge; + var log_js_1 = require_log4(); + var base64_js_1 = require_base64(); + function parseCAEChallenge(challenges) { + const bearerChallenges = `, ${challenges.trim()}`.split(", Bearer ").filter((x) => x); + return bearerChallenges.map((challenge) => { + const challengeParts = `${challenge.trim()}, `.split('", ').filter((x) => x); + const keyValuePairs = challengeParts.map((keyValue) => (([key, value]) => ({ [key]: value }))(keyValue.trim().split('="'))); + return keyValuePairs.reduce((a, b) => ({ ...a, ...b }), {}); + }); + } + async function authorizeRequestOnClaimChallenge(onChallengeOptions) { + const { scopes, response } = onChallengeOptions; + const logger = onChallengeOptions.logger || log_js_1.logger; + const challenge = response.headers.get("WWW-Authenticate"); + if (!challenge) { + logger.info(`The WWW-Authenticate header was missing. 
Failed to perform the Continuous Access Evaluation authentication flow.`); + return false; } - }; - exports2.StorageBrowserPolicyFactory = StorageBrowserPolicyFactory; + const challenges = parseCAEChallenge(challenge) || []; + const parsedChallenge = challenges.find((x) => x.claims); + if (!parsedChallenge) { + logger.info(`The WWW-Authenticate header was missing the necessary "claims" to perform the Continuous Access Evaluation authentication flow.`); + return false; + } + const accessToken = await onChallengeOptions.getAccessToken(parsedChallenge.scope ? [parsedChallenge.scope] : scopes, { + claims: (0, base64_js_1.decodeStringToString)(parsedChallenge.claims) + }); + if (!accessToken) { + return false; + } + onChallengeOptions.request.headers.set("Authorization", `${accessToken.tokenType ?? "Bearer"} ${accessToken.token}`); + return true; + } } }); -// node_modules/@azure/storage-common/dist/commonjs/policies/CredentialPolicy.js -var require_CredentialPolicy2 = __commonJS({ - "node_modules/@azure/storage-common/dist/commonjs/policies/CredentialPolicy.js"(exports2) { +// node_modules/@azure/core-client/dist/commonjs/authorizeRequestOnTenantChallenge.js +var require_authorizeRequestOnTenantChallenge = __commonJS({ + "node_modules/@azure/core-client/dist/commonjs/authorizeRequestOnTenantChallenge.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.CredentialPolicy = void 0; - var RequestPolicy_js_1 = require_RequestPolicy2(); - var CredentialPolicy = class extends RequestPolicy_js_1.BaseRequestPolicy { - /** - * Sends out request. - * - * @param request - - */ - sendRequest(request2) { - return this._nextPolicy.sendRequest(this.signRequest(request2)); - } + exports2.authorizeRequestOnTenantChallenge = void 0; + var Constants = { + DefaultScope: "/.default", /** - * Child classes must implement this method with request signing. This method - * will be executed in {@link sendRequest}. - * - * @param request - + * Defines constants for use with HTTP headers. */ - signRequest(request2) { - return request2; + HeaderConstants: { + /** + * The Authorization header. + */ + AUTHORIZATION: "authorization" } }; - exports2.CredentialPolicy = CredentialPolicy; + function isUuid(text) { + return /^[0-9a-fA-F]{8}\b-[0-9a-fA-F]{4}\b-[0-9a-fA-F]{4}\b-[0-9a-fA-F]{4}\b-[0-9a-fA-F]{12}$/.test(text); + } + var authorizeRequestOnTenantChallenge = async (challengeOptions) => { + const requestOptions = requestToOptions(challengeOptions.request); + const challenge = getChallenge(challengeOptions.response); + if (challenge) { + const challengeInfo = parseChallenge(challenge); + const challengeScopes = buildScopes(challengeOptions, challengeInfo); + const tenantId = extractTenantId(challengeInfo); + if (!tenantId) { + return false; + } + const accessToken = await challengeOptions.getAccessToken(challengeScopes, { + ...requestOptions, + tenantId + }); + if (!accessToken) { + return false; + } + challengeOptions.request.headers.set(Constants.HeaderConstants.AUTHORIZATION, `${accessToken.tokenType ?? 
"Bearer"} ${accessToken.token}`); + return true; + } + return false; + }; + exports2.authorizeRequestOnTenantChallenge = authorizeRequestOnTenantChallenge; + function extractTenantId(challengeInfo) { + const parsedAuthUri = new URL(challengeInfo.authorization_uri); + const pathSegments = parsedAuthUri.pathname.split("/"); + const tenantId = pathSegments[1]; + if (tenantId && isUuid(tenantId)) { + return tenantId; + } + return void 0; + } + function buildScopes(challengeOptions, challengeInfo) { + if (!challengeInfo.resource_id) { + return challengeOptions.scopes; + } + const challengeScopes = new URL(challengeInfo.resource_id); + challengeScopes.pathname = Constants.DefaultScope; + let scope = challengeScopes.toString(); + if (scope === "https://disk.azure.com/.default") { + scope = "https://disk.azure.com//.default"; + } + return [scope]; + } + function getChallenge(response) { + const challenge = response.headers.get("WWW-Authenticate"); + if (response.status === 401 && challenge) { + return challenge; + } + return; + } + function parseChallenge(challenge) { + const bearerChallenge = challenge.slice("Bearer ".length); + const challengeParts = `${bearerChallenge.trim()} `.split(" ").filter((x) => x); + const keyValuePairs = challengeParts.map((keyValue) => (([key, value]) => ({ [key]: value }))(keyValue.trim().split("="))); + return keyValuePairs.reduce((a, b) => ({ ...a, ...b }), {}); + } + function requestToOptions(request2) { + return { + abortSignal: request2.abortSignal, + requestOptions: { + timeout: request2.timeout + }, + tracingOptions: request2.tracingOptions + }; + } } }); -// node_modules/@azure/storage-common/dist/commonjs/policies/AnonymousCredentialPolicy.js -var require_AnonymousCredentialPolicy2 = __commonJS({ - "node_modules/@azure/storage-common/dist/commonjs/policies/AnonymousCredentialPolicy.js"(exports2) { +// node_modules/@azure/core-client/dist/commonjs/index.js +var require_commonjs8 = __commonJS({ + "node_modules/@azure/core-client/dist/commonjs/index.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.AnonymousCredentialPolicy = void 0; - var CredentialPolicy_js_1 = require_CredentialPolicy2(); - var AnonymousCredentialPolicy = class extends CredentialPolicy_js_1.CredentialPolicy { + exports2.authorizeRequestOnTenantChallenge = exports2.authorizeRequestOnClaimChallenge = exports2.serializationPolicyName = exports2.serializationPolicy = exports2.deserializationPolicyName = exports2.deserializationPolicy = exports2.XML_CHARKEY = exports2.XML_ATTRKEY = exports2.createClientPipeline = exports2.ServiceClient = exports2.MapperTypeNames = exports2.createSerializer = void 0; + var serializer_js_1 = require_serializer(); + Object.defineProperty(exports2, "createSerializer", { enumerable: true, get: function() { + return serializer_js_1.createSerializer; + } }); + Object.defineProperty(exports2, "MapperTypeNames", { enumerable: true, get: function() { + return serializer_js_1.MapperTypeNames; + } }); + var serviceClient_js_1 = require_serviceClient(); + Object.defineProperty(exports2, "ServiceClient", { enumerable: true, get: function() { + return serviceClient_js_1.ServiceClient; + } }); + var pipeline_js_1 = require_pipeline3(); + Object.defineProperty(exports2, "createClientPipeline", { enumerable: true, get: function() { + return pipeline_js_1.createClientPipeline; + } }); + var interfaces_js_1 = require_interfaces(); + Object.defineProperty(exports2, "XML_ATTRKEY", { enumerable: true, get: function() { + return 
interfaces_js_1.XML_ATTRKEY; + } }); + Object.defineProperty(exports2, "XML_CHARKEY", { enumerable: true, get: function() { + return interfaces_js_1.XML_CHARKEY; + } }); + var deserializationPolicy_js_1 = require_deserializationPolicy(); + Object.defineProperty(exports2, "deserializationPolicy", { enumerable: true, get: function() { + return deserializationPolicy_js_1.deserializationPolicy; + } }); + Object.defineProperty(exports2, "deserializationPolicyName", { enumerable: true, get: function() { + return deserializationPolicy_js_1.deserializationPolicyName; + } }); + var serializationPolicy_js_1 = require_serializationPolicy(); + Object.defineProperty(exports2, "serializationPolicy", { enumerable: true, get: function() { + return serializationPolicy_js_1.serializationPolicy; + } }); + Object.defineProperty(exports2, "serializationPolicyName", { enumerable: true, get: function() { + return serializationPolicy_js_1.serializationPolicyName; + } }); + var authorizeRequestOnClaimChallenge_js_1 = require_authorizeRequestOnClaimChallenge(); + Object.defineProperty(exports2, "authorizeRequestOnClaimChallenge", { enumerable: true, get: function() { + return authorizeRequestOnClaimChallenge_js_1.authorizeRequestOnClaimChallenge; + } }); + var authorizeRequestOnTenantChallenge_js_1 = require_authorizeRequestOnTenantChallenge(); + Object.defineProperty(exports2, "authorizeRequestOnTenantChallenge", { enumerable: true, get: function() { + return authorizeRequestOnTenantChallenge_js_1.authorizeRequestOnTenantChallenge; + } }); + } +}); + +// node_modules/@azure/core-http-compat/dist/commonjs/util.js +var require_util17 = __commonJS({ + "node_modules/@azure/core-http-compat/dist/commonjs/util.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.HttpHeaders = void 0; + exports2.toPipelineRequest = toPipelineRequest; + exports2.toWebResourceLike = toWebResourceLike; + exports2.toHttpHeadersLike = toHttpHeadersLike; + var core_rest_pipeline_1 = require_commonjs6(); + var originalRequestSymbol = /* @__PURE__ */ Symbol("Original PipelineRequest"); + var originalClientRequestSymbol = /* @__PURE__ */ Symbol.for("@azure/core-client original request"); + function toPipelineRequest(webResource, options = {}) { + const compatWebResource = webResource; + const request2 = compatWebResource[originalRequestSymbol]; + const headers = (0, core_rest_pipeline_1.createHttpHeaders)(webResource.headers.toJson({ preserveCase: true })); + if (request2) { + request2.headers = headers; + return request2; + } else { + const newRequest = (0, core_rest_pipeline_1.createPipelineRequest)({ + url: webResource.url, + method: webResource.method, + headers, + withCredentials: webResource.withCredentials, + timeout: webResource.timeout, + requestId: webResource.requestId, + abortSignal: webResource.abortSignal, + body: webResource.body, + formData: webResource.formData, + disableKeepAlive: !!webResource.keepAlive, + onDownloadProgress: webResource.onDownloadProgress, + onUploadProgress: webResource.onUploadProgress, + proxySettings: webResource.proxySettings, + streamResponseStatusCodes: webResource.streamResponseStatusCodes, + agent: webResource.agent, + requestOverrides: webResource.requestOverrides + }); + if (options.originalRequest) { + newRequest[originalClientRequestSymbol] = options.originalRequest; + } + return newRequest; + } + } + function toWebResourceLike(request2, options) { + const originalRequest = options?.originalRequest ?? 
request2; + const webResource = { + url: request2.url, + method: request2.method, + headers: toHttpHeadersLike(request2.headers), + withCredentials: request2.withCredentials, + timeout: request2.timeout, + requestId: request2.headers.get("x-ms-client-request-id") || request2.requestId, + abortSignal: request2.abortSignal, + body: request2.body, + formData: request2.formData, + keepAlive: !!request2.disableKeepAlive, + onDownloadProgress: request2.onDownloadProgress, + onUploadProgress: request2.onUploadProgress, + proxySettings: request2.proxySettings, + streamResponseStatusCodes: request2.streamResponseStatusCodes, + agent: request2.agent, + requestOverrides: request2.requestOverrides, + clone() { + throw new Error("Cannot clone a non-proxied WebResourceLike"); + }, + prepare() { + throw new Error("WebResourceLike.prepare() is not supported by @azure/core-http-compat"); + }, + validateRequestProperties() { + } + }; + if (options?.createProxy) { + return new Proxy(webResource, { + get(target, prop, receiver) { + if (prop === originalRequestSymbol) { + return request2; + } else if (prop === "clone") { + return () => { + return toWebResourceLike(toPipelineRequest(webResource, { originalRequest }), { + createProxy: true, + originalRequest + }); + }; + } + return Reflect.get(target, prop, receiver); + }, + set(target, prop, value, receiver) { + if (prop === "keepAlive") { + request2.disableKeepAlive = !value; + } + const passThroughProps = [ + "url", + "method", + "withCredentials", + "timeout", + "requestId", + "abortSignal", + "body", + "formData", + "onDownloadProgress", + "onUploadProgress", + "proxySettings", + "streamResponseStatusCodes", + "agent", + "requestOverrides" + ]; + if (typeof prop === "string" && passThroughProps.includes(prop)) { + request2[prop] = value; + } + return Reflect.set(target, prop, value, receiver); + } + }); + } else { + return webResource; + } + } + function toHttpHeadersLike(headers) { + return new HttpHeaders(headers.toJSON({ preserveCase: true })); + } + function getHeaderKey(headerName) { + return headerName.toLowerCase(); + } + var HttpHeaders = class _HttpHeaders { + _headersMap; + constructor(rawHeaders) { + this._headersMap = {}; + if (rawHeaders) { + for (const headerName in rawHeaders) { + this.set(headerName, rawHeaders[headerName]); + } + } + } /** - * Creates an instance of AnonymousCredentialPolicy. - * @param nextPolicy - - * @param options - + * Set a header in this collection with the provided name and value. The name is + * case-insensitive. + * @param headerName - The name of the header to set. This value is case-insensitive. + * @param headerValue - The value of the header to set. */ - // The base class has a protected constructor. Adding a public one to enable constructing of this class. - /* eslint-disable-next-line @typescript-eslint/no-useless-constructor*/ - constructor(nextPolicy, options) { - super(nextPolicy, options); + set(headerName, headerValue) { + this._headersMap[getHeaderKey(headerName)] = { + name: headerName, + value: headerValue.toString() + }; + } + /** + * Get the header value for the provided header name, or undefined if no header exists in this + * collection with the provided name. + * @param headerName - The name of the header. + */ + get(headerName) { + const header = this._headersMap[getHeaderKey(headerName)]; + return !header ? void 0 : header.value; + } + /** + * Get whether or not this header collection contains a header entry for the provided header name. 
+ */ + contains(headerName) { + return !!this._headersMap[getHeaderKey(headerName)]; + } + /** + * Remove the header with the provided headerName. Return whether or not the header existed and + * was removed. + * @param headerName - The name of the header to remove. + */ + remove(headerName) { + const result = this.contains(headerName); + delete this._headersMap[getHeaderKey(headerName)]; + return result; + } + /** + * Get the headers that are contained this collection as an object. + */ + rawHeaders() { + return this.toJson({ preserveCase: true }); + } + /** + * Get the headers that are contained in this collection as an array. + */ + headersArray() { + const headers = []; + for (const headerKey in this._headersMap) { + headers.push(this._headersMap[headerKey]); + } + return headers; + } + /** + * Get the header names that are contained in this collection. + */ + headerNames() { + const headerNames = []; + const headers = this.headersArray(); + for (let i = 0; i < headers.length; ++i) { + headerNames.push(headers[i].name); + } + return headerNames; + } + /** + * Get the header values that are contained in this collection. + */ + headerValues() { + const headerValues = []; + const headers = this.headersArray(); + for (let i = 0; i < headers.length; ++i) { + headerValues.push(headers[i].value); + } + return headerValues; + } + /** + * Get the JSON object representation of this HTTP header collection. + */ + toJson(options = {}) { + const result = {}; + if (options.preserveCase) { + for (const headerKey in this._headersMap) { + const header = this._headersMap[headerKey]; + result[header.name] = header.value; + } + } else { + for (const headerKey in this._headersMap) { + const header = this._headersMap[headerKey]; + result[getHeaderKey(header.name)] = header.value; + } + } + return result; + } + /** + * Get the string representation of this HTTP header collection. + */ + toString() { + return JSON.stringify(this.toJson({ preserveCase: true })); + } + /** + * Create a deep clone/copy of this HttpHeaders collection. + */ + clone() { + const resultPreservingCasing = {}; + for (const headerKey in this._headersMap) { + const header = this._headersMap[headerKey]; + resultPreservingCasing[header.name] = header.value; + } + return new _HttpHeaders(resultPreservingCasing); } }; - exports2.AnonymousCredentialPolicy = AnonymousCredentialPolicy; + exports2.HttpHeaders = HttpHeaders; } }); -// node_modules/@azure/storage-common/dist/commonjs/credentials/Credential.js -var require_Credential2 = __commonJS({ - "node_modules/@azure/storage-common/dist/commonjs/credentials/Credential.js"(exports2) { +// node_modules/@azure/core-http-compat/dist/commonjs/response.js +var require_response3 = __commonJS({ + "node_modules/@azure/core-http-compat/dist/commonjs/response.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.Credential = void 0; - var Credential = class { - /** - * Creates a RequestPolicy object. 
- * - * @param _nextPolicy - - * @param _options - - */ - create(_nextPolicy, _options) { - throw new Error("Method should be implemented in children classes."); + exports2.toCompatResponse = toCompatResponse; + exports2.toPipelineResponse = toPipelineResponse; + var core_rest_pipeline_1 = require_commonjs6(); + var util_js_1 = require_util17(); + var originalResponse = /* @__PURE__ */ Symbol("Original FullOperationResponse"); + function toCompatResponse(response, options) { + let request2 = (0, util_js_1.toWebResourceLike)(response.request); + let headers = (0, util_js_1.toHttpHeadersLike)(response.headers); + if (options?.createProxy) { + return new Proxy(response, { + get(target, prop, receiver) { + if (prop === "headers") { + return headers; + } else if (prop === "request") { + return request2; + } else if (prop === originalResponse) { + return response; + } + return Reflect.get(target, prop, receiver); + }, + set(target, prop, value, receiver) { + if (prop === "headers") { + headers = value; + } else if (prop === "request") { + request2 = value; + } + return Reflect.set(target, prop, value, receiver); + } + }); + } else { + return { + ...response, + request: request2, + headers + }; } - }; - exports2.Credential = Credential; + } + function toPipelineResponse(compatResponse) { + const extendedCompatResponse = compatResponse; + const response = extendedCompatResponse[originalResponse]; + const headers = (0, core_rest_pipeline_1.createHttpHeaders)(compatResponse.headers.toJson({ preserveCase: true })); + if (response) { + response.headers = headers; + return response; + } else { + return { + ...compatResponse, + headers, + request: (0, util_js_1.toPipelineRequest)(compatResponse.request) + }; + } + } } }); -// node_modules/@azure/storage-common/dist/commonjs/credentials/AnonymousCredential.js -var require_AnonymousCredential2 = __commonJS({ - "node_modules/@azure/storage-common/dist/commonjs/credentials/AnonymousCredential.js"(exports2) { +// node_modules/@azure/core-http-compat/dist/commonjs/extendedClient.js +var require_extendedClient = __commonJS({ + "node_modules/@azure/core-http-compat/dist/commonjs/extendedClient.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.AnonymousCredential = void 0; - var AnonymousCredentialPolicy_js_1 = require_AnonymousCredentialPolicy2(); - var Credential_js_1 = require_Credential2(); - var AnonymousCredential = class extends Credential_js_1.Credential { + exports2.ExtendedServiceClient = void 0; + var disableKeepAlivePolicy_js_1 = require_disableKeepAlivePolicy(); + var core_rest_pipeline_1 = require_commonjs6(); + var core_client_1 = require_commonjs8(); + var response_js_1 = require_response3(); + var ExtendedServiceClient = class extends core_client_1.ServiceClient { + constructor(options) { + super(options); + if (options.keepAliveOptions?.enable === false && !(0, disableKeepAlivePolicy_js_1.pipelineContainsDisableKeepAlivePolicy)(this.pipeline)) { + this.pipeline.addPolicy((0, disableKeepAlivePolicy_js_1.createDisableKeepAlivePolicy)()); + } + if (options.redirectOptions?.handleRedirects === false) { + this.pipeline.removePolicy({ + name: core_rest_pipeline_1.redirectPolicyName + }); + } + } /** - * Creates an {@link AnonymousCredentialPolicy} object. + * Compatible send operation request function. 
* - * @param nextPolicy - - * @param options - + * @param operationArguments - Operation arguments + * @param operationSpec - Operation Spec + * @returns */ - create(nextPolicy, options) { - return new AnonymousCredentialPolicy_js_1.AnonymousCredentialPolicy(nextPolicy, options); + async sendOperationRequest(operationArguments, operationSpec) { + const userProvidedCallBack = operationArguments?.options?.onResponse; + let lastResponse; + function onResponse(rawResponse, flatResponse, error3) { + lastResponse = rawResponse; + if (userProvidedCallBack) { + userProvidedCallBack(rawResponse, flatResponse, error3); + } + } + operationArguments.options = { + ...operationArguments.options, + onResponse + }; + const result = await super.sendOperationRequest(operationArguments, operationSpec); + if (lastResponse) { + Object.defineProperty(result, "_response", { + value: (0, response_js_1.toCompatResponse)(lastResponse) + }); + } + return result; } }; - exports2.AnonymousCredential = AnonymousCredential; + exports2.ExtendedServiceClient = ExtendedServiceClient; } }); -// node_modules/@azure/storage-common/dist/commonjs/utils/SharedKeyComparator.js -var require_SharedKeyComparator2 = __commonJS({ - "node_modules/@azure/storage-common/dist/commonjs/utils/SharedKeyComparator.js"(exports2) { +// node_modules/@azure/core-http-compat/dist/commonjs/policies/requestPolicyFactoryPolicy.js +var require_requestPolicyFactoryPolicy = __commonJS({ + "node_modules/@azure/core-http-compat/dist/commonjs/policies/requestPolicyFactoryPolicy.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.compareHeader = compareHeader; - var table_lv0 = new Uint32Array([ - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 1820, - 0, - 1823, - 1825, - 1827, - 1829, - 0, - 0, - 0, - 1837, - 2051, - 0, - 0, - 1843, - 0, - 3331, - 3354, - 3356, - 3358, - 3360, - 3362, - 3364, - 3366, - 3368, - 3370, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 3586, - 3593, - 3594, - 3610, - 3617, - 3619, - 3621, - 3628, - 3634, - 3637, - 3638, - 3656, - 3665, - 3696, - 3708, - 3710, - 3721, - 3722, - 3729, - 3737, - 3743, - 3746, - 3748, - 3750, - 3751, - 3753, - 0, - 0, - 0, - 1859, - 1860, - 1864, - 3586, - 3593, - 3594, - 3610, - 3617, - 3619, - 3621, - 3628, - 3634, - 3637, - 3638, - 3656, - 3665, - 3696, - 3708, - 3710, - 3721, - 3722, - 3729, - 3737, - 3743, - 3746, - 3748, - 3750, - 3751, - 3753, - 0, - 1868, - 0, - 1872, - 0 - ]); - var table_lv2 = new Uint32Array([ - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0 - ]); - var table_lv4 = new Uint32Array([ - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 32786, - 0, - 0, - 0, - 0, - 0, - 
33298, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0 - ]); - function compareHeader(lhs, rhs) { - if (isLessThan(lhs, rhs)) - return -1; - return 1; - } - function isLessThan(lhs, rhs) { - const tables = [table_lv0, table_lv2, table_lv4]; - let curr_level = 0; - let i = 0; - let j = 0; - while (curr_level < tables.length) { - if (curr_level === tables.length - 1 && i !== j) { - return i > j; - } - const weight1 = i < lhs.length ? tables[curr_level][lhs[i].charCodeAt(0)] : 1; - const weight2 = j < rhs.length ? tables[curr_level][rhs[j].charCodeAt(0)] : 1; - if (weight1 === 1 && weight2 === 1) { - i = 0; - j = 0; - ++curr_level; - } else if (weight1 === weight2) { - ++i; - ++j; - } else if (weight1 === 0) { - ++i; - } else if (weight2 === 0) { - ++j; - } else { - return weight1 < weight2; - } + exports2.requestPolicyFactoryPolicyName = exports2.HttpPipelineLogLevel = void 0; + exports2.createRequestPolicyFactoryPolicy = createRequestPolicyFactoryPolicy; + var util_js_1 = require_util17(); + var response_js_1 = require_response3(); + var HttpPipelineLogLevel; + (function(HttpPipelineLogLevel2) { + HttpPipelineLogLevel2[HttpPipelineLogLevel2["ERROR"] = 1] = "ERROR"; + HttpPipelineLogLevel2[HttpPipelineLogLevel2["INFO"] = 3] = "INFO"; + HttpPipelineLogLevel2[HttpPipelineLogLevel2["OFF"] = 0] = "OFF"; + HttpPipelineLogLevel2[HttpPipelineLogLevel2["WARNING"] = 2] = "WARNING"; + })(HttpPipelineLogLevel || (exports2.HttpPipelineLogLevel = HttpPipelineLogLevel = {})); + var mockRequestPolicyOptions = { + log(_logLevel, _message) { + }, + shouldLog(_logLevel) { + return false; } - return false; + }; + exports2.requestPolicyFactoryPolicyName = "RequestPolicyFactoryPolicy"; + function createRequestPolicyFactoryPolicy(factories) { + const orderedFactories = factories.slice().reverse(); + return { + name: exports2.requestPolicyFactoryPolicyName, + async sendRequest(request2, next) { + let httpPipeline = { + async sendRequest(httpRequest) { + const response2 = await next((0, util_js_1.toPipelineRequest)(httpRequest)); + return (0, response_js_1.toCompatResponse)(response2, { createProxy: true }); + } + }; + for (const factory of orderedFactories) { + httpPipeline = factory.create(httpPipeline, mockRequestPolicyOptions); + } + const webResourceLike = (0, util_js_1.toWebResourceLike)(request2, { createProxy: true }); + const response = await httpPipeline.sendRequest(webResourceLike); + return (0, response_js_1.toPipelineResponse)(response); + } + }; } } }); -// node_modules/@azure/storage-common/dist/commonjs/policies/StorageSharedKeyCredentialPolicy.js -var require_StorageSharedKeyCredentialPolicy2 = __commonJS({ - "node_modules/@azure/storage-common/dist/commonjs/policies/StorageSharedKeyCredentialPolicy.js"(exports2) { +// node_modules/@azure/core-http-compat/dist/commonjs/httpClientAdapter.js +var require_httpClientAdapter = __commonJS({ + "node_modules/@azure/core-http-compat/dist/commonjs/httpClientAdapter.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.StorageSharedKeyCredentialPolicy = void 0; - var constants_js_1 = require_constants16(); - var utils_common_js_1 = 
require_utils_common2(); - var CredentialPolicy_js_1 = require_CredentialPolicy2(); - var SharedKeyComparator_js_1 = require_SharedKeyComparator2(); - var StorageSharedKeyCredentialPolicy = class extends CredentialPolicy_js_1.CredentialPolicy { - /** - * Reference to StorageSharedKeyCredential which generates StorageSharedKeyCredentialPolicy - */ - factory; - /** - * Creates an instance of StorageSharedKeyCredentialPolicy. - * @param nextPolicy - - * @param options - - * @param factory - - */ - constructor(nextPolicy, options, factory) { - super(nextPolicy, options); - this.factory = factory; - } - /** - * Signs request. - * - * @param request - - */ - signRequest(request2) { - request2.headers.set(constants_js_1.HeaderConstants.X_MS_DATE, (/* @__PURE__ */ new Date()).toUTCString()); - if (request2.body && (typeof request2.body === "string" || request2.body !== void 0) && request2.body.length > 0) { - request2.headers.set(constants_js_1.HeaderConstants.CONTENT_LENGTH, Buffer.byteLength(request2.body)); + exports2.convertHttpClient = convertHttpClient; + var response_js_1 = require_response3(); + var util_js_1 = require_util17(); + function convertHttpClient(requestPolicyClient) { + return { + sendRequest: async (request2) => { + const response = await requestPolicyClient.sendRequest((0, util_js_1.toWebResourceLike)(request2, { createProxy: true })); + return (0, response_js_1.toPipelineResponse)(response); } - const stringToSign = [ - request2.method.toUpperCase(), - this.getHeaderValueToSign(request2, constants_js_1.HeaderConstants.CONTENT_LANGUAGE), - this.getHeaderValueToSign(request2, constants_js_1.HeaderConstants.CONTENT_ENCODING), - this.getHeaderValueToSign(request2, constants_js_1.HeaderConstants.CONTENT_LENGTH), - this.getHeaderValueToSign(request2, constants_js_1.HeaderConstants.CONTENT_MD5), - this.getHeaderValueToSign(request2, constants_js_1.HeaderConstants.CONTENT_TYPE), - this.getHeaderValueToSign(request2, constants_js_1.HeaderConstants.DATE), - this.getHeaderValueToSign(request2, constants_js_1.HeaderConstants.IF_MODIFIED_SINCE), - this.getHeaderValueToSign(request2, constants_js_1.HeaderConstants.IF_MATCH), - this.getHeaderValueToSign(request2, constants_js_1.HeaderConstants.IF_NONE_MATCH), - this.getHeaderValueToSign(request2, constants_js_1.HeaderConstants.IF_UNMODIFIED_SINCE), - this.getHeaderValueToSign(request2, constants_js_1.HeaderConstants.RANGE) - ].join("\n") + "\n" + this.getCanonicalizedHeadersString(request2) + this.getCanonicalizedResourceString(request2); - const signature = this.factory.computeHMACSHA256(stringToSign); - request2.headers.set(constants_js_1.HeaderConstants.AUTHORIZATION, `SharedKey ${this.factory.accountName}:${signature}`); - return request2; - } - /** - * Retrieve header value according to shared key sign rules. - * @see https://learn.microsoft.com/en-us/rest/api/storageservices/authenticate-with-shared-key - * - * @param request - - * @param headerName - - */ - getHeaderValueToSign(request2, headerName) { - const value = request2.headers.get(headerName); - if (!value) { - return ""; - } - if (headerName === constants_js_1.HeaderConstants.CONTENT_LENGTH && value === "0") { - return ""; - } - return value; - } - /** - * To construct the CanonicalizedHeaders portion of the signature string, follow these steps: - * 1. Retrieve all headers for the resource that begin with x-ms-, including the x-ms-date header. - * 2. Convert each HTTP header name to lowercase. - * 3. Sort the headers lexicographically by header name, in ascending order. 
- * Each header may appear only once in the string. - * 4. Replace any linear whitespace in the header value with a single space. - * 5. Trim any whitespace around the colon in the header. - * 6. Finally, append a new-line character to each canonicalized header in the resulting list. - * Construct the CanonicalizedHeaders string by concatenating all headers in this list into a single string. - * - * @param request - - */ - getCanonicalizedHeadersString(request2) { - let headersArray = request2.headers.headersArray().filter((value) => { - return value.name.toLowerCase().startsWith(constants_js_1.HeaderConstants.PREFIX_FOR_STORAGE); - }); - headersArray.sort((a, b) => { - return (0, SharedKeyComparator_js_1.compareHeader)(a.name.toLowerCase(), b.name.toLowerCase()); - }); - headersArray = headersArray.filter((value, index, array) => { - if (index > 0 && value.name.toLowerCase() === array[index - 1].name.toLowerCase()) { - return false; - } - return true; - }); - let canonicalizedHeadersStringToSign = ""; - headersArray.forEach((header) => { - canonicalizedHeadersStringToSign += `${header.name.toLowerCase().trimRight()}:${header.value.trimLeft()} -`; - }); - return canonicalizedHeadersStringToSign; - } - /** - * Retrieves the webResource canonicalized resource string. - * - * @param request - - */ - getCanonicalizedResourceString(request2) { - const path13 = (0, utils_common_js_1.getURLPath)(request2.url) || "/"; - let canonicalizedResourceString = ""; - canonicalizedResourceString += `/${this.factory.accountName}${path13}`; - const queries = (0, utils_common_js_1.getURLQueries)(request2.url); - const lowercaseQueries = {}; - if (queries) { - const queryKeys = []; - for (const key in queries) { - if (Object.prototype.hasOwnProperty.call(queries, key)) { - const lowercaseKey = key.toLowerCase(); - lowercaseQueries[lowercaseKey] = queries[key]; - queryKeys.push(lowercaseKey); - } - } - queryKeys.sort(); - for (const key of queryKeys) { - canonicalizedResourceString += ` -${key}:${decodeURIComponent(lowercaseQueries[key])}`; - } - } - return canonicalizedResourceString; - } - }; - exports2.StorageSharedKeyCredentialPolicy = StorageSharedKeyCredentialPolicy; - } -}); - -// node_modules/@azure/storage-common/dist/commonjs/credentials/StorageSharedKeyCredential.js -var require_StorageSharedKeyCredential2 = __commonJS({ - "node_modules/@azure/storage-common/dist/commonjs/credentials/StorageSharedKeyCredential.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.StorageSharedKeyCredential = void 0; - var node_crypto_1 = require("node:crypto"); - var StorageSharedKeyCredentialPolicy_js_1 = require_StorageSharedKeyCredentialPolicy2(); - var Credential_js_1 = require_Credential2(); - var StorageSharedKeyCredential = class extends Credential_js_1.Credential { - /** - * Azure Storage account name; readonly. - */ - accountName; - /** - * Azure Storage account key; readonly. - */ - accountKey; - /** - * Creates an instance of StorageSharedKeyCredential. - * @param accountName - - * @param accountKey - - */ - constructor(accountName, accountKey) { - super(); - this.accountName = accountName; - this.accountKey = Buffer.from(accountKey, "base64"); - } - /** - * Creates a StorageSharedKeyCredentialPolicy object. 
- * - * @param nextPolicy - - * @param options - - */ - create(nextPolicy, options) { - return new StorageSharedKeyCredentialPolicy_js_1.StorageSharedKeyCredentialPolicy(nextPolicy, options, this); - } - /** - * Generates a hash signature for an HTTP request or for a SAS. - * - * @param stringToSign - - */ - computeHMACSHA256(stringToSign) { - return (0, node_crypto_1.createHmac)("sha256", this.accountKey).update(stringToSign, "utf8").digest("base64"); - } - }; - exports2.StorageSharedKeyCredential = StorageSharedKeyCredential; - } -}); - -// node_modules/@azure/storage-common/node_modules/@azure/abort-controller/dist/commonjs/AbortError.js -var require_AbortError4 = __commonJS({ - "node_modules/@azure/storage-common/node_modules/@azure/abort-controller/dist/commonjs/AbortError.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.AbortError = void 0; - var AbortError = class extends Error { - constructor(message) { - super(message); - this.name = "AbortError"; - } - }; - exports2.AbortError = AbortError; + }; + } } }); -// node_modules/@azure/storage-common/node_modules/@azure/abort-controller/dist/commonjs/index.js -var require_commonjs12 = __commonJS({ - "node_modules/@azure/storage-common/node_modules/@azure/abort-controller/dist/commonjs/index.js"(exports2) { +// node_modules/@azure/core-http-compat/dist/commonjs/index.js +var require_commonjs9 = __commonJS({ + "node_modules/@azure/core-http-compat/dist/commonjs/index.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.AbortError = void 0; - var AbortError_js_1 = require_AbortError4(); - Object.defineProperty(exports2, "AbortError", { enumerable: true, get: function() { - return AbortError_js_1.AbortError; + exports2.toHttpHeadersLike = exports2.convertHttpClient = exports2.disableKeepAlivePolicyName = exports2.HttpPipelineLogLevel = exports2.createRequestPolicyFactoryPolicy = exports2.requestPolicyFactoryPolicyName = exports2.ExtendedServiceClient = void 0; + var extendedClient_js_1 = require_extendedClient(); + Object.defineProperty(exports2, "ExtendedServiceClient", { enumerable: true, get: function() { + return extendedClient_js_1.ExtendedServiceClient; + } }); + var requestPolicyFactoryPolicy_js_1 = require_requestPolicyFactoryPolicy(); + Object.defineProperty(exports2, "requestPolicyFactoryPolicyName", { enumerable: true, get: function() { + return requestPolicyFactoryPolicy_js_1.requestPolicyFactoryPolicyName; + } }); + Object.defineProperty(exports2, "createRequestPolicyFactoryPolicy", { enumerable: true, get: function() { + return requestPolicyFactoryPolicy_js_1.createRequestPolicyFactoryPolicy; + } }); + Object.defineProperty(exports2, "HttpPipelineLogLevel", { enumerable: true, get: function() { + return requestPolicyFactoryPolicy_js_1.HttpPipelineLogLevel; + } }); + var disableKeepAlivePolicy_js_1 = require_disableKeepAlivePolicy(); + Object.defineProperty(exports2, "disableKeepAlivePolicyName", { enumerable: true, get: function() { + return disableKeepAlivePolicy_js_1.disableKeepAlivePolicyName; + } }); + var httpClientAdapter_js_1 = require_httpClientAdapter(); + Object.defineProperty(exports2, "convertHttpClient", { enumerable: true, get: function() { + return httpClientAdapter_js_1.convertHttpClient; + } }); + var util_js_1 = require_util17(); + Object.defineProperty(exports2, "toHttpHeadersLike", { enumerable: true, get: function() { + return util_js_1.toHttpHeadersLike; } }); } }); -// 
node_modules/@azure/storage-common/dist/commonjs/log.js -var require_log6 = __commonJS({ - "node_modules/@azure/storage-common/dist/commonjs/log.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.logger = void 0; - var logger_1 = require_commonjs2(); - exports2.logger = (0, logger_1.createClientLogger)("storage-common"); - } -}); - -// node_modules/@azure/storage-common/dist/commonjs/policies/StorageRetryPolicyType.js -var require_StorageRetryPolicyType2 = __commonJS({ - "node_modules/@azure/storage-common/dist/commonjs/policies/StorageRetryPolicyType.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.StorageRetryPolicyType = void 0; - var StorageRetryPolicyType; - (function(StorageRetryPolicyType2) { - StorageRetryPolicyType2[StorageRetryPolicyType2["EXPONENTIAL"] = 0] = "EXPONENTIAL"; - StorageRetryPolicyType2[StorageRetryPolicyType2["FIXED"] = 1] = "FIXED"; - })(StorageRetryPolicyType || (exports2.StorageRetryPolicyType = StorageRetryPolicyType = {})); - } -}); - -// node_modules/@azure/storage-common/dist/commonjs/policies/StorageRetryPolicy.js -var require_StorageRetryPolicy2 = __commonJS({ - "node_modules/@azure/storage-common/dist/commonjs/policies/StorageRetryPolicy.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.StorageRetryPolicy = void 0; - exports2.NewRetryPolicyFactory = NewRetryPolicyFactory; - var abort_controller_1 = require_commonjs12(); - var RequestPolicy_js_1 = require_RequestPolicy2(); - var constants_js_1 = require_constants16(); - var utils_common_js_1 = require_utils_common2(); - var log_js_1 = require_log6(); - var StorageRetryPolicyType_js_1 = require_StorageRetryPolicyType2(); - function NewRetryPolicyFactory(retryOptions) { - return { - create: (nextPolicy, options) => { - return new StorageRetryPolicy(nextPolicy, options, retryOptions); +// node_modules/fast-xml-parser/lib/fxp.cjs +var require_fxp = __commonJS({ + "node_modules/fast-xml-parser/lib/fxp.cjs"(exports2, module2) { + (() => { + "use strict"; + var t = { d: (e2, n2) => { + for (var i2 in n2) t.o(n2, i2) && !t.o(e2, i2) && Object.defineProperty(e2, i2, { enumerable: true, get: n2[i2] }); + }, o: (t2, e2) => Object.prototype.hasOwnProperty.call(t2, e2), r: (t2) => { + "undefined" != typeof Symbol && Symbol.toStringTag && Object.defineProperty(t2, Symbol.toStringTag, { value: "Module" }), Object.defineProperty(t2, "__esModule", { value: true }); + } }, e = {}; + t.r(e), t.d(e, { XMLBuilder: () => dt, XMLParser: () => it, XMLValidator: () => gt }); + const n = ":A-Za-z_\\u00C0-\\u00D6\\u00D8-\\u00F6\\u00F8-\\u02FF\\u0370-\\u037D\\u037F-\\u1FFF\\u200C-\\u200D\\u2070-\\u218F\\u2C00-\\u2FEF\\u3001-\\uD7FF\\uF900-\\uFDCF\\uFDF0-\\uFFFD", i = new RegExp("^[" + n + "][" + n + "\\-.\\d\\u00B7\\u0300-\\u036F\\u203F-\\u2040]*$"); + function s(t2, e2) { + const n2 = []; + let i2 = e2.exec(t2); + for (; i2; ) { + const s2 = []; + s2.startIndex = e2.lastIndex - i2[0].length; + const r2 = i2.length; + for (let t3 = 0; t3 < r2; t3++) s2.push(i2[t3]); + n2.push(s2), i2 = e2.exec(t2); } - }; - } - var DEFAULT_RETRY_OPTIONS = { - maxRetryDelayInMs: 120 * 1e3, - maxTries: 4, - retryDelayInMs: 4 * 1e3, - retryPolicyType: StorageRetryPolicyType_js_1.StorageRetryPolicyType.EXPONENTIAL, - secondaryHost: "", - tryTimeoutInMs: void 0 - // Use server side default timeout strategy - }; - var RETRY_ABORT_ERROR = new abort_controller_1.AbortError("The 
operation was aborted."); - var StorageRetryPolicy = class extends RequestPolicy_js_1.BaseRequestPolicy { - /** - * RetryOptions. - */ - retryOptions; - /** - * Creates an instance of RetryPolicy. - * - * @param nextPolicy - - * @param options - - * @param retryOptions - - */ - constructor(nextPolicy, options, retryOptions = DEFAULT_RETRY_OPTIONS) { - super(nextPolicy, options); - this.retryOptions = { - retryPolicyType: retryOptions.retryPolicyType ? retryOptions.retryPolicyType : DEFAULT_RETRY_OPTIONS.retryPolicyType, - maxTries: retryOptions.maxTries && retryOptions.maxTries >= 1 ? Math.floor(retryOptions.maxTries) : DEFAULT_RETRY_OPTIONS.maxTries, - tryTimeoutInMs: retryOptions.tryTimeoutInMs && retryOptions.tryTimeoutInMs >= 0 ? retryOptions.tryTimeoutInMs : DEFAULT_RETRY_OPTIONS.tryTimeoutInMs, - retryDelayInMs: retryOptions.retryDelayInMs && retryOptions.retryDelayInMs >= 0 ? Math.min(retryOptions.retryDelayInMs, retryOptions.maxRetryDelayInMs ? retryOptions.maxRetryDelayInMs : DEFAULT_RETRY_OPTIONS.maxRetryDelayInMs) : DEFAULT_RETRY_OPTIONS.retryDelayInMs, - maxRetryDelayInMs: retryOptions.maxRetryDelayInMs && retryOptions.maxRetryDelayInMs >= 0 ? retryOptions.maxRetryDelayInMs : DEFAULT_RETRY_OPTIONS.maxRetryDelayInMs, - secondaryHost: retryOptions.secondaryHost ? retryOptions.secondaryHost : DEFAULT_RETRY_OPTIONS.secondaryHost - }; - } - /** - * Sends request. - * - * @param request - - */ - async sendRequest(request2) { - return this.attemptSendRequest(request2, false, 1); + return n2; } - /** - * Decide and perform next retry. Won't mutate request parameter. - * - * @param request - - * @param secondaryHas404 - If attempt was against the secondary & it returned a StatusNotFound (404), then - * the resource was not found. This may be due to replication delay. So, in this - * case, we'll never try the secondary again for this operation. - * @param attempt - How many retries has been attempted to performed, starting from 1, which includes - * the attempt will be performed by this method call. - */ - async attemptSendRequest(request2, secondaryHas404, attempt) { - const newRequest = request2.clone(); - const isPrimaryRetry = secondaryHas404 || !this.retryOptions.secondaryHost || !(request2.method === "GET" || request2.method === "HEAD" || request2.method === "OPTIONS") || attempt % 2 === 1; - if (!isPrimaryRetry) { - newRequest.url = (0, utils_common_js_1.setURLHost)(newRequest.url, this.retryOptions.secondaryHost); - } - if (this.retryOptions.tryTimeoutInMs) { - newRequest.url = (0, utils_common_js_1.setURLParameter)(newRequest.url, constants_js_1.URLConstants.Parameters.TIMEOUT, Math.floor(this.retryOptions.tryTimeoutInMs / 1e3).toString()); - } - let response; - try { - log_js_1.logger.info(`RetryPolicy: =====> Try=${attempt} ${isPrimaryRetry ? "Primary" : "Secondary"}`); - response = await this._nextPolicy.sendRequest(newRequest); - if (!this.shouldRetry(isPrimaryRetry, attempt, response)) { - return response; + const r = function(t2) { + return !(null == i.exec(t2)); + }, o = { allowBooleanAttributes: false, unpairedTags: [] }; + function a(t2, e2) { + e2 = Object.assign({}, o, e2); + const n2 = []; + let i2 = false, s2 = false; + "\uFEFF" === t2[0] && (t2 = t2.substr(1)); + for (let o2 = 0; o2 < t2.length; o2++) if ("<" === t2[o2] && "?" 
=== t2[o2 + 1]) { + if (o2 += 2, o2 = u(t2, o2), o2.err) return o2; + } else { + if ("<" !== t2[o2]) { + if (l(t2[o2])) continue; + return m("InvalidChar", "char '" + t2[o2] + "' is not expected.", b(t2, o2)); } - secondaryHas404 = secondaryHas404 || !isPrimaryRetry && response.status === 404; - } catch (err) { - log_js_1.logger.error(`RetryPolicy: Caught error, message: ${err.message}, code: ${err.code}`); - if (!this.shouldRetry(isPrimaryRetry, attempt, response, err)) { - throw err; + { + let a2 = o2; + if (o2++, "!" === t2[o2]) { + o2 = h(t2, o2); + continue; + } + { + let d2 = false; + "/" === t2[o2] && (d2 = true, o2++); + let p2 = ""; + for (; o2 < t2.length && ">" !== t2[o2] && " " !== t2[o2] && " " !== t2[o2] && "\n" !== t2[o2] && "\r" !== t2[o2]; o2++) p2 += t2[o2]; + if (p2 = p2.trim(), "/" === p2[p2.length - 1] && (p2 = p2.substring(0, p2.length - 1), o2--), !r(p2)) { + let e3; + return e3 = 0 === p2.trim().length ? "Invalid space after '<'." : "Tag '" + p2 + "' is an invalid name.", m("InvalidTag", e3, b(t2, o2)); + } + const c2 = f(t2, o2); + if (false === c2) return m("InvalidAttr", "Attributes for '" + p2 + "' have open quote.", b(t2, o2)); + let E2 = c2.value; + if (o2 = c2.index, "/" === E2[E2.length - 1]) { + const n3 = o2 - E2.length; + E2 = E2.substring(0, E2.length - 1); + const s3 = g(E2, e2); + if (true !== s3) return m(s3.err.code, s3.err.msg, b(t2, n3 + s3.err.line)); + i2 = true; + } else if (d2) { + if (!c2.tagClosed) return m("InvalidTag", "Closing tag '" + p2 + "' doesn't have proper closing.", b(t2, o2)); + if (E2.trim().length > 0) return m("InvalidTag", "Closing tag '" + p2 + "' can't have attributes or invalid starting.", b(t2, a2)); + if (0 === n2.length) return m("InvalidTag", "Closing tag '" + p2 + "' has not been opened.", b(t2, a2)); + { + const e3 = n2.pop(); + if (p2 !== e3.tagName) { + let n3 = b(t2, e3.tagStartPos); + return m("InvalidTag", "Expected closing tag '" + e3.tagName + "' (opened in line " + n3.line + ", col " + n3.col + ") instead of closing tag '" + p2 + "'.", b(t2, a2)); + } + 0 == n2.length && (s2 = true); + } + } else { + const r2 = g(E2, e2); + if (true !== r2) return m(r2.err.code, r2.err.msg, b(t2, o2 - E2.length + r2.err.line)); + if (true === s2) return m("InvalidXml", "Multiple possible root nodes found.", b(t2, o2)); + -1 !== e2.unpairedTags.indexOf(p2) || n2.push({ tagName: p2, tagStartPos: a2 }), i2 = true; + } + for (o2++; o2 < t2.length; o2++) if ("<" === t2[o2]) { + if ("!" === t2[o2 + 1]) { + o2++, o2 = h(t2, o2); + continue; + } + if ("?" !== t2[o2 + 1]) break; + if (o2 = u(t2, ++o2), o2.err) return o2; + } else if ("&" === t2[o2]) { + const e3 = x(t2, o2); + if (-1 == e3) return m("InvalidChar", "char '&' is not expected.", b(t2, o2)); + o2 = e3; + } else if (true === s2 && !l(t2[o2])) return m("InvalidXml", "Extra text at the end", b(t2, o2)); + "<" === t2[o2] && o2--; + } } } - await this.delay(isPrimaryRetry, attempt, request2.abortSignal); - return this.attemptSendRequest(request2, secondaryHas404, ++attempt); + return i2 ? 1 == n2.length ? m("InvalidTag", "Unclosed tag '" + n2[0].tagName + "'.", b(t2, n2[0].tagStartPos)) : !(n2.length > 0) || m("InvalidXml", "Invalid '" + JSON.stringify(n2.map(((t3) => t3.tagName)), null, 4).replace(/\r?\n/g, "") + "' found.", { line: 1, col: 1 }) : m("InvalidXml", "Start tag expected.", 1); } - /** - * Decide whether to retry according to last HTTP response and retry counters. 
- * - * @param isPrimaryRetry - - * @param attempt - - * @param response - - * @param err - - */ - shouldRetry(isPrimaryRetry, attempt, response, err) { - if (attempt >= this.retryOptions.maxTries) { - log_js_1.logger.info(`RetryPolicy: Attempt(s) ${attempt} >= maxTries ${this.retryOptions.maxTries}, no further try.`); - return false; - } - const retriableErrors = [ - "ETIMEDOUT", - "ESOCKETTIMEDOUT", - "ECONNREFUSED", - "ECONNRESET", - "ENOENT", - "ENOTFOUND", - "TIMEOUT", - "EPIPE", - "REQUEST_SEND_ERROR" - // For default xhr based http client provided in ms-rest-js - ]; - if (err) { - for (const retriableError of retriableErrors) { - if (err.name.toUpperCase().includes(retriableError) || err.message.toUpperCase().includes(retriableError) || err.code && err.code.toString().toUpperCase() === retriableError) { - log_js_1.logger.info(`RetryPolicy: Network error ${retriableError} found, will retry.`); - return true; - } + function l(t2) { + return " " === t2 || " " === t2 || "\n" === t2 || "\r" === t2; + } + function u(t2, e2) { + const n2 = e2; + for (; e2 < t2.length; e2++) if ("?" != t2[e2] && " " != t2[e2]) ; + else { + const i2 = t2.substr(n2, e2 - n2); + if (e2 > 5 && "xml" === i2) return m("InvalidXml", "XML declaration allowed only at the start of the document.", b(t2, e2)); + if ("?" == t2[e2] && ">" == t2[e2 + 1]) { + e2++; + break; } } - if (response || err) { - const statusCode = response ? response.status : err ? err.statusCode : 0; - if (!isPrimaryRetry && statusCode === 404) { - log_js_1.logger.info(`RetryPolicy: Secondary access with 404, will retry.`); - return true; + return e2; + } + function h(t2, e2) { + if (t2.length > e2 + 5 && "-" === t2[e2 + 1] && "-" === t2[e2 + 2]) { + for (e2 += 3; e2 < t2.length; e2++) if ("-" === t2[e2] && "-" === t2[e2 + 1] && ">" === t2[e2 + 2]) { + e2 += 2; + break; } - if (statusCode === 503 || statusCode === 500) { - log_js_1.logger.info(`RetryPolicy: Will retry for status code ${statusCode}.`); - return true; + } else if (t2.length > e2 + 8 && "D" === t2[e2 + 1] && "O" === t2[e2 + 2] && "C" === t2[e2 + 3] && "T" === t2[e2 + 4] && "Y" === t2[e2 + 5] && "P" === t2[e2 + 6] && "E" === t2[e2 + 7]) { + let n2 = 1; + for (e2 += 8; e2 < t2.length; e2++) if ("<" === t2[e2]) n2++; + else if (">" === t2[e2] && (n2--, 0 === n2)) break; + } else if (t2.length > e2 + 9 && "[" === t2[e2 + 1] && "C" === t2[e2 + 2] && "D" === t2[e2 + 3] && "A" === t2[e2 + 4] && "T" === t2[e2 + 5] && "A" === t2[e2 + 6] && "[" === t2[e2 + 7]) { + for (e2 += 8; e2 < t2.length; e2++) if ("]" === t2[e2] && "]" === t2[e2 + 1] && ">" === t2[e2 + 2]) { + e2 += 2; + break; } } - if (response) { - if (response?.status >= 400) { - const copySourceError = response.headers.get(constants_js_1.HeaderConstants.X_MS_CopySourceErrorCode); - if (copySourceError !== void 0) { - switch (copySourceError) { - case "InternalError": - case "OperationTimedOut": - case "ServerBusy": - return true; - } - } + return e2; + } + const d = '"', p = "'"; + function f(t2, e2) { + let n2 = "", i2 = "", s2 = false; + for (; e2 < t2.length; e2++) { + if (t2[e2] === d || t2[e2] === p) "" === i2 ? 
i2 = t2[e2] : i2 !== t2[e2] || (i2 = ""); + else if (">" === t2[e2] && "" === i2) { + s2 = true; + break; } + n2 += t2[e2]; } - if (err?.code === "PARSE_ERROR" && err?.message.startsWith(`Error "Error: Unclosed root tag`)) { - log_js_1.logger.info("RetryPolicy: Incomplete XML response likely due to service timeout, will retry."); - return true; + return "" === i2 && { value: n2, index: e2, tagClosed: s2 }; + } + const c = new RegExp(`(\\s*)([^\\s=]+)(\\s*=)?(\\s*(['"])(([\\s\\S])*?)\\5)?`, "g"); + function g(t2, e2) { + const n2 = s(t2, c), i2 = {}; + for (let t3 = 0; t3 < n2.length; t3++) { + if (0 === n2[t3][1].length) return m("InvalidAttr", "Attribute '" + n2[t3][2] + "' has no space in starting.", N(n2[t3])); + if (void 0 !== n2[t3][3] && void 0 === n2[t3][4]) return m("InvalidAttr", "Attribute '" + n2[t3][2] + "' is without value.", N(n2[t3])); + if (void 0 === n2[t3][3] && !e2.allowBooleanAttributes) return m("InvalidAttr", "boolean attribute '" + n2[t3][2] + "' is not allowed.", N(n2[t3])); + const s2 = n2[t3][2]; + if (!E(s2)) return m("InvalidAttr", "Attribute '" + s2 + "' is an invalid name.", N(n2[t3])); + if (i2.hasOwnProperty(s2)) return m("InvalidAttr", "Attribute '" + s2 + "' is repeated.", N(n2[t3])); + i2[s2] = 1; } - return false; + return true; } - /** - * Delay a calculated time between retries. - * - * @param isPrimaryRetry - - * @param attempt - - * @param abortSignal - - */ - async delay(isPrimaryRetry, attempt, abortSignal) { - let delayTimeInMs = 0; - if (isPrimaryRetry) { - switch (this.retryOptions.retryPolicyType) { - case StorageRetryPolicyType_js_1.StorageRetryPolicyType.EXPONENTIAL: - delayTimeInMs = Math.min((Math.pow(2, attempt - 1) - 1) * this.retryOptions.retryDelayInMs, this.retryOptions.maxRetryDelayInMs); - break; - case StorageRetryPolicyType_js_1.StorageRetryPolicyType.FIXED: - delayTimeInMs = this.retryOptions.retryDelayInMs; - break; + function x(t2, e2) { + if (";" === t2[++e2]) return -1; + if ("#" === t2[e2]) return (function(t3, e3) { + let n3 = /\d/; + for ("x" === t3[e3] && (e3++, n3 = /[\da-fA-F]/); e3 < t3.length; e3++) { + if (";" === t3[e3]) return e3; + if (!t3[e3].match(n3)) break; } - } else { - delayTimeInMs = Math.random() * 1e3; + return -1; + })(t2, ++e2); + let n2 = 0; + for (; e2 < t2.length; e2++, n2++) if (!(t2[e2].match(/\w/) && n2 < 20)) { + if (";" === t2[e2]) break; + return -1; } - log_js_1.logger.info(`RetryPolicy: Delay for ${delayTimeInMs}ms`); - return (0, utils_common_js_1.delay)(delayTimeInMs, abortSignal, RETRY_ABORT_ERROR); + return e2; } - }; - exports2.StorageRetryPolicy = StorageRetryPolicy; - } -}); - -// node_modules/@azure/storage-common/dist/commonjs/StorageRetryPolicyFactory.js -var require_StorageRetryPolicyFactory2 = __commonJS({ - "node_modules/@azure/storage-common/dist/commonjs/StorageRetryPolicyFactory.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.StorageRetryPolicyFactory = exports2.StorageRetryPolicy = exports2.StorageRetryPolicyType = void 0; - var StorageRetryPolicy_js_1 = require_StorageRetryPolicy2(); - Object.defineProperty(exports2, "StorageRetryPolicy", { enumerable: true, get: function() { - return StorageRetryPolicy_js_1.StorageRetryPolicy; - } }); - var StorageRetryPolicyType_js_1 = require_StorageRetryPolicyType2(); - Object.defineProperty(exports2, "StorageRetryPolicyType", { enumerable: true, get: function() { - return StorageRetryPolicyType_js_1.StorageRetryPolicyType; - } }); - var StorageRetryPolicyFactory = class { - 
retryOptions; - /** - * Creates an instance of StorageRetryPolicyFactory. - * @param retryOptions - - */ - constructor(retryOptions) { - this.retryOptions = retryOptions; + function m(t2, e2, n2) { + return { err: { code: t2, msg: e2, line: n2.line || n2, col: n2.col } }; } - /** - * Creates a StorageRetryPolicy object. - * - * @param nextPolicy - - * @param options - - */ - create(nextPolicy, options) { - return new StorageRetryPolicy_js_1.StorageRetryPolicy(nextPolicy, options, this.retryOptions); + function E(t2) { + return r(t2); } - }; - exports2.StorageRetryPolicyFactory = StorageRetryPolicyFactory; - } -}); - -// node_modules/@azure/storage-common/dist/commonjs/policies/StorageBrowserPolicyV2.js -var require_StorageBrowserPolicyV2 = __commonJS({ - "node_modules/@azure/storage-common/dist/commonjs/policies/StorageBrowserPolicyV2.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.storageBrowserPolicyName = void 0; - exports2.storageBrowserPolicy = storageBrowserPolicy; - var core_util_1 = require_commonjs4(); - var constants_js_1 = require_constants16(); - var utils_common_js_1 = require_utils_common2(); - exports2.storageBrowserPolicyName = "storageBrowserPolicy"; - function storageBrowserPolicy() { - return { - name: exports2.storageBrowserPolicyName, - async sendRequest(request2, next) { - if (core_util_1.isNodeLike) { - return next(request2); - } - if (request2.method === "GET" || request2.method === "HEAD") { - request2.url = (0, utils_common_js_1.setURLParameter)(request2.url, constants_js_1.URLConstants.Parameters.FORCE_BROWSER_NO_CACHE, (/* @__PURE__ */ new Date()).getTime().toString()); - } - request2.headers.delete(constants_js_1.HeaderConstants.COOKIE); - request2.headers.delete(constants_js_1.HeaderConstants.CONTENT_LENGTH); - return next(request2); - } - }; - } - } -}); - -// node_modules/@azure/storage-common/dist/commonjs/policies/StorageCorrectContentLengthPolicy.js -var require_StorageCorrectContentLengthPolicy = __commonJS({ - "node_modules/@azure/storage-common/dist/commonjs/policies/StorageCorrectContentLengthPolicy.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.storageCorrectContentLengthPolicyName = void 0; - exports2.storageCorrectContentLengthPolicy = storageCorrectContentLengthPolicy; - var constants_js_1 = require_constants16(); - exports2.storageCorrectContentLengthPolicyName = "StorageCorrectContentLengthPolicy"; - function storageCorrectContentLengthPolicy() { - function correctContentLength(request2) { - if (request2.body && (typeof request2.body === "string" || Buffer.isBuffer(request2.body)) && request2.body.length > 0) { - request2.headers.set(constants_js_1.HeaderConstants.CONTENT_LENGTH, Buffer.byteLength(request2.body)); - } + function b(t2, e2) { + const n2 = t2.substring(0, e2).split(/\r?\n/); + return { line: n2.length, col: n2[n2.length - 1].length + 1 }; } - return { - name: exports2.storageCorrectContentLengthPolicyName, - async sendRequest(request2, next) { - correctContentLength(request2); - return next(request2); - } + function N(t2) { + return t2.startIndex + t2[1].length; + } + const y = { preserveOrder: false, attributeNamePrefix: "@_", attributesGroupName: false, textNodeName: "#text", ignoreAttributes: true, removeNSPrefix: false, allowBooleanAttributes: false, parseTagValue: true, parseAttributeValue: false, trimValues: true, cdataPropName: false, numberParseOptions: { hex: true, leadingZeros: true, eNotation: true 
}, tagValueProcessor: function(t2, e2) { + return e2; + }, attributeValueProcessor: function(t2, e2) { + return e2; + }, stopNodes: [], alwaysCreateTextNode: false, isArray: () => false, commentPropName: false, unpairedTags: [], processEntities: true, htmlEntities: false, ignoreDeclaration: false, ignorePiTags: false, transformTagName: false, transformAttributeName: false, updateTag: function(t2, e2, n2) { + return t2; + }, captureMetaData: false }; + function T(t2) { + return "boolean" == typeof t2 ? { enabled: t2, maxEntitySize: 1e4, maxExpansionDepth: 10, maxTotalExpansions: 1e3, maxExpandedLength: 1e5, allowedTags: null, tagFilter: null } : "object" == typeof t2 && null !== t2 ? { enabled: false !== t2.enabled, maxEntitySize: t2.maxEntitySize ?? 1e4, maxExpansionDepth: t2.maxExpansionDepth ?? 10, maxTotalExpansions: t2.maxTotalExpansions ?? 1e3, maxExpandedLength: t2.maxExpandedLength ?? 1e5, allowedTags: t2.allowedTags ?? null, tagFilter: t2.tagFilter ?? null } : T(true); + } + const w = function(t2) { + const e2 = Object.assign({}, y, t2); + return e2.processEntities = T(e2.processEntities), e2; }; - } - } -}); - -// node_modules/@azure/storage-common/dist/commonjs/policies/StorageRetryPolicyV2.js -var require_StorageRetryPolicyV2 = __commonJS({ - "node_modules/@azure/storage-common/dist/commonjs/policies/StorageRetryPolicyV2.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.storageRetryPolicyName = void 0; - exports2.storageRetryPolicy = storageRetryPolicy; - var abort_controller_1 = require_commonjs12(); - var core_rest_pipeline_1 = require_commonjs6(); - var core_util_1 = require_commonjs4(); - var StorageRetryPolicyFactory_js_1 = require_StorageRetryPolicyFactory2(); - var constants_js_1 = require_constants16(); - var utils_common_js_1 = require_utils_common2(); - var log_js_1 = require_log6(); - exports2.storageRetryPolicyName = "storageRetryPolicy"; - var DEFAULT_RETRY_OPTIONS = { - maxRetryDelayInMs: 120 * 1e3, - maxTries: 4, - retryDelayInMs: 4 * 1e3, - retryPolicyType: StorageRetryPolicyFactory_js_1.StorageRetryPolicyType.EXPONENTIAL, - secondaryHost: "", - tryTimeoutInMs: void 0 - // Use server side default timeout strategy - }; - var retriableErrors = [ - "ETIMEDOUT", - "ESOCKETTIMEDOUT", - "ECONNREFUSED", - "ECONNRESET", - "ENOENT", - "ENOTFOUND", - "TIMEOUT", - "EPIPE", - "REQUEST_SEND_ERROR" - ]; - var RETRY_ABORT_ERROR = new abort_controller_1.AbortError("The operation was aborted."); - function storageRetryPolicy(options = {}) { - const retryPolicyType = options.retryPolicyType ?? DEFAULT_RETRY_OPTIONS.retryPolicyType; - const maxTries = options.maxTries ?? DEFAULT_RETRY_OPTIONS.maxTries; - const retryDelayInMs = options.retryDelayInMs ?? DEFAULT_RETRY_OPTIONS.retryDelayInMs; - const maxRetryDelayInMs = options.maxRetryDelayInMs ?? DEFAULT_RETRY_OPTIONS.maxRetryDelayInMs; - const secondaryHost = options.secondaryHost ?? DEFAULT_RETRY_OPTIONS.secondaryHost; - const tryTimeoutInMs = options.tryTimeoutInMs ?? DEFAULT_RETRY_OPTIONS.tryTimeoutInMs; - function shouldRetry({ isPrimaryRetry, attempt, response, error: error3 }) { - if (attempt >= maxTries) { - log_js_1.logger.info(`RetryPolicy: Attempt(s) ${attempt} >= maxTries ${maxTries}, no further try.`); - return false; + let v; + v = "function" != typeof Symbol ? 
"@@xmlMetadata" : /* @__PURE__ */ Symbol("XML Node Metadata"); + class I { + constructor(t2) { + this.tagname = t2, this.child = [], this[":@"] = {}; } - if (error3) { - for (const retriableError of retriableErrors) { - if (error3.name.toUpperCase().includes(retriableError) || error3.message.toUpperCase().includes(retriableError) || error3.code && error3.code.toString().toUpperCase() === retriableError) { - log_js_1.logger.info(`RetryPolicy: Network error ${retriableError} found, will retry.`); - return true; - } - } - if (error3?.code === "PARSE_ERROR" && error3?.message.startsWith(`Error "Error: Unclosed root tag`)) { - log_js_1.logger.info("RetryPolicy: Incomplete XML response likely due to service timeout, will retry."); - return true; - } + add(t2, e2) { + "__proto__" === t2 && (t2 = "#__proto__"), this.child.push({ [t2]: e2 }); } - if (response || error3) { - const statusCode = response?.status ?? error3?.statusCode ?? 0; - if (!isPrimaryRetry && statusCode === 404) { - log_js_1.logger.info(`RetryPolicy: Secondary access with 404, will retry.`); - return true; - } - if (statusCode === 503 || statusCode === 500) { - log_js_1.logger.info(`RetryPolicy: Will retry for status code ${statusCode}.`); - return true; - } + addChild(t2, e2) { + "__proto__" === t2.tagname && (t2.tagname = "#__proto__"), t2[":@"] && Object.keys(t2[":@"]).length > 0 ? this.child.push({ [t2.tagname]: t2.child, ":@": t2[":@"] }) : this.child.push({ [t2.tagname]: t2.child }), void 0 !== e2 && (this.child[this.child.length - 1][v] = { startIndex: e2 }); } - if (response) { - if (response?.status >= 400) { - const copySourceError = response.headers.get(constants_js_1.HeaderConstants.X_MS_CopySourceErrorCode); - if (copySourceError !== void 0) { - switch (copySourceError) { - case "InternalError": - case "OperationTimedOut": - case "ServerBusy": - return true; - } - } - } + static getMetaDataSymbol() { + return v; } - return false; } - function calculateDelay(isPrimaryRetry, attempt) { - let delayTimeInMs = 0; - if (isPrimaryRetry) { - switch (retryPolicyType) { - case StorageRetryPolicyFactory_js_1.StorageRetryPolicyType.EXPONENTIAL: - delayTimeInMs = Math.min((Math.pow(2, attempt - 1) - 1) * retryDelayInMs, maxRetryDelayInMs); - break; - case StorageRetryPolicyFactory_js_1.StorageRetryPolicyType.FIXED: - delayTimeInMs = retryDelayInMs; - break; - } - } else { - delayTimeInMs = Math.random() * 1e3; + class O { + constructor(t2) { + this.suppressValidationErr = !t2, this.options = t2; } - log_js_1.logger.info(`RetryPolicy: Delay for ${delayTimeInMs}ms`); - return delayTimeInMs; - } - return { - name: exports2.storageRetryPolicyName, - async sendRequest(request2, next) { - if (tryTimeoutInMs) { - request2.url = (0, utils_common_js_1.setURLParameter)(request2.url, constants_js_1.URLConstants.Parameters.TIMEOUT, String(Math.floor(tryTimeoutInMs / 1e3))); - } - const primaryUrl = request2.url; - const secondaryUrl = secondaryHost ? (0, utils_common_js_1.setURLHost)(request2.url, secondaryHost) : void 0; - let secondaryHas404 = false; - let attempt = 1; - let retryAgain = true; - let response; - let error3; - while (retryAgain) { - const isPrimaryRetry = secondaryHas404 || !secondaryUrl || !["GET", "HEAD", "OPTIONS"].includes(request2.method) || attempt % 2 === 1; - request2.url = isPrimaryRetry ? primaryUrl : secondaryUrl; - response = void 0; - error3 = void 0; - try { - log_js_1.logger.info(`RetryPolicy: =====> Try=${attempt} ${isPrimaryRetry ? 
"Primary" : "Secondary"}`); - response = await next(request2); - secondaryHas404 = secondaryHas404 || !isPrimaryRetry && response.status === 404; - } catch (e) { - if ((0, core_rest_pipeline_1.isRestError)(e)) { - log_js_1.logger.error(`RetryPolicy: Caught error, message: ${e.message}, code: ${e.code}`); - error3 = e; + readDocType(t2, e2) { + const n2 = {}; + if ("O" !== t2[e2 + 3] || "C" !== t2[e2 + 4] || "T" !== t2[e2 + 5] || "Y" !== t2[e2 + 6] || "P" !== t2[e2 + 7] || "E" !== t2[e2 + 8]) throw new Error("Invalid Tag instead of DOCTYPE"); + { + e2 += 9; + let i2 = 1, s2 = false, r2 = false, o2 = ""; + for (; e2 < t2.length; e2++) if ("<" !== t2[e2] || r2) if (">" === t2[e2]) { + if (r2 ? "-" === t2[e2 - 1] && "-" === t2[e2 - 2] && (r2 = false, i2--) : i2--, 0 === i2) break; + } else "[" === t2[e2] ? s2 = true : o2 += t2[e2]; + else { + if (s2 && A(t2, "!ENTITY", e2)) { + let i3, s3; + if (e2 += 7, [i3, s3, e2] = this.readEntityExp(t2, e2 + 1, this.suppressValidationErr), -1 === s3.indexOf("&")) { + const t3 = i3.replace(/[.\-+*:]/g, "\\."); + n2[i3] = { regx: RegExp(`&${t3};`, "g"), val: s3 }; + } + } else if (s2 && A(t2, "!ELEMENT", e2)) { + e2 += 8; + const { index: n3 } = this.readElementExp(t2, e2 + 1); + e2 = n3; + } else if (s2 && A(t2, "!ATTLIST", e2)) e2 += 8; + else if (s2 && A(t2, "!NOTATION", e2)) { + e2 += 9; + const { index: n3 } = this.readNotationExp(t2, e2 + 1, this.suppressValidationErr); + e2 = n3; } else { - log_js_1.logger.error(`RetryPolicy: Caught error, message: ${(0, core_util_1.getErrorMessage)(e)}`); - throw e; + if (!A(t2, "!--", e2)) throw new Error("Invalid DOCTYPE"); + r2 = true; } + i2++, o2 = ""; } - retryAgain = shouldRetry({ isPrimaryRetry, attempt, response, error: error3 }); - if (retryAgain) { - await (0, utils_common_js_1.delay)(calculateDelay(isPrimaryRetry, attempt), request2.abortSignal, RETRY_ABORT_ERROR); - } - attempt++; + if (0 !== i2) throw new Error("Unclosed DOCTYPE"); } - if (response) { - return response; + return { entities: n2, i: e2 }; + } + readEntityExp(t2, e2) { + e2 = P(t2, e2); + let n2 = ""; + for (; e2 < t2.length && !/\s/.test(t2[e2]) && '"' !== t2[e2] && "'" !== t2[e2]; ) n2 += t2[e2], e2++; + if (S(n2), e2 = P(t2, e2), !this.suppressValidationErr) { + if ("SYSTEM" === t2.substring(e2, e2 + 6).toUpperCase()) throw new Error("External entities are not supported"); + if ("%" === t2[e2]) throw new Error("Parameter entities are not supported"); } - throw error3 ?? 
new core_rest_pipeline_1.RestError("RetryPolicy failed without known error."); + let i2 = ""; + if ([e2, i2] = this.readIdentifierVal(t2, e2, "entity"), false !== this.options.enabled && this.options.maxEntitySize && i2.length > this.options.maxEntitySize) throw new Error(`Entity "${n2}" size (${i2.length}) exceeds maximum allowed size (${this.options.maxEntitySize})`); + return [n2, i2, --e2]; } - }; - } - } -}); - -// node_modules/@azure/storage-common/dist/commonjs/policies/StorageSharedKeyCredentialPolicyV2.js -var require_StorageSharedKeyCredentialPolicyV2 = __commonJS({ - "node_modules/@azure/storage-common/dist/commonjs/policies/StorageSharedKeyCredentialPolicyV2.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.storageSharedKeyCredentialPolicyName = void 0; - exports2.storageSharedKeyCredentialPolicy = storageSharedKeyCredentialPolicy; - var node_crypto_1 = require("node:crypto"); - var constants_js_1 = require_constants16(); - var utils_common_js_1 = require_utils_common2(); - var SharedKeyComparator_js_1 = require_SharedKeyComparator2(); - exports2.storageSharedKeyCredentialPolicyName = "storageSharedKeyCredentialPolicy"; - function storageSharedKeyCredentialPolicy(options) { - function signRequest(request2) { - request2.headers.set(constants_js_1.HeaderConstants.X_MS_DATE, (/* @__PURE__ */ new Date()).toUTCString()); - if (request2.body && (typeof request2.body === "string" || Buffer.isBuffer(request2.body)) && request2.body.length > 0) { - request2.headers.set(constants_js_1.HeaderConstants.CONTENT_LENGTH, Buffer.byteLength(request2.body)); + readNotationExp(t2, e2) { + e2 = P(t2, e2); + let n2 = ""; + for (; e2 < t2.length && !/\s/.test(t2[e2]); ) n2 += t2[e2], e2++; + !this.suppressValidationErr && S(n2), e2 = P(t2, e2); + const i2 = t2.substring(e2, e2 + 6).toUpperCase(); + if (!this.suppressValidationErr && "SYSTEM" !== i2 && "PUBLIC" !== i2) throw new Error(`Expected SYSTEM or PUBLIC, found "${i2}"`); + e2 += i2.length, e2 = P(t2, e2); + let s2 = null, r2 = null; + if ("PUBLIC" === i2) [e2, s2] = this.readIdentifierVal(t2, e2, "publicIdentifier"), '"' !== t2[e2 = P(t2, e2)] && "'" !== t2[e2] || ([e2, r2] = this.readIdentifierVal(t2, e2, "systemIdentifier")); + else if ("SYSTEM" === i2 && ([e2, r2] = this.readIdentifierVal(t2, e2, "systemIdentifier"), !this.suppressValidationErr && !r2)) throw new Error("Missing mandatory system identifier for SYSTEM notation"); + return { notationName: n2, publicIdentifier: s2, systemIdentifier: r2, index: --e2 }; } - const stringToSign = [ - request2.method.toUpperCase(), - getHeaderValueToSign(request2, constants_js_1.HeaderConstants.CONTENT_LANGUAGE), - getHeaderValueToSign(request2, constants_js_1.HeaderConstants.CONTENT_ENCODING), - getHeaderValueToSign(request2, constants_js_1.HeaderConstants.CONTENT_LENGTH), - getHeaderValueToSign(request2, constants_js_1.HeaderConstants.CONTENT_MD5), - getHeaderValueToSign(request2, constants_js_1.HeaderConstants.CONTENT_TYPE), - getHeaderValueToSign(request2, constants_js_1.HeaderConstants.DATE), - getHeaderValueToSign(request2, constants_js_1.HeaderConstants.IF_MODIFIED_SINCE), - getHeaderValueToSign(request2, constants_js_1.HeaderConstants.IF_MATCH), - getHeaderValueToSign(request2, constants_js_1.HeaderConstants.IF_NONE_MATCH), - getHeaderValueToSign(request2, constants_js_1.HeaderConstants.IF_UNMODIFIED_SINCE), - getHeaderValueToSign(request2, constants_js_1.HeaderConstants.RANGE) - ].join("\n") + "\n" + 
getCanonicalizedHeadersString(request2) + getCanonicalizedResourceString(request2); - const signature = (0, node_crypto_1.createHmac)("sha256", options.accountKey).update(stringToSign, "utf8").digest("base64"); - request2.headers.set(constants_js_1.HeaderConstants.AUTHORIZATION, `SharedKey ${options.accountName}:${signature}`); - } - function getHeaderValueToSign(request2, headerName) { - const value = request2.headers.get(headerName); - if (!value) { - return ""; + readIdentifierVal(t2, e2, n2) { + let i2 = ""; + const s2 = t2[e2]; + if ('"' !== s2 && "'" !== s2) throw new Error(`Expected quoted string, found "${s2}"`); + for (e2++; e2 < t2.length && t2[e2] !== s2; ) i2 += t2[e2], e2++; + if (t2[e2] !== s2) throw new Error(`Unterminated ${n2} value`); + return [++e2, i2]; } - if (headerName === constants_js_1.HeaderConstants.CONTENT_LENGTH && value === "0") { - return ""; + readElementExp(t2, e2) { + e2 = P(t2, e2); + let n2 = ""; + for (; e2 < t2.length && !/\s/.test(t2[e2]); ) n2 += t2[e2], e2++; + if (!this.suppressValidationErr && !r(n2)) throw new Error(`Invalid element name: "${n2}"`); + let i2 = ""; + if ("E" === t2[e2 = P(t2, e2)] && A(t2, "MPTY", e2)) e2 += 4; + else if ("A" === t2[e2] && A(t2, "NY", e2)) e2 += 2; + else if ("(" === t2[e2]) { + for (e2++; e2 < t2.length && ")" !== t2[e2]; ) i2 += t2[e2], e2++; + if (")" !== t2[e2]) throw new Error("Unterminated content model"); + } else if (!this.suppressValidationErr) throw new Error(`Invalid Element Expression, found "${t2[e2]}"`); + return { elementName: n2, contentModel: i2.trim(), index: e2 }; } - return value; - } - function getCanonicalizedHeadersString(request2) { - let headersArray = []; - for (const [name, value] of request2.headers) { - if (name.toLowerCase().startsWith(constants_js_1.HeaderConstants.PREFIX_FOR_STORAGE)) { - headersArray.push({ name, value }); + readAttlistExp(t2, e2) { + e2 = P(t2, e2); + let n2 = ""; + for (; e2 < t2.length && !/\s/.test(t2[e2]); ) n2 += t2[e2], e2++; + S(n2), e2 = P(t2, e2); + let i2 = ""; + for (; e2 < t2.length && !/\s/.test(t2[e2]); ) i2 += t2[e2], e2++; + if (!S(i2)) throw new Error(`Invalid attribute name: "${i2}"`); + e2 = P(t2, e2); + let s2 = ""; + if ("NOTATION" === t2.substring(e2, e2 + 8).toUpperCase()) { + if (s2 = "NOTATION", "(" !== t2[e2 = P(t2, e2 += 8)]) throw new Error(`Expected '(', found "${t2[e2]}"`); + e2++; + let n3 = []; + for (; e2 < t2.length && ")" !== t2[e2]; ) { + let i3 = ""; + for (; e2 < t2.length && "|" !== t2[e2] && ")" !== t2[e2]; ) i3 += t2[e2], e2++; + if (i3 = i3.trim(), !S(i3)) throw new Error(`Invalid notation name: "${i3}"`); + n3.push(i3), "|" === t2[e2] && (e2++, e2 = P(t2, e2)); + } + if (")" !== t2[e2]) throw new Error("Unterminated list of notations"); + e2++, s2 += " (" + n3.join("|") + ")"; + } else { + for (; e2 < t2.length && !/\s/.test(t2[e2]); ) s2 += t2[e2], e2++; + const n3 = ["CDATA", "ID", "IDREF", "IDREFS", "ENTITY", "ENTITIES", "NMTOKEN", "NMTOKENS"]; + if (!this.suppressValidationErr && !n3.includes(s2.toUpperCase())) throw new Error(`Invalid attribute type: "${s2}"`); } + e2 = P(t2, e2); + let r2 = ""; + return "#REQUIRED" === t2.substring(e2, e2 + 8).toUpperCase() ? (r2 = "#REQUIRED", e2 += 8) : "#IMPLIED" === t2.substring(e2, e2 + 7).toUpperCase() ? 
(r2 = "#IMPLIED", e2 += 7) : [e2, r2] = this.readIdentifierVal(t2, e2, "ATTLIST"), { elementName: n2, attributeName: i2, attributeType: s2, defaultValue: r2, index: e2 }; } - headersArray.sort((a, b) => { - return (0, SharedKeyComparator_js_1.compareHeader)(a.name.toLowerCase(), b.name.toLowerCase()); - }); - headersArray = headersArray.filter((value, index, array) => { - if (index > 0 && value.name.toLowerCase() === array[index - 1].name.toLowerCase()) { - return false; + } + const P = (t2, e2) => { + for (; e2 < t2.length && /\s/.test(t2[e2]); ) e2++; + return e2; + }; + function A(t2, e2, n2) { + for (let i2 = 0; i2 < e2.length; i2++) if (e2[i2] !== t2[n2 + i2 + 1]) return false; + return true; + } + function S(t2) { + if (r(t2)) return t2; + throw new Error(`Invalid entity name ${t2}`); + } + const C = /^[-+]?0x[a-fA-F0-9]+$/, $ = /^([\-\+])?(0*)([0-9]*(\.[0-9]*)?)$/, V = { hex: true, leadingZeros: true, decimalPoint: ".", eNotation: true }; + const D = /^([-+])?(0*)(\d*(\.\d*)?[eE][-\+]?\d+)$/; + function L(t2) { + return "function" == typeof t2 ? t2 : Array.isArray(t2) ? (e2) => { + for (const n2 of t2) { + if ("string" == typeof n2 && e2 === n2) return true; + if (n2 instanceof RegExp && n2.test(e2)) return true; } - return true; - }); - let canonicalizedHeadersStringToSign = ""; - headersArray.forEach((header) => { - canonicalizedHeadersStringToSign += `${header.name.toLowerCase().trimRight()}:${header.value.trimLeft()} -`; - }); - return canonicalizedHeadersStringToSign; + } : () => false; } - function getCanonicalizedResourceString(request2) { - const path13 = (0, utils_common_js_1.getURLPath)(request2.url) || "/"; - let canonicalizedResourceString = ""; - canonicalizedResourceString += `/${options.accountName}${path13}`; - const queries = (0, utils_common_js_1.getURLQueries)(request2.url); - const lowercaseQueries = {}; - if (queries) { - const queryKeys = []; - for (const key in queries) { - if (Object.prototype.hasOwnProperty.call(queries, key)) { - const lowercaseKey = key.toLowerCase(); - lowercaseQueries[lowercaseKey] = queries[key]; - queryKeys.push(lowercaseKey); + class F { + constructor(t2) { + if (this.options = t2, this.currentNode = null, this.tagsNodeStack = [], this.docTypeEntities = {}, this.lastEntities = { apos: { regex: /&(apos|#39|#x27);/g, val: "'" }, gt: { regex: /&(gt|#62|#x3E);/g, val: ">" }, lt: { regex: /&(lt|#60|#x3C);/g, val: "<" }, quot: { regex: /&(quot|#34|#x22);/g, val: '"' } }, this.ampEntity = { regex: /&(amp|#38|#x26);/g, val: "&" }, this.htmlEntities = { space: { regex: /&(nbsp|#160);/g, val: " " }, cent: { regex: /&(cent|#162);/g, val: "\xA2" }, pound: { regex: /&(pound|#163);/g, val: "\xA3" }, yen: { regex: /&(yen|#165);/g, val: "\xA5" }, euro: { regex: /&(euro|#8364);/g, val: "\u20AC" }, copyright: { regex: /&(copy|#169);/g, val: "\xA9" }, reg: { regex: /&(reg|#174);/g, val: "\xAE" }, inr: { regex: /&(inr|#8377);/g, val: "\u20B9" }, num_dec: { regex: /&#([0-9]{1,7});/g, val: (t3, e2) => K(e2, 10, "&#") }, num_hex: { regex: /&#x([0-9a-fA-F]{1,6});/g, val: (t3, e2) => K(e2, 16, "&#x") } }, this.addExternalEntities = j, this.parseXml = B, this.parseTextData = M, this.resolveNameSpace = _, this.buildAttributesMap = U, this.isItStopNode = X, this.replaceEntitiesValue = Y, this.readStopNodeData = q, this.saveTextToParentTag = G, this.addChild = R, this.ignoreAttributesFn = L(this.options.ignoreAttributes), this.entityExpansionCount = 0, this.currentExpandedLength = 0, this.options.stopNodes && this.options.stopNodes.length > 0) { + 
this.stopNodesExact = /* @__PURE__ */ new Set(), this.stopNodesWildcard = /* @__PURE__ */ new Set(); + for (let t3 = 0; t3 < this.options.stopNodes.length; t3++) { + const e2 = this.options.stopNodes[t3]; + "string" == typeof e2 && (e2.startsWith("*.") ? this.stopNodesWildcard.add(e2.substring(2)) : this.stopNodesExact.add(e2)); } } - queryKeys.sort(); - for (const key of queryKeys) { - canonicalizedResourceString += ` -${key}:${decodeURIComponent(lowercaseQueries[key])}`; - } } - return canonicalizedResourceString; } - return { - name: exports2.storageSharedKeyCredentialPolicyName, - async sendRequest(request2, next) { - signRequest(request2); - return next(request2); - } - }; - } - } -}); - -// node_modules/@azure/storage-common/dist/commonjs/policies/StorageRequestFailureDetailsParserPolicy.js -var require_StorageRequestFailureDetailsParserPolicy = __commonJS({ - "node_modules/@azure/storage-common/dist/commonjs/policies/StorageRequestFailureDetailsParserPolicy.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.storageRequestFailureDetailsParserPolicyName = void 0; - exports2.storageRequestFailureDetailsParserPolicy = storageRequestFailureDetailsParserPolicy; - exports2.storageRequestFailureDetailsParserPolicyName = "storageRequestFailureDetailsParserPolicy"; - function storageRequestFailureDetailsParserPolicy() { - return { - name: exports2.storageRequestFailureDetailsParserPolicyName, - async sendRequest(request2, next) { - try { - const response = await next(request2); - return response; - } catch (err) { - if (typeof err === "object" && err !== null && err.response && err.response.parsedBody) { - if (err.response.parsedBody.code === "InvalidHeaderValue" && err.response.parsedBody.HeaderName === "x-ms-version") { - err.message = "The provided service version is not enabled on this storage account. 
Please see https://learn.microsoft.com/rest/api/storageservices/versioning-for-the-azure-storage-services for additional information.\n"; - } - } - throw err; - } + function j(t2) { + const e2 = Object.keys(t2); + for (let n2 = 0; n2 < e2.length; n2++) { + const i2 = e2[n2], s2 = i2.replace(/[.\-+*:]/g, "\\."); + this.lastEntities[i2] = { regex: new RegExp("&" + s2 + ";", "g"), val: t2[i2] }; } - }; - } - } -}); - -// node_modules/@azure/storage-common/dist/commonjs/index.js -var require_commonjs13 = __commonJS({ - "node_modules/@azure/storage-common/dist/commonjs/index.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.BaseRequestPolicy = exports2.getCachedDefaultHttpClient = void 0; - var tslib_1 = (init_tslib_es6(), __toCommonJS(tslib_es6_exports)); - tslib_1.__exportStar(require_BufferScheduler(), exports2); - var cache_js_1 = require_cache3(); - Object.defineProperty(exports2, "getCachedDefaultHttpClient", { enumerable: true, get: function() { - return cache_js_1.getCachedDefaultHttpClient; - } }); - tslib_1.__exportStar(require_StorageBrowserPolicyFactory(), exports2); - tslib_1.__exportStar(require_AnonymousCredential2(), exports2); - tslib_1.__exportStar(require_Credential2(), exports2); - tslib_1.__exportStar(require_StorageSharedKeyCredential2(), exports2); - tslib_1.__exportStar(require_StorageRetryPolicyFactory2(), exports2); - var RequestPolicy_js_1 = require_RequestPolicy2(); - Object.defineProperty(exports2, "BaseRequestPolicy", { enumerable: true, get: function() { - return RequestPolicy_js_1.BaseRequestPolicy; - } }); - tslib_1.__exportStar(require_AnonymousCredentialPolicy2(), exports2); - tslib_1.__exportStar(require_CredentialPolicy2(), exports2); - tslib_1.__exportStar(require_StorageBrowserPolicy(), exports2); - tslib_1.__exportStar(require_StorageBrowserPolicyV2(), exports2); - tslib_1.__exportStar(require_StorageCorrectContentLengthPolicy(), exports2); - tslib_1.__exportStar(require_StorageRetryPolicyType2(), exports2); - tslib_1.__exportStar(require_StorageRetryPolicy2(), exports2); - tslib_1.__exportStar(require_StorageRetryPolicyV2(), exports2); - tslib_1.__exportStar(require_StorageSharedKeyCredentialPolicy2(), exports2); - tslib_1.__exportStar(require_StorageSharedKeyCredentialPolicyV2(), exports2); - tslib_1.__exportStar(require_StorageRetryPolicyFactory2(), exports2); - tslib_1.__exportStar(require_StorageRequestFailureDetailsParserPolicy(), exports2); - } -}); - -// node_modules/@azure/storage-blob/dist/commonjs/policies/StorageBrowserPolicyV2.js -var require_StorageBrowserPolicyV22 = __commonJS({ - "node_modules/@azure/storage-blob/dist/commonjs/policies/StorageBrowserPolicyV2.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.storageBrowserPolicyName = void 0; - exports2.storageBrowserPolicy = storageBrowserPolicy; - var core_util_1 = require_commonjs4(); - var constants_js_1 = require_constants15(); - var utils_common_js_1 = require_utils_common(); - exports2.storageBrowserPolicyName = "storageBrowserPolicy"; - function storageBrowserPolicy() { - return { - name: exports2.storageBrowserPolicyName, - async sendRequest(request2, next) { - if (core_util_1.isNodeLike) { - return next(request2); - } - if (request2.method === "GET" || request2.method === "HEAD") { - request2.url = (0, utils_common_js_1.setURLParameter)(request2.url, constants_js_1.URLConstants.Parameters.FORCE_BROWSER_NO_CACHE, (/* @__PURE__ */ new Date()).getTime().toString()); - 
} - request2.headers.delete(constants_js_1.HeaderConstants.COOKIE); - request2.headers.delete(constants_js_1.HeaderConstants.CONTENT_LENGTH); - return next(request2); + } + function M(t2, e2, n2, i2, s2, r2, o2) { + if (void 0 !== t2 && (this.options.trimValues && !i2 && (t2 = t2.trim()), t2.length > 0)) { + o2 || (t2 = this.replaceEntitiesValue(t2, e2, n2)); + const i3 = this.options.tagValueProcessor(e2, t2, n2, s2, r2); + return null == i3 ? t2 : typeof i3 != typeof t2 || i3 !== t2 ? i3 : this.options.trimValues || t2.trim() === t2 ? Z(t2, this.options.parseTagValue, this.options.numberParseOptions) : t2; } - }; - } - } -}); - -// node_modules/@azure/storage-blob/dist/commonjs/policies/StorageRetryPolicyV2.js -var require_StorageRetryPolicyV22 = __commonJS({ - "node_modules/@azure/storage-blob/dist/commonjs/policies/StorageRetryPolicyV2.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.storageRetryPolicyName = void 0; - exports2.storageRetryPolicy = storageRetryPolicy; - var abort_controller_1 = require_commonjs11(); - var core_rest_pipeline_1 = require_commonjs6(); - var core_util_1 = require_commonjs4(); - var StorageRetryPolicyFactory_js_1 = require_StorageRetryPolicyFactory(); - var constants_js_1 = require_constants15(); - var utils_common_js_1 = require_utils_common(); - var log_js_1 = require_log5(); - exports2.storageRetryPolicyName = "storageRetryPolicy"; - var DEFAULT_RETRY_OPTIONS = { - maxRetryDelayInMs: 120 * 1e3, - maxTries: 4, - retryDelayInMs: 4 * 1e3, - retryPolicyType: StorageRetryPolicyFactory_js_1.StorageRetryPolicyType.EXPONENTIAL, - secondaryHost: "", - tryTimeoutInMs: void 0 - // Use server side default timeout strategy - }; - var retriableErrors = [ - "ETIMEDOUT", - "ESOCKETTIMEDOUT", - "ECONNREFUSED", - "ECONNRESET", - "ENOENT", - "ENOTFOUND", - "TIMEOUT", - "EPIPE", - "REQUEST_SEND_ERROR" - ]; - var RETRY_ABORT_ERROR = new abort_controller_1.AbortError("The operation was aborted."); - function storageRetryPolicy(options = {}) { - const retryPolicyType = options.retryPolicyType ?? DEFAULT_RETRY_OPTIONS.retryPolicyType; - const maxTries = options.maxTries ?? DEFAULT_RETRY_OPTIONS.maxTries; - const retryDelayInMs = options.retryDelayInMs ?? DEFAULT_RETRY_OPTIONS.retryDelayInMs; - const maxRetryDelayInMs = options.maxRetryDelayInMs ?? DEFAULT_RETRY_OPTIONS.maxRetryDelayInMs; - const secondaryHost = options.secondaryHost ?? DEFAULT_RETRY_OPTIONS.secondaryHost; - const tryTimeoutInMs = options.tryTimeoutInMs ?? DEFAULT_RETRY_OPTIONS.tryTimeoutInMs; - function shouldRetry({ isPrimaryRetry, attempt, response, error: error3 }) { - if (attempt >= maxTries) { - log_js_1.logger.info(`RetryPolicy: Attempt(s) ${attempt} >= maxTries ${maxTries}, no further try.`); - return false; + } + function _(t2) { + if (this.options.removeNSPrefix) { + const e2 = t2.split(":"), n2 = "/" === t2.charAt(0) ? 
"/" : ""; + if ("xmlns" === e2[0]) return ""; + 2 === e2.length && (t2 = n2 + e2[1]); } - if (error3) { - for (const retriableError of retriableErrors) { - if (error3.name.toUpperCase().includes(retriableError) || error3.message.toUpperCase().includes(retriableError) || error3.code && error3.code.toString().toUpperCase() === retriableError) { - log_js_1.logger.info(`RetryPolicy: Network error ${retriableError} found, will retry.`); - return true; - } + return t2; + } + const k = new RegExp(`([^\\s=]+)\\s*(=\\s*(['"])([\\s\\S]*?)\\3)?`, "gm"); + function U(t2, e2, n2) { + if (true !== this.options.ignoreAttributes && "string" == typeof t2) { + const i2 = s(t2, k), r2 = i2.length, o2 = {}; + for (let t3 = 0; t3 < r2; t3++) { + const s2 = this.resolveNameSpace(i2[t3][1]); + if (this.ignoreAttributesFn(s2, e2)) continue; + let r3 = i2[t3][4], a2 = this.options.attributeNamePrefix + s2; + if (s2.length) if (this.options.transformAttributeName && (a2 = this.options.transformAttributeName(a2)), "__proto__" === a2 && (a2 = "#__proto__"), void 0 !== r3) { + this.options.trimValues && (r3 = r3.trim()), r3 = this.replaceEntitiesValue(r3, n2, e2); + const t4 = this.options.attributeValueProcessor(s2, r3, e2); + o2[a2] = null == t4 ? r3 : typeof t4 != typeof r3 || t4 !== r3 ? t4 : Z(r3, this.options.parseAttributeValue, this.options.numberParseOptions); + } else this.options.allowBooleanAttributes && (o2[a2] = true); } - if (error3?.code === "PARSE_ERROR" && error3?.message.startsWith(`Error "Error: Unclosed root tag`)) { - log_js_1.logger.info("RetryPolicy: Incomplete XML response likely due to service timeout, will retry."); - return true; + if (!Object.keys(o2).length) return; + if (this.options.attributesGroupName) { + const t3 = {}; + return t3[this.options.attributesGroupName] = o2, t3; } + return o2; } - if (response || error3) { - const statusCode = response?.status ?? error3?.statusCode ?? 0; - if (!isPrimaryRetry && statusCode === 404) { - log_js_1.logger.info(`RetryPolicy: Secondary access with 404, will retry.`); - return true; - } - if (statusCode === 503 || statusCode === 500) { - log_js_1.logger.info(`RetryPolicy: Will retry for status code ${statusCode}.`); - return true; + } + const B = function(t2) { + t2 = t2.replace(/\r\n?/g, "\n"); + const e2 = new I("!xml"); + let n2 = e2, i2 = "", s2 = ""; + this.entityExpansionCount = 0, this.currentExpandedLength = 0; + const r2 = new O(this.options.processEntities); + for (let o2 = 0; o2 < t2.length; o2++) if ("<" === t2[o2]) if ("/" === t2[o2 + 1]) { + const e3 = z(t2, ">", o2, "Closing Tag is not closed."); + let r3 = t2.substring(o2 + 2, e3).trim(); + if (this.options.removeNSPrefix) { + const t3 = r3.indexOf(":"); + -1 !== t3 && (r3 = r3.substr(t3 + 1)); } - } - if (response) { - if (response?.status >= 400) { - const copySourceError = response.headers.get(constants_js_1.HeaderConstants.X_MS_CopySourceErrorCode); - if (copySourceError !== void 0) { - switch (copySourceError) { - case "InternalError": - case "OperationTimedOut": - case "ServerBusy": - return true; - } - } + this.options.transformTagName && (r3 = this.options.transformTagName(r3)), n2 && (i2 = this.saveTextToParentTag(i2, n2, s2)); + const a2 = s2.substring(s2.lastIndexOf(".") + 1); + if (r3 && -1 !== this.options.unpairedTags.indexOf(r3)) throw new Error(`Unpaired tag can not be used as closing tag: `); + let l2 = 0; + a2 && -1 !== this.options.unpairedTags.indexOf(a2) ? 
(l2 = s2.lastIndexOf(".", s2.lastIndexOf(".") - 1), this.tagsNodeStack.pop()) : l2 = s2.lastIndexOf("."), s2 = s2.substring(0, l2), n2 = this.tagsNodeStack.pop(), i2 = "", o2 = e3; + } else if ("?" === t2[o2 + 1]) { + let e3 = W(t2, o2, false, "?>"); + if (!e3) throw new Error("Pi Tag is not closed."); + if (i2 = this.saveTextToParentTag(i2, n2, s2), this.options.ignoreDeclaration && "?xml" === e3.tagName || this.options.ignorePiTags) ; + else { + const t3 = new I(e3.tagName); + t3.add(this.options.textNodeName, ""), e3.tagName !== e3.tagExp && e3.attrExpPresent && (t3[":@"] = this.buildAttributesMap(e3.tagExp, s2, e3.tagName)), this.addChild(n2, t3, s2, o2); } - } - return false; - } - function calculateDelay(isPrimaryRetry, attempt) { - let delayTimeInMs = 0; - if (isPrimaryRetry) { - switch (retryPolicyType) { - case StorageRetryPolicyFactory_js_1.StorageRetryPolicyType.EXPONENTIAL: - delayTimeInMs = Math.min((Math.pow(2, attempt - 1) - 1) * retryDelayInMs, maxRetryDelayInMs); - break; - case StorageRetryPolicyFactory_js_1.StorageRetryPolicyType.FIXED: - delayTimeInMs = retryDelayInMs; - break; + o2 = e3.closeIndex + 1; + } else if ("!--" === t2.substr(o2 + 1, 3)) { + const e3 = z(t2, "-->", o2 + 4, "Comment is not closed."); + if (this.options.commentPropName) { + const r3 = t2.substring(o2 + 4, e3 - 2); + i2 = this.saveTextToParentTag(i2, n2, s2), n2.add(this.options.commentPropName, [{ [this.options.textNodeName]: r3 }]); } + o2 = e3; + } else if ("!D" === t2.substr(o2 + 1, 2)) { + const e3 = r2.readDocType(t2, o2); + this.docTypeEntities = e3.entities, o2 = e3.i; + } else if ("![" === t2.substr(o2 + 1, 2)) { + const e3 = z(t2, "]]>", o2, "CDATA is not closed.") - 2, r3 = t2.substring(o2 + 9, e3); + i2 = this.saveTextToParentTag(i2, n2, s2); + let a2 = this.parseTextData(r3, n2.tagname, s2, true, false, true, true); + null == a2 && (a2 = ""), this.options.cdataPropName ? n2.add(this.options.cdataPropName, [{ [this.options.textNodeName]: r3 }]) : n2.add(this.options.textNodeName, a2), o2 = e3 + 2; } else { - delayTimeInMs = Math.random() * 1e3; - } - log_js_1.logger.info(`RetryPolicy: Delay for ${delayTimeInMs}ms`); - return delayTimeInMs; - } - return { - name: exports2.storageRetryPolicyName, - async sendRequest(request2, next) { - if (tryTimeoutInMs) { - request2.url = (0, utils_common_js_1.setURLParameter)(request2.url, constants_js_1.URLConstants.Parameters.TIMEOUT, String(Math.floor(tryTimeoutInMs / 1e3))); + let r3 = W(t2, o2, this.options.removeNSPrefix), a2 = r3.tagName; + const l2 = r3.rawTagName; + let u2 = r3.tagExp, h2 = r3.attrExpPresent, d2 = r3.closeIndex; + if (this.options.transformTagName) { + const t3 = this.options.transformTagName(a2); + u2 === a2 && (u2 = t3), a2 = t3; } - const primaryUrl = request2.url; - const secondaryUrl = secondaryHost ? (0, utils_common_js_1.setURLHost)(request2.url, secondaryHost) : void 0; - let secondaryHas404 = false; - let attempt = 1; - let retryAgain = true; - let response; - let error3; - while (retryAgain) { - const isPrimaryRetry = secondaryHas404 || !secondaryUrl || !["GET", "HEAD", "OPTIONS"].includes(request2.method) || attempt % 2 === 1; - request2.url = isPrimaryRetry ? primaryUrl : secondaryUrl; - response = void 0; - error3 = void 0; - try { - log_js_1.logger.info(`RetryPolicy: =====> Try=${attempt} ${isPrimaryRetry ? 
"Primary" : "Secondary"}`); - response = await next(request2); - secondaryHas404 = secondaryHas404 || !isPrimaryRetry && response.status === 404; - } catch (e) { - if ((0, core_rest_pipeline_1.isRestError)(e)) { - log_js_1.logger.error(`RetryPolicy: Caught error, message: ${e.message}, code: ${e.code}`); - error3 = e; - } else { - log_js_1.logger.error(`RetryPolicy: Caught error, message: ${(0, core_util_1.getErrorMessage)(e)}`); - throw e; - } + n2 && i2 && "!xml" !== n2.tagname && (i2 = this.saveTextToParentTag(i2, n2, s2, false)); + const p2 = n2; + p2 && -1 !== this.options.unpairedTags.indexOf(p2.tagname) && (n2 = this.tagsNodeStack.pop(), s2 = s2.substring(0, s2.lastIndexOf("."))), a2 !== e2.tagname && (s2 += s2 ? "." + a2 : a2); + const f2 = o2; + if (this.isItStopNode(this.stopNodesExact, this.stopNodesWildcard, s2, a2)) { + let e3 = ""; + if (u2.length > 0 && u2.lastIndexOf("/") === u2.length - 1) "/" === a2[a2.length - 1] ? (a2 = a2.substr(0, a2.length - 1), s2 = s2.substr(0, s2.length - 1), u2 = a2) : u2 = u2.substr(0, u2.length - 1), o2 = r3.closeIndex; + else if (-1 !== this.options.unpairedTags.indexOf(a2)) o2 = r3.closeIndex; + else { + const n3 = this.readStopNodeData(t2, l2, d2 + 1); + if (!n3) throw new Error(`Unexpected end of ${l2}`); + o2 = n3.i, e3 = n3.tagContent; } - retryAgain = shouldRetry({ isPrimaryRetry, attempt, response, error: error3 }); - if (retryAgain) { - await (0, utils_common_js_1.delay)(calculateDelay(isPrimaryRetry, attempt), request2.abortSignal, RETRY_ABORT_ERROR); + const i3 = new I(a2); + a2 !== u2 && h2 && (i3[":@"] = this.buildAttributesMap(u2, s2, a2)), e3 && (e3 = this.parseTextData(e3, a2, s2, true, h2, true, true)), s2 = s2.substr(0, s2.lastIndexOf(".")), i3.add(this.options.textNodeName, e3), this.addChild(n2, i3, s2, f2); + } else { + if (u2.length > 0 && u2.lastIndexOf("/") === u2.length - 1) { + if ("/" === a2[a2.length - 1] ? (a2 = a2.substr(0, a2.length - 1), s2 = s2.substr(0, s2.length - 1), u2 = a2) : u2 = u2.substr(0, u2.length - 1), this.options.transformTagName) { + const t4 = this.options.transformTagName(a2); + u2 === a2 && (u2 = t4), a2 = t4; + } + const t3 = new I(a2); + a2 !== u2 && h2 && (t3[":@"] = this.buildAttributesMap(u2, s2, a2)), this.addChild(n2, t3, s2, f2), s2 = s2.substr(0, s2.lastIndexOf(".")); + } else { + const t3 = new I(a2); + this.tagsNodeStack.push(n2), a2 !== u2 && h2 && (t3[":@"] = this.buildAttributesMap(u2, s2, a2)), this.addChild(n2, t3, s2, f2), n2 = t3; } - attempt++; - } - if (response) { - return response; + i2 = "", o2 = d2; } - throw error3 ?? 
new core_rest_pipeline_1.RestError("RetryPolicy failed without known error."); } + else i2 += t2[o2]; + return e2.child; }; - } - } -}); - -// node_modules/@azure/storage-blob/dist/commonjs/policies/StorageSharedKeyCredentialPolicyV2.js -var require_StorageSharedKeyCredentialPolicyV22 = __commonJS({ - "node_modules/@azure/storage-blob/dist/commonjs/policies/StorageSharedKeyCredentialPolicyV2.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.storageSharedKeyCredentialPolicyName = void 0; - exports2.storageSharedKeyCredentialPolicy = storageSharedKeyCredentialPolicy; - var node_crypto_1 = require("node:crypto"); - var constants_js_1 = require_constants15(); - var utils_common_js_1 = require_utils_common(); - var SharedKeyComparator_js_1 = require_SharedKeyComparator(); - exports2.storageSharedKeyCredentialPolicyName = "storageSharedKeyCredentialPolicy"; - function storageSharedKeyCredentialPolicy(options) { - function signRequest(request2) { - request2.headers.set(constants_js_1.HeaderConstants.X_MS_DATE, (/* @__PURE__ */ new Date()).toUTCString()); - if (request2.body && (typeof request2.body === "string" || Buffer.isBuffer(request2.body)) && request2.body.length > 0) { - request2.headers.set(constants_js_1.HeaderConstants.CONTENT_LENGTH, Buffer.byteLength(request2.body)); - } - const stringToSign = [ - request2.method.toUpperCase(), - getHeaderValueToSign(request2, constants_js_1.HeaderConstants.CONTENT_LANGUAGE), - getHeaderValueToSign(request2, constants_js_1.HeaderConstants.CONTENT_ENCODING), - getHeaderValueToSign(request2, constants_js_1.HeaderConstants.CONTENT_LENGTH), - getHeaderValueToSign(request2, constants_js_1.HeaderConstants.CONTENT_MD5), - getHeaderValueToSign(request2, constants_js_1.HeaderConstants.CONTENT_TYPE), - getHeaderValueToSign(request2, constants_js_1.HeaderConstants.DATE), - getHeaderValueToSign(request2, constants_js_1.HeaderConstants.IF_MODIFIED_SINCE), - getHeaderValueToSign(request2, constants_js_1.HeaderConstants.IF_MATCH), - getHeaderValueToSign(request2, constants_js_1.HeaderConstants.IF_NONE_MATCH), - getHeaderValueToSign(request2, constants_js_1.HeaderConstants.IF_UNMODIFIED_SINCE), - getHeaderValueToSign(request2, constants_js_1.HeaderConstants.RANGE) - ].join("\n") + "\n" + getCanonicalizedHeadersString(request2) + getCanonicalizedResourceString(request2); - const signature = (0, node_crypto_1.createHmac)("sha256", options.accountKey).update(stringToSign, "utf8").digest("base64"); - request2.headers.set(constants_js_1.HeaderConstants.AUTHORIZATION, `SharedKey ${options.accountName}:${signature}`); + function R(t2, e2, n2, i2) { + this.options.captureMetaData || (i2 = void 0); + const s2 = this.options.updateTag(e2.tagname, n2, e2[":@"]); + false === s2 || ("string" == typeof s2 ? 
(e2.tagname = s2, t2.addChild(e2, i2)) : t2.addChild(e2, i2)); } - function getHeaderValueToSign(request2, headerName) { - const value = request2.headers.get(headerName); - if (!value) { - return ""; + const Y = function(t2, e2, n2) { + if (-1 === t2.indexOf("&")) return t2; + const i2 = this.options.processEntities; + if (!i2.enabled) return t2; + if (i2.allowedTags && !i2.allowedTags.includes(e2)) return t2; + if (i2.tagFilter && !i2.tagFilter(e2, n2)) return t2; + for (let e3 in this.docTypeEntities) { + const n3 = this.docTypeEntities[e3], s2 = t2.match(n3.regx); + if (s2) { + if (this.entityExpansionCount += s2.length, i2.maxTotalExpansions && this.entityExpansionCount > i2.maxTotalExpansions) throw new Error(`Entity expansion limit exceeded: ${this.entityExpansionCount} > ${i2.maxTotalExpansions}`); + const e4 = t2.length; + if (t2 = t2.replace(n3.regx, n3.val), i2.maxExpandedLength && (this.currentExpandedLength += t2.length - e4, this.currentExpandedLength > i2.maxExpandedLength)) throw new Error(`Total expanded content size exceeded: ${this.currentExpandedLength} > ${i2.maxExpandedLength}`); + } } - if (headerName === constants_js_1.HeaderConstants.CONTENT_LENGTH && value === "0") { - return ""; + if (-1 === t2.indexOf("&")) return t2; + for (let e3 in this.lastEntities) { + const n3 = this.lastEntities[e3]; + t2 = t2.replace(n3.regex, n3.val); } - return value; - } - function getCanonicalizedHeadersString(request2) { - let headersArray = []; - for (const [name, value] of request2.headers) { - if (name.toLowerCase().startsWith(constants_js_1.HeaderConstants.PREFIX_FOR_STORAGE)) { - headersArray.push({ name, value }); - } + if (-1 === t2.indexOf("&")) return t2; + if (this.options.htmlEntities) for (let e3 in this.htmlEntities) { + const n3 = this.htmlEntities[e3]; + t2 = t2.replace(n3.regex, n3.val); } - headersArray.sort((a, b) => { - return (0, SharedKeyComparator_js_1.compareHeader)(a.name.toLowerCase(), b.name.toLowerCase()); - }); - headersArray = headersArray.filter((value, index, array) => { - if (index > 0 && value.name.toLowerCase() === array[index - 1].name.toLowerCase()) { - return false; - } - return true; - }); - let canonicalizedHeadersStringToSign = ""; - headersArray.forEach((header) => { - canonicalizedHeadersStringToSign += `${header.name.toLowerCase().trimRight()}:${header.value.trimLeft()} -`; - }); - return canonicalizedHeadersStringToSign; + return t2.replace(this.ampEntity.regex, this.ampEntity.val); + }; + function G(t2, e2, n2, i2) { + return t2 && (void 0 === i2 && (i2 = 0 === e2.child.length), void 0 !== (t2 = this.parseTextData(t2, e2.tagname, n2, false, !!e2[":@"] && 0 !== Object.keys(e2[":@"]).length, i2)) && "" !== t2 && e2.add(this.options.textNodeName, t2), t2 = ""), t2; } - function getCanonicalizedResourceString(request2) { - const path13 = (0, utils_common_js_1.getURLPath)(request2.url) || "/"; - let canonicalizedResourceString = ""; - canonicalizedResourceString += `/${options.accountName}${path13}`; - const queries = (0, utils_common_js_1.getURLQueries)(request2.url); - const lowercaseQueries = {}; - if (queries) { - const queryKeys = []; - for (const key in queries) { - if (Object.prototype.hasOwnProperty.call(queries, key)) { - const lowercaseKey = key.toLowerCase(); - lowercaseQueries[lowercaseKey] = queries[key]; - queryKeys.push(lowercaseKey); - } - } - queryKeys.sort(); - for (const key of queryKeys) { - canonicalizedResourceString += ` -${key}:${decodeURIComponent(lowercaseQueries[key])}`; + function X(t2, e2, n2, i2) { + return !(!e2 
|| !e2.has(i2)) || !(!t2 || !t2.has(n2)); + } + function z(t2, e2, n2, i2) { + const s2 = t2.indexOf(e2, n2); + if (-1 === s2) throw new Error(i2); + return s2 + e2.length - 1; + } + function W(t2, e2, n2, i2 = ">") { + const s2 = (function(t3, e3, n3 = ">") { + let i3, s3 = ""; + for (let r3 = e3; r3 < t3.length; r3++) { + let e4 = t3[r3]; + if (i3) e4 === i3 && (i3 = ""); + else if ('"' === e4 || "'" === e4) i3 = e4; + else if (e4 === n3[0]) { + if (!n3[1]) return { data: s3, index: r3 }; + if (t3[r3 + 1] === n3[1]) return { data: s3, index: r3 }; + } else " " === e4 && (e4 = " "); + s3 += e4; } + })(t2, e2 + 1, i2); + if (!s2) return; + let r2 = s2.data; + const o2 = s2.index, a2 = r2.search(/\s/); + let l2 = r2, u2 = true; + -1 !== a2 && (l2 = r2.substring(0, a2), r2 = r2.substring(a2 + 1).trimStart()); + const h2 = l2; + if (n2) { + const t3 = l2.indexOf(":"); + -1 !== t3 && (l2 = l2.substr(t3 + 1), u2 = l2 !== s2.data.substr(t3 + 1)); } - return canonicalizedResourceString; + return { tagName: l2, tagExp: r2, closeIndex: o2, attrExpPresent: u2, rawTagName: h2 }; } - return { - name: exports2.storageSharedKeyCredentialPolicyName, - async sendRequest(request2, next) { - signRequest(request2); - return next(request2); + function q(t2, e2, n2) { + const i2 = n2; + let s2 = 1; + for (; n2 < t2.length; n2++) if ("<" === t2[n2]) if ("/" === t2[n2 + 1]) { + const r2 = z(t2, ">", n2, `${e2} is not closed`); + if (t2.substring(n2 + 2, r2).trim() === e2 && (s2--, 0 === s2)) return { tagContent: t2.substring(i2, n2), i: r2 }; + n2 = r2; + } else if ("?" === t2[n2 + 1]) n2 = z(t2, "?>", n2 + 1, "StopNode is not closed."); + else if ("!--" === t2.substr(n2 + 1, 3)) n2 = z(t2, "-->", n2 + 3, "StopNode is not closed."); + else if ("![" === t2.substr(n2 + 1, 2)) n2 = z(t2, "]]>", n2, "StopNode is not closed.") - 2; + else { + const i3 = W(t2, n2, ">"); + i3 && ((i3 && i3.tagName) === e2 && "/" !== i3.tagExp[i3.tagExp.length - 1] && s2++, n2 = i3.closeIndex); } - }; - } - } -}); - -// node_modules/@azure/storage-blob/dist/commonjs/policies/StorageBrowserPolicy.js -var require_StorageBrowserPolicy2 = __commonJS({ - "node_modules/@azure/storage-blob/dist/commonjs/policies/StorageBrowserPolicy.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.StorageBrowserPolicy = void 0; - var RequestPolicy_js_1 = require_RequestPolicy(); - var core_util_1 = require_commonjs4(); - var constants_js_1 = require_constants15(); - var utils_common_js_1 = require_utils_common(); - var StorageBrowserPolicy = class extends RequestPolicy_js_1.BaseRequestPolicy { - /** - * Creates an instance of StorageBrowserPolicy. - * @param nextPolicy - - * @param options - - */ - // The base class has a protected constructor. Adding a public one to enable constructing of this class. - /* eslint-disable-next-line @typescript-eslint/no-useless-constructor*/ - constructor(nextPolicy, options) { - super(nextPolicy, options); } - /** - * Sends out request. 
- * - * @param request - - */ - async sendRequest(request2) { - if (core_util_1.isNodeLike) { - return this._nextPolicy.sendRequest(request2); + function Z(t2, e2, n2) { + if (e2 && "string" == typeof t2) { + const e3 = t2.trim(); + return "true" === e3 || "false" !== e3 && (function(t3, e4 = {}) { + if (e4 = Object.assign({}, V, e4), !t3 || "string" != typeof t3) return t3; + let n3 = t3.trim(); + if (void 0 !== e4.skipLike && e4.skipLike.test(n3)) return t3; + if ("0" === t3) return 0; + if (e4.hex && C.test(n3)) return (function(t4) { + if (parseInt) return parseInt(t4, 16); + if (Number.parseInt) return Number.parseInt(t4, 16); + if (window && window.parseInt) return window.parseInt(t4, 16); + throw new Error("parseInt, Number.parseInt, window.parseInt are not supported"); + })(n3); + if (-1 !== n3.search(/.+[eE].+/)) return (function(t4, e5, n4) { + if (!n4.eNotation) return t4; + const i3 = e5.match(D); + if (i3) { + let s2 = i3[1] || ""; + const r2 = -1 === i3[3].indexOf("e") ? "E" : "e", o2 = i3[2], a2 = s2 ? t4[o2.length + 1] === r2 : t4[o2.length] === r2; + return o2.length > 1 && a2 ? t4 : 1 !== o2.length || !i3[3].startsWith(`.${r2}`) && i3[3][0] !== r2 ? n4.leadingZeros && !a2 ? (e5 = (i3[1] || "") + i3[3], Number(e5)) : t4 : Number(e5); + } + return t4; + })(t3, n3, e4); + { + const s2 = $.exec(n3); + if (s2) { + const r2 = s2[1] || "", o2 = s2[2]; + let a2 = (i2 = s2[3]) && -1 !== i2.indexOf(".") ? ("." === (i2 = i2.replace(/0+$/, "")) ? i2 = "0" : "." === i2[0] ? i2 = "0" + i2 : "." === i2[i2.length - 1] && (i2 = i2.substring(0, i2.length - 1)), i2) : i2; + const l2 = r2 ? "." === t3[o2.length + 1] : "." === t3[o2.length]; + if (!e4.leadingZeros && (o2.length > 1 || 1 === o2.length && !l2)) return t3; + { + const i3 = Number(n3), s3 = String(i3); + if (0 === i3 || -0 === i3) return i3; + if (-1 !== s3.search(/[eE]/)) return e4.eNotation ? i3 : t3; + if (-1 !== n3.indexOf(".")) return "0" === s3 || s3 === a2 || s3 === `${r2}${a2}` ? i3 : t3; + let l3 = o2 ? a2 : n3; + return o2 ? l3 === s3 || r2 + l3 === s3 ? i3 : t3 : l3 === s3 || l3 === r2 + s3 ? i3 : t3; + } + } + return t3; + } + var i2; + })(t2, n2); } - if (request2.method.toUpperCase() === "GET" || request2.method.toUpperCase() === "HEAD") { - request2.url = (0, utils_common_js_1.setURLParameter)(request2.url, constants_js_1.URLConstants.Parameters.FORCE_BROWSER_NO_CACHE, (/* @__PURE__ */ new Date()).getTime().toString()); + return void 0 !== t2 ? t2 : ""; + } + function K(t2, e2, n2) { + const i2 = Number.parseInt(t2, e2); + return i2 >= 0 && i2 <= 1114111 ? String.fromCodePoint(i2) : n2 + t2 + ";"; + } + const Q = I.getMetaDataSymbol(); + function J(t2, e2) { + return H(t2, e2); + } + function H(t2, e2, n2) { + let i2; + const s2 = {}; + for (let r2 = 0; r2 < t2.length; r2++) { + const o2 = t2[r2], a2 = tt(o2); + let l2 = ""; + if (l2 = void 0 === n2 ? a2 : n2 + "." + a2, a2 === e2.textNodeName) void 0 === i2 ? i2 = o2[a2] : i2 += "" + o2[a2]; + else { + if (void 0 === a2) continue; + if (o2[a2]) { + let t3 = H(o2[a2], e2, l2); + const n3 = nt(t3, e2); + void 0 !== o2[Q] && (t3[Q] = o2[Q]), o2[":@"] ? et(t3, o2[":@"], l2, e2) : 1 !== Object.keys(t3).length || void 0 === t3[e2.textNodeName] || e2.alwaysCreateTextNode ? 0 === Object.keys(t3).length && (e2.alwaysCreateTextNode ? t3[e2.textNodeName] = "" : t3 = "") : t3 = t3[e2.textNodeName], void 0 !== s2[a2] && s2.hasOwnProperty(a2) ? (Array.isArray(s2[a2]) || (s2[a2] = [s2[a2]]), s2[a2].push(t3)) : e2.isArray(a2, l2, n3) ? 
s2[a2] = [t3] : s2[a2] = t3; + } + } } - request2.headers.remove(constants_js_1.HeaderConstants.COOKIE); - request2.headers.remove(constants_js_1.HeaderConstants.CONTENT_LENGTH); - return this._nextPolicy.sendRequest(request2); + return "string" == typeof i2 ? i2.length > 0 && (s2[e2.textNodeName] = i2) : void 0 !== i2 && (s2[e2.textNodeName] = i2), s2; } - }; - exports2.StorageBrowserPolicy = StorageBrowserPolicy; + function tt(t2) { + const e2 = Object.keys(t2); + for (let t3 = 0; t3 < e2.length; t3++) { + const n2 = e2[t3]; + if (":@" !== n2) return n2; + } + } + function et(t2, e2, n2, i2) { + if (e2) { + const s2 = Object.keys(e2), r2 = s2.length; + for (let o2 = 0; o2 < r2; o2++) { + const r3 = s2[o2]; + i2.isArray(r3, n2 + "." + r3, true, true) ? t2[r3] = [e2[r3]] : t2[r3] = e2[r3]; + } + } + } + function nt(t2, e2) { + const { textNodeName: n2 } = e2, i2 = Object.keys(t2).length; + return 0 === i2 || !(1 !== i2 || !t2[n2] && "boolean" != typeof t2[n2] && 0 !== t2[n2]); + } + class it { + constructor(t2) { + this.externalEntities = {}, this.options = w(t2); + } + parse(t2, e2) { + if ("string" != typeof t2 && t2.toString) t2 = t2.toString(); + else if ("string" != typeof t2) throw new Error("XML data is accepted in String or Bytes[] form."); + if (e2) { + true === e2 && (e2 = {}); + const n3 = a(t2, e2); + if (true !== n3) throw Error(`${n3.err.msg}:${n3.err.line}:${n3.err.col}`); + } + const n2 = new F(this.options); + n2.addExternalEntities(this.externalEntities); + const i2 = n2.parseXml(t2); + return this.options.preserveOrder || void 0 === i2 ? i2 : J(i2, this.options); + } + addEntity(t2, e2) { + if (-1 !== e2.indexOf("&")) throw new Error("Entity value can't have '&'"); + if (-1 !== t2.indexOf("&") || -1 !== t2.indexOf(";")) throw new Error("An entity must be set without '&' and ';'. Eg. use '#xD' for ' '"); + if ("&" === e2) throw new Error("An entity with value '&' is not permitted"); + this.externalEntities[t2] = e2; + } + static getMetaDataSymbol() { + return I.getMetaDataSymbol(); + } + } + function st(t2, e2) { + let n2 = ""; + return e2.format && e2.indentBy.length > 0 && (n2 = "\n"), rt(t2, e2, "", n2); + } + function rt(t2, e2, n2, i2) { + let s2 = "", r2 = false; + for (let o2 = 0; o2 < t2.length; o2++) { + const a2 = t2[o2], l2 = ot(a2); + if (void 0 === l2) continue; + let u2 = ""; + if (u2 = 0 === n2.length ? l2 : `${n2}.${l2}`, l2 === e2.textNodeName) { + let t3 = a2[l2]; + lt(u2, e2) || (t3 = e2.tagValueProcessor(l2, t3), t3 = ut(t3, e2)), r2 && (s2 += i2), s2 += t3, r2 = false; + continue; + } + if (l2 === e2.cdataPropName) { + r2 && (s2 += i2), s2 += ``, r2 = false; + continue; + } + if (l2 === e2.commentPropName) { + s2 += i2 + ``, r2 = true; + continue; + } + if ("?" === l2[0]) { + const t3 = at(a2[":@"], e2), n3 = "?xml" === l2 ? "" : i2; + let o3 = a2[l2][0][e2.textNodeName]; + o3 = 0 !== o3.length ? " " + o3 : "", s2 += n3 + `<${l2}${o3}${t3}?>`, r2 = true; + continue; + } + let h2 = i2; + "" !== h2 && (h2 += e2.indentBy); + const d2 = i2 + `<${l2}${at(a2[":@"], e2)}`, p2 = rt(a2[l2], e2, u2, h2); + -1 !== e2.unpairedTags.indexOf(l2) ? e2.suppressUnpairedNode ? s2 += d2 + ">" : s2 += d2 + "/>" : p2 && 0 !== p2.length || !e2.suppressEmptyNode ? p2 && p2.endsWith(">") ? 
s2 += d2 + `>${p2}${i2}` : (s2 += d2 + ">", p2 && "" !== i2 && (p2.includes("/>") || p2.includes("`) : s2 += d2 + "/>", r2 = true; + } + return s2; + } + function ot(t2) { + const e2 = Object.keys(t2); + for (let n2 = 0; n2 < e2.length; n2++) { + const i2 = e2[n2]; + if (t2.hasOwnProperty(i2) && ":@" !== i2) return i2; + } + } + function at(t2, e2) { + let n2 = ""; + if (t2 && !e2.ignoreAttributes) for (let i2 in t2) { + if (!t2.hasOwnProperty(i2)) continue; + let s2 = e2.attributeValueProcessor(i2, t2[i2]); + s2 = ut(s2, e2), true === s2 && e2.suppressBooleanAttributes ? n2 += ` ${i2.substr(e2.attributeNamePrefix.length)}` : n2 += ` ${i2.substr(e2.attributeNamePrefix.length)}="${s2}"`; + } + return n2; + } + function lt(t2, e2) { + let n2 = (t2 = t2.substr(0, t2.length - e2.textNodeName.length - 1)).substr(t2.lastIndexOf(".") + 1); + for (let i2 in e2.stopNodes) if (e2.stopNodes[i2] === t2 || e2.stopNodes[i2] === "*." + n2) return true; + return false; + } + function ut(t2, e2) { + if (t2 && t2.length > 0 && e2.processEntities) for (let n2 = 0; n2 < e2.entities.length; n2++) { + const i2 = e2.entities[n2]; + t2 = t2.replace(i2.regex, i2.val); + } + return t2; + } + const ht = { attributeNamePrefix: "@_", attributesGroupName: false, textNodeName: "#text", ignoreAttributes: true, cdataPropName: false, format: false, indentBy: " ", suppressEmptyNode: false, suppressUnpairedNode: true, suppressBooleanAttributes: true, tagValueProcessor: function(t2, e2) { + return e2; + }, attributeValueProcessor: function(t2, e2) { + return e2; + }, preserveOrder: false, commentPropName: false, unpairedTags: [], entities: [{ regex: new RegExp("&", "g"), val: "&" }, { regex: new RegExp(">", "g"), val: ">" }, { regex: new RegExp("<", "g"), val: "<" }, { regex: new RegExp("'", "g"), val: "'" }, { regex: new RegExp('"', "g"), val: """ }], processEntities: true, stopNodes: [], oneListGroup: false }; + function dt(t2) { + this.options = Object.assign({}, ht, t2), true === this.options.ignoreAttributes || this.options.attributesGroupName ? this.isAttribute = function() { + return false; + } : (this.ignoreAttributesFn = L(this.options.ignoreAttributes), this.attrPrefixLen = this.options.attributeNamePrefix.length, this.isAttribute = ct), this.processTextOrObjNode = pt, this.options.format ? (this.indentate = ft, this.tagEndChar = ">\n", this.newLine = "\n") : (this.indentate = function() { + return ""; + }, this.tagEndChar = ">", this.newLine = ""); + } + function pt(t2, e2, n2, i2) { + const s2 = this.j2x(t2, n2 + 1, i2.concat(e2)); + return void 0 !== t2[this.options.textNodeName] && 1 === Object.keys(t2).length ? this.buildTextValNode(t2[this.options.textNodeName], e2, s2.attrStr, n2) : this.buildObjectNode(s2.val, e2, s2.attrStr, n2); + } + function ft(t2) { + return this.options.indentBy.repeat(t2); + } + function ct(t2) { + return !(!t2.startsWith(this.options.attributeNamePrefix) || t2 === this.options.textNodeName) && t2.substr(this.attrPrefixLen); + } + dt.prototype.build = function(t2) { + return this.options.preserveOrder ? 
st(t2, this.options) : (Array.isArray(t2) && this.options.arrayNodeName && this.options.arrayNodeName.length > 1 && (t2 = { [this.options.arrayNodeName]: t2 }), this.j2x(t2, 0, []).val); + }, dt.prototype.j2x = function(t2, e2, n2) { + let i2 = "", s2 = ""; + const r2 = n2.join("."); + for (let o2 in t2) if (Object.prototype.hasOwnProperty.call(t2, o2)) if (void 0 === t2[o2]) this.isAttribute(o2) && (s2 += ""); + else if (null === t2[o2]) this.isAttribute(o2) || o2 === this.options.cdataPropName ? s2 += "" : "?" === o2[0] ? s2 += this.indentate(e2) + "<" + o2 + "?" + this.tagEndChar : s2 += this.indentate(e2) + "<" + o2 + "/" + this.tagEndChar; + else if (t2[o2] instanceof Date) s2 += this.buildTextValNode(t2[o2], o2, "", e2); + else if ("object" != typeof t2[o2]) { + const n3 = this.isAttribute(o2); + if (n3 && !this.ignoreAttributesFn(n3, r2)) i2 += this.buildAttrPairStr(n3, "" + t2[o2]); + else if (!n3) if (o2 === this.options.textNodeName) { + let e3 = this.options.tagValueProcessor(o2, "" + t2[o2]); + s2 += this.replaceEntitiesValue(e3); + } else s2 += this.buildTextValNode(t2[o2], o2, "", e2); + } else if (Array.isArray(t2[o2])) { + const i3 = t2[o2].length; + let r3 = "", a2 = ""; + for (let l2 = 0; l2 < i3; l2++) { + const i4 = t2[o2][l2]; + if (void 0 === i4) ; + else if (null === i4) "?" === o2[0] ? s2 += this.indentate(e2) + "<" + o2 + "?" + this.tagEndChar : s2 += this.indentate(e2) + "<" + o2 + "/" + this.tagEndChar; + else if ("object" == typeof i4) if (this.options.oneListGroup) { + const t3 = this.j2x(i4, e2 + 1, n2.concat(o2)); + r3 += t3.val, this.options.attributesGroupName && i4.hasOwnProperty(this.options.attributesGroupName) && (a2 += t3.attrStr); + } else r3 += this.processTextOrObjNode(i4, o2, e2, n2); + else if (this.options.oneListGroup) { + let t3 = this.options.tagValueProcessor(o2, i4); + t3 = this.replaceEntitiesValue(t3), r3 += t3; + } else r3 += this.buildTextValNode(i4, o2, "", e2); + } + this.options.oneListGroup && (r3 = this.buildObjectNode(r3, o2, a2, e2)), s2 += r3; + } else if (this.options.attributesGroupName && o2 === this.options.attributesGroupName) { + const e3 = Object.keys(t2[o2]), n3 = e3.length; + for (let s3 = 0; s3 < n3; s3++) i2 += this.buildAttrPairStr(e3[s3], "" + t2[o2][e3[s3]]); + } else s2 += this.processTextOrObjNode(t2[o2], o2, e2, n2); + return { attrStr: i2, val: s2 }; + }, dt.prototype.buildAttrPairStr = function(t2, e2) { + return e2 = this.options.attributeValueProcessor(t2, "" + e2), e2 = this.replaceEntitiesValue(e2), this.options.suppressBooleanAttributes && "true" === e2 ? " " + t2 : " " + t2 + '="' + e2 + '"'; + }, dt.prototype.buildObjectNode = function(t2, e2, n2, i2) { + if ("" === t2) return "?" === e2[0] ? this.indentate(i2) + "<" + e2 + n2 + "?" + this.tagEndChar : this.indentate(i2) + "<" + e2 + n2 + this.closeTag(e2) + this.tagEndChar; + { + let s2 = "` + this.newLine : this.indentate(i2) + "<" + e2 + n2 + r2 + this.tagEndChar + t2 + this.indentate(i2) + s2 : this.indentate(i2) + "<" + e2 + n2 + r2 + ">" + t2 + s2; + } + }, dt.prototype.closeTag = function(t2) { + let e2 = ""; + return -1 !== this.options.unpairedTags.indexOf(t2) ? this.options.suppressUnpairedNode || (e2 = "/") : e2 = this.options.suppressEmptyNode ? "/" : `>` + this.newLine; + if (false !== this.options.commentPropName && e2 === this.options.commentPropName) return this.indentate(i2) + `` + this.newLine; + if ("?" === e2[0]) return this.indentate(i2) + "<" + e2 + n2 + "?" 
+ this.tagEndChar; + { + let s2 = this.options.tagValueProcessor(e2, t2); + return s2 = this.replaceEntitiesValue(s2), "" === s2 ? this.indentate(i2) + "<" + e2 + n2 + this.closeTag(e2) + this.tagEndChar : this.indentate(i2) + "<" + e2 + n2 + ">" + s2 + " 0 && this.options.processEntities) for (let e2 = 0; e2 < this.options.entities.length; e2++) { + const n2 = this.options.entities[e2]; + t2 = t2.replace(n2.regex, n2.val); + } + return t2; + }; + const gt = { validate: a }; + module2.exports = e; + })(); } }); -// node_modules/@azure/storage-blob/dist/commonjs/StorageBrowserPolicyFactory.js -var require_StorageBrowserPolicyFactory2 = __commonJS({ - "node_modules/@azure/storage-blob/dist/commonjs/StorageBrowserPolicyFactory.js"(exports2) { +// node_modules/@azure/core-xml/dist/commonjs/xml.common.js +var require_xml_common = __commonJS({ + "node_modules/@azure/core-xml/dist/commonjs/xml.common.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.StorageBrowserPolicyFactory = exports2.StorageBrowserPolicy = void 0; - var StorageBrowserPolicy_js_1 = require_StorageBrowserPolicy2(); - Object.defineProperty(exports2, "StorageBrowserPolicy", { enumerable: true, get: function() { - return StorageBrowserPolicy_js_1.StorageBrowserPolicy; - } }); - var StorageBrowserPolicyFactory = class { - /** - * Creates a StorageBrowserPolicyFactory object. - * - * @param nextPolicy - - * @param options - - */ - create(nextPolicy, options) { - return new StorageBrowserPolicy_js_1.StorageBrowserPolicy(nextPolicy, options); - } - }; - exports2.StorageBrowserPolicyFactory = StorageBrowserPolicyFactory; + exports2.XML_CHARKEY = exports2.XML_ATTRKEY = void 0; + exports2.XML_ATTRKEY = "$"; + exports2.XML_CHARKEY = "_"; } }); -// node_modules/@azure/storage-blob/dist/commonjs/policies/StorageCorrectContentLengthPolicy.js -var require_StorageCorrectContentLengthPolicy2 = __commonJS({ - "node_modules/@azure/storage-blob/dist/commonjs/policies/StorageCorrectContentLengthPolicy.js"(exports2) { +// node_modules/@azure/core-xml/dist/commonjs/xml.js +var require_xml = __commonJS({ + "node_modules/@azure/core-xml/dist/commonjs/xml.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.storageCorrectContentLengthPolicyName = void 0; - exports2.storageCorrectContentLengthPolicy = storageCorrectContentLengthPolicy; - var constants_js_1 = require_constants15(); - exports2.storageCorrectContentLengthPolicyName = "StorageCorrectContentLengthPolicy"; - function storageCorrectContentLengthPolicy() { - function correctContentLength(request2) { - if (request2.body && (typeof request2.body === "string" || Buffer.isBuffer(request2.body)) && request2.body.length > 0) { - request2.headers.set(constants_js_1.HeaderConstants.CONTENT_LENGTH, Buffer.byteLength(request2.body)); - } - } + exports2.stringifyXML = stringifyXML; + exports2.parseXML = parseXML; + var fast_xml_parser_1 = require_fxp(); + var xml_common_js_1 = require_xml_common(); + function getCommonOptions(options) { + var _a; return { - name: exports2.storageCorrectContentLengthPolicyName, - async sendRequest(request2, next) { - correctContentLength(request2); - return next(request2); - } + attributesGroupName: xml_common_js_1.XML_ATTRKEY, + textNodeName: (_a = options.xmlCharKey) !== null && _a !== void 0 ? 
_a : xml_common_js_1.XML_CHARKEY, + ignoreAttributes: false, + suppressBooleanAttributes: false }; } + function getSerializerOptions(options = {}) { + var _a, _b; + return Object.assign(Object.assign({}, getCommonOptions(options)), { attributeNamePrefix: "@_", format: true, suppressEmptyNode: true, indentBy: "", rootNodeName: (_a = options.rootName) !== null && _a !== void 0 ? _a : "root", cdataPropName: (_b = options.cdataPropName) !== null && _b !== void 0 ? _b : "__cdata" }); + } + function getParserOptions(options = {}) { + return Object.assign(Object.assign({}, getCommonOptions(options)), { parseAttributeValue: false, parseTagValue: false, attributeNamePrefix: "", stopNodes: options.stopNodes, processEntities: true, trimValues: false }); + } + function stringifyXML(obj, opts = {}) { + const parserOptions = getSerializerOptions(opts); + const j2x = new fast_xml_parser_1.XMLBuilder(parserOptions); + const node = { [parserOptions.rootNodeName]: obj }; + const xmlData = j2x.build(node); + return `${xmlData}`.replace(/\n/g, ""); + } + async function parseXML(str2, opts = {}) { + if (!str2) { + throw new Error("Document is empty"); + } + const validation = fast_xml_parser_1.XMLValidator.validate(str2); + if (validation !== true) { + throw validation; + } + const parser = new fast_xml_parser_1.XMLParser(getParserOptions(opts)); + const parsedXml = parser.parse(str2); + if (parsedXml["?xml"]) { + delete parsedXml["?xml"]; + } + if (!opts.includeRoot) { + for (const key of Object.keys(parsedXml)) { + const value = parsedXml[key]; + return typeof value === "object" ? Object.assign({}, value) : value; + } + } + return parsedXml; + } } }); -// node_modules/@azure/storage-blob/dist/commonjs/Pipeline.js -var require_Pipeline = __commonJS({ - "node_modules/@azure/storage-blob/dist/commonjs/Pipeline.js"(exports2) { +// node_modules/@azure/core-xml/dist/commonjs/index.js +var require_commonjs10 = __commonJS({ + "node_modules/@azure/core-xml/dist/commonjs/index.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.Pipeline = exports2.StorageOAuthScopes = void 0; - exports2.isPipelineLike = isPipelineLike; - exports2.newPipeline = newPipeline; - exports2.getCoreClientOptions = getCoreClientOptions; - exports2.getCredentialFromPipeline = getCredentialFromPipeline; - var core_http_compat_1 = require_commonjs9(); - var core_rest_pipeline_1 = require_commonjs6(); - var core_client_1 = require_commonjs8(); - var core_xml_1 = require_commonjs10(); - var core_auth_1 = require_commonjs7(); - var log_js_1 = require_log5(); - var StorageRetryPolicyFactory_js_1 = require_StorageRetryPolicyFactory(); - var StorageSharedKeyCredential_js_1 = require_StorageSharedKeyCredential(); - var AnonymousCredential_js_1 = require_AnonymousCredential(); - var constants_js_1 = require_constants15(); - Object.defineProperty(exports2, "StorageOAuthScopes", { enumerable: true, get: function() { - return constants_js_1.StorageOAuthScopes; + exports2.XML_CHARKEY = exports2.XML_ATTRKEY = exports2.parseXML = exports2.stringifyXML = void 0; + var xml_js_1 = require_xml(); + Object.defineProperty(exports2, "stringifyXML", { enumerable: true, get: function() { + return xml_js_1.stringifyXML; } }); - var storage_common_1 = require_commonjs13(); - var StorageBrowserPolicyV2_js_1 = require_StorageBrowserPolicyV22(); - var StorageRetryPolicyV2_js_1 = require_StorageRetryPolicyV22(); - var StorageSharedKeyCredentialPolicyV2_js_1 = require_StorageSharedKeyCredentialPolicyV22(); - var 
StorageBrowserPolicyFactory_js_1 = require_StorageBrowserPolicyFactory2(); - var StorageCorrectContentLengthPolicy_js_1 = require_StorageCorrectContentLengthPolicy2(); - function isPipelineLike(pipeline) { - if (!pipeline || typeof pipeline !== "object") { - return false; + Object.defineProperty(exports2, "parseXML", { enumerable: true, get: function() { + return xml_js_1.parseXML; + } }); + var xml_common_js_1 = require_xml_common(); + Object.defineProperty(exports2, "XML_ATTRKEY", { enumerable: true, get: function() { + return xml_common_js_1.XML_ATTRKEY; + } }); + Object.defineProperty(exports2, "XML_CHARKEY", { enumerable: true, get: function() { + return xml_common_js_1.XML_CHARKEY; + } }); + } +}); + +// node_modules/@azure/storage-blob/dist/commonjs/log.js +var require_log5 = __commonJS({ + "node_modules/@azure/storage-blob/dist/commonjs/log.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.logger = void 0; + var logger_1 = require_commonjs2(); + exports2.logger = (0, logger_1.createClientLogger)("storage-blob"); + } +}); + +// node_modules/@azure/storage-blob/node_modules/@azure/abort-controller/dist/commonjs/AbortError.js +var require_AbortError3 = __commonJS({ + "node_modules/@azure/storage-blob/node_modules/@azure/abort-controller/dist/commonjs/AbortError.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.AbortError = void 0; + var AbortError = class extends Error { + constructor(message) { + super(message); + this.name = "AbortError"; } - const castPipeline = pipeline; - return Array.isArray(castPipeline.factories) && typeof castPipeline.options === "object" && typeof castPipeline.toServiceClientOptions === "function"; - } - var Pipeline = class { + }; + exports2.AbortError = AbortError; + } +}); + +// node_modules/@azure/storage-blob/node_modules/@azure/abort-controller/dist/commonjs/index.js +var require_commonjs11 = __commonJS({ + "node_modules/@azure/storage-blob/node_modules/@azure/abort-controller/dist/commonjs/index.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.AbortError = void 0; + var AbortError_js_1 = require_AbortError3(); + Object.defineProperty(exports2, "AbortError", { enumerable: true, get: function() { + return AbortError_js_1.AbortError; + } }); + } +}); + +// node_modules/@azure/storage-blob/dist/commonjs/policies/RequestPolicy.js +var require_RequestPolicy = __commonJS({ + "node_modules/@azure/storage-blob/dist/commonjs/policies/RequestPolicy.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.BaseRequestPolicy = void 0; + var BaseRequestPolicy = class { + _nextPolicy; + _options; /** - * A list of chained request policy factories. + * The main method to implement that manipulates a request/response. */ - factories; + constructor(_nextPolicy, _options) { + this._nextPolicy = _nextPolicy; + this._options = _options; + } /** - * Configures pipeline logger and HTTP client. + * Get whether or not a log with the provided log level should be logged. + * @param logLevel - The log level of the log that will be logged. + * @returns Whether or not a log with the provided log level should be logged. */ - options; + shouldLog(logLevel) { + return this._options.shouldLog(logLevel); + } /** - * Creates an instance of Pipeline. Customize HTTPClient by implementing IHttpClient interface. 
- * - * @param factories - - * @param options - + * Attempt to log the provided message to the provided logger. If no logger was provided or if + * the log level does not meat the logger's threshold, then nothing will be logged. + * @param logLevel - The log level of this log. + * @param message - The message of this log. */ - constructor(factories, options = {}) { - this.factories = factories; - this.options = options; + log(logLevel, message) { + this._options.log(logLevel, message); } - /** - * Transfer Pipeline object to ServiceClientOptions object which is required by - * ServiceClient constructor. - * - * @returns The ServiceClientOptions object from this Pipeline. - */ - toServiceClientOptions() { - return { - httpClient: this.options.httpClient, - requestPolicyFactories: this.factories - }; + }; + exports2.BaseRequestPolicy = BaseRequestPolicy; + } +}); + +// node_modules/@azure/storage-blob/dist/commonjs/utils/constants.js +var require_constants15 = __commonJS({ + "node_modules/@azure/storage-blob/dist/commonjs/utils/constants.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.PathStylePorts = exports2.BlobDoesNotUseCustomerSpecifiedEncryption = exports2.BlobUsesCustomerSpecifiedEncryptionMsg = exports2.StorageBlobLoggingAllowedQueryParameters = exports2.StorageBlobLoggingAllowedHeaderNames = exports2.DevelopmentConnectionString = exports2.EncryptionAlgorithmAES25 = exports2.HTTP_VERSION_1_1 = exports2.HTTP_LINE_ENDING = exports2.BATCH_MAX_PAYLOAD_IN_BYTES = exports2.BATCH_MAX_REQUEST = exports2.SIZE_1_MB = exports2.ETagAny = exports2.ETagNone = exports2.HeaderConstants = exports2.HTTPURLConnection = exports2.URLConstants = exports2.StorageOAuthScopes = exports2.REQUEST_TIMEOUT = exports2.DEFAULT_MAX_DOWNLOAD_RETRY_REQUESTS = exports2.DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES = exports2.DEFAULT_BLOCK_BUFFER_SIZE_BYTES = exports2.BLOCK_BLOB_MAX_BLOCKS = exports2.BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES = exports2.BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES = exports2.SERVICE_VERSION = exports2.SDK_VERSION = void 0; + exports2.SDK_VERSION = "12.29.1"; + exports2.SERVICE_VERSION = "2025-11-05"; + exports2.BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES = 256 * 1024 * 1024; + exports2.BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES = 4e3 * 1024 * 1024; + exports2.BLOCK_BLOB_MAX_BLOCKS = 5e4; + exports2.DEFAULT_BLOCK_BUFFER_SIZE_BYTES = 8 * 1024 * 1024; + exports2.DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES = 4 * 1024 * 1024; + exports2.DEFAULT_MAX_DOWNLOAD_RETRY_REQUESTS = 5; + exports2.REQUEST_TIMEOUT = 100 * 1e3; + exports2.StorageOAuthScopes = "https://storage.azure.com/.default"; + exports2.URLConstants = { + Parameters: { + FORCE_BROWSER_NO_CACHE: "_", + SIGNATURE: "sig", + SNAPSHOT: "snapshot", + VERSIONID: "versionid", + TIMEOUT: "timeout" } }; - exports2.Pipeline = Pipeline; - function newPipeline(credential, pipelineOptions = {}) { - if (!credential) { - credential = new AnonymousCredential_js_1.AnonymousCredential(); + exports2.HTTPURLConnection = { + HTTP_ACCEPTED: 202, + HTTP_CONFLICT: 409, + HTTP_NOT_FOUND: 404, + HTTP_PRECON_FAILED: 412, + HTTP_RANGE_NOT_SATISFIABLE: 416 + }; + exports2.HeaderConstants = { + AUTHORIZATION: "Authorization", + AUTHORIZATION_SCHEME: "Bearer", + CONTENT_ENCODING: "Content-Encoding", + CONTENT_ID: "Content-ID", + CONTENT_LANGUAGE: "Content-Language", + CONTENT_LENGTH: "Content-Length", + CONTENT_MD5: "Content-Md5", + CONTENT_TRANSFER_ENCODING: "Content-Transfer-Encoding", + CONTENT_TYPE: "Content-Type", + COOKIE: "Cookie", + DATE: "date", + IF_MATCH: 
"if-match", + IF_MODIFIED_SINCE: "if-modified-since", + IF_NONE_MATCH: "if-none-match", + IF_UNMODIFIED_SINCE: "if-unmodified-since", + PREFIX_FOR_STORAGE: "x-ms-", + RANGE: "Range", + USER_AGENT: "User-Agent", + X_MS_CLIENT_REQUEST_ID: "x-ms-client-request-id", + X_MS_COPY_SOURCE: "x-ms-copy-source", + X_MS_DATE: "x-ms-date", + X_MS_ERROR_CODE: "x-ms-error-code", + X_MS_VERSION: "x-ms-version", + X_MS_CopySourceErrorCode: "x-ms-copy-source-error-code" + }; + exports2.ETagNone = ""; + exports2.ETagAny = "*"; + exports2.SIZE_1_MB = 1 * 1024 * 1024; + exports2.BATCH_MAX_REQUEST = 256; + exports2.BATCH_MAX_PAYLOAD_IN_BYTES = 4 * exports2.SIZE_1_MB; + exports2.HTTP_LINE_ENDING = "\r\n"; + exports2.HTTP_VERSION_1_1 = "HTTP/1.1"; + exports2.EncryptionAlgorithmAES25 = "AES256"; + exports2.DevelopmentConnectionString = `DefaultEndpointsProtocol=http;AccountName=devstoreaccount1;AccountKey=Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==;BlobEndpoint=http://127.0.0.1:10000/devstoreaccount1;`; + exports2.StorageBlobLoggingAllowedHeaderNames = [ + "Access-Control-Allow-Origin", + "Cache-Control", + "Content-Length", + "Content-Type", + "Date", + "Request-Id", + "traceparent", + "Transfer-Encoding", + "User-Agent", + "x-ms-client-request-id", + "x-ms-date", + "x-ms-error-code", + "x-ms-request-id", + "x-ms-return-client-request-id", + "x-ms-version", + "Accept-Ranges", + "Content-Disposition", + "Content-Encoding", + "Content-Language", + "Content-MD5", + "Content-Range", + "ETag", + "Last-Modified", + "Server", + "Vary", + "x-ms-content-crc64", + "x-ms-copy-action", + "x-ms-copy-completion-time", + "x-ms-copy-id", + "x-ms-copy-progress", + "x-ms-copy-status", + "x-ms-has-immutability-policy", + "x-ms-has-legal-hold", + "x-ms-lease-state", + "x-ms-lease-status", + "x-ms-range", + "x-ms-request-server-encrypted", + "x-ms-server-encrypted", + "x-ms-snapshot", + "x-ms-source-range", + "If-Match", + "If-Modified-Since", + "If-None-Match", + "If-Unmodified-Since", + "x-ms-access-tier", + "x-ms-access-tier-change-time", + "x-ms-access-tier-inferred", + "x-ms-account-kind", + "x-ms-archive-status", + "x-ms-blob-append-offset", + "x-ms-blob-cache-control", + "x-ms-blob-committed-block-count", + "x-ms-blob-condition-appendpos", + "x-ms-blob-condition-maxsize", + "x-ms-blob-content-disposition", + "x-ms-blob-content-encoding", + "x-ms-blob-content-language", + "x-ms-blob-content-length", + "x-ms-blob-content-md5", + "x-ms-blob-content-type", + "x-ms-blob-public-access", + "x-ms-blob-sequence-number", + "x-ms-blob-type", + "x-ms-copy-destination-snapshot", + "x-ms-creation-time", + "x-ms-default-encryption-scope", + "x-ms-delete-snapshots", + "x-ms-delete-type-permanent", + "x-ms-deny-encryption-scope-override", + "x-ms-encryption-algorithm", + "x-ms-if-sequence-number-eq", + "x-ms-if-sequence-number-le", + "x-ms-if-sequence-number-lt", + "x-ms-incremental-copy", + "x-ms-lease-action", + "x-ms-lease-break-period", + "x-ms-lease-duration", + "x-ms-lease-id", + "x-ms-lease-time", + "x-ms-page-write", + "x-ms-proposed-lease-id", + "x-ms-range-get-content-md5", + "x-ms-rehydrate-priority", + "x-ms-sequence-number-action", + "x-ms-sku-name", + "x-ms-source-content-md5", + "x-ms-source-if-match", + "x-ms-source-if-modified-since", + "x-ms-source-if-none-match", + "x-ms-source-if-unmodified-since", + "x-ms-tag-count", + "x-ms-encryption-key-sha256", + "x-ms-copy-source-error-code", + "x-ms-copy-source-status-code", + "x-ms-if-tags", + "x-ms-source-if-tags" + ]; + 
exports2.StorageBlobLoggingAllowedQueryParameters = [ + "comp", + "maxresults", + "rscc", + "rscd", + "rsce", + "rscl", + "rsct", + "se", + "si", + "sip", + "sp", + "spr", + "sr", + "srt", + "ss", + "st", + "sv", + "include", + "marker", + "prefix", + "copyid", + "restype", + "blockid", + "blocklisttype", + "delimiter", + "prevsnapshot", + "ske", + "skoid", + "sks", + "skt", + "sktid", + "skv", + "snapshot" + ]; + exports2.BlobUsesCustomerSpecifiedEncryptionMsg = "BlobUsesCustomerSpecifiedEncryption"; + exports2.BlobDoesNotUseCustomerSpecifiedEncryption = "BlobDoesNotUseCustomerSpecifiedEncryption"; + exports2.PathStylePorts = [ + "10000", + "10001", + "10002", + "10003", + "10004", + "10100", + "10101", + "10102", + "10103", + "10104", + "11000", + "11001", + "11002", + "11003", + "11004", + "11100", + "11101", + "11102", + "11103", + "11104" + ]; + } +}); + +// node_modules/@azure/storage-blob/dist/commonjs/utils/utils.common.js +var require_utils_common = __commonJS({ + "node_modules/@azure/storage-blob/dist/commonjs/utils/utils.common.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.escapeURLPath = escapeURLPath; + exports2.getValueInConnString = getValueInConnString; + exports2.extractConnectionStringParts = extractConnectionStringParts; + exports2.appendToURLPath = appendToURLPath; + exports2.setURLParameter = setURLParameter; + exports2.getURLParameter = getURLParameter; + exports2.setURLHost = setURLHost; + exports2.getURLPath = getURLPath; + exports2.getURLScheme = getURLScheme; + exports2.getURLPathAndQuery = getURLPathAndQuery; + exports2.getURLQueries = getURLQueries; + exports2.appendToURLQuery = appendToURLQuery; + exports2.truncatedISO8061Date = truncatedISO8061Date; + exports2.base64encode = base64encode; + exports2.base64decode = base64decode; + exports2.generateBlockID = generateBlockID; + exports2.delay = delay2; + exports2.padStart = padStart2; + exports2.sanitizeURL = sanitizeURL; + exports2.sanitizeHeaders = sanitizeHeaders; + exports2.iEqual = iEqual; + exports2.getAccountNameFromUrl = getAccountNameFromUrl; + exports2.isIpEndpointStyle = isIpEndpointStyle; + exports2.toBlobTagsString = toBlobTagsString; + exports2.toBlobTags = toBlobTags; + exports2.toTags = toTags; + exports2.toQuerySerialization = toQuerySerialization; + exports2.parseObjectReplicationRecord = parseObjectReplicationRecord; + exports2.attachCredential = attachCredential; + exports2.httpAuthorizationToString = httpAuthorizationToString; + exports2.BlobNameToString = BlobNameToString; + exports2.ConvertInternalResponseOfListBlobFlat = ConvertInternalResponseOfListBlobFlat; + exports2.ConvertInternalResponseOfListBlobHierarchy = ConvertInternalResponseOfListBlobHierarchy; + exports2.ExtractPageRangeInfoItems = ExtractPageRangeInfoItems; + exports2.EscapePath = EscapePath; + exports2.assertResponse = assertResponse; + var core_rest_pipeline_1 = require_commonjs6(); + var core_util_1 = require_commonjs4(); + var constants_js_1 = require_constants15(); + function escapeURLPath(url2) { + const urlParsed = new URL(url2); + let path13 = urlParsed.pathname; + path13 = path13 || "/"; + path13 = escape(path13); + urlParsed.pathname = path13; + return urlParsed.toString(); + } + function getProxyUriFromDevConnString(connectionString) { + let proxyUri = ""; + if (connectionString.search("DevelopmentStorageProxyUri=") !== -1) { + const matchCredentials = connectionString.split(";"); + for (const element of matchCredentials) { + if 
(element.trim().startsWith("DevelopmentStorageProxyUri=")) { + proxyUri = element.trim().match("DevelopmentStorageProxyUri=(.*)")[1]; + } + } } - const pipeline = new Pipeline([], pipelineOptions); - pipeline._credential = credential; - return pipeline; + return proxyUri; } - function processDownlevelPipeline(pipeline) { - const knownFactoryFunctions = [ - isAnonymousCredential, - isStorageSharedKeyCredential, - isCoreHttpBearerTokenFactory, - isStorageBrowserPolicyFactory, - isStorageRetryPolicyFactory, - isStorageTelemetryPolicyFactory, - isCoreHttpPolicyFactory - ]; - if (pipeline.factories.length) { - const novelFactories = pipeline.factories.filter((factory) => { - return !knownFactoryFunctions.some((knownFactory) => knownFactory(factory)); - }); - if (novelFactories.length) { - const hasInjector = novelFactories.some((factory) => isInjectorPolicyFactory(factory)); - return { - wrappedPolicies: (0, core_http_compat_1.createRequestPolicyFactoryPolicy)(novelFactories), - afterRetry: hasInjector - }; + function getValueInConnString(connectionString, argument) { + const elements = connectionString.split(";"); + for (const element of elements) { + if (element.trim().startsWith(argument)) { + return element.trim().match(argument + "=(.*)")[1]; } } - return void 0; + return ""; } - function getCoreClientOptions(pipeline) { - const { httpClient: v1Client, ...restOptions } = pipeline.options; - let httpClient = pipeline._coreHttpClient; - if (!httpClient) { - httpClient = v1Client ? (0, core_http_compat_1.convertHttpClient)(v1Client) : (0, storage_common_1.getCachedDefaultHttpClient)(); - pipeline._coreHttpClient = httpClient; + function extractConnectionStringParts(connectionString) { + let proxyUri = ""; + if (connectionString.startsWith("UseDevelopmentStorage=true")) { + proxyUri = getProxyUriFromDevConnString(connectionString); + connectionString = constants_js_1.DevelopmentConnectionString; } - let corePipeline = pipeline._corePipeline; - if (!corePipeline) { - const packageDetails = `azsdk-js-azure-storage-blob/${constants_js_1.SDK_VERSION}`; - const userAgentPrefix = restOptions.userAgentOptions && restOptions.userAgentOptions.userAgentPrefix ? `${restOptions.userAgentOptions.userAgentPrefix} ${packageDetails}` : `${packageDetails}`; - corePipeline = (0, core_client_1.createClientPipeline)({ - ...restOptions, - loggingOptions: { - additionalAllowedHeaderNames: constants_js_1.StorageBlobLoggingAllowedHeaderNames, - additionalAllowedQueryParameters: constants_js_1.StorageBlobLoggingAllowedQueryParameters, - logger: log_js_1.logger.info - }, - userAgentOptions: { - userAgentPrefix - }, - serializationOptions: { - stringifyXML: core_xml_1.stringifyXML, - serializerOptions: { - xml: { - // Use customized XML char key of "#" so we can deserialize metadata - // with "_" key - xmlCharKey: "#" - } - } - }, - deserializationOptions: { - parseXML: core_xml_1.parseXML, - serializerOptions: { - xml: { - // Use customized XML char key of "#" so we can deserialize metadata - // with "_" key - xmlCharKey: "#" - } - } + let blobEndpoint = getValueInConnString(connectionString, "BlobEndpoint"); + blobEndpoint = blobEndpoint.endsWith("/") ? 
blobEndpoint.slice(0, -1) : blobEndpoint; + if (connectionString.search("DefaultEndpointsProtocol=") !== -1 && connectionString.search("AccountKey=") !== -1) { + let defaultEndpointsProtocol = ""; + let accountName = ""; + let accountKey = Buffer.from("accountKey", "base64"); + let endpointSuffix = ""; + accountName = getValueInConnString(connectionString, "AccountName"); + accountKey = Buffer.from(getValueInConnString(connectionString, "AccountKey"), "base64"); + if (!blobEndpoint) { + defaultEndpointsProtocol = getValueInConnString(connectionString, "DefaultEndpointsProtocol"); + const protocol = defaultEndpointsProtocol.toLowerCase(); + if (protocol !== "https" && protocol !== "http") { + throw new Error("Invalid DefaultEndpointsProtocol in the provided Connection String. Expecting 'https' or 'http'"); } - }); - corePipeline.removePolicy({ phase: "Retry" }); - corePipeline.removePolicy({ name: core_rest_pipeline_1.decompressResponsePolicyName }); - corePipeline.addPolicy((0, StorageCorrectContentLengthPolicy_js_1.storageCorrectContentLengthPolicy)()); - corePipeline.addPolicy((0, StorageRetryPolicyV2_js_1.storageRetryPolicy)(restOptions.retryOptions), { phase: "Retry" }); - corePipeline.addPolicy((0, storage_common_1.storageRequestFailureDetailsParserPolicy)()); - corePipeline.addPolicy((0, StorageBrowserPolicyV2_js_1.storageBrowserPolicy)()); - const downlevelResults = processDownlevelPipeline(pipeline); - if (downlevelResults) { - corePipeline.addPolicy(downlevelResults.wrappedPolicies, downlevelResults.afterRetry ? { afterPhase: "Retry" } : void 0); + endpointSuffix = getValueInConnString(connectionString, "EndpointSuffix"); + if (!endpointSuffix) { + throw new Error("Invalid EndpointSuffix in the provided Connection String"); + } + blobEndpoint = `${defaultEndpointsProtocol}://${accountName}.blob.${endpointSuffix}`; } - const credential = getCredentialFromPipeline(pipeline); - if ((0, core_auth_1.isTokenCredential)(credential)) { - corePipeline.addPolicy((0, core_rest_pipeline_1.bearerTokenAuthenticationPolicy)({ - credential, - scopes: restOptions.audience ?? 
constants_js_1.StorageOAuthScopes, - challengeCallbacks: { authorizeRequestOnChallenge: core_client_1.authorizeRequestOnTenantChallenge } - }), { phase: "Sign" }); - } else if (credential instanceof StorageSharedKeyCredential_js_1.StorageSharedKeyCredential) { - corePipeline.addPolicy((0, StorageSharedKeyCredentialPolicyV2_js_1.storageSharedKeyCredentialPolicy)({ - accountName: credential.accountName, - accountKey: credential.accountKey - }), { phase: "Sign" }); + if (!accountName) { + throw new Error("Invalid AccountName in the provided Connection String"); + } else if (accountKey.length === 0) { + throw new Error("Invalid AccountKey in the provided Connection String"); } - pipeline._corePipeline = corePipeline; + return { + kind: "AccountConnString", + url: blobEndpoint, + accountName, + accountKey, + proxyUri + }; + } else { + let accountSas = getValueInConnString(connectionString, "SharedAccessSignature"); + let accountName = getValueInConnString(connectionString, "AccountName"); + if (!accountName) { + accountName = getAccountNameFromUrl(blobEndpoint); + } + if (!blobEndpoint) { + throw new Error("Invalid BlobEndpoint in the provided SAS Connection String"); + } else if (!accountSas) { + throw new Error("Invalid SharedAccessSignature in the provided SAS Connection String"); + } + if (accountSas.startsWith("?")) { + accountSas = accountSas.substring(1); + } + return { kind: "SASConnString", url: blobEndpoint, accountName, accountSas }; } - return { - ...restOptions, - allowInsecureConnection: true, - httpClient, - pipeline: corePipeline - }; } - function getCredentialFromPipeline(pipeline) { - if (pipeline._credential) { - return pipeline._credential; - } - let credential = new AnonymousCredential_js_1.AnonymousCredential(); - for (const factory of pipeline.factories) { - if ((0, core_auth_1.isTokenCredential)(factory.credential)) { - credential = factory.credential; - } else if (isStorageSharedKeyCredential(factory)) { - return factory; + function escape(text) { + return encodeURIComponent(text).replace(/%2F/g, "/").replace(/'/g, "%27").replace(/\+/g, "%20").replace(/%25/g, "%"); + } + function appendToURLPath(url2, name) { + const urlParsed = new URL(url2); + let path13 = urlParsed.pathname; + path13 = path13 ? path13.endsWith("/") ? `${path13}${name}` : `${path13}/${name}` : name; + urlParsed.pathname = path13; + return urlParsed.toString(); + } + function setURLParameter(url2, name, value) { + const urlParsed = new URL(url2); + const encodedName = encodeURIComponent(name); + const encodedValue = value ? encodeURIComponent(value) : void 0; + const searchString = urlParsed.search === "" ? "?" : urlParsed.search; + const searchPieces = []; + for (const pair of searchString.slice(1).split("&")) { + if (pair) { + const [key] = pair.split("=", 2); + if (key !== encodedName) { + searchPieces.push(pair); + } } } - return credential; - } - function isStorageSharedKeyCredential(factory) { - if (factory instanceof StorageSharedKeyCredential_js_1.StorageSharedKeyCredential) { - return true; + if (encodedValue) { + searchPieces.push(`${encodedName}=${encodedValue}`); } - return factory.constructor.name === "StorageSharedKeyCredential"; + urlParsed.search = searchPieces.length ? 
`?${searchPieces.join("&")}` : ""; + return urlParsed.toString(); } - function isAnonymousCredential(factory) { - if (factory instanceof AnonymousCredential_js_1.AnonymousCredential) { - return true; - } - return factory.constructor.name === "AnonymousCredential"; + function getURLParameter(url2, name) { + const urlParsed = new URL(url2); + return urlParsed.searchParams.get(name) ?? void 0; } - function isCoreHttpBearerTokenFactory(factory) { - return (0, core_auth_1.isTokenCredential)(factory.credential); + function setURLHost(url2, host) { + const urlParsed = new URL(url2); + urlParsed.hostname = host; + return urlParsed.toString(); } - function isStorageBrowserPolicyFactory(factory) { - if (factory instanceof StorageBrowserPolicyFactory_js_1.StorageBrowserPolicyFactory) { - return true; + function getURLPath(url2) { + try { + const urlParsed = new URL(url2); + return urlParsed.pathname; + } catch (e) { + return void 0; } - return factory.constructor.name === "StorageBrowserPolicyFactory"; } - function isStorageRetryPolicyFactory(factory) { - if (factory instanceof StorageRetryPolicyFactory_js_1.StorageRetryPolicyFactory) { - return true; + function getURLScheme(url2) { + try { + const urlParsed = new URL(url2); + return urlParsed.protocol.endsWith(":") ? urlParsed.protocol.slice(0, -1) : urlParsed.protocol; + } catch (e) { + return void 0; } - return factory.constructor.name === "StorageRetryPolicyFactory"; } - function isStorageTelemetryPolicyFactory(factory) { - return factory.constructor.name === "TelemetryPolicyFactory"; + function getURLPathAndQuery(url2) { + const urlParsed = new URL(url2); + const pathString = urlParsed.pathname; + if (!pathString) { + throw new RangeError("Invalid url without valid path."); + } + let queryString = urlParsed.search || ""; + queryString = queryString.trim(); + if (queryString !== "") { + queryString = queryString.startsWith("?") ? queryString : `?${queryString}`; + } + return `${pathString}${queryString}`; } - function isInjectorPolicyFactory(factory) { - return factory.constructor.name === "InjectorPolicyFactory"; + function getURLQueries(url2) { + let queryString = new URL(url2).search; + if (!queryString) { + return {}; + } + queryString = queryString.trim(); + queryString = queryString.startsWith("?") ? 
queryString.substring(1) : queryString; + let querySubStrings = queryString.split("&"); + querySubStrings = querySubStrings.filter((value) => { + const indexOfEqual = value.indexOf("="); + const lastIndexOfEqual = value.lastIndexOf("="); + return indexOfEqual > 0 && indexOfEqual === lastIndexOfEqual && lastIndexOfEqual < value.length - 1; + }); + const queries = {}; + for (const querySubString of querySubStrings) { + const splitResults = querySubString.split("="); + const key = splitResults[0]; + const value = splitResults[1]; + queries[key] = value; + } + return queries; } - function isCoreHttpPolicyFactory(factory) { - const knownPolicies = [ - "GenerateClientRequestIdPolicy", - "TracingPolicy", - "LogPolicy", - "ProxyPolicy", - "DisableResponseDecompressionPolicy", - "KeepAlivePolicy", - "DeserializationPolicy" - ]; - const mockHttpClient = { - sendRequest: async (request2) => { + function appendToURLQuery(url2, queryParts) { + const urlParsed = new URL(url2); + let query = urlParsed.search; + if (query) { + query += "&" + queryParts; + } else { + query = queryParts; + } + urlParsed.search = query; + return urlParsed.toString(); + } + function truncatedISO8061Date(date, withMilliseconds = true) { + const dateString = date.toISOString(); + return withMilliseconds ? dateString.substring(0, dateString.length - 1) + "0000Z" : dateString.substring(0, dateString.length - 5) + "Z"; + } + function base64encode(content) { + return !core_util_1.isNodeLike ? btoa(content) : Buffer.from(content).toString("base64"); + } + function base64decode(encodedString) { + return !core_util_1.isNodeLike ? atob(encodedString) : Buffer.from(encodedString, "base64").toString(); + } + function generateBlockID(blockIDPrefix, blockIndex) { + const maxSourceStringLength = 48; + const maxBlockIndexLength = 6; + const maxAllowedBlockIDPrefixLength = maxSourceStringLength - maxBlockIndexLength; + if (blockIDPrefix.length > maxAllowedBlockIDPrefixLength) { + blockIDPrefix = blockIDPrefix.slice(0, maxAllowedBlockIDPrefixLength); + } + const res = blockIDPrefix + padStart2(blockIndex.toString(), maxSourceStringLength - blockIDPrefix.length, "0"); + return base64encode(res); + } + async function delay2(timeInMs, aborter, abortError) { + return new Promise((resolve6, reject) => { + let timeout; + const abortHandler = () => { + if (timeout !== void 0) { + clearTimeout(timeout); + } + reject(abortError); + }; + const resolveHandler = () => { + if (aborter !== void 0) { + aborter.removeEventListener("abort", abortHandler); + } + resolve6(); + }; + timeout = setTimeout(resolveHandler, timeInMs); + if (aborter !== void 0) { + aborter.addEventListener("abort", abortHandler); + } + }); + } + function padStart2(currentString, targetLength, padString = " ") { + if (String.prototype.padStart) { + return currentString.padStart(targetLength, padString); + } + padString = padString || " "; + if (currentString.length > targetLength) { + return currentString; + } else { + targetLength = targetLength - currentString.length; + if (targetLength > padString.length) { + padString += padString.repeat(targetLength / padString.length); + } + return padString.slice(0, targetLength) + currentString; + } + } + function sanitizeURL(url2) { + let safeURL = url2; + if (getURLParameter(safeURL, constants_js_1.URLConstants.Parameters.SIGNATURE)) { + safeURL = setURLParameter(safeURL, constants_js_1.URLConstants.Parameters.SIGNATURE, "*****"); + } + return safeURL; + } + function sanitizeHeaders(originalHeader) { + const headers = (0, 
core_rest_pipeline_1.createHttpHeaders)(); + for (const [name, value] of originalHeader) { + if (name.toLowerCase() === constants_js_1.HeaderConstants.AUTHORIZATION.toLowerCase()) { + headers.set(name, "*****"); + } else if (name.toLowerCase() === constants_js_1.HeaderConstants.X_MS_COPY_SOURCE) { + headers.set(name, sanitizeURL(value)); + } else { + headers.set(name, value); + } + } + return headers; + } + function iEqual(str1, str2) { + return str1.toLocaleLowerCase() === str2.toLocaleLowerCase(); + } + function getAccountNameFromUrl(url2) { + const parsedUrl = new URL(url2); + let accountName; + try { + if (parsedUrl.hostname.split(".")[1] === "blob") { + accountName = parsedUrl.hostname.split(".")[0]; + } else if (isIpEndpointStyle(parsedUrl)) { + accountName = parsedUrl.pathname.split("/")[1]; + } else { + accountName = ""; + } + return accountName; + } catch (error3) { + throw new Error("Unable to extract accountName with provided information."); + } + } + function isIpEndpointStyle(parsedUrl) { + const host = parsedUrl.host; + return /^.*:.*:.*$|^(localhost|host.docker.internal)(:[0-9]+)?$|^(\d|[1-9]\d|1\d\d|2[0-4]\d|25[0-5])(\.(\d|[1-9]\d|1\d\d|2[0-4]\d|25[0-5])){3}(:[0-9]+)?$/.test(host) || Boolean(parsedUrl.port) && constants_js_1.PathStylePorts.includes(parsedUrl.port); + } + function toBlobTagsString(tags) { + if (tags === void 0) { + return void 0; + } + const tagPairs = []; + for (const key in tags) { + if (Object.prototype.hasOwnProperty.call(tags, key)) { + const value = tags[key]; + tagPairs.push(`${encodeURIComponent(key)}=${encodeURIComponent(value)}`); + } + } + return tagPairs.join("&"); + } + function toBlobTags(tags) { + if (tags === void 0) { + return void 0; + } + const res = { + blobTagSet: [] + }; + for (const key in tags) { + if (Object.prototype.hasOwnProperty.call(tags, key)) { + const value = tags[key]; + res.blobTagSet.push({ + key, + value + }); + } + } + return res; + } + function toTags(tags) { + if (tags === void 0) { + return void 0; + } + const res = {}; + for (const blobTag of tags.blobTagSet) { + res[blobTag.key] = blobTag.value; + } + return res; + } + function toQuerySerialization(textConfiguration) { + if (textConfiguration === void 0) { + return void 0; + } + switch (textConfiguration.kind) { + case "csv": return { - request: request2, - headers: request2.headers.clone(), - status: 500 + format: { + type: "delimited", + delimitedTextConfiguration: { + columnSeparator: textConfiguration.columnSeparator || ",", + fieldQuote: textConfiguration.fieldQuote || "", + recordSeparator: textConfiguration.recordSeparator, + escapeChar: textConfiguration.escapeCharacter || "", + headersPresent: textConfiguration.hasHeaders || false + } + } + }; + case "json": + return { + format: { + type: "json", + jsonTextConfiguration: { + recordSeparator: textConfiguration.recordSeparator + } + } + }; + case "arrow": + return { + format: { + type: "arrow", + arrowConfiguration: { + schema: textConfiguration.schema + } + } + }; + case "parquet": + return { + format: { + type: "parquet" + } }; + default: + throw Error("Invalid BlobQueryTextConfiguration."); + } + } + function parseObjectReplicationRecord(objectReplicationRecord) { + if (!objectReplicationRecord) { + return void 0; + } + if ("policy-id" in objectReplicationRecord) { + return void 0; + } + const orProperties = []; + for (const key in objectReplicationRecord) { + const ids = key.split("_"); + const policyPrefix = "or-"; + if (ids[0].startsWith(policyPrefix)) { + ids[0] = ids[0].substring(policyPrefix.length); 
+ } + const rule = { + ruleId: ids[1], + replicationStatus: objectReplicationRecord[key] + }; + const policyIndex = orProperties.findIndex((policy) => policy.policyId === ids[0]); + if (policyIndex > -1) { + orProperties[policyIndex].rules.push(rule); + } else { + orProperties.push({ + policyId: ids[0], + rules: [rule] + }); + } + } + return orProperties; + } + function attachCredential(thing, credential) { + thing.credential = credential; + return thing; + } + function httpAuthorizationToString(httpAuthorization) { + return httpAuthorization ? httpAuthorization.scheme + " " + httpAuthorization.value : void 0; + } + function BlobNameToString(name) { + if (name.encoded) { + return decodeURIComponent(name.content); + } else { + return name.content; + } + } + function ConvertInternalResponseOfListBlobFlat(internalResponse) { + return { + ...internalResponse, + segment: { + blobItems: internalResponse.segment.blobItems.map((blobItemInteral) => { + const blobItem = { + ...blobItemInteral, + name: BlobNameToString(blobItemInteral.name) + }; + return blobItem; + }) } }; - const mockRequestPolicyOptions = { - log(_logLevel, _message) { - }, - shouldLog(_logLevel) { - return false; + } + function ConvertInternalResponseOfListBlobHierarchy(internalResponse) { + return { + ...internalResponse, + segment: { + blobPrefixes: internalResponse.segment.blobPrefixes?.map((blobPrefixInternal) => { + const blobPrefix = { + ...blobPrefixInternal, + name: BlobNameToString(blobPrefixInternal.name) + }; + return blobPrefix; + }), + blobItems: internalResponse.segment.blobItems.map((blobItemInteral) => { + const blobItem = { + ...blobItemInteral, + name: BlobNameToString(blobItemInteral.name) + }; + return blobItem; + }) } }; - const policyInstance = factory.create(mockHttpClient, mockRequestPolicyOptions); - const policyName = policyInstance.constructor.name; - return knownPolicies.some((knownPolicyName) => { - return policyName.startsWith(knownPolicyName); - }); + } + function* ExtractPageRangeInfoItems(getPageRangesSegment) { + let pageRange = []; + let clearRange = []; + if (getPageRangesSegment.pageRange) + pageRange = getPageRangesSegment.pageRange; + if (getPageRangesSegment.clearRange) + clearRange = getPageRangesSegment.clearRange; + let pageRangeIndex = 0; + let clearRangeIndex = 0; + while (pageRangeIndex < pageRange.length && clearRangeIndex < clearRange.length) { + if (pageRange[pageRangeIndex].start < clearRange[clearRangeIndex].start) { + yield { + start: pageRange[pageRangeIndex].start, + end: pageRange[pageRangeIndex].end, + isClear: false + }; + ++pageRangeIndex; + } else { + yield { + start: clearRange[clearRangeIndex].start, + end: clearRange[clearRangeIndex].end, + isClear: true + }; + ++clearRangeIndex; + } + } + for (; pageRangeIndex < pageRange.length; ++pageRangeIndex) { + yield { + start: pageRange[pageRangeIndex].start, + end: pageRange[pageRangeIndex].end, + isClear: false + }; + } + for (; clearRangeIndex < clearRange.length; ++clearRangeIndex) { + yield { + start: clearRange[clearRangeIndex].start, + end: clearRange[clearRangeIndex].end, + isClear: true + }; + } + } + function EscapePath(blobName) { + const split = blobName.split("/"); + for (let i = 0; i < split.length; i++) { + split[i] = encodeURIComponent(split[i]); + } + return split.join("/"); + } + function assertResponse(response) { + if (`_response` in response) { + return response; + } + throw new TypeError(`Unexpected response object ${response}`); } } }); -// 
node_modules/@azure/storage-blob/dist/commonjs/generated/src/models/index.js -var require_models = __commonJS({ - "node_modules/@azure/storage-blob/dist/commonjs/generated/src/models/index.js"(exports2) { +// node_modules/@azure/storage-blob/dist/commonjs/policies/StorageRetryPolicyType.js +var require_StorageRetryPolicyType = __commonJS({ + "node_modules/@azure/storage-blob/dist/commonjs/policies/StorageRetryPolicyType.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.KnownStorageErrorCode = exports2.KnownBlobExpiryOptions = exports2.KnownFileShareTokenIntent = exports2.KnownEncryptionAlgorithmType = void 0; - var KnownEncryptionAlgorithmType; - (function(KnownEncryptionAlgorithmType2) { - KnownEncryptionAlgorithmType2["AES256"] = "AES256"; - })(KnownEncryptionAlgorithmType || (exports2.KnownEncryptionAlgorithmType = KnownEncryptionAlgorithmType = {})); - var KnownFileShareTokenIntent; - (function(KnownFileShareTokenIntent2) { - KnownFileShareTokenIntent2["Backup"] = "backup"; - })(KnownFileShareTokenIntent || (exports2.KnownFileShareTokenIntent = KnownFileShareTokenIntent = {})); - var KnownBlobExpiryOptions; - (function(KnownBlobExpiryOptions2) { - KnownBlobExpiryOptions2["NeverExpire"] = "NeverExpire"; - KnownBlobExpiryOptions2["RelativeToCreation"] = "RelativeToCreation"; - KnownBlobExpiryOptions2["RelativeToNow"] = "RelativeToNow"; - KnownBlobExpiryOptions2["Absolute"] = "Absolute"; - })(KnownBlobExpiryOptions || (exports2.KnownBlobExpiryOptions = KnownBlobExpiryOptions = {})); - var KnownStorageErrorCode; - (function(KnownStorageErrorCode2) { - KnownStorageErrorCode2["AccountAlreadyExists"] = "AccountAlreadyExists"; - KnownStorageErrorCode2["AccountBeingCreated"] = "AccountBeingCreated"; - KnownStorageErrorCode2["AccountIsDisabled"] = "AccountIsDisabled"; - KnownStorageErrorCode2["AuthenticationFailed"] = "AuthenticationFailed"; - KnownStorageErrorCode2["AuthorizationFailure"] = "AuthorizationFailure"; - KnownStorageErrorCode2["ConditionHeadersNotSupported"] = "ConditionHeadersNotSupported"; - KnownStorageErrorCode2["ConditionNotMet"] = "ConditionNotMet"; - KnownStorageErrorCode2["EmptyMetadataKey"] = "EmptyMetadataKey"; - KnownStorageErrorCode2["InsufficientAccountPermissions"] = "InsufficientAccountPermissions"; - KnownStorageErrorCode2["InternalError"] = "InternalError"; - KnownStorageErrorCode2["InvalidAuthenticationInfo"] = "InvalidAuthenticationInfo"; - KnownStorageErrorCode2["InvalidHeaderValue"] = "InvalidHeaderValue"; - KnownStorageErrorCode2["InvalidHttpVerb"] = "InvalidHttpVerb"; - KnownStorageErrorCode2["InvalidInput"] = "InvalidInput"; - KnownStorageErrorCode2["InvalidMd5"] = "InvalidMd5"; - KnownStorageErrorCode2["InvalidMetadata"] = "InvalidMetadata"; - KnownStorageErrorCode2["InvalidQueryParameterValue"] = "InvalidQueryParameterValue"; - KnownStorageErrorCode2["InvalidRange"] = "InvalidRange"; - KnownStorageErrorCode2["InvalidResourceName"] = "InvalidResourceName"; - KnownStorageErrorCode2["InvalidUri"] = "InvalidUri"; - KnownStorageErrorCode2["InvalidXmlDocument"] = "InvalidXmlDocument"; - KnownStorageErrorCode2["InvalidXmlNodeValue"] = "InvalidXmlNodeValue"; - KnownStorageErrorCode2["Md5Mismatch"] = "Md5Mismatch"; - KnownStorageErrorCode2["MetadataTooLarge"] = "MetadataTooLarge"; - KnownStorageErrorCode2["MissingContentLengthHeader"] = "MissingContentLengthHeader"; - KnownStorageErrorCode2["MissingRequiredQueryParameter"] = "MissingRequiredQueryParameter"; - KnownStorageErrorCode2["MissingRequiredHeader"] = 
"MissingRequiredHeader"; - KnownStorageErrorCode2["MissingRequiredXmlNode"] = "MissingRequiredXmlNode"; - KnownStorageErrorCode2["MultipleConditionHeadersNotSupported"] = "MultipleConditionHeadersNotSupported"; - KnownStorageErrorCode2["OperationTimedOut"] = "OperationTimedOut"; - KnownStorageErrorCode2["OutOfRangeInput"] = "OutOfRangeInput"; - KnownStorageErrorCode2["OutOfRangeQueryParameterValue"] = "OutOfRangeQueryParameterValue"; - KnownStorageErrorCode2["RequestBodyTooLarge"] = "RequestBodyTooLarge"; - KnownStorageErrorCode2["ResourceTypeMismatch"] = "ResourceTypeMismatch"; - KnownStorageErrorCode2["RequestUrlFailedToParse"] = "RequestUrlFailedToParse"; - KnownStorageErrorCode2["ResourceAlreadyExists"] = "ResourceAlreadyExists"; - KnownStorageErrorCode2["ResourceNotFound"] = "ResourceNotFound"; - KnownStorageErrorCode2["ServerBusy"] = "ServerBusy"; - KnownStorageErrorCode2["UnsupportedHeader"] = "UnsupportedHeader"; - KnownStorageErrorCode2["UnsupportedXmlNode"] = "UnsupportedXmlNode"; - KnownStorageErrorCode2["UnsupportedQueryParameter"] = "UnsupportedQueryParameter"; - KnownStorageErrorCode2["UnsupportedHttpVerb"] = "UnsupportedHttpVerb"; - KnownStorageErrorCode2["AppendPositionConditionNotMet"] = "AppendPositionConditionNotMet"; - KnownStorageErrorCode2["BlobAlreadyExists"] = "BlobAlreadyExists"; - KnownStorageErrorCode2["BlobImmutableDueToPolicy"] = "BlobImmutableDueToPolicy"; - KnownStorageErrorCode2["BlobNotFound"] = "BlobNotFound"; - KnownStorageErrorCode2["BlobOverwritten"] = "BlobOverwritten"; - KnownStorageErrorCode2["BlobTierInadequateForContentLength"] = "BlobTierInadequateForContentLength"; - KnownStorageErrorCode2["BlobUsesCustomerSpecifiedEncryption"] = "BlobUsesCustomerSpecifiedEncryption"; - KnownStorageErrorCode2["BlockCountExceedsLimit"] = "BlockCountExceedsLimit"; - KnownStorageErrorCode2["BlockListTooLong"] = "BlockListTooLong"; - KnownStorageErrorCode2["CannotChangeToLowerTier"] = "CannotChangeToLowerTier"; - KnownStorageErrorCode2["CannotVerifyCopySource"] = "CannotVerifyCopySource"; - KnownStorageErrorCode2["ContainerAlreadyExists"] = "ContainerAlreadyExists"; - KnownStorageErrorCode2["ContainerBeingDeleted"] = "ContainerBeingDeleted"; - KnownStorageErrorCode2["ContainerDisabled"] = "ContainerDisabled"; - KnownStorageErrorCode2["ContainerNotFound"] = "ContainerNotFound"; - KnownStorageErrorCode2["ContentLengthLargerThanTierLimit"] = "ContentLengthLargerThanTierLimit"; - KnownStorageErrorCode2["CopyAcrossAccountsNotSupported"] = "CopyAcrossAccountsNotSupported"; - KnownStorageErrorCode2["CopyIdMismatch"] = "CopyIdMismatch"; - KnownStorageErrorCode2["FeatureVersionMismatch"] = "FeatureVersionMismatch"; - KnownStorageErrorCode2["IncrementalCopyBlobMismatch"] = "IncrementalCopyBlobMismatch"; - KnownStorageErrorCode2["IncrementalCopyOfEarlierVersionSnapshotNotAllowed"] = "IncrementalCopyOfEarlierVersionSnapshotNotAllowed"; - KnownStorageErrorCode2["IncrementalCopySourceMustBeSnapshot"] = "IncrementalCopySourceMustBeSnapshot"; - KnownStorageErrorCode2["InfiniteLeaseDurationRequired"] = "InfiniteLeaseDurationRequired"; - KnownStorageErrorCode2["InvalidBlobOrBlock"] = "InvalidBlobOrBlock"; - KnownStorageErrorCode2["InvalidBlobTier"] = "InvalidBlobTier"; - KnownStorageErrorCode2["InvalidBlobType"] = "InvalidBlobType"; - KnownStorageErrorCode2["InvalidBlockId"] = "InvalidBlockId"; - KnownStorageErrorCode2["InvalidBlockList"] = "InvalidBlockList"; - KnownStorageErrorCode2["InvalidOperation"] = "InvalidOperation"; - KnownStorageErrorCode2["InvalidPageRange"] = 
"InvalidPageRange"; - KnownStorageErrorCode2["InvalidSourceBlobType"] = "InvalidSourceBlobType"; - KnownStorageErrorCode2["InvalidSourceBlobUrl"] = "InvalidSourceBlobUrl"; - KnownStorageErrorCode2["InvalidVersionForPageBlobOperation"] = "InvalidVersionForPageBlobOperation"; - KnownStorageErrorCode2["LeaseAlreadyPresent"] = "LeaseAlreadyPresent"; - KnownStorageErrorCode2["LeaseAlreadyBroken"] = "LeaseAlreadyBroken"; - KnownStorageErrorCode2["LeaseIdMismatchWithBlobOperation"] = "LeaseIdMismatchWithBlobOperation"; - KnownStorageErrorCode2["LeaseIdMismatchWithContainerOperation"] = "LeaseIdMismatchWithContainerOperation"; - KnownStorageErrorCode2["LeaseIdMismatchWithLeaseOperation"] = "LeaseIdMismatchWithLeaseOperation"; - KnownStorageErrorCode2["LeaseIdMissing"] = "LeaseIdMissing"; - KnownStorageErrorCode2["LeaseIsBreakingAndCannotBeAcquired"] = "LeaseIsBreakingAndCannotBeAcquired"; - KnownStorageErrorCode2["LeaseIsBreakingAndCannotBeChanged"] = "LeaseIsBreakingAndCannotBeChanged"; - KnownStorageErrorCode2["LeaseIsBrokenAndCannotBeRenewed"] = "LeaseIsBrokenAndCannotBeRenewed"; - KnownStorageErrorCode2["LeaseLost"] = "LeaseLost"; - KnownStorageErrorCode2["LeaseNotPresentWithBlobOperation"] = "LeaseNotPresentWithBlobOperation"; - KnownStorageErrorCode2["LeaseNotPresentWithContainerOperation"] = "LeaseNotPresentWithContainerOperation"; - KnownStorageErrorCode2["LeaseNotPresentWithLeaseOperation"] = "LeaseNotPresentWithLeaseOperation"; - KnownStorageErrorCode2["MaxBlobSizeConditionNotMet"] = "MaxBlobSizeConditionNotMet"; - KnownStorageErrorCode2["NoAuthenticationInformation"] = "NoAuthenticationInformation"; - KnownStorageErrorCode2["NoPendingCopyOperation"] = "NoPendingCopyOperation"; - KnownStorageErrorCode2["OperationNotAllowedOnIncrementalCopyBlob"] = "OperationNotAllowedOnIncrementalCopyBlob"; - KnownStorageErrorCode2["PendingCopyOperation"] = "PendingCopyOperation"; - KnownStorageErrorCode2["PreviousSnapshotCannotBeNewer"] = "PreviousSnapshotCannotBeNewer"; - KnownStorageErrorCode2["PreviousSnapshotNotFound"] = "PreviousSnapshotNotFound"; - KnownStorageErrorCode2["PreviousSnapshotOperationNotSupported"] = "PreviousSnapshotOperationNotSupported"; - KnownStorageErrorCode2["SequenceNumberConditionNotMet"] = "SequenceNumberConditionNotMet"; - KnownStorageErrorCode2["SequenceNumberIncrementTooLarge"] = "SequenceNumberIncrementTooLarge"; - KnownStorageErrorCode2["SnapshotCountExceeded"] = "SnapshotCountExceeded"; - KnownStorageErrorCode2["SnapshotOperationRateExceeded"] = "SnapshotOperationRateExceeded"; - KnownStorageErrorCode2["SnapshotsPresent"] = "SnapshotsPresent"; - KnownStorageErrorCode2["SourceConditionNotMet"] = "SourceConditionNotMet"; - KnownStorageErrorCode2["SystemInUse"] = "SystemInUse"; - KnownStorageErrorCode2["TargetConditionNotMet"] = "TargetConditionNotMet"; - KnownStorageErrorCode2["UnauthorizedBlobOverwrite"] = "UnauthorizedBlobOverwrite"; - KnownStorageErrorCode2["BlobBeingRehydrated"] = "BlobBeingRehydrated"; - KnownStorageErrorCode2["BlobArchived"] = "BlobArchived"; - KnownStorageErrorCode2["BlobNotArchived"] = "BlobNotArchived"; - KnownStorageErrorCode2["AuthorizationSourceIPMismatch"] = "AuthorizationSourceIPMismatch"; - KnownStorageErrorCode2["AuthorizationProtocolMismatch"] = "AuthorizationProtocolMismatch"; - KnownStorageErrorCode2["AuthorizationPermissionMismatch"] = "AuthorizationPermissionMismatch"; - KnownStorageErrorCode2["AuthorizationServiceMismatch"] = "AuthorizationServiceMismatch"; - KnownStorageErrorCode2["AuthorizationResourceTypeMismatch"] = 
"AuthorizationResourceTypeMismatch"; - KnownStorageErrorCode2["BlobAccessTierNotSupportedForAccountType"] = "BlobAccessTierNotSupportedForAccountType"; - })(KnownStorageErrorCode || (exports2.KnownStorageErrorCode = KnownStorageErrorCode = {})); + exports2.StorageRetryPolicyType = void 0; + var StorageRetryPolicyType; + (function(StorageRetryPolicyType2) { + StorageRetryPolicyType2[StorageRetryPolicyType2["EXPONENTIAL"] = 0] = "EXPONENTIAL"; + StorageRetryPolicyType2[StorageRetryPolicyType2["FIXED"] = 1] = "FIXED"; + })(StorageRetryPolicyType || (exports2.StorageRetryPolicyType = StorageRetryPolicyType = {})); } }); -// node_modules/@azure/storage-blob/dist/commonjs/generated/src/models/mappers.js -var require_mappers = __commonJS({ - "node_modules/@azure/storage-blob/dist/commonjs/generated/src/models/mappers.js"(exports2) { +// node_modules/@azure/storage-blob/dist/commonjs/policies/StorageRetryPolicy.js +var require_StorageRetryPolicy = __commonJS({ + "node_modules/@azure/storage-blob/dist/commonjs/policies/StorageRetryPolicy.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.ServiceGetUserDelegationKeyHeaders = exports2.ServiceListContainersSegmentExceptionHeaders = exports2.ServiceListContainersSegmentHeaders = exports2.ServiceGetStatisticsExceptionHeaders = exports2.ServiceGetStatisticsHeaders = exports2.ServiceGetPropertiesExceptionHeaders = exports2.ServiceGetPropertiesHeaders = exports2.ServiceSetPropertiesExceptionHeaders = exports2.ServiceSetPropertiesHeaders = exports2.ArrowField = exports2.ArrowConfiguration = exports2.JsonTextConfiguration = exports2.DelimitedTextConfiguration = exports2.QueryFormat = exports2.QuerySerialization = exports2.QueryRequest = exports2.ClearRange = exports2.PageRange = exports2.PageList = exports2.Block = exports2.BlockList = exports2.BlockLookupList = exports2.BlobPrefix = exports2.BlobHierarchyListSegment = exports2.ListBlobsHierarchySegmentResponse = exports2.BlobPropertiesInternal = exports2.BlobName = exports2.BlobItemInternal = exports2.BlobFlatListSegment = exports2.ListBlobsFlatSegmentResponse = exports2.AccessPolicy = exports2.SignedIdentifier = exports2.BlobTag = exports2.BlobTags = exports2.FilterBlobItem = exports2.FilterBlobSegment = exports2.UserDelegationKey = exports2.KeyInfo = exports2.ContainerProperties = exports2.ContainerItem = exports2.ListContainersSegmentResponse = exports2.GeoReplication = exports2.BlobServiceStatistics = exports2.StorageError = exports2.StaticWebsite = exports2.CorsRule = exports2.Metrics = exports2.RetentionPolicy = exports2.Logging = exports2.BlobServiceProperties = void 0; - exports2.BlobUndeleteHeaders = exports2.BlobDeleteExceptionHeaders = exports2.BlobDeleteHeaders = exports2.BlobGetPropertiesExceptionHeaders = exports2.BlobGetPropertiesHeaders = exports2.BlobDownloadExceptionHeaders = exports2.BlobDownloadHeaders = exports2.ContainerGetAccountInfoExceptionHeaders = exports2.ContainerGetAccountInfoHeaders = exports2.ContainerListBlobHierarchySegmentExceptionHeaders = exports2.ContainerListBlobHierarchySegmentHeaders = exports2.ContainerListBlobFlatSegmentExceptionHeaders = exports2.ContainerListBlobFlatSegmentHeaders = exports2.ContainerChangeLeaseExceptionHeaders = exports2.ContainerChangeLeaseHeaders = exports2.ContainerBreakLeaseExceptionHeaders = exports2.ContainerBreakLeaseHeaders = exports2.ContainerRenewLeaseExceptionHeaders = exports2.ContainerRenewLeaseHeaders = exports2.ContainerReleaseLeaseExceptionHeaders = 
exports2.ContainerReleaseLeaseHeaders = exports2.ContainerAcquireLeaseExceptionHeaders = exports2.ContainerAcquireLeaseHeaders = exports2.ContainerFilterBlobsExceptionHeaders = exports2.ContainerFilterBlobsHeaders = exports2.ContainerSubmitBatchExceptionHeaders = exports2.ContainerSubmitBatchHeaders = exports2.ContainerRenameExceptionHeaders = exports2.ContainerRenameHeaders = exports2.ContainerRestoreExceptionHeaders = exports2.ContainerRestoreHeaders = exports2.ContainerSetAccessPolicyExceptionHeaders = exports2.ContainerSetAccessPolicyHeaders = exports2.ContainerGetAccessPolicyExceptionHeaders = exports2.ContainerGetAccessPolicyHeaders = exports2.ContainerSetMetadataExceptionHeaders = exports2.ContainerSetMetadataHeaders = exports2.ContainerDeleteExceptionHeaders = exports2.ContainerDeleteHeaders = exports2.ContainerGetPropertiesExceptionHeaders = exports2.ContainerGetPropertiesHeaders = exports2.ContainerCreateExceptionHeaders = exports2.ContainerCreateHeaders = exports2.ServiceFilterBlobsExceptionHeaders = exports2.ServiceFilterBlobsHeaders = exports2.ServiceSubmitBatchExceptionHeaders = exports2.ServiceSubmitBatchHeaders = exports2.ServiceGetAccountInfoExceptionHeaders = exports2.ServiceGetAccountInfoHeaders = exports2.ServiceGetUserDelegationKeyExceptionHeaders = void 0; - exports2.PageBlobGetPageRangesHeaders = exports2.PageBlobUploadPagesFromURLExceptionHeaders = exports2.PageBlobUploadPagesFromURLHeaders = exports2.PageBlobClearPagesExceptionHeaders = exports2.PageBlobClearPagesHeaders = exports2.PageBlobUploadPagesExceptionHeaders = exports2.PageBlobUploadPagesHeaders = exports2.PageBlobCreateExceptionHeaders = exports2.PageBlobCreateHeaders = exports2.BlobSetTagsExceptionHeaders = exports2.BlobSetTagsHeaders = exports2.BlobGetTagsExceptionHeaders = exports2.BlobGetTagsHeaders = exports2.BlobQueryExceptionHeaders = exports2.BlobQueryHeaders = exports2.BlobGetAccountInfoExceptionHeaders = exports2.BlobGetAccountInfoHeaders = exports2.BlobSetTierExceptionHeaders = exports2.BlobSetTierHeaders = exports2.BlobAbortCopyFromURLExceptionHeaders = exports2.BlobAbortCopyFromURLHeaders = exports2.BlobCopyFromURLExceptionHeaders = exports2.BlobCopyFromURLHeaders = exports2.BlobStartCopyFromURLExceptionHeaders = exports2.BlobStartCopyFromURLHeaders = exports2.BlobCreateSnapshotExceptionHeaders = exports2.BlobCreateSnapshotHeaders = exports2.BlobBreakLeaseExceptionHeaders = exports2.BlobBreakLeaseHeaders = exports2.BlobChangeLeaseExceptionHeaders = exports2.BlobChangeLeaseHeaders = exports2.BlobRenewLeaseExceptionHeaders = exports2.BlobRenewLeaseHeaders = exports2.BlobReleaseLeaseExceptionHeaders = exports2.BlobReleaseLeaseHeaders = exports2.BlobAcquireLeaseExceptionHeaders = exports2.BlobAcquireLeaseHeaders = exports2.BlobSetMetadataExceptionHeaders = exports2.BlobSetMetadataHeaders = exports2.BlobSetLegalHoldExceptionHeaders = exports2.BlobSetLegalHoldHeaders = exports2.BlobDeleteImmutabilityPolicyExceptionHeaders = exports2.BlobDeleteImmutabilityPolicyHeaders = exports2.BlobSetImmutabilityPolicyExceptionHeaders = exports2.BlobSetImmutabilityPolicyHeaders = exports2.BlobSetHttpHeadersExceptionHeaders = exports2.BlobSetHttpHeadersHeaders = exports2.BlobSetExpiryExceptionHeaders = exports2.BlobSetExpiryHeaders = exports2.BlobUndeleteExceptionHeaders = void 0; - exports2.BlockBlobGetBlockListExceptionHeaders = exports2.BlockBlobGetBlockListHeaders = exports2.BlockBlobCommitBlockListExceptionHeaders = exports2.BlockBlobCommitBlockListHeaders = exports2.BlockBlobStageBlockFromURLExceptionHeaders 
= exports2.BlockBlobStageBlockFromURLHeaders = exports2.BlockBlobStageBlockExceptionHeaders = exports2.BlockBlobStageBlockHeaders = exports2.BlockBlobPutBlobFromUrlExceptionHeaders = exports2.BlockBlobPutBlobFromUrlHeaders = exports2.BlockBlobUploadExceptionHeaders = exports2.BlockBlobUploadHeaders = exports2.AppendBlobSealExceptionHeaders = exports2.AppendBlobSealHeaders = exports2.AppendBlobAppendBlockFromUrlExceptionHeaders = exports2.AppendBlobAppendBlockFromUrlHeaders = exports2.AppendBlobAppendBlockExceptionHeaders = exports2.AppendBlobAppendBlockHeaders = exports2.AppendBlobCreateExceptionHeaders = exports2.AppendBlobCreateHeaders = exports2.PageBlobCopyIncrementalExceptionHeaders = exports2.PageBlobCopyIncrementalHeaders = exports2.PageBlobUpdateSequenceNumberExceptionHeaders = exports2.PageBlobUpdateSequenceNumberHeaders = exports2.PageBlobResizeExceptionHeaders = exports2.PageBlobResizeHeaders = exports2.PageBlobGetPageRangesDiffExceptionHeaders = exports2.PageBlobGetPageRangesDiffHeaders = exports2.PageBlobGetPageRangesExceptionHeaders = void 0; - exports2.BlobServiceProperties = { - serializedName: "BlobServiceProperties", - xmlName: "StorageServiceProperties", - type: { - name: "Composite", - className: "BlobServiceProperties", - modelProperties: { - blobAnalyticsLogging: { - serializedName: "Logging", - xmlName: "Logging", - type: { - name: "Composite", - className: "Logging" - } - }, - hourMetrics: { - serializedName: "HourMetrics", - xmlName: "HourMetrics", - type: { - name: "Composite", - className: "Metrics" - } - }, - minuteMetrics: { - serializedName: "MinuteMetrics", - xmlName: "MinuteMetrics", - type: { - name: "Composite", - className: "Metrics" - } - }, - cors: { - serializedName: "Cors", - xmlName: "Cors", - xmlIsWrapped: true, - xmlElementName: "CorsRule", - type: { - name: "Sequence", - element: { - type: { - name: "Composite", - className: "CorsRule" - } - } - } - }, - defaultServiceVersion: { - serializedName: "DefaultServiceVersion", - xmlName: "DefaultServiceVersion", - type: { - name: "String" - } - }, - deleteRetentionPolicy: { - serializedName: "DeleteRetentionPolicy", - xmlName: "DeleteRetentionPolicy", - type: { - name: "Composite", - className: "RetentionPolicy" - } - }, - staticWebsite: { - serializedName: "StaticWebsite", - xmlName: "StaticWebsite", - type: { - name: "Composite", - className: "StaticWebsite" - } - } + exports2.StorageRetryPolicy = void 0; + exports2.NewRetryPolicyFactory = NewRetryPolicyFactory; + var abort_controller_1 = require_commonjs11(); + var RequestPolicy_js_1 = require_RequestPolicy(); + var constants_js_1 = require_constants15(); + var utils_common_js_1 = require_utils_common(); + var log_js_1 = require_log5(); + var StorageRetryPolicyType_js_1 = require_StorageRetryPolicyType(); + function NewRetryPolicyFactory(retryOptions) { + return { + create: (nextPolicy, options) => { + return new StorageRetryPolicy(nextPolicy, options, retryOptions); } - } + }; + } + var DEFAULT_RETRY_OPTIONS = { + maxRetryDelayInMs: 120 * 1e3, + maxTries: 4, + retryDelayInMs: 4 * 1e3, + retryPolicyType: StorageRetryPolicyType_js_1.StorageRetryPolicyType.EXPONENTIAL, + secondaryHost: "", + tryTimeoutInMs: void 0 + // Use server side default timeout strategy }; - exports2.Logging = { - serializedName: "Logging", - type: { - name: "Composite", - className: "Logging", - modelProperties: { - version: { - serializedName: "Version", - required: true, - xmlName: "Version", - type: { - name: "String" - } - }, - deleteProperty: { - serializedName: "Delete", - 
required: true, - xmlName: "Delete", - type: { - name: "Boolean" - } - }, - read: { - serializedName: "Read", - required: true, - xmlName: "Read", - type: { - name: "Boolean" - } - }, - write: { - serializedName: "Write", - required: true, - xmlName: "Write", - type: { - name: "Boolean" - } - }, - retentionPolicy: { - serializedName: "RetentionPolicy", - xmlName: "RetentionPolicy", - type: { - name: "Composite", - className: "RetentionPolicy" - } + var RETRY_ABORT_ERROR = new abort_controller_1.AbortError("The operation was aborted."); + var StorageRetryPolicy = class extends RequestPolicy_js_1.BaseRequestPolicy { + /** + * RetryOptions. + */ + retryOptions; + /** + * Creates an instance of RetryPolicy. + * + * @param nextPolicy - + * @param options - + * @param retryOptions - + */ + constructor(nextPolicy, options, retryOptions = DEFAULT_RETRY_OPTIONS) { + super(nextPolicy, options); + this.retryOptions = { + retryPolicyType: retryOptions.retryPolicyType ? retryOptions.retryPolicyType : DEFAULT_RETRY_OPTIONS.retryPolicyType, + maxTries: retryOptions.maxTries && retryOptions.maxTries >= 1 ? Math.floor(retryOptions.maxTries) : DEFAULT_RETRY_OPTIONS.maxTries, + tryTimeoutInMs: retryOptions.tryTimeoutInMs && retryOptions.tryTimeoutInMs >= 0 ? retryOptions.tryTimeoutInMs : DEFAULT_RETRY_OPTIONS.tryTimeoutInMs, + retryDelayInMs: retryOptions.retryDelayInMs && retryOptions.retryDelayInMs >= 0 ? Math.min(retryOptions.retryDelayInMs, retryOptions.maxRetryDelayInMs ? retryOptions.maxRetryDelayInMs : DEFAULT_RETRY_OPTIONS.maxRetryDelayInMs) : DEFAULT_RETRY_OPTIONS.retryDelayInMs, + maxRetryDelayInMs: retryOptions.maxRetryDelayInMs && retryOptions.maxRetryDelayInMs >= 0 ? retryOptions.maxRetryDelayInMs : DEFAULT_RETRY_OPTIONS.maxRetryDelayInMs, + secondaryHost: retryOptions.secondaryHost ? retryOptions.secondaryHost : DEFAULT_RETRY_OPTIONS.secondaryHost + }; + } + /** + * Sends request. + * + * @param request - + */ + async sendRequest(request2) { + return this.attemptSendRequest(request2, false, 1); + } + /** + * Decide and perform next retry. Won't mutate request parameter. + * + * @param request - + * @param secondaryHas404 - If attempt was against the secondary & it returned a StatusNotFound (404), then + * the resource was not found. This may be due to replication delay. So, in this + * case, we'll never try the secondary again for this operation. + * @param attempt - How many retries has been attempted to performed, starting from 1, which includes + * the attempt will be performed by this method call. + */ + async attemptSendRequest(request2, secondaryHas404, attempt) { + const newRequest = request2.clone(); + const isPrimaryRetry = secondaryHas404 || !this.retryOptions.secondaryHost || !(request2.method === "GET" || request2.method === "HEAD" || request2.method === "OPTIONS") || attempt % 2 === 1; + if (!isPrimaryRetry) { + newRequest.url = (0, utils_common_js_1.setURLHost)(newRequest.url, this.retryOptions.secondaryHost); + } + if (this.retryOptions.tryTimeoutInMs) { + newRequest.url = (0, utils_common_js_1.setURLParameter)(newRequest.url, constants_js_1.URLConstants.Parameters.TIMEOUT, Math.floor(this.retryOptions.tryTimeoutInMs / 1e3).toString()); + } + let response; + try { + log_js_1.logger.info(`RetryPolicy: =====> Try=${attempt} ${isPrimaryRetry ? 
"Primary" : "Secondary"}`); + response = await this._nextPolicy.sendRequest(newRequest); + if (!this.shouldRetry(isPrimaryRetry, attempt, response)) { + return response; + } + secondaryHas404 = secondaryHas404 || !isPrimaryRetry && response.status === 404; + } catch (err) { + log_js_1.logger.error(`RetryPolicy: Caught error, message: ${err.message}, code: ${err.code}`); + if (!this.shouldRetry(isPrimaryRetry, attempt, response, err)) { + throw err; } } + await this.delay(isPrimaryRetry, attempt, request2.abortSignal); + return this.attemptSendRequest(request2, secondaryHas404, ++attempt); } - }; - exports2.RetentionPolicy = { - serializedName: "RetentionPolicy", - type: { - name: "Composite", - className: "RetentionPolicy", - modelProperties: { - enabled: { - serializedName: "Enabled", - required: true, - xmlName: "Enabled", - type: { - name: "Boolean" - } - }, - days: { - constraints: { - InclusiveMinimum: 1 - }, - serializedName: "Days", - xmlName: "Days", - type: { - name: "Number" + /** + * Decide whether to retry according to last HTTP response and retry counters. + * + * @param isPrimaryRetry - + * @param attempt - + * @param response - + * @param err - + */ + shouldRetry(isPrimaryRetry, attempt, response, err) { + if (attempt >= this.retryOptions.maxTries) { + log_js_1.logger.info(`RetryPolicy: Attempt(s) ${attempt} >= maxTries ${this.retryOptions.maxTries}, no further try.`); + return false; + } + const retriableErrors = [ + "ETIMEDOUT", + "ESOCKETTIMEDOUT", + "ECONNREFUSED", + "ECONNRESET", + "ENOENT", + "ENOTFOUND", + "TIMEOUT", + "EPIPE", + "REQUEST_SEND_ERROR" + // For default xhr based http client provided in ms-rest-js + ]; + if (err) { + for (const retriableError of retriableErrors) { + if (err.name.toUpperCase().includes(retriableError) || err.message.toUpperCase().includes(retriableError) || err.code && err.code.toString().toUpperCase() === retriableError) { + log_js_1.logger.info(`RetryPolicy: Network error ${retriableError} found, will retry.`); + return true; } } } - } - }; - exports2.Metrics = { - serializedName: "Metrics", - type: { - name: "Composite", - className: "Metrics", - modelProperties: { - version: { - serializedName: "Version", - xmlName: "Version", - type: { - name: "String" - } - }, - enabled: { - serializedName: "Enabled", - required: true, - xmlName: "Enabled", - type: { - name: "Boolean" - } - }, - includeAPIs: { - serializedName: "IncludeAPIs", - xmlName: "IncludeAPIs", - type: { - name: "Boolean" - } - }, - retentionPolicy: { - serializedName: "RetentionPolicy", - xmlName: "RetentionPolicy", - type: { - name: "Composite", - className: "RetentionPolicy" - } + if (response || err) { + const statusCode = response ? response.status : err ? 
err.statusCode : 0; + if (!isPrimaryRetry && statusCode === 404) { + log_js_1.logger.info(`RetryPolicy: Secondary access with 404, will retry.`); + return true; + } + if (statusCode === 503 || statusCode === 500) { + log_js_1.logger.info(`RetryPolicy: Will retry for status code ${statusCode}.`); + return true; } } - } - }; - exports2.CorsRule = { - serializedName: "CorsRule", - type: { - name: "Composite", - className: "CorsRule", - modelProperties: { - allowedOrigins: { - serializedName: "AllowedOrigins", - required: true, - xmlName: "AllowedOrigins", - type: { - name: "String" - } - }, - allowedMethods: { - serializedName: "AllowedMethods", - required: true, - xmlName: "AllowedMethods", - type: { - name: "String" - } - }, - allowedHeaders: { - serializedName: "AllowedHeaders", - required: true, - xmlName: "AllowedHeaders", - type: { - name: "String" - } - }, - exposedHeaders: { - serializedName: "ExposedHeaders", - required: true, - xmlName: "ExposedHeaders", - type: { - name: "String" - } - }, - maxAgeInSeconds: { - constraints: { - InclusiveMinimum: 0 - }, - serializedName: "MaxAgeInSeconds", - required: true, - xmlName: "MaxAgeInSeconds", - type: { - name: "Number" + if (response) { + if (response?.status >= 400) { + const copySourceError = response.headers.get(constants_js_1.HeaderConstants.X_MS_CopySourceErrorCode); + if (copySourceError !== void 0) { + switch (copySourceError) { + case "InternalError": + case "OperationTimedOut": + case "ServerBusy": + return true; + } } } } + if (err?.code === "PARSE_ERROR" && err?.message.startsWith(`Error "Error: Unclosed root tag`)) { + log_js_1.logger.info("RetryPolicy: Incomplete XML response likely due to service timeout, will retry."); + return true; + } + return false; } - }; - exports2.StaticWebsite = { - serializedName: "StaticWebsite", - type: { - name: "Composite", - className: "StaticWebsite", - modelProperties: { - enabled: { - serializedName: "Enabled", - required: true, - xmlName: "Enabled", - type: { - name: "Boolean" - } - }, - indexDocument: { - serializedName: "IndexDocument", - xmlName: "IndexDocument", - type: { - name: "String" - } - }, - errorDocument404Path: { - serializedName: "ErrorDocument404Path", - xmlName: "ErrorDocument404Path", - type: { - name: "String" - } - }, - defaultIndexDocumentPath: { - serializedName: "DefaultIndexDocumentPath", - xmlName: "DefaultIndexDocumentPath", - type: { - name: "String" - } + /** + * Delay a calculated time between retries. 
+ * + * @param isPrimaryRetry - + * @param attempt - + * @param abortSignal - + */ + async delay(isPrimaryRetry, attempt, abortSignal) { + let delayTimeInMs = 0; + if (isPrimaryRetry) { + switch (this.retryOptions.retryPolicyType) { + case StorageRetryPolicyType_js_1.StorageRetryPolicyType.EXPONENTIAL: + delayTimeInMs = Math.min((Math.pow(2, attempt - 1) - 1) * this.retryOptions.retryDelayInMs, this.retryOptions.maxRetryDelayInMs); + break; + case StorageRetryPolicyType_js_1.StorageRetryPolicyType.FIXED: + delayTimeInMs = this.retryOptions.retryDelayInMs; + break; } + } else { + delayTimeInMs = Math.random() * 1e3; } + log_js_1.logger.info(`RetryPolicy: Delay for ${delayTimeInMs}ms`); + return (0, utils_common_js_1.delay)(delayTimeInMs, abortSignal, RETRY_ABORT_ERROR); } }; - exports2.StorageError = { - serializedName: "StorageError", - type: { - name: "Composite", - className: "StorageError", - modelProperties: { - message: { - serializedName: "Message", - xmlName: "Message", - type: { - name: "String" - } - }, - copySourceStatusCode: { - serializedName: "CopySourceStatusCode", - xmlName: "CopySourceStatusCode", - type: { - name: "Number" - } - }, - copySourceErrorCode: { - serializedName: "CopySourceErrorCode", - xmlName: "CopySourceErrorCode", - type: { - name: "String" - } - }, - copySourceErrorMessage: { - serializedName: "CopySourceErrorMessage", - xmlName: "CopySourceErrorMessage", - type: { - name: "String" - } - }, - code: { - serializedName: "Code", - xmlName: "Code", - type: { - name: "String" - } - }, - authenticationErrorDetail: { - serializedName: "AuthenticationErrorDetail", - xmlName: "AuthenticationErrorDetail", - type: { - name: "String" - } - } - } - } - }; - exports2.BlobServiceStatistics = { - serializedName: "BlobServiceStatistics", - xmlName: "StorageServiceStats", - type: { - name: "Composite", - className: "BlobServiceStatistics", - modelProperties: { - geoReplication: { - serializedName: "GeoReplication", - xmlName: "GeoReplication", - type: { - name: "Composite", - className: "GeoReplication" - } - } - } - } - }; - exports2.GeoReplication = { - serializedName: "GeoReplication", - type: { - name: "Composite", - className: "GeoReplication", - modelProperties: { - status: { - serializedName: "Status", - required: true, - xmlName: "Status", - type: { - name: "Enum", - allowedValues: ["live", "bootstrap", "unavailable"] - } - }, - lastSyncOn: { - serializedName: "LastSyncTime", - required: true, - xmlName: "LastSyncTime", - type: { - name: "DateTimeRfc1123" - } - } - } - } - }; - exports2.ListContainersSegmentResponse = { - serializedName: "ListContainersSegmentResponse", - xmlName: "EnumerationResults", - type: { - name: "Composite", - className: "ListContainersSegmentResponse", - modelProperties: { - serviceEndpoint: { - serializedName: "ServiceEndpoint", - required: true, - xmlName: "ServiceEndpoint", - xmlIsAttribute: true, - type: { - name: "String" - } - }, - prefix: { - serializedName: "Prefix", - xmlName: "Prefix", - type: { - name: "String" - } - }, - marker: { - serializedName: "Marker", - xmlName: "Marker", - type: { - name: "String" - } - }, - maxPageSize: { - serializedName: "MaxResults", - xmlName: "MaxResults", - type: { - name: "Number" - } - }, - containerItems: { - serializedName: "ContainerItems", - required: true, - xmlName: "Containers", - xmlIsWrapped: true, - xmlElementName: "Container", - type: { - name: "Sequence", - element: { - type: { - name: "Composite", - className: "ContainerItem" - } - } - } - }, - continuationToken: { - serializedName: 
"NextMarker", - xmlName: "NextMarker", - type: { - name: "String" - } - } - } - } - }; - exports2.ContainerItem = { - serializedName: "ContainerItem", - xmlName: "Container", - type: { - name: "Composite", - className: "ContainerItem", - modelProperties: { - name: { - serializedName: "Name", - required: true, - xmlName: "Name", - type: { - name: "String" - } - }, - deleted: { - serializedName: "Deleted", - xmlName: "Deleted", - type: { - name: "Boolean" - } - }, - version: { - serializedName: "Version", - xmlName: "Version", - type: { - name: "String" - } - }, - properties: { - serializedName: "Properties", - xmlName: "Properties", - type: { - name: "Composite", - className: "ContainerProperties" - } - }, - metadata: { - serializedName: "Metadata", - xmlName: "Metadata", - type: { - name: "Dictionary", - value: { type: { name: "String" } } - } - } - } - } - }; - exports2.ContainerProperties = { - serializedName: "ContainerProperties", - type: { - name: "Composite", - className: "ContainerProperties", - modelProperties: { - lastModified: { - serializedName: "Last-Modified", - required: true, - xmlName: "Last-Modified", - type: { - name: "DateTimeRfc1123" - } - }, - etag: { - serializedName: "Etag", - required: true, - xmlName: "Etag", - type: { - name: "String" - } - }, - leaseStatus: { - serializedName: "LeaseStatus", - xmlName: "LeaseStatus", - type: { - name: "Enum", - allowedValues: ["locked", "unlocked"] - } - }, - leaseState: { - serializedName: "LeaseState", - xmlName: "LeaseState", - type: { - name: "Enum", - allowedValues: [ - "available", - "leased", - "expired", - "breaking", - "broken" - ] - } - }, - leaseDuration: { - serializedName: "LeaseDuration", - xmlName: "LeaseDuration", - type: { - name: "Enum", - allowedValues: ["infinite", "fixed"] - } - }, - publicAccess: { - serializedName: "PublicAccess", - xmlName: "PublicAccess", - type: { - name: "Enum", - allowedValues: ["container", "blob"] - } - }, - hasImmutabilityPolicy: { - serializedName: "HasImmutabilityPolicy", - xmlName: "HasImmutabilityPolicy", - type: { - name: "Boolean" - } - }, - hasLegalHold: { - serializedName: "HasLegalHold", - xmlName: "HasLegalHold", - type: { - name: "Boolean" - } - }, - defaultEncryptionScope: { - serializedName: "DefaultEncryptionScope", - xmlName: "DefaultEncryptionScope", - type: { - name: "String" - } - }, - preventEncryptionScopeOverride: { - serializedName: "DenyEncryptionScopeOverride", - xmlName: "DenyEncryptionScopeOverride", - type: { - name: "Boolean" - } - }, - deletedOn: { - serializedName: "DeletedTime", - xmlName: "DeletedTime", - type: { - name: "DateTimeRfc1123" - } - }, - remainingRetentionDays: { - serializedName: "RemainingRetentionDays", - xmlName: "RemainingRetentionDays", - type: { - name: "Number" - } - }, - isImmutableStorageWithVersioningEnabled: { - serializedName: "ImmutableStorageWithVersioningEnabled", - xmlName: "ImmutableStorageWithVersioningEnabled", - type: { - name: "Boolean" - } - } - } + exports2.StorageRetryPolicy = StorageRetryPolicy; + } +}); + +// node_modules/@azure/storage-blob/dist/commonjs/StorageRetryPolicyFactory.js +var require_StorageRetryPolicyFactory = __commonJS({ + "node_modules/@azure/storage-blob/dist/commonjs/StorageRetryPolicyFactory.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.StorageRetryPolicyFactory = exports2.StorageRetryPolicy = exports2.StorageRetryPolicyType = void 0; + var StorageRetryPolicy_js_1 = require_StorageRetryPolicy(); + Object.defineProperty(exports2, 
"StorageRetryPolicy", { enumerable: true, get: function() { + return StorageRetryPolicy_js_1.StorageRetryPolicy; + } }); + var StorageRetryPolicyType_js_1 = require_StorageRetryPolicyType(); + Object.defineProperty(exports2, "StorageRetryPolicyType", { enumerable: true, get: function() { + return StorageRetryPolicyType_js_1.StorageRetryPolicyType; + } }); + var StorageRetryPolicyFactory = class { + retryOptions; + /** + * Creates an instance of StorageRetryPolicyFactory. + * @param retryOptions - + */ + constructor(retryOptions) { + this.retryOptions = retryOptions; } - }; - exports2.KeyInfo = { - serializedName: "KeyInfo", - type: { - name: "Composite", - className: "KeyInfo", - modelProperties: { - startsOn: { - serializedName: "Start", - required: true, - xmlName: "Start", - type: { - name: "String" - } - }, - expiresOn: { - serializedName: "Expiry", - required: true, - xmlName: "Expiry", - type: { - name: "String" - } - } - } + /** + * Creates a StorageRetryPolicy object. + * + * @param nextPolicy - + * @param options - + */ + create(nextPolicy, options) { + return new StorageRetryPolicy_js_1.StorageRetryPolicy(nextPolicy, options, this.retryOptions); } }; - exports2.UserDelegationKey = { - serializedName: "UserDelegationKey", - type: { - name: "Composite", - className: "UserDelegationKey", - modelProperties: { - signedObjectId: { - serializedName: "SignedOid", - required: true, - xmlName: "SignedOid", - type: { - name: "String" - } - }, - signedTenantId: { - serializedName: "SignedTid", - required: true, - xmlName: "SignedTid", - type: { - name: "String" - } - }, - signedStartsOn: { - serializedName: "SignedStart", - required: true, - xmlName: "SignedStart", - type: { - name: "String" - } - }, - signedExpiresOn: { - serializedName: "SignedExpiry", - required: true, - xmlName: "SignedExpiry", - type: { - name: "String" - } - }, - signedService: { - serializedName: "SignedService", - required: true, - xmlName: "SignedService", - type: { - name: "String" - } - }, - signedVersion: { - serializedName: "SignedVersion", - required: true, - xmlName: "SignedVersion", - type: { - name: "String" - } - }, - value: { - serializedName: "Value", - required: true, - xmlName: "Value", - type: { - name: "String" - } - } - } + exports2.StorageRetryPolicyFactory = StorageRetryPolicyFactory; + } +}); + +// node_modules/@azure/storage-blob/dist/commonjs/policies/CredentialPolicy.js +var require_CredentialPolicy = __commonJS({ + "node_modules/@azure/storage-blob/dist/commonjs/policies/CredentialPolicy.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.CredentialPolicy = void 0; + var RequestPolicy_js_1 = require_RequestPolicy(); + var CredentialPolicy = class extends RequestPolicy_js_1.BaseRequestPolicy { + /** + * Sends out request. 
+ * + * @param request - + */ + sendRequest(request2) { + return this._nextPolicy.sendRequest(this.signRequest(request2)); } - }; - exports2.FilterBlobSegment = { - serializedName: "FilterBlobSegment", - xmlName: "EnumerationResults", - type: { - name: "Composite", - className: "FilterBlobSegment", - modelProperties: { - serviceEndpoint: { - serializedName: "ServiceEndpoint", - required: true, - xmlName: "ServiceEndpoint", - xmlIsAttribute: true, - type: { - name: "String" - } - }, - where: { - serializedName: "Where", - required: true, - xmlName: "Where", - type: { - name: "String" - } - }, - blobs: { - serializedName: "Blobs", - required: true, - xmlName: "Blobs", - xmlIsWrapped: true, - xmlElementName: "Blob", - type: { - name: "Sequence", - element: { - type: { - name: "Composite", - className: "FilterBlobItem" - } - } - } - }, - continuationToken: { - serializedName: "NextMarker", - xmlName: "NextMarker", - type: { - name: "String" - } - } - } + /** + * Child classes must implement this method with request signing. This method + * will be executed in {@link sendRequest}. + * + * @param request - + */ + signRequest(request2) { + return request2; } }; - exports2.FilterBlobItem = { - serializedName: "FilterBlobItem", - xmlName: "Blob", - type: { - name: "Composite", - className: "FilterBlobItem", - modelProperties: { - name: { - serializedName: "Name", - required: true, - xmlName: "Name", - type: { - name: "String" - } - }, - containerName: { - serializedName: "ContainerName", - required: true, - xmlName: "ContainerName", - type: { - name: "String" - } - }, - tags: { - serializedName: "Tags", - xmlName: "Tags", - type: { - name: "Composite", - className: "BlobTags" - } - } - } - } - }; - exports2.BlobTags = { - serializedName: "BlobTags", - xmlName: "Tags", - type: { - name: "Composite", - className: "BlobTags", - modelProperties: { - blobTagSet: { - serializedName: "BlobTagSet", - required: true, - xmlName: "TagSet", - xmlIsWrapped: true, - xmlElementName: "Tag", - type: { - name: "Sequence", - element: { - type: { - name: "Composite", - className: "BlobTag" - } - } - } - } - } - } - }; - exports2.BlobTag = { - serializedName: "BlobTag", - xmlName: "Tag", - type: { - name: "Composite", - className: "BlobTag", - modelProperties: { - key: { - serializedName: "Key", - required: true, - xmlName: "Key", - type: { - name: "String" - } - }, - value: { - serializedName: "Value", - required: true, - xmlName: "Value", - type: { - name: "String" - } - } - } - } - }; - exports2.SignedIdentifier = { - serializedName: "SignedIdentifier", - xmlName: "SignedIdentifier", - type: { - name: "Composite", - className: "SignedIdentifier", - modelProperties: { - id: { - serializedName: "Id", - required: true, - xmlName: "Id", - type: { - name: "String" - } - }, - accessPolicy: { - serializedName: "AccessPolicy", - xmlName: "AccessPolicy", - type: { - name: "Composite", - className: "AccessPolicy" - } - } - } - } - }; - exports2.AccessPolicy = { - serializedName: "AccessPolicy", - type: { - name: "Composite", - className: "AccessPolicy", - modelProperties: { - startsOn: { - serializedName: "Start", - xmlName: "Start", - type: { - name: "String" - } - }, - expiresOn: { - serializedName: "Expiry", - xmlName: "Expiry", - type: { - name: "String" - } - }, - permissions: { - serializedName: "Permission", - xmlName: "Permission", - type: { - name: "String" - } - } - } - } - }; - exports2.ListBlobsFlatSegmentResponse = { - serializedName: "ListBlobsFlatSegmentResponse", - xmlName: "EnumerationResults", - type: { - 
name: "Composite", - className: "ListBlobsFlatSegmentResponse", - modelProperties: { - serviceEndpoint: { - serializedName: "ServiceEndpoint", - required: true, - xmlName: "ServiceEndpoint", - xmlIsAttribute: true, - type: { - name: "String" - } - }, - containerName: { - serializedName: "ContainerName", - required: true, - xmlName: "ContainerName", - xmlIsAttribute: true, - type: { - name: "String" - } - }, - prefix: { - serializedName: "Prefix", - xmlName: "Prefix", - type: { - name: "String" - } - }, - marker: { - serializedName: "Marker", - xmlName: "Marker", - type: { - name: "String" - } - }, - maxPageSize: { - serializedName: "MaxResults", - xmlName: "MaxResults", - type: { - name: "Number" - } - }, - segment: { - serializedName: "Segment", - xmlName: "Blobs", - type: { - name: "Composite", - className: "BlobFlatListSegment" - } - }, - continuationToken: { - serializedName: "NextMarker", - xmlName: "NextMarker", - type: { - name: "String" - } - } - } - } - }; - exports2.BlobFlatListSegment = { - serializedName: "BlobFlatListSegment", - xmlName: "Blobs", - type: { - name: "Composite", - className: "BlobFlatListSegment", - modelProperties: { - blobItems: { - serializedName: "BlobItems", - required: true, - xmlName: "BlobItems", - xmlElementName: "Blob", - type: { - name: "Sequence", - element: { - type: { - name: "Composite", - className: "BlobItemInternal" - } - } - } - } - } - } - }; - exports2.BlobItemInternal = { - serializedName: "BlobItemInternal", - xmlName: "Blob", - type: { - name: "Composite", - className: "BlobItemInternal", - modelProperties: { - name: { - serializedName: "Name", - xmlName: "Name", - type: { - name: "Composite", - className: "BlobName" - } - }, - deleted: { - serializedName: "Deleted", - required: true, - xmlName: "Deleted", - type: { - name: "Boolean" - } - }, - snapshot: { - serializedName: "Snapshot", - required: true, - xmlName: "Snapshot", - type: { - name: "String" - } - }, - versionId: { - serializedName: "VersionId", - xmlName: "VersionId", - type: { - name: "String" - } - }, - isCurrentVersion: { - serializedName: "IsCurrentVersion", - xmlName: "IsCurrentVersion", - type: { - name: "Boolean" - } - }, - properties: { - serializedName: "Properties", - xmlName: "Properties", - type: { - name: "Composite", - className: "BlobPropertiesInternal" - } - }, - metadata: { - serializedName: "Metadata", - xmlName: "Metadata", - type: { - name: "Dictionary", - value: { type: { name: "String" } } - } - }, - blobTags: { - serializedName: "BlobTags", - xmlName: "Tags", - type: { - name: "Composite", - className: "BlobTags" - } - }, - objectReplicationMetadata: { - serializedName: "ObjectReplicationMetadata", - xmlName: "OrMetadata", - type: { - name: "Dictionary", - value: { type: { name: "String" } } - } - }, - hasVersionsOnly: { - serializedName: "HasVersionsOnly", - xmlName: "HasVersionsOnly", - type: { - name: "Boolean" - } - } - } - } - }; - exports2.BlobName = { - serializedName: "BlobName", - type: { - name: "Composite", - className: "BlobName", - modelProperties: { - encoded: { - serializedName: "Encoded", - xmlName: "Encoded", - xmlIsAttribute: true, - type: { - name: "Boolean" - } - }, - content: { - serializedName: "content", - xmlName: "content", - xmlIsMsText: true, - type: { - name: "String" - } - } - } - } - }; - exports2.BlobPropertiesInternal = { - serializedName: "BlobPropertiesInternal", - xmlName: "Properties", - type: { - name: "Composite", - className: "BlobPropertiesInternal", - modelProperties: { - createdOn: { - serializedName: 
"Creation-Time", - xmlName: "Creation-Time", - type: { - name: "DateTimeRfc1123" - } - }, - lastModified: { - serializedName: "Last-Modified", - required: true, - xmlName: "Last-Modified", - type: { - name: "DateTimeRfc1123" - } - }, - etag: { - serializedName: "Etag", - required: true, - xmlName: "Etag", - type: { - name: "String" - } - }, - contentLength: { - serializedName: "Content-Length", - xmlName: "Content-Length", - type: { - name: "Number" - } - }, - contentType: { - serializedName: "Content-Type", - xmlName: "Content-Type", - type: { - name: "String" - } - }, - contentEncoding: { - serializedName: "Content-Encoding", - xmlName: "Content-Encoding", - type: { - name: "String" - } - }, - contentLanguage: { - serializedName: "Content-Language", - xmlName: "Content-Language", - type: { - name: "String" - } - }, - contentMD5: { - serializedName: "Content-MD5", - xmlName: "Content-MD5", - type: { - name: "ByteArray" - } - }, - contentDisposition: { - serializedName: "Content-Disposition", - xmlName: "Content-Disposition", - type: { - name: "String" - } - }, - cacheControl: { - serializedName: "Cache-Control", - xmlName: "Cache-Control", - type: { - name: "String" - } - }, - blobSequenceNumber: { - serializedName: "x-ms-blob-sequence-number", - xmlName: "x-ms-blob-sequence-number", - type: { - name: "Number" - } - }, - blobType: { - serializedName: "BlobType", - xmlName: "BlobType", - type: { - name: "Enum", - allowedValues: ["BlockBlob", "PageBlob", "AppendBlob"] - } - }, - leaseStatus: { - serializedName: "LeaseStatus", - xmlName: "LeaseStatus", - type: { - name: "Enum", - allowedValues: ["locked", "unlocked"] - } - }, - leaseState: { - serializedName: "LeaseState", - xmlName: "LeaseState", - type: { - name: "Enum", - allowedValues: [ - "available", - "leased", - "expired", - "breaking", - "broken" - ] - } - }, - leaseDuration: { - serializedName: "LeaseDuration", - xmlName: "LeaseDuration", - type: { - name: "Enum", - allowedValues: ["infinite", "fixed"] - } - }, - copyId: { - serializedName: "CopyId", - xmlName: "CopyId", - type: { - name: "String" - } - }, - copyStatus: { - serializedName: "CopyStatus", - xmlName: "CopyStatus", - type: { - name: "Enum", - allowedValues: ["pending", "success", "aborted", "failed"] - } - }, - copySource: { - serializedName: "CopySource", - xmlName: "CopySource", - type: { - name: "String" - } - }, - copyProgress: { - serializedName: "CopyProgress", - xmlName: "CopyProgress", - type: { - name: "String" - } - }, - copyCompletedOn: { - serializedName: "CopyCompletionTime", - xmlName: "CopyCompletionTime", - type: { - name: "DateTimeRfc1123" - } - }, - copyStatusDescription: { - serializedName: "CopyStatusDescription", - xmlName: "CopyStatusDescription", - type: { - name: "String" - } - }, - serverEncrypted: { - serializedName: "ServerEncrypted", - xmlName: "ServerEncrypted", - type: { - name: "Boolean" - } - }, - incrementalCopy: { - serializedName: "IncrementalCopy", - xmlName: "IncrementalCopy", - type: { - name: "Boolean" - } - }, - destinationSnapshot: { - serializedName: "DestinationSnapshot", - xmlName: "DestinationSnapshot", - type: { - name: "String" - } - }, - deletedOn: { - serializedName: "DeletedTime", - xmlName: "DeletedTime", - type: { - name: "DateTimeRfc1123" - } - }, - remainingRetentionDays: { - serializedName: "RemainingRetentionDays", - xmlName: "RemainingRetentionDays", - type: { - name: "Number" - } - }, - accessTier: { - serializedName: "AccessTier", - xmlName: "AccessTier", - type: { - name: "Enum", - allowedValues: [ - "P4", - 
"P6", - "P10", - "P15", - "P20", - "P30", - "P40", - "P50", - "P60", - "P70", - "P80", - "Hot", - "Cool", - "Archive", - "Cold" - ] - } - }, - accessTierInferred: { - serializedName: "AccessTierInferred", - xmlName: "AccessTierInferred", - type: { - name: "Boolean" - } - }, - archiveStatus: { - serializedName: "ArchiveStatus", - xmlName: "ArchiveStatus", - type: { - name: "Enum", - allowedValues: [ - "rehydrate-pending-to-hot", - "rehydrate-pending-to-cool", - "rehydrate-pending-to-cold" - ] - } - }, - customerProvidedKeySha256: { - serializedName: "CustomerProvidedKeySha256", - xmlName: "CustomerProvidedKeySha256", - type: { - name: "String" - } - }, - encryptionScope: { - serializedName: "EncryptionScope", - xmlName: "EncryptionScope", - type: { - name: "String" - } - }, - accessTierChangedOn: { - serializedName: "AccessTierChangeTime", - xmlName: "AccessTierChangeTime", - type: { - name: "DateTimeRfc1123" - } - }, - tagCount: { - serializedName: "TagCount", - xmlName: "TagCount", - type: { - name: "Number" - } - }, - expiresOn: { - serializedName: "Expiry-Time", - xmlName: "Expiry-Time", - type: { - name: "DateTimeRfc1123" - } - }, - isSealed: { - serializedName: "Sealed", - xmlName: "Sealed", - type: { - name: "Boolean" - } - }, - rehydratePriority: { - serializedName: "RehydratePriority", - xmlName: "RehydratePriority", - type: { - name: "Enum", - allowedValues: ["High", "Standard"] - } - }, - lastAccessedOn: { - serializedName: "LastAccessTime", - xmlName: "LastAccessTime", - type: { - name: "DateTimeRfc1123" - } - }, - immutabilityPolicyExpiresOn: { - serializedName: "ImmutabilityPolicyUntilDate", - xmlName: "ImmutabilityPolicyUntilDate", - type: { - name: "DateTimeRfc1123" - } - }, - immutabilityPolicyMode: { - serializedName: "ImmutabilityPolicyMode", - xmlName: "ImmutabilityPolicyMode", - type: { - name: "Enum", - allowedValues: ["Mutable", "Unlocked", "Locked"] - } - }, - legalHold: { - serializedName: "LegalHold", - xmlName: "LegalHold", - type: { - name: "Boolean" - } - } - } - } - }; - exports2.ListBlobsHierarchySegmentResponse = { - serializedName: "ListBlobsHierarchySegmentResponse", - xmlName: "EnumerationResults", - type: { - name: "Composite", - className: "ListBlobsHierarchySegmentResponse", - modelProperties: { - serviceEndpoint: { - serializedName: "ServiceEndpoint", - required: true, - xmlName: "ServiceEndpoint", - xmlIsAttribute: true, - type: { - name: "String" - } - }, - containerName: { - serializedName: "ContainerName", - required: true, - xmlName: "ContainerName", - xmlIsAttribute: true, - type: { - name: "String" - } - }, - prefix: { - serializedName: "Prefix", - xmlName: "Prefix", - type: { - name: "String" - } - }, - marker: { - serializedName: "Marker", - xmlName: "Marker", - type: { - name: "String" - } - }, - maxPageSize: { - serializedName: "MaxResults", - xmlName: "MaxResults", - type: { - name: "Number" - } - }, - delimiter: { - serializedName: "Delimiter", - xmlName: "Delimiter", - type: { - name: "String" - } - }, - segment: { - serializedName: "Segment", - xmlName: "Blobs", - type: { - name: "Composite", - className: "BlobHierarchyListSegment" - } - }, - continuationToken: { - serializedName: "NextMarker", - xmlName: "NextMarker", - type: { - name: "String" - } - } - } - } - }; - exports2.BlobHierarchyListSegment = { - serializedName: "BlobHierarchyListSegment", - xmlName: "Blobs", - type: { - name: "Composite", - className: "BlobHierarchyListSegment", - modelProperties: { - blobPrefixes: { - serializedName: "BlobPrefixes", - xmlName: 
"BlobPrefixes", - xmlElementName: "BlobPrefix", - type: { - name: "Sequence", - element: { - type: { - name: "Composite", - className: "BlobPrefix" - } - } - } - }, - blobItems: { - serializedName: "BlobItems", - required: true, - xmlName: "BlobItems", - xmlElementName: "Blob", - type: { - name: "Sequence", - element: { - type: { - name: "Composite", - className: "BlobItemInternal" - } - } - } - } - } - } - }; - exports2.BlobPrefix = { - serializedName: "BlobPrefix", - type: { - name: "Composite", - className: "BlobPrefix", - modelProperties: { - name: { - serializedName: "Name", - xmlName: "Name", - type: { - name: "Composite", - className: "BlobName" - } - } - } - } - }; - exports2.BlockLookupList = { - serializedName: "BlockLookupList", - xmlName: "BlockList", - type: { - name: "Composite", - className: "BlockLookupList", - modelProperties: { - committed: { - serializedName: "Committed", - xmlName: "Committed", - xmlElementName: "Committed", - type: { - name: "Sequence", - element: { - type: { - name: "String" - } - } - } - }, - uncommitted: { - serializedName: "Uncommitted", - xmlName: "Uncommitted", - xmlElementName: "Uncommitted", - type: { - name: "Sequence", - element: { - type: { - name: "String" - } - } - } - }, - latest: { - serializedName: "Latest", - xmlName: "Latest", - xmlElementName: "Latest", - type: { - name: "Sequence", - element: { - type: { - name: "String" - } - } - } - } - } - } - }; - exports2.BlockList = { - serializedName: "BlockList", - type: { - name: "Composite", - className: "BlockList", - modelProperties: { - committedBlocks: { - serializedName: "CommittedBlocks", - xmlName: "CommittedBlocks", - xmlIsWrapped: true, - xmlElementName: "Block", - type: { - name: "Sequence", - element: { - type: { - name: "Composite", - className: "Block" - } - } - } - }, - uncommittedBlocks: { - serializedName: "UncommittedBlocks", - xmlName: "UncommittedBlocks", - xmlIsWrapped: true, - xmlElementName: "Block", - type: { - name: "Sequence", - element: { - type: { - name: "Composite", - className: "Block" - } - } - } - } - } - } - }; - exports2.Block = { - serializedName: "Block", - type: { - name: "Composite", - className: "Block", - modelProperties: { - name: { - serializedName: "Name", - required: true, - xmlName: "Name", - type: { - name: "String" - } - }, - size: { - serializedName: "Size", - required: true, - xmlName: "Size", - type: { - name: "Number" - } - } - } - } - }; - exports2.PageList = { - serializedName: "PageList", - type: { - name: "Composite", - className: "PageList", - modelProperties: { - pageRange: { - serializedName: "PageRange", - xmlName: "PageRange", - xmlElementName: "PageRange", - type: { - name: "Sequence", - element: { - type: { - name: "Composite", - className: "PageRange" - } - } - } - }, - clearRange: { - serializedName: "ClearRange", - xmlName: "ClearRange", - xmlElementName: "ClearRange", - type: { - name: "Sequence", - element: { - type: { - name: "Composite", - className: "ClearRange" - } - } - } - }, - continuationToken: { - serializedName: "NextMarker", - xmlName: "NextMarker", - type: { - name: "String" - } - } - } - } - }; - exports2.PageRange = { - serializedName: "PageRange", - xmlName: "PageRange", - type: { - name: "Composite", - className: "PageRange", - modelProperties: { - start: { - serializedName: "Start", - required: true, - xmlName: "Start", - type: { - name: "Number" - } - }, - end: { - serializedName: "End", - required: true, - xmlName: "End", - type: { - name: "Number" - } - } - } - } - }; - exports2.ClearRange = { - 
serializedName: "ClearRange", - xmlName: "ClearRange", - type: { - name: "Composite", - className: "ClearRange", - modelProperties: { - start: { - serializedName: "Start", - required: true, - xmlName: "Start", - type: { - name: "Number" - } - }, - end: { - serializedName: "End", - required: true, - xmlName: "End", - type: { - name: "Number" - } - } - } - } - }; - exports2.QueryRequest = { - serializedName: "QueryRequest", - xmlName: "QueryRequest", - type: { - name: "Composite", - className: "QueryRequest", - modelProperties: { - queryType: { - serializedName: "QueryType", - required: true, - xmlName: "QueryType", - type: { - name: "String" - } - }, - expression: { - serializedName: "Expression", - required: true, - xmlName: "Expression", - type: { - name: "String" - } - }, - inputSerialization: { - serializedName: "InputSerialization", - xmlName: "InputSerialization", - type: { - name: "Composite", - className: "QuerySerialization" - } - }, - outputSerialization: { - serializedName: "OutputSerialization", - xmlName: "OutputSerialization", - type: { - name: "Composite", - className: "QuerySerialization" - } - } - } - } - }; - exports2.QuerySerialization = { - serializedName: "QuerySerialization", - type: { - name: "Composite", - className: "QuerySerialization", - modelProperties: { - format: { - serializedName: "Format", - xmlName: "Format", - type: { - name: "Composite", - className: "QueryFormat" - } - } - } - } - }; - exports2.QueryFormat = { - serializedName: "QueryFormat", - type: { - name: "Composite", - className: "QueryFormat", - modelProperties: { - type: { - serializedName: "Type", - required: true, - xmlName: "Type", - type: { - name: "Enum", - allowedValues: ["delimited", "json", "arrow", "parquet"] - } - }, - delimitedTextConfiguration: { - serializedName: "DelimitedTextConfiguration", - xmlName: "DelimitedTextConfiguration", - type: { - name: "Composite", - className: "DelimitedTextConfiguration" - } - }, - jsonTextConfiguration: { - serializedName: "JsonTextConfiguration", - xmlName: "JsonTextConfiguration", - type: { - name: "Composite", - className: "JsonTextConfiguration" - } - }, - arrowConfiguration: { - serializedName: "ArrowConfiguration", - xmlName: "ArrowConfiguration", - type: { - name: "Composite", - className: "ArrowConfiguration" - } - }, - parquetTextConfiguration: { - serializedName: "ParquetTextConfiguration", - xmlName: "ParquetTextConfiguration", - type: { - name: "Dictionary", - value: { type: { name: "any" } } - } - } - } - } - }; - exports2.DelimitedTextConfiguration = { - serializedName: "DelimitedTextConfiguration", - xmlName: "DelimitedTextConfiguration", - type: { - name: "Composite", - className: "DelimitedTextConfiguration", - modelProperties: { - columnSeparator: { - serializedName: "ColumnSeparator", - xmlName: "ColumnSeparator", - type: { - name: "String" - } - }, - fieldQuote: { - serializedName: "FieldQuote", - xmlName: "FieldQuote", - type: { - name: "String" - } - }, - recordSeparator: { - serializedName: "RecordSeparator", - xmlName: "RecordSeparator", - type: { - name: "String" - } - }, - escapeChar: { - serializedName: "EscapeChar", - xmlName: "EscapeChar", - type: { - name: "String" - } - }, - headersPresent: { - serializedName: "HeadersPresent", - xmlName: "HasHeaders", - type: { - name: "Boolean" - } - } - } - } - }; - exports2.JsonTextConfiguration = { - serializedName: "JsonTextConfiguration", - xmlName: "JsonTextConfiguration", - type: { - name: "Composite", - className: "JsonTextConfiguration", - modelProperties: { - 
recordSeparator: { - serializedName: "RecordSeparator", - xmlName: "RecordSeparator", - type: { - name: "String" - } - } - } - } - }; - exports2.ArrowConfiguration = { - serializedName: "ArrowConfiguration", - xmlName: "ArrowConfiguration", - type: { - name: "Composite", - className: "ArrowConfiguration", - modelProperties: { - schema: { - serializedName: "Schema", - required: true, - xmlName: "Schema", - xmlIsWrapped: true, - xmlElementName: "Field", - type: { - name: "Sequence", - element: { - type: { - name: "Composite", - className: "ArrowField" - } - } - } - } + exports2.CredentialPolicy = CredentialPolicy; + } +}); + +// node_modules/@azure/storage-blob/dist/commonjs/utils/SharedKeyComparator.js +var require_SharedKeyComparator = __commonJS({ + "node_modules/@azure/storage-blob/dist/commonjs/utils/SharedKeyComparator.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.compareHeader = compareHeader; + var table_lv0 = new Uint32Array([ + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 1820, + 0, + 1823, + 1825, + 1827, + 1829, + 0, + 0, + 0, + 1837, + 2051, + 0, + 0, + 1843, + 0, + 3331, + 3354, + 3356, + 3358, + 3360, + 3362, + 3364, + 3366, + 3368, + 3370, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 3586, + 3593, + 3594, + 3610, + 3617, + 3619, + 3621, + 3628, + 3634, + 3637, + 3638, + 3656, + 3665, + 3696, + 3708, + 3710, + 3721, + 3722, + 3729, + 3737, + 3743, + 3746, + 3748, + 3750, + 3751, + 3753, + 0, + 0, + 0, + 1859, + 1860, + 1864, + 3586, + 3593, + 3594, + 3610, + 3617, + 3619, + 3621, + 3628, + 3634, + 3637, + 3638, + 3656, + 3665, + 3696, + 3708, + 3710, + 3721, + 3722, + 3729, + 3737, + 3743, + 3746, + 3748, + 3750, + 3751, + 3753, + 0, + 1868, + 0, + 1872, + 0 + ]); + var table_lv2 = new Uint32Array([ + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 18, + 18, + 18, + 18, + 18, + 18, + 18, + 18, + 18, + 18, + 18, + 18, + 18, + 18, + 18, + 18, + 18, + 18, + 18, + 18, + 18, + 18, + 18, + 18, + 18, + 18, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0 + ]); + var table_lv4 = new Uint32Array([ + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 32786, + 0, + 0, + 0, + 0, + 0, + 33298, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0 + ]); + function compareHeader(lhs, rhs) { + if (isLessThan(lhs, rhs)) + return -1; + return 1; + } + function isLessThan(lhs, rhs) { + const tables = [table_lv0, table_lv2, table_lv4]; + let curr_level = 0; + let i = 0; + let j = 0; + while (curr_level < tables.length) { + if 
(curr_level === tables.length - 1 && i !== j) { + return i > j; } - } - }; - exports2.ArrowField = { - serializedName: "ArrowField", - xmlName: "Field", - type: { - name: "Composite", - className: "ArrowField", - modelProperties: { - type: { - serializedName: "Type", - required: true, - xmlName: "Type", - type: { - name: "String" - } - }, - name: { - serializedName: "Name", - xmlName: "Name", - type: { - name: "String" - } - }, - precision: { - serializedName: "Precision", - xmlName: "Precision", - type: { - name: "Number" - } - }, - scale: { - serializedName: "Scale", - xmlName: "Scale", - type: { - name: "Number" - } - } + const weight1 = i < lhs.length ? tables[curr_level][lhs[i].charCodeAt(0)] : 1; + const weight2 = j < rhs.length ? tables[curr_level][rhs[j].charCodeAt(0)] : 1; + if (weight1 === 1 && weight2 === 1) { + i = 0; + j = 0; + ++curr_level; + } else if (weight1 === weight2) { + ++i; + ++j; + } else if (weight1 === 0) { + ++i; + } else if (weight2 === 0) { + ++j; + } else { + return weight1 < weight2; } } - }; - exports2.ServiceSetPropertiesHeaders = { - serializedName: "Service_setPropertiesHeaders", - type: { - name: "Composite", - className: "ServiceSetPropertiesHeaders", - modelProperties: { - clientRequestId: { - serializedName: "x-ms-client-request-id", - xmlName: "x-ms-client-request-id", - type: { - name: "String" - } - }, - requestId: { - serializedName: "x-ms-request-id", - xmlName: "x-ms-request-id", - type: { - name: "String" - } - }, - version: { - serializedName: "x-ms-version", - xmlName: "x-ms-version", - type: { - name: "String" - } - }, - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } - } - } + return false; + } + } +}); + +// node_modules/@azure/storage-blob/dist/commonjs/policies/StorageSharedKeyCredentialPolicy.js +var require_StorageSharedKeyCredentialPolicy = __commonJS({ + "node_modules/@azure/storage-blob/dist/commonjs/policies/StorageSharedKeyCredentialPolicy.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.StorageSharedKeyCredentialPolicy = void 0; + var constants_js_1 = require_constants15(); + var utils_common_js_1 = require_utils_common(); + var CredentialPolicy_js_1 = require_CredentialPolicy(); + var SharedKeyComparator_js_1 = require_SharedKeyComparator(); + var StorageSharedKeyCredentialPolicy = class extends CredentialPolicy_js_1.CredentialPolicy { + /** + * Reference to StorageSharedKeyCredential which generates StorageSharedKeyCredentialPolicy + */ + factory; + /** + * Creates an instance of StorageSharedKeyCredentialPolicy. + * @param nextPolicy - + * @param options - + * @param factory - + */ + constructor(nextPolicy, options, factory) { + super(nextPolicy, options); + this.factory = factory; } - }; - exports2.ServiceSetPropertiesExceptionHeaders = { - serializedName: "Service_setPropertiesExceptionHeaders", - type: { - name: "Composite", - className: "ServiceSetPropertiesExceptionHeaders", - modelProperties: { - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } - } + /** + * Signs request. 
+ * + * @param request - + */ + signRequest(request2) { + request2.headers.set(constants_js_1.HeaderConstants.X_MS_DATE, (/* @__PURE__ */ new Date()).toUTCString()); + if (request2.body && (typeof request2.body === "string" || request2.body !== void 0) && request2.body.length > 0) { + request2.headers.set(constants_js_1.HeaderConstants.CONTENT_LENGTH, Buffer.byteLength(request2.body)); } + const stringToSign = [ + request2.method.toUpperCase(), + this.getHeaderValueToSign(request2, constants_js_1.HeaderConstants.CONTENT_LANGUAGE), + this.getHeaderValueToSign(request2, constants_js_1.HeaderConstants.CONTENT_ENCODING), + this.getHeaderValueToSign(request2, constants_js_1.HeaderConstants.CONTENT_LENGTH), + this.getHeaderValueToSign(request2, constants_js_1.HeaderConstants.CONTENT_MD5), + this.getHeaderValueToSign(request2, constants_js_1.HeaderConstants.CONTENT_TYPE), + this.getHeaderValueToSign(request2, constants_js_1.HeaderConstants.DATE), + this.getHeaderValueToSign(request2, constants_js_1.HeaderConstants.IF_MODIFIED_SINCE), + this.getHeaderValueToSign(request2, constants_js_1.HeaderConstants.IF_MATCH), + this.getHeaderValueToSign(request2, constants_js_1.HeaderConstants.IF_NONE_MATCH), + this.getHeaderValueToSign(request2, constants_js_1.HeaderConstants.IF_UNMODIFIED_SINCE), + this.getHeaderValueToSign(request2, constants_js_1.HeaderConstants.RANGE) + ].join("\n") + "\n" + this.getCanonicalizedHeadersString(request2) + this.getCanonicalizedResourceString(request2); + const signature = this.factory.computeHMACSHA256(stringToSign); + request2.headers.set(constants_js_1.HeaderConstants.AUTHORIZATION, `SharedKey ${this.factory.accountName}:${signature}`); + return request2; } - }; - exports2.ServiceGetPropertiesHeaders = { - serializedName: "Service_getPropertiesHeaders", - type: { - name: "Composite", - className: "ServiceGetPropertiesHeaders", - modelProperties: { - clientRequestId: { - serializedName: "x-ms-client-request-id", - xmlName: "x-ms-client-request-id", - type: { - name: "String" - } - }, - requestId: { - serializedName: "x-ms-request-id", - xmlName: "x-ms-request-id", - type: { - name: "String" - } - }, - version: { - serializedName: "x-ms-version", - xmlName: "x-ms-version", - type: { - name: "String" - } - }, - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } - } + /** + * Retrieve header value according to shared key sign rules. 
+ * @see https://learn.microsoft.com/rest/api/storageservices/authenticate-with-shared-key + * + * @param request - + * @param headerName - + */ + getHeaderValueToSign(request2, headerName) { + const value = request2.headers.get(headerName); + if (!value) { + return ""; } - } - }; - exports2.ServiceGetPropertiesExceptionHeaders = { - serializedName: "Service_getPropertiesExceptionHeaders", - type: { - name: "Composite", - className: "ServiceGetPropertiesExceptionHeaders", - modelProperties: { - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } - } + if (headerName === constants_js_1.HeaderConstants.CONTENT_LENGTH && value === "0") { + return ""; } + return value; } - }; - exports2.ServiceGetStatisticsHeaders = { - serializedName: "Service_getStatisticsHeaders", - type: { - name: "Composite", - className: "ServiceGetStatisticsHeaders", - modelProperties: { - clientRequestId: { - serializedName: "x-ms-client-request-id", - xmlName: "x-ms-client-request-id", - type: { - name: "String" - } - }, - requestId: { - serializedName: "x-ms-request-id", - xmlName: "x-ms-request-id", - type: { - name: "String" - } - }, - version: { - serializedName: "x-ms-version", - xmlName: "x-ms-version", - type: { - name: "String" - } - }, - date: { - serializedName: "date", - xmlName: "date", - type: { - name: "DateTimeRfc1123" - } - }, - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } + /** + * To construct the CanonicalizedHeaders portion of the signature string, follow these steps: + * 1. Retrieve all headers for the resource that begin with x-ms-, including the x-ms-date header. + * 2. Convert each HTTP header name to lowercase. + * 3. Sort the headers lexicographically by header name, in ascending order. + * Each header may appear only once in the string. + * 4. Replace any linear whitespace in the header value with a single space. + * 5. Trim any whitespace around the colon in the header. + * 6. Finally, append a new-line character to each canonicalized header in the resulting list. + * Construct the CanonicalizedHeaders string by concatenating all headers in this list into a single string. + * + * @param request - + */ + getCanonicalizedHeadersString(request2) { + let headersArray = request2.headers.headersArray().filter((value) => { + return value.name.toLowerCase().startsWith(constants_js_1.HeaderConstants.PREFIX_FOR_STORAGE); + }); + headersArray.sort((a, b) => { + return (0, SharedKeyComparator_js_1.compareHeader)(a.name.toLowerCase(), b.name.toLowerCase()); + }); + headersArray = headersArray.filter((value, index, array) => { + if (index > 0 && value.name.toLowerCase() === array[index - 1].name.toLowerCase()) { + return false; } - } + return true; + }); + let canonicalizedHeadersStringToSign = ""; + headersArray.forEach((header) => { + canonicalizedHeadersStringToSign += `${header.name.toLowerCase().trimRight()}:${header.value.trimLeft()} +`; + }); + return canonicalizedHeadersStringToSign; } - }; - exports2.ServiceGetStatisticsExceptionHeaders = { - serializedName: "Service_getStatisticsExceptionHeaders", - type: { - name: "Composite", - className: "ServiceGetStatisticsExceptionHeaders", - modelProperties: { - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" + /** + * Retrieves the webResource canonicalized resource string. 
+ * + * @param request - + */ + getCanonicalizedResourceString(request2) { + const path13 = (0, utils_common_js_1.getURLPath)(request2.url) || "/"; + let canonicalizedResourceString = ""; + canonicalizedResourceString += `/${this.factory.accountName}${path13}`; + const queries = (0, utils_common_js_1.getURLQueries)(request2.url); + const lowercaseQueries = {}; + if (queries) { + const queryKeys = []; + for (const key in queries) { + if (Object.prototype.hasOwnProperty.call(queries, key)) { + const lowercaseKey = key.toLowerCase(); + lowercaseQueries[lowercaseKey] = queries[key]; + queryKeys.push(lowercaseKey); } } - } - } - }; - exports2.ServiceListContainersSegmentHeaders = { - serializedName: "Service_listContainersSegmentHeaders", - type: { - name: "Composite", - className: "ServiceListContainersSegmentHeaders", - modelProperties: { - clientRequestId: { - serializedName: "x-ms-client-request-id", - xmlName: "x-ms-client-request-id", - type: { - name: "String" - } - }, - requestId: { - serializedName: "x-ms-request-id", - xmlName: "x-ms-request-id", - type: { - name: "String" - } - }, - version: { - serializedName: "x-ms-version", - xmlName: "x-ms-version", - type: { - name: "String" - } - }, - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } + queryKeys.sort(); + for (const key of queryKeys) { + canonicalizedResourceString += ` +${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } + return canonicalizedResourceString; } }; - exports2.ServiceListContainersSegmentExceptionHeaders = { - serializedName: "Service_listContainersSegmentExceptionHeaders", - type: { - name: "Composite", - className: "ServiceListContainersSegmentExceptionHeaders", - modelProperties: { - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } - } - } + exports2.StorageSharedKeyCredentialPolicy = StorageSharedKeyCredentialPolicy; + } +}); + +// node_modules/@azure/storage-blob/dist/commonjs/credentials/Credential.js +var require_Credential = __commonJS({ + "node_modules/@azure/storage-blob/dist/commonjs/credentials/Credential.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.Credential = void 0; + var Credential = class { + /** + * Creates a RequestPolicy object. 
+ * + * @param _nextPolicy - + * @param _options - + */ + create(_nextPolicy, _options) { + throw new Error("Method should be implemented in children classes."); } }; - exports2.ServiceGetUserDelegationKeyHeaders = { - serializedName: "Service_getUserDelegationKeyHeaders", - type: { - name: "Composite", - className: "ServiceGetUserDelegationKeyHeaders", - modelProperties: { - clientRequestId: { - serializedName: "x-ms-client-request-id", - xmlName: "x-ms-client-request-id", - type: { - name: "String" - } - }, - requestId: { - serializedName: "x-ms-request-id", - xmlName: "x-ms-request-id", - type: { - name: "String" - } - }, - version: { - serializedName: "x-ms-version", - xmlName: "x-ms-version", - type: { - name: "String" - } - }, - date: { - serializedName: "date", - xmlName: "date", - type: { - name: "DateTimeRfc1123" - } - }, - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } - } - } + exports2.Credential = Credential; + } +}); + +// node_modules/@azure/storage-blob/dist/commonjs/credentials/StorageSharedKeyCredential.js +var require_StorageSharedKeyCredential = __commonJS({ + "node_modules/@azure/storage-blob/dist/commonjs/credentials/StorageSharedKeyCredential.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.StorageSharedKeyCredential = void 0; + var node_crypto_1 = require("node:crypto"); + var StorageSharedKeyCredentialPolicy_js_1 = require_StorageSharedKeyCredentialPolicy(); + var Credential_js_1 = require_Credential(); + var StorageSharedKeyCredential = class extends Credential_js_1.Credential { + /** + * Azure Storage account name; readonly. + */ + accountName; + /** + * Azure Storage account key; readonly. + */ + accountKey; + /** + * Creates an instance of StorageSharedKeyCredential. + * @param accountName - + * @param accountKey - + */ + constructor(accountName, accountKey) { + super(); + this.accountName = accountName; + this.accountKey = Buffer.from(accountKey, "base64"); } - }; - exports2.ServiceGetUserDelegationKeyExceptionHeaders = { - serializedName: "Service_getUserDelegationKeyExceptionHeaders", - type: { - name: "Composite", - className: "ServiceGetUserDelegationKeyExceptionHeaders", - modelProperties: { - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } - } - } + /** + * Creates a StorageSharedKeyCredentialPolicy object. + * + * @param nextPolicy - + * @param options - + */ + create(nextPolicy, options) { + return new StorageSharedKeyCredentialPolicy_js_1.StorageSharedKeyCredentialPolicy(nextPolicy, options, this); + } + /** + * Generates a hash signature for an HTTP request or for a SAS. 
+ * + * @param stringToSign - + */ + computeHMACSHA256(stringToSign) { + return (0, node_crypto_1.createHmac)("sha256", this.accountKey).update(stringToSign, "utf8").digest("base64"); } }; - exports2.ServiceGetAccountInfoHeaders = { - serializedName: "Service_getAccountInfoHeaders", - type: { - name: "Composite", - className: "ServiceGetAccountInfoHeaders", - modelProperties: { - clientRequestId: { - serializedName: "x-ms-client-request-id", - xmlName: "x-ms-client-request-id", - type: { - name: "String" - } - }, - requestId: { - serializedName: "x-ms-request-id", - xmlName: "x-ms-request-id", - type: { - name: "String" - } - }, - version: { - serializedName: "x-ms-version", - xmlName: "x-ms-version", - type: { - name: "String" - } - }, - date: { - serializedName: "date", - xmlName: "date", - type: { - name: "DateTimeRfc1123" - } - }, - skuName: { - serializedName: "x-ms-sku-name", - xmlName: "x-ms-sku-name", - type: { - name: "Enum", - allowedValues: [ - "Standard_LRS", - "Standard_GRS", - "Standard_RAGRS", - "Standard_ZRS", - "Premium_LRS" - ] - } - }, - accountKind: { - serializedName: "x-ms-account-kind", - xmlName: "x-ms-account-kind", - type: { - name: "Enum", - allowedValues: [ - "Storage", - "BlobStorage", - "StorageV2", - "FileStorage", - "BlockBlobStorage" - ] - } - }, - isHierarchicalNamespaceEnabled: { - serializedName: "x-ms-is-hns-enabled", - xmlName: "x-ms-is-hns-enabled", - type: { - name: "Boolean" - } - }, - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } - } - } + exports2.StorageSharedKeyCredential = StorageSharedKeyCredential; + } +}); + +// node_modules/@azure/storage-blob/dist/commonjs/policies/AnonymousCredentialPolicy.js +var require_AnonymousCredentialPolicy = __commonJS({ + "node_modules/@azure/storage-blob/dist/commonjs/policies/AnonymousCredentialPolicy.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.AnonymousCredentialPolicy = void 0; + var CredentialPolicy_js_1 = require_CredentialPolicy(); + var AnonymousCredentialPolicy = class extends CredentialPolicy_js_1.CredentialPolicy { + /** + * Creates an instance of AnonymousCredentialPolicy. + * @param nextPolicy - + * @param options - + */ + // The base class has a protected constructor. Adding a public one to enable constructing of this class. 
+ /* eslint-disable-next-line @typescript-eslint/no-useless-constructor*/ + constructor(nextPolicy, options) { + super(nextPolicy, options); } }; - exports2.ServiceGetAccountInfoExceptionHeaders = { - serializedName: "Service_getAccountInfoExceptionHeaders", - type: { - name: "Composite", - className: "ServiceGetAccountInfoExceptionHeaders", - modelProperties: { - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } - } - } + exports2.AnonymousCredentialPolicy = AnonymousCredentialPolicy; + } +}); + +// node_modules/@azure/storage-blob/dist/commonjs/credentials/AnonymousCredential.js +var require_AnonymousCredential = __commonJS({ + "node_modules/@azure/storage-blob/dist/commonjs/credentials/AnonymousCredential.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.AnonymousCredential = void 0; + var AnonymousCredentialPolicy_js_1 = require_AnonymousCredentialPolicy(); + var Credential_js_1 = require_Credential(); + var AnonymousCredential = class extends Credential_js_1.Credential { + /** + * Creates an {@link AnonymousCredentialPolicy} object. + * + * @param nextPolicy - + * @param options - + */ + create(nextPolicy, options) { + return new AnonymousCredentialPolicy_js_1.AnonymousCredentialPolicy(nextPolicy, options); } }; - exports2.ServiceSubmitBatchHeaders = { - serializedName: "Service_submitBatchHeaders", - type: { - name: "Composite", - className: "ServiceSubmitBatchHeaders", - modelProperties: { - contentType: { - serializedName: "content-type", - xmlName: "content-type", - type: { - name: "String" - } - }, - requestId: { - serializedName: "x-ms-request-id", - xmlName: "x-ms-request-id", - type: { - name: "String" - } - }, - version: { - serializedName: "x-ms-version", - xmlName: "x-ms-version", - type: { - name: "String" - } - }, - clientRequestId: { - serializedName: "x-ms-client-request-id", - xmlName: "x-ms-client-request-id", - type: { - name: "String" - } - }, - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } - } + exports2.AnonymousCredential = AnonymousCredential; + } +}); + +// node_modules/@azure/storage-common/dist/commonjs/BuffersStream.js +var require_BuffersStream = __commonJS({ + "node_modules/@azure/storage-common/dist/commonjs/BuffersStream.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.BuffersStream = void 0; + var node_stream_1 = require("node:stream"); + var BuffersStream = class extends node_stream_1.Readable { + buffers; + byteLength; + /** + * The offset of data to be read in the current buffer. + */ + byteOffsetInCurrentBuffer; + /** + * The index of buffer to be read in the array of buffers. + */ + bufferIndex; + /** + * The total length of data already read. + */ + pushedBytesLength; + /** + * Creates an instance of BuffersStream that will emit the data + * contained in the array of buffers. 
+ * + * @param buffers - Array of buffers containing the data + * @param byteLength - The total length of data contained in the buffers + */ + constructor(buffers, byteLength, options) { + super(options); + this.buffers = buffers; + this.byteLength = byteLength; + this.byteOffsetInCurrentBuffer = 0; + this.bufferIndex = 0; + this.pushedBytesLength = 0; + let buffersLength = 0; + for (const buf of this.buffers) { + buffersLength += buf.byteLength; } - } - }; - exports2.ServiceSubmitBatchExceptionHeaders = { - serializedName: "Service_submitBatchExceptionHeaders", - type: { - name: "Composite", - className: "ServiceSubmitBatchExceptionHeaders", - modelProperties: { - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } - } + if (buffersLength < this.byteLength) { + throw new Error("Data size shouldn't be larger than the total length of buffers."); } } - }; - exports2.ServiceFilterBlobsHeaders = { - serializedName: "Service_filterBlobsHeaders", - type: { - name: "Composite", - className: "ServiceFilterBlobsHeaders", - modelProperties: { - clientRequestId: { - serializedName: "x-ms-client-request-id", - xmlName: "x-ms-client-request-id", - type: { - name: "String" - } - }, - requestId: { - serializedName: "x-ms-request-id", - xmlName: "x-ms-request-id", - type: { - name: "String" - } - }, - version: { - serializedName: "x-ms-version", - xmlName: "x-ms-version", - type: { - name: "String" - } - }, - date: { - serializedName: "date", - xmlName: "date", - type: { - name: "DateTimeRfc1123" - } - }, - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" + /** + * Internal _read() that will be called when the stream wants to pull more data in. + * + * @param size - Optional. 
The size of data to be read + */ + _read(size) { + if (this.pushedBytesLength >= this.byteLength) { + this.push(null); + } + if (!size) { + size = this.readableHighWaterMark; + } + const outBuffers = []; + let i = 0; + while (i < size && this.pushedBytesLength < this.byteLength) { + const remainingDataInAllBuffers = this.byteLength - this.pushedBytesLength; + const remainingCapacityInThisBuffer = this.buffers[this.bufferIndex].byteLength - this.byteOffsetInCurrentBuffer; + const remaining = Math.min(remainingCapacityInThisBuffer, remainingDataInAllBuffers); + if (remaining > size - i) { + const end = this.byteOffsetInCurrentBuffer + size - i; + outBuffers.push(this.buffers[this.bufferIndex].slice(this.byteOffsetInCurrentBuffer, end)); + this.pushedBytesLength += size - i; + this.byteOffsetInCurrentBuffer = end; + i = size; + break; + } else { + const end = this.byteOffsetInCurrentBuffer + remaining; + outBuffers.push(this.buffers[this.bufferIndex].slice(this.byteOffsetInCurrentBuffer, end)); + if (remaining === remainingCapacityInThisBuffer) { + this.byteOffsetInCurrentBuffer = 0; + this.bufferIndex++; + } else { + this.byteOffsetInCurrentBuffer = end; } + this.pushedBytesLength += remaining; + i += remaining; } } + if (outBuffers.length > 1) { + this.push(Buffer.concat(outBuffers)); + } else if (outBuffers.length === 1) { + this.push(outBuffers[0]); + } } }; - exports2.ServiceFilterBlobsExceptionHeaders = { - serializedName: "Service_filterBlobsExceptionHeaders", - type: { - name: "Composite", - className: "ServiceFilterBlobsExceptionHeaders", - modelProperties: { - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } + exports2.BuffersStream = BuffersStream; + } +}); + +// node_modules/@azure/storage-common/dist/commonjs/PooledBuffer.js +var require_PooledBuffer = __commonJS({ + "node_modules/@azure/storage-common/dist/commonjs/PooledBuffer.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.PooledBuffer = void 0; + var tslib_1 = (init_tslib_es6(), __toCommonJS(tslib_es6_exports)); + var BuffersStream_js_1 = require_BuffersStream(); + var node_buffer_1 = tslib_1.__importDefault(require("node:buffer")); + var maxBufferLength = node_buffer_1.default.constants.MAX_LENGTH; + var PooledBuffer = class { + /** + * Internal buffers used to keep the data. + * Each buffer has a length of the maxBufferLength except last one. + */ + buffers = []; + /** + * The total size of internal buffers. + */ + capacity; + /** + * The total size of data contained in internal buffers. + */ + _size; + /** + * The size of the data contained in the pooled buffers. + */ + get size() { + return this._size; + } + constructor(capacity, buffers, totalLength) { + this.capacity = capacity; + this._size = 0; + const bufferNum = Math.ceil(capacity / maxBufferLength); + for (let i = 0; i < bufferNum; i++) { + let len = i === bufferNum - 1 ? 
capacity % maxBufferLength : maxBufferLength; + if (len === 0) { + len = maxBufferLength; } + this.buffers.push(Buffer.allocUnsafe(len)); + } + if (buffers) { + this.fill(buffers, totalLength); } } - }; - exports2.ContainerCreateHeaders = { - serializedName: "Container_createHeaders", - type: { - name: "Composite", - className: "ContainerCreateHeaders", - modelProperties: { - etag: { - serializedName: "etag", - xmlName: "etag", - type: { - name: "String" - } - }, - lastModified: { - serializedName: "last-modified", - xmlName: "last-modified", - type: { - name: "DateTimeRfc1123" - } - }, - clientRequestId: { - serializedName: "x-ms-client-request-id", - xmlName: "x-ms-client-request-id", - type: { - name: "String" - } - }, - requestId: { - serializedName: "x-ms-request-id", - xmlName: "x-ms-request-id", - type: { - name: "String" - } - }, - version: { - serializedName: "x-ms-version", - xmlName: "x-ms-version", - type: { - name: "String" - } - }, - date: { - serializedName: "date", - xmlName: "date", - type: { - name: "DateTimeRfc1123" - } - }, - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } + /** + * Fill the internal buffers with data in the input buffers serially + * with respect to the total length and the total capacity of the internal buffers. + * Data copied will be shift out of the input buffers. + * + * @param buffers - Input buffers containing the data to be filled in the pooled buffer + * @param totalLength - Total length of the data to be filled in. + * + */ + fill(buffers, totalLength) { + this._size = Math.min(this.capacity, totalLength); + let i = 0, j = 0, targetOffset = 0, sourceOffset = 0, totalCopiedNum = 0; + while (totalCopiedNum < this._size) { + const source = buffers[i]; + const target = this.buffers[j]; + const copiedNum = source.copy(target, targetOffset, sourceOffset); + totalCopiedNum += copiedNum; + sourceOffset += copiedNum; + targetOffset += copiedNum; + if (sourceOffset === source.length) { + i++; + sourceOffset = 0; + } + if (targetOffset === target.length) { + j++; + targetOffset = 0; } } + buffers.splice(0, i); + if (buffers.length > 0) { + buffers[0] = buffers[0].slice(sourceOffset); + } + } + /** + * Get the readable stream assembled from all the data in the internal buffers. + * + */ + getReadableStream() { + return new BuffersStream_js_1.BuffersStream(this.buffers, this.size); } }; - exports2.ContainerCreateExceptionHeaders = { - serializedName: "Container_createExceptionHeaders", - type: { - name: "Composite", - className: "ContainerCreateExceptionHeaders", - modelProperties: { - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } - } + exports2.PooledBuffer = PooledBuffer; + } +}); + +// node_modules/@azure/storage-common/dist/commonjs/BufferScheduler.js +var require_BufferScheduler = __commonJS({ + "node_modules/@azure/storage-common/dist/commonjs/BufferScheduler.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.BufferScheduler = void 0; + var events_1 = require("events"); + var PooledBuffer_js_1 = require_PooledBuffer(); + var BufferScheduler = class { + /** + * Size of buffers in incoming and outgoing queues. This class will try to align + * data read from Readable stream into buffer chunks with bufferSize defined. + */ + bufferSize; + /** + * How many buffers can be created or maintained. + */ + maxBuffers; + /** + * A Node.js Readable stream. 
+ */ + readable; + /** + * OutgoingHandler is an async function triggered by BufferScheduler when there + * are available buffers in outgoing array. + */ + outgoingHandler; + /** + * An internal event emitter. + */ + emitter = new events_1.EventEmitter(); + /** + * Concurrency of executing outgoingHandlers. (0 lesser than concurrency lesser than or equal to maxBuffers) + */ + concurrency; + /** + * An internal offset marker to track data offset in bytes of next outgoingHandler. + */ + offset = 0; + /** + * An internal marker to track whether stream is end. + */ + isStreamEnd = false; + /** + * An internal marker to track whether stream or outgoingHandler returns error. + */ + isError = false; + /** + * How many handlers are executing. + */ + executingOutgoingHandlers = 0; + /** + * Encoding of the input Readable stream which has string data type instead of Buffer. + */ + encoding; + /** + * How many buffers have been allocated. + */ + numBuffers = 0; + /** + * Because this class doesn't know how much data every time stream pops, which + * is defined by highWaterMarker of the stream. So BufferScheduler will cache + * data received from the stream, when data in unresolvedDataArray exceeds the + * blockSize defined, it will try to concat a blockSize of buffer, fill into available + * buffers from incoming and push to outgoing array. + */ + unresolvedDataArray = []; + /** + * How much data consisted in unresolvedDataArray. + */ + unresolvedLength = 0; + /** + * The array includes all the available buffers can be used to fill data from stream. + */ + incoming = []; + /** + * The array (queue) includes all the buffers filled from stream data. + */ + outgoing = []; + /** + * Creates an instance of BufferScheduler. + * + * @param readable - A Node.js Readable stream + * @param bufferSize - Buffer size of every maintained buffer + * @param maxBuffers - How many buffers can be allocated + * @param outgoingHandler - An async function scheduled to be + * triggered when a buffer fully filled + * with stream data + * @param concurrency - Concurrency of executing outgoingHandlers (>0) + * @param encoding - [Optional] Encoding of Readable stream when it's a string stream + */ + constructor(readable, bufferSize, maxBuffers, outgoingHandler, concurrency, encoding) { + if (bufferSize <= 0) { + throw new RangeError(`bufferSize must be larger than 0, current is ${bufferSize}`); + } + if (maxBuffers <= 0) { + throw new RangeError(`maxBuffers must be larger than 0, current is ${maxBuffers}`); + } + if (concurrency <= 0) { + throw new RangeError(`concurrency must be larger than 0, current is ${concurrency}`); } + this.bufferSize = bufferSize; + this.maxBuffers = maxBuffers; + this.readable = readable; + this.outgoingHandler = outgoingHandler; + this.concurrency = concurrency; + this.encoding = encoding; } - }; - exports2.ContainerGetPropertiesHeaders = { - serializedName: "Container_getPropertiesHeaders", - type: { - name: "Composite", - className: "ContainerGetPropertiesHeaders", - modelProperties: { - metadata: { - serializedName: "x-ms-meta", - headerCollectionPrefix: "x-ms-meta-", - xmlName: "x-ms-meta", - type: { - name: "Dictionary", - value: { type: { name: "String" } } - } - }, - etag: { - serializedName: "etag", - xmlName: "etag", - type: { - name: "String" - } - }, - lastModified: { - serializedName: "last-modified", - xmlName: "last-modified", - type: { - name: "DateTimeRfc1123" - } - }, - leaseDuration: { - serializedName: "x-ms-lease-duration", - xmlName: "x-ms-lease-duration", - type: { - name: 
"Enum", - allowedValues: ["infinite", "fixed"] - } - }, - leaseState: { - serializedName: "x-ms-lease-state", - xmlName: "x-ms-lease-state", - type: { - name: "Enum", - allowedValues: [ - "available", - "leased", - "expired", - "breaking", - "broken" - ] - } - }, - leaseStatus: { - serializedName: "x-ms-lease-status", - xmlName: "x-ms-lease-status", - type: { - name: "Enum", - allowedValues: ["locked", "unlocked"] - } - }, - clientRequestId: { - serializedName: "x-ms-client-request-id", - xmlName: "x-ms-client-request-id", - type: { - name: "String" - } - }, - requestId: { - serializedName: "x-ms-request-id", - xmlName: "x-ms-request-id", - type: { - name: "String" - } - }, - version: { - serializedName: "x-ms-version", - xmlName: "x-ms-version", - type: { - name: "String" - } - }, - date: { - serializedName: "date", - xmlName: "date", - type: { - name: "DateTimeRfc1123" - } - }, - blobPublicAccess: { - serializedName: "x-ms-blob-public-access", - xmlName: "x-ms-blob-public-access", - type: { - name: "Enum", - allowedValues: ["container", "blob"] - } - }, - hasImmutabilityPolicy: { - serializedName: "x-ms-has-immutability-policy", - xmlName: "x-ms-has-immutability-policy", - type: { - name: "Boolean" - } - }, - hasLegalHold: { - serializedName: "x-ms-has-legal-hold", - xmlName: "x-ms-has-legal-hold", - type: { - name: "Boolean" - } - }, - defaultEncryptionScope: { - serializedName: "x-ms-default-encryption-scope", - xmlName: "x-ms-default-encryption-scope", - type: { - name: "String" - } - }, - denyEncryptionScopeOverride: { - serializedName: "x-ms-deny-encryption-scope-override", - xmlName: "x-ms-deny-encryption-scope-override", - type: { - name: "Boolean" + /** + * Start the scheduler, will return error when stream of any of the outgoingHandlers + * returns error. + * + */ + async do() { + return new Promise((resolve6, reject) => { + this.readable.on("data", (data) => { + data = typeof data === "string" ? 
Buffer.from(data, this.encoding) : data; + this.appendUnresolvedData(data); + if (!this.resolveData()) { + this.readable.pause(); } - }, - isImmutableStorageWithVersioningEnabled: { - serializedName: "x-ms-immutable-storage-with-versioning-enabled", - xmlName: "x-ms-immutable-storage-with-versioning-enabled", - type: { - name: "Boolean" + }); + this.readable.on("error", (err) => { + this.emitter.emit("error", err); + }); + this.readable.on("end", () => { + this.isStreamEnd = true; + this.emitter.emit("checkEnd"); + }); + this.emitter.on("error", (err) => { + this.isError = true; + this.readable.pause(); + reject(err); + }); + this.emitter.on("checkEnd", () => { + if (this.outgoing.length > 0) { + this.triggerOutgoingHandlers(); + return; } - }, - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" + if (this.isStreamEnd && this.executingOutgoingHandlers === 0) { + if (this.unresolvedLength > 0 && this.unresolvedLength < this.bufferSize) { + const buffer = this.shiftBufferFromUnresolvedDataArray(); + this.outgoingHandler(() => buffer.getReadableStream(), buffer.size, this.offset).then(resolve6).catch(reject); + } else if (this.unresolvedLength >= this.bufferSize) { + return; + } else { + resolve6(); + } } - } - } + }); + }); } - }; - exports2.ContainerGetPropertiesExceptionHeaders = { - serializedName: "Container_getPropertiesExceptionHeaders", - type: { - name: "Composite", - className: "ContainerGetPropertiesExceptionHeaders", - modelProperties: { - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } - } + /** + * Insert a new data into unresolved array. + * + * @param data - + */ + appendUnresolvedData(data) { + this.unresolvedDataArray.push(data); + this.unresolvedLength += data.length; + } + /** + * Try to shift a buffer with size in blockSize. The buffer returned may be less + * than blockSize when data in unresolvedDataArray is less than bufferSize. + * + */ + shiftBufferFromUnresolvedDataArray(buffer) { + if (!buffer) { + buffer = new PooledBuffer_js_1.PooledBuffer(this.bufferSize, this.unresolvedDataArray, this.unresolvedLength); + } else { + buffer.fill(this.unresolvedDataArray, this.unresolvedLength); } + this.unresolvedLength -= buffer.size; + return buffer; } - }; - exports2.ContainerDeleteHeaders = { - serializedName: "Container_deleteHeaders", - type: { - name: "Composite", - className: "ContainerDeleteHeaders", - modelProperties: { - clientRequestId: { - serializedName: "x-ms-client-request-id", - xmlName: "x-ms-client-request-id", - type: { - name: "String" - } - }, - requestId: { - serializedName: "x-ms-request-id", - xmlName: "x-ms-request-id", - type: { - name: "String" - } - }, - version: { - serializedName: "x-ms-version", - xmlName: "x-ms-version", - type: { - name: "String" - } - }, - date: { - serializedName: "date", - xmlName: "date", - type: { - name: "DateTimeRfc1123" - } - }, - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" + /** + * Resolve data in unresolvedDataArray. For every buffer with size in blockSize + * shifted, it will try to get (or allocate a buffer) from incoming, and fill it, + * then push it into outgoing to be handled by outgoing handler. + * + * Return false when available buffers in incoming are not enough, else true. + * + * @returns Return false when buffers in incoming are not enough, else true. 
+ */ + resolveData() { + while (this.unresolvedLength >= this.bufferSize) { + let buffer; + if (this.incoming.length > 0) { + buffer = this.incoming.shift(); + this.shiftBufferFromUnresolvedDataArray(buffer); + } else { + if (this.numBuffers < this.maxBuffers) { + buffer = this.shiftBufferFromUnresolvedDataArray(); + this.numBuffers++; + } else { + return false; } } + this.outgoing.push(buffer); + this.triggerOutgoingHandlers(); } + return true; } - }; - exports2.ContainerDeleteExceptionHeaders = { - serializedName: "Container_deleteExceptionHeaders", - type: { - name: "Composite", - className: "ContainerDeleteExceptionHeaders", - modelProperties: { - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } + /** + * Try to trigger a outgoing handler for every buffer in outgoing. Stop when + * concurrency reaches. + */ + async triggerOutgoingHandlers() { + let buffer; + do { + if (this.executingOutgoingHandlers >= this.concurrency) { + return; + } + buffer = this.outgoing.shift(); + if (buffer) { + this.triggerOutgoingHandler(buffer); } + } while (buffer); + } + /** + * Trigger a outgoing handler for a buffer shifted from outgoing. + * + * @param buffer - + */ + async triggerOutgoingHandler(buffer) { + const bufferLength = buffer.size; + this.executingOutgoingHandlers++; + this.offset += bufferLength; + try { + await this.outgoingHandler(() => buffer.getReadableStream(), bufferLength, this.offset - bufferLength); + } catch (err) { + this.emitter.emit("error", err); + return; } + this.executingOutgoingHandlers--; + this.reuseBuffer(buffer); + this.emitter.emit("checkEnd"); } - }; - exports2.ContainerSetMetadataHeaders = { - serializedName: "Container_setMetadataHeaders", - type: { - name: "Composite", - className: "ContainerSetMetadataHeaders", - modelProperties: { - etag: { - serializedName: "etag", - xmlName: "etag", - type: { - name: "String" - } - }, - lastModified: { - serializedName: "last-modified", - xmlName: "last-modified", - type: { - name: "DateTimeRfc1123" - } - }, - clientRequestId: { - serializedName: "x-ms-client-request-id", - xmlName: "x-ms-client-request-id", - type: { - name: "String" - } - }, - requestId: { - serializedName: "x-ms-request-id", - xmlName: "x-ms-request-id", - type: { - name: "String" - } - }, - version: { - serializedName: "x-ms-version", - xmlName: "x-ms-version", - type: { - name: "String" - } - }, - date: { - serializedName: "date", - xmlName: "date", - type: { - name: "DateTimeRfc1123" - } - }, - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } - } + /** + * Return buffer used by outgoing handler into incoming. 
+ * + * @param buffer - + */ + reuseBuffer(buffer) { + this.incoming.push(buffer); + if (!this.isError && this.resolveData() && !this.isStreamEnd) { + this.readable.resume(); } } }; - exports2.ContainerSetMetadataExceptionHeaders = { - serializedName: "Container_setMetadataExceptionHeaders", - type: { - name: "Composite", - className: "ContainerSetMetadataExceptionHeaders", - modelProperties: { - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } - } - } + exports2.BufferScheduler = BufferScheduler; + } +}); + +// node_modules/@azure/storage-common/dist/commonjs/cache.js +var require_cache3 = __commonJS({ + "node_modules/@azure/storage-common/dist/commonjs/cache.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.getCachedDefaultHttpClient = getCachedDefaultHttpClient; + var core_rest_pipeline_1 = require_commonjs6(); + var _defaultHttpClient; + function getCachedDefaultHttpClient() { + if (!_defaultHttpClient) { + _defaultHttpClient = (0, core_rest_pipeline_1.createDefaultHttpClient)(); + } + return _defaultHttpClient; + } + } +}); + +// node_modules/@azure/storage-common/dist/commonjs/policies/RequestPolicy.js +var require_RequestPolicy2 = __commonJS({ + "node_modules/@azure/storage-common/dist/commonjs/policies/RequestPolicy.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.BaseRequestPolicy = void 0; + var BaseRequestPolicy = class { + _nextPolicy; + _options; + /** + * The main method to implement that manipulates a request/response. + */ + constructor(_nextPolicy, _options) { + this._nextPolicy = _nextPolicy; + this._options = _options; + } + /** + * Get whether or not a log with the provided log level should be logged. + * @param logLevel - The log level of the log that will be logged. + * @returns Whether or not a log with the provided log level should be logged. + */ + shouldLog(logLevel) { + return this._options.shouldLog(logLevel); + } + /** + * Attempt to log the provided message to the provided logger. If no logger was provided or if + * the log level does not meat the logger's threshold, then nothing will be logged. + * @param logLevel - The log level of this log. + * @param message - The message of this log. 
+ */ + log(logLevel, message) { + this._options.log(logLevel, message); } }; - exports2.ContainerGetAccessPolicyHeaders = { - serializedName: "Container_getAccessPolicyHeaders", - type: { - name: "Composite", - className: "ContainerGetAccessPolicyHeaders", - modelProperties: { - blobPublicAccess: { - serializedName: "x-ms-blob-public-access", - xmlName: "x-ms-blob-public-access", - type: { - name: "Enum", - allowedValues: ["container", "blob"] - } - }, - etag: { - serializedName: "etag", - xmlName: "etag", - type: { - name: "String" - } - }, - lastModified: { - serializedName: "last-modified", - xmlName: "last-modified", - type: { - name: "DateTimeRfc1123" - } - }, - clientRequestId: { - serializedName: "x-ms-client-request-id", - xmlName: "x-ms-client-request-id", - type: { - name: "String" - } - }, - requestId: { - serializedName: "x-ms-request-id", - xmlName: "x-ms-request-id", - type: { - name: "String" - } - }, - version: { - serializedName: "x-ms-version", - xmlName: "x-ms-version", - type: { - name: "String" - } - }, - date: { - serializedName: "date", - xmlName: "date", - type: { - name: "DateTimeRfc1123" - } - }, - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } - } - } + exports2.BaseRequestPolicy = BaseRequestPolicy; + } +}); + +// node_modules/@azure/storage-common/dist/commonjs/utils/constants.js +var require_constants16 = __commonJS({ + "node_modules/@azure/storage-common/dist/commonjs/utils/constants.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.PathStylePorts = exports2.DevelopmentConnectionString = exports2.HeaderConstants = exports2.URLConstants = exports2.SDK_VERSION = void 0; + exports2.SDK_VERSION = "1.0.0"; + exports2.URLConstants = { + Parameters: { + FORCE_BROWSER_NO_CACHE: "_", + SIGNATURE: "sig", + SNAPSHOT: "snapshot", + VERSIONID: "versionid", + TIMEOUT: "timeout" } }; - exports2.ContainerGetAccessPolicyExceptionHeaders = { - serializedName: "Container_getAccessPolicyExceptionHeaders", - type: { - name: "Composite", - className: "ContainerGetAccessPolicyExceptionHeaders", - modelProperties: { - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } + exports2.HeaderConstants = { + AUTHORIZATION: "Authorization", + AUTHORIZATION_SCHEME: "Bearer", + CONTENT_ENCODING: "Content-Encoding", + CONTENT_ID: "Content-ID", + CONTENT_LANGUAGE: "Content-Language", + CONTENT_LENGTH: "Content-Length", + CONTENT_MD5: "Content-Md5", + CONTENT_TRANSFER_ENCODING: "Content-Transfer-Encoding", + CONTENT_TYPE: "Content-Type", + COOKIE: "Cookie", + DATE: "date", + IF_MATCH: "if-match", + IF_MODIFIED_SINCE: "if-modified-since", + IF_NONE_MATCH: "if-none-match", + IF_UNMODIFIED_SINCE: "if-unmodified-since", + PREFIX_FOR_STORAGE: "x-ms-", + RANGE: "Range", + USER_AGENT: "User-Agent", + X_MS_CLIENT_REQUEST_ID: "x-ms-client-request-id", + X_MS_COPY_SOURCE: "x-ms-copy-source", + X_MS_DATE: "x-ms-date", + X_MS_ERROR_CODE: "x-ms-error-code", + X_MS_VERSION: "x-ms-version", + X_MS_CopySourceErrorCode: "x-ms-copy-source-error-code" + }; + exports2.DevelopmentConnectionString = `DefaultEndpointsProtocol=http;AccountName=devstoreaccount1;AccountKey=Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==;BlobEndpoint=http://127.0.0.1:10000/devstoreaccount1;`; + exports2.PathStylePorts = [ + "10000", + "10001", + "10002", + "10003", + "10004", + "10100", + "10101", + "10102", + "10103", 
+ "10104", + "11000", + "11001", + "11002", + "11003", + "11004", + "11100", + "11101", + "11102", + "11103", + "11104" + ]; + } +}); + +// node_modules/@azure/storage-common/dist/commonjs/utils/utils.common.js +var require_utils_common2 = __commonJS({ + "node_modules/@azure/storage-common/dist/commonjs/utils/utils.common.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.escapeURLPath = escapeURLPath; + exports2.getValueInConnString = getValueInConnString; + exports2.extractConnectionStringParts = extractConnectionStringParts; + exports2.appendToURLPath = appendToURLPath; + exports2.setURLParameter = setURLParameter; + exports2.getURLParameter = getURLParameter; + exports2.setURLHost = setURLHost; + exports2.getURLPath = getURLPath; + exports2.getURLScheme = getURLScheme; + exports2.getURLPathAndQuery = getURLPathAndQuery; + exports2.getURLQueries = getURLQueries; + exports2.appendToURLQuery = appendToURLQuery; + exports2.truncatedISO8061Date = truncatedISO8061Date; + exports2.base64encode = base64encode; + exports2.base64decode = base64decode; + exports2.generateBlockID = generateBlockID; + exports2.delay = delay2; + exports2.padStart = padStart2; + exports2.sanitizeURL = sanitizeURL; + exports2.sanitizeHeaders = sanitizeHeaders; + exports2.iEqual = iEqual; + exports2.getAccountNameFromUrl = getAccountNameFromUrl; + exports2.isIpEndpointStyle = isIpEndpointStyle; + exports2.attachCredential = attachCredential; + exports2.httpAuthorizationToString = httpAuthorizationToString; + exports2.EscapePath = EscapePath; + exports2.assertResponse = assertResponse; + var core_rest_pipeline_1 = require_commonjs6(); + var core_util_1 = require_commonjs4(); + var constants_js_1 = require_constants16(); + function escapeURLPath(url2) { + const urlParsed = new URL(url2); + let path13 = urlParsed.pathname; + path13 = path13 || "/"; + path13 = escape(path13); + urlParsed.pathname = path13; + return urlParsed.toString(); + } + function getProxyUriFromDevConnString(connectionString) { + let proxyUri = ""; + if (connectionString.search("DevelopmentStorageProxyUri=") !== -1) { + const matchCredentials = connectionString.split(";"); + for (const element of matchCredentials) { + if (element.trim().startsWith("DevelopmentStorageProxyUri=")) { + proxyUri = element.trim().match("DevelopmentStorageProxyUri=(.*)")[1]; } } } - }; - exports2.ContainerSetAccessPolicyHeaders = { - serializedName: "Container_setAccessPolicyHeaders", - type: { - name: "Composite", - className: "ContainerSetAccessPolicyHeaders", - modelProperties: { - etag: { - serializedName: "etag", - xmlName: "etag", - type: { - name: "String" - } - }, - lastModified: { - serializedName: "last-modified", - xmlName: "last-modified", - type: { - name: "DateTimeRfc1123" - } - }, - clientRequestId: { - serializedName: "x-ms-client-request-id", - xmlName: "x-ms-client-request-id", - type: { - name: "String" - } - }, - requestId: { - serializedName: "x-ms-request-id", - xmlName: "x-ms-request-id", - type: { - name: "String" - } - }, - version: { - serializedName: "x-ms-version", - xmlName: "x-ms-version", - type: { - name: "String" - } - }, - date: { - serializedName: "date", - xmlName: "date", - type: { - name: "DateTimeRfc1123" - } - }, - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } - } + return proxyUri; + } + function getValueInConnString(connectionString, argument) { + const elements = connectionString.split(";"); + for (const 
element of elements) { + if (element.trim().startsWith(argument)) { + return element.trim().match(argument + "=(.*)")[1]; } } - }; - exports2.ContainerSetAccessPolicyExceptionHeaders = { - serializedName: "Container_setAccessPolicyExceptionHeaders", - type: { - name: "Composite", - className: "ContainerSetAccessPolicyExceptionHeaders", - modelProperties: { - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } - } - } + return ""; + } + function extractConnectionStringParts(connectionString) { + let proxyUri = ""; + if (connectionString.startsWith("UseDevelopmentStorage=true")) { + proxyUri = getProxyUriFromDevConnString(connectionString); + connectionString = constants_js_1.DevelopmentConnectionString; } - }; - exports2.ContainerRestoreHeaders = { - serializedName: "Container_restoreHeaders", - type: { - name: "Composite", - className: "ContainerRestoreHeaders", - modelProperties: { - clientRequestId: { - serializedName: "x-ms-client-request-id", - xmlName: "x-ms-client-request-id", - type: { - name: "String" - } - }, - requestId: { - serializedName: "x-ms-request-id", - xmlName: "x-ms-request-id", - type: { - name: "String" - } - }, - version: { - serializedName: "x-ms-version", - xmlName: "x-ms-version", - type: { - name: "String" - } - }, - date: { - serializedName: "date", - xmlName: "date", - type: { - name: "DateTimeRfc1123" - } - }, - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } + let blobEndpoint = getValueInConnString(connectionString, "BlobEndpoint"); + blobEndpoint = blobEndpoint.endsWith("/") ? blobEndpoint.slice(0, -1) : blobEndpoint; + if (connectionString.search("DefaultEndpointsProtocol=") !== -1 && connectionString.search("AccountKey=") !== -1) { + let defaultEndpointsProtocol = ""; + let accountName = ""; + let accountKey = Buffer.from("accountKey", "base64"); + let endpointSuffix = ""; + accountName = getValueInConnString(connectionString, "AccountName"); + accountKey = Buffer.from(getValueInConnString(connectionString, "AccountKey"), "base64"); + if (!blobEndpoint) { + defaultEndpointsProtocol = getValueInConnString(connectionString, "DefaultEndpointsProtocol"); + const protocol = defaultEndpointsProtocol.toLowerCase(); + if (protocol !== "https" && protocol !== "http") { + throw new Error("Invalid DefaultEndpointsProtocol in the provided Connection String. 
Expecting 'https' or 'http'"); } - } - } - }; - exports2.ContainerRestoreExceptionHeaders = { - serializedName: "Container_restoreExceptionHeaders", - type: { - name: "Composite", - className: "ContainerRestoreExceptionHeaders", - modelProperties: { - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } + endpointSuffix = getValueInConnString(connectionString, "EndpointSuffix"); + if (!endpointSuffix) { + throw new Error("Invalid EndpointSuffix in the provided Connection String"); } + blobEndpoint = `${defaultEndpointsProtocol}://${accountName}.blob.${endpointSuffix}`; } - } - }; - exports2.ContainerRenameHeaders = { - serializedName: "Container_renameHeaders", - type: { - name: "Composite", - className: "ContainerRenameHeaders", - modelProperties: { - clientRequestId: { - serializedName: "x-ms-client-request-id", - xmlName: "x-ms-client-request-id", - type: { - name: "String" - } - }, - requestId: { - serializedName: "x-ms-request-id", - xmlName: "x-ms-request-id", - type: { - name: "String" - } - }, - version: { - serializedName: "x-ms-version", - xmlName: "x-ms-version", - type: { - name: "String" - } - }, - date: { - serializedName: "date", - xmlName: "date", - type: { - name: "DateTimeRfc1123" - } - }, - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } - } + if (!accountName) { + throw new Error("Invalid AccountName in the provided Connection String"); + } else if (accountKey.length === 0) { + throw new Error("Invalid AccountKey in the provided Connection String"); } - } - }; - exports2.ContainerRenameExceptionHeaders = { - serializedName: "Container_renameExceptionHeaders", - type: { - name: "Composite", - className: "ContainerRenameExceptionHeaders", - modelProperties: { - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } - } + return { + kind: "AccountConnString", + url: blobEndpoint, + accountName, + accountKey, + proxyUri + }; + } else { + let accountSas = getValueInConnString(connectionString, "SharedAccessSignature"); + let accountName = getValueInConnString(connectionString, "AccountName"); + if (!accountName) { + accountName = getAccountNameFromUrl(blobEndpoint); } - } - }; - exports2.ContainerSubmitBatchHeaders = { - serializedName: "Container_submitBatchHeaders", - type: { - name: "Composite", - className: "ContainerSubmitBatchHeaders", - modelProperties: { - contentType: { - serializedName: "content-type", - xmlName: "content-type", - type: { - name: "String" - } - }, - requestId: { - serializedName: "x-ms-request-id", - xmlName: "x-ms-request-id", - type: { - name: "String" - } - }, - version: { - serializedName: "x-ms-version", - xmlName: "x-ms-version", - type: { - name: "String" - } - } + if (!blobEndpoint) { + throw new Error("Invalid BlobEndpoint in the provided SAS Connection String"); + } else if (!accountSas) { + throw new Error("Invalid SharedAccessSignature in the provided SAS Connection String"); } - } - }; - exports2.ContainerSubmitBatchExceptionHeaders = { - serializedName: "Container_submitBatchExceptionHeaders", - type: { - name: "Composite", - className: "ContainerSubmitBatchExceptionHeaders", - modelProperties: { - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } - } + if (accountSas.startsWith("?")) { + accountSas = accountSas.substring(1); } + return { kind: "SASConnString", url: blobEndpoint, accountName, 
accountSas }; } - }; - exports2.ContainerFilterBlobsHeaders = { - serializedName: "Container_filterBlobsHeaders", - type: { - name: "Composite", - className: "ContainerFilterBlobsHeaders", - modelProperties: { - clientRequestId: { - serializedName: "x-ms-client-request-id", - xmlName: "x-ms-client-request-id", - type: { - name: "String" - } - }, - requestId: { - serializedName: "x-ms-request-id", - xmlName: "x-ms-request-id", - type: { - name: "String" - } - }, - version: { - serializedName: "x-ms-version", - xmlName: "x-ms-version", - type: { - name: "String" - } - }, - date: { - serializedName: "date", - xmlName: "date", - type: { - name: "DateTimeRfc1123" - } + } + function escape(text) { + return encodeURIComponent(text).replace(/%2F/g, "/").replace(/'/g, "%27").replace(/\+/g, "%20").replace(/%25/g, "%"); + } + function appendToURLPath(url2, name) { + const urlParsed = new URL(url2); + let path13 = urlParsed.pathname; + path13 = path13 ? path13.endsWith("/") ? `${path13}${name}` : `${path13}/${name}` : name; + urlParsed.pathname = path13; + return urlParsed.toString(); + } + function setURLParameter(url2, name, value) { + const urlParsed = new URL(url2); + const encodedName = encodeURIComponent(name); + const encodedValue = value ? encodeURIComponent(value) : void 0; + const searchString = urlParsed.search === "" ? "?" : urlParsed.search; + const searchPieces = []; + for (const pair of searchString.slice(1).split("&")) { + if (pair) { + const [key] = pair.split("=", 2); + if (key !== encodedName) { + searchPieces.push(pair); } } } - }; - exports2.ContainerFilterBlobsExceptionHeaders = { - serializedName: "Container_filterBlobsExceptionHeaders", - type: { - name: "Composite", - className: "ContainerFilterBlobsExceptionHeaders", - modelProperties: { - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } - } - } + if (encodedValue) { + searchPieces.push(`${encodedName}=${encodedValue}`); } - }; - exports2.ContainerAcquireLeaseHeaders = { - serializedName: "Container_acquireLeaseHeaders", - type: { - name: "Composite", - className: "ContainerAcquireLeaseHeaders", - modelProperties: { - etag: { - serializedName: "etag", - xmlName: "etag", - type: { - name: "String" - } - }, - lastModified: { - serializedName: "last-modified", - xmlName: "last-modified", - type: { - name: "DateTimeRfc1123" - } - }, - leaseId: { - serializedName: "x-ms-lease-id", - xmlName: "x-ms-lease-id", - type: { - name: "String" - } - }, - clientRequestId: { - serializedName: "x-ms-client-request-id", - xmlName: "x-ms-client-request-id", - type: { - name: "String" - } - }, - requestId: { - serializedName: "x-ms-request-id", - xmlName: "x-ms-request-id", - type: { - name: "String" - } - }, - version: { - serializedName: "x-ms-version", - xmlName: "x-ms-version", - type: { - name: "String" - } - }, - date: { - serializedName: "date", - xmlName: "date", - type: { - name: "DateTimeRfc1123" - } + urlParsed.search = searchPieces.length ? `?${searchPieces.join("&")}` : ""; + return urlParsed.toString(); + } + function getURLParameter(url2, name) { + const urlParsed = new URL(url2); + return urlParsed.searchParams.get(name) ?? 
void 0; + } + function setURLHost(url2, host) { + const urlParsed = new URL(url2); + urlParsed.hostname = host; + return urlParsed.toString(); + } + function getURLPath(url2) { + try { + const urlParsed = new URL(url2); + return urlParsed.pathname; + } catch (e) { + return void 0; + } + } + function getURLScheme(url2) { + try { + const urlParsed = new URL(url2); + return urlParsed.protocol.endsWith(":") ? urlParsed.protocol.slice(0, -1) : urlParsed.protocol; + } catch (e) { + return void 0; + } + } + function getURLPathAndQuery(url2) { + const urlParsed = new URL(url2); + const pathString = urlParsed.pathname; + if (!pathString) { + throw new RangeError("Invalid url without valid path."); + } + let queryString = urlParsed.search || ""; + queryString = queryString.trim(); + if (queryString !== "") { + queryString = queryString.startsWith("?") ? queryString : `?${queryString}`; + } + return `${pathString}${queryString}`; + } + function getURLQueries(url2) { + let queryString = new URL(url2).search; + if (!queryString) { + return {}; + } + queryString = queryString.trim(); + queryString = queryString.startsWith("?") ? queryString.substring(1) : queryString; + let querySubStrings = queryString.split("&"); + querySubStrings = querySubStrings.filter((value) => { + const indexOfEqual = value.indexOf("="); + const lastIndexOfEqual = value.lastIndexOf("="); + return indexOfEqual > 0 && indexOfEqual === lastIndexOfEqual && lastIndexOfEqual < value.length - 1; + }); + const queries = {}; + for (const querySubString of querySubStrings) { + const splitResults = querySubString.split("="); + const key = splitResults[0]; + const value = splitResults[1]; + queries[key] = value; + } + return queries; + } + function appendToURLQuery(url2, queryParts) { + const urlParsed = new URL(url2); + let query = urlParsed.search; + if (query) { + query += "&" + queryParts; + } else { + query = queryParts; + } + urlParsed.search = query; + return urlParsed.toString(); + } + function truncatedISO8061Date(date, withMilliseconds = true) { + const dateString = date.toISOString(); + return withMilliseconds ? dateString.substring(0, dateString.length - 1) + "0000Z" : dateString.substring(0, dateString.length - 5) + "Z"; + } + function base64encode(content) { + return !core_util_1.isNodeLike ? btoa(content) : Buffer.from(content).toString("base64"); + } + function base64decode(encodedString) { + return !core_util_1.isNodeLike ? 
atob(encodedString) : Buffer.from(encodedString, "base64").toString(); + } + function generateBlockID(blockIDPrefix, blockIndex) { + const maxSourceStringLength = 48; + const maxBlockIndexLength = 6; + const maxAllowedBlockIDPrefixLength = maxSourceStringLength - maxBlockIndexLength; + if (blockIDPrefix.length > maxAllowedBlockIDPrefixLength) { + blockIDPrefix = blockIDPrefix.slice(0, maxAllowedBlockIDPrefixLength); + } + const res = blockIDPrefix + padStart2(blockIndex.toString(), maxSourceStringLength - blockIDPrefix.length, "0"); + return base64encode(res); + } + async function delay2(timeInMs, aborter, abortError) { + return new Promise((resolve6, reject) => { + let timeout; + const abortHandler = () => { + if (timeout !== void 0) { + clearTimeout(timeout); } - } - } - }; - exports2.ContainerAcquireLeaseExceptionHeaders = { - serializedName: "Container_acquireLeaseExceptionHeaders", - type: { - name: "Composite", - className: "ContainerAcquireLeaseExceptionHeaders", - modelProperties: { - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } + reject(abortError); + }; + const resolveHandler = () => { + if (aborter !== void 0) { + aborter.removeEventListener("abort", abortHandler); } + resolve6(); + }; + timeout = setTimeout(resolveHandler, timeInMs); + if (aborter !== void 0) { + aborter.addEventListener("abort", abortHandler); } + }); + } + function padStart2(currentString, targetLength, padString = " ") { + if (String.prototype.padStart) { + return currentString.padStart(targetLength, padString); } - }; - exports2.ContainerReleaseLeaseHeaders = { - serializedName: "Container_releaseLeaseHeaders", - type: { - name: "Composite", - className: "ContainerReleaseLeaseHeaders", - modelProperties: { - etag: { - serializedName: "etag", - xmlName: "etag", - type: { - name: "String" - } - }, - lastModified: { - serializedName: "last-modified", - xmlName: "last-modified", - type: { - name: "DateTimeRfc1123" - } - }, - clientRequestId: { - serializedName: "x-ms-client-request-id", - xmlName: "x-ms-client-request-id", - type: { - name: "String" - } - }, - requestId: { - serializedName: "x-ms-request-id", - xmlName: "x-ms-request-id", - type: { - name: "String" - } - }, - version: { - serializedName: "x-ms-version", - xmlName: "x-ms-version", - type: { - name: "String" - } - }, - date: { - serializedName: "date", - xmlName: "date", - type: { - name: "DateTimeRfc1123" - } - } + padString = padString || " "; + if (currentString.length > targetLength) { + return currentString; + } else { + targetLength = targetLength - currentString.length; + if (targetLength > padString.length) { + padString += padString.repeat(targetLength / padString.length); } + return padString.slice(0, targetLength) + currentString; } - }; - exports2.ContainerReleaseLeaseExceptionHeaders = { - serializedName: "Container_releaseLeaseExceptionHeaders", - type: { - name: "Composite", - className: "ContainerReleaseLeaseExceptionHeaders", - modelProperties: { - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } - } - } + } + function sanitizeURL(url2) { + let safeURL = url2; + if (getURLParameter(safeURL, constants_js_1.URLConstants.Parameters.SIGNATURE)) { + safeURL = setURLParameter(safeURL, constants_js_1.URLConstants.Parameters.SIGNATURE, "*****"); } - }; - exports2.ContainerRenewLeaseHeaders = { - serializedName: "Container_renewLeaseHeaders", - type: { - name: "Composite", - className: 
"ContainerRenewLeaseHeaders", - modelProperties: { - etag: { - serializedName: "etag", - xmlName: "etag", - type: { - name: "String" - } - }, - lastModified: { - serializedName: "last-modified", - xmlName: "last-modified", - type: { - name: "DateTimeRfc1123" - } - }, - leaseId: { - serializedName: "x-ms-lease-id", - xmlName: "x-ms-lease-id", - type: { - name: "String" - } - }, - clientRequestId: { - serializedName: "x-ms-client-request-id", - xmlName: "x-ms-client-request-id", - type: { - name: "String" - } - }, - requestId: { - serializedName: "x-ms-request-id", - xmlName: "x-ms-request-id", - type: { - name: "String" - } - }, - version: { - serializedName: "x-ms-version", - xmlName: "x-ms-version", - type: { - name: "String" - } - }, - date: { - serializedName: "date", - xmlName: "date", - type: { - name: "DateTimeRfc1123" - } - } + return safeURL; + } + function sanitizeHeaders(originalHeader) { + const headers = (0, core_rest_pipeline_1.createHttpHeaders)(); + for (const [name, value] of originalHeader) { + if (name.toLowerCase() === constants_js_1.HeaderConstants.AUTHORIZATION.toLowerCase()) { + headers.set(name, "*****"); + } else if (name.toLowerCase() === constants_js_1.HeaderConstants.X_MS_COPY_SOURCE) { + headers.set(name, sanitizeURL(value)); + } else { + headers.set(name, value); } } - }; - exports2.ContainerRenewLeaseExceptionHeaders = { - serializedName: "Container_renewLeaseExceptionHeaders", - type: { - name: "Composite", - className: "ContainerRenewLeaseExceptionHeaders", - modelProperties: { - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } - } + return headers; + } + function iEqual(str1, str2) { + return str1.toLocaleLowerCase() === str2.toLocaleLowerCase(); + } + function getAccountNameFromUrl(url2) { + const parsedUrl = new URL(url2); + let accountName; + try { + if (parsedUrl.hostname.split(".")[1] === "blob") { + accountName = parsedUrl.hostname.split(".")[0]; + } else if (isIpEndpointStyle(parsedUrl)) { + accountName = parsedUrl.pathname.split("/")[1]; + } else { + accountName = ""; } + return accountName; + } catch (error3) { + throw new Error("Unable to extract accountName with provided information."); } - }; - exports2.ContainerBreakLeaseHeaders = { - serializedName: "Container_breakLeaseHeaders", - type: { - name: "Composite", - className: "ContainerBreakLeaseHeaders", - modelProperties: { - etag: { - serializedName: "etag", - xmlName: "etag", - type: { - name: "String" - } - }, - lastModified: { - serializedName: "last-modified", - xmlName: "last-modified", - type: { - name: "DateTimeRfc1123" - } - }, - leaseTime: { - serializedName: "x-ms-lease-time", - xmlName: "x-ms-lease-time", - type: { - name: "Number" - } - }, - clientRequestId: { - serializedName: "x-ms-client-request-id", - xmlName: "x-ms-client-request-id", - type: { - name: "String" - } - }, - requestId: { - serializedName: "x-ms-request-id", - xmlName: "x-ms-request-id", - type: { - name: "String" - } - }, - version: { - serializedName: "x-ms-version", - xmlName: "x-ms-version", - type: { - name: "String" - } - }, - date: { - serializedName: "date", - xmlName: "date", - type: { - name: "DateTimeRfc1123" - } - } - } + } + function isIpEndpointStyle(parsedUrl) { + const host = parsedUrl.host; + return /^.*:.*:.*$|^(localhost|host.docker.internal)(:[0-9]+)?$|^(\d|[1-9]\d|1\d\d|2[0-4]\d|25[0-5])(\.(\d|[1-9]\d|1\d\d|2[0-4]\d|25[0-5])){3}(:[0-9]+)?$/.test(host) || Boolean(parsedUrl.port) && 
constants_js_1.PathStylePorts.includes(parsedUrl.port); + } + function attachCredential(thing, credential) { + thing.credential = credential; + return thing; + } + function httpAuthorizationToString(httpAuthorization) { + return httpAuthorization ? httpAuthorization.scheme + " " + httpAuthorization.value : void 0; + } + function EscapePath(blobName) { + const split = blobName.split("/"); + for (let i = 0; i < split.length; i++) { + split[i] = encodeURIComponent(split[i]); } - }; - exports2.ContainerBreakLeaseExceptionHeaders = { - serializedName: "Container_breakLeaseExceptionHeaders", - type: { - name: "Composite", - className: "ContainerBreakLeaseExceptionHeaders", - modelProperties: { - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } - } - } + return split.join("/"); + } + function assertResponse(response) { + if (`_response` in response) { + return response; } - }; - exports2.ContainerChangeLeaseHeaders = { - serializedName: "Container_changeLeaseHeaders", - type: { - name: "Composite", - className: "ContainerChangeLeaseHeaders", - modelProperties: { - etag: { - serializedName: "etag", - xmlName: "etag", - type: { - name: "String" - } - }, - lastModified: { - serializedName: "last-modified", - xmlName: "last-modified", - type: { - name: "DateTimeRfc1123" - } - }, - leaseId: { - serializedName: "x-ms-lease-id", - xmlName: "x-ms-lease-id", - type: { - name: "String" - } - }, - clientRequestId: { - serializedName: "x-ms-client-request-id", - xmlName: "x-ms-client-request-id", - type: { - name: "String" - } - }, - requestId: { - serializedName: "x-ms-request-id", - xmlName: "x-ms-request-id", - type: { - name: "String" - } - }, - version: { - serializedName: "x-ms-version", - xmlName: "x-ms-version", - type: { - name: "String" - } - }, - date: { - serializedName: "date", - xmlName: "date", - type: { - name: "DateTimeRfc1123" - } - } + throw new TypeError(`Unexpected response object ${response}`); + } + } +}); + +// node_modules/@azure/storage-common/dist/commonjs/policies/StorageBrowserPolicy.js +var require_StorageBrowserPolicy = __commonJS({ + "node_modules/@azure/storage-common/dist/commonjs/policies/StorageBrowserPolicy.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.StorageBrowserPolicy = void 0; + var RequestPolicy_js_1 = require_RequestPolicy2(); + var core_util_1 = require_commonjs4(); + var constants_js_1 = require_constants16(); + var utils_common_js_1 = require_utils_common2(); + var StorageBrowserPolicy = class extends RequestPolicy_js_1.BaseRequestPolicy { + /** + * Creates an instance of StorageBrowserPolicy. + * @param nextPolicy - + * @param options - + */ + // The base class has a protected constructor. Adding a public one to enable constructing of this class. + /* eslint-disable-next-line @typescript-eslint/no-useless-constructor*/ + constructor(nextPolicy, options) { + super(nextPolicy, options); + } + /** + * Sends out request. 
+ * + * @param request - + */ + async sendRequest(request2) { + if (core_util_1.isNodeLike) { + return this._nextPolicy.sendRequest(request2); } + if (request2.method.toUpperCase() === "GET" || request2.method.toUpperCase() === "HEAD") { + request2.url = (0, utils_common_js_1.setURLParameter)(request2.url, constants_js_1.URLConstants.Parameters.FORCE_BROWSER_NO_CACHE, (/* @__PURE__ */ new Date()).getTime().toString()); + } + request2.headers.remove(constants_js_1.HeaderConstants.COOKIE); + request2.headers.remove(constants_js_1.HeaderConstants.CONTENT_LENGTH); + return this._nextPolicy.sendRequest(request2); } }; - exports2.ContainerChangeLeaseExceptionHeaders = { - serializedName: "Container_changeLeaseExceptionHeaders", - type: { - name: "Composite", - className: "ContainerChangeLeaseExceptionHeaders", - modelProperties: { - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } - } - } + exports2.StorageBrowserPolicy = StorageBrowserPolicy; + } +}); + +// node_modules/@azure/storage-common/dist/commonjs/StorageBrowserPolicyFactory.js +var require_StorageBrowserPolicyFactory = __commonJS({ + "node_modules/@azure/storage-common/dist/commonjs/StorageBrowserPolicyFactory.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.StorageBrowserPolicyFactory = exports2.StorageBrowserPolicy = void 0; + var StorageBrowserPolicy_js_1 = require_StorageBrowserPolicy(); + Object.defineProperty(exports2, "StorageBrowserPolicy", { enumerable: true, get: function() { + return StorageBrowserPolicy_js_1.StorageBrowserPolicy; + } }); + var StorageBrowserPolicyFactory = class { + /** + * Creates a StorageBrowserPolicyFactory object. + * + * @param nextPolicy - + * @param options - + */ + create(nextPolicy, options) { + return new StorageBrowserPolicy_js_1.StorageBrowserPolicy(nextPolicy, options); } }; - exports2.ContainerListBlobFlatSegmentHeaders = { - serializedName: "Container_listBlobFlatSegmentHeaders", - type: { - name: "Composite", - className: "ContainerListBlobFlatSegmentHeaders", - modelProperties: { - contentType: { - serializedName: "content-type", - xmlName: "content-type", - type: { - name: "String" - } - }, - clientRequestId: { - serializedName: "x-ms-client-request-id", - xmlName: "x-ms-client-request-id", - type: { - name: "String" - } - }, - requestId: { - serializedName: "x-ms-request-id", - xmlName: "x-ms-request-id", - type: { - name: "String" - } - }, - version: { - serializedName: "x-ms-version", - xmlName: "x-ms-version", - type: { - name: "String" - } - }, - date: { - serializedName: "date", - xmlName: "date", - type: { - name: "DateTimeRfc1123" - } - }, - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } - } - } + exports2.StorageBrowserPolicyFactory = StorageBrowserPolicyFactory; + } +}); + +// node_modules/@azure/storage-common/dist/commonjs/policies/CredentialPolicy.js +var require_CredentialPolicy2 = __commonJS({ + "node_modules/@azure/storage-common/dist/commonjs/policies/CredentialPolicy.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.CredentialPolicy = void 0; + var RequestPolicy_js_1 = require_RequestPolicy2(); + var CredentialPolicy = class extends RequestPolicy_js_1.BaseRequestPolicy { + /** + * Sends out request. 
+ * + * @param request - + */ + sendRequest(request2) { + return this._nextPolicy.sendRequest(this.signRequest(request2)); + } + /** + * Child classes must implement this method with request signing. This method + * will be executed in {@link sendRequest}. + * + * @param request - + */ + signRequest(request2) { + return request2; } }; - exports2.ContainerListBlobFlatSegmentExceptionHeaders = { - serializedName: "Container_listBlobFlatSegmentExceptionHeaders", - type: { - name: "Composite", - className: "ContainerListBlobFlatSegmentExceptionHeaders", - modelProperties: { - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } - } - } + exports2.CredentialPolicy = CredentialPolicy; + } +}); + +// node_modules/@azure/storage-common/dist/commonjs/policies/AnonymousCredentialPolicy.js +var require_AnonymousCredentialPolicy2 = __commonJS({ + "node_modules/@azure/storage-common/dist/commonjs/policies/AnonymousCredentialPolicy.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.AnonymousCredentialPolicy = void 0; + var CredentialPolicy_js_1 = require_CredentialPolicy2(); + var AnonymousCredentialPolicy = class extends CredentialPolicy_js_1.CredentialPolicy { + /** + * Creates an instance of AnonymousCredentialPolicy. + * @param nextPolicy - + * @param options - + */ + // The base class has a protected constructor. Adding a public one to enable constructing of this class. + /* eslint-disable-next-line @typescript-eslint/no-useless-constructor*/ + constructor(nextPolicy, options) { + super(nextPolicy, options); } }; - exports2.ContainerListBlobHierarchySegmentHeaders = { - serializedName: "Container_listBlobHierarchySegmentHeaders", - type: { - name: "Composite", - className: "ContainerListBlobHierarchySegmentHeaders", - modelProperties: { - contentType: { - serializedName: "content-type", - xmlName: "content-type", - type: { - name: "String" - } - }, - clientRequestId: { - serializedName: "x-ms-client-request-id", - xmlName: "x-ms-client-request-id", - type: { - name: "String" - } - }, - requestId: { - serializedName: "x-ms-request-id", - xmlName: "x-ms-request-id", - type: { - name: "String" - } - }, - version: { - serializedName: "x-ms-version", - xmlName: "x-ms-version", - type: { - name: "String" - } - }, - date: { - serializedName: "date", - xmlName: "date", - type: { - name: "DateTimeRfc1123" - } - }, - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } - } - } + exports2.AnonymousCredentialPolicy = AnonymousCredentialPolicy; + } +}); + +// node_modules/@azure/storage-common/dist/commonjs/credentials/Credential.js +var require_Credential2 = __commonJS({ + "node_modules/@azure/storage-common/dist/commonjs/credentials/Credential.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.Credential = void 0; + var Credential = class { + /** + * Creates a RequestPolicy object. 
+ * + * @param _nextPolicy - + * @param _options - + */ + create(_nextPolicy, _options) { + throw new Error("Method should be implemented in children classes."); } }; - exports2.ContainerListBlobHierarchySegmentExceptionHeaders = { - serializedName: "Container_listBlobHierarchySegmentExceptionHeaders", - type: { - name: "Composite", - className: "ContainerListBlobHierarchySegmentExceptionHeaders", - modelProperties: { - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } - } + exports2.Credential = Credential; + } +}); + +// node_modules/@azure/storage-common/dist/commonjs/credentials/AnonymousCredential.js +var require_AnonymousCredential2 = __commonJS({ + "node_modules/@azure/storage-common/dist/commonjs/credentials/AnonymousCredential.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.AnonymousCredential = void 0; + var AnonymousCredentialPolicy_js_1 = require_AnonymousCredentialPolicy2(); + var Credential_js_1 = require_Credential2(); + var AnonymousCredential = class extends Credential_js_1.Credential { + /** + * Creates an {@link AnonymousCredentialPolicy} object. + * + * @param nextPolicy - + * @param options - + */ + create(nextPolicy, options) { + return new AnonymousCredentialPolicy_js_1.AnonymousCredentialPolicy(nextPolicy, options); + } + }; + exports2.AnonymousCredential = AnonymousCredential; + } +}); + +// node_modules/@azure/storage-common/dist/commonjs/utils/SharedKeyComparator.js +var require_SharedKeyComparator2 = __commonJS({ + "node_modules/@azure/storage-common/dist/commonjs/utils/SharedKeyComparator.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.compareHeader = compareHeader; + var table_lv0 = new Uint32Array([ + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 1820, + 0, + 1823, + 1825, + 1827, + 1829, + 0, + 0, + 0, + 1837, + 2051, + 0, + 0, + 1843, + 0, + 3331, + 3354, + 3356, + 3358, + 3360, + 3362, + 3364, + 3366, + 3368, + 3370, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 3586, + 3593, + 3594, + 3610, + 3617, + 3619, + 3621, + 3628, + 3634, + 3637, + 3638, + 3656, + 3665, + 3696, + 3708, + 3710, + 3721, + 3722, + 3729, + 3737, + 3743, + 3746, + 3748, + 3750, + 3751, + 3753, + 0, + 0, + 0, + 1859, + 1860, + 1864, + 3586, + 3593, + 3594, + 3610, + 3617, + 3619, + 3621, + 3628, + 3634, + 3637, + 3638, + 3656, + 3665, + 3696, + 3708, + 3710, + 3721, + 3722, + 3729, + 3737, + 3743, + 3746, + 3748, + 3750, + 3751, + 3753, + 0, + 1868, + 0, + 1872, + 0 + ]); + var table_lv2 = new Uint32Array([ + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 18, + 18, + 18, + 18, + 18, + 18, + 18, + 18, + 18, + 18, + 18, + 18, + 18, + 18, + 18, + 18, + 18, + 18, + 18, + 18, + 18, + 18, + 18, + 18, + 18, + 18, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0 + ]); + var table_lv4 = new Uint32Array([ + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, 
+ 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 32786, + 0, + 0, + 0, + 0, + 0, + 33298, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0 + ]); + function compareHeader(lhs, rhs) { + if (isLessThan(lhs, rhs)) + return -1; + return 1; + } + function isLessThan(lhs, rhs) { + const tables = [table_lv0, table_lv2, table_lv4]; + let curr_level = 0; + let i = 0; + let j = 0; + while (curr_level < tables.length) { + if (curr_level === tables.length - 1 && i !== j) { + return i > j; } - } - }; - exports2.ContainerGetAccountInfoHeaders = { - serializedName: "Container_getAccountInfoHeaders", - type: { - name: "Composite", - className: "ContainerGetAccountInfoHeaders", - modelProperties: { - clientRequestId: { - serializedName: "x-ms-client-request-id", - xmlName: "x-ms-client-request-id", - type: { - name: "String" - } - }, - requestId: { - serializedName: "x-ms-request-id", - xmlName: "x-ms-request-id", - type: { - name: "String" - } - }, - version: { - serializedName: "x-ms-version", - xmlName: "x-ms-version", - type: { - name: "String" - } - }, - date: { - serializedName: "date", - xmlName: "date", - type: { - name: "DateTimeRfc1123" - } - }, - skuName: { - serializedName: "x-ms-sku-name", - xmlName: "x-ms-sku-name", - type: { - name: "Enum", - allowedValues: [ - "Standard_LRS", - "Standard_GRS", - "Standard_RAGRS", - "Standard_ZRS", - "Premium_LRS" - ] - } - }, - accountKind: { - serializedName: "x-ms-account-kind", - xmlName: "x-ms-account-kind", - type: { - name: "Enum", - allowedValues: [ - "Storage", - "BlobStorage", - "StorageV2", - "FileStorage", - "BlockBlobStorage" - ] - } - }, - isHierarchicalNamespaceEnabled: { - serializedName: "x-ms-is-hns-enabled", - xmlName: "x-ms-is-hns-enabled", - type: { - name: "Boolean" - } - } + const weight1 = i < lhs.length ? tables[curr_level][lhs[i].charCodeAt(0)] : 1; + const weight2 = j < rhs.length ? 
tables[curr_level][rhs[j].charCodeAt(0)] : 1; + if (weight1 === 1 && weight2 === 1) { + i = 0; + j = 0; + ++curr_level; + } else if (weight1 === weight2) { + ++i; + ++j; + } else if (weight1 === 0) { + ++i; + } else if (weight2 === 0) { + ++j; + } else { + return weight1 < weight2; } } - }; - exports2.ContainerGetAccountInfoExceptionHeaders = { - serializedName: "Container_getAccountInfoExceptionHeaders", - type: { - name: "Composite", - className: "ContainerGetAccountInfoExceptionHeaders", - modelProperties: { - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } - } - } + return false; + } + } +}); + +// node_modules/@azure/storage-common/dist/commonjs/policies/StorageSharedKeyCredentialPolicy.js +var require_StorageSharedKeyCredentialPolicy2 = __commonJS({ + "node_modules/@azure/storage-common/dist/commonjs/policies/StorageSharedKeyCredentialPolicy.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.StorageSharedKeyCredentialPolicy = void 0; + var constants_js_1 = require_constants16(); + var utils_common_js_1 = require_utils_common2(); + var CredentialPolicy_js_1 = require_CredentialPolicy2(); + var SharedKeyComparator_js_1 = require_SharedKeyComparator2(); + var StorageSharedKeyCredentialPolicy = class extends CredentialPolicy_js_1.CredentialPolicy { + /** + * Reference to StorageSharedKeyCredential which generates StorageSharedKeyCredentialPolicy + */ + factory; + /** + * Creates an instance of StorageSharedKeyCredentialPolicy. + * @param nextPolicy - + * @param options - + * @param factory - + */ + constructor(nextPolicy, options, factory) { + super(nextPolicy, options); + this.factory = factory; } - }; - exports2.BlobDownloadHeaders = { - serializedName: "Blob_downloadHeaders", - type: { - name: "Composite", - className: "BlobDownloadHeaders", - modelProperties: { - lastModified: { - serializedName: "last-modified", - xmlName: "last-modified", - type: { - name: "DateTimeRfc1123" - } - }, - createdOn: { - serializedName: "x-ms-creation-time", - xmlName: "x-ms-creation-time", - type: { - name: "DateTimeRfc1123" - } - }, - metadata: { - serializedName: "x-ms-meta", - headerCollectionPrefix: "x-ms-meta-", - xmlName: "x-ms-meta", - type: { - name: "Dictionary", - value: { type: { name: "String" } } - } - }, - objectReplicationPolicyId: { - serializedName: "x-ms-or-policy-id", - xmlName: "x-ms-or-policy-id", - type: { - name: "String" - } - }, - objectReplicationRules: { - serializedName: "x-ms-or", - headerCollectionPrefix: "x-ms-or-", - xmlName: "x-ms-or", - type: { - name: "Dictionary", - value: { type: { name: "String" } } - } - }, - contentLength: { - serializedName: "content-length", - xmlName: "content-length", - type: { - name: "Number" - } - }, - contentType: { - serializedName: "content-type", - xmlName: "content-type", - type: { - name: "String" - } - }, - contentRange: { - serializedName: "content-range", - xmlName: "content-range", - type: { - name: "String" - } - }, - etag: { - serializedName: "etag", - xmlName: "etag", - type: { - name: "String" - } - }, - contentMD5: { - serializedName: "content-md5", - xmlName: "content-md5", - type: { - name: "ByteArray" - } - }, - contentEncoding: { - serializedName: "content-encoding", - xmlName: "content-encoding", - type: { - name: "String" - } - }, - cacheControl: { - serializedName: "cache-control", - xmlName: "cache-control", - type: { - name: "String" - } - }, - contentDisposition: { - serializedName: 
"content-disposition", - xmlName: "content-disposition", - type: { - name: "String" - } - }, - contentLanguage: { - serializedName: "content-language", - xmlName: "content-language", - type: { - name: "String" - } - }, - blobSequenceNumber: { - serializedName: "x-ms-blob-sequence-number", - xmlName: "x-ms-blob-sequence-number", - type: { - name: "Number" - } - }, - blobType: { - serializedName: "x-ms-blob-type", - xmlName: "x-ms-blob-type", - type: { - name: "Enum", - allowedValues: ["BlockBlob", "PageBlob", "AppendBlob"] - } - }, - copyCompletedOn: { - serializedName: "x-ms-copy-completion-time", - xmlName: "x-ms-copy-completion-time", - type: { - name: "DateTimeRfc1123" - } - }, - copyStatusDescription: { - serializedName: "x-ms-copy-status-description", - xmlName: "x-ms-copy-status-description", - type: { - name: "String" - } - }, - copyId: { - serializedName: "x-ms-copy-id", - xmlName: "x-ms-copy-id", - type: { - name: "String" - } - }, - copyProgress: { - serializedName: "x-ms-copy-progress", - xmlName: "x-ms-copy-progress", - type: { - name: "String" - } - }, - copySource: { - serializedName: "x-ms-copy-source", - xmlName: "x-ms-copy-source", - type: { - name: "String" - } - }, - copyStatus: { - serializedName: "x-ms-copy-status", - xmlName: "x-ms-copy-status", - type: { - name: "Enum", - allowedValues: ["pending", "success", "aborted", "failed"] - } - }, - leaseDuration: { - serializedName: "x-ms-lease-duration", - xmlName: "x-ms-lease-duration", - type: { - name: "Enum", - allowedValues: ["infinite", "fixed"] - } - }, - leaseState: { - serializedName: "x-ms-lease-state", - xmlName: "x-ms-lease-state", - type: { - name: "Enum", - allowedValues: [ - "available", - "leased", - "expired", - "breaking", - "broken" - ] - } - }, - leaseStatus: { - serializedName: "x-ms-lease-status", - xmlName: "x-ms-lease-status", - type: { - name: "Enum", - allowedValues: ["locked", "unlocked"] - } - }, - clientRequestId: { - serializedName: "x-ms-client-request-id", - xmlName: "x-ms-client-request-id", - type: { - name: "String" - } - }, - requestId: { - serializedName: "x-ms-request-id", - xmlName: "x-ms-request-id", - type: { - name: "String" - } - }, - version: { - serializedName: "x-ms-version", - xmlName: "x-ms-version", - type: { - name: "String" - } - }, - versionId: { - serializedName: "x-ms-version-id", - xmlName: "x-ms-version-id", - type: { - name: "String" - } - }, - isCurrentVersion: { - serializedName: "x-ms-is-current-version", - xmlName: "x-ms-is-current-version", - type: { - name: "Boolean" - } - }, - acceptRanges: { - serializedName: "accept-ranges", - xmlName: "accept-ranges", - type: { - name: "String" - } - }, - date: { - serializedName: "date", - xmlName: "date", - type: { - name: "DateTimeRfc1123" - } - }, - blobCommittedBlockCount: { - serializedName: "x-ms-blob-committed-block-count", - xmlName: "x-ms-blob-committed-block-count", - type: { - name: "Number" - } - }, - isServerEncrypted: { - serializedName: "x-ms-server-encrypted", - xmlName: "x-ms-server-encrypted", - type: { - name: "Boolean" - } - }, - encryptionKeySha256: { - serializedName: "x-ms-encryption-key-sha256", - xmlName: "x-ms-encryption-key-sha256", - type: { - name: "String" - } - }, - encryptionScope: { - serializedName: "x-ms-encryption-scope", - xmlName: "x-ms-encryption-scope", - type: { - name: "String" - } - }, - blobContentMD5: { - serializedName: "x-ms-blob-content-md5", - xmlName: "x-ms-blob-content-md5", - type: { - name: "ByteArray" - } - }, - tagCount: { - serializedName: "x-ms-tag-count", - xmlName: 
"x-ms-tag-count", - type: { - name: "Number" - } - }, - isSealed: { - serializedName: "x-ms-blob-sealed", - xmlName: "x-ms-blob-sealed", - type: { - name: "Boolean" - } - }, - lastAccessed: { - serializedName: "x-ms-last-access-time", - xmlName: "x-ms-last-access-time", - type: { - name: "DateTimeRfc1123" - } - }, - immutabilityPolicyExpiresOn: { - serializedName: "x-ms-immutability-policy-until-date", - xmlName: "x-ms-immutability-policy-until-date", - type: { - name: "DateTimeRfc1123" - } - }, - immutabilityPolicyMode: { - serializedName: "x-ms-immutability-policy-mode", - xmlName: "x-ms-immutability-policy-mode", - type: { - name: "Enum", - allowedValues: ["Mutable", "Unlocked", "Locked"] - } - }, - legalHold: { - serializedName: "x-ms-legal-hold", - xmlName: "x-ms-legal-hold", - type: { - name: "Boolean" - } - }, - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } - }, - contentCrc64: { - serializedName: "x-ms-content-crc64", - xmlName: "x-ms-content-crc64", - type: { - name: "ByteArray" - } - } + /** + * Signs request. + * + * @param request - + */ + signRequest(request2) { + request2.headers.set(constants_js_1.HeaderConstants.X_MS_DATE, (/* @__PURE__ */ new Date()).toUTCString()); + if (request2.body && (typeof request2.body === "string" || request2.body !== void 0) && request2.body.length > 0) { + request2.headers.set(constants_js_1.HeaderConstants.CONTENT_LENGTH, Buffer.byteLength(request2.body)); } + const stringToSign = [ + request2.method.toUpperCase(), + this.getHeaderValueToSign(request2, constants_js_1.HeaderConstants.CONTENT_LANGUAGE), + this.getHeaderValueToSign(request2, constants_js_1.HeaderConstants.CONTENT_ENCODING), + this.getHeaderValueToSign(request2, constants_js_1.HeaderConstants.CONTENT_LENGTH), + this.getHeaderValueToSign(request2, constants_js_1.HeaderConstants.CONTENT_MD5), + this.getHeaderValueToSign(request2, constants_js_1.HeaderConstants.CONTENT_TYPE), + this.getHeaderValueToSign(request2, constants_js_1.HeaderConstants.DATE), + this.getHeaderValueToSign(request2, constants_js_1.HeaderConstants.IF_MODIFIED_SINCE), + this.getHeaderValueToSign(request2, constants_js_1.HeaderConstants.IF_MATCH), + this.getHeaderValueToSign(request2, constants_js_1.HeaderConstants.IF_NONE_MATCH), + this.getHeaderValueToSign(request2, constants_js_1.HeaderConstants.IF_UNMODIFIED_SINCE), + this.getHeaderValueToSign(request2, constants_js_1.HeaderConstants.RANGE) + ].join("\n") + "\n" + this.getCanonicalizedHeadersString(request2) + this.getCanonicalizedResourceString(request2); + const signature = this.factory.computeHMACSHA256(stringToSign); + request2.headers.set(constants_js_1.HeaderConstants.AUTHORIZATION, `SharedKey ${this.factory.accountName}:${signature}`); + return request2; } - }; - exports2.BlobDownloadExceptionHeaders = { - serializedName: "Blob_downloadExceptionHeaders", - type: { - name: "Composite", - className: "BlobDownloadExceptionHeaders", - modelProperties: { - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } - } + /** + * Retrieve header value according to shared key sign rules. 
+ * @see https://learn.microsoft.com/en-us/rest/api/storageservices/authenticate-with-shared-key + * + * @param request - + * @param headerName - + */ + getHeaderValueToSign(request2, headerName) { + const value = request2.headers.get(headerName); + if (!value) { + return ""; + } + if (headerName === constants_js_1.HeaderConstants.CONTENT_LENGTH && value === "0") { + return ""; } + return value; } - }; - exports2.BlobGetPropertiesHeaders = { - serializedName: "Blob_getPropertiesHeaders", - type: { - name: "Composite", - className: "BlobGetPropertiesHeaders", - modelProperties: { - lastModified: { - serializedName: "last-modified", - xmlName: "last-modified", - type: { - name: "DateTimeRfc1123" - } - }, - createdOn: { - serializedName: "x-ms-creation-time", - xmlName: "x-ms-creation-time", - type: { - name: "DateTimeRfc1123" - } - }, - metadata: { - serializedName: "x-ms-meta", - headerCollectionPrefix: "x-ms-meta-", - xmlName: "x-ms-meta", - type: { - name: "Dictionary", - value: { type: { name: "String" } } - } - }, - objectReplicationPolicyId: { - serializedName: "x-ms-or-policy-id", - xmlName: "x-ms-or-policy-id", - type: { - name: "String" - } - }, - objectReplicationRules: { - serializedName: "x-ms-or", - headerCollectionPrefix: "x-ms-or-", - xmlName: "x-ms-or", - type: { - name: "Dictionary", - value: { type: { name: "String" } } - } - }, - blobType: { - serializedName: "x-ms-blob-type", - xmlName: "x-ms-blob-type", - type: { - name: "Enum", - allowedValues: ["BlockBlob", "PageBlob", "AppendBlob"] - } - }, - copyCompletedOn: { - serializedName: "x-ms-copy-completion-time", - xmlName: "x-ms-copy-completion-time", - type: { - name: "DateTimeRfc1123" - } - }, - copyStatusDescription: { - serializedName: "x-ms-copy-status-description", - xmlName: "x-ms-copy-status-description", - type: { - name: "String" - } - }, - copyId: { - serializedName: "x-ms-copy-id", - xmlName: "x-ms-copy-id", - type: { - name: "String" - } - }, - copyProgress: { - serializedName: "x-ms-copy-progress", - xmlName: "x-ms-copy-progress", - type: { - name: "String" - } - }, - copySource: { - serializedName: "x-ms-copy-source", - xmlName: "x-ms-copy-source", - type: { - name: "String" - } - }, - copyStatus: { - serializedName: "x-ms-copy-status", - xmlName: "x-ms-copy-status", - type: { - name: "Enum", - allowedValues: ["pending", "success", "aborted", "failed"] - } - }, - isIncrementalCopy: { - serializedName: "x-ms-incremental-copy", - xmlName: "x-ms-incremental-copy", - type: { - name: "Boolean" - } - }, - destinationSnapshot: { - serializedName: "x-ms-copy-destination-snapshot", - xmlName: "x-ms-copy-destination-snapshot", - type: { - name: "String" - } - }, - leaseDuration: { - serializedName: "x-ms-lease-duration", - xmlName: "x-ms-lease-duration", - type: { - name: "Enum", - allowedValues: ["infinite", "fixed"] - } - }, - leaseState: { - serializedName: "x-ms-lease-state", - xmlName: "x-ms-lease-state", - type: { - name: "Enum", - allowedValues: [ - "available", - "leased", - "expired", - "breaking", - "broken" - ] - } - }, - leaseStatus: { - serializedName: "x-ms-lease-status", - xmlName: "x-ms-lease-status", - type: { - name: "Enum", - allowedValues: ["locked", "unlocked"] - } - }, - contentLength: { - serializedName: "content-length", - xmlName: "content-length", - type: { - name: "Number" - } - }, - contentType: { - serializedName: "content-type", - xmlName: "content-type", - type: { - name: "String" - } - }, - etag: { - serializedName: "etag", - xmlName: "etag", - type: { - name: "String" - } - }, - 
contentMD5: { - serializedName: "content-md5", - xmlName: "content-md5", - type: { - name: "ByteArray" - } - }, - contentEncoding: { - serializedName: "content-encoding", - xmlName: "content-encoding", - type: { - name: "String" - } - }, - contentDisposition: { - serializedName: "content-disposition", - xmlName: "content-disposition", - type: { - name: "String" - } - }, - contentLanguage: { - serializedName: "content-language", - xmlName: "content-language", - type: { - name: "String" - } - }, - cacheControl: { - serializedName: "cache-control", - xmlName: "cache-control", - type: { - name: "String" - } - }, - blobSequenceNumber: { - serializedName: "x-ms-blob-sequence-number", - xmlName: "x-ms-blob-sequence-number", - type: { - name: "Number" - } - }, - clientRequestId: { - serializedName: "x-ms-client-request-id", - xmlName: "x-ms-client-request-id", - type: { - name: "String" - } - }, - requestId: { - serializedName: "x-ms-request-id", - xmlName: "x-ms-request-id", - type: { - name: "String" - } - }, - version: { - serializedName: "x-ms-version", - xmlName: "x-ms-version", - type: { - name: "String" - } - }, - date: { - serializedName: "date", - xmlName: "date", - type: { - name: "DateTimeRfc1123" - } - }, - acceptRanges: { - serializedName: "accept-ranges", - xmlName: "accept-ranges", - type: { - name: "String" - } - }, - blobCommittedBlockCount: { - serializedName: "x-ms-blob-committed-block-count", - xmlName: "x-ms-blob-committed-block-count", - type: { - name: "Number" - } - }, - isServerEncrypted: { - serializedName: "x-ms-server-encrypted", - xmlName: "x-ms-server-encrypted", - type: { - name: "Boolean" - } - }, - encryptionKeySha256: { - serializedName: "x-ms-encryption-key-sha256", - xmlName: "x-ms-encryption-key-sha256", - type: { - name: "String" - } - }, - encryptionScope: { - serializedName: "x-ms-encryption-scope", - xmlName: "x-ms-encryption-scope", - type: { - name: "String" - } - }, - accessTier: { - serializedName: "x-ms-access-tier", - xmlName: "x-ms-access-tier", - type: { - name: "String" - } - }, - accessTierInferred: { - serializedName: "x-ms-access-tier-inferred", - xmlName: "x-ms-access-tier-inferred", - type: { - name: "Boolean" - } - }, - archiveStatus: { - serializedName: "x-ms-archive-status", - xmlName: "x-ms-archive-status", - type: { - name: "String" - } - }, - accessTierChangedOn: { - serializedName: "x-ms-access-tier-change-time", - xmlName: "x-ms-access-tier-change-time", - type: { - name: "DateTimeRfc1123" - } - }, - versionId: { - serializedName: "x-ms-version-id", - xmlName: "x-ms-version-id", - type: { - name: "String" - } - }, - isCurrentVersion: { - serializedName: "x-ms-is-current-version", - xmlName: "x-ms-is-current-version", - type: { - name: "Boolean" - } - }, - tagCount: { - serializedName: "x-ms-tag-count", - xmlName: "x-ms-tag-count", - type: { - name: "Number" - } - }, - expiresOn: { - serializedName: "x-ms-expiry-time", - xmlName: "x-ms-expiry-time", - type: { - name: "DateTimeRfc1123" - } - }, - isSealed: { - serializedName: "x-ms-blob-sealed", - xmlName: "x-ms-blob-sealed", - type: { - name: "Boolean" - } - }, - rehydratePriority: { - serializedName: "x-ms-rehydrate-priority", - xmlName: "x-ms-rehydrate-priority", - type: { - name: "Enum", - allowedValues: ["High", "Standard"] - } - }, - lastAccessed: { - serializedName: "x-ms-last-access-time", - xmlName: "x-ms-last-access-time", - type: { - name: "DateTimeRfc1123" - } - }, - immutabilityPolicyExpiresOn: { - serializedName: "x-ms-immutability-policy-until-date", - xmlName: 
"x-ms-immutability-policy-until-date", - type: { - name: "DateTimeRfc1123" - } - }, - immutabilityPolicyMode: { - serializedName: "x-ms-immutability-policy-mode", - xmlName: "x-ms-immutability-policy-mode", - type: { - name: "Enum", - allowedValues: ["Mutable", "Unlocked", "Locked"] - } - }, - legalHold: { - serializedName: "x-ms-legal-hold", - xmlName: "x-ms-legal-hold", - type: { - name: "Boolean" - } - }, - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" + /** + * To construct the CanonicalizedHeaders portion of the signature string, follow these steps: + * 1. Retrieve all headers for the resource that begin with x-ms-, including the x-ms-date header. + * 2. Convert each HTTP header name to lowercase. + * 3. Sort the headers lexicographically by header name, in ascending order. + * Each header may appear only once in the string. + * 4. Replace any linear whitespace in the header value with a single space. + * 5. Trim any whitespace around the colon in the header. + * 6. Finally, append a new-line character to each canonicalized header in the resulting list. + * Construct the CanonicalizedHeaders string by concatenating all headers in this list into a single string. + * + * @param request - + */ + getCanonicalizedHeadersString(request2) { + let headersArray = request2.headers.headersArray().filter((value) => { + return value.name.toLowerCase().startsWith(constants_js_1.HeaderConstants.PREFIX_FOR_STORAGE); + }); + headersArray.sort((a, b) => { + return (0, SharedKeyComparator_js_1.compareHeader)(a.name.toLowerCase(), b.name.toLowerCase()); + }); + headersArray = headersArray.filter((value, index, array) => { + if (index > 0 && value.name.toLowerCase() === array[index - 1].name.toLowerCase()) { + return false; + } + return true; + }); + let canonicalizedHeadersStringToSign = ""; + headersArray.forEach((header) => { + canonicalizedHeadersStringToSign += `${header.name.toLowerCase().trimRight()}:${header.value.trimLeft()} +`; + }); + return canonicalizedHeadersStringToSign; + } + /** + * Retrieves the webResource canonicalized resource string. 
+ * + * @param request - + */ + getCanonicalizedResourceString(request2) { + const path13 = (0, utils_common_js_1.getURLPath)(request2.url) || "/"; + let canonicalizedResourceString = ""; + canonicalizedResourceString += `/${this.factory.accountName}${path13}`; + const queries = (0, utils_common_js_1.getURLQueries)(request2.url); + const lowercaseQueries = {}; + if (queries) { + const queryKeys = []; + for (const key in queries) { + if (Object.prototype.hasOwnProperty.call(queries, key)) { + const lowercaseKey = key.toLowerCase(); + lowercaseQueries[lowercaseKey] = queries[key]; + queryKeys.push(lowercaseKey); } } + queryKeys.sort(); + for (const key of queryKeys) { + canonicalizedResourceString += ` +${key}:${decodeURIComponent(lowercaseQueries[key])}`; + } } + return canonicalizedResourceString; + } + }; + exports2.StorageSharedKeyCredentialPolicy = StorageSharedKeyCredentialPolicy; + } +}); + +// node_modules/@azure/storage-common/dist/commonjs/credentials/StorageSharedKeyCredential.js +var require_StorageSharedKeyCredential2 = __commonJS({ + "node_modules/@azure/storage-common/dist/commonjs/credentials/StorageSharedKeyCredential.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.StorageSharedKeyCredential = void 0; + var node_crypto_1 = require("node:crypto"); + var StorageSharedKeyCredentialPolicy_js_1 = require_StorageSharedKeyCredentialPolicy2(); + var Credential_js_1 = require_Credential2(); + var StorageSharedKeyCredential = class extends Credential_js_1.Credential { + /** + * Azure Storage account name; readonly. + */ + accountName; + /** + * Azure Storage account key; readonly. + */ + accountKey; + /** + * Creates an instance of StorageSharedKeyCredential. + * @param accountName - + * @param accountKey - + */ + constructor(accountName, accountKey) { + super(); + this.accountName = accountName; + this.accountKey = Buffer.from(accountKey, "base64"); + } + /** + * Creates a StorageSharedKeyCredentialPolicy object. + * + * @param nextPolicy - + * @param options - + */ + create(nextPolicy, options) { + return new StorageSharedKeyCredentialPolicy_js_1.StorageSharedKeyCredentialPolicy(nextPolicy, options, this); + } + /** + * Generates a hash signature for an HTTP request or for a SAS. 
+ * + * @param stringToSign - + */ + computeHMACSHA256(stringToSign) { + return (0, node_crypto_1.createHmac)("sha256", this.accountKey).update(stringToSign, "utf8").digest("base64"); } }; - exports2.BlobGetPropertiesExceptionHeaders = { - serializedName: "Blob_getPropertiesExceptionHeaders", - type: { - name: "Composite", - className: "BlobGetPropertiesExceptionHeaders", - modelProperties: { - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } + exports2.StorageSharedKeyCredential = StorageSharedKeyCredential; + } +}); + +// node_modules/@azure/storage-common/node_modules/@azure/abort-controller/dist/commonjs/AbortError.js +var require_AbortError4 = __commonJS({ + "node_modules/@azure/storage-common/node_modules/@azure/abort-controller/dist/commonjs/AbortError.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.AbortError = void 0; + var AbortError = class extends Error { + constructor(message) { + super(message); + this.name = "AbortError"; + } + }; + exports2.AbortError = AbortError; + } +}); + +// node_modules/@azure/storage-common/node_modules/@azure/abort-controller/dist/commonjs/index.js +var require_commonjs12 = __commonJS({ + "node_modules/@azure/storage-common/node_modules/@azure/abort-controller/dist/commonjs/index.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.AbortError = void 0; + var AbortError_js_1 = require_AbortError4(); + Object.defineProperty(exports2, "AbortError", { enumerable: true, get: function() { + return AbortError_js_1.AbortError; + } }); + } +}); + +// node_modules/@azure/storage-common/dist/commonjs/log.js +var require_log6 = __commonJS({ + "node_modules/@azure/storage-common/dist/commonjs/log.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.logger = void 0; + var logger_1 = require_commonjs2(); + exports2.logger = (0, logger_1.createClientLogger)("storage-common"); + } +}); + +// node_modules/@azure/storage-common/dist/commonjs/policies/StorageRetryPolicyType.js +var require_StorageRetryPolicyType2 = __commonJS({ + "node_modules/@azure/storage-common/dist/commonjs/policies/StorageRetryPolicyType.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.StorageRetryPolicyType = void 0; + var StorageRetryPolicyType; + (function(StorageRetryPolicyType2) { + StorageRetryPolicyType2[StorageRetryPolicyType2["EXPONENTIAL"] = 0] = "EXPONENTIAL"; + StorageRetryPolicyType2[StorageRetryPolicyType2["FIXED"] = 1] = "FIXED"; + })(StorageRetryPolicyType || (exports2.StorageRetryPolicyType = StorageRetryPolicyType = {})); + } +}); + +// node_modules/@azure/storage-common/dist/commonjs/policies/StorageRetryPolicy.js +var require_StorageRetryPolicy2 = __commonJS({ + "node_modules/@azure/storage-common/dist/commonjs/policies/StorageRetryPolicy.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.StorageRetryPolicy = void 0; + exports2.NewRetryPolicyFactory = NewRetryPolicyFactory; + var abort_controller_1 = require_commonjs12(); + var RequestPolicy_js_1 = require_RequestPolicy2(); + var constants_js_1 = require_constants16(); + var utils_common_js_1 = require_utils_common2(); + var log_js_1 = require_log6(); + var StorageRetryPolicyType_js_1 = require_StorageRetryPolicyType2(); + function NewRetryPolicyFactory(retryOptions) { + return { 
+ create: (nextPolicy, options) => { + return new StorageRetryPolicy(nextPolicy, options, retryOptions); + } + }; + } + var DEFAULT_RETRY_OPTIONS = { + maxRetryDelayInMs: 120 * 1e3, + maxTries: 4, + retryDelayInMs: 4 * 1e3, + retryPolicyType: StorageRetryPolicyType_js_1.StorageRetryPolicyType.EXPONENTIAL, + secondaryHost: "", + tryTimeoutInMs: void 0 + // Use server side default timeout strategy + }; + var RETRY_ABORT_ERROR = new abort_controller_1.AbortError("The operation was aborted."); + var StorageRetryPolicy = class extends RequestPolicy_js_1.BaseRequestPolicy { + /** + * RetryOptions. + */ + retryOptions; + /** + * Creates an instance of RetryPolicy. + * + * @param nextPolicy - + * @param options - + * @param retryOptions - + */ + constructor(nextPolicy, options, retryOptions = DEFAULT_RETRY_OPTIONS) { + super(nextPolicy, options); + this.retryOptions = { + retryPolicyType: retryOptions.retryPolicyType ? retryOptions.retryPolicyType : DEFAULT_RETRY_OPTIONS.retryPolicyType, + maxTries: retryOptions.maxTries && retryOptions.maxTries >= 1 ? Math.floor(retryOptions.maxTries) : DEFAULT_RETRY_OPTIONS.maxTries, + tryTimeoutInMs: retryOptions.tryTimeoutInMs && retryOptions.tryTimeoutInMs >= 0 ? retryOptions.tryTimeoutInMs : DEFAULT_RETRY_OPTIONS.tryTimeoutInMs, + retryDelayInMs: retryOptions.retryDelayInMs && retryOptions.retryDelayInMs >= 0 ? Math.min(retryOptions.retryDelayInMs, retryOptions.maxRetryDelayInMs ? retryOptions.maxRetryDelayInMs : DEFAULT_RETRY_OPTIONS.maxRetryDelayInMs) : DEFAULT_RETRY_OPTIONS.retryDelayInMs, + maxRetryDelayInMs: retryOptions.maxRetryDelayInMs && retryOptions.maxRetryDelayInMs >= 0 ? retryOptions.maxRetryDelayInMs : DEFAULT_RETRY_OPTIONS.maxRetryDelayInMs, + secondaryHost: retryOptions.secondaryHost ? retryOptions.secondaryHost : DEFAULT_RETRY_OPTIONS.secondaryHost + }; + } + /** + * Sends request. + * + * @param request - + */ + async sendRequest(request2) { + return this.attemptSendRequest(request2, false, 1); + } + /** + * Decide and perform next retry. Won't mutate request parameter. + * + * @param request - + * @param secondaryHas404 - If attempt was against the secondary & it returned a StatusNotFound (404), then + * the resource was not found. This may be due to replication delay. So, in this + * case, we'll never try the secondary again for this operation. + * @param attempt - How many retries has been attempted to performed, starting from 1, which includes + * the attempt will be performed by this method call. + */ + async attemptSendRequest(request2, secondaryHas404, attempt) { + const newRequest = request2.clone(); + const isPrimaryRetry = secondaryHas404 || !this.retryOptions.secondaryHost || !(request2.method === "GET" || request2.method === "HEAD" || request2.method === "OPTIONS") || attempt % 2 === 1; + if (!isPrimaryRetry) { + newRequest.url = (0, utils_common_js_1.setURLHost)(newRequest.url, this.retryOptions.secondaryHost); + } + if (this.retryOptions.tryTimeoutInMs) { + newRequest.url = (0, utils_common_js_1.setURLParameter)(newRequest.url, constants_js_1.URLConstants.Parameters.TIMEOUT, Math.floor(this.retryOptions.tryTimeoutInMs / 1e3).toString()); + } + let response; + try { + log_js_1.logger.info(`RetryPolicy: =====> Try=${attempt} ${isPrimaryRetry ? 
"Primary" : "Secondary"}`); + response = await this._nextPolicy.sendRequest(newRequest); + if (!this.shouldRetry(isPrimaryRetry, attempt, response)) { + return response; + } + secondaryHas404 = secondaryHas404 || !isPrimaryRetry && response.status === 404; + } catch (err) { + log_js_1.logger.error(`RetryPolicy: Caught error, message: ${err.message}, code: ${err.code}`); + if (!this.shouldRetry(isPrimaryRetry, attempt, response, err)) { + throw err; } } + await this.delay(isPrimaryRetry, attempt, request2.abortSignal); + return this.attemptSendRequest(request2, secondaryHas404, ++attempt); } - }; - exports2.BlobDeleteHeaders = { - serializedName: "Blob_deleteHeaders", - type: { - name: "Composite", - className: "BlobDeleteHeaders", - modelProperties: { - clientRequestId: { - serializedName: "x-ms-client-request-id", - xmlName: "x-ms-client-request-id", - type: { - name: "String" - } - }, - requestId: { - serializedName: "x-ms-request-id", - xmlName: "x-ms-request-id", - type: { - name: "String" - } - }, - version: { - serializedName: "x-ms-version", - xmlName: "x-ms-version", - type: { - name: "String" - } - }, - date: { - serializedName: "date", - xmlName: "date", - type: { - name: "DateTimeRfc1123" + /** + * Decide whether to retry according to last HTTP response and retry counters. + * + * @param isPrimaryRetry - + * @param attempt - + * @param response - + * @param err - + */ + shouldRetry(isPrimaryRetry, attempt, response, err) { + if (attempt >= this.retryOptions.maxTries) { + log_js_1.logger.info(`RetryPolicy: Attempt(s) ${attempt} >= maxTries ${this.retryOptions.maxTries}, no further try.`); + return false; + } + const retriableErrors = [ + "ETIMEDOUT", + "ESOCKETTIMEDOUT", + "ECONNREFUSED", + "ECONNRESET", + "ENOENT", + "ENOTFOUND", + "TIMEOUT", + "EPIPE", + "REQUEST_SEND_ERROR" + // For default xhr based http client provided in ms-rest-js + ]; + if (err) { + for (const retriableError of retriableErrors) { + if (err.name.toUpperCase().includes(retriableError) || err.message.toUpperCase().includes(retriableError) || err.code && err.code.toString().toUpperCase() === retriableError) { + log_js_1.logger.info(`RetryPolicy: Network error ${retriableError} found, will retry.`); + return true; } - }, - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" + } + } + if (response || err) { + const statusCode = response ? response.status : err ? err.statusCode : 0; + if (!isPrimaryRetry && statusCode === 404) { + log_js_1.logger.info(`RetryPolicy: Secondary access with 404, will retry.`); + return true; + } + if (statusCode === 503 || statusCode === 500) { + log_js_1.logger.info(`RetryPolicy: Will retry for status code ${statusCode}.`); + return true; + } + } + if (response) { + if (response?.status >= 400) { + const copySourceError = response.headers.get(constants_js_1.HeaderConstants.X_MS_CopySourceErrorCode); + if (copySourceError !== void 0) { + switch (copySourceError) { + case "InternalError": + case "OperationTimedOut": + case "ServerBusy": + return true; + } } } } + if (err?.code === "PARSE_ERROR" && err?.message.startsWith(`Error "Error: Unclosed root tag`)) { + log_js_1.logger.info("RetryPolicy: Incomplete XML response likely due to service timeout, will retry."); + return true; + } + return false; + } + /** + * Delay a calculated time between retries. 
+ * + * @param isPrimaryRetry - + * @param attempt - + * @param abortSignal - + */ + async delay(isPrimaryRetry, attempt, abortSignal) { + let delayTimeInMs = 0; + if (isPrimaryRetry) { + switch (this.retryOptions.retryPolicyType) { + case StorageRetryPolicyType_js_1.StorageRetryPolicyType.EXPONENTIAL: + delayTimeInMs = Math.min((Math.pow(2, attempt - 1) - 1) * this.retryOptions.retryDelayInMs, this.retryOptions.maxRetryDelayInMs); + break; + case StorageRetryPolicyType_js_1.StorageRetryPolicyType.FIXED: + delayTimeInMs = this.retryOptions.retryDelayInMs; + break; + } + } else { + delayTimeInMs = Math.random() * 1e3; + } + log_js_1.logger.info(`RetryPolicy: Delay for ${delayTimeInMs}ms`); + return (0, utils_common_js_1.delay)(delayTimeInMs, abortSignal, RETRY_ABORT_ERROR); } }; - exports2.BlobDeleteExceptionHeaders = { - serializedName: "Blob_deleteExceptionHeaders", - type: { - name: "Composite", - className: "BlobDeleteExceptionHeaders", - modelProperties: { - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } + exports2.StorageRetryPolicy = StorageRetryPolicy; + } +}); + +// node_modules/@azure/storage-common/dist/commonjs/StorageRetryPolicyFactory.js +var require_StorageRetryPolicyFactory2 = __commonJS({ + "node_modules/@azure/storage-common/dist/commonjs/StorageRetryPolicyFactory.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.StorageRetryPolicyFactory = exports2.StorageRetryPolicy = exports2.StorageRetryPolicyType = void 0; + var StorageRetryPolicy_js_1 = require_StorageRetryPolicy2(); + Object.defineProperty(exports2, "StorageRetryPolicy", { enumerable: true, get: function() { + return StorageRetryPolicy_js_1.StorageRetryPolicy; + } }); + var StorageRetryPolicyType_js_1 = require_StorageRetryPolicyType2(); + Object.defineProperty(exports2, "StorageRetryPolicyType", { enumerable: true, get: function() { + return StorageRetryPolicyType_js_1.StorageRetryPolicyType; + } }); + var StorageRetryPolicyFactory = class { + retryOptions; + /** + * Creates an instance of StorageRetryPolicyFactory. + * @param retryOptions - + */ + constructor(retryOptions) { + this.retryOptions = retryOptions; + } + /** + * Creates a StorageRetryPolicy object. 
+ * + * @param nextPolicy - + * @param options - + */ + create(nextPolicy, options) { + return new StorageRetryPolicy_js_1.StorageRetryPolicy(nextPolicy, options, this.retryOptions); + } + }; + exports2.StorageRetryPolicyFactory = StorageRetryPolicyFactory; + } +}); + +// node_modules/@azure/storage-common/dist/commonjs/policies/StorageBrowserPolicyV2.js +var require_StorageBrowserPolicyV2 = __commonJS({ + "node_modules/@azure/storage-common/dist/commonjs/policies/StorageBrowserPolicyV2.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.storageBrowserPolicyName = void 0; + exports2.storageBrowserPolicy = storageBrowserPolicy; + var core_util_1 = require_commonjs4(); + var constants_js_1 = require_constants16(); + var utils_common_js_1 = require_utils_common2(); + exports2.storageBrowserPolicyName = "storageBrowserPolicy"; + function storageBrowserPolicy() { + return { + name: exports2.storageBrowserPolicyName, + async sendRequest(request2, next) { + if (core_util_1.isNodeLike) { + return next(request2); + } + if (request2.method === "GET" || request2.method === "HEAD") { + request2.url = (0, utils_common_js_1.setURLParameter)(request2.url, constants_js_1.URLConstants.Parameters.FORCE_BROWSER_NO_CACHE, (/* @__PURE__ */ new Date()).getTime().toString()); } + request2.headers.delete(constants_js_1.HeaderConstants.COOKIE); + request2.headers.delete(constants_js_1.HeaderConstants.CONTENT_LENGTH); + return next(request2); + } + }; + } + } +}); + +// node_modules/@azure/storage-common/dist/commonjs/policies/StorageCorrectContentLengthPolicy.js +var require_StorageCorrectContentLengthPolicy = __commonJS({ + "node_modules/@azure/storage-common/dist/commonjs/policies/StorageCorrectContentLengthPolicy.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.storageCorrectContentLengthPolicyName = void 0; + exports2.storageCorrectContentLengthPolicy = storageCorrectContentLengthPolicy; + var constants_js_1 = require_constants16(); + exports2.storageCorrectContentLengthPolicyName = "StorageCorrectContentLengthPolicy"; + function storageCorrectContentLengthPolicy() { + function correctContentLength(request2) { + if (request2.body && (typeof request2.body === "string" || Buffer.isBuffer(request2.body)) && request2.body.length > 0) { + request2.headers.set(constants_js_1.HeaderConstants.CONTENT_LENGTH, Buffer.byteLength(request2.body)); } } + return { + name: exports2.storageCorrectContentLengthPolicyName, + async sendRequest(request2, next) { + correctContentLength(request2); + return next(request2); + } + }; + } + } +}); + +// node_modules/@azure/storage-common/dist/commonjs/policies/StorageRetryPolicyV2.js +var require_StorageRetryPolicyV2 = __commonJS({ + "node_modules/@azure/storage-common/dist/commonjs/policies/StorageRetryPolicyV2.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.storageRetryPolicyName = void 0; + exports2.storageRetryPolicy = storageRetryPolicy; + var abort_controller_1 = require_commonjs12(); + var core_rest_pipeline_1 = require_commonjs6(); + var core_util_1 = require_commonjs4(); + var StorageRetryPolicyFactory_js_1 = require_StorageRetryPolicyFactory2(); + var constants_js_1 = require_constants16(); + var utils_common_js_1 = require_utils_common2(); + var log_js_1 = require_log6(); + exports2.storageRetryPolicyName = "storageRetryPolicy"; + var DEFAULT_RETRY_OPTIONS = { + maxRetryDelayInMs: 120 * 1e3, + 
maxTries: 4, + retryDelayInMs: 4 * 1e3, + retryPolicyType: StorageRetryPolicyFactory_js_1.StorageRetryPolicyType.EXPONENTIAL, + secondaryHost: "", + tryTimeoutInMs: void 0 + // Use server side default timeout strategy }; - exports2.BlobUndeleteHeaders = { - serializedName: "Blob_undeleteHeaders", - type: { - name: "Composite", - className: "BlobUndeleteHeaders", - modelProperties: { - clientRequestId: { - serializedName: "x-ms-client-request-id", - xmlName: "x-ms-client-request-id", - type: { - name: "String" - } - }, - requestId: { - serializedName: "x-ms-request-id", - xmlName: "x-ms-request-id", - type: { - name: "String" - } - }, - version: { - serializedName: "x-ms-version", - xmlName: "x-ms-version", - type: { - name: "String" - } - }, - date: { - serializedName: "date", - xmlName: "date", - type: { - name: "DateTimeRfc1123" + var retriableErrors = [ + "ETIMEDOUT", + "ESOCKETTIMEDOUT", + "ECONNREFUSED", + "ECONNRESET", + "ENOENT", + "ENOTFOUND", + "TIMEOUT", + "EPIPE", + "REQUEST_SEND_ERROR" + ]; + var RETRY_ABORT_ERROR = new abort_controller_1.AbortError("The operation was aborted."); + function storageRetryPolicy(options = {}) { + const retryPolicyType = options.retryPolicyType ?? DEFAULT_RETRY_OPTIONS.retryPolicyType; + const maxTries = options.maxTries ?? DEFAULT_RETRY_OPTIONS.maxTries; + const retryDelayInMs = options.retryDelayInMs ?? DEFAULT_RETRY_OPTIONS.retryDelayInMs; + const maxRetryDelayInMs = options.maxRetryDelayInMs ?? DEFAULT_RETRY_OPTIONS.maxRetryDelayInMs; + const secondaryHost = options.secondaryHost ?? DEFAULT_RETRY_OPTIONS.secondaryHost; + const tryTimeoutInMs = options.tryTimeoutInMs ?? DEFAULT_RETRY_OPTIONS.tryTimeoutInMs; + function shouldRetry({ isPrimaryRetry, attempt, response, error: error3 }) { + if (attempt >= maxTries) { + log_js_1.logger.info(`RetryPolicy: Attempt(s) ${attempt} >= maxTries ${maxTries}, no further try.`); + return false; + } + if (error3) { + for (const retriableError of retriableErrors) { + if (error3.name.toUpperCase().includes(retriableError) || error3.message.toUpperCase().includes(retriableError) || error3.code && error3.code.toString().toUpperCase() === retriableError) { + log_js_1.logger.info(`RetryPolicy: Network error ${retriableError} found, will retry.`); + return true; } - }, - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" + } + if (error3?.code === "PARSE_ERROR" && error3?.message.startsWith(`Error "Error: Unclosed root tag`)) { + log_js_1.logger.info("RetryPolicy: Incomplete XML response likely due to service timeout, will retry."); + return true; + } + } + if (response || error3) { + const statusCode = response?.status ?? error3?.statusCode ?? 
0; + if (!isPrimaryRetry && statusCode === 404) { + log_js_1.logger.info(`RetryPolicy: Secondary access with 404, will retry.`); + return true; + } + if (statusCode === 503 || statusCode === 500) { + log_js_1.logger.info(`RetryPolicy: Will retry for status code ${statusCode}.`); + return true; + } + } + if (response) { + if (response?.status >= 400) { + const copySourceError = response.headers.get(constants_js_1.HeaderConstants.X_MS_CopySourceErrorCode); + if (copySourceError !== void 0) { + switch (copySourceError) { + case "InternalError": + case "OperationTimedOut": + case "ServerBusy": + return true; + } } } } + return false; } - }; - exports2.BlobUndeleteExceptionHeaders = { - serializedName: "Blob_undeleteExceptionHeaders", - type: { - name: "Composite", - className: "BlobUndeleteExceptionHeaders", - modelProperties: { - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } + function calculateDelay(isPrimaryRetry, attempt) { + let delayTimeInMs = 0; + if (isPrimaryRetry) { + switch (retryPolicyType) { + case StorageRetryPolicyFactory_js_1.StorageRetryPolicyType.EXPONENTIAL: + delayTimeInMs = Math.min((Math.pow(2, attempt - 1) - 1) * retryDelayInMs, maxRetryDelayInMs); + break; + case StorageRetryPolicyFactory_js_1.StorageRetryPolicyType.FIXED: + delayTimeInMs = retryDelayInMs; + break; } + } else { + delayTimeInMs = Math.random() * 1e3; } + log_js_1.logger.info(`RetryPolicy: Delay for ${delayTimeInMs}ms`); + return delayTimeInMs; } - }; - exports2.BlobSetExpiryHeaders = { - serializedName: "Blob_setExpiryHeaders", - type: { - name: "Composite", - className: "BlobSetExpiryHeaders", - modelProperties: { - etag: { - serializedName: "etag", - xmlName: "etag", - type: { - name: "String" - } - }, - lastModified: { - serializedName: "last-modified", - xmlName: "last-modified", - type: { - name: "DateTimeRfc1123" - } - }, - clientRequestId: { - serializedName: "x-ms-client-request-id", - xmlName: "x-ms-client-request-id", - type: { - name: "String" - } - }, - requestId: { - serializedName: "x-ms-request-id", - xmlName: "x-ms-request-id", - type: { - name: "String" - } - }, - version: { - serializedName: "x-ms-version", - xmlName: "x-ms-version", - type: { - name: "String" + return { + name: exports2.storageRetryPolicyName, + async sendRequest(request2, next) { + if (tryTimeoutInMs) { + request2.url = (0, utils_common_js_1.setURLParameter)(request2.url, constants_js_1.URLConstants.Parameters.TIMEOUT, String(Math.floor(tryTimeoutInMs / 1e3))); + } + const primaryUrl = request2.url; + const secondaryUrl = secondaryHost ? (0, utils_common_js_1.setURLHost)(request2.url, secondaryHost) : void 0; + let secondaryHas404 = false; + let attempt = 1; + let retryAgain = true; + let response; + let error3; + while (retryAgain) { + const isPrimaryRetry = secondaryHas404 || !secondaryUrl || !["GET", "HEAD", "OPTIONS"].includes(request2.method) || attempt % 2 === 1; + request2.url = isPrimaryRetry ? primaryUrl : secondaryUrl; + response = void 0; + error3 = void 0; + try { + log_js_1.logger.info(`RetryPolicy: =====> Try=${attempt} ${isPrimaryRetry ? 
"Primary" : "Secondary"}`); + response = await next(request2); + secondaryHas404 = secondaryHas404 || !isPrimaryRetry && response.status === 404; + } catch (e) { + if ((0, core_rest_pipeline_1.isRestError)(e)) { + log_js_1.logger.error(`RetryPolicy: Caught error, message: ${e.message}, code: ${e.code}`); + error3 = e; + } else { + log_js_1.logger.error(`RetryPolicy: Caught error, message: ${(0, core_util_1.getErrorMessage)(e)}`); + throw e; + } } - }, - date: { - serializedName: "date", - xmlName: "date", - type: { - name: "DateTimeRfc1123" + retryAgain = shouldRetry({ isPrimaryRetry, attempt, response, error: error3 }); + if (retryAgain) { + await (0, utils_common_js_1.delay)(calculateDelay(isPrimaryRetry, attempt), request2.abortSignal, RETRY_ABORT_ERROR); } + attempt++; + } + if (response) { + return response; } + throw error3 ?? new core_rest_pipeline_1.RestError("RetryPolicy failed without known error."); + } + }; + } + } +}); + +// node_modules/@azure/storage-common/dist/commonjs/policies/StorageSharedKeyCredentialPolicyV2.js +var require_StorageSharedKeyCredentialPolicyV2 = __commonJS({ + "node_modules/@azure/storage-common/dist/commonjs/policies/StorageSharedKeyCredentialPolicyV2.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.storageSharedKeyCredentialPolicyName = void 0; + exports2.storageSharedKeyCredentialPolicy = storageSharedKeyCredentialPolicy; + var node_crypto_1 = require("node:crypto"); + var constants_js_1 = require_constants16(); + var utils_common_js_1 = require_utils_common2(); + var SharedKeyComparator_js_1 = require_SharedKeyComparator2(); + exports2.storageSharedKeyCredentialPolicyName = "storageSharedKeyCredentialPolicy"; + function storageSharedKeyCredentialPolicy(options) { + function signRequest(request2) { + request2.headers.set(constants_js_1.HeaderConstants.X_MS_DATE, (/* @__PURE__ */ new Date()).toUTCString()); + if (request2.body && (typeof request2.body === "string" || Buffer.isBuffer(request2.body)) && request2.body.length > 0) { + request2.headers.set(constants_js_1.HeaderConstants.CONTENT_LENGTH, Buffer.byteLength(request2.body)); + } + const stringToSign = [ + request2.method.toUpperCase(), + getHeaderValueToSign(request2, constants_js_1.HeaderConstants.CONTENT_LANGUAGE), + getHeaderValueToSign(request2, constants_js_1.HeaderConstants.CONTENT_ENCODING), + getHeaderValueToSign(request2, constants_js_1.HeaderConstants.CONTENT_LENGTH), + getHeaderValueToSign(request2, constants_js_1.HeaderConstants.CONTENT_MD5), + getHeaderValueToSign(request2, constants_js_1.HeaderConstants.CONTENT_TYPE), + getHeaderValueToSign(request2, constants_js_1.HeaderConstants.DATE), + getHeaderValueToSign(request2, constants_js_1.HeaderConstants.IF_MODIFIED_SINCE), + getHeaderValueToSign(request2, constants_js_1.HeaderConstants.IF_MATCH), + getHeaderValueToSign(request2, constants_js_1.HeaderConstants.IF_NONE_MATCH), + getHeaderValueToSign(request2, constants_js_1.HeaderConstants.IF_UNMODIFIED_SINCE), + getHeaderValueToSign(request2, constants_js_1.HeaderConstants.RANGE) + ].join("\n") + "\n" + getCanonicalizedHeadersString(request2) + getCanonicalizedResourceString(request2); + const signature = (0, node_crypto_1.createHmac)("sha256", options.accountKey).update(stringToSign, "utf8").digest("base64"); + request2.headers.set(constants_js_1.HeaderConstants.AUTHORIZATION, `SharedKey ${options.accountName}:${signature}`); + } + function getHeaderValueToSign(request2, headerName) { + const value = 
request2.headers.get(headerName); + if (!value) { + return ""; + } + if (headerName === constants_js_1.HeaderConstants.CONTENT_LENGTH && value === "0") { + return ""; } + return value; } - }; - exports2.BlobSetExpiryExceptionHeaders = { - serializedName: "Blob_setExpiryExceptionHeaders", - type: { - name: "Composite", - className: "BlobSetExpiryExceptionHeaders", - modelProperties: { - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } + function getCanonicalizedHeadersString(request2) { + let headersArray = []; + for (const [name, value] of request2.headers) { + if (name.toLowerCase().startsWith(constants_js_1.HeaderConstants.PREFIX_FOR_STORAGE)) { + headersArray.push({ name, value }); } } + headersArray.sort((a, b) => { + return (0, SharedKeyComparator_js_1.compareHeader)(a.name.toLowerCase(), b.name.toLowerCase()); + }); + headersArray = headersArray.filter((value, index, array) => { + if (index > 0 && value.name.toLowerCase() === array[index - 1].name.toLowerCase()) { + return false; + } + return true; + }); + let canonicalizedHeadersStringToSign = ""; + headersArray.forEach((header) => { + canonicalizedHeadersStringToSign += `${header.name.toLowerCase().trimRight()}:${header.value.trimLeft()} +`; + }); + return canonicalizedHeadersStringToSign; } - }; - exports2.BlobSetHttpHeadersHeaders = { - serializedName: "Blob_setHttpHeadersHeaders", - type: { - name: "Composite", - className: "BlobSetHttpHeadersHeaders", - modelProperties: { - etag: { - serializedName: "etag", - xmlName: "etag", - type: { - name: "String" - } - }, - lastModified: { - serializedName: "last-modified", - xmlName: "last-modified", - type: { - name: "DateTimeRfc1123" - } - }, - blobSequenceNumber: { - serializedName: "x-ms-blob-sequence-number", - xmlName: "x-ms-blob-sequence-number", - type: { - name: "Number" - } - }, - clientRequestId: { - serializedName: "x-ms-client-request-id", - xmlName: "x-ms-client-request-id", - type: { - name: "String" - } - }, - requestId: { - serializedName: "x-ms-request-id", - xmlName: "x-ms-request-id", - type: { - name: "String" - } - }, - version: { - serializedName: "x-ms-version", - xmlName: "x-ms-version", - type: { - name: "String" - } - }, - date: { - serializedName: "date", - xmlName: "date", - type: { - name: "DateTimeRfc1123" - } - }, - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" + function getCanonicalizedResourceString(request2) { + const path13 = (0, utils_common_js_1.getURLPath)(request2.url) || "/"; + let canonicalizedResourceString = ""; + canonicalizedResourceString += `/${options.accountName}${path13}`; + const queries = (0, utils_common_js_1.getURLQueries)(request2.url); + const lowercaseQueries = {}; + if (queries) { + const queryKeys = []; + for (const key in queries) { + if (Object.prototype.hasOwnProperty.call(queries, key)) { + const lowercaseKey = key.toLowerCase(); + lowercaseQueries[lowercaseKey] = queries[key]; + queryKeys.push(lowercaseKey); } } + queryKeys.sort(); + for (const key of queryKeys) { + canonicalizedResourceString += ` +${key}:${decodeURIComponent(lowercaseQueries[key])}`; + } } + return canonicalizedResourceString; } - }; - exports2.BlobSetHttpHeadersExceptionHeaders = { - serializedName: "Blob_setHttpHeadersExceptionHeaders", - type: { - name: "Composite", - className: "BlobSetHttpHeadersExceptionHeaders", - modelProperties: { - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - 
name: "String" + return { + name: exports2.storageSharedKeyCredentialPolicyName, + async sendRequest(request2, next) { + signRequest(request2); + return next(request2); + } + }; + } + } +}); + +// node_modules/@azure/storage-common/dist/commonjs/policies/StorageRequestFailureDetailsParserPolicy.js +var require_StorageRequestFailureDetailsParserPolicy = __commonJS({ + "node_modules/@azure/storage-common/dist/commonjs/policies/StorageRequestFailureDetailsParserPolicy.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.storageRequestFailureDetailsParserPolicyName = void 0; + exports2.storageRequestFailureDetailsParserPolicy = storageRequestFailureDetailsParserPolicy; + exports2.storageRequestFailureDetailsParserPolicyName = "storageRequestFailureDetailsParserPolicy"; + function storageRequestFailureDetailsParserPolicy() { + return { + name: exports2.storageRequestFailureDetailsParserPolicyName, + async sendRequest(request2, next) { + try { + const response = await next(request2); + return response; + } catch (err) { + if (typeof err === "object" && err !== null && err.response && err.response.parsedBody) { + if (err.response.parsedBody.code === "InvalidHeaderValue" && err.response.parsedBody.HeaderName === "x-ms-version") { + err.message = "The provided service version is not enabled on this storage account. Please see https://learn.microsoft.com/rest/api/storageservices/versioning-for-the-azure-storage-services for additional information.\n"; + } } + throw err; } } - } + }; + } + } +}); + +// node_modules/@azure/storage-common/dist/commonjs/index.js +var require_commonjs13 = __commonJS({ + "node_modules/@azure/storage-common/dist/commonjs/index.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.BaseRequestPolicy = exports2.getCachedDefaultHttpClient = void 0; + var tslib_1 = (init_tslib_es6(), __toCommonJS(tslib_es6_exports)); + tslib_1.__exportStar(require_BufferScheduler(), exports2); + var cache_js_1 = require_cache3(); + Object.defineProperty(exports2, "getCachedDefaultHttpClient", { enumerable: true, get: function() { + return cache_js_1.getCachedDefaultHttpClient; + } }); + tslib_1.__exportStar(require_StorageBrowserPolicyFactory(), exports2); + tslib_1.__exportStar(require_AnonymousCredential2(), exports2); + tslib_1.__exportStar(require_Credential2(), exports2); + tslib_1.__exportStar(require_StorageSharedKeyCredential2(), exports2); + tslib_1.__exportStar(require_StorageRetryPolicyFactory2(), exports2); + var RequestPolicy_js_1 = require_RequestPolicy2(); + Object.defineProperty(exports2, "BaseRequestPolicy", { enumerable: true, get: function() { + return RequestPolicy_js_1.BaseRequestPolicy; + } }); + tslib_1.__exportStar(require_AnonymousCredentialPolicy2(), exports2); + tslib_1.__exportStar(require_CredentialPolicy2(), exports2); + tslib_1.__exportStar(require_StorageBrowserPolicy(), exports2); + tslib_1.__exportStar(require_StorageBrowserPolicyV2(), exports2); + tslib_1.__exportStar(require_StorageCorrectContentLengthPolicy(), exports2); + tslib_1.__exportStar(require_StorageRetryPolicyType2(), exports2); + tslib_1.__exportStar(require_StorageRetryPolicy2(), exports2); + tslib_1.__exportStar(require_StorageRetryPolicyV2(), exports2); + tslib_1.__exportStar(require_StorageSharedKeyCredentialPolicy2(), exports2); + tslib_1.__exportStar(require_StorageSharedKeyCredentialPolicyV2(), exports2); + tslib_1.__exportStar(require_StorageRetryPolicyFactory2(), exports2); + 
tslib_1.__exportStar(require_StorageRequestFailureDetailsParserPolicy(), exports2); + } +}); + +// node_modules/@azure/storage-blob/dist/commonjs/policies/StorageBrowserPolicyV2.js +var require_StorageBrowserPolicyV22 = __commonJS({ + "node_modules/@azure/storage-blob/dist/commonjs/policies/StorageBrowserPolicyV2.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.storageBrowserPolicyName = void 0; + exports2.storageBrowserPolicy = storageBrowserPolicy; + var core_util_1 = require_commonjs4(); + var constants_js_1 = require_constants15(); + var utils_common_js_1 = require_utils_common(); + exports2.storageBrowserPolicyName = "storageBrowserPolicy"; + function storageBrowserPolicy() { + return { + name: exports2.storageBrowserPolicyName, + async sendRequest(request2, next) { + if (core_util_1.isNodeLike) { + return next(request2); + } + if (request2.method === "GET" || request2.method === "HEAD") { + request2.url = (0, utils_common_js_1.setURLParameter)(request2.url, constants_js_1.URLConstants.Parameters.FORCE_BROWSER_NO_CACHE, (/* @__PURE__ */ new Date()).getTime().toString()); + } + request2.headers.delete(constants_js_1.HeaderConstants.COOKIE); + request2.headers.delete(constants_js_1.HeaderConstants.CONTENT_LENGTH); + return next(request2); + } + }; + } + } +}); + +// node_modules/@azure/storage-blob/dist/commonjs/policies/StorageRetryPolicyV2.js +var require_StorageRetryPolicyV22 = __commonJS({ + "node_modules/@azure/storage-blob/dist/commonjs/policies/StorageRetryPolicyV2.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.storageRetryPolicyName = void 0; + exports2.storageRetryPolicy = storageRetryPolicy; + var abort_controller_1 = require_commonjs11(); + var core_rest_pipeline_1 = require_commonjs6(); + var core_util_1 = require_commonjs4(); + var StorageRetryPolicyFactory_js_1 = require_StorageRetryPolicyFactory(); + var constants_js_1 = require_constants15(); + var utils_common_js_1 = require_utils_common(); + var log_js_1 = require_log5(); + exports2.storageRetryPolicyName = "storageRetryPolicy"; + var DEFAULT_RETRY_OPTIONS = { + maxRetryDelayInMs: 120 * 1e3, + maxTries: 4, + retryDelayInMs: 4 * 1e3, + retryPolicyType: StorageRetryPolicyFactory_js_1.StorageRetryPolicyType.EXPONENTIAL, + secondaryHost: "", + tryTimeoutInMs: void 0 + // Use server side default timeout strategy }; - exports2.BlobSetImmutabilityPolicyHeaders = { - serializedName: "Blob_setImmutabilityPolicyHeaders", - type: { - name: "Composite", - className: "BlobSetImmutabilityPolicyHeaders", - modelProperties: { - clientRequestId: { - serializedName: "x-ms-client-request-id", - xmlName: "x-ms-client-request-id", - type: { - name: "String" - } - }, - requestId: { - serializedName: "x-ms-request-id", - xmlName: "x-ms-request-id", - type: { - name: "String" - } - }, - version: { - serializedName: "x-ms-version", - xmlName: "x-ms-version", - type: { - name: "String" - } - }, - date: { - serializedName: "date", - xmlName: "date", - type: { - name: "DateTimeRfc1123" - } - }, - immutabilityPolicyExpiry: { - serializedName: "x-ms-immutability-policy-until-date", - xmlName: "x-ms-immutability-policy-until-date", - type: { - name: "DateTimeRfc1123" + var retriableErrors = [ + "ETIMEDOUT", + "ESOCKETTIMEDOUT", + "ECONNREFUSED", + "ECONNRESET", + "ENOENT", + "ENOTFOUND", + "TIMEOUT", + "EPIPE", + "REQUEST_SEND_ERROR" + ]; + var RETRY_ABORT_ERROR = new abort_controller_1.AbortError("The operation was aborted."); + 
function storageRetryPolicy(options = {}) { + const retryPolicyType = options.retryPolicyType ?? DEFAULT_RETRY_OPTIONS.retryPolicyType; + const maxTries = options.maxTries ?? DEFAULT_RETRY_OPTIONS.maxTries; + const retryDelayInMs = options.retryDelayInMs ?? DEFAULT_RETRY_OPTIONS.retryDelayInMs; + const maxRetryDelayInMs = options.maxRetryDelayInMs ?? DEFAULT_RETRY_OPTIONS.maxRetryDelayInMs; + const secondaryHost = options.secondaryHost ?? DEFAULT_RETRY_OPTIONS.secondaryHost; + const tryTimeoutInMs = options.tryTimeoutInMs ?? DEFAULT_RETRY_OPTIONS.tryTimeoutInMs; + function shouldRetry({ isPrimaryRetry, attempt, response, error: error3 }) { + if (attempt >= maxTries) { + log_js_1.logger.info(`RetryPolicy: Attempt(s) ${attempt} >= maxTries ${maxTries}, no further try.`); + return false; + } + if (error3) { + for (const retriableError of retriableErrors) { + if (error3.name.toUpperCase().includes(retriableError) || error3.message.toUpperCase().includes(retriableError) || error3.code && error3.code.toString().toUpperCase() === retriableError) { + log_js_1.logger.info(`RetryPolicy: Network error ${retriableError} found, will retry.`); + return true; } - }, - immutabilityPolicyMode: { - serializedName: "x-ms-immutability-policy-mode", - xmlName: "x-ms-immutability-policy-mode", - type: { - name: "Enum", - allowedValues: ["Mutable", "Unlocked", "Locked"] + } + if (error3?.code === "PARSE_ERROR" && error3?.message.startsWith(`Error "Error: Unclosed root tag`)) { + log_js_1.logger.info("RetryPolicy: Incomplete XML response likely due to service timeout, will retry."); + return true; + } + } + if (response || error3) { + const statusCode = response?.status ?? error3?.statusCode ?? 0; + if (!isPrimaryRetry && statusCode === 404) { + log_js_1.logger.info(`RetryPolicy: Secondary access with 404, will retry.`); + return true; + } + if (statusCode === 503 || statusCode === 500) { + log_js_1.logger.info(`RetryPolicy: Will retry for status code ${statusCode}.`); + return true; + } + } + if (response) { + if (response?.status >= 400) { + const copySourceError = response.headers.get(constants_js_1.HeaderConstants.X_MS_CopySourceErrorCode); + if (copySourceError !== void 0) { + switch (copySourceError) { + case "InternalError": + case "OperationTimedOut": + case "ServerBusy": + return true; + } } } } + return false; } - }; - exports2.BlobSetImmutabilityPolicyExceptionHeaders = { - serializedName: "Blob_setImmutabilityPolicyExceptionHeaders", - type: { - name: "Composite", - className: "BlobSetImmutabilityPolicyExceptionHeaders", - modelProperties: { - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } + function calculateDelay(isPrimaryRetry, attempt) { + let delayTimeInMs = 0; + if (isPrimaryRetry) { + switch (retryPolicyType) { + case StorageRetryPolicyFactory_js_1.StorageRetryPolicyType.EXPONENTIAL: + delayTimeInMs = Math.min((Math.pow(2, attempt - 1) - 1) * retryDelayInMs, maxRetryDelayInMs); + break; + case StorageRetryPolicyFactory_js_1.StorageRetryPolicyType.FIXED: + delayTimeInMs = retryDelayInMs; + break; } + } else { + delayTimeInMs = Math.random() * 1e3; } + log_js_1.logger.info(`RetryPolicy: Delay for ${delayTimeInMs}ms`); + return delayTimeInMs; } - }; - exports2.BlobDeleteImmutabilityPolicyHeaders = { - serializedName: "Blob_deleteImmutabilityPolicyHeaders", - type: { - name: "Composite", - className: "BlobDeleteImmutabilityPolicyHeaders", - modelProperties: { - clientRequestId: { - serializedName: "x-ms-client-request-id", - xmlName: 
"x-ms-client-request-id", - type: { - name: "String" - } - }, - requestId: { - serializedName: "x-ms-request-id", - xmlName: "x-ms-request-id", - type: { - name: "String" - } - }, - version: { - serializedName: "x-ms-version", - xmlName: "x-ms-version", - type: { - name: "String" + return { + name: exports2.storageRetryPolicyName, + async sendRequest(request2, next) { + if (tryTimeoutInMs) { + request2.url = (0, utils_common_js_1.setURLParameter)(request2.url, constants_js_1.URLConstants.Parameters.TIMEOUT, String(Math.floor(tryTimeoutInMs / 1e3))); + } + const primaryUrl = request2.url; + const secondaryUrl = secondaryHost ? (0, utils_common_js_1.setURLHost)(request2.url, secondaryHost) : void 0; + let secondaryHas404 = false; + let attempt = 1; + let retryAgain = true; + let response; + let error3; + while (retryAgain) { + const isPrimaryRetry = secondaryHas404 || !secondaryUrl || !["GET", "HEAD", "OPTIONS"].includes(request2.method) || attempt % 2 === 1; + request2.url = isPrimaryRetry ? primaryUrl : secondaryUrl; + response = void 0; + error3 = void 0; + try { + log_js_1.logger.info(`RetryPolicy: =====> Try=${attempt} ${isPrimaryRetry ? "Primary" : "Secondary"}`); + response = await next(request2); + secondaryHas404 = secondaryHas404 || !isPrimaryRetry && response.status === 404; + } catch (e) { + if ((0, core_rest_pipeline_1.isRestError)(e)) { + log_js_1.logger.error(`RetryPolicy: Caught error, message: ${e.message}, code: ${e.code}`); + error3 = e; + } else { + log_js_1.logger.error(`RetryPolicy: Caught error, message: ${(0, core_util_1.getErrorMessage)(e)}`); + throw e; + } } - }, - date: { - serializedName: "date", - xmlName: "date", - type: { - name: "DateTimeRfc1123" + retryAgain = shouldRetry({ isPrimaryRetry, attempt, response, error: error3 }); + if (retryAgain) { + await (0, utils_common_js_1.delay)(calculateDelay(isPrimaryRetry, attempt), request2.abortSignal, RETRY_ABORT_ERROR); } + attempt++; + } + if (response) { + return response; } + throw error3 ?? 
new core_rest_pipeline_1.RestError("RetryPolicy failed without known error."); + } + }; + } + } +}); + +// node_modules/@azure/storage-blob/dist/commonjs/policies/StorageSharedKeyCredentialPolicyV2.js +var require_StorageSharedKeyCredentialPolicyV22 = __commonJS({ + "node_modules/@azure/storage-blob/dist/commonjs/policies/StorageSharedKeyCredentialPolicyV2.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.storageSharedKeyCredentialPolicyName = void 0; + exports2.storageSharedKeyCredentialPolicy = storageSharedKeyCredentialPolicy; + var node_crypto_1 = require("node:crypto"); + var constants_js_1 = require_constants15(); + var utils_common_js_1 = require_utils_common(); + var SharedKeyComparator_js_1 = require_SharedKeyComparator(); + exports2.storageSharedKeyCredentialPolicyName = "storageSharedKeyCredentialPolicy"; + function storageSharedKeyCredentialPolicy(options) { + function signRequest(request2) { + request2.headers.set(constants_js_1.HeaderConstants.X_MS_DATE, (/* @__PURE__ */ new Date()).toUTCString()); + if (request2.body && (typeof request2.body === "string" || Buffer.isBuffer(request2.body)) && request2.body.length > 0) { + request2.headers.set(constants_js_1.HeaderConstants.CONTENT_LENGTH, Buffer.byteLength(request2.body)); } + const stringToSign = [ + request2.method.toUpperCase(), + getHeaderValueToSign(request2, constants_js_1.HeaderConstants.CONTENT_LANGUAGE), + getHeaderValueToSign(request2, constants_js_1.HeaderConstants.CONTENT_ENCODING), + getHeaderValueToSign(request2, constants_js_1.HeaderConstants.CONTENT_LENGTH), + getHeaderValueToSign(request2, constants_js_1.HeaderConstants.CONTENT_MD5), + getHeaderValueToSign(request2, constants_js_1.HeaderConstants.CONTENT_TYPE), + getHeaderValueToSign(request2, constants_js_1.HeaderConstants.DATE), + getHeaderValueToSign(request2, constants_js_1.HeaderConstants.IF_MODIFIED_SINCE), + getHeaderValueToSign(request2, constants_js_1.HeaderConstants.IF_MATCH), + getHeaderValueToSign(request2, constants_js_1.HeaderConstants.IF_NONE_MATCH), + getHeaderValueToSign(request2, constants_js_1.HeaderConstants.IF_UNMODIFIED_SINCE), + getHeaderValueToSign(request2, constants_js_1.HeaderConstants.RANGE) + ].join("\n") + "\n" + getCanonicalizedHeadersString(request2) + getCanonicalizedResourceString(request2); + const signature = (0, node_crypto_1.createHmac)("sha256", options.accountKey).update(stringToSign, "utf8").digest("base64"); + request2.headers.set(constants_js_1.HeaderConstants.AUTHORIZATION, `SharedKey ${options.accountName}:${signature}`); } - }; - exports2.BlobDeleteImmutabilityPolicyExceptionHeaders = { - serializedName: "Blob_deleteImmutabilityPolicyExceptionHeaders", - type: { - name: "Composite", - className: "BlobDeleteImmutabilityPolicyExceptionHeaders", - modelProperties: { - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } - } + function getHeaderValueToSign(request2, headerName) { + const value = request2.headers.get(headerName); + if (!value) { + return ""; } + if (headerName === constants_js_1.HeaderConstants.CONTENT_LENGTH && value === "0") { + return ""; + } + return value; } - }; - exports2.BlobSetLegalHoldHeaders = { - serializedName: "Blob_setLegalHoldHeaders", - type: { - name: "Composite", - className: "BlobSetLegalHoldHeaders", - modelProperties: { - clientRequestId: { - serializedName: "x-ms-client-request-id", - xmlName: "x-ms-client-request-id", - type: { - name: "String" - } - }, - 
requestId: { - serializedName: "x-ms-request-id", - xmlName: "x-ms-request-id", - type: { - name: "String" - } - }, - version: { - serializedName: "x-ms-version", - xmlName: "x-ms-version", - type: { - name: "String" - } - }, - date: { - serializedName: "date", - xmlName: "date", - type: { - name: "DateTimeRfc1123" - } - }, - legalHold: { - serializedName: "x-ms-legal-hold", - xmlName: "x-ms-legal-hold", - type: { - name: "Boolean" - } + function getCanonicalizedHeadersString(request2) { + let headersArray = []; + for (const [name, value] of request2.headers) { + if (name.toLowerCase().startsWith(constants_js_1.HeaderConstants.PREFIX_FOR_STORAGE)) { + headersArray.push({ name, value }); } } + headersArray.sort((a, b) => { + return (0, SharedKeyComparator_js_1.compareHeader)(a.name.toLowerCase(), b.name.toLowerCase()); + }); + headersArray = headersArray.filter((value, index, array) => { + if (index > 0 && value.name.toLowerCase() === array[index - 1].name.toLowerCase()) { + return false; + } + return true; + }); + let canonicalizedHeadersStringToSign = ""; + headersArray.forEach((header) => { + canonicalizedHeadersStringToSign += `${header.name.toLowerCase().trimRight()}:${header.value.trimLeft()} +`; + }); + return canonicalizedHeadersStringToSign; } - }; - exports2.BlobSetLegalHoldExceptionHeaders = { - serializedName: "Blob_setLegalHoldExceptionHeaders", - type: { - name: "Composite", - className: "BlobSetLegalHoldExceptionHeaders", - modelProperties: { - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" + function getCanonicalizedResourceString(request2) { + const path13 = (0, utils_common_js_1.getURLPath)(request2.url) || "/"; + let canonicalizedResourceString = ""; + canonicalizedResourceString += `/${options.accountName}${path13}`; + const queries = (0, utils_common_js_1.getURLQueries)(request2.url); + const lowercaseQueries = {}; + if (queries) { + const queryKeys = []; + for (const key in queries) { + if (Object.prototype.hasOwnProperty.call(queries, key)) { + const lowercaseKey = key.toLowerCase(); + lowercaseQueries[lowercaseKey] = queries[key]; + queryKeys.push(lowercaseKey); } } + queryKeys.sort(); + for (const key of queryKeys) { + canonicalizedResourceString += ` +${key}:${decodeURIComponent(lowercaseQueries[key])}`; + } } + return canonicalizedResourceString; } - }; - exports2.BlobSetMetadataHeaders = { - serializedName: "Blob_setMetadataHeaders", - type: { - name: "Composite", - className: "BlobSetMetadataHeaders", - modelProperties: { - etag: { - serializedName: "etag", - xmlName: "etag", - type: { - name: "String" - } - }, - lastModified: { - serializedName: "last-modified", - xmlName: "last-modified", - type: { - name: "DateTimeRfc1123" - } - }, - clientRequestId: { - serializedName: "x-ms-client-request-id", - xmlName: "x-ms-client-request-id", - type: { - name: "String" - } - }, - requestId: { - serializedName: "x-ms-request-id", - xmlName: "x-ms-request-id", - type: { - name: "String" - } - }, - version: { - serializedName: "x-ms-version", - xmlName: "x-ms-version", - type: { - name: "String" - } - }, - versionId: { - serializedName: "x-ms-version-id", - xmlName: "x-ms-version-id", - type: { - name: "String" - } - }, - date: { - serializedName: "date", - xmlName: "date", - type: { - name: "DateTimeRfc1123" - } - }, - isServerEncrypted: { - serializedName: "x-ms-request-server-encrypted", - xmlName: "x-ms-request-server-encrypted", - type: { - name: "Boolean" - } - }, - encryptionKeySha256: { - serializedName: 
"x-ms-encryption-key-sha256", - xmlName: "x-ms-encryption-key-sha256", - type: { - name: "String" - } - }, - encryptionScope: { - serializedName: "x-ms-encryption-scope", - xmlName: "x-ms-encryption-scope", - type: { - name: "String" - } - }, - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } - } + return { + name: exports2.storageSharedKeyCredentialPolicyName, + async sendRequest(request2, next) { + signRequest(request2); + return next(request2); } + }; + } + } +}); + +// node_modules/@azure/storage-blob/dist/commonjs/policies/StorageBrowserPolicy.js +var require_StorageBrowserPolicy2 = __commonJS({ + "node_modules/@azure/storage-blob/dist/commonjs/policies/StorageBrowserPolicy.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.StorageBrowserPolicy = void 0; + var RequestPolicy_js_1 = require_RequestPolicy(); + var core_util_1 = require_commonjs4(); + var constants_js_1 = require_constants15(); + var utils_common_js_1 = require_utils_common(); + var StorageBrowserPolicy = class extends RequestPolicy_js_1.BaseRequestPolicy { + /** + * Creates an instance of StorageBrowserPolicy. + * @param nextPolicy - + * @param options - + */ + // The base class has a protected constructor. Adding a public one to enable constructing of this class. + /* eslint-disable-next-line @typescript-eslint/no-useless-constructor*/ + constructor(nextPolicy, options) { + super(nextPolicy, options); } - }; - exports2.BlobSetMetadataExceptionHeaders = { - serializedName: "Blob_setMetadataExceptionHeaders", - type: { - name: "Composite", - className: "BlobSetMetadataExceptionHeaders", - modelProperties: { - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } - } + /** + * Sends out request. 
+ * + * @param request - + */ + async sendRequest(request2) { + if (core_util_1.isNodeLike) { + return this._nextPolicy.sendRequest(request2); } + if (request2.method.toUpperCase() === "GET" || request2.method.toUpperCase() === "HEAD") { + request2.url = (0, utils_common_js_1.setURLParameter)(request2.url, constants_js_1.URLConstants.Parameters.FORCE_BROWSER_NO_CACHE, (/* @__PURE__ */ new Date()).getTime().toString()); + } + request2.headers.remove(constants_js_1.HeaderConstants.COOKIE); + request2.headers.remove(constants_js_1.HeaderConstants.CONTENT_LENGTH); + return this._nextPolicy.sendRequest(request2); } }; - exports2.BlobAcquireLeaseHeaders = { - serializedName: "Blob_acquireLeaseHeaders", - type: { - name: "Composite", - className: "BlobAcquireLeaseHeaders", - modelProperties: { - etag: { - serializedName: "etag", - xmlName: "etag", - type: { - name: "String" - } - }, - lastModified: { - serializedName: "last-modified", - xmlName: "last-modified", - type: { - name: "DateTimeRfc1123" - } - }, - leaseId: { - serializedName: "x-ms-lease-id", - xmlName: "x-ms-lease-id", - type: { - name: "String" - } - }, - clientRequestId: { - serializedName: "x-ms-client-request-id", - xmlName: "x-ms-client-request-id", - type: { - name: "String" - } - }, - requestId: { - serializedName: "x-ms-request-id", - xmlName: "x-ms-request-id", - type: { - name: "String" - } - }, - version: { - serializedName: "x-ms-version", - xmlName: "x-ms-version", - type: { - name: "String" - } - }, - date: { - serializedName: "date", - xmlName: "date", - type: { - name: "DateTimeRfc1123" - } - } - } + exports2.StorageBrowserPolicy = StorageBrowserPolicy; + } +}); + +// node_modules/@azure/storage-blob/dist/commonjs/StorageBrowserPolicyFactory.js +var require_StorageBrowserPolicyFactory2 = __commonJS({ + "node_modules/@azure/storage-blob/dist/commonjs/StorageBrowserPolicyFactory.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.StorageBrowserPolicyFactory = exports2.StorageBrowserPolicy = void 0; + var StorageBrowserPolicy_js_1 = require_StorageBrowserPolicy2(); + Object.defineProperty(exports2, "StorageBrowserPolicy", { enumerable: true, get: function() { + return StorageBrowserPolicy_js_1.StorageBrowserPolicy; + } }); + var StorageBrowserPolicyFactory = class { + /** + * Creates a StorageBrowserPolicyFactory object. 
+ * + * @param nextPolicy - + * @param options - + */ + create(nextPolicy, options) { + return new StorageBrowserPolicy_js_1.StorageBrowserPolicy(nextPolicy, options); } }; - exports2.BlobAcquireLeaseExceptionHeaders = { - serializedName: "Blob_acquireLeaseExceptionHeaders", - type: { - name: "Composite", - className: "BlobAcquireLeaseExceptionHeaders", - modelProperties: { - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } - } + exports2.StorageBrowserPolicyFactory = StorageBrowserPolicyFactory; + } +}); + +// node_modules/@azure/storage-blob/dist/commonjs/policies/StorageCorrectContentLengthPolicy.js +var require_StorageCorrectContentLengthPolicy2 = __commonJS({ + "node_modules/@azure/storage-blob/dist/commonjs/policies/StorageCorrectContentLengthPolicy.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.storageCorrectContentLengthPolicyName = void 0; + exports2.storageCorrectContentLengthPolicy = storageCorrectContentLengthPolicy; + var constants_js_1 = require_constants15(); + exports2.storageCorrectContentLengthPolicyName = "StorageCorrectContentLengthPolicy"; + function storageCorrectContentLengthPolicy() { + function correctContentLength(request2) { + if (request2.body && (typeof request2.body === "string" || Buffer.isBuffer(request2.body)) && request2.body.length > 0) { + request2.headers.set(constants_js_1.HeaderConstants.CONTENT_LENGTH, Buffer.byteLength(request2.body)); } } - }; - exports2.BlobReleaseLeaseHeaders = { - serializedName: "Blob_releaseLeaseHeaders", - type: { - name: "Composite", - className: "BlobReleaseLeaseHeaders", - modelProperties: { - etag: { - serializedName: "etag", - xmlName: "etag", - type: { - name: "String" - } - }, - lastModified: { - serializedName: "last-modified", - xmlName: "last-modified", - type: { - name: "DateTimeRfc1123" - } - }, - clientRequestId: { - serializedName: "x-ms-client-request-id", - xmlName: "x-ms-client-request-id", - type: { - name: "String" - } - }, - requestId: { - serializedName: "x-ms-request-id", - xmlName: "x-ms-request-id", - type: { - name: "String" - } - }, - version: { - serializedName: "x-ms-version", - xmlName: "x-ms-version", - type: { - name: "String" - } - }, - date: { - serializedName: "date", - xmlName: "date", - type: { - name: "DateTimeRfc1123" - } - } + return { + name: exports2.storageCorrectContentLengthPolicyName, + async sendRequest(request2, next) { + correctContentLength(request2); + return next(request2); } + }; + } + } +}); + +// node_modules/@azure/storage-blob/dist/commonjs/Pipeline.js +var require_Pipeline = __commonJS({ + "node_modules/@azure/storage-blob/dist/commonjs/Pipeline.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.Pipeline = exports2.StorageOAuthScopes = void 0; + exports2.isPipelineLike = isPipelineLike; + exports2.newPipeline = newPipeline; + exports2.getCoreClientOptions = getCoreClientOptions; + exports2.getCredentialFromPipeline = getCredentialFromPipeline; + var core_http_compat_1 = require_commonjs9(); + var core_rest_pipeline_1 = require_commonjs6(); + var core_client_1 = require_commonjs8(); + var core_xml_1 = require_commonjs10(); + var core_auth_1 = require_commonjs7(); + var log_js_1 = require_log5(); + var StorageRetryPolicyFactory_js_1 = require_StorageRetryPolicyFactory(); + var StorageSharedKeyCredential_js_1 = require_StorageSharedKeyCredential(); + var AnonymousCredential_js_1 = 
require_AnonymousCredential(); + var constants_js_1 = require_constants15(); + Object.defineProperty(exports2, "StorageOAuthScopes", { enumerable: true, get: function() { + return constants_js_1.StorageOAuthScopes; + } }); + var storage_common_1 = require_commonjs13(); + var StorageBrowserPolicyV2_js_1 = require_StorageBrowserPolicyV22(); + var StorageRetryPolicyV2_js_1 = require_StorageRetryPolicyV22(); + var StorageSharedKeyCredentialPolicyV2_js_1 = require_StorageSharedKeyCredentialPolicyV22(); + var StorageBrowserPolicyFactory_js_1 = require_StorageBrowserPolicyFactory2(); + var StorageCorrectContentLengthPolicy_js_1 = require_StorageCorrectContentLengthPolicy2(); + function isPipelineLike(pipeline) { + if (!pipeline || typeof pipeline !== "object") { + return false; + } + const castPipeline = pipeline; + return Array.isArray(castPipeline.factories) && typeof castPipeline.options === "object" && typeof castPipeline.toServiceClientOptions === "function"; + } + var Pipeline = class { + /** + * A list of chained request policy factories. + */ + factories; + /** + * Configures pipeline logger and HTTP client. + */ + options; + /** + * Creates an instance of Pipeline. Customize HTTPClient by implementing IHttpClient interface. + * + * @param factories - + * @param options - + */ + constructor(factories, options = {}) { + this.factories = factories; + this.options = options; + } + /** + * Transfer Pipeline object to ServiceClientOptions object which is required by + * ServiceClient constructor. + * + * @returns The ServiceClientOptions object from this Pipeline. + */ + toServiceClientOptions() { + return { + httpClient: this.options.httpClient, + requestPolicyFactories: this.factories + }; } }; - exports2.BlobReleaseLeaseExceptionHeaders = { - serializedName: "Blob_releaseLeaseExceptionHeaders", - type: { - name: "Composite", - className: "BlobReleaseLeaseExceptionHeaders", - modelProperties: { - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } - } + exports2.Pipeline = Pipeline; + function newPipeline(credential, pipelineOptions = {}) { + if (!credential) { + credential = new AnonymousCredential_js_1.AnonymousCredential(); + } + const pipeline = new Pipeline([], pipelineOptions); + pipeline._credential = credential; + return pipeline; + } + function processDownlevelPipeline(pipeline) { + const knownFactoryFunctions = [ + isAnonymousCredential, + isStorageSharedKeyCredential, + isCoreHttpBearerTokenFactory, + isStorageBrowserPolicyFactory, + isStorageRetryPolicyFactory, + isStorageTelemetryPolicyFactory, + isCoreHttpPolicyFactory + ]; + if (pipeline.factories.length) { + const novelFactories = pipeline.factories.filter((factory) => { + return !knownFactoryFunctions.some((knownFactory) => knownFactory(factory)); + }); + if (novelFactories.length) { + const hasInjector = novelFactories.some((factory) => isInjectorPolicyFactory(factory)); + return { + wrappedPolicies: (0, core_http_compat_1.createRequestPolicyFactoryPolicy)(novelFactories), + afterRetry: hasInjector + }; } } - }; - exports2.BlobRenewLeaseHeaders = { - serializedName: "Blob_renewLeaseHeaders", - type: { - name: "Composite", - className: "BlobRenewLeaseHeaders", - modelProperties: { - etag: { - serializedName: "etag", - xmlName: "etag", - type: { - name: "String" - } - }, - lastModified: { - serializedName: "last-modified", - xmlName: "last-modified", - type: { - name: "DateTimeRfc1123" - } - }, - leaseId: { - serializedName: "x-ms-lease-id", - xmlName: 
"x-ms-lease-id", - type: { - name: "String" - } - }, - clientRequestId: { - serializedName: "x-ms-client-request-id", - xmlName: "x-ms-client-request-id", - type: { - name: "String" - } + return void 0; + } + function getCoreClientOptions(pipeline) { + const { httpClient: v1Client, ...restOptions } = pipeline.options; + let httpClient = pipeline._coreHttpClient; + if (!httpClient) { + httpClient = v1Client ? (0, core_http_compat_1.convertHttpClient)(v1Client) : (0, storage_common_1.getCachedDefaultHttpClient)(); + pipeline._coreHttpClient = httpClient; + } + let corePipeline = pipeline._corePipeline; + if (!corePipeline) { + const packageDetails = `azsdk-js-azure-storage-blob/${constants_js_1.SDK_VERSION}`; + const userAgentPrefix = restOptions.userAgentOptions && restOptions.userAgentOptions.userAgentPrefix ? `${restOptions.userAgentOptions.userAgentPrefix} ${packageDetails}` : `${packageDetails}`; + corePipeline = (0, core_client_1.createClientPipeline)({ + ...restOptions, + loggingOptions: { + additionalAllowedHeaderNames: constants_js_1.StorageBlobLoggingAllowedHeaderNames, + additionalAllowedQueryParameters: constants_js_1.StorageBlobLoggingAllowedQueryParameters, + logger: log_js_1.logger.info }, - requestId: { - serializedName: "x-ms-request-id", - xmlName: "x-ms-request-id", - type: { - name: "String" - } + userAgentOptions: { + userAgentPrefix }, - version: { - serializedName: "x-ms-version", - xmlName: "x-ms-version", - type: { - name: "String" + serializationOptions: { + stringifyXML: core_xml_1.stringifyXML, + serializerOptions: { + xml: { + // Use customized XML char key of "#" so we can deserialize metadata + // with "_" key + xmlCharKey: "#" + } } }, - date: { - serializedName: "date", - xmlName: "date", - type: { - name: "DateTimeRfc1123" + deserializationOptions: { + parseXML: core_xml_1.parseXML, + serializerOptions: { + xml: { + // Use customized XML char key of "#" so we can deserialize metadata + // with "_" key + xmlCharKey: "#" + } } } + }); + corePipeline.removePolicy({ phase: "Retry" }); + corePipeline.removePolicy({ name: core_rest_pipeline_1.decompressResponsePolicyName }); + corePipeline.addPolicy((0, StorageCorrectContentLengthPolicy_js_1.storageCorrectContentLengthPolicy)()); + corePipeline.addPolicy((0, StorageRetryPolicyV2_js_1.storageRetryPolicy)(restOptions.retryOptions), { phase: "Retry" }); + corePipeline.addPolicy((0, storage_common_1.storageRequestFailureDetailsParserPolicy)()); + corePipeline.addPolicy((0, StorageBrowserPolicyV2_js_1.storageBrowserPolicy)()); + const downlevelResults = processDownlevelPipeline(pipeline); + if (downlevelResults) { + corePipeline.addPolicy(downlevelResults.wrappedPolicies, downlevelResults.afterRetry ? { afterPhase: "Retry" } : void 0); + } + const credential = getCredentialFromPipeline(pipeline); + if ((0, core_auth_1.isTokenCredential)(credential)) { + corePipeline.addPolicy((0, core_rest_pipeline_1.bearerTokenAuthenticationPolicy)({ + credential, + scopes: restOptions.audience ?? 
constants_js_1.StorageOAuthScopes, + challengeCallbacks: { authorizeRequestOnChallenge: core_client_1.authorizeRequestOnTenantChallenge } + }), { phase: "Sign" }); + } else if (credential instanceof StorageSharedKeyCredential_js_1.StorageSharedKeyCredential) { + corePipeline.addPolicy((0, StorageSharedKeyCredentialPolicyV2_js_1.storageSharedKeyCredentialPolicy)({ + accountName: credential.accountName, + accountKey: credential.accountKey + }), { phase: "Sign" }); } + pipeline._corePipeline = corePipeline; } - }; - exports2.BlobRenewLeaseExceptionHeaders = { - serializedName: "Blob_renewLeaseExceptionHeaders", - type: { - name: "Composite", - className: "BlobRenewLeaseExceptionHeaders", - modelProperties: { - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } - } + return { + ...restOptions, + allowInsecureConnection: true, + httpClient, + pipeline: corePipeline + }; + } + function getCredentialFromPipeline(pipeline) { + if (pipeline._credential) { + return pipeline._credential; + } + let credential = new AnonymousCredential_js_1.AnonymousCredential(); + for (const factory of pipeline.factories) { + if ((0, core_auth_1.isTokenCredential)(factory.credential)) { + credential = factory.credential; + } else if (isStorageSharedKeyCredential(factory)) { + return factory; } } - }; - exports2.BlobChangeLeaseHeaders = { - serializedName: "Blob_changeLeaseHeaders", + return credential; + } + function isStorageSharedKeyCredential(factory) { + if (factory instanceof StorageSharedKeyCredential_js_1.StorageSharedKeyCredential) { + return true; + } + return factory.constructor.name === "StorageSharedKeyCredential"; + } + function isAnonymousCredential(factory) { + if (factory instanceof AnonymousCredential_js_1.AnonymousCredential) { + return true; + } + return factory.constructor.name === "AnonymousCredential"; + } + function isCoreHttpBearerTokenFactory(factory) { + return (0, core_auth_1.isTokenCredential)(factory.credential); + } + function isStorageBrowserPolicyFactory(factory) { + if (factory instanceof StorageBrowserPolicyFactory_js_1.StorageBrowserPolicyFactory) { + return true; + } + return factory.constructor.name === "StorageBrowserPolicyFactory"; + } + function isStorageRetryPolicyFactory(factory) { + if (factory instanceof StorageRetryPolicyFactory_js_1.StorageRetryPolicyFactory) { + return true; + } + return factory.constructor.name === "StorageRetryPolicyFactory"; + } + function isStorageTelemetryPolicyFactory(factory) { + return factory.constructor.name === "TelemetryPolicyFactory"; + } + function isInjectorPolicyFactory(factory) { + return factory.constructor.name === "InjectorPolicyFactory"; + } + function isCoreHttpPolicyFactory(factory) { + const knownPolicies = [ + "GenerateClientRequestIdPolicy", + "TracingPolicy", + "LogPolicy", + "ProxyPolicy", + "DisableResponseDecompressionPolicy", + "KeepAlivePolicy", + "DeserializationPolicy" + ]; + const mockHttpClient = { + sendRequest: async (request2) => { + return { + request: request2, + headers: request2.headers.clone(), + status: 500 + }; + } + }; + const mockRequestPolicyOptions = { + log(_logLevel, _message) { + }, + shouldLog(_logLevel) { + return false; + } + }; + const policyInstance = factory.create(mockHttpClient, mockRequestPolicyOptions); + const policyName = policyInstance.constructor.name; + return knownPolicies.some((knownPolicyName) => { + return policyName.startsWith(knownPolicyName); + }); + } + } +}); + +// 
node_modules/@azure/storage-blob/dist/commonjs/generated/src/models/index.js +var require_models = __commonJS({ + "node_modules/@azure/storage-blob/dist/commonjs/generated/src/models/index.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.KnownStorageErrorCode = exports2.KnownBlobExpiryOptions = exports2.KnownFileShareTokenIntent = exports2.KnownEncryptionAlgorithmType = void 0; + var KnownEncryptionAlgorithmType; + (function(KnownEncryptionAlgorithmType2) { + KnownEncryptionAlgorithmType2["AES256"] = "AES256"; + })(KnownEncryptionAlgorithmType || (exports2.KnownEncryptionAlgorithmType = KnownEncryptionAlgorithmType = {})); + var KnownFileShareTokenIntent; + (function(KnownFileShareTokenIntent2) { + KnownFileShareTokenIntent2["Backup"] = "backup"; + })(KnownFileShareTokenIntent || (exports2.KnownFileShareTokenIntent = KnownFileShareTokenIntent = {})); + var KnownBlobExpiryOptions; + (function(KnownBlobExpiryOptions2) { + KnownBlobExpiryOptions2["NeverExpire"] = "NeverExpire"; + KnownBlobExpiryOptions2["RelativeToCreation"] = "RelativeToCreation"; + KnownBlobExpiryOptions2["RelativeToNow"] = "RelativeToNow"; + KnownBlobExpiryOptions2["Absolute"] = "Absolute"; + })(KnownBlobExpiryOptions || (exports2.KnownBlobExpiryOptions = KnownBlobExpiryOptions = {})); + var KnownStorageErrorCode; + (function(KnownStorageErrorCode2) { + KnownStorageErrorCode2["AccountAlreadyExists"] = "AccountAlreadyExists"; + KnownStorageErrorCode2["AccountBeingCreated"] = "AccountBeingCreated"; + KnownStorageErrorCode2["AccountIsDisabled"] = "AccountIsDisabled"; + KnownStorageErrorCode2["AuthenticationFailed"] = "AuthenticationFailed"; + KnownStorageErrorCode2["AuthorizationFailure"] = "AuthorizationFailure"; + KnownStorageErrorCode2["ConditionHeadersNotSupported"] = "ConditionHeadersNotSupported"; + KnownStorageErrorCode2["ConditionNotMet"] = "ConditionNotMet"; + KnownStorageErrorCode2["EmptyMetadataKey"] = "EmptyMetadataKey"; + KnownStorageErrorCode2["InsufficientAccountPermissions"] = "InsufficientAccountPermissions"; + KnownStorageErrorCode2["InternalError"] = "InternalError"; + KnownStorageErrorCode2["InvalidAuthenticationInfo"] = "InvalidAuthenticationInfo"; + KnownStorageErrorCode2["InvalidHeaderValue"] = "InvalidHeaderValue"; + KnownStorageErrorCode2["InvalidHttpVerb"] = "InvalidHttpVerb"; + KnownStorageErrorCode2["InvalidInput"] = "InvalidInput"; + KnownStorageErrorCode2["InvalidMd5"] = "InvalidMd5"; + KnownStorageErrorCode2["InvalidMetadata"] = "InvalidMetadata"; + KnownStorageErrorCode2["InvalidQueryParameterValue"] = "InvalidQueryParameterValue"; + KnownStorageErrorCode2["InvalidRange"] = "InvalidRange"; + KnownStorageErrorCode2["InvalidResourceName"] = "InvalidResourceName"; + KnownStorageErrorCode2["InvalidUri"] = "InvalidUri"; + KnownStorageErrorCode2["InvalidXmlDocument"] = "InvalidXmlDocument"; + KnownStorageErrorCode2["InvalidXmlNodeValue"] = "InvalidXmlNodeValue"; + KnownStorageErrorCode2["Md5Mismatch"] = "Md5Mismatch"; + KnownStorageErrorCode2["MetadataTooLarge"] = "MetadataTooLarge"; + KnownStorageErrorCode2["MissingContentLengthHeader"] = "MissingContentLengthHeader"; + KnownStorageErrorCode2["MissingRequiredQueryParameter"] = "MissingRequiredQueryParameter"; + KnownStorageErrorCode2["MissingRequiredHeader"] = "MissingRequiredHeader"; + KnownStorageErrorCode2["MissingRequiredXmlNode"] = "MissingRequiredXmlNode"; + KnownStorageErrorCode2["MultipleConditionHeadersNotSupported"] = "MultipleConditionHeadersNotSupported"; + 
KnownStorageErrorCode2["OperationTimedOut"] = "OperationTimedOut"; + KnownStorageErrorCode2["OutOfRangeInput"] = "OutOfRangeInput"; + KnownStorageErrorCode2["OutOfRangeQueryParameterValue"] = "OutOfRangeQueryParameterValue"; + KnownStorageErrorCode2["RequestBodyTooLarge"] = "RequestBodyTooLarge"; + KnownStorageErrorCode2["ResourceTypeMismatch"] = "ResourceTypeMismatch"; + KnownStorageErrorCode2["RequestUrlFailedToParse"] = "RequestUrlFailedToParse"; + KnownStorageErrorCode2["ResourceAlreadyExists"] = "ResourceAlreadyExists"; + KnownStorageErrorCode2["ResourceNotFound"] = "ResourceNotFound"; + KnownStorageErrorCode2["ServerBusy"] = "ServerBusy"; + KnownStorageErrorCode2["UnsupportedHeader"] = "UnsupportedHeader"; + KnownStorageErrorCode2["UnsupportedXmlNode"] = "UnsupportedXmlNode"; + KnownStorageErrorCode2["UnsupportedQueryParameter"] = "UnsupportedQueryParameter"; + KnownStorageErrorCode2["UnsupportedHttpVerb"] = "UnsupportedHttpVerb"; + KnownStorageErrorCode2["AppendPositionConditionNotMet"] = "AppendPositionConditionNotMet"; + KnownStorageErrorCode2["BlobAlreadyExists"] = "BlobAlreadyExists"; + KnownStorageErrorCode2["BlobImmutableDueToPolicy"] = "BlobImmutableDueToPolicy"; + KnownStorageErrorCode2["BlobNotFound"] = "BlobNotFound"; + KnownStorageErrorCode2["BlobOverwritten"] = "BlobOverwritten"; + KnownStorageErrorCode2["BlobTierInadequateForContentLength"] = "BlobTierInadequateForContentLength"; + KnownStorageErrorCode2["BlobUsesCustomerSpecifiedEncryption"] = "BlobUsesCustomerSpecifiedEncryption"; + KnownStorageErrorCode2["BlockCountExceedsLimit"] = "BlockCountExceedsLimit"; + KnownStorageErrorCode2["BlockListTooLong"] = "BlockListTooLong"; + KnownStorageErrorCode2["CannotChangeToLowerTier"] = "CannotChangeToLowerTier"; + KnownStorageErrorCode2["CannotVerifyCopySource"] = "CannotVerifyCopySource"; + KnownStorageErrorCode2["ContainerAlreadyExists"] = "ContainerAlreadyExists"; + KnownStorageErrorCode2["ContainerBeingDeleted"] = "ContainerBeingDeleted"; + KnownStorageErrorCode2["ContainerDisabled"] = "ContainerDisabled"; + KnownStorageErrorCode2["ContainerNotFound"] = "ContainerNotFound"; + KnownStorageErrorCode2["ContentLengthLargerThanTierLimit"] = "ContentLengthLargerThanTierLimit"; + KnownStorageErrorCode2["CopyAcrossAccountsNotSupported"] = "CopyAcrossAccountsNotSupported"; + KnownStorageErrorCode2["CopyIdMismatch"] = "CopyIdMismatch"; + KnownStorageErrorCode2["FeatureVersionMismatch"] = "FeatureVersionMismatch"; + KnownStorageErrorCode2["IncrementalCopyBlobMismatch"] = "IncrementalCopyBlobMismatch"; + KnownStorageErrorCode2["IncrementalCopyOfEarlierVersionSnapshotNotAllowed"] = "IncrementalCopyOfEarlierVersionSnapshotNotAllowed"; + KnownStorageErrorCode2["IncrementalCopySourceMustBeSnapshot"] = "IncrementalCopySourceMustBeSnapshot"; + KnownStorageErrorCode2["InfiniteLeaseDurationRequired"] = "InfiniteLeaseDurationRequired"; + KnownStorageErrorCode2["InvalidBlobOrBlock"] = "InvalidBlobOrBlock"; + KnownStorageErrorCode2["InvalidBlobTier"] = "InvalidBlobTier"; + KnownStorageErrorCode2["InvalidBlobType"] = "InvalidBlobType"; + KnownStorageErrorCode2["InvalidBlockId"] = "InvalidBlockId"; + KnownStorageErrorCode2["InvalidBlockList"] = "InvalidBlockList"; + KnownStorageErrorCode2["InvalidOperation"] = "InvalidOperation"; + KnownStorageErrorCode2["InvalidPageRange"] = "InvalidPageRange"; + KnownStorageErrorCode2["InvalidSourceBlobType"] = "InvalidSourceBlobType"; + KnownStorageErrorCode2["InvalidSourceBlobUrl"] = "InvalidSourceBlobUrl"; + 
KnownStorageErrorCode2["InvalidVersionForPageBlobOperation"] = "InvalidVersionForPageBlobOperation"; + KnownStorageErrorCode2["LeaseAlreadyPresent"] = "LeaseAlreadyPresent"; + KnownStorageErrorCode2["LeaseAlreadyBroken"] = "LeaseAlreadyBroken"; + KnownStorageErrorCode2["LeaseIdMismatchWithBlobOperation"] = "LeaseIdMismatchWithBlobOperation"; + KnownStorageErrorCode2["LeaseIdMismatchWithContainerOperation"] = "LeaseIdMismatchWithContainerOperation"; + KnownStorageErrorCode2["LeaseIdMismatchWithLeaseOperation"] = "LeaseIdMismatchWithLeaseOperation"; + KnownStorageErrorCode2["LeaseIdMissing"] = "LeaseIdMissing"; + KnownStorageErrorCode2["LeaseIsBreakingAndCannotBeAcquired"] = "LeaseIsBreakingAndCannotBeAcquired"; + KnownStorageErrorCode2["LeaseIsBreakingAndCannotBeChanged"] = "LeaseIsBreakingAndCannotBeChanged"; + KnownStorageErrorCode2["LeaseIsBrokenAndCannotBeRenewed"] = "LeaseIsBrokenAndCannotBeRenewed"; + KnownStorageErrorCode2["LeaseLost"] = "LeaseLost"; + KnownStorageErrorCode2["LeaseNotPresentWithBlobOperation"] = "LeaseNotPresentWithBlobOperation"; + KnownStorageErrorCode2["LeaseNotPresentWithContainerOperation"] = "LeaseNotPresentWithContainerOperation"; + KnownStorageErrorCode2["LeaseNotPresentWithLeaseOperation"] = "LeaseNotPresentWithLeaseOperation"; + KnownStorageErrorCode2["MaxBlobSizeConditionNotMet"] = "MaxBlobSizeConditionNotMet"; + KnownStorageErrorCode2["NoAuthenticationInformation"] = "NoAuthenticationInformation"; + KnownStorageErrorCode2["NoPendingCopyOperation"] = "NoPendingCopyOperation"; + KnownStorageErrorCode2["OperationNotAllowedOnIncrementalCopyBlob"] = "OperationNotAllowedOnIncrementalCopyBlob"; + KnownStorageErrorCode2["PendingCopyOperation"] = "PendingCopyOperation"; + KnownStorageErrorCode2["PreviousSnapshotCannotBeNewer"] = "PreviousSnapshotCannotBeNewer"; + KnownStorageErrorCode2["PreviousSnapshotNotFound"] = "PreviousSnapshotNotFound"; + KnownStorageErrorCode2["PreviousSnapshotOperationNotSupported"] = "PreviousSnapshotOperationNotSupported"; + KnownStorageErrorCode2["SequenceNumberConditionNotMet"] = "SequenceNumberConditionNotMet"; + KnownStorageErrorCode2["SequenceNumberIncrementTooLarge"] = "SequenceNumberIncrementTooLarge"; + KnownStorageErrorCode2["SnapshotCountExceeded"] = "SnapshotCountExceeded"; + KnownStorageErrorCode2["SnapshotOperationRateExceeded"] = "SnapshotOperationRateExceeded"; + KnownStorageErrorCode2["SnapshotsPresent"] = "SnapshotsPresent"; + KnownStorageErrorCode2["SourceConditionNotMet"] = "SourceConditionNotMet"; + KnownStorageErrorCode2["SystemInUse"] = "SystemInUse"; + KnownStorageErrorCode2["TargetConditionNotMet"] = "TargetConditionNotMet"; + KnownStorageErrorCode2["UnauthorizedBlobOverwrite"] = "UnauthorizedBlobOverwrite"; + KnownStorageErrorCode2["BlobBeingRehydrated"] = "BlobBeingRehydrated"; + KnownStorageErrorCode2["BlobArchived"] = "BlobArchived"; + KnownStorageErrorCode2["BlobNotArchived"] = "BlobNotArchived"; + KnownStorageErrorCode2["AuthorizationSourceIPMismatch"] = "AuthorizationSourceIPMismatch"; + KnownStorageErrorCode2["AuthorizationProtocolMismatch"] = "AuthorizationProtocolMismatch"; + KnownStorageErrorCode2["AuthorizationPermissionMismatch"] = "AuthorizationPermissionMismatch"; + KnownStorageErrorCode2["AuthorizationServiceMismatch"] = "AuthorizationServiceMismatch"; + KnownStorageErrorCode2["AuthorizationResourceTypeMismatch"] = "AuthorizationResourceTypeMismatch"; + KnownStorageErrorCode2["BlobAccessTierNotSupportedForAccountType"] = "BlobAccessTierNotSupportedForAccountType"; + })(KnownStorageErrorCode || 
(exports2.KnownStorageErrorCode = KnownStorageErrorCode = {})); + } +}); + +// node_modules/@azure/storage-blob/dist/commonjs/generated/src/models/mappers.js +var require_mappers = __commonJS({ + "node_modules/@azure/storage-blob/dist/commonjs/generated/src/models/mappers.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.ServiceGetUserDelegationKeyHeaders = exports2.ServiceListContainersSegmentExceptionHeaders = exports2.ServiceListContainersSegmentHeaders = exports2.ServiceGetStatisticsExceptionHeaders = exports2.ServiceGetStatisticsHeaders = exports2.ServiceGetPropertiesExceptionHeaders = exports2.ServiceGetPropertiesHeaders = exports2.ServiceSetPropertiesExceptionHeaders = exports2.ServiceSetPropertiesHeaders = exports2.ArrowField = exports2.ArrowConfiguration = exports2.JsonTextConfiguration = exports2.DelimitedTextConfiguration = exports2.QueryFormat = exports2.QuerySerialization = exports2.QueryRequest = exports2.ClearRange = exports2.PageRange = exports2.PageList = exports2.Block = exports2.BlockList = exports2.BlockLookupList = exports2.BlobPrefix = exports2.BlobHierarchyListSegment = exports2.ListBlobsHierarchySegmentResponse = exports2.BlobPropertiesInternal = exports2.BlobName = exports2.BlobItemInternal = exports2.BlobFlatListSegment = exports2.ListBlobsFlatSegmentResponse = exports2.AccessPolicy = exports2.SignedIdentifier = exports2.BlobTag = exports2.BlobTags = exports2.FilterBlobItem = exports2.FilterBlobSegment = exports2.UserDelegationKey = exports2.KeyInfo = exports2.ContainerProperties = exports2.ContainerItem = exports2.ListContainersSegmentResponse = exports2.GeoReplication = exports2.BlobServiceStatistics = exports2.StorageError = exports2.StaticWebsite = exports2.CorsRule = exports2.Metrics = exports2.RetentionPolicy = exports2.Logging = exports2.BlobServiceProperties = void 0; + exports2.BlobUndeleteHeaders = exports2.BlobDeleteExceptionHeaders = exports2.BlobDeleteHeaders = exports2.BlobGetPropertiesExceptionHeaders = exports2.BlobGetPropertiesHeaders = exports2.BlobDownloadExceptionHeaders = exports2.BlobDownloadHeaders = exports2.ContainerGetAccountInfoExceptionHeaders = exports2.ContainerGetAccountInfoHeaders = exports2.ContainerListBlobHierarchySegmentExceptionHeaders = exports2.ContainerListBlobHierarchySegmentHeaders = exports2.ContainerListBlobFlatSegmentExceptionHeaders = exports2.ContainerListBlobFlatSegmentHeaders = exports2.ContainerChangeLeaseExceptionHeaders = exports2.ContainerChangeLeaseHeaders = exports2.ContainerBreakLeaseExceptionHeaders = exports2.ContainerBreakLeaseHeaders = exports2.ContainerRenewLeaseExceptionHeaders = exports2.ContainerRenewLeaseHeaders = exports2.ContainerReleaseLeaseExceptionHeaders = exports2.ContainerReleaseLeaseHeaders = exports2.ContainerAcquireLeaseExceptionHeaders = exports2.ContainerAcquireLeaseHeaders = exports2.ContainerFilterBlobsExceptionHeaders = exports2.ContainerFilterBlobsHeaders = exports2.ContainerSubmitBatchExceptionHeaders = exports2.ContainerSubmitBatchHeaders = exports2.ContainerRenameExceptionHeaders = exports2.ContainerRenameHeaders = exports2.ContainerRestoreExceptionHeaders = exports2.ContainerRestoreHeaders = exports2.ContainerSetAccessPolicyExceptionHeaders = exports2.ContainerSetAccessPolicyHeaders = exports2.ContainerGetAccessPolicyExceptionHeaders = exports2.ContainerGetAccessPolicyHeaders = exports2.ContainerSetMetadataExceptionHeaders = exports2.ContainerSetMetadataHeaders = exports2.ContainerDeleteExceptionHeaders = 
exports2.ContainerDeleteHeaders = exports2.ContainerGetPropertiesExceptionHeaders = exports2.ContainerGetPropertiesHeaders = exports2.ContainerCreateExceptionHeaders = exports2.ContainerCreateHeaders = exports2.ServiceFilterBlobsExceptionHeaders = exports2.ServiceFilterBlobsHeaders = exports2.ServiceSubmitBatchExceptionHeaders = exports2.ServiceSubmitBatchHeaders = exports2.ServiceGetAccountInfoExceptionHeaders = exports2.ServiceGetAccountInfoHeaders = exports2.ServiceGetUserDelegationKeyExceptionHeaders = void 0; + exports2.PageBlobGetPageRangesHeaders = exports2.PageBlobUploadPagesFromURLExceptionHeaders = exports2.PageBlobUploadPagesFromURLHeaders = exports2.PageBlobClearPagesExceptionHeaders = exports2.PageBlobClearPagesHeaders = exports2.PageBlobUploadPagesExceptionHeaders = exports2.PageBlobUploadPagesHeaders = exports2.PageBlobCreateExceptionHeaders = exports2.PageBlobCreateHeaders = exports2.BlobSetTagsExceptionHeaders = exports2.BlobSetTagsHeaders = exports2.BlobGetTagsExceptionHeaders = exports2.BlobGetTagsHeaders = exports2.BlobQueryExceptionHeaders = exports2.BlobQueryHeaders = exports2.BlobGetAccountInfoExceptionHeaders = exports2.BlobGetAccountInfoHeaders = exports2.BlobSetTierExceptionHeaders = exports2.BlobSetTierHeaders = exports2.BlobAbortCopyFromURLExceptionHeaders = exports2.BlobAbortCopyFromURLHeaders = exports2.BlobCopyFromURLExceptionHeaders = exports2.BlobCopyFromURLHeaders = exports2.BlobStartCopyFromURLExceptionHeaders = exports2.BlobStartCopyFromURLHeaders = exports2.BlobCreateSnapshotExceptionHeaders = exports2.BlobCreateSnapshotHeaders = exports2.BlobBreakLeaseExceptionHeaders = exports2.BlobBreakLeaseHeaders = exports2.BlobChangeLeaseExceptionHeaders = exports2.BlobChangeLeaseHeaders = exports2.BlobRenewLeaseExceptionHeaders = exports2.BlobRenewLeaseHeaders = exports2.BlobReleaseLeaseExceptionHeaders = exports2.BlobReleaseLeaseHeaders = exports2.BlobAcquireLeaseExceptionHeaders = exports2.BlobAcquireLeaseHeaders = exports2.BlobSetMetadataExceptionHeaders = exports2.BlobSetMetadataHeaders = exports2.BlobSetLegalHoldExceptionHeaders = exports2.BlobSetLegalHoldHeaders = exports2.BlobDeleteImmutabilityPolicyExceptionHeaders = exports2.BlobDeleteImmutabilityPolicyHeaders = exports2.BlobSetImmutabilityPolicyExceptionHeaders = exports2.BlobSetImmutabilityPolicyHeaders = exports2.BlobSetHttpHeadersExceptionHeaders = exports2.BlobSetHttpHeadersHeaders = exports2.BlobSetExpiryExceptionHeaders = exports2.BlobSetExpiryHeaders = exports2.BlobUndeleteExceptionHeaders = void 0; + exports2.BlockBlobGetBlockListExceptionHeaders = exports2.BlockBlobGetBlockListHeaders = exports2.BlockBlobCommitBlockListExceptionHeaders = exports2.BlockBlobCommitBlockListHeaders = exports2.BlockBlobStageBlockFromURLExceptionHeaders = exports2.BlockBlobStageBlockFromURLHeaders = exports2.BlockBlobStageBlockExceptionHeaders = exports2.BlockBlobStageBlockHeaders = exports2.BlockBlobPutBlobFromUrlExceptionHeaders = exports2.BlockBlobPutBlobFromUrlHeaders = exports2.BlockBlobUploadExceptionHeaders = exports2.BlockBlobUploadHeaders = exports2.AppendBlobSealExceptionHeaders = exports2.AppendBlobSealHeaders = exports2.AppendBlobAppendBlockFromUrlExceptionHeaders = exports2.AppendBlobAppendBlockFromUrlHeaders = exports2.AppendBlobAppendBlockExceptionHeaders = exports2.AppendBlobAppendBlockHeaders = exports2.AppendBlobCreateExceptionHeaders = exports2.AppendBlobCreateHeaders = exports2.PageBlobCopyIncrementalExceptionHeaders = exports2.PageBlobCopyIncrementalHeaders = 
exports2.PageBlobUpdateSequenceNumberExceptionHeaders = exports2.PageBlobUpdateSequenceNumberHeaders = exports2.PageBlobResizeExceptionHeaders = exports2.PageBlobResizeHeaders = exports2.PageBlobGetPageRangesDiffExceptionHeaders = exports2.PageBlobGetPageRangesDiffHeaders = exports2.PageBlobGetPageRangesExceptionHeaders = void 0; + exports2.BlobServiceProperties = { + serializedName: "BlobServiceProperties", + xmlName: "StorageServiceProperties", type: { name: "Composite", - className: "BlobChangeLeaseHeaders", + className: "BlobServiceProperties", modelProperties: { - etag: { - serializedName: "etag", - xmlName: "etag", + blobAnalyticsLogging: { + serializedName: "Logging", + xmlName: "Logging", type: { - name: "String" + name: "Composite", + className: "Logging" } }, - lastModified: { - serializedName: "last-modified", - xmlName: "last-modified", + hourMetrics: { + serializedName: "HourMetrics", + xmlName: "HourMetrics", type: { - name: "DateTimeRfc1123" + name: "Composite", + className: "Metrics" } }, - clientRequestId: { - serializedName: "x-ms-client-request-id", - xmlName: "x-ms-client-request-id", + minuteMetrics: { + serializedName: "MinuteMetrics", + xmlName: "MinuteMetrics", type: { - name: "String" + name: "Composite", + className: "Metrics" } }, - requestId: { - serializedName: "x-ms-request-id", - xmlName: "x-ms-request-id", + cors: { + serializedName: "Cors", + xmlName: "Cors", + xmlIsWrapped: true, + xmlElementName: "CorsRule", type: { - name: "String" + name: "Sequence", + element: { + type: { + name: "Composite", + className: "CorsRule" + } + } } }, - leaseId: { - serializedName: "x-ms-lease-id", - xmlName: "x-ms-lease-id", + defaultServiceVersion: { + serializedName: "DefaultServiceVersion", + xmlName: "DefaultServiceVersion", type: { name: "String" } }, - version: { - serializedName: "x-ms-version", - xmlName: "x-ms-version", + deleteRetentionPolicy: { + serializedName: "DeleteRetentionPolicy", + xmlName: "DeleteRetentionPolicy", type: { - name: "String" + name: "Composite", + className: "RetentionPolicy" } }, - date: { - serializedName: "date", - xmlName: "date", - type: { - name: "DateTimeRfc1123" - } - } - } - } - }; - exports2.BlobChangeLeaseExceptionHeaders = { - serializedName: "Blob_changeLeaseExceptionHeaders", - type: { - name: "Composite", - className: "BlobChangeLeaseExceptionHeaders", - modelProperties: { - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", + staticWebsite: { + serializedName: "StaticWebsite", + xmlName: "StaticWebsite", type: { - name: "String" + name: "Composite", + className: "StaticWebsite" } } } } }; - exports2.BlobBreakLeaseHeaders = { - serializedName: "Blob_breakLeaseHeaders", + exports2.Logging = { + serializedName: "Logging", type: { name: "Composite", - className: "BlobBreakLeaseHeaders", + className: "Logging", modelProperties: { - etag: { - serializedName: "etag", - xmlName: "etag", + version: { + serializedName: "Version", + required: true, + xmlName: "Version", type: { name: "String" } }, - lastModified: { - serializedName: "last-modified", - xmlName: "last-modified", - type: { - name: "DateTimeRfc1123" - } - }, - leaseTime: { - serializedName: "x-ms-lease-time", - xmlName: "x-ms-lease-time", - type: { - name: "Number" - } - }, - clientRequestId: { - serializedName: "x-ms-client-request-id", - xmlName: "x-ms-client-request-id", + deleteProperty: { + serializedName: "Delete", + required: true, + xmlName: "Delete", type: { - name: "String" + name: "Boolean" } }, - requestId: { - serializedName: 
"x-ms-request-id", - xmlName: "x-ms-request-id", + read: { + serializedName: "Read", + required: true, + xmlName: "Read", type: { - name: "String" + name: "Boolean" } }, - version: { - serializedName: "x-ms-version", - xmlName: "x-ms-version", + write: { + serializedName: "Write", + required: true, + xmlName: "Write", type: { - name: "String" + name: "Boolean" } }, - date: { - serializedName: "date", - xmlName: "date", - type: { - name: "DateTimeRfc1123" - } - } - } - } - }; - exports2.BlobBreakLeaseExceptionHeaders = { - serializedName: "Blob_breakLeaseExceptionHeaders", - type: { - name: "Composite", - className: "BlobBreakLeaseExceptionHeaders", - modelProperties: { - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", + retentionPolicy: { + serializedName: "RetentionPolicy", + xmlName: "RetentionPolicy", type: { - name: "String" + name: "Composite", + className: "RetentionPolicy" } } } } }; - exports2.BlobCreateSnapshotHeaders = { - serializedName: "Blob_createSnapshotHeaders", + exports2.RetentionPolicy = { + serializedName: "RetentionPolicy", type: { name: "Composite", - className: "BlobCreateSnapshotHeaders", + className: "RetentionPolicy", modelProperties: { - snapshot: { - serializedName: "x-ms-snapshot", - xmlName: "x-ms-snapshot", - type: { - name: "String" - } - }, - etag: { - serializedName: "etag", - xmlName: "etag", - type: { - name: "String" - } - }, - lastModified: { - serializedName: "last-modified", - xmlName: "last-modified", - type: { - name: "DateTimeRfc1123" - } - }, - clientRequestId: { - serializedName: "x-ms-client-request-id", - xmlName: "x-ms-client-request-id", - type: { - name: "String" - } - }, - requestId: { - serializedName: "x-ms-request-id", - xmlName: "x-ms-request-id", - type: { - name: "String" - } - }, - version: { - serializedName: "x-ms-version", - xmlName: "x-ms-version", - type: { - name: "String" - } - }, - versionId: { - serializedName: "x-ms-version-id", - xmlName: "x-ms-version-id", - type: { - name: "String" - } - }, - date: { - serializedName: "date", - xmlName: "date", - type: { - name: "DateTimeRfc1123" - } - }, - isServerEncrypted: { - serializedName: "x-ms-request-server-encrypted", - xmlName: "x-ms-request-server-encrypted", + enabled: { + serializedName: "Enabled", + required: true, + xmlName: "Enabled", type: { name: "Boolean" } }, - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } - } - } - } - }; - exports2.BlobCreateSnapshotExceptionHeaders = { - serializedName: "Blob_createSnapshotExceptionHeaders", - type: { - name: "Composite", - className: "BlobCreateSnapshotExceptionHeaders", - modelProperties: { - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", + days: { + constraints: { + InclusiveMinimum: 1 + }, + serializedName: "Days", + xmlName: "Days", type: { - name: "String" + name: "Number" } } } } }; - exports2.BlobStartCopyFromURLHeaders = { - serializedName: "Blob_startCopyFromURLHeaders", + exports2.Metrics = { + serializedName: "Metrics", type: { name: "Composite", - className: "BlobStartCopyFromURLHeaders", + className: "Metrics", modelProperties: { - etag: { - serializedName: "etag", - xmlName: "etag", - type: { - name: "String" - } - }, - lastModified: { - serializedName: "last-modified", - xmlName: "last-modified", - type: { - name: "DateTimeRfc1123" - } - }, - clientRequestId: { - serializedName: "x-ms-client-request-id", - xmlName: "x-ms-client-request-id", - type: { - name: "String" - } - }, - 
requestId: { - serializedName: "x-ms-request-id", - xmlName: "x-ms-request-id", - type: { - name: "String" - } - }, version: { - serializedName: "x-ms-version", - xmlName: "x-ms-version", - type: { - name: "String" - } - }, - versionId: { - serializedName: "x-ms-version-id", - xmlName: "x-ms-version-id", - type: { - name: "String" - } - }, - date: { - serializedName: "date", - xmlName: "date", - type: { - name: "DateTimeRfc1123" - } - }, - copyId: { - serializedName: "x-ms-copy-id", - xmlName: "x-ms-copy-id", + serializedName: "Version", + xmlName: "Version", type: { name: "String" } }, - copyStatus: { - serializedName: "x-ms-copy-status", - xmlName: "x-ms-copy-status", - type: { - name: "Enum", - allowedValues: ["pending", "success", "aborted", "failed"] - } - }, - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } - } - } - } - }; - exports2.BlobStartCopyFromURLExceptionHeaders = { - serializedName: "Blob_startCopyFromURLExceptionHeaders", - type: { - name: "Composite", - className: "BlobStartCopyFromURLExceptionHeaders", - modelProperties: { - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", + enabled: { + serializedName: "Enabled", + required: true, + xmlName: "Enabled", type: { - name: "String" + name: "Boolean" } }, - copySourceErrorCode: { - serializedName: "x-ms-copy-source-error-code", - xmlName: "x-ms-copy-source-error-code", + includeAPIs: { + serializedName: "IncludeAPIs", + xmlName: "IncludeAPIs", type: { - name: "String" + name: "Boolean" } }, - copySourceStatusCode: { - serializedName: "x-ms-copy-source-status-code", - xmlName: "x-ms-copy-source-status-code", + retentionPolicy: { + serializedName: "RetentionPolicy", + xmlName: "RetentionPolicy", type: { - name: "Number" + name: "Composite", + className: "RetentionPolicy" } } } } }; - exports2.BlobCopyFromURLHeaders = { - serializedName: "Blob_copyFromURLHeaders", + exports2.CorsRule = { + serializedName: "CorsRule", type: { name: "Composite", - className: "BlobCopyFromURLHeaders", + className: "CorsRule", modelProperties: { - etag: { - serializedName: "etag", - xmlName: "etag", - type: { - name: "String" - } - }, - lastModified: { - serializedName: "last-modified", - xmlName: "last-modified", - type: { - name: "DateTimeRfc1123" - } - }, - clientRequestId: { - serializedName: "x-ms-client-request-id", - xmlName: "x-ms-client-request-id", - type: { - name: "String" - } - }, - requestId: { - serializedName: "x-ms-request-id", - xmlName: "x-ms-request-id", - type: { - name: "String" - } - }, - version: { - serializedName: "x-ms-version", - xmlName: "x-ms-version", - type: { - name: "String" - } - }, - versionId: { - serializedName: "x-ms-version-id", - xmlName: "x-ms-version-id", - type: { - name: "String" - } - }, - date: { - serializedName: "date", - xmlName: "date", - type: { - name: "DateTimeRfc1123" - } - }, - copyId: { - serializedName: "x-ms-copy-id", - xmlName: "x-ms-copy-id", - type: { - name: "String" - } - }, - copyStatus: { - defaultValue: "success", - isConstant: true, - serializedName: "x-ms-copy-status", - type: { - name: "String" - } - }, - contentMD5: { - serializedName: "content-md5", - xmlName: "content-md5", - type: { - name: "ByteArray" - } - }, - xMsContentCrc64: { - serializedName: "x-ms-content-crc64", - xmlName: "x-ms-content-crc64", - type: { - name: "ByteArray" - } - }, - encryptionScope: { - serializedName: "x-ms-encryption-scope", - xmlName: "x-ms-encryption-scope", + allowedOrigins: { + serializedName: 
"AllowedOrigins", + required: true, + xmlName: "AllowedOrigins", type: { name: "String" } }, - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } - } - } - } - }; - exports2.BlobCopyFromURLExceptionHeaders = { - serializedName: "Blob_copyFromURLExceptionHeaders", - type: { - name: "Composite", - className: "BlobCopyFromURLExceptionHeaders", - modelProperties: { - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", + allowedMethods: { + serializedName: "AllowedMethods", + required: true, + xmlName: "AllowedMethods", type: { name: "String" } }, - copySourceErrorCode: { - serializedName: "x-ms-copy-source-error-code", - xmlName: "x-ms-copy-source-error-code", + allowedHeaders: { + serializedName: "AllowedHeaders", + required: true, + xmlName: "AllowedHeaders", type: { name: "String" } }, - copySourceStatusCode: { - serializedName: "x-ms-copy-source-status-code", - xmlName: "x-ms-copy-source-status-code", + exposedHeaders: { + serializedName: "ExposedHeaders", + required: true, + xmlName: "ExposedHeaders", + type: { + name: "String" + } + }, + maxAgeInSeconds: { + constraints: { + InclusiveMinimum: 0 + }, + serializedName: "MaxAgeInSeconds", + required: true, + xmlName: "MaxAgeInSeconds", type: { name: "Number" } @@ -72206,43 +67949,37 @@ var require_mappers = __commonJS({ } } }; - exports2.BlobAbortCopyFromURLHeaders = { - serializedName: "Blob_abortCopyFromURLHeaders", + exports2.StaticWebsite = { + serializedName: "StaticWebsite", type: { name: "Composite", - className: "BlobAbortCopyFromURLHeaders", + className: "StaticWebsite", modelProperties: { - clientRequestId: { - serializedName: "x-ms-client-request-id", - xmlName: "x-ms-client-request-id", + enabled: { + serializedName: "Enabled", + required: true, + xmlName: "Enabled", type: { - name: "String" + name: "Boolean" } }, - requestId: { - serializedName: "x-ms-request-id", - xmlName: "x-ms-request-id", + indexDocument: { + serializedName: "IndexDocument", + xmlName: "IndexDocument", type: { name: "String" } }, - version: { - serializedName: "x-ms-version", - xmlName: "x-ms-version", + errorDocument404Path: { + serializedName: "ErrorDocument404Path", + xmlName: "ErrorDocument404Path", type: { name: "String" } }, - date: { - serializedName: "date", - xmlName: "date", - type: { - name: "DateTimeRfc1123" - } - }, - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", + defaultIndexDocumentPath: { + serializedName: "DefaultIndexDocumentPath", + xmlName: "DefaultIndexDocumentPath", type: { name: "String" } @@ -72250,52 +67987,50 @@ var require_mappers = __commonJS({ } } }; - exports2.BlobAbortCopyFromURLExceptionHeaders = { - serializedName: "Blob_abortCopyFromURLExceptionHeaders", + exports2.StorageError = { + serializedName: "StorageError", type: { name: "Composite", - className: "BlobAbortCopyFromURLExceptionHeaders", + className: "StorageError", modelProperties: { - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", + message: { + serializedName: "Message", + xmlName: "Message", type: { name: "String" } - } - } - } - }; - exports2.BlobSetTierHeaders = { - serializedName: "Blob_setTierHeaders", - type: { - name: "Composite", - className: "BlobSetTierHeaders", - modelProperties: { - clientRequestId: { - serializedName: "x-ms-client-request-id", - xmlName: "x-ms-client-request-id", + }, + copySourceStatusCode: { + serializedName: "CopySourceStatusCode", + xmlName: "CopySourceStatusCode", + type: { + 
name: "Number" + } + }, + copySourceErrorCode: { + serializedName: "CopySourceErrorCode", + xmlName: "CopySourceErrorCode", type: { name: "String" } }, - requestId: { - serializedName: "x-ms-request-id", - xmlName: "x-ms-request-id", + copySourceErrorMessage: { + serializedName: "CopySourceErrorMessage", + xmlName: "CopySourceErrorMessage", type: { name: "String" } }, - version: { - serializedName: "x-ms-version", - xmlName: "x-ms-version", + code: { + serializedName: "Code", + xmlName: "Code", type: { name: "String" } }, - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", + authenticationErrorDetail: { + serializedName: "AuthenticationErrorDetail", + xmlName: "AuthenticationErrorDetail", type: { name: "String" } @@ -72303,379 +68038,548 @@ var require_mappers = __commonJS({ } } }; - exports2.BlobSetTierExceptionHeaders = { - serializedName: "Blob_setTierExceptionHeaders", + exports2.BlobServiceStatistics = { + serializedName: "BlobServiceStatistics", + xmlName: "StorageServiceStats", type: { name: "Composite", - className: "BlobSetTierExceptionHeaders", + className: "BlobServiceStatistics", modelProperties: { - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", + geoReplication: { + serializedName: "GeoReplication", + xmlName: "GeoReplication", type: { - name: "String" + name: "Composite", + className: "GeoReplication" } } } } }; - exports2.BlobGetAccountInfoHeaders = { - serializedName: "Blob_getAccountInfoHeaders", + exports2.GeoReplication = { + serializedName: "GeoReplication", type: { name: "Composite", - className: "BlobGetAccountInfoHeaders", + className: "GeoReplication", modelProperties: { - clientRequestId: { - serializedName: "x-ms-client-request-id", - xmlName: "x-ms-client-request-id", + status: { + serializedName: "Status", + required: true, + xmlName: "Status", type: { - name: "String" + name: "Enum", + allowedValues: ["live", "bootstrap", "unavailable"] } }, - requestId: { - serializedName: "x-ms-request-id", - xmlName: "x-ms-request-id", + lastSyncOn: { + serializedName: "LastSyncTime", + required: true, + xmlName: "LastSyncTime", + type: { + name: "DateTimeRfc1123" + } + } + } + } + }; + exports2.ListContainersSegmentResponse = { + serializedName: "ListContainersSegmentResponse", + xmlName: "EnumerationResults", + type: { + name: "Composite", + className: "ListContainersSegmentResponse", + modelProperties: { + serviceEndpoint: { + serializedName: "ServiceEndpoint", + required: true, + xmlName: "ServiceEndpoint", + xmlIsAttribute: true, type: { name: "String" } }, - version: { - serializedName: "x-ms-version", - xmlName: "x-ms-version", + prefix: { + serializedName: "Prefix", + xmlName: "Prefix", type: { name: "String" } }, - date: { - serializedName: "date", - xmlName: "date", + marker: { + serializedName: "Marker", + xmlName: "Marker", type: { - name: "DateTimeRfc1123" + name: "String" } }, - skuName: { - serializedName: "x-ms-sku-name", - xmlName: "x-ms-sku-name", + maxPageSize: { + serializedName: "MaxResults", + xmlName: "MaxResults", type: { - name: "Enum", - allowedValues: [ - "Standard_LRS", - "Standard_GRS", - "Standard_RAGRS", - "Standard_ZRS", - "Premium_LRS" - ] + name: "Number" } }, - accountKind: { - serializedName: "x-ms-account-kind", - xmlName: "x-ms-account-kind", + containerItems: { + serializedName: "ContainerItems", + required: true, + xmlName: "Containers", + xmlIsWrapped: true, + xmlElementName: "Container", type: { - name: "Enum", - allowedValues: [ - "Storage", - "BlobStorage", - 
"StorageV2", - "FileStorage", - "BlockBlobStorage" - ] + name: "Sequence", + element: { + type: { + name: "Composite", + className: "ContainerItem" + } + } } }, - isHierarchicalNamespaceEnabled: { - serializedName: "x-ms-is-hns-enabled", - xmlName: "x-ms-is-hns-enabled", + continuationToken: { + serializedName: "NextMarker", + xmlName: "NextMarker", type: { - name: "Boolean" + name: "String" } } } } }; - exports2.BlobGetAccountInfoExceptionHeaders = { - serializedName: "Blob_getAccountInfoExceptionHeaders", + exports2.ContainerItem = { + serializedName: "ContainerItem", + xmlName: "Container", type: { name: "Composite", - className: "BlobGetAccountInfoExceptionHeaders", + className: "ContainerItem", modelProperties: { - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", + name: { + serializedName: "Name", + required: true, + xmlName: "Name", + type: { + name: "String" + } + }, + deleted: { + serializedName: "Deleted", + xmlName: "Deleted", + type: { + name: "Boolean" + } + }, + version: { + serializedName: "Version", + xmlName: "Version", type: { name: "String" } + }, + properties: { + serializedName: "Properties", + xmlName: "Properties", + type: { + name: "Composite", + className: "ContainerProperties" + } + }, + metadata: { + serializedName: "Metadata", + xmlName: "Metadata", + type: { + name: "Dictionary", + value: { type: { name: "String" } } + } } } } }; - exports2.BlobQueryHeaders = { - serializedName: "Blob_queryHeaders", + exports2.ContainerProperties = { + serializedName: "ContainerProperties", type: { name: "Composite", - className: "BlobQueryHeaders", + className: "ContainerProperties", modelProperties: { lastModified: { - serializedName: "last-modified", - xmlName: "last-modified", + serializedName: "Last-Modified", + required: true, + xmlName: "Last-Modified", type: { name: "DateTimeRfc1123" } }, - metadata: { - serializedName: "x-ms-meta", - headerCollectionPrefix: "x-ms-meta-", - xmlName: "x-ms-meta", + etag: { + serializedName: "Etag", + required: true, + xmlName: "Etag", type: { - name: "Dictionary", - value: { type: { name: "String" } } + name: "String" } }, - contentLength: { - serializedName: "content-length", - xmlName: "content-length", + leaseStatus: { + serializedName: "LeaseStatus", + xmlName: "LeaseStatus", type: { - name: "Number" + name: "Enum", + allowedValues: ["locked", "unlocked"] } }, - contentType: { - serializedName: "content-type", - xmlName: "content-type", + leaseState: { + serializedName: "LeaseState", + xmlName: "LeaseState", type: { - name: "String" + name: "Enum", + allowedValues: [ + "available", + "leased", + "expired", + "breaking", + "broken" + ] } }, - contentRange: { - serializedName: "content-range", - xmlName: "content-range", + leaseDuration: { + serializedName: "LeaseDuration", + xmlName: "LeaseDuration", type: { - name: "String" + name: "Enum", + allowedValues: ["infinite", "fixed"] } }, - etag: { - serializedName: "etag", - xmlName: "etag", + publicAccess: { + serializedName: "PublicAccess", + xmlName: "PublicAccess", type: { - name: "String" + name: "Enum", + allowedValues: ["container", "blob"] } }, - contentMD5: { - serializedName: "content-md5", - xmlName: "content-md5", + hasImmutabilityPolicy: { + serializedName: "HasImmutabilityPolicy", + xmlName: "HasImmutabilityPolicy", type: { - name: "ByteArray" + name: "Boolean" } }, - contentEncoding: { - serializedName: "content-encoding", - xmlName: "content-encoding", + hasLegalHold: { + serializedName: "HasLegalHold", + xmlName: "HasLegalHold", type: { - 
name: "String" + name: "Boolean" } }, - cacheControl: { - serializedName: "cache-control", - xmlName: "cache-control", + defaultEncryptionScope: { + serializedName: "DefaultEncryptionScope", + xmlName: "DefaultEncryptionScope", type: { name: "String" } }, - contentDisposition: { - serializedName: "content-disposition", - xmlName: "content-disposition", + preventEncryptionScopeOverride: { + serializedName: "DenyEncryptionScopeOverride", + xmlName: "DenyEncryptionScopeOverride", type: { - name: "String" + name: "Boolean" } }, - contentLanguage: { - serializedName: "content-language", - xmlName: "content-language", + deletedOn: { + serializedName: "DeletedTime", + xmlName: "DeletedTime", type: { - name: "String" + name: "DateTimeRfc1123" } }, - blobSequenceNumber: { - serializedName: "x-ms-blob-sequence-number", - xmlName: "x-ms-blob-sequence-number", + remainingRetentionDays: { + serializedName: "RemainingRetentionDays", + xmlName: "RemainingRetentionDays", type: { name: "Number" } }, - blobType: { - serializedName: "x-ms-blob-type", - xmlName: "x-ms-blob-type", + isImmutableStorageWithVersioningEnabled: { + serializedName: "ImmutableStorageWithVersioningEnabled", + xmlName: "ImmutableStorageWithVersioningEnabled", type: { - name: "Enum", - allowedValues: ["BlockBlob", "PageBlob", "AppendBlob"] + name: "Boolean" } - }, - copyCompletionTime: { - serializedName: "x-ms-copy-completion-time", - xmlName: "x-ms-copy-completion-time", + } + } + } + }; + exports2.KeyInfo = { + serializedName: "KeyInfo", + type: { + name: "Composite", + className: "KeyInfo", + modelProperties: { + startsOn: { + serializedName: "Start", + required: true, + xmlName: "Start", type: { - name: "DateTimeRfc1123" + name: "String" } }, - copyStatusDescription: { - serializedName: "x-ms-copy-status-description", - xmlName: "x-ms-copy-status-description", + expiresOn: { + serializedName: "Expiry", + required: true, + xmlName: "Expiry", type: { name: "String" } - }, - copyId: { - serializedName: "x-ms-copy-id", - xmlName: "x-ms-copy-id", + } + } + } + }; + exports2.UserDelegationKey = { + serializedName: "UserDelegationKey", + type: { + name: "Composite", + className: "UserDelegationKey", + modelProperties: { + signedObjectId: { + serializedName: "SignedOid", + required: true, + xmlName: "SignedOid", type: { name: "String" } }, - copyProgress: { - serializedName: "x-ms-copy-progress", - xmlName: "x-ms-copy-progress", + signedTenantId: { + serializedName: "SignedTid", + required: true, + xmlName: "SignedTid", type: { name: "String" } }, - copySource: { - serializedName: "x-ms-copy-source", - xmlName: "x-ms-copy-source", + signedStartsOn: { + serializedName: "SignedStart", + required: true, + xmlName: "SignedStart", type: { name: "String" } }, - copyStatus: { - serializedName: "x-ms-copy-status", - xmlName: "x-ms-copy-status", + signedExpiresOn: { + serializedName: "SignedExpiry", + required: true, + xmlName: "SignedExpiry", type: { - name: "Enum", - allowedValues: ["pending", "success", "aborted", "failed"] + name: "String" } }, - leaseDuration: { - serializedName: "x-ms-lease-duration", - xmlName: "x-ms-lease-duration", + signedService: { + serializedName: "SignedService", + required: true, + xmlName: "SignedService", type: { - name: "Enum", - allowedValues: ["infinite", "fixed"] + name: "String" } }, - leaseState: { - serializedName: "x-ms-lease-state", - xmlName: "x-ms-lease-state", + signedVersion: { + serializedName: "SignedVersion", + required: true, + xmlName: "SignedVersion", type: { - name: "Enum", - allowedValues: [ - 
"available", - "leased", - "expired", - "breaking", - "broken" - ] + name: "String" } }, - leaseStatus: { - serializedName: "x-ms-lease-status", - xmlName: "x-ms-lease-status", + value: { + serializedName: "Value", + required: true, + xmlName: "Value", + type: { + name: "String" + } + } + } + } + }; + exports2.FilterBlobSegment = { + serializedName: "FilterBlobSegment", + xmlName: "EnumerationResults", + type: { + name: "Composite", + className: "FilterBlobSegment", + modelProperties: { + serviceEndpoint: { + serializedName: "ServiceEndpoint", + required: true, + xmlName: "ServiceEndpoint", + xmlIsAttribute: true, type: { - name: "Enum", - allowedValues: ["locked", "unlocked"] + name: "String" } }, - clientRequestId: { - serializedName: "x-ms-client-request-id", - xmlName: "x-ms-client-request-id", + where: { + serializedName: "Where", + required: true, + xmlName: "Where", type: { name: "String" } }, - requestId: { - serializedName: "x-ms-request-id", - xmlName: "x-ms-request-id", + blobs: { + serializedName: "Blobs", + required: true, + xmlName: "Blobs", + xmlIsWrapped: true, + xmlElementName: "Blob", type: { - name: "String" + name: "Sequence", + element: { + type: { + name: "Composite", + className: "FilterBlobItem" + } + } } }, - version: { - serializedName: "x-ms-version", - xmlName: "x-ms-version", + continuationToken: { + serializedName: "NextMarker", + xmlName: "NextMarker", type: { name: "String" } - }, - acceptRanges: { - serializedName: "accept-ranges", - xmlName: "accept-ranges", + } + } + } + }; + exports2.FilterBlobItem = { + serializedName: "FilterBlobItem", + xmlName: "Blob", + type: { + name: "Composite", + className: "FilterBlobItem", + modelProperties: { + name: { + serializedName: "Name", + required: true, + xmlName: "Name", type: { name: "String" } }, - date: { - serializedName: "date", - xmlName: "date", + containerName: { + serializedName: "ContainerName", + required: true, + xmlName: "ContainerName", type: { - name: "DateTimeRfc1123" + name: "String" } }, - blobCommittedBlockCount: { - serializedName: "x-ms-blob-committed-block-count", - xmlName: "x-ms-blob-committed-block-count", + tags: { + serializedName: "Tags", + xmlName: "Tags", type: { - name: "Number" + name: "Composite", + className: "BlobTags" } - }, - isServerEncrypted: { - serializedName: "x-ms-server-encrypted", - xmlName: "x-ms-server-encrypted", + } + } + } + }; + exports2.BlobTags = { + serializedName: "BlobTags", + xmlName: "Tags", + type: { + name: "Composite", + className: "BlobTags", + modelProperties: { + blobTagSet: { + serializedName: "BlobTagSet", + required: true, + xmlName: "TagSet", + xmlIsWrapped: true, + xmlElementName: "Tag", type: { - name: "Boolean" + name: "Sequence", + element: { + type: { + name: "Composite", + className: "BlobTag" + } + } } - }, - encryptionKeySha256: { - serializedName: "x-ms-encryption-key-sha256", - xmlName: "x-ms-encryption-key-sha256", + } + } + } + }; + exports2.BlobTag = { + serializedName: "BlobTag", + xmlName: "Tag", + type: { + name: "Composite", + className: "BlobTag", + modelProperties: { + key: { + serializedName: "Key", + required: true, + xmlName: "Key", type: { name: "String" } }, - encryptionScope: { - serializedName: "x-ms-encryption-scope", - xmlName: "x-ms-encryption-scope", + value: { + serializedName: "Value", + required: true, + xmlName: "Value", type: { name: "String" } - }, - blobContentMD5: { - serializedName: "x-ms-blob-content-md5", - xmlName: "x-ms-blob-content-md5", - type: { - name: "ByteArray" - } - }, - errorCode: { - 
serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", + } + } + } + }; + exports2.SignedIdentifier = { + serializedName: "SignedIdentifier", + xmlName: "SignedIdentifier", + type: { + name: "Composite", + className: "SignedIdentifier", + modelProperties: { + id: { + serializedName: "Id", + required: true, + xmlName: "Id", type: { name: "String" } }, - contentCrc64: { - serializedName: "x-ms-content-crc64", - xmlName: "x-ms-content-crc64", + accessPolicy: { + serializedName: "AccessPolicy", + xmlName: "AccessPolicy", type: { - name: "ByteArray" + name: "Composite", + className: "AccessPolicy" } } } } }; - exports2.BlobQueryExceptionHeaders = { - serializedName: "Blob_queryExceptionHeaders", + exports2.AccessPolicy = { + serializedName: "AccessPolicy", type: { name: "Composite", - className: "BlobQueryExceptionHeaders", + className: "AccessPolicy", modelProperties: { - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", + startsOn: { + serializedName: "Start", + xmlName: "Start", + type: { + name: "String" + } + }, + expiresOn: { + serializedName: "Expiry", + xmlName: "Expiry", + type: { + name: "String" + } + }, + permissions: { + serializedName: "Permission", + xmlName: "Permission", type: { name: "String" } @@ -72683,43 +68587,63 @@ var require_mappers = __commonJS({ } } }; - exports2.BlobGetTagsHeaders = { - serializedName: "Blob_getTagsHeaders", + exports2.ListBlobsFlatSegmentResponse = { + serializedName: "ListBlobsFlatSegmentResponse", + xmlName: "EnumerationResults", type: { name: "Composite", - className: "BlobGetTagsHeaders", + className: "ListBlobsFlatSegmentResponse", modelProperties: { - clientRequestId: { - serializedName: "x-ms-client-request-id", - xmlName: "x-ms-client-request-id", + serviceEndpoint: { + serializedName: "ServiceEndpoint", + required: true, + xmlName: "ServiceEndpoint", + xmlIsAttribute: true, type: { name: "String" } }, - requestId: { - serializedName: "x-ms-request-id", - xmlName: "x-ms-request-id", + containerName: { + serializedName: "ContainerName", + required: true, + xmlName: "ContainerName", + xmlIsAttribute: true, type: { name: "String" } }, - version: { - serializedName: "x-ms-version", - xmlName: "x-ms-version", + prefix: { + serializedName: "Prefix", + xmlName: "Prefix", type: { name: "String" } }, - date: { - serializedName: "date", - xmlName: "date", + marker: { + serializedName: "Marker", + xmlName: "Marker", type: { - name: "DateTimeRfc1123" + name: "String" } }, - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", + maxPageSize: { + serializedName: "MaxResults", + xmlName: "MaxResults", + type: { + name: "Number" + } + }, + segment: { + serializedName: "Segment", + xmlName: "Blobs", + type: { + name: "Composite", + className: "BlobFlatListSegment" + } + }, + continuationToken: { + serializedName: "NextMarker", + xmlName: "NextMarker", type: { name: "String" } @@ -72727,75 +68651,136 @@ var require_mappers = __commonJS({ } } }; - exports2.BlobGetTagsExceptionHeaders = { - serializedName: "Blob_getTagsExceptionHeaders", + exports2.BlobFlatListSegment = { + serializedName: "BlobFlatListSegment", + xmlName: "Blobs", type: { name: "Composite", - className: "BlobGetTagsExceptionHeaders", + className: "BlobFlatListSegment", modelProperties: { - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", + blobItems: { + serializedName: "BlobItems", + required: true, + xmlName: "BlobItems", + xmlElementName: "Blob", type: { - name: "String" + name: "Sequence", + 
element: { + type: { + name: "Composite", + className: "BlobItemInternal" + } + } } } } } }; - exports2.BlobSetTagsHeaders = { - serializedName: "Blob_setTagsHeaders", + exports2.BlobItemInternal = { + serializedName: "BlobItemInternal", + xmlName: "Blob", type: { name: "Composite", - className: "BlobSetTagsHeaders", + className: "BlobItemInternal", modelProperties: { - clientRequestId: { - serializedName: "x-ms-client-request-id", - xmlName: "x-ms-client-request-id", + name: { + serializedName: "Name", + xmlName: "Name", type: { - name: "String" + name: "Composite", + className: "BlobName" } }, - requestId: { - serializedName: "x-ms-request-id", - xmlName: "x-ms-request-id", + deleted: { + serializedName: "Deleted", + required: true, + xmlName: "Deleted", + type: { + name: "Boolean" + } + }, + snapshot: { + serializedName: "Snapshot", + required: true, + xmlName: "Snapshot", type: { name: "String" } }, - version: { - serializedName: "x-ms-version", - xmlName: "x-ms-version", + versionId: { + serializedName: "VersionId", + xmlName: "VersionId", type: { name: "String" } }, - date: { - serializedName: "date", - xmlName: "date", + isCurrentVersion: { + serializedName: "IsCurrentVersion", + xmlName: "IsCurrentVersion", type: { - name: "DateTimeRfc1123" + name: "Boolean" } }, - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", + properties: { + serializedName: "Properties", + xmlName: "Properties", type: { - name: "String" + name: "Composite", + className: "BlobPropertiesInternal" + } + }, + metadata: { + serializedName: "Metadata", + xmlName: "Metadata", + type: { + name: "Dictionary", + value: { type: { name: "String" } } + } + }, + blobTags: { + serializedName: "BlobTags", + xmlName: "Tags", + type: { + name: "Composite", + className: "BlobTags" + } + }, + objectReplicationMetadata: { + serializedName: "ObjectReplicationMetadata", + xmlName: "OrMetadata", + type: { + name: "Dictionary", + value: { type: { name: "String" } } + } + }, + hasVersionsOnly: { + serializedName: "HasVersionsOnly", + xmlName: "HasVersionsOnly", + type: { + name: "Boolean" } } } } }; - exports2.BlobSetTagsExceptionHeaders = { - serializedName: "Blob_setTagsExceptionHeaders", + exports2.BlobName = { + serializedName: "BlobName", type: { name: "Composite", - className: "BlobSetTagsExceptionHeaders", + className: "BlobName", modelProperties: { - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", + encoded: { + serializedName: "Encoded", + xmlName: "Encoded", + xmlIsAttribute: true, + type: { + name: "Boolean" + } + }, + content: { + serializedName: "content", + xmlName: "content", + xmlIsMsText: true, type: { name: "String" } @@ -72803,412 +68788,397 @@ var require_mappers = __commonJS({ } } }; - exports2.PageBlobCreateHeaders = { - serializedName: "PageBlob_createHeaders", + exports2.BlobPropertiesInternal = { + serializedName: "BlobPropertiesInternal", + xmlName: "Properties", type: { name: "Composite", - className: "PageBlobCreateHeaders", + className: "BlobPropertiesInternal", modelProperties: { - etag: { - serializedName: "etag", - xmlName: "etag", + createdOn: { + serializedName: "Creation-Time", + xmlName: "Creation-Time", type: { - name: "String" + name: "DateTimeRfc1123" } }, lastModified: { - serializedName: "last-modified", - xmlName: "last-modified", + serializedName: "Last-Modified", + required: true, + xmlName: "Last-Modified", type: { name: "DateTimeRfc1123" } }, - contentMD5: { - serializedName: "content-md5", - xmlName: "content-md5", - type: { - 
name: "ByteArray" - } - }, - clientRequestId: { - serializedName: "x-ms-client-request-id", - xmlName: "x-ms-client-request-id", + etag: { + serializedName: "Etag", + required: true, + xmlName: "Etag", type: { name: "String" } }, - requestId: { - serializedName: "x-ms-request-id", - xmlName: "x-ms-request-id", + contentLength: { + serializedName: "Content-Length", + xmlName: "Content-Length", type: { - name: "String" + name: "Number" } }, - version: { - serializedName: "x-ms-version", - xmlName: "x-ms-version", + contentType: { + serializedName: "Content-Type", + xmlName: "Content-Type", type: { name: "String" } }, - versionId: { - serializedName: "x-ms-version-id", - xmlName: "x-ms-version-id", + contentEncoding: { + serializedName: "Content-Encoding", + xmlName: "Content-Encoding", type: { name: "String" } }, - date: { - serializedName: "date", - xmlName: "date", + contentLanguage: { + serializedName: "Content-Language", + xmlName: "Content-Language", type: { - name: "DateTimeRfc1123" + name: "String" } }, - isServerEncrypted: { - serializedName: "x-ms-request-server-encrypted", - xmlName: "x-ms-request-server-encrypted", + contentMD5: { + serializedName: "Content-MD5", + xmlName: "Content-MD5", type: { - name: "Boolean" + name: "ByteArray" } }, - encryptionKeySha256: { - serializedName: "x-ms-encryption-key-sha256", - xmlName: "x-ms-encryption-key-sha256", + contentDisposition: { + serializedName: "Content-Disposition", + xmlName: "Content-Disposition", type: { name: "String" } }, - encryptionScope: { - serializedName: "x-ms-encryption-scope", - xmlName: "x-ms-encryption-scope", + cacheControl: { + serializedName: "Cache-Control", + xmlName: "Cache-Control", type: { name: "String" } }, - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } - } - } - } - }; - exports2.PageBlobCreateExceptionHeaders = { - serializedName: "PageBlob_createExceptionHeaders", - type: { - name: "Composite", - className: "PageBlobCreateExceptionHeaders", - modelProperties: { - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", + blobSequenceNumber: { + serializedName: "x-ms-blob-sequence-number", + xmlName: "x-ms-blob-sequence-number", type: { - name: "String" + name: "Number" } - } - } - } - }; - exports2.PageBlobUploadPagesHeaders = { - serializedName: "PageBlob_uploadPagesHeaders", - type: { - name: "Composite", - className: "PageBlobUploadPagesHeaders", - modelProperties: { - etag: { - serializedName: "etag", - xmlName: "etag", + }, + blobType: { + serializedName: "BlobType", + xmlName: "BlobType", type: { - name: "String" + name: "Enum", + allowedValues: ["BlockBlob", "PageBlob", "AppendBlob"] } }, - lastModified: { - serializedName: "last-modified", - xmlName: "last-modified", + leaseStatus: { + serializedName: "LeaseStatus", + xmlName: "LeaseStatus", type: { - name: "DateTimeRfc1123" + name: "Enum", + allowedValues: ["locked", "unlocked"] } }, - contentMD5: { - serializedName: "content-md5", - xmlName: "content-md5", + leaseState: { + serializedName: "LeaseState", + xmlName: "LeaseState", type: { - name: "ByteArray" + name: "Enum", + allowedValues: [ + "available", + "leased", + "expired", + "breaking", + "broken" + ] } }, - xMsContentCrc64: { - serializedName: "x-ms-content-crc64", - xmlName: "x-ms-content-crc64", + leaseDuration: { + serializedName: "LeaseDuration", + xmlName: "LeaseDuration", type: { - name: "ByteArray" + name: "Enum", + allowedValues: ["infinite", "fixed"] } }, - blobSequenceNumber: { - 
serializedName: "x-ms-blob-sequence-number", - xmlName: "x-ms-blob-sequence-number", + copyId: { + serializedName: "CopyId", + xmlName: "CopyId", type: { - name: "Number" + name: "String" } }, - clientRequestId: { - serializedName: "x-ms-client-request-id", - xmlName: "x-ms-client-request-id", + copyStatus: { + serializedName: "CopyStatus", + xmlName: "CopyStatus", type: { - name: "String" + name: "Enum", + allowedValues: ["pending", "success", "aborted", "failed"] } }, - requestId: { - serializedName: "x-ms-request-id", - xmlName: "x-ms-request-id", + copySource: { + serializedName: "CopySource", + xmlName: "CopySource", type: { name: "String" } }, - version: { - serializedName: "x-ms-version", - xmlName: "x-ms-version", + copyProgress: { + serializedName: "CopyProgress", + xmlName: "CopyProgress", type: { name: "String" } }, - date: { - serializedName: "date", - xmlName: "date", + copyCompletedOn: { + serializedName: "CopyCompletionTime", + xmlName: "CopyCompletionTime", type: { name: "DateTimeRfc1123" } }, - isServerEncrypted: { - serializedName: "x-ms-request-server-encrypted", - xmlName: "x-ms-request-server-encrypted", + copyStatusDescription: { + serializedName: "CopyStatusDescription", + xmlName: "CopyStatusDescription", type: { - name: "Boolean" + name: "String" } }, - encryptionKeySha256: { - serializedName: "x-ms-encryption-key-sha256", - xmlName: "x-ms-encryption-key-sha256", + serverEncrypted: { + serializedName: "ServerEncrypted", + xmlName: "ServerEncrypted", type: { - name: "String" + name: "Boolean" } }, - encryptionScope: { - serializedName: "x-ms-encryption-scope", - xmlName: "x-ms-encryption-scope", + incrementalCopy: { + serializedName: "IncrementalCopy", + xmlName: "IncrementalCopy", type: { - name: "String" + name: "Boolean" } }, - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", + destinationSnapshot: { + serializedName: "DestinationSnapshot", + xmlName: "DestinationSnapshot", type: { name: "String" } - } - } - } - }; - exports2.PageBlobUploadPagesExceptionHeaders = { - serializedName: "PageBlob_uploadPagesExceptionHeaders", - type: { - name: "Composite", - className: "PageBlobUploadPagesExceptionHeaders", - modelProperties: { - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", + }, + deletedOn: { + serializedName: "DeletedTime", + xmlName: "DeletedTime", type: { - name: "String" + name: "DateTimeRfc1123" } - } - } - } - }; - exports2.PageBlobClearPagesHeaders = { - serializedName: "PageBlob_clearPagesHeaders", - type: { - name: "Composite", - className: "PageBlobClearPagesHeaders", - modelProperties: { - etag: { - serializedName: "etag", - xmlName: "etag", + }, + remainingRetentionDays: { + serializedName: "RemainingRetentionDays", + xmlName: "RemainingRetentionDays", type: { - name: "String" + name: "Number" } }, - lastModified: { - serializedName: "last-modified", - xmlName: "last-modified", + accessTier: { + serializedName: "AccessTier", + xmlName: "AccessTier", type: { - name: "DateTimeRfc1123" + name: "Enum", + allowedValues: [ + "P4", + "P6", + "P10", + "P15", + "P20", + "P30", + "P40", + "P50", + "P60", + "P70", + "P80", + "Hot", + "Cool", + "Archive", + "Cold" + ] } }, - contentMD5: { - serializedName: "content-md5", - xmlName: "content-md5", + accessTierInferred: { + serializedName: "AccessTierInferred", + xmlName: "AccessTierInferred", type: { - name: "ByteArray" + name: "Boolean" } }, - xMsContentCrc64: { - serializedName: "x-ms-content-crc64", - xmlName: "x-ms-content-crc64", + archiveStatus: 
{ + serializedName: "ArchiveStatus", + xmlName: "ArchiveStatus", type: { - name: "ByteArray" + name: "Enum", + allowedValues: [ + "rehydrate-pending-to-hot", + "rehydrate-pending-to-cool", + "rehydrate-pending-to-cold" + ] } }, - blobSequenceNumber: { - serializedName: "x-ms-blob-sequence-number", - xmlName: "x-ms-blob-sequence-number", + customerProvidedKeySha256: { + serializedName: "CustomerProvidedKeySha256", + xmlName: "CustomerProvidedKeySha256", type: { - name: "Number" + name: "String" } }, - clientRequestId: { - serializedName: "x-ms-client-request-id", - xmlName: "x-ms-client-request-id", + encryptionScope: { + serializedName: "EncryptionScope", + xmlName: "EncryptionScope", type: { name: "String" } }, - requestId: { - serializedName: "x-ms-request-id", - xmlName: "x-ms-request-id", + accessTierChangedOn: { + serializedName: "AccessTierChangeTime", + xmlName: "AccessTierChangeTime", type: { - name: "String" + name: "DateTimeRfc1123" } }, - version: { - serializedName: "x-ms-version", - xmlName: "x-ms-version", + tagCount: { + serializedName: "TagCount", + xmlName: "TagCount", type: { - name: "String" + name: "Number" } }, - date: { - serializedName: "date", - xmlName: "date", + expiresOn: { + serializedName: "Expiry-Time", + xmlName: "Expiry-Time", type: { name: "DateTimeRfc1123" } }, - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", + isSealed: { + serializedName: "Sealed", + xmlName: "Sealed", type: { - name: "String" + name: "Boolean" } - } - } - } - }; - exports2.PageBlobClearPagesExceptionHeaders = { - serializedName: "PageBlob_clearPagesExceptionHeaders", - type: { - name: "Composite", - className: "PageBlobClearPagesExceptionHeaders", - modelProperties: { - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", + }, + rehydratePriority: { + serializedName: "RehydratePriority", + xmlName: "RehydratePriority", type: { - name: "String" + name: "Enum", + allowedValues: ["High", "Standard"] } - } - } - } - }; - exports2.PageBlobUploadPagesFromURLHeaders = { - serializedName: "PageBlob_uploadPagesFromURLHeaders", - type: { - name: "Composite", - className: "PageBlobUploadPagesFromURLHeaders", - modelProperties: { - etag: { - serializedName: "etag", - xmlName: "etag", + }, + lastAccessedOn: { + serializedName: "LastAccessTime", + xmlName: "LastAccessTime", type: { - name: "String" + name: "DateTimeRfc1123" } }, - lastModified: { - serializedName: "last-modified", - xmlName: "last-modified", + immutabilityPolicyExpiresOn: { + serializedName: "ImmutabilityPolicyUntilDate", + xmlName: "ImmutabilityPolicyUntilDate", type: { name: "DateTimeRfc1123" } }, - contentMD5: { - serializedName: "content-md5", - xmlName: "content-md5", + immutabilityPolicyMode: { + serializedName: "ImmutabilityPolicyMode", + xmlName: "ImmutabilityPolicyMode", type: { - name: "ByteArray" + name: "Enum", + allowedValues: ["Mutable", "Unlocked", "Locked"] } }, - xMsContentCrc64: { - serializedName: "x-ms-content-crc64", - xmlName: "x-ms-content-crc64", + legalHold: { + serializedName: "LegalHold", + xmlName: "LegalHold", type: { - name: "ByteArray" + name: "Boolean" } - }, - blobSequenceNumber: { - serializedName: "x-ms-blob-sequence-number", - xmlName: "x-ms-blob-sequence-number", + } + } + } + }; + exports2.ListBlobsHierarchySegmentResponse = { + serializedName: "ListBlobsHierarchySegmentResponse", + xmlName: "EnumerationResults", + type: { + name: "Composite", + className: "ListBlobsHierarchySegmentResponse", + modelProperties: { + serviceEndpoint: { + 
serializedName: "ServiceEndpoint", + required: true, + xmlName: "ServiceEndpoint", + xmlIsAttribute: true, type: { - name: "Number" + name: "String" } }, - requestId: { - serializedName: "x-ms-request-id", - xmlName: "x-ms-request-id", + containerName: { + serializedName: "ContainerName", + required: true, + xmlName: "ContainerName", + xmlIsAttribute: true, type: { name: "String" } }, - version: { - serializedName: "x-ms-version", - xmlName: "x-ms-version", + prefix: { + serializedName: "Prefix", + xmlName: "Prefix", type: { name: "String" } }, - date: { - serializedName: "date", - xmlName: "date", + marker: { + serializedName: "Marker", + xmlName: "Marker", type: { - name: "DateTimeRfc1123" + name: "String" } }, - isServerEncrypted: { - serializedName: "x-ms-request-server-encrypted", - xmlName: "x-ms-request-server-encrypted", + maxPageSize: { + serializedName: "MaxResults", + xmlName: "MaxResults", type: { - name: "Boolean" + name: "Number" } }, - encryptionKeySha256: { - serializedName: "x-ms-encryption-key-sha256", - xmlName: "x-ms-encryption-key-sha256", + delimiter: { + serializedName: "Delimiter", + xmlName: "Delimiter", type: { name: "String" } }, - encryptionScope: { - serializedName: "x-ms-encryption-scope", - xmlName: "x-ms-encryption-scope", + segment: { + serializedName: "Segment", + xmlName: "Blobs", type: { - name: "String" + name: "Composite", + className: "BlobHierarchyListSegment" } }, - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", + continuationToken: { + serializedName: "NextMarker", + xmlName: "NextMarker", type: { name: "String" } @@ -73216,175 +69186,212 @@ var require_mappers = __commonJS({ } } }; - exports2.PageBlobUploadPagesFromURLExceptionHeaders = { - serializedName: "PageBlob_uploadPagesFromURLExceptionHeaders", + exports2.BlobHierarchyListSegment = { + serializedName: "BlobHierarchyListSegment", + xmlName: "Blobs", type: { name: "Composite", - className: "PageBlobUploadPagesFromURLExceptionHeaders", + className: "BlobHierarchyListSegment", modelProperties: { - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } - }, - copySourceErrorCode: { - serializedName: "x-ms-copy-source-error-code", - xmlName: "x-ms-copy-source-error-code", + blobPrefixes: { + serializedName: "BlobPrefixes", + xmlName: "BlobPrefixes", + xmlElementName: "BlobPrefix", type: { - name: "String" + name: "Sequence", + element: { + type: { + name: "Composite", + className: "BlobPrefix" + } + } } }, - copySourceStatusCode: { - serializedName: "x-ms-copy-source-status-code", - xmlName: "x-ms-copy-source-status-code", + blobItems: { + serializedName: "BlobItems", + required: true, + xmlName: "BlobItems", + xmlElementName: "Blob", type: { - name: "Number" + name: "Sequence", + element: { + type: { + name: "Composite", + className: "BlobItemInternal" + } + } } } } } }; - exports2.PageBlobGetPageRangesHeaders = { - serializedName: "PageBlob_getPageRangesHeaders", + exports2.BlobPrefix = { + serializedName: "BlobPrefix", type: { name: "Composite", - className: "PageBlobGetPageRangesHeaders", + className: "BlobPrefix", modelProperties: { - lastModified: { - serializedName: "last-modified", - xmlName: "last-modified", - type: { - name: "DateTimeRfc1123" - } - }, - etag: { - serializedName: "etag", - xmlName: "etag", - type: { - name: "String" - } - }, - blobContentLength: { - serializedName: "x-ms-blob-content-length", - xmlName: "x-ms-blob-content-length", + name: { + serializedName: "Name", + xmlName: 
"Name", type: { - name: "Number" + name: "Composite", + className: "BlobName" } - }, - clientRequestId: { - serializedName: "x-ms-client-request-id", - xmlName: "x-ms-client-request-id", + } + } + } + }; + exports2.BlockLookupList = { + serializedName: "BlockLookupList", + xmlName: "BlockList", + type: { + name: "Composite", + className: "BlockLookupList", + modelProperties: { + committed: { + serializedName: "Committed", + xmlName: "Committed", + xmlElementName: "Committed", type: { - name: "String" + name: "Sequence", + element: { + type: { + name: "String" + } + } } }, - requestId: { - serializedName: "x-ms-request-id", - xmlName: "x-ms-request-id", + uncommitted: { + serializedName: "Uncommitted", + xmlName: "Uncommitted", + xmlElementName: "Uncommitted", type: { - name: "String" + name: "Sequence", + element: { + type: { + name: "String" + } + } } }, - version: { - serializedName: "x-ms-version", - xmlName: "x-ms-version", + latest: { + serializedName: "Latest", + xmlName: "Latest", + xmlElementName: "Latest", type: { - name: "String" + name: "Sequence", + element: { + type: { + name: "String" + } + } } - }, - date: { - serializedName: "date", - xmlName: "date", + } + } + } + }; + exports2.BlockList = { + serializedName: "BlockList", + type: { + name: "Composite", + className: "BlockList", + modelProperties: { + committedBlocks: { + serializedName: "CommittedBlocks", + xmlName: "CommittedBlocks", + xmlIsWrapped: true, + xmlElementName: "Block", type: { - name: "DateTimeRfc1123" + name: "Sequence", + element: { + type: { + name: "Composite", + className: "Block" + } + } } }, - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", + uncommittedBlocks: { + serializedName: "UncommittedBlocks", + xmlName: "UncommittedBlocks", + xmlIsWrapped: true, + xmlElementName: "Block", type: { - name: "String" + name: "Sequence", + element: { + type: { + name: "Composite", + className: "Block" + } + } } } } } }; - exports2.PageBlobGetPageRangesExceptionHeaders = { - serializedName: "PageBlob_getPageRangesExceptionHeaders", + exports2.Block = { + serializedName: "Block", type: { name: "Composite", - className: "PageBlobGetPageRangesExceptionHeaders", + className: "Block", modelProperties: { - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", + name: { + serializedName: "Name", + required: true, + xmlName: "Name", type: { name: "String" } + }, + size: { + serializedName: "Size", + required: true, + xmlName: "Size", + type: { + name: "Number" + } } } } }; - exports2.PageBlobGetPageRangesDiffHeaders = { - serializedName: "PageBlob_getPageRangesDiffHeaders", + exports2.PageList = { + serializedName: "PageList", type: { name: "Composite", - className: "PageBlobGetPageRangesDiffHeaders", + className: "PageList", modelProperties: { - lastModified: { - serializedName: "last-modified", - xmlName: "last-modified", - type: { - name: "DateTimeRfc1123" - } - }, - etag: { - serializedName: "etag", - xmlName: "etag", - type: { - name: "String" - } - }, - blobContentLength: { - serializedName: "x-ms-blob-content-length", - xmlName: "x-ms-blob-content-length", - type: { - name: "Number" - } - }, - clientRequestId: { - serializedName: "x-ms-client-request-id", - xmlName: "x-ms-client-request-id", - type: { - name: "String" - } - }, - requestId: { - serializedName: "x-ms-request-id", - xmlName: "x-ms-request-id", - type: { - name: "String" - } - }, - version: { - serializedName: "x-ms-version", - xmlName: "x-ms-version", + pageRange: { + serializedName: "PageRange", 
+ xmlName: "PageRange", + xmlElementName: "PageRange", type: { - name: "String" + name: "Sequence", + element: { + type: { + name: "Composite", + className: "PageRange" + } + } } }, - date: { - serializedName: "date", - xmlName: "date", + clearRange: { + serializedName: "ClearRange", + xmlName: "ClearRange", + xmlElementName: "ClearRange", type: { - name: "DateTimeRfc1123" + name: "Sequence", + element: { + type: { + name: "Composite", + className: "ClearRange" + } + } } }, - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", + continuationToken: { + serializedName: "NextMarker", + xmlName: "NextMarker", type: { name: "String" } @@ -73392,250 +69399,222 @@ var require_mappers = __commonJS({ } } }; - exports2.PageBlobGetPageRangesDiffExceptionHeaders = { - serializedName: "PageBlob_getPageRangesDiffExceptionHeaders", + exports2.PageRange = { + serializedName: "PageRange", + xmlName: "PageRange", type: { name: "Composite", - className: "PageBlobGetPageRangesDiffExceptionHeaders", + className: "PageRange", modelProperties: { - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", + start: { + serializedName: "Start", + required: true, + xmlName: "Start", type: { - name: "String" + name: "Number" + } + }, + end: { + serializedName: "End", + required: true, + xmlName: "End", + type: { + name: "Number" } } } } }; - exports2.PageBlobResizeHeaders = { - serializedName: "PageBlob_resizeHeaders", + exports2.ClearRange = { + serializedName: "ClearRange", + xmlName: "ClearRange", type: { name: "Composite", - className: "PageBlobResizeHeaders", + className: "ClearRange", modelProperties: { - etag: { - serializedName: "etag", - xmlName: "etag", - type: { - name: "String" - } - }, - lastModified: { - serializedName: "last-modified", - xmlName: "last-modified", - type: { - name: "DateTimeRfc1123" - } - }, - blobSequenceNumber: { - serializedName: "x-ms-blob-sequence-number", - xmlName: "x-ms-blob-sequence-number", + start: { + serializedName: "Start", + required: true, + xmlName: "Start", type: { name: "Number" } }, - clientRequestId: { - serializedName: "x-ms-client-request-id", - xmlName: "x-ms-client-request-id", + end: { + serializedName: "End", + required: true, + xmlName: "End", type: { - name: "String" + name: "Number" } - }, - requestId: { - serializedName: "x-ms-request-id", - xmlName: "x-ms-request-id", + } + } + } + }; + exports2.QueryRequest = { + serializedName: "QueryRequest", + xmlName: "QueryRequest", + type: { + name: "Composite", + className: "QueryRequest", + modelProperties: { + queryType: { + serializedName: "QueryType", + required: true, + xmlName: "QueryType", type: { name: "String" } }, - version: { - serializedName: "x-ms-version", - xmlName: "x-ms-version", + expression: { + serializedName: "Expression", + required: true, + xmlName: "Expression", type: { name: "String" } }, - date: { - serializedName: "date", - xmlName: "date", + inputSerialization: { + serializedName: "InputSerialization", + xmlName: "InputSerialization", type: { - name: "DateTimeRfc1123" + name: "Composite", + className: "QuerySerialization" } }, - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", + outputSerialization: { + serializedName: "OutputSerialization", + xmlName: "OutputSerialization", type: { - name: "String" + name: "Composite", + className: "QuerySerialization" } } } } }; - exports2.PageBlobResizeExceptionHeaders = { - serializedName: "PageBlob_resizeExceptionHeaders", + exports2.QuerySerialization = { + 
serializedName: "QuerySerialization", type: { name: "Composite", - className: "PageBlobResizeExceptionHeaders", + className: "QuerySerialization", modelProperties: { - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", + format: { + serializedName: "Format", + xmlName: "Format", type: { - name: "String" + name: "Composite", + className: "QueryFormat" } } } } }; - exports2.PageBlobUpdateSequenceNumberHeaders = { - serializedName: "PageBlob_updateSequenceNumberHeaders", + exports2.QueryFormat = { + serializedName: "QueryFormat", type: { name: "Composite", - className: "PageBlobUpdateSequenceNumberHeaders", + className: "QueryFormat", modelProperties: { - etag: { - serializedName: "etag", - xmlName: "etag", - type: { - name: "String" - } - }, - lastModified: { - serializedName: "last-modified", - xmlName: "last-modified", - type: { - name: "DateTimeRfc1123" - } - }, - blobSequenceNumber: { - serializedName: "x-ms-blob-sequence-number", - xmlName: "x-ms-blob-sequence-number", - type: { - name: "Number" - } - }, - clientRequestId: { - serializedName: "x-ms-client-request-id", - xmlName: "x-ms-client-request-id", + type: { + serializedName: "Type", + required: true, + xmlName: "Type", type: { - name: "String" + name: "Enum", + allowedValues: ["delimited", "json", "arrow", "parquet"] } }, - requestId: { - serializedName: "x-ms-request-id", - xmlName: "x-ms-request-id", + delimitedTextConfiguration: { + serializedName: "DelimitedTextConfiguration", + xmlName: "DelimitedTextConfiguration", type: { - name: "String" + name: "Composite", + className: "DelimitedTextConfiguration" } }, - version: { - serializedName: "x-ms-version", - xmlName: "x-ms-version", + jsonTextConfiguration: { + serializedName: "JsonTextConfiguration", + xmlName: "JsonTextConfiguration", type: { - name: "String" + name: "Composite", + className: "JsonTextConfiguration" } }, - date: { - serializedName: "date", - xmlName: "date", + arrowConfiguration: { + serializedName: "ArrowConfiguration", + xmlName: "ArrowConfiguration", type: { - name: "DateTimeRfc1123" + name: "Composite", + className: "ArrowConfiguration" } }, - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } - } - } - } - }; - exports2.PageBlobUpdateSequenceNumberExceptionHeaders = { - serializedName: "PageBlob_updateSequenceNumberExceptionHeaders", - type: { - name: "Composite", - className: "PageBlobUpdateSequenceNumberExceptionHeaders", - modelProperties: { - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", + parquetTextConfiguration: { + serializedName: "ParquetTextConfiguration", + xmlName: "ParquetTextConfiguration", type: { - name: "String" + name: "Dictionary", + value: { type: { name: "any" } } } } } } }; - exports2.PageBlobCopyIncrementalHeaders = { - serializedName: "PageBlob_copyIncrementalHeaders", + exports2.DelimitedTextConfiguration = { + serializedName: "DelimitedTextConfiguration", + xmlName: "DelimitedTextConfiguration", type: { name: "Composite", - className: "PageBlobCopyIncrementalHeaders", + className: "DelimitedTextConfiguration", modelProperties: { - etag: { - serializedName: "etag", - xmlName: "etag", - type: { - name: "String" - } - }, - lastModified: { - serializedName: "last-modified", - xmlName: "last-modified", - type: { - name: "DateTimeRfc1123" - } - }, - clientRequestId: { - serializedName: "x-ms-client-request-id", - xmlName: "x-ms-client-request-id", + columnSeparator: { + serializedName: "ColumnSeparator", + 
xmlName: "ColumnSeparator", type: { name: "String" } }, - requestId: { - serializedName: "x-ms-request-id", - xmlName: "x-ms-request-id", + fieldQuote: { + serializedName: "FieldQuote", + xmlName: "FieldQuote", type: { name: "String" } }, - version: { - serializedName: "x-ms-version", - xmlName: "x-ms-version", + recordSeparator: { + serializedName: "RecordSeparator", + xmlName: "RecordSeparator", type: { name: "String" } }, - date: { - serializedName: "date", - xmlName: "date", - type: { - name: "DateTimeRfc1123" - } - }, - copyId: { - serializedName: "x-ms-copy-id", - xmlName: "x-ms-copy-id", + escapeChar: { + serializedName: "EscapeChar", + xmlName: "EscapeChar", type: { name: "String" } }, - copyStatus: { - serializedName: "x-ms-copy-status", - xmlName: "x-ms-copy-status", + headersPresent: { + serializedName: "HeadersPresent", + xmlName: "HasHeaders", type: { - name: "Enum", - allowedValues: ["pending", "success", "aborted", "failed"] + name: "Boolean" } - }, - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", + } + } + } + }; + exports2.JsonTextConfiguration = { + serializedName: "JsonTextConfiguration", + xmlName: "JsonTextConfiguration", + type: { + name: "Composite", + className: "JsonTextConfiguration", + modelProperties: { + recordSeparator: { + serializedName: "RecordSeparator", + xmlName: "RecordSeparator", type: { name: "String" } @@ -73643,49 +69622,77 @@ var require_mappers = __commonJS({ } } }; - exports2.PageBlobCopyIncrementalExceptionHeaders = { - serializedName: "PageBlob_copyIncrementalExceptionHeaders", + exports2.ArrowConfiguration = { + serializedName: "ArrowConfiguration", + xmlName: "ArrowConfiguration", type: { name: "Composite", - className: "PageBlobCopyIncrementalExceptionHeaders", + className: "ArrowConfiguration", modelProperties: { - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", + schema: { + serializedName: "Schema", + required: true, + xmlName: "Schema", + xmlIsWrapped: true, + xmlElementName: "Field", type: { - name: "String" + name: "Sequence", + element: { + type: { + name: "Composite", + className: "ArrowField" + } + } } } } } }; - exports2.AppendBlobCreateHeaders = { - serializedName: "AppendBlob_createHeaders", + exports2.ArrowField = { + serializedName: "ArrowField", + xmlName: "Field", type: { name: "Composite", - className: "AppendBlobCreateHeaders", + className: "ArrowField", modelProperties: { - etag: { - serializedName: "etag", - xmlName: "etag", + type: { + serializedName: "Type", + required: true, + xmlName: "Type", type: { name: "String" } }, - lastModified: { - serializedName: "last-modified", - xmlName: "last-modified", + name: { + serializedName: "Name", + xmlName: "Name", type: { - name: "DateTimeRfc1123" + name: "String" } }, - contentMD5: { - serializedName: "content-md5", - xmlName: "content-md5", + precision: { + serializedName: "Precision", + xmlName: "Precision", type: { - name: "ByteArray" + name: "Number" } }, + scale: { + serializedName: "Scale", + xmlName: "Scale", + type: { + name: "Number" + } + } + } + } + }; + exports2.ServiceSetPropertiesHeaders = { + serializedName: "Service_setPropertiesHeaders", + type: { + name: "Composite", + className: "ServiceSetPropertiesHeaders", + modelProperties: { clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", @@ -73698,46 +69705,11 @@ var require_mappers = __commonJS({ xmlName: "x-ms-request-id", type: { name: "String" - } - }, - version: { - serializedName: "x-ms-version", - 
xmlName: "x-ms-version", - type: { - name: "String" - } - }, - versionId: { - serializedName: "x-ms-version-id", - xmlName: "x-ms-version-id", - type: { - name: "String" - } - }, - date: { - serializedName: "date", - xmlName: "date", - type: { - name: "DateTimeRfc1123" - } - }, - isServerEncrypted: { - serializedName: "x-ms-request-server-encrypted", - xmlName: "x-ms-request-server-encrypted", - type: { - name: "Boolean" - } - }, - encryptionKeySha256: { - serializedName: "x-ms-encryption-key-sha256", - xmlName: "x-ms-encryption-key-sha256", - type: { - name: "String" - } - }, - encryptionScope: { - serializedName: "x-ms-encryption-scope", - xmlName: "x-ms-encryption-scope", + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", type: { name: "String" } @@ -73752,11 +69724,11 @@ var require_mappers = __commonJS({ } } }; - exports2.AppendBlobCreateExceptionHeaders = { - serializedName: "AppendBlob_createExceptionHeaders", + exports2.ServiceSetPropertiesExceptionHeaders = { + serializedName: "Service_setPropertiesExceptionHeaders", type: { name: "Composite", - className: "AppendBlobCreateExceptionHeaders", + className: "ServiceSetPropertiesExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", @@ -73768,40 +69740,12 @@ var require_mappers = __commonJS({ } } }; - exports2.AppendBlobAppendBlockHeaders = { - serializedName: "AppendBlob_appendBlockHeaders", + exports2.ServiceGetPropertiesHeaders = { + serializedName: "Service_getPropertiesHeaders", type: { name: "Composite", - className: "AppendBlobAppendBlockHeaders", + className: "ServiceGetPropertiesHeaders", modelProperties: { - etag: { - serializedName: "etag", - xmlName: "etag", - type: { - name: "String" - } - }, - lastModified: { - serializedName: "last-modified", - xmlName: "last-modified", - type: { - name: "DateTimeRfc1123" - } - }, - contentMD5: { - serializedName: "content-md5", - xmlName: "content-md5", - type: { - name: "ByteArray" - } - }, - xMsContentCrc64: { - serializedName: "x-ms-content-crc64", - xmlName: "x-ms-content-crc64", - type: { - name: "ByteArray" - } - }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", @@ -73823,48 +69767,6 @@ var require_mappers = __commonJS({ name: "String" } }, - date: { - serializedName: "date", - xmlName: "date", - type: { - name: "DateTimeRfc1123" - } - }, - blobAppendOffset: { - serializedName: "x-ms-blob-append-offset", - xmlName: "x-ms-blob-append-offset", - type: { - name: "String" - } - }, - blobCommittedBlockCount: { - serializedName: "x-ms-blob-committed-block-count", - xmlName: "x-ms-blob-committed-block-count", - type: { - name: "Number" - } - }, - isServerEncrypted: { - serializedName: "x-ms-request-server-encrypted", - xmlName: "x-ms-request-server-encrypted", - type: { - name: "Boolean" - } - }, - encryptionKeySha256: { - serializedName: "x-ms-encryption-key-sha256", - xmlName: "x-ms-encryption-key-sha256", - type: { - name: "String" - } - }, - encryptionScope: { - serializedName: "x-ms-encryption-scope", - xmlName: "x-ms-encryption-scope", - type: { - name: "String" - } - }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", @@ -73875,11 +69777,11 @@ var require_mappers = __commonJS({ } } }; - exports2.AppendBlobAppendBlockExceptionHeaders = { - serializedName: "AppendBlob_appendBlockExceptionHeaders", + exports2.ServiceGetPropertiesExceptionHeaders = { + serializedName: "Service_getPropertiesExceptionHeaders", type: { name: "Composite", - className: 
"AppendBlobAppendBlockExceptionHeaders", + className: "ServiceGetPropertiesExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", @@ -73891,40 +69793,19 @@ var require_mappers = __commonJS({ } } }; - exports2.AppendBlobAppendBlockFromUrlHeaders = { - serializedName: "AppendBlob_appendBlockFromUrlHeaders", + exports2.ServiceGetStatisticsHeaders = { + serializedName: "Service_getStatisticsHeaders", type: { name: "Composite", - className: "AppendBlobAppendBlockFromUrlHeaders", + className: "ServiceGetStatisticsHeaders", modelProperties: { - etag: { - serializedName: "etag", - xmlName: "etag", + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", type: { name: "String" } }, - lastModified: { - serializedName: "last-modified", - xmlName: "last-modified", - type: { - name: "DateTimeRfc1123" - } - }, - contentMD5: { - serializedName: "content-md5", - xmlName: "content-md5", - type: { - name: "ByteArray" - } - }, - xMsContentCrc64: { - serializedName: "x-ms-content-crc64", - xmlName: "x-ms-content-crc64", - type: { - name: "ByteArray" - } - }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", @@ -73946,39 +69827,57 @@ var require_mappers = __commonJS({ name: "DateTimeRfc1123" } }, - blobAppendOffset: { - serializedName: "x-ms-blob-append-offset", - xmlName: "x-ms-blob-append-offset", + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", type: { name: "String" } - }, - blobCommittedBlockCount: { - serializedName: "x-ms-blob-committed-block-count", - xmlName: "x-ms-blob-committed-block-count", + } + } + } + }; + exports2.ServiceGetStatisticsExceptionHeaders = { + serializedName: "Service_getStatisticsExceptionHeaders", + type: { + name: "Composite", + className: "ServiceGetStatisticsExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", type: { - name: "Number" + name: "String" } - }, - encryptionKeySha256: { - serializedName: "x-ms-encryption-key-sha256", - xmlName: "x-ms-encryption-key-sha256", + } + } + } + }; + exports2.ServiceListContainersSegmentHeaders = { + serializedName: "Service_listContainersSegmentHeaders", + type: { + name: "Composite", + className: "ServiceListContainersSegmentHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", type: { name: "String" } }, - encryptionScope: { - serializedName: "x-ms-encryption-scope", - xmlName: "x-ms-encryption-scope", + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", type: { name: "String" } }, - isServerEncrypted: { - serializedName: "x-ms-request-server-encrypted", - xmlName: "x-ms-request-server-encrypted", + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", type: { - name: "Boolean" + name: "String" } }, errorCode: { @@ -73991,11 +69890,11 @@ var require_mappers = __commonJS({ } } }; - exports2.AppendBlobAppendBlockFromUrlExceptionHeaders = { - serializedName: "AppendBlob_appendBlockFromUrlExceptionHeaders", + exports2.ServiceListContainersSegmentExceptionHeaders = { + serializedName: "Service_listContainersSegmentExceptionHeaders", type: { name: "Composite", - className: "AppendBlobAppendBlockFromUrlExceptionHeaders", + className: "ServiceListContainersSegmentExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", @@ -74003,44 +69902,16 @@ var require_mappers = __commonJS({ type: { 
name: "String" } - }, - copySourceErrorCode: { - serializedName: "x-ms-copy-source-error-code", - xmlName: "x-ms-copy-source-error-code", - type: { - name: "String" - } - }, - copySourceStatusCode: { - serializedName: "x-ms-copy-source-status-code", - xmlName: "x-ms-copy-source-status-code", - type: { - name: "Number" - } } } } }; - exports2.AppendBlobSealHeaders = { - serializedName: "AppendBlob_sealHeaders", + exports2.ServiceGetUserDelegationKeyHeaders = { + serializedName: "Service_getUserDelegationKeyHeaders", type: { name: "Composite", - className: "AppendBlobSealHeaders", + className: "ServiceGetUserDelegationKeyHeaders", modelProperties: { - etag: { - serializedName: "etag", - xmlName: "etag", - type: { - name: "String" - } - }, - lastModified: { - serializedName: "last-modified", - xmlName: "last-modified", - type: { - name: "DateTimeRfc1123" - } - }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", @@ -74069,21 +69940,21 @@ var require_mappers = __commonJS({ name: "DateTimeRfc1123" } }, - isSealed: { - serializedName: "x-ms-blob-sealed", - xmlName: "x-ms-blob-sealed", + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", type: { - name: "Boolean" + name: "String" } } } } }; - exports2.AppendBlobSealExceptionHeaders = { - serializedName: "AppendBlob_sealExceptionHeaders", + exports2.ServiceGetUserDelegationKeyExceptionHeaders = { + serializedName: "Service_getUserDelegationKeyExceptionHeaders", type: { name: "Composite", - className: "AppendBlobSealExceptionHeaders", + className: "ServiceGetUserDelegationKeyExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", @@ -74095,33 +69966,12 @@ var require_mappers = __commonJS({ } } }; - exports2.BlockBlobUploadHeaders = { - serializedName: "BlockBlob_uploadHeaders", + exports2.ServiceGetAccountInfoHeaders = { + serializedName: "Service_getAccountInfoHeaders", type: { name: "Composite", - className: "BlockBlobUploadHeaders", + className: "ServiceGetAccountInfoHeaders", modelProperties: { - etag: { - serializedName: "etag", - xmlName: "etag", - type: { - name: "String" - } - }, - lastModified: { - serializedName: "last-modified", - xmlName: "last-modified", - type: { - name: "DateTimeRfc1123" - } - }, - contentMD5: { - serializedName: "content-md5", - xmlName: "content-md5", - type: { - name: "ByteArray" - } - }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", @@ -74143,13 +69993,6 @@ var require_mappers = __commonJS({ name: "String" } }, - versionId: { - serializedName: "x-ms-version-id", - xmlName: "x-ms-version-id", - type: { - name: "String" - } - }, date: { serializedName: "date", xmlName: "date", @@ -74157,25 +70000,39 @@ var require_mappers = __commonJS({ name: "DateTimeRfc1123" } }, - isServerEncrypted: { - serializedName: "x-ms-request-server-encrypted", - xmlName: "x-ms-request-server-encrypted", + skuName: { + serializedName: "x-ms-sku-name", + xmlName: "x-ms-sku-name", type: { - name: "Boolean" + name: "Enum", + allowedValues: [ + "Standard_LRS", + "Standard_GRS", + "Standard_RAGRS", + "Standard_ZRS", + "Premium_LRS" + ] } }, - encryptionKeySha256: { - serializedName: "x-ms-encryption-key-sha256", - xmlName: "x-ms-encryption-key-sha256", + accountKind: { + serializedName: "x-ms-account-kind", + xmlName: "x-ms-account-kind", type: { - name: "String" + name: "Enum", + allowedValues: [ + "Storage", + "BlobStorage", + "StorageV2", + "FileStorage", + "BlockBlobStorage" + ] } }, - 
encryptionScope: { - serializedName: "x-ms-encryption-scope", - xmlName: "x-ms-encryption-scope", + isHierarchicalNamespaceEnabled: { + serializedName: "x-ms-is-hns-enabled", + xmlName: "x-ms-is-hns-enabled", type: { - name: "String" + name: "Boolean" } }, errorCode: { @@ -74188,11 +70045,11 @@ var require_mappers = __commonJS({ } } }; - exports2.BlockBlobUploadExceptionHeaders = { - serializedName: "BlockBlob_uploadExceptionHeaders", + exports2.ServiceGetAccountInfoExceptionHeaders = { + serializedName: "Service_getAccountInfoExceptionHeaders", type: { name: "Composite", - className: "BlockBlobUploadExceptionHeaders", + className: "ServiceGetAccountInfoExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", @@ -74204,36 +70061,15 @@ var require_mappers = __commonJS({ } } }; - exports2.BlockBlobPutBlobFromUrlHeaders = { - serializedName: "BlockBlob_putBlobFromUrlHeaders", + exports2.ServiceSubmitBatchHeaders = { + serializedName: "Service_submitBatchHeaders", type: { name: "Composite", - className: "BlockBlobPutBlobFromUrlHeaders", + className: "ServiceSubmitBatchHeaders", modelProperties: { - etag: { - serializedName: "etag", - xmlName: "etag", - type: { - name: "String" - } - }, - lastModified: { - serializedName: "last-modified", - xmlName: "last-modified", - type: { - name: "DateTimeRfc1123" - } - }, - contentMD5: { - serializedName: "content-md5", - xmlName: "content-md5", - type: { - name: "ByteArray" - } - }, - clientRequestId: { - serializedName: "x-ms-client-request-id", - xmlName: "x-ms-client-request-id", + contentType: { + serializedName: "content-type", + xmlName: "content-type", type: { name: "String" } @@ -74252,37 +70088,9 @@ var require_mappers = __commonJS({ name: "String" } }, - versionId: { - serializedName: "x-ms-version-id", - xmlName: "x-ms-version-id", - type: { - name: "String" - } - }, - date: { - serializedName: "date", - xmlName: "date", - type: { - name: "DateTimeRfc1123" - } - }, - isServerEncrypted: { - serializedName: "x-ms-request-server-encrypted", - xmlName: "x-ms-request-server-encrypted", - type: { - name: "Boolean" - } - }, - encryptionKeySha256: { - serializedName: "x-ms-encryption-key-sha256", - xmlName: "x-ms-encryption-key-sha256", - type: { - name: "String" - } - }, - encryptionScope: { - serializedName: "x-ms-encryption-scope", - xmlName: "x-ms-encryption-scope", + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", type: { name: "String" } @@ -74297,11 +70105,11 @@ var require_mappers = __commonJS({ } } }; - exports2.BlockBlobPutBlobFromUrlExceptionHeaders = { - serializedName: "BlockBlob_putBlobFromUrlExceptionHeaders", + exports2.ServiceSubmitBatchExceptionHeaders = { + serializedName: "Service_submitBatchExceptionHeaders", type: { name: "Composite", - className: "BlockBlobPutBlobFromUrlExceptionHeaders", + className: "ServiceSubmitBatchExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", @@ -74309,37 +70117,16 @@ var require_mappers = __commonJS({ type: { name: "String" } - }, - copySourceErrorCode: { - serializedName: "x-ms-copy-source-error-code", - xmlName: "x-ms-copy-source-error-code", - type: { - name: "String" - } - }, - copySourceStatusCode: { - serializedName: "x-ms-copy-source-status-code", - xmlName: "x-ms-copy-source-status-code", - type: { - name: "Number" - } } } } }; - exports2.BlockBlobStageBlockHeaders = { - serializedName: "BlockBlob_stageBlockHeaders", + exports2.ServiceFilterBlobsHeaders = { + serializedName: 
"Service_filterBlobsHeaders", type: { name: "Composite", - className: "BlockBlobStageBlockHeaders", + className: "ServiceFilterBlobsHeaders", modelProperties: { - contentMD5: { - serializedName: "content-md5", - xmlName: "content-md5", - type: { - name: "ByteArray" - } - }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", @@ -74368,34 +70155,6 @@ var require_mappers = __commonJS({ name: "DateTimeRfc1123" } }, - xMsContentCrc64: { - serializedName: "x-ms-content-crc64", - xmlName: "x-ms-content-crc64", - type: { - name: "ByteArray" - } - }, - isServerEncrypted: { - serializedName: "x-ms-request-server-encrypted", - xmlName: "x-ms-request-server-encrypted", - type: { - name: "Boolean" - } - }, - encryptionKeySha256: { - serializedName: "x-ms-encryption-key-sha256", - xmlName: "x-ms-encryption-key-sha256", - type: { - name: "String" - } - }, - encryptionScope: { - serializedName: "x-ms-encryption-scope", - xmlName: "x-ms-encryption-scope", - type: { - name: "String" - } - }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", @@ -74406,11 +70165,11 @@ var require_mappers = __commonJS({ } } }; - exports2.BlockBlobStageBlockExceptionHeaders = { - serializedName: "BlockBlob_stageBlockExceptionHeaders", + exports2.ServiceFilterBlobsExceptionHeaders = { + serializedName: "Service_filterBlobsExceptionHeaders", type: { name: "Composite", - className: "BlockBlobStageBlockExceptionHeaders", + className: "ServiceFilterBlobsExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", @@ -74422,24 +70181,24 @@ var require_mappers = __commonJS({ } } }; - exports2.BlockBlobStageBlockFromURLHeaders = { - serializedName: "BlockBlob_stageBlockFromURLHeaders", + exports2.ContainerCreateHeaders = { + serializedName: "Container_createHeaders", type: { name: "Composite", - className: "BlockBlobStageBlockFromURLHeaders", + className: "ContainerCreateHeaders", modelProperties: { - contentMD5: { - serializedName: "content-md5", - xmlName: "content-md5", + etag: { + serializedName: "etag", + xmlName: "etag", type: { - name: "ByteArray" + name: "String" } }, - xMsContentCrc64: { - serializedName: "x-ms-content-crc64", - xmlName: "x-ms-content-crc64", + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", type: { - name: "ByteArray" + name: "DateTimeRfc1123" } }, clientRequestId: { @@ -74470,27 +70229,6 @@ var require_mappers = __commonJS({ name: "DateTimeRfc1123" } }, - isServerEncrypted: { - serializedName: "x-ms-request-server-encrypted", - xmlName: "x-ms-request-server-encrypted", - type: { - name: "Boolean" - } - }, - encryptionKeySha256: { - serializedName: "x-ms-encryption-key-sha256", - xmlName: "x-ms-encryption-key-sha256", - type: { - name: "String" - } - }, - encryptionScope: { - serializedName: "x-ms-encryption-scope", - xmlName: "x-ms-encryption-scope", - type: { - name: "String" - } - }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", @@ -74501,42 +70239,37 @@ var require_mappers = __commonJS({ } } }; - exports2.BlockBlobStageBlockFromURLExceptionHeaders = { - serializedName: "BlockBlob_stageBlockFromURLExceptionHeaders", + exports2.ContainerCreateExceptionHeaders = { + serializedName: "Container_createExceptionHeaders", type: { name: "Composite", - className: "BlockBlobStageBlockFromURLExceptionHeaders", + className: "ContainerCreateExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { 
- name: "String" - } - }, - copySourceErrorCode: { - serializedName: "x-ms-copy-source-error-code", - xmlName: "x-ms-copy-source-error-code", - type: { - name: "String" - } - }, - copySourceStatusCode: { - serializedName: "x-ms-copy-source-status-code", - xmlName: "x-ms-copy-source-status-code", + xmlName: "x-ms-error-code", type: { - name: "Number" + name: "String" } } } } }; - exports2.BlockBlobCommitBlockListHeaders = { - serializedName: "BlockBlob_commitBlockListHeaders", + exports2.ContainerGetPropertiesHeaders = { + serializedName: "Container_getPropertiesHeaders", type: { name: "Composite", - className: "BlockBlobCommitBlockListHeaders", + className: "ContainerGetPropertiesHeaders", modelProperties: { + metadata: { + serializedName: "x-ms-meta", + headerCollectionPrefix: "x-ms-meta-", + xmlName: "x-ms-meta", + type: { + name: "Dictionary", + value: { type: { name: "String" } } + } + }, etag: { serializedName: "etag", xmlName: "etag", @@ -74551,18 +70284,34 @@ var require_mappers = __commonJS({ name: "DateTimeRfc1123" } }, - contentMD5: { - serializedName: "content-md5", - xmlName: "content-md5", + leaseDuration: { + serializedName: "x-ms-lease-duration", + xmlName: "x-ms-lease-duration", type: { - name: "ByteArray" + name: "Enum", + allowedValues: ["infinite", "fixed"] } }, - xMsContentCrc64: { - serializedName: "x-ms-content-crc64", - xmlName: "x-ms-content-crc64", + leaseState: { + serializedName: "x-ms-lease-state", + xmlName: "x-ms-lease-state", type: { - name: "ByteArray" + name: "Enum", + allowedValues: [ + "available", + "leased", + "expired", + "breaking", + "broken" + ] + } + }, + leaseStatus: { + serializedName: "x-ms-lease-status", + xmlName: "x-ms-lease-status", + type: { + name: "Enum", + allowedValues: ["locked", "unlocked"] } }, clientRequestId: { @@ -74586,13 +70335,6 @@ var require_mappers = __commonJS({ name: "String" } }, - versionId: { - serializedName: "x-ms-version-id", - xmlName: "x-ms-version-id", - type: { - name: "String" - } - }, date: { serializedName: "date", xmlName: "date", @@ -74600,27 +70342,49 @@ var require_mappers = __commonJS({ name: "DateTimeRfc1123" } }, - isServerEncrypted: { - serializedName: "x-ms-request-server-encrypted", - xmlName: "x-ms-request-server-encrypted", + blobPublicAccess: { + serializedName: "x-ms-blob-public-access", + xmlName: "x-ms-blob-public-access", + type: { + name: "Enum", + allowedValues: ["container", "blob"] + } + }, + hasImmutabilityPolicy: { + serializedName: "x-ms-has-immutability-policy", + xmlName: "x-ms-has-immutability-policy", type: { name: "Boolean" } }, - encryptionKeySha256: { - serializedName: "x-ms-encryption-key-sha256", - xmlName: "x-ms-encryption-key-sha256", + hasLegalHold: { + serializedName: "x-ms-has-legal-hold", + xmlName: "x-ms-has-legal-hold", type: { - name: "String" + name: "Boolean" } }, - encryptionScope: { - serializedName: "x-ms-encryption-scope", - xmlName: "x-ms-encryption-scope", + defaultEncryptionScope: { + serializedName: "x-ms-default-encryption-scope", + xmlName: "x-ms-default-encryption-scope", type: { name: "String" } }, + denyEncryptionScopeOverride: { + serializedName: "x-ms-deny-encryption-scope-override", + xmlName: "x-ms-deny-encryption-scope-override", + type: { + name: "Boolean" + } + }, + isImmutableStorageWithVersioningEnabled: { + serializedName: "x-ms-immutable-storage-with-versioning-enabled", + xmlName: "x-ms-immutable-storage-with-versioning-enabled", + type: { + name: "Boolean" + } + }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", 
@@ -74631,11 +70395,11 @@ var require_mappers = __commonJS({ } } }; - exports2.BlockBlobCommitBlockListExceptionHeaders = { - serializedName: "BlockBlob_commitBlockListExceptionHeaders", + exports2.ContainerGetPropertiesExceptionHeaders = { + serializedName: "Container_getPropertiesExceptionHeaders", type: { name: "Composite", - className: "BlockBlobCommitBlockListExceptionHeaders", + className: "ContainerGetPropertiesExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", @@ -74647,40 +70411,12 @@ var require_mappers = __commonJS({ } } }; - exports2.BlockBlobGetBlockListHeaders = { - serializedName: "BlockBlob_getBlockListHeaders", + exports2.ContainerDeleteHeaders = { + serializedName: "Container_deleteHeaders", type: { name: "Composite", - className: "BlockBlobGetBlockListHeaders", + className: "ContainerDeleteHeaders", modelProperties: { - lastModified: { - serializedName: "last-modified", - xmlName: "last-modified", - type: { - name: "DateTimeRfc1123" - } - }, - etag: { - serializedName: "etag", - xmlName: "etag", - type: { - name: "String" - } - }, - contentType: { - serializedName: "content-type", - xmlName: "content-type", - type: { - name: "String" - } - }, - blobContentLength: { - serializedName: "x-ms-blob-content-length", - xmlName: "x-ms-blob-content-length", - type: { - name: "Number" - } - }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", @@ -74719,11700 +70455,15350 @@ var require_mappers = __commonJS({ } } }; - exports2.BlockBlobGetBlockListExceptionHeaders = { - serializedName: "BlockBlob_getBlockListExceptionHeaders", + exports2.ContainerDeleteExceptionHeaders = { + serializedName: "Container_deleteExceptionHeaders", type: { name: "Composite", - className: "BlockBlobGetBlockListExceptionHeaders", + className: "ContainerDeleteExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", - type: { - name: "String" - } - } - } - } - }; - } -}); - -// node_modules/@azure/storage-blob/dist/commonjs/generated/src/models/parameters.js -var require_parameters = __commonJS({ - "node_modules/@azure/storage-blob/dist/commonjs/generated/src/models/parameters.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.action3 = exports2.action2 = exports2.leaseId1 = exports2.action1 = exports2.proposedLeaseId = exports2.duration = exports2.action = exports2.comp10 = exports2.sourceLeaseId = exports2.sourceContainerName = exports2.comp9 = exports2.deletedContainerVersion = exports2.deletedContainerName = exports2.comp8 = exports2.containerAcl = exports2.comp7 = exports2.comp6 = exports2.ifUnmodifiedSince = exports2.ifModifiedSince = exports2.leaseId = exports2.preventEncryptionScopeOverride = exports2.defaultEncryptionScope = exports2.access = exports2.metadata = exports2.restype2 = exports2.where = exports2.comp5 = exports2.multipartContentType = exports2.contentLength = exports2.comp4 = exports2.body = exports2.restype1 = exports2.comp3 = exports2.keyInfo = exports2.include = exports2.maxPageSize = exports2.marker = exports2.prefix = exports2.comp2 = exports2.comp1 = exports2.accept1 = exports2.requestId = exports2.version = exports2.timeoutInSeconds = exports2.comp = exports2.restype = exports2.url = exports2.accept = exports2.blobServiceProperties = exports2.contentType = void 0; - exports2.fileRequestIntent = exports2.copySourceTags = exports2.copySourceAuthorization = exports2.sourceContentMD5 = 
exports2.xMsRequiresSync = exports2.legalHold1 = exports2.sealBlob = exports2.blobTagsString = exports2.copySource = exports2.sourceIfTags = exports2.sourceIfNoneMatch = exports2.sourceIfMatch = exports2.sourceIfUnmodifiedSince = exports2.sourceIfModifiedSince = exports2.rehydratePriority = exports2.tier = exports2.comp14 = exports2.encryptionScope = exports2.legalHold = exports2.comp13 = exports2.immutabilityPolicyMode = exports2.immutabilityPolicyExpiry = exports2.comp12 = exports2.blobContentDisposition = exports2.blobContentLanguage = exports2.blobContentEncoding = exports2.blobContentMD5 = exports2.blobContentType = exports2.blobCacheControl = exports2.expiresOn = exports2.expiryOptions = exports2.comp11 = exports2.blobDeleteType = exports2.deleteSnapshots = exports2.ifTags = exports2.ifNoneMatch = exports2.ifMatch = exports2.encryptionAlgorithm = exports2.encryptionKeySha256 = exports2.encryptionKey = exports2.rangeGetContentCRC64 = exports2.rangeGetContentMD5 = exports2.range = exports2.versionId = exports2.snapshot = exports2.delimiter = exports2.include1 = exports2.proposedLeaseId1 = exports2.action4 = exports2.breakPeriod = void 0; - exports2.listType = exports2.comp25 = exports2.blocks = exports2.blockId = exports2.comp24 = exports2.copySourceBlobProperties = exports2.blobType2 = exports2.comp23 = exports2.sourceRange1 = exports2.appendPosition = exports2.maxSize = exports2.comp22 = exports2.blobType1 = exports2.comp21 = exports2.sequenceNumberAction = exports2.prevSnapshotUrl = exports2.prevsnapshot = exports2.comp20 = exports2.range1 = exports2.sourceContentCrc64 = exports2.sourceRange = exports2.sourceUrl = exports2.pageWrite1 = exports2.ifSequenceNumberEqualTo = exports2.ifSequenceNumberLessThan = exports2.ifSequenceNumberLessThanOrEqualTo = exports2.pageWrite = exports2.comp19 = exports2.accept2 = exports2.body1 = exports2.contentType1 = exports2.blobSequenceNumber = exports2.blobContentLength = exports2.blobType = exports2.transactionalContentCrc64 = exports2.transactionalContentMD5 = exports2.tags = exports2.comp18 = exports2.comp17 = exports2.queryRequest = exports2.tier1 = exports2.comp16 = exports2.copyId = exports2.copyActionAbortConstant = exports2.comp15 = void 0; - var mappers_js_1 = require_mappers(); - exports2.contentType = { - parameterPath: ["options", "contentType"], - mapper: { - defaultValue: "application/xml", - isConstant: true, - serializedName: "Content-Type", - type: { - name: "String" - } - } - }; - exports2.blobServiceProperties = { - parameterPath: "blobServiceProperties", - mapper: mappers_js_1.BlobServiceProperties - }; - exports2.accept = { - parameterPath: "accept", - mapper: { - defaultValue: "application/xml", - isConstant: true, - serializedName: "Accept", - type: { - name: "String" - } - } - }; - exports2.url = { - parameterPath: "url", - mapper: { - serializedName: "url", - required: true, - xmlName: "url", - type: { - name: "String" - } - }, - skipEncoding: true - }; - exports2.restype = { - parameterPath: "restype", - mapper: { - defaultValue: "service", - isConstant: true, - serializedName: "restype", - type: { - name: "String" - } - } - }; - exports2.comp = { - parameterPath: "comp", - mapper: { - defaultValue: "properties", - isConstant: true, - serializedName: "comp", - type: { - name: "String" - } - } - }; - exports2.timeoutInSeconds = { - parameterPath: ["options", "timeoutInSeconds"], - mapper: { - constraints: { - InclusiveMinimum: 0 - }, - serializedName: "timeout", - xmlName: "timeout", - type: { - name: "Number" - } - } - }; - 
exports2.version = { - parameterPath: "version", - mapper: { - defaultValue: "2025-11-05", - isConstant: true, - serializedName: "x-ms-version", - type: { - name: "String" - } - } - }; - exports2.requestId = { - parameterPath: ["options", "requestId"], - mapper: { - serializedName: "x-ms-client-request-id", - xmlName: "x-ms-client-request-id", - type: { - name: "String" - } - } - }; - exports2.accept1 = { - parameterPath: "accept", - mapper: { - defaultValue: "application/xml", - isConstant: true, - serializedName: "Accept", - type: { - name: "String" - } - } - }; - exports2.comp1 = { - parameterPath: "comp", - mapper: { - defaultValue: "stats", - isConstant: true, - serializedName: "comp", - type: { - name: "String" - } - } - }; - exports2.comp2 = { - parameterPath: "comp", - mapper: { - defaultValue: "list", - isConstant: true, - serializedName: "comp", - type: { - name: "String" - } - } - }; - exports2.prefix = { - parameterPath: ["options", "prefix"], - mapper: { - serializedName: "prefix", - xmlName: "prefix", - type: { - name: "String" - } - } - }; - exports2.marker = { - parameterPath: ["options", "marker"], - mapper: { - serializedName: "marker", - xmlName: "marker", - type: { - name: "String" - } - } - }; - exports2.maxPageSize = { - parameterPath: ["options", "maxPageSize"], - mapper: { - constraints: { - InclusiveMinimum: 1 - }, - serializedName: "maxresults", - xmlName: "maxresults", - type: { - name: "Number" - } - } - }; - exports2.include = { - parameterPath: ["options", "include"], - mapper: { - serializedName: "include", - xmlName: "include", - xmlElementName: "ListContainersIncludeType", - type: { - name: "Sequence", - element: { - type: { - name: "Enum", - allowedValues: ["metadata", "deleted", "system"] - } - } - } - }, - collectionFormat: "CSV" - }; - exports2.keyInfo = { - parameterPath: "keyInfo", - mapper: mappers_js_1.KeyInfo - }; - exports2.comp3 = { - parameterPath: "comp", - mapper: { - defaultValue: "userdelegationkey", - isConstant: true, - serializedName: "comp", - type: { - name: "String" - } - } - }; - exports2.restype1 = { - parameterPath: "restype", - mapper: { - defaultValue: "account", - isConstant: true, - serializedName: "restype", - type: { - name: "String" - } - } - }; - exports2.body = { - parameterPath: "body", - mapper: { - serializedName: "body", - required: true, - xmlName: "body", - type: { - name: "Stream" - } - } - }; - exports2.comp4 = { - parameterPath: "comp", - mapper: { - defaultValue: "batch", - isConstant: true, - serializedName: "comp", - type: { - name: "String" - } - } - }; - exports2.contentLength = { - parameterPath: "contentLength", - mapper: { - serializedName: "Content-Length", - required: true, - xmlName: "Content-Length", - type: { - name: "Number" - } - } - }; - exports2.multipartContentType = { - parameterPath: "multipartContentType", - mapper: { - serializedName: "Content-Type", - required: true, - xmlName: "Content-Type", - type: { - name: "String" - } - } - }; - exports2.comp5 = { - parameterPath: "comp", - mapper: { - defaultValue: "blobs", - isConstant: true, - serializedName: "comp", - type: { - name: "String" - } - } - }; - exports2.where = { - parameterPath: ["options", "where"], - mapper: { - serializedName: "where", - xmlName: "where", - type: { - name: "String" - } - } - }; - exports2.restype2 = { - parameterPath: "restype", - mapper: { - defaultValue: "container", - isConstant: true, - serializedName: "restype", - type: { - name: "String" - } - } - }; - exports2.metadata = { - parameterPath: ["options", 
"metadata"], - mapper: { - serializedName: "x-ms-meta", - xmlName: "x-ms-meta", - headerCollectionPrefix: "x-ms-meta-", - type: { - name: "Dictionary", - value: { type: { name: "String" } } - } - } - }; - exports2.access = { - parameterPath: ["options", "access"], - mapper: { - serializedName: "x-ms-blob-public-access", - xmlName: "x-ms-blob-public-access", - type: { - name: "Enum", - allowedValues: ["container", "blob"] - } - } - }; - exports2.defaultEncryptionScope = { - parameterPath: [ - "options", - "containerEncryptionScope", - "defaultEncryptionScope" - ], - mapper: { - serializedName: "x-ms-default-encryption-scope", - xmlName: "x-ms-default-encryption-scope", - type: { - name: "String" - } - } - }; - exports2.preventEncryptionScopeOverride = { - parameterPath: [ - "options", - "containerEncryptionScope", - "preventEncryptionScopeOverride" - ], - mapper: { - serializedName: "x-ms-deny-encryption-scope-override", - xmlName: "x-ms-deny-encryption-scope-override", - type: { - name: "Boolean" - } - } - }; - exports2.leaseId = { - parameterPath: ["options", "leaseAccessConditions", "leaseId"], - mapper: { - serializedName: "x-ms-lease-id", - xmlName: "x-ms-lease-id", - type: { - name: "String" - } - } - }; - exports2.ifModifiedSince = { - parameterPath: ["options", "modifiedAccessConditions", "ifModifiedSince"], - mapper: { - serializedName: "If-Modified-Since", - xmlName: "If-Modified-Since", - type: { - name: "DateTimeRfc1123" + type: { + name: "String" + } + } } } }; - exports2.ifUnmodifiedSince = { - parameterPath: ["options", "modifiedAccessConditions", "ifUnmodifiedSince"], - mapper: { - serializedName: "If-Unmodified-Since", - xmlName: "If-Unmodified-Since", - type: { - name: "DateTimeRfc1123" + exports2.ContainerSetMetadataHeaders = { + serializedName: "Container_setMetadataHeaders", + type: { + name: "Composite", + className: "ContainerSetMetadataHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } } } }; - exports2.comp6 = { - parameterPath: "comp", - mapper: { - defaultValue: "metadata", - isConstant: true, - serializedName: "comp", - type: { - name: "String" + exports2.ContainerSetMetadataExceptionHeaders = { + serializedName: "Container_setMetadataExceptionHeaders", + type: { + name: "Composite", + className: "ContainerSetMetadataExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } } } }; - exports2.comp7 = { - parameterPath: "comp", - mapper: { - defaultValue: "acl", - isConstant: true, - serializedName: "comp", - type: { - name: "String" + exports2.ContainerGetAccessPolicyHeaders = { + serializedName: "Container_getAccessPolicyHeaders", + type: { + name: "Composite", + className: 
"ContainerGetAccessPolicyHeaders", + modelProperties: { + blobPublicAccess: { + serializedName: "x-ms-blob-public-access", + xmlName: "x-ms-blob-public-access", + type: { + name: "Enum", + allowedValues: ["container", "blob"] + } + }, + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } } } }; - exports2.containerAcl = { - parameterPath: ["options", "containerAcl"], - mapper: { - serializedName: "containerAcl", - xmlName: "SignedIdentifiers", - xmlIsWrapped: true, - xmlElementName: "SignedIdentifier", - type: { - name: "Sequence", - element: { + exports2.ContainerGetAccessPolicyExceptionHeaders = { + serializedName: "Container_getAccessPolicyExceptionHeaders", + type: { + name: "Composite", + className: "ContainerGetAccessPolicyExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", type: { - name: "Composite", - className: "SignedIdentifier" + name: "String" } } } } }; - exports2.comp8 = { - parameterPath: "comp", - mapper: { - defaultValue: "undelete", - isConstant: true, - serializedName: "comp", - type: { - name: "String" + exports2.ContainerSetAccessPolicyHeaders = { + serializedName: "Container_setAccessPolicyHeaders", + type: { + name: "Composite", + className: "ContainerSetAccessPolicyHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } } } }; - exports2.deletedContainerName = { - parameterPath: ["options", "deletedContainerName"], - mapper: { - serializedName: "x-ms-deleted-container-name", - xmlName: "x-ms-deleted-container-name", - type: { - name: "String" + exports2.ContainerSetAccessPolicyExceptionHeaders = { + serializedName: "Container_setAccessPolicyExceptionHeaders", + type: { + name: "Composite", + className: "ContainerSetAccessPolicyExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } } } }; - exports2.deletedContainerVersion = { - parameterPath: ["options", "deletedContainerVersion"], - mapper: { - 
serializedName: "x-ms-deleted-container-version", - xmlName: "x-ms-deleted-container-version", - type: { - name: "String" + exports2.ContainerRestoreHeaders = { + serializedName: "Container_restoreHeaders", + type: { + name: "Composite", + className: "ContainerRestoreHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } } } }; - exports2.comp9 = { - parameterPath: "comp", - mapper: { - defaultValue: "rename", - isConstant: true, - serializedName: "comp", - type: { - name: "String" + exports2.ContainerRestoreExceptionHeaders = { + serializedName: "Container_restoreExceptionHeaders", + type: { + name: "Composite", + className: "ContainerRestoreExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } } } }; - exports2.sourceContainerName = { - parameterPath: "sourceContainerName", - mapper: { - serializedName: "x-ms-source-container-name", - required: true, - xmlName: "x-ms-source-container-name", - type: { - name: "String" + exports2.ContainerRenameHeaders = { + serializedName: "Container_renameHeaders", + type: { + name: "Composite", + className: "ContainerRenameHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } } } }; - exports2.sourceLeaseId = { - parameterPath: ["options", "sourceLeaseId"], - mapper: { - serializedName: "x-ms-source-lease-id", - xmlName: "x-ms-source-lease-id", - type: { - name: "String" + exports2.ContainerRenameExceptionHeaders = { + serializedName: "Container_renameExceptionHeaders", + type: { + name: "Composite", + className: "ContainerRenameExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } } } }; - exports2.comp10 = { - parameterPath: "comp", - mapper: { - defaultValue: "lease", - isConstant: true, - serializedName: "comp", - type: { - name: "String" + exports2.ContainerSubmitBatchHeaders = { + serializedName: "Container_submitBatchHeaders", + type: { + name: "Composite", + className: "ContainerSubmitBatchHeaders", + modelProperties: { + contentType: { + serializedName: "content-type", + xmlName: "content-type", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" 
+ } + } } } }; - exports2.action = { - parameterPath: "action", - mapper: { - defaultValue: "acquire", - isConstant: true, - serializedName: "x-ms-lease-action", - type: { - name: "String" + exports2.ContainerSubmitBatchExceptionHeaders = { + serializedName: "Container_submitBatchExceptionHeaders", + type: { + name: "Composite", + className: "ContainerSubmitBatchExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } } } }; - exports2.duration = { - parameterPath: ["options", "duration"], - mapper: { - serializedName: "x-ms-lease-duration", - xmlName: "x-ms-lease-duration", - type: { - name: "Number" + exports2.ContainerFilterBlobsHeaders = { + serializedName: "Container_filterBlobsHeaders", + type: { + name: "Composite", + className: "ContainerFilterBlobsHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + } } } }; - exports2.proposedLeaseId = { - parameterPath: ["options", "proposedLeaseId"], - mapper: { - serializedName: "x-ms-proposed-lease-id", - xmlName: "x-ms-proposed-lease-id", - type: { - name: "String" + exports2.ContainerFilterBlobsExceptionHeaders = { + serializedName: "Container_filterBlobsExceptionHeaders", + type: { + name: "Composite", + className: "ContainerFilterBlobsExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } } } }; - exports2.action1 = { - parameterPath: "action", - mapper: { - defaultValue: "release", - isConstant: true, - serializedName: "x-ms-lease-action", - type: { - name: "String" + exports2.ContainerAcquireLeaseHeaders = { + serializedName: "Container_acquireLeaseHeaders", + type: { + name: "Composite", + className: "ContainerAcquireLeaseHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + leaseId: { + serializedName: "x-ms-lease-id", + xmlName: "x-ms-lease-id", + type: { + name: "String" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + } } } }; - exports2.leaseId1 = { - parameterPath: "leaseId", - mapper: { - serializedName: "x-ms-lease-id", - required: true, - xmlName: "x-ms-lease-id", - type: { - name: "String" + exports2.ContainerAcquireLeaseExceptionHeaders = { + serializedName: "Container_acquireLeaseExceptionHeaders", + type: { + name: "Composite", + className: "ContainerAcquireLeaseExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + 
type: { + name: "String" + } + } } } }; - exports2.action2 = { - parameterPath: "action", - mapper: { - defaultValue: "renew", - isConstant: true, - serializedName: "x-ms-lease-action", - type: { - name: "String" + exports2.ContainerReleaseLeaseHeaders = { + serializedName: "Container_releaseLeaseHeaders", + type: { + name: "Composite", + className: "ContainerReleaseLeaseHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + } } } }; - exports2.action3 = { - parameterPath: "action", - mapper: { - defaultValue: "break", - isConstant: true, - serializedName: "x-ms-lease-action", - type: { - name: "String" + exports2.ContainerReleaseLeaseExceptionHeaders = { + serializedName: "Container_releaseLeaseExceptionHeaders", + type: { + name: "Composite", + className: "ContainerReleaseLeaseExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } } } }; - exports2.breakPeriod = { - parameterPath: ["options", "breakPeriod"], - mapper: { - serializedName: "x-ms-lease-break-period", - xmlName: "x-ms-lease-break-period", - type: { - name: "Number" + exports2.ContainerRenewLeaseHeaders = { + serializedName: "Container_renewLeaseHeaders", + type: { + name: "Composite", + className: "ContainerRenewLeaseHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + leaseId: { + serializedName: "x-ms-lease-id", + xmlName: "x-ms-lease-id", + type: { + name: "String" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + } } } }; - exports2.action4 = { - parameterPath: "action", - mapper: { - defaultValue: "change", - isConstant: true, - serializedName: "x-ms-lease-action", - type: { - name: "String" + exports2.ContainerRenewLeaseExceptionHeaders = { + serializedName: "Container_renewLeaseExceptionHeaders", + type: { + name: "Composite", + className: "ContainerRenewLeaseExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } } } }; - exports2.proposedLeaseId1 = { - parameterPath: "proposedLeaseId", - mapper: { - serializedName: "x-ms-proposed-lease-id", - required: true, - xmlName: "x-ms-proposed-lease-id", - type: { - name: "String" + exports2.ContainerBreakLeaseHeaders = { + 
serializedName: "Container_breakLeaseHeaders", + type: { + name: "Composite", + className: "ContainerBreakLeaseHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + leaseTime: { + serializedName: "x-ms-lease-time", + xmlName: "x-ms-lease-time", + type: { + name: "Number" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + } } } }; - exports2.include1 = { - parameterPath: ["options", "include"], - mapper: { - serializedName: "include", - xmlName: "include", - xmlElementName: "ListBlobsIncludeItem", - type: { - name: "Sequence", - element: { + exports2.ContainerBreakLeaseExceptionHeaders = { + serializedName: "Container_breakLeaseExceptionHeaders", + type: { + name: "Composite", + className: "ContainerBreakLeaseExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", type: { - name: "Enum", - allowedValues: [ - "copy", - "deleted", - "metadata", - "snapshots", - "uncommittedblobs", - "versions", - "tags", - "immutabilitypolicy", - "legalhold", - "deletedwithversions" - ] + name: "String" } } } - }, - collectionFormat: "CSV" - }; - exports2.delimiter = { - parameterPath: "delimiter", - mapper: { - serializedName: "delimiter", - required: true, - xmlName: "delimiter", - type: { - name: "String" - } } }; - exports2.snapshot = { - parameterPath: ["options", "snapshot"], - mapper: { - serializedName: "snapshot", - xmlName: "snapshot", - type: { - name: "String" + exports2.ContainerChangeLeaseHeaders = { + serializedName: "Container_changeLeaseHeaders", + type: { + name: "Composite", + className: "ContainerChangeLeaseHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + leaseId: { + serializedName: "x-ms-lease-id", + xmlName: "x-ms-lease-id", + type: { + name: "String" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + } } } }; - exports2.versionId = { - parameterPath: ["options", "versionId"], - mapper: { - serializedName: "versionid", - xmlName: "versionid", - type: { - name: "String" + exports2.ContainerChangeLeaseExceptionHeaders = { + serializedName: "Container_changeLeaseExceptionHeaders", + type: { + name: "Composite", + className: "ContainerChangeLeaseExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } } } }; - 
exports2.range = { - parameterPath: ["options", "range"], - mapper: { - serializedName: "x-ms-range", - xmlName: "x-ms-range", - type: { - name: "String" + exports2.ContainerListBlobFlatSegmentHeaders = { + serializedName: "Container_listBlobFlatSegmentHeaders", + type: { + name: "Composite", + className: "ContainerListBlobFlatSegmentHeaders", + modelProperties: { + contentType: { + serializedName: "content-type", + xmlName: "content-type", + type: { + name: "String" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } } } }; - exports2.rangeGetContentMD5 = { - parameterPath: ["options", "rangeGetContentMD5"], - mapper: { - serializedName: "x-ms-range-get-content-md5", - xmlName: "x-ms-range-get-content-md5", - type: { - name: "Boolean" + exports2.ContainerListBlobFlatSegmentExceptionHeaders = { + serializedName: "Container_listBlobFlatSegmentExceptionHeaders", + type: { + name: "Composite", + className: "ContainerListBlobFlatSegmentExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } } } }; - exports2.rangeGetContentCRC64 = { - parameterPath: ["options", "rangeGetContentCRC64"], - mapper: { - serializedName: "x-ms-range-get-content-crc64", - xmlName: "x-ms-range-get-content-crc64", - type: { - name: "Boolean" + exports2.ContainerListBlobHierarchySegmentHeaders = { + serializedName: "Container_listBlobHierarchySegmentHeaders", + type: { + name: "Composite", + className: "ContainerListBlobHierarchySegmentHeaders", + modelProperties: { + contentType: { + serializedName: "content-type", + xmlName: "content-type", + type: { + name: "String" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } } } }; - exports2.encryptionKey = { - parameterPath: ["options", "cpkInfo", "encryptionKey"], - mapper: { - serializedName: "x-ms-encryption-key", - xmlName: "x-ms-encryption-key", - type: { - name: "String" + exports2.ContainerListBlobHierarchySegmentExceptionHeaders = { + serializedName: "Container_listBlobHierarchySegmentExceptionHeaders", + type: { + name: "Composite", + className: "ContainerListBlobHierarchySegmentExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } } } }; - exports2.encryptionKeySha256 = { - parameterPath: ["options", "cpkInfo", "encryptionKeySha256"], - mapper: { - serializedName: "x-ms-encryption-key-sha256", - xmlName: 
"x-ms-encryption-key-sha256", - type: { - name: "String" + exports2.ContainerGetAccountInfoHeaders = { + serializedName: "Container_getAccountInfoHeaders", + type: { + name: "Composite", + className: "ContainerGetAccountInfoHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + skuName: { + serializedName: "x-ms-sku-name", + xmlName: "x-ms-sku-name", + type: { + name: "Enum", + allowedValues: [ + "Standard_LRS", + "Standard_GRS", + "Standard_RAGRS", + "Standard_ZRS", + "Premium_LRS" + ] + } + }, + accountKind: { + serializedName: "x-ms-account-kind", + xmlName: "x-ms-account-kind", + type: { + name: "Enum", + allowedValues: [ + "Storage", + "BlobStorage", + "StorageV2", + "FileStorage", + "BlockBlobStorage" + ] + } + }, + isHierarchicalNamespaceEnabled: { + serializedName: "x-ms-is-hns-enabled", + xmlName: "x-ms-is-hns-enabled", + type: { + name: "Boolean" + } + } } } }; - exports2.encryptionAlgorithm = { - parameterPath: ["options", "cpkInfo", "encryptionAlgorithm"], - mapper: { - serializedName: "x-ms-encryption-algorithm", - xmlName: "x-ms-encryption-algorithm", - type: { - name: "String" + exports2.ContainerGetAccountInfoExceptionHeaders = { + serializedName: "Container_getAccountInfoExceptionHeaders", + type: { + name: "Composite", + className: "ContainerGetAccountInfoExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } } } }; - exports2.ifMatch = { - parameterPath: ["options", "modifiedAccessConditions", "ifMatch"], - mapper: { - serializedName: "If-Match", - xmlName: "If-Match", - type: { - name: "String" + exports2.BlobDownloadHeaders = { + serializedName: "Blob_downloadHeaders", + type: { + name: "Composite", + className: "BlobDownloadHeaders", + modelProperties: { + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + createdOn: { + serializedName: "x-ms-creation-time", + xmlName: "x-ms-creation-time", + type: { + name: "DateTimeRfc1123" + } + }, + metadata: { + serializedName: "x-ms-meta", + headerCollectionPrefix: "x-ms-meta-", + xmlName: "x-ms-meta", + type: { + name: "Dictionary", + value: { type: { name: "String" } } + } + }, + objectReplicationPolicyId: { + serializedName: "x-ms-or-policy-id", + xmlName: "x-ms-or-policy-id", + type: { + name: "String" + } + }, + objectReplicationRules: { + serializedName: "x-ms-or", + headerCollectionPrefix: "x-ms-or-", + xmlName: "x-ms-or", + type: { + name: "Dictionary", + value: { type: { name: "String" } } + } + }, + contentLength: { + serializedName: "content-length", + xmlName: "content-length", + type: { + name: "Number" + } + }, + contentType: { + serializedName: "content-type", + xmlName: "content-type", + type: { + name: "String" + } + }, + contentRange: { + serializedName: "content-range", + xmlName: "content-range", + type: { + name: "String" + } + }, + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + 
[Generated-mapper hunks from the bundled Azure Storage Blob client code (flattened in the original; only the recoverable structure is kept here). In this region the diff removes the generated request-parameter mapper exports -- ifNoneMatch, ifTags, deleteSnapshots, blobDeleteType, comp11 through comp19, expiryOptions, expiresOn, the blobHttpHeaders parameters (blobCacheControl, blobContentType, blobContentMD5, blobContentEncoding, blobContentLanguage, blobContentDisposition), immutabilityPolicyExpiry, immutabilityPolicyMode, legalHold and legalHold1, encryptionScope, tier and tier1, rehydratePriority, the sourceModifiedAccessConditions parameters (sourceIfModifiedSince, sourceIfUnmodifiedSince, sourceIfMatch, sourceIfNoneMatch, sourceIfTags), copySource, blobTagsString, sealBlob, xMsRequiresSync, sourceContentMD5, copySourceAuthorization, copySourceTags, fileRequestIntent, copyActionAbortConstant, copyId, queryRequest, tags, transactionalContentMD5, transactionalContentCrc64, blobType, blobContentLength, blobSequenceNumber, contentType1, body1, accept2, pageWrite, pageWrite1, the sequenceNumberAccessConditions parameters (ifSequenceNumberLessThanOrEqualTo, ifSequenceNumberLessThan, ifSequenceNumberEqualTo), sourceUrl, and sourceRange -- and adds the generated response-header model mappers in their place: the remainder of BlobDownloadHeaders, then BlobDownload, BlobGetProperties, BlobDelete, BlobUndelete, BlobSetExpiry, BlobSetHttpHeaders, BlobSetImmutabilityPolicy, BlobDeleteImmutabilityPolicy, BlobSetLegalHold, BlobSetMetadata, BlobAcquireLease, BlobReleaseLease, BlobRenewLease, BlobChangeLease, BlobBreakLease, BlobCreateSnapshot, BlobStartCopyFromURL, BlobCopyFromURL, BlobAbortCopyFromURL, BlobSetTier, BlobGetAccountInfo, BlobQuery, BlobGetTags, BlobSetTags, PageBlobCreate, PageBlobUploadPages, PageBlobClearPages, PageBlobUploadPagesFromURL, PageBlobGetPageRanges, PageBlobGetPageRangesDiff, and PageBlobResize headers. Each operation gets a "<Operation>Headers" mapper plus a matching "<Operation>ExceptionHeaders" mapper whose only property is errorCode (x-ms-error-code). Every added mapper is a Composite model whose modelProperties map response headers such as etag, last-modified, x-ms-request-id, x-ms-client-request-id, x-ms-version, date, and x-ms-error-code onto typed fields (String, Number, Boolean, ByteArray, DateTimeRfc1123, Enum, or Dictionary).]
xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } } } }; - exports2.sourceContentCrc64 = { - parameterPath: ["options", "sourceContentCrc64"], - mapper: { - serializedName: "x-ms-source-content-crc64", - xmlName: "x-ms-source-content-crc64", - type: { - name: "ByteArray" + exports2.PageBlobResizeExceptionHeaders = { + serializedName: "PageBlob_resizeExceptionHeaders", + type: { + name: "Composite", + className: "PageBlobResizeExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } } } }; - exports2.range1 = { - parameterPath: "range", - mapper: { - serializedName: "x-ms-range", - required: true, - xmlName: "x-ms-range", - type: { - name: "String" + exports2.PageBlobUpdateSequenceNumberHeaders = { + serializedName: "PageBlob_updateSequenceNumberHeaders", + type: { + name: "Composite", + className: "PageBlobUpdateSequenceNumberHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + blobSequenceNumber: { + serializedName: "x-ms-blob-sequence-number", + xmlName: "x-ms-blob-sequence-number", + type: { + name: "Number" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } } } }; - exports2.comp20 = { - parameterPath: "comp", - mapper: { - defaultValue: "pagelist", - isConstant: true, - serializedName: "comp", - type: { - name: "String" + exports2.PageBlobUpdateSequenceNumberExceptionHeaders = { + serializedName: "PageBlob_updateSequenceNumberExceptionHeaders", + type: { + name: "Composite", + className: "PageBlobUpdateSequenceNumberExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } } } }; - exports2.prevsnapshot = { - parameterPath: ["options", "prevsnapshot"], - mapper: { - serializedName: "prevsnapshot", - xmlName: "prevsnapshot", - type: { - name: "String" + exports2.PageBlobCopyIncrementalHeaders = { + serializedName: "PageBlob_copyIncrementalHeaders", + type: { + name: "Composite", + className: "PageBlobCopyIncrementalHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: 
"x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + copyId: { + serializedName: "x-ms-copy-id", + xmlName: "x-ms-copy-id", + type: { + name: "String" + } + }, + copyStatus: { + serializedName: "x-ms-copy-status", + xmlName: "x-ms-copy-status", + type: { + name: "Enum", + allowedValues: ["pending", "success", "aborted", "failed"] + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } } } }; - exports2.prevSnapshotUrl = { - parameterPath: ["options", "prevSnapshotUrl"], - mapper: { - serializedName: "x-ms-previous-snapshot-url", - xmlName: "x-ms-previous-snapshot-url", - type: { - name: "String" + exports2.PageBlobCopyIncrementalExceptionHeaders = { + serializedName: "PageBlob_copyIncrementalExceptionHeaders", + type: { + name: "Composite", + className: "PageBlobCopyIncrementalExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } } } }; - exports2.sequenceNumberAction = { - parameterPath: "sequenceNumberAction", - mapper: { - serializedName: "x-ms-sequence-number-action", - required: true, - xmlName: "x-ms-sequence-number-action", - type: { - name: "Enum", - allowedValues: ["max", "update", "increment"] + exports2.AppendBlobCreateHeaders = { + serializedName: "AppendBlob_createHeaders", + type: { + name: "Composite", + className: "AppendBlobCreateHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + versionId: { + serializedName: "x-ms-version-id", + xmlName: "x-ms-version-id", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean" + } + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String" + } + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } + }; + exports2.AppendBlobCreateExceptionHeaders = { + serializedName: "AppendBlob_createExceptionHeaders", + type: { + name: "Composite", + className: "AppendBlobCreateExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } } } }; - exports2.comp21 = { - 
parameterPath: "comp", - mapper: { - defaultValue: "incrementalcopy", - isConstant: true, - serializedName: "comp", - type: { - name: "String" + exports2.AppendBlobAppendBlockHeaders = { + serializedName: "AppendBlob_appendBlockHeaders", + type: { + name: "Composite", + className: "AppendBlobAppendBlockHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray" + } + }, + xMsContentCrc64: { + serializedName: "x-ms-content-crc64", + xmlName: "x-ms-content-crc64", + type: { + name: "ByteArray" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + blobAppendOffset: { + serializedName: "x-ms-blob-append-offset", + xmlName: "x-ms-blob-append-offset", + type: { + name: "String" + } + }, + blobCommittedBlockCount: { + serializedName: "x-ms-blob-committed-block-count", + xmlName: "x-ms-blob-committed-block-count", + type: { + name: "Number" + } + }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean" + } + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String" + } + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } } } }; - exports2.blobType1 = { - parameterPath: "blobType", - mapper: { - defaultValue: "AppendBlob", - isConstant: true, - serializedName: "x-ms-blob-type", - type: { - name: "String" + exports2.AppendBlobAppendBlockExceptionHeaders = { + serializedName: "AppendBlob_appendBlockExceptionHeaders", + type: { + name: "Composite", + className: "AppendBlobAppendBlockExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } } } }; - exports2.comp22 = { - parameterPath: "comp", - mapper: { - defaultValue: "appendblock", - isConstant: true, - serializedName: "comp", - type: { - name: "String" + exports2.AppendBlobAppendBlockFromUrlHeaders = { + serializedName: "AppendBlob_appendBlockFromUrlHeaders", + type: { + name: "Composite", + className: "AppendBlobAppendBlockFromUrlHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray" + } + }, + xMsContentCrc64: { + serializedName: "x-ms-content-crc64", + xmlName: "x-ms-content-crc64", + type: { + name: "ByteArray" + } + }, + requestId: { + serializedName: "x-ms-request-id", + 
xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + blobAppendOffset: { + serializedName: "x-ms-blob-append-offset", + xmlName: "x-ms-blob-append-offset", + type: { + name: "String" + } + }, + blobCommittedBlockCount: { + serializedName: "x-ms-blob-committed-block-count", + xmlName: "x-ms-blob-committed-block-count", + type: { + name: "Number" + } + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String" + } + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } } } }; - exports2.maxSize = { - parameterPath: ["options", "appendPositionAccessConditions", "maxSize"], - mapper: { - serializedName: "x-ms-blob-condition-maxsize", - xmlName: "x-ms-blob-condition-maxsize", - type: { - name: "Number" + exports2.AppendBlobAppendBlockFromUrlExceptionHeaders = { + serializedName: "AppendBlob_appendBlockFromUrlExceptionHeaders", + type: { + name: "Composite", + className: "AppendBlobAppendBlockFromUrlExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + }, + copySourceErrorCode: { + serializedName: "x-ms-copy-source-error-code", + xmlName: "x-ms-copy-source-error-code", + type: { + name: "String" + } + }, + copySourceStatusCode: { + serializedName: "x-ms-copy-source-status-code", + xmlName: "x-ms-copy-source-status-code", + type: { + name: "Number" + } + } } } }; - exports2.appendPosition = { - parameterPath: [ - "options", - "appendPositionAccessConditions", - "appendPosition" - ], - mapper: { - serializedName: "x-ms-blob-condition-appendpos", - xmlName: "x-ms-blob-condition-appendpos", - type: { - name: "Number" + exports2.AppendBlobSealHeaders = { + serializedName: "AppendBlob_sealHeaders", + type: { + name: "Composite", + className: "AppendBlobSealHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + isSealed: { + serializedName: "x-ms-blob-sealed", + xmlName: "x-ms-blob-sealed", + type: { + name: "Boolean" + } + } } } }; - exports2.sourceRange1 = { - parameterPath: ["options", "sourceRange"], - mapper: { - serializedName: "x-ms-source-range", - xmlName: "x-ms-source-range", - type: { - name: "String" + exports2.AppendBlobSealExceptionHeaders = { + serializedName: 
"AppendBlob_sealExceptionHeaders", + type: { + name: "Composite", + className: "AppendBlobSealExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } } } }; - exports2.comp23 = { - parameterPath: "comp", - mapper: { - defaultValue: "seal", - isConstant: true, - serializedName: "comp", - type: { - name: "String" + exports2.BlockBlobUploadHeaders = { + serializedName: "BlockBlob_uploadHeaders", + type: { + name: "Composite", + className: "BlockBlobUploadHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + versionId: { + serializedName: "x-ms-version-id", + xmlName: "x-ms-version-id", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean" + } + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String" + } + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } } } }; - exports2.blobType2 = { - parameterPath: "blobType", - mapper: { - defaultValue: "BlockBlob", - isConstant: true, - serializedName: "x-ms-blob-type", - type: { - name: "String" + exports2.BlockBlobUploadExceptionHeaders = { + serializedName: "BlockBlob_uploadExceptionHeaders", + type: { + name: "Composite", + className: "BlockBlobUploadExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } } } }; - exports2.copySourceBlobProperties = { - parameterPath: ["options", "copySourceBlobProperties"], - mapper: { - serializedName: "x-ms-copy-source-blob-properties", - xmlName: "x-ms-copy-source-blob-properties", - type: { - name: "Boolean" + exports2.BlockBlobPutBlobFromUrlHeaders = { + serializedName: "BlockBlob_putBlobFromUrlHeaders", + type: { + name: "Composite", + className: "BlockBlobPutBlobFromUrlHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: 
"x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + versionId: { + serializedName: "x-ms-version-id", + xmlName: "x-ms-version-id", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean" + } + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String" + } + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } } } }; - exports2.comp24 = { - parameterPath: "comp", - mapper: { - defaultValue: "block", - isConstant: true, - serializedName: "comp", - type: { - name: "String" + exports2.BlockBlobPutBlobFromUrlExceptionHeaders = { + serializedName: "BlockBlob_putBlobFromUrlExceptionHeaders", + type: { + name: "Composite", + className: "BlockBlobPutBlobFromUrlExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + }, + copySourceErrorCode: { + serializedName: "x-ms-copy-source-error-code", + xmlName: "x-ms-copy-source-error-code", + type: { + name: "String" + } + }, + copySourceStatusCode: { + serializedName: "x-ms-copy-source-status-code", + xmlName: "x-ms-copy-source-status-code", + type: { + name: "Number" + } + } } } }; - exports2.blockId = { - parameterPath: "blockId", - mapper: { - serializedName: "blockid", - required: true, - xmlName: "blockid", - type: { - name: "String" + exports2.BlockBlobStageBlockHeaders = { + serializedName: "BlockBlob_stageBlockHeaders", + type: { + name: "Composite", + className: "BlockBlobStageBlockHeaders", + modelProperties: { + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + xMsContentCrc64: { + serializedName: "x-ms-content-crc64", + xmlName: "x-ms-content-crc64", + type: { + name: "ByteArray" + } + }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean" + } + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String" + } + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } } } }; - exports2.blocks = { - parameterPath: "blocks", - mapper: mappers_js_1.BlockLookupList - }; - exports2.comp25 = { - parameterPath: "comp", - mapper: { - defaultValue: 
"blocklist", - isConstant: true, - serializedName: "comp", - type: { - name: "String" + exports2.BlockBlobStageBlockExceptionHeaders = { + serializedName: "BlockBlob_stageBlockExceptionHeaders", + type: { + name: "Composite", + className: "BlockBlobStageBlockExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } } } }; - exports2.listType = { - parameterPath: "listType", - mapper: { - defaultValue: "committed", - serializedName: "blocklisttype", - required: true, - xmlName: "blocklisttype", - type: { - name: "Enum", - allowedValues: ["committed", "uncommitted", "all"] + exports2.BlockBlobStageBlockFromURLHeaders = { + serializedName: "BlockBlob_stageBlockFromURLHeaders", + type: { + name: "Composite", + className: "BlockBlobStageBlockFromURLHeaders", + modelProperties: { + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray" + } + }, + xMsContentCrc64: { + serializedName: "x-ms-content-crc64", + xmlName: "x-ms-content-crc64", + type: { + name: "ByteArray" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean" + } + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String" + } + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } } } }; - } -}); - -// node_modules/@azure/storage-blob/dist/commonjs/generated/src/operations/service.js -var require_service = __commonJS({ - "node_modules/@azure/storage-blob/dist/commonjs/generated/src/operations/service.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.ServiceImpl = void 0; - var tslib_1 = (init_tslib_es6(), __toCommonJS(tslib_es6_exports)); - var coreClient = tslib_1.__importStar(require_commonjs8()); - var Mappers = tslib_1.__importStar(require_mappers()); - var Parameters = tslib_1.__importStar(require_parameters()); - var ServiceImpl = class { - client; - /** - * Initialize a new instance of the class Service class. - * @param client Reference to the service client - */ - constructor(client) { - this.client = client; - } - /** - * Sets properties for a storage account's Blob service endpoint, including properties for Storage - * Analytics and CORS (Cross-Origin Resource Sharing) rules - * @param blobServiceProperties The StorageService properties. - * @param options The options parameters. 
- */ - setProperties(blobServiceProperties, options) { - return this.client.sendOperationRequest({ blobServiceProperties, options }, setPropertiesOperationSpec); - } - /** - * gets the properties of a storage account's Blob service, including properties for Storage Analytics - * and CORS (Cross-Origin Resource Sharing) rules. - * @param options The options parameters. - */ - getProperties(options) { - return this.client.sendOperationRequest({ options }, getPropertiesOperationSpec); - } - /** - * Retrieves statistics related to replication for the Blob service. It is only available on the - * secondary location endpoint when read-access geo-redundant replication is enabled for the storage - * account. - * @param options The options parameters. - */ - getStatistics(options) { - return this.client.sendOperationRequest({ options }, getStatisticsOperationSpec); - } - /** - * The List Containers Segment operation returns a list of the containers under the specified account - * @param options The options parameters. - */ - listContainersSegment(options) { - return this.client.sendOperationRequest({ options }, listContainersSegmentOperationSpec); - } - /** - * Retrieves a user delegation key for the Blob service. This is only a valid operation when using - * bearer token authentication. - * @param keyInfo Key information - * @param options The options parameters. - */ - getUserDelegationKey(keyInfo, options) { - return this.client.sendOperationRequest({ keyInfo, options }, getUserDelegationKeyOperationSpec); - } - /** - * Returns the sku name and account kind - * @param options The options parameters. - */ - getAccountInfo(options) { - return this.client.sendOperationRequest({ options }, getAccountInfoOperationSpec); - } - /** - * The Batch operation allows multiple API calls to be embedded into a single HTTP request. - * @param contentLength The length of the request. - * @param multipartContentType Required. The value of this header must be multipart/mixed with a batch - * boundary. Example header value: multipart/mixed; boundary=batch_ - * @param body Initial data - * @param options The options parameters. - */ - submitBatch(contentLength, multipartContentType, body, options) { - return this.client.sendOperationRequest({ contentLength, multipartContentType, body, options }, submitBatchOperationSpec); - } - /** - * The Filter Blobs operation enables callers to list blobs across all containers whose tags match a - * given search expression. Filter blobs searches across all containers within a storage account but - * can be scoped within the expression to a single container. - * @param options The options parameters. 
- */ - filterBlobs(options) { - return this.client.sendOperationRequest({ options }, filterBlobsOperationSpec); - } - }; - exports2.ServiceImpl = ServiceImpl; - var xmlSerializer = coreClient.createSerializer( - Mappers, - /* isXml */ - true - ); - var setPropertiesOperationSpec = { - path: "/", - httpMethod: "PUT", - responses: { - 202: { - headersMapper: Mappers.ServiceSetPropertiesHeaders - }, - default: { - bodyMapper: Mappers.StorageError, - headersMapper: Mappers.ServiceSetPropertiesExceptionHeaders - } - }, - requestBody: Parameters.blobServiceProperties, - queryParameters: [ - Parameters.restype, - Parameters.comp, - Parameters.timeoutInSeconds - ], - urlParameters: [Parameters.url], - headerParameters: [ - Parameters.contentType, - Parameters.accept, - Parameters.version, - Parameters.requestId - ], - isXML: true, - contentType: "application/xml; charset=utf-8", - mediaType: "xml", - serializer: xmlSerializer - }; - var getPropertiesOperationSpec = { - path: "/", - httpMethod: "GET", - responses: { - 200: { - bodyMapper: Mappers.BlobServiceProperties, - headersMapper: Mappers.ServiceGetPropertiesHeaders - }, - default: { - bodyMapper: Mappers.StorageError, - headersMapper: Mappers.ServiceGetPropertiesExceptionHeaders - } - }, - queryParameters: [ - Parameters.restype, - Parameters.comp, - Parameters.timeoutInSeconds - ], - urlParameters: [Parameters.url], - headerParameters: [ - Parameters.version, - Parameters.requestId, - Parameters.accept1 - ], - isXML: true, - serializer: xmlSerializer - }; - var getStatisticsOperationSpec = { - path: "/", - httpMethod: "GET", - responses: { - 200: { - bodyMapper: Mappers.BlobServiceStatistics, - headersMapper: Mappers.ServiceGetStatisticsHeaders - }, - default: { - bodyMapper: Mappers.StorageError, - headersMapper: Mappers.ServiceGetStatisticsExceptionHeaders - } - }, - queryParameters: [ - Parameters.restype, - Parameters.timeoutInSeconds, - Parameters.comp1 - ], - urlParameters: [Parameters.url], - headerParameters: [ - Parameters.version, - Parameters.requestId, - Parameters.accept1 - ], - isXML: true, - serializer: xmlSerializer - }; - var listContainersSegmentOperationSpec = { - path: "/", - httpMethod: "GET", - responses: { - 200: { - bodyMapper: Mappers.ListContainersSegmentResponse, - headersMapper: Mappers.ServiceListContainersSegmentHeaders - }, - default: { - bodyMapper: Mappers.StorageError, - headersMapper: Mappers.ServiceListContainersSegmentExceptionHeaders + exports2.BlockBlobStageBlockFromURLExceptionHeaders = { + serializedName: "BlockBlob_stageBlockFromURLExceptionHeaders", + type: { + name: "Composite", + className: "BlockBlobStageBlockFromURLExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + }, + copySourceErrorCode: { + serializedName: "x-ms-copy-source-error-code", + xmlName: "x-ms-copy-source-error-code", + type: { + name: "String" + } + }, + copySourceStatusCode: { + serializedName: "x-ms-copy-source-status-code", + xmlName: "x-ms-copy-source-status-code", + type: { + name: "Number" + } + } } - }, - queryParameters: [ - Parameters.timeoutInSeconds, - Parameters.comp2, - Parameters.prefix, - Parameters.marker, - Parameters.maxPageSize, - Parameters.include - ], - urlParameters: [Parameters.url], - headerParameters: [ - Parameters.version, - Parameters.requestId, - Parameters.accept1 - ], - isXML: true, - serializer: xmlSerializer + } }; - var getUserDelegationKeyOperationSpec = { - path: "/", - httpMethod: "POST", - 
responses: { - 200: { - bodyMapper: Mappers.UserDelegationKey, - headersMapper: Mappers.ServiceGetUserDelegationKeyHeaders - }, - default: { - bodyMapper: Mappers.StorageError, - headersMapper: Mappers.ServiceGetUserDelegationKeyExceptionHeaders + exports2.BlockBlobCommitBlockListHeaders = { + serializedName: "BlockBlob_commitBlockListHeaders", + type: { + name: "Composite", + className: "BlockBlobCommitBlockListHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray" + } + }, + xMsContentCrc64: { + serializedName: "x-ms-content-crc64", + xmlName: "x-ms-content-crc64", + type: { + name: "ByteArray" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + versionId: { + serializedName: "x-ms-version-id", + xmlName: "x-ms-version-id", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean" + } + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String" + } + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } } - }, - requestBody: Parameters.keyInfo, - queryParameters: [ - Parameters.restype, - Parameters.timeoutInSeconds, - Parameters.comp3 - ], - urlParameters: [Parameters.url], - headerParameters: [ - Parameters.contentType, - Parameters.accept, - Parameters.version, - Parameters.requestId - ], - isXML: true, - contentType: "application/xml; charset=utf-8", - mediaType: "xml", - serializer: xmlSerializer + } }; - var getAccountInfoOperationSpec = { - path: "/", - httpMethod: "GET", - responses: { - 200: { - headersMapper: Mappers.ServiceGetAccountInfoHeaders - }, - default: { - bodyMapper: Mappers.StorageError, - headersMapper: Mappers.ServiceGetAccountInfoExceptionHeaders + exports2.BlockBlobCommitBlockListExceptionHeaders = { + serializedName: "BlockBlob_commitBlockListExceptionHeaders", + type: { + name: "Composite", + className: "BlockBlobCommitBlockListExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } } - }, - queryParameters: [ - Parameters.comp, - Parameters.timeoutInSeconds, - Parameters.restype1 - ], - urlParameters: [Parameters.url], - headerParameters: [ - Parameters.version, - Parameters.requestId, - Parameters.accept1 - ], - isXML: true, - serializer: xmlSerializer + } }; - var submitBatchOperationSpec = { - path: "/", - httpMethod: "POST", - responses: { - 202: { - bodyMapper: { - type: { name: "Stream" }, - serializedName: "parsedResponse" + 
exports2.BlockBlobGetBlockListHeaders = { + serializedName: "BlockBlob_getBlockListHeaders", + type: { + name: "Composite", + className: "BlockBlobGetBlockListHeaders", + modelProperties: { + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } }, - headersMapper: Mappers.ServiceSubmitBatchHeaders - }, - default: { - bodyMapper: Mappers.StorageError, - headersMapper: Mappers.ServiceSubmitBatchExceptionHeaders + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + contentType: { + serializedName: "content-type", + xmlName: "content-type", + type: { + name: "String" + } + }, + blobContentLength: { + serializedName: "x-ms-blob-content-length", + xmlName: "x-ms-blob-content-length", + type: { + name: "Number" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } } - }, - requestBody: Parameters.body, - queryParameters: [Parameters.timeoutInSeconds, Parameters.comp4], - urlParameters: [Parameters.url], - headerParameters: [ - Parameters.accept, - Parameters.version, - Parameters.requestId, - Parameters.contentLength, - Parameters.multipartContentType - ], - isXML: true, - contentType: "application/xml; charset=utf-8", - mediaType: "xml", - serializer: xmlSerializer + } }; - var filterBlobsOperationSpec = { - path: "/", - httpMethod: "GET", - responses: { - 200: { - bodyMapper: Mappers.FilterBlobSegment, - headersMapper: Mappers.ServiceFilterBlobsHeaders - }, - default: { - bodyMapper: Mappers.StorageError, - headersMapper: Mappers.ServiceFilterBlobsExceptionHeaders + exports2.BlockBlobGetBlockListExceptionHeaders = { + serializedName: "BlockBlob_getBlockListExceptionHeaders", + type: { + name: "Composite", + className: "BlockBlobGetBlockListExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } } - }, - queryParameters: [ - Parameters.timeoutInSeconds, - Parameters.marker, - Parameters.maxPageSize, - Parameters.comp5, - Parameters.where - ], - urlParameters: [Parameters.url], - headerParameters: [ - Parameters.version, - Parameters.requestId, - Parameters.accept1 - ], - isXML: true, - serializer: xmlSerializer + } }; } }); -// node_modules/@azure/storage-blob/dist/commonjs/generated/src/operations/container.js -var require_container = __commonJS({ - "node_modules/@azure/storage-blob/dist/commonjs/generated/src/operations/container.js"(exports2) { +// node_modules/@azure/storage-blob/dist/commonjs/generated/src/models/parameters.js +var require_parameters = __commonJS({ + "node_modules/@azure/storage-blob/dist/commonjs/generated/src/models/parameters.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.ContainerImpl = void 0; - var tslib_1 = (init_tslib_es6(), __toCommonJS(tslib_es6_exports)); - var coreClient = tslib_1.__importStar(require_commonjs8()); - var Mappers = tslib_1.__importStar(require_mappers()); - 
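The removed operations/service.js code above shows the generated client pattern: each REST call is described by an operation spec (path, httpMethod, responses keyed by status code, query and header parameters, an XML serializer built via coreClient.createSerializer(Mappers, /* isXml */ true)) that is handed to client.sendOperationRequest. The minimal sketch below restates that wiring; "fakeClient" is a hypothetical stand-in, not the real @azure/core-client ServiceClient.

// Stand-in sketch of the generated operation-spec pattern.
const getPropertiesOperationSpec = {
  path: "/",
  httpMethod: "GET",
  responses: {
    200: { /* bodyMapper / headersMapper from the generated Mappers */ },
    default: { /* StorageError mapper */ },
  },
  queryParameters: [/* restype, comp, timeoutInSeconds */],
  headerParameters: [/* version, requestId, accept */],
  isXML: true,
};

const fakeClient = {
  // Stand-in for ServiceClient.sendOperationRequest(args, spec).
  sendOperationRequest(args, spec) {
    console.log(`${spec.httpMethod} ${spec.path}`, args.options ?? {});
    return Promise.resolve({});
  },
};

// Mirrors ServiceImpl.getProperties(options) in the removed code above.
function getProperties(options) {
  return fakeClient.sendOperationRequest({ options }, getPropertiesOperationSpec);
}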
var Parameters = tslib_1.__importStar(require_parameters()); - var ContainerImpl = class { - client; - /** - * Initialize a new instance of the class Container class. - * @param client Reference to the service client - */ - constructor(client) { - this.client = client; - } - /** - * creates a new container under the specified account. If the container with the same name already - * exists, the operation fails - * @param options The options parameters. - */ - create(options) { - return this.client.sendOperationRequest({ options }, createOperationSpec); - } - /** - * returns all user-defined metadata and system properties for the specified container. The data - * returned does not include the container's list of blobs - * @param options The options parameters. - */ - getProperties(options) { - return this.client.sendOperationRequest({ options }, getPropertiesOperationSpec); - } - /** - * operation marks the specified container for deletion. The container and any blobs contained within - * it are later deleted during garbage collection - * @param options The options parameters. - */ - delete(options) { - return this.client.sendOperationRequest({ options }, deleteOperationSpec); - } - /** - * operation sets one or more user-defined name-value pairs for the specified container. - * @param options The options parameters. - */ - setMetadata(options) { - return this.client.sendOperationRequest({ options }, setMetadataOperationSpec); - } - /** - * gets the permissions for the specified container. The permissions indicate whether container data - * may be accessed publicly. - * @param options The options parameters. - */ - getAccessPolicy(options) { - return this.client.sendOperationRequest({ options }, getAccessPolicyOperationSpec); - } - /** - * sets the permissions for the specified container. The permissions indicate whether blobs in a - * container may be accessed publicly. - * @param options The options parameters. - */ - setAccessPolicy(options) { - return this.client.sendOperationRequest({ options }, setAccessPolicyOperationSpec); - } - /** - * Restores a previously-deleted container. - * @param options The options parameters. - */ - restore(options) { - return this.client.sendOperationRequest({ options }, restoreOperationSpec); - } - /** - * Renames an existing container. - * @param sourceContainerName Required. Specifies the name of the container to rename. - * @param options The options parameters. - */ - rename(sourceContainerName, options) { - return this.client.sendOperationRequest({ sourceContainerName, options }, renameOperationSpec); - } - /** - * The Batch operation allows multiple API calls to be embedded into a single HTTP request. - * @param contentLength The length of the request. - * @param multipartContentType Required. The value of this header must be multipart/mixed with a batch - * boundary. Example header value: multipart/mixed; boundary=batch_ - * @param body Initial data - * @param options The options parameters. - */ - submitBatch(contentLength, multipartContentType, body, options) { - return this.client.sendOperationRequest({ contentLength, multipartContentType, body, options }, submitBatchOperationSpec); - } - /** - * The Filter Blobs operation enables callers to list blobs in a container whose tags match a given - * search expression. Filter blobs searches within the given container. - * @param options The options parameters. 
- */ - filterBlobs(options) { - return this.client.sendOperationRequest({ options }, filterBlobsOperationSpec); - } - /** - * [Update] establishes and manages a lock on a container for delete operations. The lock duration can - * be 15 to 60 seconds, or can be infinite - * @param options The options parameters. - */ - acquireLease(options) { - return this.client.sendOperationRequest({ options }, acquireLeaseOperationSpec); - } - /** - * [Update] establishes and manages a lock on a container for delete operations. The lock duration can - * be 15 to 60 seconds, or can be infinite - * @param leaseId Specifies the current lease ID on the resource. - * @param options The options parameters. - */ - releaseLease(leaseId, options) { - return this.client.sendOperationRequest({ leaseId, options }, releaseLeaseOperationSpec); - } - /** - * [Update] establishes and manages a lock on a container for delete operations. The lock duration can - * be 15 to 60 seconds, or can be infinite - * @param leaseId Specifies the current lease ID on the resource. - * @param options The options parameters. - */ - renewLease(leaseId, options) { - return this.client.sendOperationRequest({ leaseId, options }, renewLeaseOperationSpec); - } - /** - * [Update] establishes and manages a lock on a container for delete operations. The lock duration can - * be 15 to 60 seconds, or can be infinite - * @param options The options parameters. - */ - breakLease(options) { - return this.client.sendOperationRequest({ options }, breakLeaseOperationSpec); - } - /** - * [Update] establishes and manages a lock on a container for delete operations. The lock duration can - * be 15 to 60 seconds, or can be infinite - * @param leaseId Specifies the current lease ID on the resource. - * @param proposedLeaseId Proposed lease ID, in a GUID string format. The Blob service returns 400 - * (Invalid request) if the proposed lease ID is not in the correct format. See Guid Constructor - * (String) for a list of valid GUID string formats. - * @param options The options parameters. - */ - changeLease(leaseId, proposedLeaseId, options) { - return this.client.sendOperationRequest({ leaseId, proposedLeaseId, options }, changeLeaseOperationSpec); - } - /** - * [Update] The List Blobs operation returns a list of the blobs under the specified container - * @param options The options parameters. - */ - listBlobFlatSegment(options) { - return this.client.sendOperationRequest({ options }, listBlobFlatSegmentOperationSpec); - } - /** - * [Update] The List Blobs operation returns a list of the blobs under the specified container - * @param delimiter When the request includes this parameter, the operation returns a BlobPrefix - * element in the response body that acts as a placeholder for all blobs whose names begin with the - * same substring up to the appearance of the delimiter character. The delimiter may be a single - * character or a string. - * @param options The options parameters. 
- */ - listBlobHierarchySegment(delimiter, options) { - return this.client.sendOperationRequest({ delimiter, options }, listBlobHierarchySegmentOperationSpec); + exports2.action3 = exports2.action2 = exports2.leaseId1 = exports2.action1 = exports2.proposedLeaseId = exports2.duration = exports2.action = exports2.comp10 = exports2.sourceLeaseId = exports2.sourceContainerName = exports2.comp9 = exports2.deletedContainerVersion = exports2.deletedContainerName = exports2.comp8 = exports2.containerAcl = exports2.comp7 = exports2.comp6 = exports2.ifUnmodifiedSince = exports2.ifModifiedSince = exports2.leaseId = exports2.preventEncryptionScopeOverride = exports2.defaultEncryptionScope = exports2.access = exports2.metadata = exports2.restype2 = exports2.where = exports2.comp5 = exports2.multipartContentType = exports2.contentLength = exports2.comp4 = exports2.body = exports2.restype1 = exports2.comp3 = exports2.keyInfo = exports2.include = exports2.maxPageSize = exports2.marker = exports2.prefix = exports2.comp2 = exports2.comp1 = exports2.accept1 = exports2.requestId = exports2.version = exports2.timeoutInSeconds = exports2.comp = exports2.restype = exports2.url = exports2.accept = exports2.blobServiceProperties = exports2.contentType = void 0; + exports2.fileRequestIntent = exports2.copySourceTags = exports2.copySourceAuthorization = exports2.sourceContentMD5 = exports2.xMsRequiresSync = exports2.legalHold1 = exports2.sealBlob = exports2.blobTagsString = exports2.copySource = exports2.sourceIfTags = exports2.sourceIfNoneMatch = exports2.sourceIfMatch = exports2.sourceIfUnmodifiedSince = exports2.sourceIfModifiedSince = exports2.rehydratePriority = exports2.tier = exports2.comp14 = exports2.encryptionScope = exports2.legalHold = exports2.comp13 = exports2.immutabilityPolicyMode = exports2.immutabilityPolicyExpiry = exports2.comp12 = exports2.blobContentDisposition = exports2.blobContentLanguage = exports2.blobContentEncoding = exports2.blobContentMD5 = exports2.blobContentType = exports2.blobCacheControl = exports2.expiresOn = exports2.expiryOptions = exports2.comp11 = exports2.blobDeleteType = exports2.deleteSnapshots = exports2.ifTags = exports2.ifNoneMatch = exports2.ifMatch = exports2.encryptionAlgorithm = exports2.encryptionKeySha256 = exports2.encryptionKey = exports2.rangeGetContentCRC64 = exports2.rangeGetContentMD5 = exports2.range = exports2.versionId = exports2.snapshot = exports2.delimiter = exports2.include1 = exports2.proposedLeaseId1 = exports2.action4 = exports2.breakPeriod = void 0; + exports2.listType = exports2.comp25 = exports2.blocks = exports2.blockId = exports2.comp24 = exports2.copySourceBlobProperties = exports2.blobType2 = exports2.comp23 = exports2.sourceRange1 = exports2.appendPosition = exports2.maxSize = exports2.comp22 = exports2.blobType1 = exports2.comp21 = exports2.sequenceNumberAction = exports2.prevSnapshotUrl = exports2.prevsnapshot = exports2.comp20 = exports2.range1 = exports2.sourceContentCrc64 = exports2.sourceRange = exports2.sourceUrl = exports2.pageWrite1 = exports2.ifSequenceNumberEqualTo = exports2.ifSequenceNumberLessThan = exports2.ifSequenceNumberLessThanOrEqualTo = exports2.pageWrite = exports2.comp19 = exports2.accept2 = exports2.body1 = exports2.contentType1 = exports2.blobSequenceNumber = exports2.blobContentLength = exports2.blobType = exports2.transactionalContentCrc64 = exports2.transactionalContentMD5 = exports2.tags = exports2.comp18 = exports2.comp17 = exports2.queryRequest = exports2.tier1 = exports2.comp16 = exports2.copyId = 
exports2.copyActionAbortConstant = exports2.comp15 = void 0; + var mappers_js_1 = require_mappers(); + exports2.contentType = { + parameterPath: ["options", "contentType"], + mapper: { + defaultValue: "application/xml", + isConstant: true, + serializedName: "Content-Type", + type: { + name: "String" + } } - /** - * Returns the sku name and account kind - * @param options The options parameters. - */ - getAccountInfo(options) { - return this.client.sendOperationRequest({ options }, getAccountInfoOperationSpec); + }; + exports2.blobServiceProperties = { + parameterPath: "blobServiceProperties", + mapper: mappers_js_1.BlobServiceProperties + }; + exports2.accept = { + parameterPath: "accept", + mapper: { + defaultValue: "application/xml", + isConstant: true, + serializedName: "Accept", + type: { + name: "String" + } } }; - exports2.ContainerImpl = ContainerImpl; - var xmlSerializer = coreClient.createSerializer( - Mappers, - /* isXml */ - true - ); - var createOperationSpec = { - path: "/{containerName}", - httpMethod: "PUT", - responses: { - 201: { - headersMapper: Mappers.ContainerCreateHeaders - }, - default: { - bodyMapper: Mappers.StorageError, - headersMapper: Mappers.ContainerCreateExceptionHeaders + exports2.url = { + parameterPath: "url", + mapper: { + serializedName: "url", + required: true, + xmlName: "url", + type: { + name: "String" } }, - queryParameters: [Parameters.timeoutInSeconds, Parameters.restype2], - urlParameters: [Parameters.url], - headerParameters: [ - Parameters.version, - Parameters.requestId, - Parameters.accept1, - Parameters.metadata, - Parameters.access, - Parameters.defaultEncryptionScope, - Parameters.preventEncryptionScopeOverride - ], - isXML: true, - serializer: xmlSerializer + skipEncoding: true }; - var getPropertiesOperationSpec = { - path: "/{containerName}", - httpMethod: "GET", - responses: { - 200: { - headersMapper: Mappers.ContainerGetPropertiesHeaders - }, - default: { - bodyMapper: Mappers.StorageError, - headersMapper: Mappers.ContainerGetPropertiesExceptionHeaders + exports2.restype = { + parameterPath: "restype", + mapper: { + defaultValue: "service", + isConstant: true, + serializedName: "restype", + type: { + name: "String" } - }, - queryParameters: [Parameters.timeoutInSeconds, Parameters.restype2], - urlParameters: [Parameters.url], - headerParameters: [ - Parameters.version, - Parameters.requestId, - Parameters.accept1, - Parameters.leaseId - ], - isXML: true, - serializer: xmlSerializer + } }; - var deleteOperationSpec = { - path: "/{containerName}", - httpMethod: "DELETE", - responses: { - 202: { - headersMapper: Mappers.ContainerDeleteHeaders - }, - default: { - bodyMapper: Mappers.StorageError, - headersMapper: Mappers.ContainerDeleteExceptionHeaders + exports2.comp = { + parameterPath: "comp", + mapper: { + defaultValue: "properties", + isConstant: true, + serializedName: "comp", + type: { + name: "String" } - }, - queryParameters: [Parameters.timeoutInSeconds, Parameters.restype2], - urlParameters: [Parameters.url], - headerParameters: [ - Parameters.version, - Parameters.requestId, - Parameters.accept1, - Parameters.leaseId, - Parameters.ifModifiedSince, - Parameters.ifUnmodifiedSince - ], - isXML: true, - serializer: xmlSerializer + } }; - var setMetadataOperationSpec = { - path: "/{containerName}", - httpMethod: "PUT", - responses: { - 200: { - headersMapper: Mappers.ContainerSetMetadataHeaders + exports2.timeoutInSeconds = { + parameterPath: ["options", "timeoutInSeconds"], + mapper: { + constraints: { + InclusiveMinimum: 0 }, 
- default: { - bodyMapper: Mappers.StorageError, - headersMapper: Mappers.ContainerSetMetadataExceptionHeaders + serializedName: "timeout", + xmlName: "timeout", + type: { + name: "Number" } - }, - queryParameters: [ - Parameters.timeoutInSeconds, - Parameters.restype2, - Parameters.comp6 - ], - urlParameters: [Parameters.url], - headerParameters: [ - Parameters.version, - Parameters.requestId, - Parameters.accept1, - Parameters.metadata, - Parameters.leaseId, - Parameters.ifModifiedSince - ], - isXML: true, - serializer: xmlSerializer + } }; - var getAccessPolicyOperationSpec = { - path: "/{containerName}", - httpMethod: "GET", - responses: { - 200: { - bodyMapper: { - type: { - name: "Sequence", - element: { - type: { name: "Composite", className: "SignedIdentifier" } - } - }, - serializedName: "SignedIdentifiers", - xmlName: "SignedIdentifiers", - xmlIsWrapped: true, - xmlElementName: "SignedIdentifier" - }, - headersMapper: Mappers.ContainerGetAccessPolicyHeaders - }, - default: { - bodyMapper: Mappers.StorageError, - headersMapper: Mappers.ContainerGetAccessPolicyExceptionHeaders + exports2.version = { + parameterPath: "version", + mapper: { + defaultValue: "2025-11-05", + isConstant: true, + serializedName: "x-ms-version", + type: { + name: "String" } - }, - queryParameters: [ - Parameters.timeoutInSeconds, - Parameters.restype2, - Parameters.comp7 - ], - urlParameters: [Parameters.url], - headerParameters: [ - Parameters.version, - Parameters.requestId, - Parameters.accept1, - Parameters.leaseId - ], - isXML: true, - serializer: xmlSerializer + } }; - var setAccessPolicyOperationSpec = { - path: "/{containerName}", - httpMethod: "PUT", - responses: { - 200: { - headersMapper: Mappers.ContainerSetAccessPolicyHeaders - }, - default: { - bodyMapper: Mappers.StorageError, - headersMapper: Mappers.ContainerSetAccessPolicyExceptionHeaders + exports2.requestId = { + parameterPath: ["options", "requestId"], + mapper: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" } - }, - requestBody: Parameters.containerAcl, - queryParameters: [ - Parameters.timeoutInSeconds, - Parameters.restype2, - Parameters.comp7 - ], - urlParameters: [Parameters.url], - headerParameters: [ - Parameters.contentType, - Parameters.accept, - Parameters.version, - Parameters.requestId, - Parameters.access, - Parameters.leaseId, - Parameters.ifModifiedSince, - Parameters.ifUnmodifiedSince - ], - isXML: true, - contentType: "application/xml; charset=utf-8", - mediaType: "xml", - serializer: xmlSerializer + } }; - var restoreOperationSpec = { - path: "/{containerName}", - httpMethod: "PUT", - responses: { - 201: { - headersMapper: Mappers.ContainerRestoreHeaders - }, - default: { - bodyMapper: Mappers.StorageError, - headersMapper: Mappers.ContainerRestoreExceptionHeaders + exports2.accept1 = { + parameterPath: "accept", + mapper: { + defaultValue: "application/xml", + isConstant: true, + serializedName: "Accept", + type: { + name: "String" } - }, - queryParameters: [ - Parameters.timeoutInSeconds, - Parameters.restype2, - Parameters.comp8 - ], - urlParameters: [Parameters.url], - headerParameters: [ - Parameters.version, - Parameters.requestId, - Parameters.accept1, - Parameters.deletedContainerName, - Parameters.deletedContainerVersion - ], - isXML: true, - serializer: xmlSerializer + } }; - var renameOperationSpec = { - path: "/{containerName}", - httpMethod: "PUT", - responses: { - 200: { - headersMapper: Mappers.ContainerRenameHeaders - }, - default: { - 
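The parameter entries added in this hunk pair a parameterPath (where the value lives on the operation arguments, or a constant) with a mapper (serialized name, type, constraints, and defaultValue for constants such as the "2025-11-05" service version). The sketch below only illustrates that shape; "resolveParameter" is hypothetical, since the real resolution happens inside @azure/core-client when the request is built.

// Hypothetical sketch: resolving a value from a generated parameter mapper.
const version = {
  parameterPath: "version",
  mapper: {
    defaultValue: "2025-11-05",
    isConstant: true,
    serializedName: "x-ms-version",
    type: { name: "String" },
  },
};

const timeoutInSeconds = {
  parameterPath: ["options", "timeoutInSeconds"],
  mapper: {
    constraints: { InclusiveMinimum: 0 },
    serializedName: "timeout",
    type: { name: "Number" },
  },
};

function resolveParameter(param, operationArguments) {
  if (param.mapper.isConstant) return param.mapper.defaultValue;
  const path = Array.isArray(param.parameterPath)
    ? param.parameterPath
    : [param.parameterPath];
  return path.reduce((obj, key) => (obj == null ? undefined : obj[key]), operationArguments);
}

// resolveParameter(version, {})                                            -> "2025-11-05"
// resolveParameter(timeoutInSeconds, { options: { timeoutInSeconds: 30 } }) -> 30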
bodyMapper: Mappers.StorageError, - headersMapper: Mappers.ContainerRenameExceptionHeaders + exports2.comp1 = { + parameterPath: "comp", + mapper: { + defaultValue: "stats", + isConstant: true, + serializedName: "comp", + type: { + name: "String" } - }, - queryParameters: [ - Parameters.timeoutInSeconds, - Parameters.restype2, - Parameters.comp9 - ], - urlParameters: [Parameters.url], - headerParameters: [ - Parameters.version, - Parameters.requestId, - Parameters.accept1, - Parameters.sourceContainerName, - Parameters.sourceLeaseId - ], - isXML: true, - serializer: xmlSerializer + } }; - var submitBatchOperationSpec = { - path: "/{containerName}", - httpMethod: "POST", - responses: { - 202: { - bodyMapper: { - type: { name: "Stream" }, - serializedName: "parsedResponse" - }, - headersMapper: Mappers.ContainerSubmitBatchHeaders - }, - default: { - bodyMapper: Mappers.StorageError, - headersMapper: Mappers.ContainerSubmitBatchExceptionHeaders + exports2.comp2 = { + parameterPath: "comp", + mapper: { + defaultValue: "list", + isConstant: true, + serializedName: "comp", + type: { + name: "String" } - }, - requestBody: Parameters.body, - queryParameters: [ - Parameters.timeoutInSeconds, - Parameters.comp4, - Parameters.restype2 - ], - urlParameters: [Parameters.url], - headerParameters: [ - Parameters.accept, - Parameters.version, - Parameters.requestId, - Parameters.contentLength, - Parameters.multipartContentType - ], - isXML: true, - contentType: "application/xml; charset=utf-8", - mediaType: "xml", - serializer: xmlSerializer + } }; - var filterBlobsOperationSpec = { - path: "/{containerName}", - httpMethod: "GET", - responses: { - 200: { - bodyMapper: Mappers.FilterBlobSegment, - headersMapper: Mappers.ContainerFilterBlobsHeaders - }, - default: { - bodyMapper: Mappers.StorageError, - headersMapper: Mappers.ContainerFilterBlobsExceptionHeaders + exports2.prefix = { + parameterPath: ["options", "prefix"], + mapper: { + serializedName: "prefix", + xmlName: "prefix", + type: { + name: "String" } - }, - queryParameters: [ - Parameters.timeoutInSeconds, - Parameters.marker, - Parameters.maxPageSize, - Parameters.comp5, - Parameters.where, - Parameters.restype2 - ], - urlParameters: [Parameters.url], - headerParameters: [ - Parameters.version, - Parameters.requestId, - Parameters.accept1 - ], - isXML: true, - serializer: xmlSerializer + } }; - var acquireLeaseOperationSpec = { - path: "/{containerName}", - httpMethod: "PUT", - responses: { - 201: { - headersMapper: Mappers.ContainerAcquireLeaseHeaders - }, - default: { - bodyMapper: Mappers.StorageError, - headersMapper: Mappers.ContainerAcquireLeaseExceptionHeaders + exports2.marker = { + parameterPath: ["options", "marker"], + mapper: { + serializedName: "marker", + xmlName: "marker", + type: { + name: "String" } - }, - queryParameters: [ - Parameters.timeoutInSeconds, - Parameters.restype2, - Parameters.comp10 - ], - urlParameters: [Parameters.url], - headerParameters: [ - Parameters.version, - Parameters.requestId, - Parameters.accept1, - Parameters.ifModifiedSince, - Parameters.ifUnmodifiedSince, - Parameters.action, - Parameters.duration, - Parameters.proposedLeaseId - ], - isXML: true, - serializer: xmlSerializer + } }; - var releaseLeaseOperationSpec = { - path: "/{containerName}", - httpMethod: "PUT", - responses: { - 200: { - headersMapper: Mappers.ContainerReleaseLeaseHeaders + exports2.maxPageSize = { + parameterPath: ["options", "maxPageSize"], + mapper: { + constraints: { + InclusiveMinimum: 1 }, - default: { - bodyMapper: 
Mappers.StorageError, - headersMapper: Mappers.ContainerReleaseLeaseExceptionHeaders + serializedName: "maxresults", + xmlName: "maxresults", + type: { + name: "Number" + } + } + }; + exports2.include = { + parameterPath: ["options", "include"], + mapper: { + serializedName: "include", + xmlName: "include", + xmlElementName: "ListContainersIncludeType", + type: { + name: "Sequence", + element: { + type: { + name: "Enum", + allowedValues: ["metadata", "deleted", "system"] + } + } } }, - queryParameters: [ - Parameters.timeoutInSeconds, - Parameters.restype2, - Parameters.comp10 - ], - urlParameters: [Parameters.url], - headerParameters: [ - Parameters.version, - Parameters.requestId, - Parameters.accept1, - Parameters.ifModifiedSince, - Parameters.ifUnmodifiedSince, - Parameters.action1, - Parameters.leaseId1 - ], - isXML: true, - serializer: xmlSerializer + collectionFormat: "CSV" }; - var renewLeaseOperationSpec = { - path: "/{containerName}", - httpMethod: "PUT", - responses: { - 200: { - headersMapper: Mappers.ContainerRenewLeaseHeaders - }, - default: { - bodyMapper: Mappers.StorageError, - headersMapper: Mappers.ContainerRenewLeaseExceptionHeaders + exports2.keyInfo = { + parameterPath: "keyInfo", + mapper: mappers_js_1.KeyInfo + }; + exports2.comp3 = { + parameterPath: "comp", + mapper: { + defaultValue: "userdelegationkey", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } + }; + exports2.restype1 = { + parameterPath: "restype", + mapper: { + defaultValue: "account", + isConstant: true, + serializedName: "restype", + type: { + name: "String" + } + } + }; + exports2.body = { + parameterPath: "body", + mapper: { + serializedName: "body", + required: true, + xmlName: "body", + type: { + name: "Stream" + } + } + }; + exports2.comp4 = { + parameterPath: "comp", + mapper: { + defaultValue: "batch", + isConstant: true, + serializedName: "comp", + type: { + name: "String" } - }, - queryParameters: [ - Parameters.timeoutInSeconds, - Parameters.restype2, - Parameters.comp10 - ], - urlParameters: [Parameters.url], - headerParameters: [ - Parameters.version, - Parameters.requestId, - Parameters.accept1, - Parameters.ifModifiedSince, - Parameters.ifUnmodifiedSince, - Parameters.leaseId1, - Parameters.action2 - ], - isXML: true, - serializer: xmlSerializer + } }; - var breakLeaseOperationSpec = { - path: "/{containerName}", - httpMethod: "PUT", - responses: { - 202: { - headersMapper: Mappers.ContainerBreakLeaseHeaders - }, - default: { - bodyMapper: Mappers.StorageError, - headersMapper: Mappers.ContainerBreakLeaseExceptionHeaders + exports2.contentLength = { + parameterPath: "contentLength", + mapper: { + serializedName: "Content-Length", + required: true, + xmlName: "Content-Length", + type: { + name: "Number" } - }, - queryParameters: [ - Parameters.timeoutInSeconds, - Parameters.restype2, - Parameters.comp10 - ], - urlParameters: [Parameters.url], - headerParameters: [ - Parameters.version, - Parameters.requestId, - Parameters.accept1, - Parameters.ifModifiedSince, - Parameters.ifUnmodifiedSince, - Parameters.action3, - Parameters.breakPeriod - ], - isXML: true, - serializer: xmlSerializer + } }; - var changeLeaseOperationSpec = { - path: "/{containerName}", - httpMethod: "PUT", - responses: { - 200: { - headersMapper: Mappers.ContainerChangeLeaseHeaders - }, - default: { - bodyMapper: Mappers.StorageError, - headersMapper: Mappers.ContainerChangeLeaseExceptionHeaders + exports2.multipartContentType = { + parameterPath: "multipartContentType", + mapper: { + 
serializedName: "Content-Type", + required: true, + xmlName: "Content-Type", + type: { + name: "String" } - }, - queryParameters: [ - Parameters.timeoutInSeconds, - Parameters.restype2, - Parameters.comp10 - ], - urlParameters: [Parameters.url], - headerParameters: [ - Parameters.version, - Parameters.requestId, - Parameters.accept1, - Parameters.ifModifiedSince, - Parameters.ifUnmodifiedSince, - Parameters.leaseId1, - Parameters.action4, - Parameters.proposedLeaseId1 - ], - isXML: true, - serializer: xmlSerializer + } }; - var listBlobFlatSegmentOperationSpec = { - path: "/{containerName}", - httpMethod: "GET", - responses: { - 200: { - bodyMapper: Mappers.ListBlobsFlatSegmentResponse, - headersMapper: Mappers.ContainerListBlobFlatSegmentHeaders - }, - default: { - bodyMapper: Mappers.StorageError, - headersMapper: Mappers.ContainerListBlobFlatSegmentExceptionHeaders + exports2.comp5 = { + parameterPath: "comp", + mapper: { + defaultValue: "blobs", + isConstant: true, + serializedName: "comp", + type: { + name: "String" } - }, - queryParameters: [ - Parameters.timeoutInSeconds, - Parameters.comp2, - Parameters.prefix, - Parameters.marker, - Parameters.maxPageSize, - Parameters.restype2, - Parameters.include1 - ], - urlParameters: [Parameters.url], - headerParameters: [ - Parameters.version, - Parameters.requestId, - Parameters.accept1 - ], - isXML: true, - serializer: xmlSerializer + } }; - var listBlobHierarchySegmentOperationSpec = { - path: "/{containerName}", - httpMethod: "GET", - responses: { - 200: { - bodyMapper: Mappers.ListBlobsHierarchySegmentResponse, - headersMapper: Mappers.ContainerListBlobHierarchySegmentHeaders - }, - default: { - bodyMapper: Mappers.StorageError, - headersMapper: Mappers.ContainerListBlobHierarchySegmentExceptionHeaders + exports2.where = { + parameterPath: ["options", "where"], + mapper: { + serializedName: "where", + xmlName: "where", + type: { + name: "String" } - }, - queryParameters: [ - Parameters.timeoutInSeconds, - Parameters.comp2, - Parameters.prefix, - Parameters.marker, - Parameters.maxPageSize, - Parameters.restype2, - Parameters.include1, - Parameters.delimiter - ], - urlParameters: [Parameters.url], - headerParameters: [ - Parameters.version, - Parameters.requestId, - Parameters.accept1 - ], - isXML: true, - serializer: xmlSerializer + } }; - var getAccountInfoOperationSpec = { - path: "/{containerName}", - httpMethod: "GET", - responses: { - 200: { - headersMapper: Mappers.ContainerGetAccountInfoHeaders - }, - default: { - bodyMapper: Mappers.StorageError, - headersMapper: Mappers.ContainerGetAccountInfoExceptionHeaders + exports2.restype2 = { + parameterPath: "restype", + mapper: { + defaultValue: "container", + isConstant: true, + serializedName: "restype", + type: { + name: "String" } - }, - queryParameters: [ - Parameters.comp, - Parameters.timeoutInSeconds, - Parameters.restype1 - ], - urlParameters: [Parameters.url], - headerParameters: [ - Parameters.version, - Parameters.requestId, - Parameters.accept1 - ], - isXML: true, - serializer: xmlSerializer + } }; - } -}); - -// node_modules/@azure/storage-blob/dist/commonjs/generated/src/operations/blob.js -var require_blob = __commonJS({ - "node_modules/@azure/storage-blob/dist/commonjs/generated/src/operations/blob.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.BlobImpl = void 0; - var tslib_1 = (init_tslib_es6(), __toCommonJS(tslib_es6_exports)); - var coreClient = tslib_1.__importStar(require_commonjs8()); - var Mappers = 
tslib_1.__importStar(require_mappers()); - var Parameters = tslib_1.__importStar(require_parameters()); - var BlobImpl = class { - client; - /** - * Initialize a new instance of the class Blob class. - * @param client Reference to the service client - */ - constructor(client) { - this.client = client; + exports2.metadata = { + parameterPath: ["options", "metadata"], + mapper: { + serializedName: "x-ms-meta", + xmlName: "x-ms-meta", + headerCollectionPrefix: "x-ms-meta-", + type: { + name: "Dictionary", + value: { type: { name: "String" } } + } } - /** - * The Download operation reads or downloads a blob from the system, including its metadata and - * properties. You can also call Download to read a snapshot. - * @param options The options parameters. - */ - download(options) { - return this.client.sendOperationRequest({ options }, downloadOperationSpec); + }; + exports2.access = { + parameterPath: ["options", "access"], + mapper: { + serializedName: "x-ms-blob-public-access", + xmlName: "x-ms-blob-public-access", + type: { + name: "Enum", + allowedValues: ["container", "blob"] + } } - /** - * The Get Properties operation returns all user-defined metadata, standard HTTP properties, and system - * properties for the blob. It does not return the content of the blob. - * @param options The options parameters. - */ - getProperties(options) { - return this.client.sendOperationRequest({ options }, getPropertiesOperationSpec); + }; + exports2.defaultEncryptionScope = { + parameterPath: [ + "options", + "containerEncryptionScope", + "defaultEncryptionScope" + ], + mapper: { + serializedName: "x-ms-default-encryption-scope", + xmlName: "x-ms-default-encryption-scope", + type: { + name: "String" + } } - /** - * If the storage account's soft delete feature is disabled then, when a blob is deleted, it is - * permanently removed from the storage account. If the storage account's soft delete feature is - * enabled, then, when a blob is deleted, it is marked for deletion and becomes inaccessible - * immediately. However, the blob service retains the blob or snapshot for the number of days specified - * by the DeleteRetentionPolicy section of [Storage service properties] - * (Set-Blob-Service-Properties.md). After the specified number of days has passed, the blob's data is - * permanently removed from the storage account. Note that you continue to be charged for the - * soft-deleted blob's storage until it is permanently removed. Use the List Blobs API and specify the - * "include=deleted" query parameter to discover which blobs and snapshots have been soft deleted. You - * can then use the Undelete Blob API to restore a soft-deleted blob. All other operations on a - * soft-deleted blob or snapshot causes the service to return an HTTP status code of 404 - * (ResourceNotFound). - * @param options The options parameters. - */ - delete(options) { - return this.client.sendOperationRequest({ options }, deleteOperationSpec); + }; + exports2.preventEncryptionScopeOverride = { + parameterPath: [ + "options", + "containerEncryptionScope", + "preventEncryptionScopeOverride" + ], + mapper: { + serializedName: "x-ms-deny-encryption-scope-override", + xmlName: "x-ms-deny-encryption-scope-override", + type: { + name: "Boolean" + } } - /** - * Undelete a blob that was previously soft deleted - * @param options The options parameters. 
- */ - undelete(options) { - return this.client.sendOperationRequest({ options }, undeleteOperationSpec); + }; + exports2.leaseId = { + parameterPath: ["options", "leaseAccessConditions", "leaseId"], + mapper: { + serializedName: "x-ms-lease-id", + xmlName: "x-ms-lease-id", + type: { + name: "String" + } } - /** - * Sets the time a blob will expire and be deleted. - * @param expiryOptions Required. Indicates mode of the expiry time - * @param options The options parameters. - */ - setExpiry(expiryOptions, options) { - return this.client.sendOperationRequest({ expiryOptions, options }, setExpiryOperationSpec); + }; + exports2.ifModifiedSince = { + parameterPath: ["options", "modifiedAccessConditions", "ifModifiedSince"], + mapper: { + serializedName: "If-Modified-Since", + xmlName: "If-Modified-Since", + type: { + name: "DateTimeRfc1123" + } } - /** - * The Set HTTP Headers operation sets system properties on the blob - * @param options The options parameters. - */ - setHttpHeaders(options) { - return this.client.sendOperationRequest({ options }, setHttpHeadersOperationSpec); + }; + exports2.ifUnmodifiedSince = { + parameterPath: ["options", "modifiedAccessConditions", "ifUnmodifiedSince"], + mapper: { + serializedName: "If-Unmodified-Since", + xmlName: "If-Unmodified-Since", + type: { + name: "DateTimeRfc1123" + } } - /** - * The Set Immutability Policy operation sets the immutability policy on the blob - * @param options The options parameters. - */ - setImmutabilityPolicy(options) { - return this.client.sendOperationRequest({ options }, setImmutabilityPolicyOperationSpec); + }; + exports2.comp6 = { + parameterPath: "comp", + mapper: { + defaultValue: "metadata", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } } - /** - * The Delete Immutability Policy operation deletes the immutability policy on the blob - * @param options The options parameters. - */ - deleteImmutabilityPolicy(options) { - return this.client.sendOperationRequest({ options }, deleteImmutabilityPolicyOperationSpec); + }; + exports2.comp7 = { + parameterPath: "comp", + mapper: { + defaultValue: "acl", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } } - /** - * The Set Legal Hold operation sets a legal hold on the blob. - * @param legalHold Specified if a legal hold should be set on the blob. - * @param options The options parameters. - */ - setLegalHold(legalHold, options) { - return this.client.sendOperationRequest({ legalHold, options }, setLegalHoldOperationSpec); + }; + exports2.containerAcl = { + parameterPath: ["options", "containerAcl"], + mapper: { + serializedName: "containerAcl", + xmlName: "SignedIdentifiers", + xmlIsWrapped: true, + xmlElementName: "SignedIdentifier", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "SignedIdentifier" + } + } + } } - /** - * The Set Blob Metadata operation sets user-defined metadata for the specified blob as one or more - * name-value pairs - * @param options The options parameters. - */ - setMetadata(options) { - return this.client.sendOperationRequest({ options }, setMetadataOperationSpec); + }; + exports2.comp8 = { + parameterPath: "comp", + mapper: { + defaultValue: "undelete", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } } - /** - * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete - * operations - * @param options The options parameters. 
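The doc comment removed just above spells out the soft-delete flow: a deleted blob is retained for the configured retention period, can be discovered by listing with "include=deleted", and restored with Undelete Blob. As a minimal sketch of that flow, assuming the public @azure/storage-blob convenience clients (BlobServiceClient and friends, which sit on top of this generated layer) and hypothetical connection-string and container names:

import { BlobServiceClient } from "@azure/storage-blob";

async function restoreSoftDeletedBlobs(): Promise<void> {
  // Hypothetical configuration; substitute your own connection string and container.
  const service = BlobServiceClient.fromConnectionString(
    process.env.AZURE_STORAGE_CONNECTION_STRING!,
  );
  const container = service.getContainerClient("my-container");

  // "include=deleted" on List Blobs surfaces soft-deleted blobs alongside live ones.
  for await (const item of container.listBlobsFlat({ includeDeleted: true })) {
    if (item.deleted) {
      // Undelete restores the soft-deleted blob before the retention window expires.
      await container.getBlobClient(item.name).undelete();
    }
  }
}

As the removed comment notes, the soft-deleted blob continues to accrue storage charges until it is permanently removed, so restoring (or letting retention lapse) is the only way to stop paying for it.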
- */ - acquireLease(options) { - return this.client.sendOperationRequest({ options }, acquireLeaseOperationSpec); + }; + exports2.deletedContainerName = { + parameterPath: ["options", "deletedContainerName"], + mapper: { + serializedName: "x-ms-deleted-container-name", + xmlName: "x-ms-deleted-container-name", + type: { + name: "String" + } } - /** - * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete - * operations - * @param leaseId Specifies the current lease ID on the resource. - * @param options The options parameters. - */ - releaseLease(leaseId, options) { - return this.client.sendOperationRequest({ leaseId, options }, releaseLeaseOperationSpec); + }; + exports2.deletedContainerVersion = { + parameterPath: ["options", "deletedContainerVersion"], + mapper: { + serializedName: "x-ms-deleted-container-version", + xmlName: "x-ms-deleted-container-version", + type: { + name: "String" + } } - /** - * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete - * operations - * @param leaseId Specifies the current lease ID on the resource. - * @param options The options parameters. - */ - renewLease(leaseId, options) { - return this.client.sendOperationRequest({ leaseId, options }, renewLeaseOperationSpec); + }; + exports2.comp9 = { + parameterPath: "comp", + mapper: { + defaultValue: "rename", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } } - /** - * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete - * operations - * @param leaseId Specifies the current lease ID on the resource. - * @param proposedLeaseId Proposed lease ID, in a GUID string format. The Blob service returns 400 - * (Invalid request) if the proposed lease ID is not in the correct format. See Guid Constructor - * (String) for a list of valid GUID string formats. - * @param options The options parameters. - */ - changeLease(leaseId, proposedLeaseId, options) { - return this.client.sendOperationRequest({ leaseId, proposedLeaseId, options }, changeLeaseOperationSpec); + }; + exports2.sourceContainerName = { + parameterPath: "sourceContainerName", + mapper: { + serializedName: "x-ms-source-container-name", + required: true, + xmlName: "x-ms-source-container-name", + type: { + name: "String" + } } - /** - * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete - * operations - * @param options The options parameters. - */ - breakLease(options) { - return this.client.sendOperationRequest({ options }, breakLeaseOperationSpec); + }; + exports2.sourceLeaseId = { + parameterPath: ["options", "sourceLeaseId"], + mapper: { + serializedName: "x-ms-source-lease-id", + xmlName: "x-ms-source-lease-id", + type: { + name: "String" + } } - /** - * The Create Snapshot operation creates a read-only snapshot of a blob - * @param options The options parameters. - */ - createSnapshot(options) { - return this.client.sendOperationRequest({ options }, createSnapshotOperationSpec); + }; + exports2.comp10 = { + parameterPath: "comp", + mapper: { + defaultValue: "lease", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } } - /** - * The Start Copy From URL operation copies a blob or an internet resource to a new blob. - * @param copySource Specifies the name of the source page blob snapshot. This value is a URL of up to - * 2 KB in length that specifies a page blob snapshot. 
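The lease methods above correspond to the x-ms-lease-action values ("acquire", "release", "renew", "change", "break") that the action/action1/.../action4 parameter constants elsewhere in this hunk encode. A rough sketch of the same lifecycle through the public BlobLeaseClient, assuming hypothetical blob names; the duration must be 15-60 seconds or -1 for an infinite lease:

import { BlobServiceClient } from "@azure/storage-blob";

async function leaseLifecycle(): Promise<void> {
  const service = BlobServiceClient.fromConnectionString(
    process.env.AZURE_STORAGE_CONNECTION_STRING!,
  );
  const blob = service
    .getContainerClient("my-container")
    .getBlobClient("report.txt");

  const lease = blob.getBlobLeaseClient();
  await lease.acquireLease(30); // "acquire": lock the blob for write/delete operations
  await lease.renewLease();     // "renew": extend the existing lease
  await lease.releaseLease();   // "release": free the blob again
  // changeLease(newLeaseGuid) and breakLease(breakPeriod) cover the remaining actions.
}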
The value should be URL-encoded as it would - * appear in a request URI. The source blob must either be public or must be authenticated via a shared - * access signature. - * @param options The options parameters. - */ - startCopyFromURL(copySource, options) { - return this.client.sendOperationRequest({ copySource, options }, startCopyFromURLOperationSpec); + }; + exports2.action = { + parameterPath: "action", + mapper: { + defaultValue: "acquire", + isConstant: true, + serializedName: "x-ms-lease-action", + type: { + name: "String" + } } - /** - * The Copy From URL operation copies a blob or an internet resource to a new blob. It will not return - * a response until the copy is complete. - * @param copySource Specifies the name of the source page blob snapshot. This value is a URL of up to - * 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it would - * appear in a request URI. The source blob must either be public or must be authenticated via a shared - * access signature. - * @param options The options parameters. - */ - copyFromURL(copySource, options) { - return this.client.sendOperationRequest({ copySource, options }, copyFromURLOperationSpec); + }; + exports2.duration = { + parameterPath: ["options", "duration"], + mapper: { + serializedName: "x-ms-lease-duration", + xmlName: "x-ms-lease-duration", + type: { + name: "Number" + } } - /** - * The Abort Copy From URL operation aborts a pending Copy From URL operation, and leaves a destination - * blob with zero length and full metadata. - * @param copyId The copy identifier provided in the x-ms-copy-id header of the original Copy Blob - * operation. - * @param options The options parameters. - */ - abortCopyFromURL(copyId, options) { - return this.client.sendOperationRequest({ copyId, options }, abortCopyFromURLOperationSpec); + }; + exports2.proposedLeaseId = { + parameterPath: ["options", "proposedLeaseId"], + mapper: { + serializedName: "x-ms-proposed-lease-id", + xmlName: "x-ms-proposed-lease-id", + type: { + name: "String" + } } - /** - * The Set Tier operation sets the tier on a blob. The operation is allowed on a page blob in a premium - * storage account and on a block blob in a blob storage account (locally redundant storage only). A - * premium page blob's tier determines the allowed size, IOPS, and bandwidth of the blob. A block - * blob's tier determines Hot/Cool/Archive storage type. This operation does not update the blob's - * ETag. - * @param tier Indicates the tier to be set on the blob. - * @param options The options parameters. - */ - setTier(tier, options) { - return this.client.sendOperationRequest({ tier, options }, setTierOperationSpec); + }; + exports2.action1 = { + parameterPath: "action", + mapper: { + defaultValue: "release", + isConstant: true, + serializedName: "x-ms-lease-action", + type: { + name: "String" + } } - /** - * Returns the sku name and account kind - * @param options The options parameters. - */ - getAccountInfo(options) { - return this.client.sendOperationRequest({ options }, getAccountInfoOperationSpec); + }; + exports2.leaseId1 = { + parameterPath: "leaseId", + mapper: { + serializedName: "x-ms-lease-id", + required: true, + xmlName: "x-ms-lease-id", + type: { + name: "String" + } } - /** - * The Query operation enables users to select/project on blob data by providing simple query - * expressions. - * @param options The options parameters. 
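Start Copy From URL is asynchronous on the service side, Abort Copy From URL cancels a pending copy, and Set Tier moves a blob between the access tiers listed further down (and, per the removed comment, does not update the blob's ETag). A hedged sketch through the public client, with a hypothetical source URL and destination names:

import { BlobServiceClient } from "@azure/storage-blob";

async function copyThenCool(): Promise<void> {
  const service = BlobServiceClient.fromConnectionString(
    process.env.AZURE_STORAGE_CONNECTION_STRING!,
  );
  const dest = service.getContainerClient("backups").getBlobClient("report-copy.txt");

  // Server-side, asynchronous copy; the poller wraps Start Copy From URL.
  const poller = await dest.beginCopyFromURL(
    "https://example.blob.core.windows.net/source/report.txt",
  );
  // poller.cancelOperation() would issue the abort; here we simply wait for completion.
  await poller.pollUntilDone();

  // Set Tier: move the finished copy to the Cool access tier.
  await dest.setAccessTier("Cool");
}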
- */ - query(options) { - return this.client.sendOperationRequest({ options }, queryOperationSpec); + }; + exports2.action2 = { + parameterPath: "action", + mapper: { + defaultValue: "renew", + isConstant: true, + serializedName: "x-ms-lease-action", + type: { + name: "String" + } } - /** - * The Get Tags operation enables users to get the tags associated with a blob. - * @param options The options parameters. - */ - getTags(options) { - return this.client.sendOperationRequest({ options }, getTagsOperationSpec); + }; + exports2.action3 = { + parameterPath: "action", + mapper: { + defaultValue: "break", + isConstant: true, + serializedName: "x-ms-lease-action", + type: { + name: "String" + } } - /** - * The Set Tags operation enables users to set tags on a blob. - * @param options The options parameters. - */ - setTags(options) { - return this.client.sendOperationRequest({ options }, setTagsOperationSpec); + }; + exports2.breakPeriod = { + parameterPath: ["options", "breakPeriod"], + mapper: { + serializedName: "x-ms-lease-break-period", + xmlName: "x-ms-lease-break-period", + type: { + name: "Number" + } } }; - exports2.BlobImpl = BlobImpl; - var xmlSerializer = coreClient.createSerializer( - Mappers, - /* isXml */ - true - ); - var downloadOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "GET", - responses: { - 200: { - bodyMapper: { - type: { name: "Stream" }, - serializedName: "parsedResponse" - }, - headersMapper: Mappers.BlobDownloadHeaders - }, - 206: { - bodyMapper: { - type: { name: "Stream" }, - serializedName: "parsedResponse" - }, - headersMapper: Mappers.BlobDownloadHeaders - }, - default: { - bodyMapper: Mappers.StorageError, - headersMapper: Mappers.BlobDownloadExceptionHeaders + exports2.action4 = { + parameterPath: "action", + mapper: { + defaultValue: "change", + isConstant: true, + serializedName: "x-ms-lease-action", + type: { + name: "String" } - }, - queryParameters: [ - Parameters.timeoutInSeconds, - Parameters.snapshot, - Parameters.versionId - ], - urlParameters: [Parameters.url], - headerParameters: [ - Parameters.version, - Parameters.requestId, - Parameters.accept1, - Parameters.leaseId, - Parameters.ifModifiedSince, - Parameters.ifUnmodifiedSince, - Parameters.range, - Parameters.rangeGetContentMD5, - Parameters.rangeGetContentCRC64, - Parameters.encryptionKey, - Parameters.encryptionKeySha256, - Parameters.encryptionAlgorithm, - Parameters.ifMatch, - Parameters.ifNoneMatch, - Parameters.ifTags - ], - isXML: true, - serializer: xmlSerializer + } }; - var getPropertiesOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "HEAD", - responses: { - 200: { - headersMapper: Mappers.BlobGetPropertiesHeaders - }, - default: { - bodyMapper: Mappers.StorageError, - headersMapper: Mappers.BlobGetPropertiesExceptionHeaders + exports2.proposedLeaseId1 = { + parameterPath: "proposedLeaseId", + mapper: { + serializedName: "x-ms-proposed-lease-id", + required: true, + xmlName: "x-ms-proposed-lease-id", + type: { + name: "String" } - }, - queryParameters: [ - Parameters.timeoutInSeconds, - Parameters.snapshot, - Parameters.versionId - ], - urlParameters: [Parameters.url], - headerParameters: [ - Parameters.version, - Parameters.requestId, - Parameters.accept1, - Parameters.leaseId, - Parameters.ifModifiedSince, - Parameters.ifUnmodifiedSince, - Parameters.encryptionKey, - Parameters.encryptionKeySha256, - Parameters.encryptionAlgorithm, - Parameters.ifMatch, - Parameters.ifNoneMatch, - Parameters.ifTags - ], - isXML: true, - serializer: 
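Get Tags, Set Tags, and the Query operation described above also have public wrappers. A small sketch, assuming a block blob client (where the SDK exposes query()) and hypothetical names; in Node.js the query result arrives as a readable stream:

import { BlobServiceClient } from "@azure/storage-blob";

async function tagsAndQuery(): Promise<void> {
  const service = BlobServiceClient.fromConnectionString(
    process.env.AZURE_STORAGE_CONNECTION_STRING!,
  );
  const blob = service
    .getContainerClient("my-container")
    .getBlockBlobClient("data.csv");

  await blob.setTags({ project: "alpha", stage: "raw" }); // Set Tags
  const { tags } = await blob.getTags();                  // Get Tags
  console.log(tags);

  // Query: server-side projection/selection over the blob's contents.
  const result = await blob.query("SELECT * FROM BlobStorage");
  result.readableStreamBody?.pipe(process.stdout);
}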
xmlSerializer + } }; - var deleteOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "DELETE", - responses: { - 202: { - headersMapper: Mappers.BlobDeleteHeaders - }, - default: { - bodyMapper: Mappers.StorageError, - headersMapper: Mappers.BlobDeleteExceptionHeaders + exports2.include1 = { + parameterPath: ["options", "include"], + mapper: { + serializedName: "include", + xmlName: "include", + xmlElementName: "ListBlobsIncludeItem", + type: { + name: "Sequence", + element: { + type: { + name: "Enum", + allowedValues: [ + "copy", + "deleted", + "metadata", + "snapshots", + "uncommittedblobs", + "versions", + "tags", + "immutabilitypolicy", + "legalhold", + "deletedwithversions" + ] + } + } } }, - queryParameters: [ - Parameters.timeoutInSeconds, - Parameters.snapshot, - Parameters.versionId, - Parameters.blobDeleteType - ], - urlParameters: [Parameters.url], - headerParameters: [ - Parameters.version, - Parameters.requestId, - Parameters.accept1, - Parameters.leaseId, - Parameters.ifModifiedSince, - Parameters.ifUnmodifiedSince, - Parameters.ifMatch, - Parameters.ifNoneMatch, - Parameters.ifTags, - Parameters.deleteSnapshots - ], - isXML: true, - serializer: xmlSerializer + collectionFormat: "CSV" }; - var undeleteOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 200: { - headersMapper: Mappers.BlobUndeleteHeaders - }, - default: { - bodyMapper: Mappers.StorageError, - headersMapper: Mappers.BlobUndeleteExceptionHeaders + exports2.delimiter = { + parameterPath: "delimiter", + mapper: { + serializedName: "delimiter", + required: true, + xmlName: "delimiter", + type: { + name: "String" } - }, - queryParameters: [Parameters.timeoutInSeconds, Parameters.comp8], - urlParameters: [Parameters.url], - headerParameters: [ - Parameters.version, - Parameters.requestId, - Parameters.accept1 - ], - isXML: true, - serializer: xmlSerializer + } }; - var setExpiryOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 200: { - headersMapper: Mappers.BlobSetExpiryHeaders - }, - default: { - bodyMapper: Mappers.StorageError, - headersMapper: Mappers.BlobSetExpiryExceptionHeaders + exports2.snapshot = { + parameterPath: ["options", "snapshot"], + mapper: { + serializedName: "snapshot", + xmlName: "snapshot", + type: { + name: "String" } - }, - queryParameters: [Parameters.timeoutInSeconds, Parameters.comp11], - urlParameters: [Parameters.url], - headerParameters: [ - Parameters.version, - Parameters.requestId, - Parameters.accept1, - Parameters.expiryOptions, - Parameters.expiresOn - ], - isXML: true, - serializer: xmlSerializer + } }; - var setHttpHeadersOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 200: { - headersMapper: Mappers.BlobSetHttpHeadersHeaders - }, - default: { - bodyMapper: Mappers.StorageError, - headersMapper: Mappers.BlobSetHttpHeadersExceptionHeaders + exports2.versionId = { + parameterPath: ["options", "versionId"], + mapper: { + serializedName: "versionid", + xmlName: "versionid", + type: { + name: "String" } - }, - queryParameters: [Parameters.comp, Parameters.timeoutInSeconds], - urlParameters: [Parameters.url], - headerParameters: [ - Parameters.version, - Parameters.requestId, - Parameters.accept1, - Parameters.leaseId, - Parameters.ifModifiedSince, - Parameters.ifUnmodifiedSince, - Parameters.ifMatch, - Parameters.ifNoneMatch, - Parameters.ifTags, - Parameters.blobCacheControl, - Parameters.blobContentType, - Parameters.blobContentMD5, - 
Parameters.blobContentEncoding, - Parameters.blobContentLanguage, - Parameters.blobContentDisposition - ], - isXML: true, - serializer: xmlSerializer + } }; - var setImmutabilityPolicyOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 200: { - headersMapper: Mappers.BlobSetImmutabilityPolicyHeaders - }, - default: { - bodyMapper: Mappers.StorageError, - headersMapper: Mappers.BlobSetImmutabilityPolicyExceptionHeaders + exports2.range = { + parameterPath: ["options", "range"], + mapper: { + serializedName: "x-ms-range", + xmlName: "x-ms-range", + type: { + name: "String" } - }, - queryParameters: [ - Parameters.timeoutInSeconds, - Parameters.snapshot, - Parameters.versionId, - Parameters.comp12 - ], - urlParameters: [Parameters.url], - headerParameters: [ - Parameters.version, - Parameters.requestId, - Parameters.accept1, - Parameters.ifUnmodifiedSince, - Parameters.immutabilityPolicyExpiry, - Parameters.immutabilityPolicyMode - ], - isXML: true, - serializer: xmlSerializer + } }; - var deleteImmutabilityPolicyOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "DELETE", - responses: { - 200: { - headersMapper: Mappers.BlobDeleteImmutabilityPolicyHeaders - }, - default: { - bodyMapper: Mappers.StorageError, - headersMapper: Mappers.BlobDeleteImmutabilityPolicyExceptionHeaders + exports2.rangeGetContentMD5 = { + parameterPath: ["options", "rangeGetContentMD5"], + mapper: { + serializedName: "x-ms-range-get-content-md5", + xmlName: "x-ms-range-get-content-md5", + type: { + name: "Boolean" } - }, - queryParameters: [ - Parameters.timeoutInSeconds, - Parameters.snapshot, - Parameters.versionId, - Parameters.comp12 - ], - urlParameters: [Parameters.url], - headerParameters: [ - Parameters.version, - Parameters.requestId, - Parameters.accept1 - ], - isXML: true, - serializer: xmlSerializer + } }; - var setLegalHoldOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 200: { - headersMapper: Mappers.BlobSetLegalHoldHeaders - }, - default: { - bodyMapper: Mappers.StorageError, - headersMapper: Mappers.BlobSetLegalHoldExceptionHeaders + exports2.rangeGetContentCRC64 = { + parameterPath: ["options", "rangeGetContentCRC64"], + mapper: { + serializedName: "x-ms-range-get-content-crc64", + xmlName: "x-ms-range-get-content-crc64", + type: { + name: "Boolean" + } + } + }; + exports2.encryptionKey = { + parameterPath: ["options", "cpkInfo", "encryptionKey"], + mapper: { + serializedName: "x-ms-encryption-key", + xmlName: "x-ms-encryption-key", + type: { + name: "String" } - }, - queryParameters: [ - Parameters.timeoutInSeconds, - Parameters.snapshot, - Parameters.versionId, - Parameters.comp13 - ], - urlParameters: [Parameters.url], - headerParameters: [ - Parameters.version, - Parameters.requestId, - Parameters.accept1, - Parameters.legalHold - ], - isXML: true, - serializer: xmlSerializer + } }; - var setMetadataOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 200: { - headersMapper: Mappers.BlobSetMetadataHeaders - }, - default: { - bodyMapper: Mappers.StorageError, - headersMapper: Mappers.BlobSetMetadataExceptionHeaders + exports2.encryptionKeySha256 = { + parameterPath: ["options", "cpkInfo", "encryptionKeySha256"], + mapper: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String" } - }, - queryParameters: [Parameters.timeoutInSeconds, Parameters.comp6], - urlParameters: [Parameters.url], - headerParameters: 
[ - Parameters.version, - Parameters.requestId, - Parameters.accept1, - Parameters.metadata, - Parameters.leaseId, - Parameters.ifModifiedSince, - Parameters.ifUnmodifiedSince, - Parameters.encryptionKey, - Parameters.encryptionKeySha256, - Parameters.encryptionAlgorithm, - Parameters.ifMatch, - Parameters.ifNoneMatch, - Parameters.ifTags, - Parameters.encryptionScope - ], - isXML: true, - serializer: xmlSerializer + } }; - var acquireLeaseOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 201: { - headersMapper: Mappers.BlobAcquireLeaseHeaders - }, - default: { - bodyMapper: Mappers.StorageError, - headersMapper: Mappers.BlobAcquireLeaseExceptionHeaders + exports2.encryptionAlgorithm = { + parameterPath: ["options", "cpkInfo", "encryptionAlgorithm"], + mapper: { + serializedName: "x-ms-encryption-algorithm", + xmlName: "x-ms-encryption-algorithm", + type: { + name: "String" } - }, - queryParameters: [Parameters.timeoutInSeconds, Parameters.comp10], - urlParameters: [Parameters.url], - headerParameters: [ - Parameters.version, - Parameters.requestId, - Parameters.accept1, - Parameters.ifModifiedSince, - Parameters.ifUnmodifiedSince, - Parameters.action, - Parameters.duration, - Parameters.proposedLeaseId, - Parameters.ifMatch, - Parameters.ifNoneMatch, - Parameters.ifTags - ], - isXML: true, - serializer: xmlSerializer + } }; - var releaseLeaseOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 200: { - headersMapper: Mappers.BlobReleaseLeaseHeaders - }, - default: { - bodyMapper: Mappers.StorageError, - headersMapper: Mappers.BlobReleaseLeaseExceptionHeaders + exports2.ifMatch = { + parameterPath: ["options", "modifiedAccessConditions", "ifMatch"], + mapper: { + serializedName: "If-Match", + xmlName: "If-Match", + type: { + name: "String" } - }, - queryParameters: [Parameters.timeoutInSeconds, Parameters.comp10], - urlParameters: [Parameters.url], - headerParameters: [ - Parameters.version, - Parameters.requestId, - Parameters.accept1, - Parameters.ifModifiedSince, - Parameters.ifUnmodifiedSince, - Parameters.action1, - Parameters.leaseId1, - Parameters.ifMatch, - Parameters.ifNoneMatch, - Parameters.ifTags - ], - isXML: true, - serializer: xmlSerializer + } }; - var renewLeaseOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 200: { - headersMapper: Mappers.BlobRenewLeaseHeaders - }, - default: { - bodyMapper: Mappers.StorageError, - headersMapper: Mappers.BlobRenewLeaseExceptionHeaders + exports2.ifNoneMatch = { + parameterPath: ["options", "modifiedAccessConditions", "ifNoneMatch"], + mapper: { + serializedName: "If-None-Match", + xmlName: "If-None-Match", + type: { + name: "String" } - }, - queryParameters: [Parameters.timeoutInSeconds, Parameters.comp10], - urlParameters: [Parameters.url], - headerParameters: [ - Parameters.version, - Parameters.requestId, - Parameters.accept1, - Parameters.ifModifiedSince, - Parameters.ifUnmodifiedSince, - Parameters.leaseId1, - Parameters.action2, - Parameters.ifMatch, - Parameters.ifNoneMatch, - Parameters.ifTags - ], - isXML: true, - serializer: xmlSerializer + } }; - var changeLeaseOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 200: { - headersMapper: Mappers.BlobChangeLeaseHeaders - }, - default: { - bodyMapper: Mappers.StorageError, - headersMapper: Mappers.BlobChangeLeaseExceptionHeaders + exports2.ifTags = { + parameterPath: ["options", "modifiedAccessConditions", "ifTags"], + 
mapper: { + serializedName: "x-ms-if-tags", + xmlName: "x-ms-if-tags", + type: { + name: "String" } - }, - queryParameters: [Parameters.timeoutInSeconds, Parameters.comp10], - urlParameters: [Parameters.url], - headerParameters: [ - Parameters.version, - Parameters.requestId, - Parameters.accept1, - Parameters.ifModifiedSince, - Parameters.ifUnmodifiedSince, - Parameters.leaseId1, - Parameters.action4, - Parameters.proposedLeaseId1, - Parameters.ifMatch, - Parameters.ifNoneMatch, - Parameters.ifTags - ], - isXML: true, - serializer: xmlSerializer + } }; - var breakLeaseOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 202: { - headersMapper: Mappers.BlobBreakLeaseHeaders - }, - default: { - bodyMapper: Mappers.StorageError, - headersMapper: Mappers.BlobBreakLeaseExceptionHeaders + exports2.deleteSnapshots = { + parameterPath: ["options", "deleteSnapshots"], + mapper: { + serializedName: "x-ms-delete-snapshots", + xmlName: "x-ms-delete-snapshots", + type: { + name: "Enum", + allowedValues: ["include", "only"] } - }, - queryParameters: [Parameters.timeoutInSeconds, Parameters.comp10], - urlParameters: [Parameters.url], - headerParameters: [ - Parameters.version, - Parameters.requestId, - Parameters.accept1, - Parameters.ifModifiedSince, - Parameters.ifUnmodifiedSince, - Parameters.action3, - Parameters.breakPeriod, - Parameters.ifMatch, - Parameters.ifNoneMatch, - Parameters.ifTags - ], - isXML: true, - serializer: xmlSerializer + } }; - var createSnapshotOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 201: { - headersMapper: Mappers.BlobCreateSnapshotHeaders - }, - default: { - bodyMapper: Mappers.StorageError, - headersMapper: Mappers.BlobCreateSnapshotExceptionHeaders + exports2.blobDeleteType = { + parameterPath: ["options", "blobDeleteType"], + mapper: { + serializedName: "deletetype", + xmlName: "deletetype", + type: { + name: "String" } - }, - queryParameters: [Parameters.timeoutInSeconds, Parameters.comp14], - urlParameters: [Parameters.url], - headerParameters: [ - Parameters.version, - Parameters.requestId, - Parameters.accept1, - Parameters.metadata, - Parameters.leaseId, - Parameters.ifModifiedSince, - Parameters.ifUnmodifiedSince, - Parameters.encryptionKey, - Parameters.encryptionKeySha256, - Parameters.encryptionAlgorithm, - Parameters.ifMatch, - Parameters.ifNoneMatch, - Parameters.ifTags, - Parameters.encryptionScope - ], - isXML: true, - serializer: xmlSerializer + } }; - var startCopyFromURLOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 202: { - headersMapper: Mappers.BlobStartCopyFromURLHeaders - }, - default: { - bodyMapper: Mappers.StorageError, - headersMapper: Mappers.BlobStartCopyFromURLExceptionHeaders + exports2.comp11 = { + parameterPath: "comp", + mapper: { + defaultValue: "expiry", + isConstant: true, + serializedName: "comp", + type: { + name: "String" } - }, - queryParameters: [Parameters.timeoutInSeconds], - urlParameters: [Parameters.url], - headerParameters: [ - Parameters.version, - Parameters.requestId, - Parameters.accept1, - Parameters.metadata, - Parameters.leaseId, - Parameters.ifModifiedSince, - Parameters.ifUnmodifiedSince, - Parameters.ifMatch, - Parameters.ifNoneMatch, - Parameters.ifTags, - Parameters.immutabilityPolicyExpiry, - Parameters.immutabilityPolicyMode, - Parameters.tier, - Parameters.rehydratePriority, - Parameters.sourceIfModifiedSince, - Parameters.sourceIfUnmodifiedSince, - Parameters.sourceIfMatch, - 
Parameters.sourceIfNoneMatch, - Parameters.sourceIfTags, - Parameters.copySource, - Parameters.blobTagsString, - Parameters.sealBlob, - Parameters.legalHold1 - ], - isXML: true, - serializer: xmlSerializer + } }; - var copyFromURLOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 202: { - headersMapper: Mappers.BlobCopyFromURLHeaders - }, - default: { - bodyMapper: Mappers.StorageError, - headersMapper: Mappers.BlobCopyFromURLExceptionHeaders + exports2.expiryOptions = { + parameterPath: "expiryOptions", + mapper: { + serializedName: "x-ms-expiry-option", + required: true, + xmlName: "x-ms-expiry-option", + type: { + name: "String" } - }, - queryParameters: [Parameters.timeoutInSeconds], - urlParameters: [Parameters.url], - headerParameters: [ - Parameters.version, - Parameters.requestId, - Parameters.accept1, - Parameters.metadata, - Parameters.leaseId, - Parameters.ifModifiedSince, - Parameters.ifUnmodifiedSince, - Parameters.ifMatch, - Parameters.ifNoneMatch, - Parameters.ifTags, - Parameters.immutabilityPolicyExpiry, - Parameters.immutabilityPolicyMode, - Parameters.encryptionScope, - Parameters.tier, - Parameters.sourceIfModifiedSince, - Parameters.sourceIfUnmodifiedSince, - Parameters.sourceIfMatch, - Parameters.sourceIfNoneMatch, - Parameters.copySource, - Parameters.blobTagsString, - Parameters.legalHold1, - Parameters.xMsRequiresSync, - Parameters.sourceContentMD5, - Parameters.copySourceAuthorization, - Parameters.copySourceTags, - Parameters.fileRequestIntent - ], - isXML: true, - serializer: xmlSerializer + } }; - var abortCopyFromURLOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 204: { - headersMapper: Mappers.BlobAbortCopyFromURLHeaders - }, - default: { - bodyMapper: Mappers.StorageError, - headersMapper: Mappers.BlobAbortCopyFromURLExceptionHeaders + exports2.expiresOn = { + parameterPath: ["options", "expiresOn"], + mapper: { + serializedName: "x-ms-expiry-time", + xmlName: "x-ms-expiry-time", + type: { + name: "String" } - }, - queryParameters: [ - Parameters.timeoutInSeconds, - Parameters.comp15, - Parameters.copyId - ], - urlParameters: [Parameters.url], - headerParameters: [ - Parameters.version, - Parameters.requestId, - Parameters.accept1, - Parameters.leaseId, - Parameters.copyActionAbortConstant - ], - isXML: true, - serializer: xmlSerializer + } }; - var setTierOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 200: { - headersMapper: Mappers.BlobSetTierHeaders - }, - 202: { - headersMapper: Mappers.BlobSetTierHeaders - }, - default: { - bodyMapper: Mappers.StorageError, - headersMapper: Mappers.BlobSetTierExceptionHeaders + exports2.blobCacheControl = { + parameterPath: ["options", "blobHttpHeaders", "blobCacheControl"], + mapper: { + serializedName: "x-ms-blob-cache-control", + xmlName: "x-ms-blob-cache-control", + type: { + name: "String" } - }, - queryParameters: [ - Parameters.timeoutInSeconds, - Parameters.snapshot, - Parameters.versionId, - Parameters.comp16 - ], - urlParameters: [Parameters.url], - headerParameters: [ - Parameters.version, - Parameters.requestId, - Parameters.accept1, - Parameters.leaseId, - Parameters.ifTags, - Parameters.rehydratePriority, - Parameters.tier1 - ], - isXML: true, - serializer: xmlSerializer + } }; - var getAccountInfoOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "GET", - responses: { - 200: { - headersMapper: Mappers.BlobGetAccountInfoHeaders - }, - default: { - bodyMapper: 
Mappers.StorageError, - headersMapper: Mappers.BlobGetAccountInfoExceptionHeaders + exports2.blobContentType = { + parameterPath: ["options", "blobHttpHeaders", "blobContentType"], + mapper: { + serializedName: "x-ms-blob-content-type", + xmlName: "x-ms-blob-content-type", + type: { + name: "String" } - }, - queryParameters: [ - Parameters.comp, - Parameters.timeoutInSeconds, - Parameters.restype1 - ], - urlParameters: [Parameters.url], - headerParameters: [ - Parameters.version, - Parameters.requestId, - Parameters.accept1 - ], - isXML: true, - serializer: xmlSerializer + } }; - var queryOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "POST", - responses: { - 200: { - bodyMapper: { - type: { name: "Stream" }, - serializedName: "parsedResponse" - }, - headersMapper: Mappers.BlobQueryHeaders - }, - 206: { - bodyMapper: { - type: { name: "Stream" }, - serializedName: "parsedResponse" - }, - headersMapper: Mappers.BlobQueryHeaders - }, - default: { - bodyMapper: Mappers.StorageError, - headersMapper: Mappers.BlobQueryExceptionHeaders + exports2.blobContentMD5 = { + parameterPath: ["options", "blobHttpHeaders", "blobContentMD5"], + mapper: { + serializedName: "x-ms-blob-content-md5", + xmlName: "x-ms-blob-content-md5", + type: { + name: "ByteArray" } - }, - requestBody: Parameters.queryRequest, - queryParameters: [ - Parameters.timeoutInSeconds, - Parameters.snapshot, - Parameters.comp17 - ], - urlParameters: [Parameters.url], - headerParameters: [ - Parameters.contentType, - Parameters.accept, - Parameters.version, - Parameters.requestId, - Parameters.leaseId, - Parameters.ifModifiedSince, - Parameters.ifUnmodifiedSince, - Parameters.encryptionKey, - Parameters.encryptionKeySha256, - Parameters.encryptionAlgorithm, - Parameters.ifMatch, - Parameters.ifNoneMatch, - Parameters.ifTags - ], - isXML: true, - contentType: "application/xml; charset=utf-8", - mediaType: "xml", - serializer: xmlSerializer + } }; - var getTagsOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "GET", - responses: { - 200: { - bodyMapper: Mappers.BlobTags, - headersMapper: Mappers.BlobGetTagsHeaders - }, - default: { - bodyMapper: Mappers.StorageError, - headersMapper: Mappers.BlobGetTagsExceptionHeaders + exports2.blobContentEncoding = { + parameterPath: ["options", "blobHttpHeaders", "blobContentEncoding"], + mapper: { + serializedName: "x-ms-blob-content-encoding", + xmlName: "x-ms-blob-content-encoding", + type: { + name: "String" } - }, - queryParameters: [ - Parameters.timeoutInSeconds, - Parameters.snapshot, - Parameters.versionId, - Parameters.comp18 - ], - urlParameters: [Parameters.url], - headerParameters: [ - Parameters.version, - Parameters.requestId, - Parameters.accept1, - Parameters.leaseId, - Parameters.ifTags - ], - isXML: true, - serializer: xmlSerializer + } }; - var setTagsOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 204: { - headersMapper: Mappers.BlobSetTagsHeaders - }, - default: { - bodyMapper: Mappers.StorageError, - headersMapper: Mappers.BlobSetTagsExceptionHeaders + exports2.blobContentLanguage = { + parameterPath: ["options", "blobHttpHeaders", "blobContentLanguage"], + mapper: { + serializedName: "x-ms-blob-content-language", + xmlName: "x-ms-blob-content-language", + type: { + name: "String" } - }, - requestBody: Parameters.tags, - queryParameters: [ - Parameters.timeoutInSeconds, - Parameters.versionId, - Parameters.comp18 - ], - urlParameters: [Parameters.url], - headerParameters: [ - Parameters.contentType, 
- Parameters.accept, - Parameters.version, - Parameters.requestId, - Parameters.leaseId, - Parameters.ifTags, - Parameters.transactionalContentMD5, - Parameters.transactionalContentCrc64 - ], - isXML: true, - contentType: "application/xml; charset=utf-8", - mediaType: "xml", - serializer: xmlSerializer + } }; - } -}); - -// node_modules/@azure/storage-blob/dist/commonjs/generated/src/operations/pageBlob.js -var require_pageBlob = __commonJS({ - "node_modules/@azure/storage-blob/dist/commonjs/generated/src/operations/pageBlob.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.PageBlobImpl = void 0; - var tslib_1 = (init_tslib_es6(), __toCommonJS(tslib_es6_exports)); - var coreClient = tslib_1.__importStar(require_commonjs8()); - var Mappers = tslib_1.__importStar(require_mappers()); - var Parameters = tslib_1.__importStar(require_parameters()); - var PageBlobImpl = class { - client; - /** - * Initialize a new instance of the class PageBlob class. - * @param client Reference to the service client - */ - constructor(client) { - this.client = client; + exports2.blobContentDisposition = { + parameterPath: ["options", "blobHttpHeaders", "blobContentDisposition"], + mapper: { + serializedName: "x-ms-blob-content-disposition", + xmlName: "x-ms-blob-content-disposition", + type: { + name: "String" + } } - /** - * The Create operation creates a new page blob. - * @param contentLength The length of the request. - * @param blobContentLength This header specifies the maximum size for the page blob, up to 1 TB. The - * page blob size must be aligned to a 512-byte boundary. - * @param options The options parameters. - */ - create(contentLength, blobContentLength, options) { - return this.client.sendOperationRequest({ contentLength, blobContentLength, options }, createOperationSpec); + }; + exports2.comp12 = { + parameterPath: "comp", + mapper: { + defaultValue: "immutabilityPolicies", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } } - /** - * The Upload Pages operation writes a range of pages to a page blob - * @param contentLength The length of the request. - * @param body Initial data - * @param options The options parameters. - */ - uploadPages(contentLength, body, options) { - return this.client.sendOperationRequest({ contentLength, body, options }, uploadPagesOperationSpec); + }; + exports2.immutabilityPolicyExpiry = { + parameterPath: ["options", "immutabilityPolicyExpiry"], + mapper: { + serializedName: "x-ms-immutability-policy-until-date", + xmlName: "x-ms-immutability-policy-until-date", + type: { + name: "DateTimeRfc1123" + } } - /** - * The Clear Pages operation clears a set of pages from a page blob - * @param contentLength The length of the request. - * @param options The options parameters. - */ - clearPages(contentLength, options) { - return this.client.sendOperationRequest({ contentLength, options }, clearPagesOperationSpec); + }; + exports2.immutabilityPolicyMode = { + parameterPath: ["options", "immutabilityPolicyMode"], + mapper: { + serializedName: "x-ms-immutability-policy-mode", + xmlName: "x-ms-immutability-policy-mode", + type: { + name: "Enum", + allowedValues: ["Mutable", "Unlocked", "Locked"] + } } - /** - * The Upload Pages operation writes a range of pages to a page blob where the contents are read from a - * URL - * @param sourceUrl Specify a URL to the copy source. - * @param sourceRange Bytes of source data in the specified range. 
The length of this range should - * match the ContentLength header and x-ms-range/Range destination range header. - * @param contentLength The length of the request. - * @param range The range of bytes to which the source range would be written. The range should be 512 - * aligned and range-end is required. - * @param options The options parameters. - */ - uploadPagesFromURL(sourceUrl, sourceRange, contentLength, range, options) { - return this.client.sendOperationRequest({ sourceUrl, sourceRange, contentLength, range, options }, uploadPagesFromURLOperationSpec); + }; + exports2.comp13 = { + parameterPath: "comp", + mapper: { + defaultValue: "legalhold", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } } - /** - * The Get Page Ranges operation returns the list of valid page ranges for a page blob or snapshot of a - * page blob - * @param options The options parameters. - */ - getPageRanges(options) { - return this.client.sendOperationRequest({ options }, getPageRangesOperationSpec); + }; + exports2.legalHold = { + parameterPath: "legalHold", + mapper: { + serializedName: "x-ms-legal-hold", + required: true, + xmlName: "x-ms-legal-hold", + type: { + name: "Boolean" + } } - /** - * The Get Page Ranges Diff operation returns the list of valid page ranges for a page blob that were - * changed between target blob and previous snapshot. - * @param options The options parameters. - */ - getPageRangesDiff(options) { - return this.client.sendOperationRequest({ options }, getPageRangesDiffOperationSpec); + }; + exports2.encryptionScope = { + parameterPath: ["options", "encryptionScope"], + mapper: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } } - /** - * Resize the Blob - * @param blobContentLength This header specifies the maximum size for the page blob, up to 1 TB. The - * page blob size must be aligned to a 512-byte boundary. - * @param options The options parameters. - */ - resize(blobContentLength, options) { - return this.client.sendOperationRequest({ blobContentLength, options }, resizeOperationSpec); + }; + exports2.comp14 = { + parameterPath: "comp", + mapper: { + defaultValue: "snapshot", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } } - /** - * Update the sequence number of the blob - * @param sequenceNumberAction Required if the x-ms-blob-sequence-number header is set for the request. - * This property applies to page blobs only. This property indicates how the service should modify the - * blob's sequence number - * @param options The options parameters. - */ - updateSequenceNumber(sequenceNumberAction, options) { - return this.client.sendOperationRequest({ sequenceNumberAction, options }, updateSequenceNumberOperationSpec); + }; + exports2.tier = { + parameterPath: ["options", "tier"], + mapper: { + serializedName: "x-ms-access-tier", + xmlName: "x-ms-access-tier", + type: { + name: "Enum", + allowedValues: [ + "P4", + "P6", + "P10", + "P15", + "P20", + "P30", + "P40", + "P50", + "P60", + "P70", + "P80", + "Hot", + "Cool", + "Archive", + "Cold" + ] + } } - /** - * The Copy Incremental operation copies a snapshot of the source page blob to a destination page blob. - * The snapshot is copied such that only the differential changes between the previously copied - * snapshot are transferred to the destination. The copied snapshots are complete copies of the - * original snapshot and can be read or copied from as usual. 
This API is supported since REST version - * 2016-05-31. - * @param copySource Specifies the name of the source page blob snapshot. This value is a URL of up to - * 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it would - * appear in a request URI. The source blob must either be public or must be authenticated via a shared - * access signature. - * @param options The options parameters. - */ - copyIncremental(copySource, options) { - return this.client.sendOperationRequest({ copySource, options }, copyIncrementalOperationSpec); + }; + exports2.rehydratePriority = { + parameterPath: ["options", "rehydratePriority"], + mapper: { + serializedName: "x-ms-rehydrate-priority", + xmlName: "x-ms-rehydrate-priority", + type: { + name: "Enum", + allowedValues: ["High", "Standard"] + } } }; - exports2.PageBlobImpl = PageBlobImpl; - var xmlSerializer = coreClient.createSerializer( - Mappers, - /* isXml */ - true - ); - var createOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 201: { - headersMapper: Mappers.PageBlobCreateHeaders - }, - default: { - bodyMapper: Mappers.StorageError, - headersMapper: Mappers.PageBlobCreateExceptionHeaders + exports2.sourceIfModifiedSince = { + parameterPath: [ + "options", + "sourceModifiedAccessConditions", + "sourceIfModifiedSince" + ], + mapper: { + serializedName: "x-ms-source-if-modified-since", + xmlName: "x-ms-source-if-modified-since", + type: { + name: "DateTimeRfc1123" } - }, - queryParameters: [Parameters.timeoutInSeconds], - urlParameters: [Parameters.url], - headerParameters: [ - Parameters.version, - Parameters.requestId, - Parameters.accept1, - Parameters.contentLength, - Parameters.metadata, - Parameters.leaseId, - Parameters.ifModifiedSince, - Parameters.ifUnmodifiedSince, - Parameters.encryptionKey, - Parameters.encryptionKeySha256, - Parameters.encryptionAlgorithm, - Parameters.ifMatch, - Parameters.ifNoneMatch, - Parameters.ifTags, - Parameters.blobCacheControl, - Parameters.blobContentType, - Parameters.blobContentMD5, - Parameters.blobContentEncoding, - Parameters.blobContentLanguage, - Parameters.blobContentDisposition, - Parameters.immutabilityPolicyExpiry, - Parameters.immutabilityPolicyMode, - Parameters.encryptionScope, - Parameters.tier, - Parameters.blobTagsString, - Parameters.legalHold1, - Parameters.blobType, - Parameters.blobContentLength, - Parameters.blobSequenceNumber + } + }; + exports2.sourceIfUnmodifiedSince = { + parameterPath: [ + "options", + "sourceModifiedAccessConditions", + "sourceIfUnmodifiedSince" ], - isXML: true, - serializer: xmlSerializer + mapper: { + serializedName: "x-ms-source-if-unmodified-since", + xmlName: "x-ms-source-if-unmodified-since", + type: { + name: "DateTimeRfc1123" + } + } }; - var uploadPagesOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 201: { - headersMapper: Mappers.PageBlobUploadPagesHeaders - }, - default: { - bodyMapper: Mappers.StorageError, - headersMapper: Mappers.PageBlobUploadPagesExceptionHeaders + exports2.sourceIfMatch = { + parameterPath: ["options", "sourceModifiedAccessConditions", "sourceIfMatch"], + mapper: { + serializedName: "x-ms-source-if-match", + xmlName: "x-ms-source-if-match", + type: { + name: "String" } - }, - requestBody: Parameters.body1, - queryParameters: [Parameters.timeoutInSeconds, Parameters.comp19], - urlParameters: [Parameters.url], - headerParameters: [ - Parameters.version, - Parameters.requestId, - Parameters.contentLength, - 
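The PageBlobImpl methods above operate on 512-byte-aligned pages of a blob created at a fixed maximum size, with Get Page Ranges reporting which ranges currently hold data. A minimal sketch using the public PageBlobClient, with hypothetical container and blob names:

import { BlobServiceClient } from "@azure/storage-blob";

async function pageBlobBasics(): Promise<void> {
  const service = BlobServiceClient.fromConnectionString(
    process.env.AZURE_STORAGE_CONNECTION_STRING!,
  );
  const pageBlob = service.getContainerClient("disks").getPageBlobClient("disk.vhd");

  await pageBlob.create(512 * 1024);                // maximum size, must be 512-byte aligned
  const page = Buffer.alloc(512, 0x1);
  await pageBlob.uploadPages(page, 0, page.length); // Upload Pages: write one page at offset 0
  await pageBlob.clearPages(0, 512);                // Clear Pages: zero that page out again
  const ranges = await pageBlob.getPageRanges(0, 512 * 1024);
  console.log(ranges.pageRange);                    // Get Page Ranges: currently-valid ranges
  await pageBlob.resize(1024 * 1024);               // Resize: grow the maximum size (still aligned)
}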
Parameters.leaseId, - Parameters.ifModifiedSince, - Parameters.ifUnmodifiedSince, - Parameters.range, - Parameters.encryptionKey, - Parameters.encryptionKeySha256, - Parameters.encryptionAlgorithm, - Parameters.ifMatch, - Parameters.ifNoneMatch, - Parameters.ifTags, - Parameters.encryptionScope, - Parameters.transactionalContentMD5, - Parameters.transactionalContentCrc64, - Parameters.contentType1, - Parameters.accept2, - Parameters.pageWrite, - Parameters.ifSequenceNumberLessThanOrEqualTo, - Parameters.ifSequenceNumberLessThan, - Parameters.ifSequenceNumberEqualTo + } + }; + exports2.sourceIfNoneMatch = { + parameterPath: [ + "options", + "sourceModifiedAccessConditions", + "sourceIfNoneMatch" ], - isXML: true, - contentType: "application/xml; charset=utf-8", - mediaType: "binary", - serializer: xmlSerializer + mapper: { + serializedName: "x-ms-source-if-none-match", + xmlName: "x-ms-source-if-none-match", + type: { + name: "String" + } + } }; - var clearPagesOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 201: { - headersMapper: Mappers.PageBlobClearPagesHeaders - }, - default: { - bodyMapper: Mappers.StorageError, - headersMapper: Mappers.PageBlobClearPagesExceptionHeaders + exports2.sourceIfTags = { + parameterPath: ["options", "sourceModifiedAccessConditions", "sourceIfTags"], + mapper: { + serializedName: "x-ms-source-if-tags", + xmlName: "x-ms-source-if-tags", + type: { + name: "String" } - }, - queryParameters: [Parameters.timeoutInSeconds, Parameters.comp19], - urlParameters: [Parameters.url], - headerParameters: [ - Parameters.version, - Parameters.requestId, - Parameters.accept1, - Parameters.contentLength, - Parameters.leaseId, - Parameters.ifModifiedSince, - Parameters.ifUnmodifiedSince, - Parameters.range, - Parameters.encryptionKey, - Parameters.encryptionKeySha256, - Parameters.encryptionAlgorithm, - Parameters.ifMatch, - Parameters.ifNoneMatch, - Parameters.ifTags, - Parameters.encryptionScope, - Parameters.ifSequenceNumberLessThanOrEqualTo, - Parameters.ifSequenceNumberLessThan, - Parameters.ifSequenceNumberEqualTo, - Parameters.pageWrite1 - ], - isXML: true, - serializer: xmlSerializer + } + }; + exports2.copySource = { + parameterPath: "copySource", + mapper: { + serializedName: "x-ms-copy-source", + required: true, + xmlName: "x-ms-copy-source", + type: { + name: "String" + } + } }; - var uploadPagesFromURLOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 201: { - headersMapper: Mappers.PageBlobUploadPagesFromURLHeaders - }, - default: { - bodyMapper: Mappers.StorageError, - headersMapper: Mappers.PageBlobUploadPagesFromURLExceptionHeaders + exports2.blobTagsString = { + parameterPath: ["options", "blobTagsString"], + mapper: { + serializedName: "x-ms-tags", + xmlName: "x-ms-tags", + type: { + name: "String" } - }, - queryParameters: [Parameters.timeoutInSeconds, Parameters.comp19], - urlParameters: [Parameters.url], - headerParameters: [ - Parameters.version, - Parameters.requestId, - Parameters.accept1, - Parameters.contentLength, - Parameters.leaseId, - Parameters.ifModifiedSince, - Parameters.ifUnmodifiedSince, - Parameters.encryptionKey, - Parameters.encryptionKeySha256, - Parameters.encryptionAlgorithm, - Parameters.ifMatch, - Parameters.ifNoneMatch, - Parameters.ifTags, - Parameters.encryptionScope, - Parameters.sourceIfModifiedSince, - Parameters.sourceIfUnmodifiedSince, - Parameters.sourceIfMatch, - Parameters.sourceIfNoneMatch, - Parameters.sourceContentMD5, - 
Parameters.copySourceAuthorization, - Parameters.fileRequestIntent, - Parameters.pageWrite, - Parameters.ifSequenceNumberLessThanOrEqualTo, - Parameters.ifSequenceNumberLessThan, - Parameters.ifSequenceNumberEqualTo, - Parameters.sourceUrl, - Parameters.sourceRange, - Parameters.sourceContentCrc64, - Parameters.range1 - ], - isXML: true, - serializer: xmlSerializer + } }; - var getPageRangesOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "GET", - responses: { - 200: { - bodyMapper: Mappers.PageList, - headersMapper: Mappers.PageBlobGetPageRangesHeaders - }, - default: { - bodyMapper: Mappers.StorageError, - headersMapper: Mappers.PageBlobGetPageRangesExceptionHeaders + exports2.sealBlob = { + parameterPath: ["options", "sealBlob"], + mapper: { + serializedName: "x-ms-seal-blob", + xmlName: "x-ms-seal-blob", + type: { + name: "Boolean" } - }, - queryParameters: [ - Parameters.timeoutInSeconds, - Parameters.marker, - Parameters.maxPageSize, - Parameters.snapshot, - Parameters.comp20 - ], - urlParameters: [Parameters.url], - headerParameters: [ - Parameters.version, - Parameters.requestId, - Parameters.accept1, - Parameters.leaseId, - Parameters.ifModifiedSince, - Parameters.ifUnmodifiedSince, - Parameters.range, - Parameters.ifMatch, - Parameters.ifNoneMatch, - Parameters.ifTags - ], - isXML: true, - serializer: xmlSerializer + } }; - var getPageRangesDiffOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "GET", - responses: { - 200: { - bodyMapper: Mappers.PageList, - headersMapper: Mappers.PageBlobGetPageRangesDiffHeaders - }, - default: { - bodyMapper: Mappers.StorageError, - headersMapper: Mappers.PageBlobGetPageRangesDiffExceptionHeaders + exports2.legalHold1 = { + parameterPath: ["options", "legalHold"], + mapper: { + serializedName: "x-ms-legal-hold", + xmlName: "x-ms-legal-hold", + type: { + name: "Boolean" } - }, - queryParameters: [ - Parameters.timeoutInSeconds, - Parameters.marker, - Parameters.maxPageSize, - Parameters.snapshot, - Parameters.comp20, - Parameters.prevsnapshot - ], - urlParameters: [Parameters.url], - headerParameters: [ - Parameters.version, - Parameters.requestId, - Parameters.accept1, - Parameters.leaseId, - Parameters.ifModifiedSince, - Parameters.ifUnmodifiedSince, - Parameters.range, - Parameters.ifMatch, - Parameters.ifNoneMatch, - Parameters.ifTags, - Parameters.prevSnapshotUrl - ], - isXML: true, - serializer: xmlSerializer + } }; - var resizeOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 200: { - headersMapper: Mappers.PageBlobResizeHeaders - }, - default: { - bodyMapper: Mappers.StorageError, - headersMapper: Mappers.PageBlobResizeExceptionHeaders + exports2.xMsRequiresSync = { + parameterPath: "xMsRequiresSync", + mapper: { + defaultValue: "true", + isConstant: true, + serializedName: "x-ms-requires-sync", + type: { + name: "String" } - }, - queryParameters: [Parameters.comp, Parameters.timeoutInSeconds], - urlParameters: [Parameters.url], - headerParameters: [ - Parameters.version, - Parameters.requestId, - Parameters.accept1, - Parameters.leaseId, - Parameters.ifModifiedSince, - Parameters.ifUnmodifiedSince, - Parameters.encryptionKey, - Parameters.encryptionKeySha256, - Parameters.encryptionAlgorithm, - Parameters.ifMatch, - Parameters.ifNoneMatch, - Parameters.ifTags, - Parameters.encryptionScope, - Parameters.blobContentLength - ], - isXML: true, - serializer: xmlSerializer + } }; - var updateSequenceNumberOperationSpec = { - path: "/{containerName}/{blob}", - 
httpMethod: "PUT", - responses: { - 200: { - headersMapper: Mappers.PageBlobUpdateSequenceNumberHeaders - }, - default: { - bodyMapper: Mappers.StorageError, - headersMapper: Mappers.PageBlobUpdateSequenceNumberExceptionHeaders + exports2.sourceContentMD5 = { + parameterPath: ["options", "sourceContentMD5"], + mapper: { + serializedName: "x-ms-source-content-md5", + xmlName: "x-ms-source-content-md5", + type: { + name: "ByteArray" } - }, - queryParameters: [Parameters.comp, Parameters.timeoutInSeconds], - urlParameters: [Parameters.url], - headerParameters: [ - Parameters.version, - Parameters.requestId, - Parameters.accept1, - Parameters.leaseId, - Parameters.ifModifiedSince, - Parameters.ifUnmodifiedSince, - Parameters.ifMatch, - Parameters.ifNoneMatch, - Parameters.ifTags, - Parameters.blobSequenceNumber, - Parameters.sequenceNumberAction - ], - isXML: true, - serializer: xmlSerializer + } }; - var copyIncrementalOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 202: { - headersMapper: Mappers.PageBlobCopyIncrementalHeaders - }, - default: { - bodyMapper: Mappers.StorageError, - headersMapper: Mappers.PageBlobCopyIncrementalExceptionHeaders + exports2.copySourceAuthorization = { + parameterPath: ["options", "copySourceAuthorization"], + mapper: { + serializedName: "x-ms-copy-source-authorization", + xmlName: "x-ms-copy-source-authorization", + type: { + name: "String" } - }, - queryParameters: [Parameters.timeoutInSeconds, Parameters.comp21], - urlParameters: [Parameters.url], - headerParameters: [ - Parameters.version, - Parameters.requestId, - Parameters.accept1, - Parameters.ifModifiedSince, - Parameters.ifUnmodifiedSince, - Parameters.ifMatch, - Parameters.ifNoneMatch, - Parameters.ifTags, - Parameters.copySource - ], - isXML: true, - serializer: xmlSerializer + } }; - } -}); - -// node_modules/@azure/storage-blob/dist/commonjs/generated/src/operations/appendBlob.js -var require_appendBlob = __commonJS({ - "node_modules/@azure/storage-blob/dist/commonjs/generated/src/operations/appendBlob.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.AppendBlobImpl = void 0; - var tslib_1 = (init_tslib_es6(), __toCommonJS(tslib_es6_exports)); - var coreClient = tslib_1.__importStar(require_commonjs8()); - var Mappers = tslib_1.__importStar(require_mappers()); - var Parameters = tslib_1.__importStar(require_parameters()); - var AppendBlobImpl = class { - client; - /** - * Initialize a new instance of the class AppendBlob class. - * @param client Reference to the service client - */ - constructor(client) { - this.client = client; + exports2.copySourceTags = { + parameterPath: ["options", "copySourceTags"], + mapper: { + serializedName: "x-ms-copy-source-tag-option", + xmlName: "x-ms-copy-source-tag-option", + type: { + name: "Enum", + allowedValues: ["REPLACE", "COPY"] + } } - /** - * The Create Append Blob operation creates a new append blob. - * @param contentLength The length of the request. - * @param options The options parameters. - */ - create(contentLength, options) { - return this.client.sendOperationRequest({ contentLength, options }, createOperationSpec); + }; + exports2.fileRequestIntent = { + parameterPath: ["options", "fileRequestIntent"], + mapper: { + serializedName: "x-ms-file-request-intent", + xmlName: "x-ms-file-request-intent", + type: { + name: "String" + } } - /** - * The Append Block operation commits a new block of data to the end of an existing append blob. 
The - * Append Block operation is permitted only if the blob was created with x-ms-blob-type set to - * AppendBlob. Append Block is supported only on version 2015-02-21 version or later. - * @param contentLength The length of the request. - * @param body Initial data - * @param options The options parameters. - */ - appendBlock(contentLength, body, options) { - return this.client.sendOperationRequest({ contentLength, body, options }, appendBlockOperationSpec); + }; + exports2.comp15 = { + parameterPath: "comp", + mapper: { + defaultValue: "copy", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } } - /** - * The Append Block operation commits a new block of data to the end of an existing append blob where - * the contents are read from a source url. The Append Block operation is permitted only if the blob - * was created with x-ms-blob-type set to AppendBlob. Append Block is supported only on version - * 2015-02-21 version or later. - * @param sourceUrl Specify a URL to the copy source. - * @param contentLength The length of the request. - * @param options The options parameters. - */ - appendBlockFromUrl(sourceUrl, contentLength, options) { - return this.client.sendOperationRequest({ sourceUrl, contentLength, options }, appendBlockFromUrlOperationSpec); + }; + exports2.copyActionAbortConstant = { + parameterPath: "copyActionAbortConstant", + mapper: { + defaultValue: "abort", + isConstant: true, + serializedName: "x-ms-copy-action", + type: { + name: "String" + } } - /** - * The Seal operation seals the Append Blob to make it read-only. Seal is supported only on version - * 2019-12-12 version or later. - * @param options The options parameters. - */ - seal(options) { - return this.client.sendOperationRequest({ options }, sealOperationSpec); + }; + exports2.copyId = { + parameterPath: "copyId", + mapper: { + serializedName: "copyid", + required: true, + xmlName: "copyid", + type: { + name: "String" + } } }; - exports2.AppendBlobImpl = AppendBlobImpl; - var xmlSerializer = coreClient.createSerializer( - Mappers, - /* isXml */ - true - ); - var createOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 201: { - headersMapper: Mappers.AppendBlobCreateHeaders - }, - default: { - bodyMapper: Mappers.StorageError, - headersMapper: Mappers.AppendBlobCreateExceptionHeaders + exports2.comp16 = { + parameterPath: "comp", + mapper: { + defaultValue: "tier", + isConstant: true, + serializedName: "comp", + type: { + name: "String" } - }, - queryParameters: [Parameters.timeoutInSeconds], - urlParameters: [Parameters.url], - headerParameters: [ - Parameters.version, - Parameters.requestId, - Parameters.accept1, - Parameters.contentLength, - Parameters.metadata, - Parameters.leaseId, - Parameters.ifModifiedSince, - Parameters.ifUnmodifiedSince, - Parameters.encryptionKey, - Parameters.encryptionKeySha256, - Parameters.encryptionAlgorithm, - Parameters.ifMatch, - Parameters.ifNoneMatch, - Parameters.ifTags, - Parameters.blobCacheControl, - Parameters.blobContentType, - Parameters.blobContentMD5, - Parameters.blobContentEncoding, - Parameters.blobContentLanguage, - Parameters.blobContentDisposition, - Parameters.immutabilityPolicyExpiry, - Parameters.immutabilityPolicyMode, - Parameters.encryptionScope, - Parameters.blobTagsString, - Parameters.legalHold1, - Parameters.blobType1 - ], - isXML: true, - serializer: xmlSerializer + } }; - var appendBlockOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 201: 
{ - headersMapper: Mappers.AppendBlobAppendBlockHeaders - }, - default: { - bodyMapper: Mappers.StorageError, - headersMapper: Mappers.AppendBlobAppendBlockExceptionHeaders + exports2.tier1 = { + parameterPath: "tier", + mapper: { + serializedName: "x-ms-access-tier", + required: true, + xmlName: "x-ms-access-tier", + type: { + name: "Enum", + allowedValues: [ + "P4", + "P6", + "P10", + "P15", + "P20", + "P30", + "P40", + "P50", + "P60", + "P70", + "P80", + "Hot", + "Cool", + "Archive", + "Cold" + ] } - }, - requestBody: Parameters.body1, - queryParameters: [Parameters.timeoutInSeconds, Parameters.comp22], - urlParameters: [Parameters.url], - headerParameters: [ - Parameters.version, - Parameters.requestId, - Parameters.contentLength, - Parameters.leaseId, - Parameters.ifModifiedSince, - Parameters.ifUnmodifiedSince, - Parameters.encryptionKey, - Parameters.encryptionKeySha256, - Parameters.encryptionAlgorithm, - Parameters.ifMatch, - Parameters.ifNoneMatch, - Parameters.ifTags, - Parameters.encryptionScope, - Parameters.transactionalContentMD5, - Parameters.transactionalContentCrc64, - Parameters.contentType1, - Parameters.accept2, - Parameters.maxSize, - Parameters.appendPosition - ], - isXML: true, - contentType: "application/xml; charset=utf-8", - mediaType: "binary", - serializer: xmlSerializer + } + }; + exports2.queryRequest = { + parameterPath: ["options", "queryRequest"], + mapper: mappers_js_1.QueryRequest + }; + exports2.comp17 = { + parameterPath: "comp", + mapper: { + defaultValue: "query", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } + }; + exports2.comp18 = { + parameterPath: "comp", + mapper: { + defaultValue: "tags", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } + }; + exports2.tags = { + parameterPath: ["options", "tags"], + mapper: mappers_js_1.BlobTags }; - var appendBlockFromUrlOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 201: { - headersMapper: Mappers.AppendBlobAppendBlockFromUrlHeaders - }, - default: { - bodyMapper: Mappers.StorageError, - headersMapper: Mappers.AppendBlobAppendBlockFromUrlExceptionHeaders + exports2.transactionalContentMD5 = { + parameterPath: ["options", "transactionalContentMD5"], + mapper: { + serializedName: "Content-MD5", + xmlName: "Content-MD5", + type: { + name: "ByteArray" } - }, - queryParameters: [Parameters.timeoutInSeconds, Parameters.comp22], - urlParameters: [Parameters.url], - headerParameters: [ - Parameters.version, - Parameters.requestId, - Parameters.accept1, - Parameters.contentLength, - Parameters.leaseId, - Parameters.ifModifiedSince, - Parameters.ifUnmodifiedSince, - Parameters.encryptionKey, - Parameters.encryptionKeySha256, - Parameters.encryptionAlgorithm, - Parameters.ifMatch, - Parameters.ifNoneMatch, - Parameters.ifTags, - Parameters.encryptionScope, - Parameters.sourceIfModifiedSince, - Parameters.sourceIfUnmodifiedSince, - Parameters.sourceIfMatch, - Parameters.sourceIfNoneMatch, - Parameters.sourceContentMD5, - Parameters.copySourceAuthorization, - Parameters.fileRequestIntent, - Parameters.transactionalContentMD5, - Parameters.sourceUrl, - Parameters.sourceContentCrc64, - Parameters.maxSize, - Parameters.appendPosition, - Parameters.sourceRange1 - ], - isXML: true, - serializer: xmlSerializer + } }; - var sealOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 200: { - headersMapper: Mappers.AppendBlobSealHeaders - }, - default: { - bodyMapper: 
Mappers.StorageError, - headersMapper: Mappers.AppendBlobSealExceptionHeaders + exports2.transactionalContentCrc64 = { + parameterPath: ["options", "transactionalContentCrc64"], + mapper: { + serializedName: "x-ms-content-crc64", + xmlName: "x-ms-content-crc64", + type: { + name: "ByteArray" } - }, - queryParameters: [Parameters.timeoutInSeconds, Parameters.comp23], - urlParameters: [Parameters.url], - headerParameters: [ - Parameters.version, - Parameters.requestId, - Parameters.accept1, - Parameters.leaseId, - Parameters.ifModifiedSince, - Parameters.ifUnmodifiedSince, - Parameters.ifMatch, - Parameters.ifNoneMatch, - Parameters.appendPosition - ], - isXML: true, - serializer: xmlSerializer + } }; - } -}); - -// node_modules/@azure/storage-blob/dist/commonjs/generated/src/operations/blockBlob.js -var require_blockBlob = __commonJS({ - "node_modules/@azure/storage-blob/dist/commonjs/generated/src/operations/blockBlob.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.BlockBlobImpl = void 0; - var tslib_1 = (init_tslib_es6(), __toCommonJS(tslib_es6_exports)); - var coreClient = tslib_1.__importStar(require_commonjs8()); - var Mappers = tslib_1.__importStar(require_mappers()); - var Parameters = tslib_1.__importStar(require_parameters()); - var BlockBlobImpl = class { - client; - /** - * Initialize a new instance of the class BlockBlob class. - * @param client Reference to the service client - */ - constructor(client) { - this.client = client; + exports2.blobType = { + parameterPath: "blobType", + mapper: { + defaultValue: "PageBlob", + isConstant: true, + serializedName: "x-ms-blob-type", + type: { + name: "String" + } } - /** - * The Upload Block Blob operation updates the content of an existing block blob. Updating an existing - * block blob overwrites any existing metadata on the blob. Partial updates are not supported with Put - * Blob; the content of the existing blob is overwritten with the content of the new blob. To perform a - * partial update of the content of a block blob, use the Put Block List operation. - * @param contentLength The length of the request. - * @param body Initial data - * @param options The options parameters. - */ - upload(contentLength, body, options) { - return this.client.sendOperationRequest({ contentLength, body, options }, uploadOperationSpec); + }; + exports2.blobContentLength = { + parameterPath: "blobContentLength", + mapper: { + serializedName: "x-ms-blob-content-length", + required: true, + xmlName: "x-ms-blob-content-length", + type: { + name: "Number" + } } - /** - * The Put Blob from URL operation creates a new Block Blob where the contents of the blob are read - * from a given URL. This API is supported beginning with the 2020-04-08 version. Partial updates are - * not supported with Put Blob from URL; the content of an existing blob is overwritten with the - * content of the new blob. To perform partial updates to a block blob’s contents using a source URL, - * use the Put Block from URL API in conjunction with Put Block List. - * @param contentLength The length of the request. - * @param copySource Specifies the name of the source page blob snapshot. This value is a URL of up to - * 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it would - * appear in a request URI. The source blob must either be public or must be authenticated via a shared - * access signature. - * @param options The options parameters. 
- */ - putBlobFromUrl(contentLength, copySource, options) { - return this.client.sendOperationRequest({ contentLength, copySource, options }, putBlobFromUrlOperationSpec); + }; + exports2.blobSequenceNumber = { + parameterPath: ["options", "blobSequenceNumber"], + mapper: { + defaultValue: 0, + serializedName: "x-ms-blob-sequence-number", + xmlName: "x-ms-blob-sequence-number", + type: { + name: "Number" + } } - /** - * The Stage Block operation creates a new block to be committed as part of a blob - * @param blockId A valid Base64 string value that identifies the block. Prior to encoding, the string - * must be less than or equal to 64 bytes in size. For a given blob, the length of the value specified - * for the blockid parameter must be the same size for each block. - * @param contentLength The length of the request. - * @param body Initial data - * @param options The options parameters. - */ - stageBlock(blockId, contentLength, body, options) { - return this.client.sendOperationRequest({ blockId, contentLength, body, options }, stageBlockOperationSpec); + }; + exports2.contentType1 = { + parameterPath: ["options", "contentType"], + mapper: { + defaultValue: "application/octet-stream", + isConstant: true, + serializedName: "Content-Type", + type: { + name: "String" + } } - /** - * The Stage Block operation creates a new block to be committed as part of a blob where the contents - * are read from a URL. - * @param blockId A valid Base64 string value that identifies the block. Prior to encoding, the string - * must be less than or equal to 64 bytes in size. For a given blob, the length of the value specified - * for the blockid parameter must be the same size for each block. - * @param contentLength The length of the request. - * @param sourceUrl Specify a URL to the copy source. - * @param options The options parameters. - */ - stageBlockFromURL(blockId, contentLength, sourceUrl, options) { - return this.client.sendOperationRequest({ blockId, contentLength, sourceUrl, options }, stageBlockFromURLOperationSpec); + }; + exports2.body1 = { + parameterPath: "body", + mapper: { + serializedName: "body", + required: true, + xmlName: "body", + type: { + name: "Stream" + } } - /** - * The Commit Block List operation writes a blob by specifying the list of block IDs that make up the - * blob. In order to be written as part of a blob, a block must have been successfully written to the - * server in a prior Put Block operation. You can call Put Block List to update a blob by uploading - * only those blocks that have changed, then committing the new and existing blocks together. You can - * do this by specifying whether to commit a block from the committed block list or from the - * uncommitted block list, or to commit the most recently uploaded version of the block, whichever list - * it may belong to. - * @param blocks Blob Blocks. - * @param options The options parameters. - */ - commitBlockList(blocks, options) { - return this.client.sendOperationRequest({ blocks, options }, commitBlockListOperationSpec); + }; + exports2.accept2 = { + parameterPath: "accept", + mapper: { + defaultValue: "application/xml", + isConstant: true, + serializedName: "Accept", + type: { + name: "String" + } } - /** - * The Get Block List operation retrieves the list of blocks that have been uploaded as part of a block - * blob - * @param listType Specifies whether to return the list of committed blocks, the list of uncommitted - * blocks, or both lists together. - * @param options The options parameters. 
- */ - getBlockList(listType, options) { - return this.client.sendOperationRequest({ listType, options }, getBlockListOperationSpec); + }; + exports2.comp19 = { + parameterPath: "comp", + mapper: { + defaultValue: "page", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } } }; - exports2.BlockBlobImpl = BlockBlobImpl; - var xmlSerializer = coreClient.createSerializer( - Mappers, - /* isXml */ - true - ); - var uploadOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 201: { - headersMapper: Mappers.BlockBlobUploadHeaders - }, - default: { - bodyMapper: Mappers.StorageError, - headersMapper: Mappers.BlockBlobUploadExceptionHeaders + exports2.pageWrite = { + parameterPath: "pageWrite", + mapper: { + defaultValue: "update", + isConstant: true, + serializedName: "x-ms-page-write", + type: { + name: "String" } - }, - requestBody: Parameters.body1, - queryParameters: [Parameters.timeoutInSeconds], - urlParameters: [Parameters.url], - headerParameters: [ - Parameters.version, - Parameters.requestId, - Parameters.contentLength, - Parameters.metadata, - Parameters.leaseId, - Parameters.ifModifiedSince, - Parameters.ifUnmodifiedSince, - Parameters.encryptionKey, - Parameters.encryptionKeySha256, - Parameters.encryptionAlgorithm, - Parameters.ifMatch, - Parameters.ifNoneMatch, - Parameters.ifTags, - Parameters.blobCacheControl, - Parameters.blobContentType, - Parameters.blobContentMD5, - Parameters.blobContentEncoding, - Parameters.blobContentLanguage, - Parameters.blobContentDisposition, - Parameters.immutabilityPolicyExpiry, - Parameters.immutabilityPolicyMode, - Parameters.encryptionScope, - Parameters.tier, - Parameters.blobTagsString, - Parameters.legalHold1, - Parameters.transactionalContentMD5, - Parameters.transactionalContentCrc64, - Parameters.contentType1, - Parameters.accept2, - Parameters.blobType2 + } + }; + exports2.ifSequenceNumberLessThanOrEqualTo = { + parameterPath: [ + "options", + "sequenceNumberAccessConditions", + "ifSequenceNumberLessThanOrEqualTo" ], - isXML: true, - contentType: "application/xml; charset=utf-8", - mediaType: "binary", - serializer: xmlSerializer + mapper: { + serializedName: "x-ms-if-sequence-number-le", + xmlName: "x-ms-if-sequence-number-le", + type: { + name: "Number" + } + } }; - var putBlobFromUrlOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 201: { - headersMapper: Mappers.BlockBlobPutBlobFromUrlHeaders - }, - default: { - bodyMapper: Mappers.StorageError, - headersMapper: Mappers.BlockBlobPutBlobFromUrlExceptionHeaders + exports2.ifSequenceNumberLessThan = { + parameterPath: [ + "options", + "sequenceNumberAccessConditions", + "ifSequenceNumberLessThan" + ], + mapper: { + serializedName: "x-ms-if-sequence-number-lt", + xmlName: "x-ms-if-sequence-number-lt", + type: { + name: "Number" } - }, - queryParameters: [Parameters.timeoutInSeconds], - urlParameters: [Parameters.url], - headerParameters: [ - Parameters.version, - Parameters.requestId, - Parameters.accept1, - Parameters.contentLength, - Parameters.metadata, - Parameters.leaseId, - Parameters.ifModifiedSince, - Parameters.ifUnmodifiedSince, - Parameters.encryptionKey, - Parameters.encryptionKeySha256, - Parameters.encryptionAlgorithm, - Parameters.ifMatch, - Parameters.ifNoneMatch, - Parameters.ifTags, - Parameters.blobCacheControl, - Parameters.blobContentType, - Parameters.blobContentMD5, - Parameters.blobContentEncoding, - Parameters.blobContentLanguage, - 
Parameters.blobContentDisposition, - Parameters.encryptionScope, - Parameters.tier, - Parameters.sourceIfModifiedSince, - Parameters.sourceIfUnmodifiedSince, - Parameters.sourceIfMatch, - Parameters.sourceIfNoneMatch, - Parameters.sourceIfTags, - Parameters.copySource, - Parameters.blobTagsString, - Parameters.sourceContentMD5, - Parameters.copySourceAuthorization, - Parameters.copySourceTags, - Parameters.fileRequestIntent, - Parameters.transactionalContentMD5, - Parameters.blobType2, - Parameters.copySourceBlobProperties + } + }; + exports2.ifSequenceNumberEqualTo = { + parameterPath: [ + "options", + "sequenceNumberAccessConditions", + "ifSequenceNumberEqualTo" ], - isXML: true, - serializer: xmlSerializer + mapper: { + serializedName: "x-ms-if-sequence-number-eq", + xmlName: "x-ms-if-sequence-number-eq", + type: { + name: "Number" + } + } }; - var stageBlockOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 201: { - headersMapper: Mappers.BlockBlobStageBlockHeaders - }, - default: { - bodyMapper: Mappers.StorageError, - headersMapper: Mappers.BlockBlobStageBlockExceptionHeaders + exports2.pageWrite1 = { + parameterPath: "pageWrite", + mapper: { + defaultValue: "clear", + isConstant: true, + serializedName: "x-ms-page-write", + type: { + name: "String" } - }, - requestBody: Parameters.body1, - queryParameters: [ - Parameters.timeoutInSeconds, - Parameters.comp24, - Parameters.blockId - ], - urlParameters: [Parameters.url], - headerParameters: [ - Parameters.version, - Parameters.requestId, - Parameters.contentLength, - Parameters.leaseId, - Parameters.encryptionKey, - Parameters.encryptionKeySha256, - Parameters.encryptionAlgorithm, - Parameters.encryptionScope, - Parameters.transactionalContentMD5, - Parameters.transactionalContentCrc64, - Parameters.contentType1, - Parameters.accept2 - ], - isXML: true, - contentType: "application/xml; charset=utf-8", - mediaType: "binary", - serializer: xmlSerializer + } }; - var stageBlockFromURLOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 201: { - headersMapper: Mappers.BlockBlobStageBlockFromURLHeaders - }, - default: { - bodyMapper: Mappers.StorageError, - headersMapper: Mappers.BlockBlobStageBlockFromURLExceptionHeaders + exports2.sourceUrl = { + parameterPath: "sourceUrl", + mapper: { + serializedName: "x-ms-copy-source", + required: true, + xmlName: "x-ms-copy-source", + type: { + name: "String" } - }, - queryParameters: [ - Parameters.timeoutInSeconds, - Parameters.comp24, - Parameters.blockId - ], - urlParameters: [Parameters.url], - headerParameters: [ - Parameters.version, - Parameters.requestId, - Parameters.accept1, - Parameters.contentLength, - Parameters.leaseId, - Parameters.encryptionKey, - Parameters.encryptionKeySha256, - Parameters.encryptionAlgorithm, - Parameters.encryptionScope, - Parameters.sourceIfModifiedSince, - Parameters.sourceIfUnmodifiedSince, - Parameters.sourceIfMatch, - Parameters.sourceIfNoneMatch, - Parameters.sourceContentMD5, - Parameters.copySourceAuthorization, - Parameters.fileRequestIntent, - Parameters.sourceUrl, - Parameters.sourceContentCrc64, - Parameters.sourceRange1 - ], - isXML: true, - serializer: xmlSerializer + } + }; + exports2.sourceRange = { + parameterPath: "sourceRange", + mapper: { + serializedName: "x-ms-source-range", + required: true, + xmlName: "x-ms-source-range", + type: { + name: "String" + } + } + }; + exports2.sourceContentCrc64 = { + parameterPath: ["options", "sourceContentCrc64"], + mapper: { + 
serializedName: "x-ms-source-content-crc64", + xmlName: "x-ms-source-content-crc64", + type: { + name: "ByteArray" + } + } }; - var commitBlockListOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 201: { - headersMapper: Mappers.BlockBlobCommitBlockListHeaders - }, - default: { - bodyMapper: Mappers.StorageError, - headersMapper: Mappers.BlockBlobCommitBlockListExceptionHeaders + exports2.range1 = { + parameterPath: "range", + mapper: { + serializedName: "x-ms-range", + required: true, + xmlName: "x-ms-range", + type: { + name: "String" } - }, - requestBody: Parameters.blocks, - queryParameters: [Parameters.timeoutInSeconds, Parameters.comp25], - urlParameters: [Parameters.url], - headerParameters: [ - Parameters.contentType, - Parameters.accept, - Parameters.version, - Parameters.requestId, - Parameters.metadata, - Parameters.leaseId, - Parameters.ifModifiedSince, - Parameters.ifUnmodifiedSince, - Parameters.encryptionKey, - Parameters.encryptionKeySha256, - Parameters.encryptionAlgorithm, - Parameters.ifMatch, - Parameters.ifNoneMatch, - Parameters.ifTags, - Parameters.blobCacheControl, - Parameters.blobContentType, - Parameters.blobContentMD5, - Parameters.blobContentEncoding, - Parameters.blobContentLanguage, - Parameters.blobContentDisposition, - Parameters.immutabilityPolicyExpiry, - Parameters.immutabilityPolicyMode, - Parameters.encryptionScope, - Parameters.tier, - Parameters.blobTagsString, - Parameters.legalHold1, - Parameters.transactionalContentMD5, - Parameters.transactionalContentCrc64 - ], - isXML: true, - contentType: "application/xml; charset=utf-8", - mediaType: "xml", - serializer: xmlSerializer + } }; - var getBlockListOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "GET", - responses: { - 200: { - bodyMapper: Mappers.BlockList, - headersMapper: Mappers.BlockBlobGetBlockListHeaders - }, - default: { - bodyMapper: Mappers.StorageError, - headersMapper: Mappers.BlockBlobGetBlockListExceptionHeaders + exports2.comp20 = { + parameterPath: "comp", + mapper: { + defaultValue: "pagelist", + isConstant: true, + serializedName: "comp", + type: { + name: "String" } - }, - queryParameters: [ - Parameters.timeoutInSeconds, - Parameters.snapshot, - Parameters.comp25, - Parameters.listType - ], - urlParameters: [Parameters.url], - headerParameters: [ - Parameters.version, - Parameters.requestId, - Parameters.accept1, - Parameters.leaseId, - Parameters.ifTags - ], - isXML: true, - serializer: xmlSerializer + } }; - } -}); - -// node_modules/@azure/storage-blob/dist/commonjs/generated/src/operations/index.js -var require_operations = __commonJS({ - "node_modules/@azure/storage-blob/dist/commonjs/generated/src/operations/index.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - var tslib_1 = (init_tslib_es6(), __toCommonJS(tslib_es6_exports)); - tslib_1.__exportStar(require_service(), exports2); - tslib_1.__exportStar(require_container(), exports2); - tslib_1.__exportStar(require_blob(), exports2); - tslib_1.__exportStar(require_pageBlob(), exports2); - tslib_1.__exportStar(require_appendBlob(), exports2); - tslib_1.__exportStar(require_blockBlob(), exports2); - } -}); - -// node_modules/@azure/storage-blob/dist/commonjs/generated/src/storageClient.js -var require_storageClient = __commonJS({ - "node_modules/@azure/storage-blob/dist/commonjs/generated/src/storageClient.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - 
exports2.StorageClient = void 0; - var tslib_1 = (init_tslib_es6(), __toCommonJS(tslib_es6_exports)); - var coreHttpCompat = tslib_1.__importStar(require_commonjs9()); - var index_js_1 = require_operations(); - var StorageClient = class extends coreHttpCompat.ExtendedServiceClient { - url; - version; - /** - * Initializes a new instance of the StorageClient class. - * @param url The URL of the service account, container, or blob that is the target of the desired - * operation. - * @param options The parameter options - */ - constructor(url2, options) { - if (url2 === void 0) { - throw new Error("'url' cannot be null"); + exports2.prevsnapshot = { + parameterPath: ["options", "prevsnapshot"], + mapper: { + serializedName: "prevsnapshot", + xmlName: "prevsnapshot", + type: { + name: "String" } - if (!options) { - options = {}; + } + }; + exports2.prevSnapshotUrl = { + parameterPath: ["options", "prevSnapshotUrl"], + mapper: { + serializedName: "x-ms-previous-snapshot-url", + xmlName: "x-ms-previous-snapshot-url", + type: { + name: "String" } - const defaults = { - requestContentType: "application/json; charset=utf-8" - }; - const packageDetails = `azsdk-js-azure-storage-blob/12.29.1`; - const userAgentPrefix = options.userAgentOptions && options.userAgentOptions.userAgentPrefix ? `${options.userAgentOptions.userAgentPrefix} ${packageDetails}` : `${packageDetails}`; - const optionsWithDefaults = { - ...defaults, - ...options, - userAgentOptions: { - userAgentPrefix - }, - endpoint: options.endpoint ?? options.baseUri ?? "{url}" - }; - super(optionsWithDefaults); - this.url = url2; - this.version = options.version || "2025-11-05"; - this.service = new index_js_1.ServiceImpl(this); - this.container = new index_js_1.ContainerImpl(this); - this.blob = new index_js_1.BlobImpl(this); - this.pageBlob = new index_js_1.PageBlobImpl(this); - this.appendBlob = new index_js_1.AppendBlobImpl(this); - this.blockBlob = new index_js_1.BlockBlobImpl(this); } - service; - container; - blob; - pageBlob; - appendBlob; - blockBlob; }; - exports2.StorageClient = StorageClient; - } -}); - -// node_modules/@azure/storage-blob/dist/commonjs/generated/src/operationsInterfaces/service.js -var require_service2 = __commonJS({ - "node_modules/@azure/storage-blob/dist/commonjs/generated/src/operationsInterfaces/service.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - } -}); - -// node_modules/@azure/storage-blob/dist/commonjs/generated/src/operationsInterfaces/container.js -var require_container2 = __commonJS({ - "node_modules/@azure/storage-blob/dist/commonjs/generated/src/operationsInterfaces/container.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - } -}); - -// node_modules/@azure/storage-blob/dist/commonjs/generated/src/operationsInterfaces/blob.js -var require_blob2 = __commonJS({ - "node_modules/@azure/storage-blob/dist/commonjs/generated/src/operationsInterfaces/blob.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - } -}); - -// node_modules/@azure/storage-blob/dist/commonjs/generated/src/operationsInterfaces/pageBlob.js -var require_pageBlob2 = __commonJS({ - "node_modules/@azure/storage-blob/dist/commonjs/generated/src/operationsInterfaces/pageBlob.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - } -}); - -// node_modules/@azure/storage-blob/dist/commonjs/generated/src/operationsInterfaces/appendBlob.js -var 
require_appendBlob2 = __commonJS({ - "node_modules/@azure/storage-blob/dist/commonjs/generated/src/operationsInterfaces/appendBlob.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - } -}); - -// node_modules/@azure/storage-blob/dist/commonjs/generated/src/operationsInterfaces/blockBlob.js -var require_blockBlob2 = __commonJS({ - "node_modules/@azure/storage-blob/dist/commonjs/generated/src/operationsInterfaces/blockBlob.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - } -}); - -// node_modules/@azure/storage-blob/dist/commonjs/generated/src/operationsInterfaces/index.js -var require_operationsInterfaces = __commonJS({ - "node_modules/@azure/storage-blob/dist/commonjs/generated/src/operationsInterfaces/index.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - var tslib_1 = (init_tslib_es6(), __toCommonJS(tslib_es6_exports)); - tslib_1.__exportStar(require_service2(), exports2); - tslib_1.__exportStar(require_container2(), exports2); - tslib_1.__exportStar(require_blob2(), exports2); - tslib_1.__exportStar(require_pageBlob2(), exports2); - tslib_1.__exportStar(require_appendBlob2(), exports2); - tslib_1.__exportStar(require_blockBlob2(), exports2); - } -}); - -// node_modules/@azure/storage-blob/dist/commonjs/generated/src/index.js -var require_src2 = __commonJS({ - "node_modules/@azure/storage-blob/dist/commonjs/generated/src/index.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.StorageClient = void 0; - var tslib_1 = (init_tslib_es6(), __toCommonJS(tslib_es6_exports)); - tslib_1.__exportStar(require_models(), exports2); - var storageClient_js_1 = require_storageClient(); - Object.defineProperty(exports2, "StorageClient", { enumerable: true, get: function() { - return storageClient_js_1.StorageClient; - } }); - tslib_1.__exportStar(require_operationsInterfaces(), exports2); - } -}); - -// node_modules/@azure/storage-blob/dist/commonjs/StorageContextClient.js -var require_StorageContextClient = __commonJS({ - "node_modules/@azure/storage-blob/dist/commonjs/StorageContextClient.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.StorageContextClient = void 0; - var index_js_1 = require_src2(); - var StorageContextClient = class extends index_js_1.StorageClient { - async sendOperationRequest(operationArguments, operationSpec) { - const operationSpecToSend = { ...operationSpec }; - if (operationSpecToSend.path === "/{containerName}" || operationSpecToSend.path === "/{containerName}/{blob}") { - operationSpecToSend.path = ""; + exports2.sequenceNumberAction = { + parameterPath: "sequenceNumberAction", + mapper: { + serializedName: "x-ms-sequence-number-action", + required: true, + xmlName: "x-ms-sequence-number-action", + type: { + name: "Enum", + allowedValues: ["max", "update", "increment"] } - return super.sendOperationRequest(operationArguments, operationSpecToSend); } }; - exports2.StorageContextClient = StorageContextClient; - } -}); - -// node_modules/@azure/storage-blob/dist/commonjs/StorageClient.js -var require_StorageClient = __commonJS({ - "node_modules/@azure/storage-blob/dist/commonjs/StorageClient.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.StorageClient = void 0; - var StorageContextClient_js_1 = require_StorageContextClient(); - var Pipeline_js_1 = 
require_Pipeline(); - var utils_common_js_1 = require_utils_common(); - var StorageClient = class { - /** - * Encoded URL string value. - */ - url; - accountName; - /** - * Request policy pipeline. - * - * @internal - */ - pipeline; - /** - * Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used. - */ - credential; - /** - * StorageClient is a reference to protocol layer operations entry, which is - * generated by AutoRest generator. - */ - storageClientContext; - /** - */ - isHttps; - /** - * Creates an instance of StorageClient. - * @param url - url to resource - * @param pipeline - request policy pipeline. - */ - constructor(url2, pipeline) { - this.url = (0, utils_common_js_1.escapeURLPath)(url2); - this.accountName = (0, utils_common_js_1.getAccountNameFromUrl)(url2); - this.pipeline = pipeline; - this.storageClientContext = new StorageContextClient_js_1.StorageContextClient(this.url, (0, Pipeline_js_1.getCoreClientOptions)(pipeline)); - this.isHttps = (0, utils_common_js_1.iEqual)((0, utils_common_js_1.getURLScheme)(this.url) || "", "https"); - this.credential = (0, Pipeline_js_1.getCredentialFromPipeline)(pipeline); - const storageClientContext = this.storageClientContext; - storageClientContext.requestContentType = void 0; + exports2.comp21 = { + parameterPath: "comp", + mapper: { + defaultValue: "incrementalcopy", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } } }; - exports2.StorageClient = StorageClient; - } -}); - -// node_modules/@azure/storage-blob/dist/commonjs/utils/tracing.js -var require_tracing = __commonJS({ - "node_modules/@azure/storage-blob/dist/commonjs/utils/tracing.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.tracingClient = void 0; - var core_tracing_1 = require_commonjs5(); - var constants_js_1 = require_constants15(); - exports2.tracingClient = (0, core_tracing_1.createTracingClient)({ - packageName: "@azure/storage-blob", - packageVersion: constants_js_1.SDK_VERSION, - namespace: "Microsoft.Storage" - }); - } -}); - -// node_modules/@azure/storage-blob/dist/commonjs/sas/BlobSASPermissions.js -var require_BlobSASPermissions = __commonJS({ - "node_modules/@azure/storage-blob/dist/commonjs/sas/BlobSASPermissions.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.BlobSASPermissions = void 0; - var BlobSASPermissions = class _BlobSASPermissions { - /** - * Creates a {@link BlobSASPermissions} from the specified permissions string. This method will throw an - * Error if it encounters a character that does not correspond to a valid permission. 
- * - * @param permissions - - */ - static parse(permissions) { - const blobSASPermissions = new _BlobSASPermissions(); - for (const char of permissions) { - switch (char) { - case "r": - blobSASPermissions.read = true; - break; - case "a": - blobSASPermissions.add = true; - break; - case "c": - blobSASPermissions.create = true; - break; - case "w": - blobSASPermissions.write = true; - break; - case "d": - blobSASPermissions.delete = true; - break; - case "x": - blobSASPermissions.deleteVersion = true; - break; - case "t": - blobSASPermissions.tag = true; - break; - case "m": - blobSASPermissions.move = true; - break; - case "e": - blobSASPermissions.execute = true; - break; - case "i": - blobSASPermissions.setImmutabilityPolicy = true; - break; - case "y": - blobSASPermissions.permanentDelete = true; - break; - default: - throw new RangeError(`Invalid permission: ${char}`); - } + exports2.blobType1 = { + parameterPath: "blobType", + mapper: { + defaultValue: "AppendBlob", + isConstant: true, + serializedName: "x-ms-blob-type", + type: { + name: "String" } - return blobSASPermissions; } - /** - * Creates a {@link BlobSASPermissions} from a raw object which contains same keys as it - * and boolean values for them. - * - * @param permissionLike - - */ - static from(permissionLike) { - const blobSASPermissions = new _BlobSASPermissions(); - if (permissionLike.read) { - blobSASPermissions.read = true; + }; + exports2.comp22 = { + parameterPath: "comp", + mapper: { + defaultValue: "appendblock", + isConstant: true, + serializedName: "comp", + type: { + name: "String" } - if (permissionLike.add) { - blobSASPermissions.add = true; + } + }; + exports2.maxSize = { + parameterPath: ["options", "appendPositionAccessConditions", "maxSize"], + mapper: { + serializedName: "x-ms-blob-condition-maxsize", + xmlName: "x-ms-blob-condition-maxsize", + type: { + name: "Number" } - if (permissionLike.create) { - blobSASPermissions.create = true; + } + }; + exports2.appendPosition = { + parameterPath: [ + "options", + "appendPositionAccessConditions", + "appendPosition" + ], + mapper: { + serializedName: "x-ms-blob-condition-appendpos", + xmlName: "x-ms-blob-condition-appendpos", + type: { + name: "Number" } - if (permissionLike.write) { - blobSASPermissions.write = true; + } + }; + exports2.sourceRange1 = { + parameterPath: ["options", "sourceRange"], + mapper: { + serializedName: "x-ms-source-range", + xmlName: "x-ms-source-range", + type: { + name: "String" } - if (permissionLike.delete) { - blobSASPermissions.delete = true; + } + }; + exports2.comp23 = { + parameterPath: "comp", + mapper: { + defaultValue: "seal", + isConstant: true, + serializedName: "comp", + type: { + name: "String" } - if (permissionLike.deleteVersion) { - blobSASPermissions.deleteVersion = true; + } + }; + exports2.blobType2 = { + parameterPath: "blobType", + mapper: { + defaultValue: "BlockBlob", + isConstant: true, + serializedName: "x-ms-blob-type", + type: { + name: "String" } - if (permissionLike.tag) { - blobSASPermissions.tag = true; + } + }; + exports2.copySourceBlobProperties = { + parameterPath: ["options", "copySourceBlobProperties"], + mapper: { + serializedName: "x-ms-copy-source-blob-properties", + xmlName: "x-ms-copy-source-blob-properties", + type: { + name: "Boolean" } - if (permissionLike.move) { - blobSASPermissions.move = true; + } + }; + exports2.comp24 = { + parameterPath: "comp", + mapper: { + defaultValue: "block", + isConstant: true, + serializedName: "comp", + type: { + name: "String" } - if 
(permissionLike.execute) { - blobSASPermissions.execute = true; + } + }; + exports2.blockId = { + parameterPath: "blockId", + mapper: { + serializedName: "blockid", + required: true, + xmlName: "blockid", + type: { + name: "String" } - if (permissionLike.setImmutabilityPolicy) { - blobSASPermissions.setImmutabilityPolicy = true; + } + }; + exports2.blocks = { + parameterPath: "blocks", + mapper: mappers_js_1.BlockLookupList + }; + exports2.comp25 = { + parameterPath: "comp", + mapper: { + defaultValue: "blocklist", + isConstant: true, + serializedName: "comp", + type: { + name: "String" } - if (permissionLike.permanentDelete) { - blobSASPermissions.permanentDelete = true; + } + }; + exports2.listType = { + parameterPath: "listType", + mapper: { + defaultValue: "committed", + serializedName: "blocklisttype", + required: true, + xmlName: "blocklisttype", + type: { + name: "Enum", + allowedValues: ["committed", "uncommitted", "all"] } - return blobSASPermissions; } + }; + } +}); + +// node_modules/@azure/storage-blob/dist/commonjs/generated/src/operations/service.js +var require_service = __commonJS({ + "node_modules/@azure/storage-blob/dist/commonjs/generated/src/operations/service.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.ServiceImpl = void 0; + var tslib_1 = (init_tslib_es6(), __toCommonJS(tslib_es6_exports)); + var coreClient = tslib_1.__importStar(require_commonjs8()); + var Mappers = tslib_1.__importStar(require_mappers()); + var Parameters = tslib_1.__importStar(require_parameters()); + var ServiceImpl = class { + client; /** - * Specifies Read access granted. - */ - read = false; - /** - * Specifies Add access granted. - */ - add = false; - /** - * Specifies Create access granted. - */ - create = false; - /** - * Specifies Write access granted. + * Initialize a new instance of the class Service class. + * @param client Reference to the service client */ - write = false; + constructor(client) { + this.client = client; + } /** - * Specifies Delete access granted. + * Sets properties for a storage account's Blob service endpoint, including properties for Storage + * Analytics and CORS (Cross-Origin Resource Sharing) rules + * @param blobServiceProperties The StorageService properties. + * @param options The options parameters. */ - delete = false; + setProperties(blobServiceProperties, options) { + return this.client.sendOperationRequest({ blobServiceProperties, options }, setPropertiesOperationSpec); + } /** - * Specifies Delete version access granted. + * gets the properties of a storage account's Blob service, including properties for Storage Analytics + * and CORS (Cross-Origin Resource Sharing) rules. + * @param options The options parameters. */ - deleteVersion = false; + getProperties(options) { + return this.client.sendOperationRequest({ options }, getPropertiesOperationSpec); + } /** - * Specfies Tag access granted. + * Retrieves statistics related to replication for the Blob service. It is only available on the + * secondary location endpoint when read-access geo-redundant replication is enabled for the storage + * account. + * @param options The options parameters. */ - tag = false; + getStatistics(options) { + return this.client.sendOperationRequest({ options }, getStatisticsOperationSpec); + } /** - * Specifies Move access granted. + * The List Containers Segment operation returns a list of the containers under the specified account + * @param options The options parameters. 
*/ - move = false; + listContainersSegment(options) { + return this.client.sendOperationRequest({ options }, listContainersSegmentOperationSpec); + } /** - * Specifies Execute access granted. + * Retrieves a user delegation key for the Blob service. This is only a valid operation when using + * bearer token authentication. + * @param keyInfo Key information + * @param options The options parameters. */ - execute = false; + getUserDelegationKey(keyInfo, options) { + return this.client.sendOperationRequest({ keyInfo, options }, getUserDelegationKeyOperationSpec); + } /** - * Specifies SetImmutabilityPolicy access granted. + * Returns the sku name and account kind + * @param options The options parameters. */ - setImmutabilityPolicy = false; + getAccountInfo(options) { + return this.client.sendOperationRequest({ options }, getAccountInfoOperationSpec); + } /** - * Specifies that Permanent Delete is permitted. + * The Batch operation allows multiple API calls to be embedded into a single HTTP request. + * @param contentLength The length of the request. + * @param multipartContentType Required. The value of this header must be multipart/mixed with a batch + * boundary. Example header value: multipart/mixed; boundary=batch_ + * @param body Initial data + * @param options The options parameters. */ - permanentDelete = false; + submitBatch(contentLength, multipartContentType, body, options) { + return this.client.sendOperationRequest({ contentLength, multipartContentType, body, options }, submitBatchOperationSpec); + } /** - * Converts the given permissions to a string. Using this method will guarantee the permissions are in an - * order accepted by the service. - * - * @returns A string which represents the BlobSASPermissions + * The Filter Blobs operation enables callers to list blobs across all containers whose tags match a + * given search expression. Filter blobs searches across all containers within a storage account but + * can be scoped within the expression to a single container. + * @param options The options parameters. 
*/ - toString() { - const permissions = []; - if (this.read) { - permissions.push("r"); - } - if (this.add) { - permissions.push("a"); - } - if (this.create) { - permissions.push("c"); - } - if (this.write) { - permissions.push("w"); + filterBlobs(options) { + return this.client.sendOperationRequest({ options }, filterBlobsOperationSpec); + } + }; + exports2.ServiceImpl = ServiceImpl; + var xmlSerializer = coreClient.createSerializer( + Mappers, + /* isXml */ + true + ); + var setPropertiesOperationSpec = { + path: "/", + httpMethod: "PUT", + responses: { + 202: { + headersMapper: Mappers.ServiceSetPropertiesHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ServiceSetPropertiesExceptionHeaders } - if (this.delete) { - permissions.push("d"); + }, + requestBody: Parameters.blobServiceProperties, + queryParameters: [ + Parameters.restype, + Parameters.comp, + Parameters.timeoutInSeconds + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.contentType, + Parameters.accept, + Parameters.version, + Parameters.requestId + ], + isXML: true, + contentType: "application/xml; charset=utf-8", + mediaType: "xml", + serializer: xmlSerializer + }; + var getPropertiesOperationSpec = { + path: "/", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.BlobServiceProperties, + headersMapper: Mappers.ServiceGetPropertiesHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ServiceGetPropertiesExceptionHeaders } - if (this.deleteVersion) { - permissions.push("x"); + }, + queryParameters: [ + Parameters.restype, + Parameters.comp, + Parameters.timeoutInSeconds + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1 + ], + isXML: true, + serializer: xmlSerializer + }; + var getStatisticsOperationSpec = { + path: "/", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.BlobServiceStatistics, + headersMapper: Mappers.ServiceGetStatisticsHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ServiceGetStatisticsExceptionHeaders } - if (this.tag) { - permissions.push("t"); + }, + queryParameters: [ + Parameters.restype, + Parameters.timeoutInSeconds, + Parameters.comp1 + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1 + ], + isXML: true, + serializer: xmlSerializer + }; + var listContainersSegmentOperationSpec = { + path: "/", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.ListContainersSegmentResponse, + headersMapper: Mappers.ServiceListContainersSegmentHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ServiceListContainersSegmentExceptionHeaders } - if (this.move) { - permissions.push("m"); + }, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.comp2, + Parameters.prefix, + Parameters.marker, + Parameters.maxPageSize, + Parameters.include + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1 + ], + isXML: true, + serializer: xmlSerializer + }; + var getUserDelegationKeyOperationSpec = { + path: "/", + httpMethod: "POST", + responses: { + 200: { + bodyMapper: Mappers.UserDelegationKey, + headersMapper: Mappers.ServiceGetUserDelegationKeyHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ServiceGetUserDelegationKeyExceptionHeaders } - 
if (this.execute) { - permissions.push("e"); + }, + requestBody: Parameters.keyInfo, + queryParameters: [ + Parameters.restype, + Parameters.timeoutInSeconds, + Parameters.comp3 + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.contentType, + Parameters.accept, + Parameters.version, + Parameters.requestId + ], + isXML: true, + contentType: "application/xml; charset=utf-8", + mediaType: "xml", + serializer: xmlSerializer + }; + var getAccountInfoOperationSpec = { + path: "/", + httpMethod: "GET", + responses: { + 200: { + headersMapper: Mappers.ServiceGetAccountInfoHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ServiceGetAccountInfoExceptionHeaders } - if (this.setImmutabilityPolicy) { - permissions.push("i"); + }, + queryParameters: [ + Parameters.comp, + Parameters.timeoutInSeconds, + Parameters.restype1 + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1 + ], + isXML: true, + serializer: xmlSerializer + }; + var submitBatchOperationSpec = { + path: "/", + httpMethod: "POST", + responses: { + 202: { + bodyMapper: { + type: { name: "Stream" }, + serializedName: "parsedResponse" + }, + headersMapper: Mappers.ServiceSubmitBatchHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ServiceSubmitBatchExceptionHeaders } - if (this.permanentDelete) { - permissions.push("y"); + }, + requestBody: Parameters.body, + queryParameters: [Parameters.timeoutInSeconds, Parameters.comp4], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.accept, + Parameters.version, + Parameters.requestId, + Parameters.contentLength, + Parameters.multipartContentType + ], + isXML: true, + contentType: "application/xml; charset=utf-8", + mediaType: "xml", + serializer: xmlSerializer + }; + var filterBlobsOperationSpec = { + path: "/", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.FilterBlobSegment, + headersMapper: Mappers.ServiceFilterBlobsHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ServiceFilterBlobsExceptionHeaders } - return permissions.join(""); - } + }, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.marker, + Parameters.maxPageSize, + Parameters.comp5, + Parameters.where + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1 + ], + isXML: true, + serializer: xmlSerializer }; - exports2.BlobSASPermissions = BlobSASPermissions; } }); -// node_modules/@azure/storage-blob/dist/commonjs/sas/ContainerSASPermissions.js -var require_ContainerSASPermissions = __commonJS({ - "node_modules/@azure/storage-blob/dist/commonjs/sas/ContainerSASPermissions.js"(exports2) { +// node_modules/@azure/storage-blob/dist/commonjs/generated/src/operations/container.js +var require_container = __commonJS({ + "node_modules/@azure/storage-blob/dist/commonjs/generated/src/operations/container.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.ContainerSASPermissions = void 0; - var ContainerSASPermissions = class _ContainerSASPermissions { + exports2.ContainerImpl = void 0; + var tslib_1 = (init_tslib_es6(), __toCommonJS(tslib_es6_exports)); + var coreClient = tslib_1.__importStar(require_commonjs8()); + var Mappers = tslib_1.__importStar(require_mappers()); + var Parameters = tslib_1.__importStar(require_parameters()); + var ContainerImpl = class { 
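// --- Editor's note, not part of the patch: a minimal, hedged sketch of how the generated
// --- ServiceImpl operations above (getAccountInfo, getUserDelegationKey, filterBlobs)
// --- are reached through the public @azure/storage-blob API. The account URL, the use of
// --- DefaultAzureCredential, and the tag expression are illustrative assumptions only.
const { BlobServiceClient } = require("@azure/storage-blob");
const { DefaultAzureCredential } = require("@azure/identity");

async function serviceOperationsSketch() {
  // Hypothetical storage account endpoint; substitute a real one.
  const service = new BlobServiceClient(
    "https://myaccount.blob.core.windows.net",
    new DefaultAzureCredential()
  );

  // Maps onto getAccountInfoOperationSpec above.
  const accountInfo = await service.getAccountInfo();
  console.log(accountInfo.skuName, accountInfo.accountKind);

  // getUserDelegationKey is only valid with bearer-token (AAD) authentication,
  // as the JSDoc on the generated client notes.
  const now = new Date();
  const inOneHour = new Date(now.valueOf() + 60 * 60 * 1000);
  const udk = await service.getUserDelegationKey(now, inOneHour);
  console.log(udk.signedObjectId, udk.signedExpiresOn);

  // findBlobsByTags drives the filterBlobsOperationSpec defined above.
  for await (const blob of service.findBlobsByTags("project='codeql'")) {
    console.log(blob.containerName, blob.name);
  }
}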
+ client; /** - * Creates an {@link ContainerSASPermissions} from the specified permissions string. This method will throw an - * Error if it encounters a character that does not correspond to a valid permission. - * - * @param permissions - + * Initialize a new instance of the class Container class. + * @param client Reference to the service client */ - static parse(permissions) { - const containerSASPermissions = new _ContainerSASPermissions(); - for (const char of permissions) { - switch (char) { - case "r": - containerSASPermissions.read = true; - break; - case "a": - containerSASPermissions.add = true; - break; - case "c": - containerSASPermissions.create = true; - break; - case "w": - containerSASPermissions.write = true; - break; - case "d": - containerSASPermissions.delete = true; - break; - case "l": - containerSASPermissions.list = true; - break; - case "t": - containerSASPermissions.tag = true; - break; - case "x": - containerSASPermissions.deleteVersion = true; - break; - case "m": - containerSASPermissions.move = true; - break; - case "e": - containerSASPermissions.execute = true; - break; - case "i": - containerSASPermissions.setImmutabilityPolicy = true; - break; - case "y": - containerSASPermissions.permanentDelete = true; - break; - case "f": - containerSASPermissions.filterByTags = true; - break; - default: - throw new RangeError(`Invalid permission ${char}`); - } - } - return containerSASPermissions; + constructor(client) { + this.client = client; + } + /** + * creates a new container under the specified account. If the container with the same name already + * exists, the operation fails + * @param options The options parameters. + */ + create(options) { + return this.client.sendOperationRequest({ options }, createOperationSpec); + } + /** + * returns all user-defined metadata and system properties for the specified container. The data + * returned does not include the container's list of blobs + * @param options The options parameters. + */ + getProperties(options) { + return this.client.sendOperationRequest({ options }, getPropertiesOperationSpec); } /** - * Creates a {@link ContainerSASPermissions} from a raw object which contains same keys as it - * and boolean values for them. - * - * @param permissionLike - + * operation marks the specified container for deletion. The container and any blobs contained within + * it are later deleted during garbage collection + * @param options The options parameters. 
*/ - static from(permissionLike) { - const containerSASPermissions = new _ContainerSASPermissions(); - if (permissionLike.read) { - containerSASPermissions.read = true; - } - if (permissionLike.add) { - containerSASPermissions.add = true; - } - if (permissionLike.create) { - containerSASPermissions.create = true; - } - if (permissionLike.write) { - containerSASPermissions.write = true; - } - if (permissionLike.delete) { - containerSASPermissions.delete = true; - } - if (permissionLike.list) { - containerSASPermissions.list = true; - } - if (permissionLike.deleteVersion) { - containerSASPermissions.deleteVersion = true; - } - if (permissionLike.tag) { - containerSASPermissions.tag = true; - } - if (permissionLike.move) { - containerSASPermissions.move = true; - } - if (permissionLike.execute) { - containerSASPermissions.execute = true; - } - if (permissionLike.setImmutabilityPolicy) { - containerSASPermissions.setImmutabilityPolicy = true; - } - if (permissionLike.permanentDelete) { - containerSASPermissions.permanentDelete = true; - } - if (permissionLike.filterByTags) { - containerSASPermissions.filterByTags = true; - } - return containerSASPermissions; + delete(options) { + return this.client.sendOperationRequest({ options }, deleteOperationSpec); } /** - * Specifies Read access granted. + * operation sets one or more user-defined name-value pairs for the specified container. + * @param options The options parameters. */ - read = false; + setMetadata(options) { + return this.client.sendOperationRequest({ options }, setMetadataOperationSpec); + } /** - * Specifies Add access granted. + * gets the permissions for the specified container. The permissions indicate whether container data + * may be accessed publicly. + * @param options The options parameters. */ - add = false; + getAccessPolicy(options) { + return this.client.sendOperationRequest({ options }, getAccessPolicyOperationSpec); + } /** - * Specifies Create access granted. + * sets the permissions for the specified container. The permissions indicate whether blobs in a + * container may be accessed publicly. + * @param options The options parameters. */ - create = false; + setAccessPolicy(options) { + return this.client.sendOperationRequest({ options }, setAccessPolicyOperationSpec); + } /** - * Specifies Write access granted. + * Restores a previously-deleted container. + * @param options The options parameters. */ - write = false; + restore(options) { + return this.client.sendOperationRequest({ options }, restoreOperationSpec); + } /** - * Specifies Delete access granted. + * Renames an existing container. + * @param sourceContainerName Required. Specifies the name of the container to rename. + * @param options The options parameters. */ - delete = false; + rename(sourceContainerName, options) { + return this.client.sendOperationRequest({ sourceContainerName, options }, renameOperationSpec); + } /** - * Specifies Delete version access granted. + * The Batch operation allows multiple API calls to be embedded into a single HTTP request. + * @param contentLength The length of the request. + * @param multipartContentType Required. The value of this header must be multipart/mixed with a batch + * boundary. Example header value: multipart/mixed; boundary=batch_ + * @param body Initial data + * @param options The options parameters. 
*/ - deleteVersion = false; + submitBatch(contentLength, multipartContentType, body, options) { + return this.client.sendOperationRequest({ contentLength, multipartContentType, body, options }, submitBatchOperationSpec); + } /** - * Specifies List access granted. + * The Filter Blobs operation enables callers to list blobs in a container whose tags match a given + * search expression. Filter blobs searches within the given container. + * @param options The options parameters. */ - list = false; + filterBlobs(options) { + return this.client.sendOperationRequest({ options }, filterBlobsOperationSpec); + } /** - * Specfies Tag access granted. + * [Update] establishes and manages a lock on a container for delete operations. The lock duration can + * be 15 to 60 seconds, or can be infinite + * @param options The options parameters. */ - tag = false; + acquireLease(options) { + return this.client.sendOperationRequest({ options }, acquireLeaseOperationSpec); + } /** - * Specifies Move access granted. + * [Update] establishes and manages a lock on a container for delete operations. The lock duration can + * be 15 to 60 seconds, or can be infinite + * @param leaseId Specifies the current lease ID on the resource. + * @param options The options parameters. */ - move = false; + releaseLease(leaseId, options) { + return this.client.sendOperationRequest({ leaseId, options }, releaseLeaseOperationSpec); + } /** - * Specifies Execute access granted. + * [Update] establishes and manages a lock on a container for delete operations. The lock duration can + * be 15 to 60 seconds, or can be infinite + * @param leaseId Specifies the current lease ID on the resource. + * @param options The options parameters. */ - execute = false; + renewLease(leaseId, options) { + return this.client.sendOperationRequest({ leaseId, options }, renewLeaseOperationSpec); + } /** - * Specifies SetImmutabilityPolicy access granted. + * [Update] establishes and manages a lock on a container for delete operations. The lock duration can + * be 15 to 60 seconds, or can be infinite + * @param options The options parameters. */ - setImmutabilityPolicy = false; + breakLease(options) { + return this.client.sendOperationRequest({ options }, breakLeaseOperationSpec); + } /** - * Specifies that Permanent Delete is permitted. + * [Update] establishes and manages a lock on a container for delete operations. The lock duration can + * be 15 to 60 seconds, or can be infinite + * @param leaseId Specifies the current lease ID on the resource. + * @param proposedLeaseId Proposed lease ID, in a GUID string format. The Blob service returns 400 + * (Invalid request) if the proposed lease ID is not in the correct format. See Guid Constructor + * (String) for a list of valid GUID string formats. + * @param options The options parameters. */ - permanentDelete = false; + changeLease(leaseId, proposedLeaseId, options) { + return this.client.sendOperationRequest({ leaseId, proposedLeaseId, options }, changeLeaseOperationSpec); + } /** - * Specifies that Filter Blobs by Tags is permitted. + * [Update] The List Blobs operation returns a list of the blobs under the specified container + * @param options The options parameters. */ - filterByTags = false; + listBlobFlatSegment(options) { + return this.client.sendOperationRequest({ options }, listBlobFlatSegmentOperationSpec); + } /** - * Converts the given permissions to a string. Using this method will guarantee the permissions are in an - * order accepted by the service. 
- * - * The order of the characters should be as specified here to ensure correctness. - * @see https://learn.microsoft.com/rest/api/storageservices/constructing-a-service-sas - * + * [Update] The List Blobs operation returns a list of the blobs under the specified container + * @param delimiter When the request includes this parameter, the operation returns a BlobPrefix + * element in the response body that acts as a placeholder for all blobs whose names begin with the + * same substring up to the appearance of the delimiter character. The delimiter may be a single + * character or a string. + * @param options The options parameters. */ - toString() { - const permissions = []; - if (this.read) { - permissions.push("r"); - } - if (this.add) { - permissions.push("a"); + listBlobHierarchySegment(delimiter, options) { + return this.client.sendOperationRequest({ delimiter, options }, listBlobHierarchySegmentOperationSpec); + } + /** + * Returns the sku name and account kind + * @param options The options parameters. + */ + getAccountInfo(options) { + return this.client.sendOperationRequest({ options }, getAccountInfoOperationSpec); + } + }; + exports2.ContainerImpl = ContainerImpl; + var xmlSerializer = coreClient.createSerializer( + Mappers, + /* isXml */ + true + ); + var createOperationSpec = { + path: "/{containerName}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: Mappers.ContainerCreateHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ContainerCreateExceptionHeaders } - if (this.create) { - permissions.push("c"); + }, + queryParameters: [Parameters.timeoutInSeconds, Parameters.restype2], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.metadata, + Parameters.access, + Parameters.defaultEncryptionScope, + Parameters.preventEncryptionScopeOverride + ], + isXML: true, + serializer: xmlSerializer + }; + var getPropertiesOperationSpec = { + path: "/{containerName}", + httpMethod: "GET", + responses: { + 200: { + headersMapper: Mappers.ContainerGetPropertiesHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ContainerGetPropertiesExceptionHeaders } - if (this.write) { - permissions.push("w"); + }, + queryParameters: [Parameters.timeoutInSeconds, Parameters.restype2], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.leaseId + ], + isXML: true, + serializer: xmlSerializer + }; + var deleteOperationSpec = { + path: "/{containerName}", + httpMethod: "DELETE", + responses: { + 202: { + headersMapper: Mappers.ContainerDeleteHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ContainerDeleteExceptionHeaders } - if (this.delete) { - permissions.push("d"); + }, + queryParameters: [Parameters.timeoutInSeconds, Parameters.restype2], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince + ], + isXML: true, + serializer: xmlSerializer + }; + var setMetadataOperationSpec = { + path: "/{containerName}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: Mappers.ContainerSetMetadataHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ContainerSetMetadataExceptionHeaders } - if (this.deleteVersion) { - 
permissions.push("x"); + }, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.restype2, + Parameters.comp6 + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.metadata, + Parameters.leaseId, + Parameters.ifModifiedSince + ], + isXML: true, + serializer: xmlSerializer + }; + var getAccessPolicyOperationSpec = { + path: "/{containerName}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: { + type: { + name: "Sequence", + element: { + type: { name: "Composite", className: "SignedIdentifier" } + } + }, + serializedName: "SignedIdentifiers", + xmlName: "SignedIdentifiers", + xmlIsWrapped: true, + xmlElementName: "SignedIdentifier" + }, + headersMapper: Mappers.ContainerGetAccessPolicyHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ContainerGetAccessPolicyExceptionHeaders } - if (this.list) { - permissions.push("l"); + }, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.restype2, + Parameters.comp7 + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.leaseId + ], + isXML: true, + serializer: xmlSerializer + }; + var setAccessPolicyOperationSpec = { + path: "/{containerName}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: Mappers.ContainerSetAccessPolicyHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ContainerSetAccessPolicyExceptionHeaders } - if (this.tag) { - permissions.push("t"); + }, + requestBody: Parameters.containerAcl, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.restype2, + Parameters.comp7 + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.contentType, + Parameters.accept, + Parameters.version, + Parameters.requestId, + Parameters.access, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince + ], + isXML: true, + contentType: "application/xml; charset=utf-8", + mediaType: "xml", + serializer: xmlSerializer + }; + var restoreOperationSpec = { + path: "/{containerName}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: Mappers.ContainerRestoreHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ContainerRestoreExceptionHeaders } - if (this.move) { - permissions.push("m"); + }, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.restype2, + Parameters.comp8 + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.deletedContainerName, + Parameters.deletedContainerVersion + ], + isXML: true, + serializer: xmlSerializer + }; + var renameOperationSpec = { + path: "/{containerName}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: Mappers.ContainerRenameHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ContainerRenameExceptionHeaders } - if (this.execute) { - permissions.push("e"); + }, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.restype2, + Parameters.comp9 + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.sourceContainerName, + Parameters.sourceLeaseId + ], + isXML: true, + serializer: xmlSerializer + }; + var submitBatchOperationSpec = { + path: "/{containerName}", + httpMethod: "POST", + responses: { 
+ 202: { + bodyMapper: { + type: { name: "Stream" }, + serializedName: "parsedResponse" + }, + headersMapper: Mappers.ContainerSubmitBatchHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ContainerSubmitBatchExceptionHeaders } - if (this.setImmutabilityPolicy) { - permissions.push("i"); + }, + requestBody: Parameters.body, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.comp4, + Parameters.restype2 + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.accept, + Parameters.version, + Parameters.requestId, + Parameters.contentLength, + Parameters.multipartContentType + ], + isXML: true, + contentType: "application/xml; charset=utf-8", + mediaType: "xml", + serializer: xmlSerializer + }; + var filterBlobsOperationSpec = { + path: "/{containerName}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.FilterBlobSegment, + headersMapper: Mappers.ContainerFilterBlobsHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ContainerFilterBlobsExceptionHeaders } - if (this.permanentDelete) { - permissions.push("y"); + }, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.marker, + Parameters.maxPageSize, + Parameters.comp5, + Parameters.where, + Parameters.restype2 + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1 + ], + isXML: true, + serializer: xmlSerializer + }; + var acquireLeaseOperationSpec = { + path: "/{containerName}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: Mappers.ContainerAcquireLeaseHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ContainerAcquireLeaseExceptionHeaders } - if (this.filterByTags) { - permissions.push("f"); + }, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.restype2, + Parameters.comp10 + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.action, + Parameters.duration, + Parameters.proposedLeaseId + ], + isXML: true, + serializer: xmlSerializer + }; + var releaseLeaseOperationSpec = { + path: "/{containerName}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: Mappers.ContainerReleaseLeaseHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ContainerReleaseLeaseExceptionHeaders } - return permissions.join(""); - } + }, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.restype2, + Parameters.comp10 + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.action1, + Parameters.leaseId1 + ], + isXML: true, + serializer: xmlSerializer }; - exports2.ContainerSASPermissions = ContainerSASPermissions; - } -}); - -// node_modules/@azure/storage-blob/dist/commonjs/credentials/UserDelegationKeyCredential.js -var require_UserDelegationKeyCredential = __commonJS({ - "node_modules/@azure/storage-blob/dist/commonjs/credentials/UserDelegationKeyCredential.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.UserDelegationKeyCredential = void 0; - var node_crypto_1 = require("node:crypto"); - var UserDelegationKeyCredential = class { - /** - * Azure Storage account name; readonly. 
- */ - accountName; - /** - * Azure Storage user delegation key; readonly. - */ - userDelegationKey; - /** - * Key value in Buffer type. - */ - key; - /** - * Creates an instance of UserDelegationKeyCredential. - * @param accountName - - * @param userDelegationKey - - */ - constructor(accountName, userDelegationKey) { - this.accountName = accountName; - this.userDelegationKey = userDelegationKey; - this.key = Buffer.from(userDelegationKey.value, "base64"); - } - /** - * Generates a hash signature for an HTTP request or for a SAS. - * - * @param stringToSign - - */ - computeHMACSHA256(stringToSign) { - return (0, node_crypto_1.createHmac)("sha256", this.key).update(stringToSign, "utf8").digest("base64"); - } + var renewLeaseOperationSpec = { + path: "/{containerName}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: Mappers.ContainerRenewLeaseHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ContainerRenewLeaseExceptionHeaders + } + }, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.restype2, + Parameters.comp10 + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.leaseId1, + Parameters.action2 + ], + isXML: true, + serializer: xmlSerializer }; - exports2.UserDelegationKeyCredential = UserDelegationKeyCredential; - } -}); - -// node_modules/@azure/storage-blob/dist/commonjs/sas/SasIPRange.js -var require_SasIPRange = __commonJS({ - "node_modules/@azure/storage-blob/dist/commonjs/sas/SasIPRange.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.ipRangeToString = ipRangeToString; - function ipRangeToString(ipRange) { - return ipRange.end ? `${ipRange.start}-${ipRange.end}` : ipRange.start; - } - } -}); - -// node_modules/@azure/storage-blob/dist/commonjs/sas/SASQueryParameters.js -var require_SASQueryParameters = __commonJS({ - "node_modules/@azure/storage-blob/dist/commonjs/sas/SASQueryParameters.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.SASQueryParameters = exports2.SASProtocol = void 0; - var SasIPRange_js_1 = require_SasIPRange(); - var utils_common_js_1 = require_utils_common(); - var SASProtocol; - (function(SASProtocol2) { - SASProtocol2["Https"] = "https"; - SASProtocol2["HttpsAndHttp"] = "https,http"; - })(SASProtocol || (exports2.SASProtocol = SASProtocol = {})); - var SASQueryParameters = class { - /** - * The storage API version. - */ - version; - /** - * Optional. The allowed HTTP protocol(s). - */ - protocol; - /** - * Optional. The start time for this SAS token. - */ - startsOn; - /** - * Optional only when identifier is provided. The expiry time for this SAS token. - */ - expiresOn; - /** - * Optional only when identifier is provided. - * Please refer to {@link AccountSASPermissions}, {@link BlobSASPermissions}, or {@link ContainerSASPermissions} for - * more details. - */ - permissions; - /** - * Optional. The storage services being accessed (only for Account SAS). Please refer to {@link AccountSASServices} - * for more details. - */ - services; - /** - * Optional. The storage resource types being accessed (only for Account SAS). Please refer to - * {@link AccountSASResourceTypes} for more details. - */ - resourceTypes; - /** - * Optional. The signed identifier (only for {@link BlobSASSignatureValues}). 
- * - * @see https://learn.microsoft.com/rest/api/storageservices/establishing-a-stored-access-policy - */ - identifier; - /** - * Optional. Encryption scope to use when sending requests authorized with this SAS URI. - */ - encryptionScope; - /** - * Optional. Specifies which resources are accessible via the SAS (only for {@link BlobSASSignatureValues}). - * @see https://learn.microsoft.com/rest/api/storageservices/create-service-sas#specifying-the-signed-resource-blob-service-only - */ - resource; - /** - * The signature for the SAS token. - */ - signature; - /** - * Value for cache-control header in Blob/File Service SAS. - */ - cacheControl; - /** - * Value for content-disposition header in Blob/File Service SAS. - */ - contentDisposition; - /** - * Value for content-encoding header in Blob/File Service SAS. - */ - contentEncoding; - /** - * Value for content-length header in Blob/File Service SAS. - */ - contentLanguage; - /** - * Value for content-type header in Blob/File Service SAS. - */ - contentType; - /** - * Inner value of getter ipRange. - */ - ipRangeInner; - /** - * The Azure Active Directory object ID in GUID format. - * Property of user delegation key. - */ - signedOid; - /** - * The Azure Active Directory tenant ID in GUID format. - * Property of user delegation key. - */ - signedTenantId; - /** - * The date-time the key is active. - * Property of user delegation key. - */ - signedStartsOn; - /** - * The date-time the key expires. - * Property of user delegation key. - */ - signedExpiresOn; - /** - * Abbreviation of the Azure Storage service that accepts the user delegation key. - * Property of user delegation key. - */ - signedService; - /** - * The service version that created the user delegation key. - * Property of user delegation key. - */ - signedVersion; - /** - * Authorized AAD Object ID in GUID format. The AAD Object ID of a user authorized by the owner of the User Delegation Key - * to perform the action granted by the SAS. The Azure Storage service will ensure that the owner of the user delegation key - * has the required permissions before granting access but no additional permission check for the user specified in - * this value will be performed. This is only used for User Delegation SAS. - */ - preauthorizedAgentObjectId; - /** - * A GUID value that will be logged in the storage diagnostic logs and can be used to correlate SAS generation with storage resource access. - * This is only used for User Delegation SAS. - */ - correlationId; - /** - * Optional. IP range allowed for this SAS. 
- * - * @readonly - */ - get ipRange() { - if (this.ipRangeInner) { - return { - end: this.ipRangeInner.end, - start: this.ipRangeInner.start - }; + var breakLeaseOperationSpec = { + path: "/{containerName}", + httpMethod: "PUT", + responses: { + 202: { + headersMapper: Mappers.ContainerBreakLeaseHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ContainerBreakLeaseExceptionHeaders } - return void 0; - } - constructor(version, signature, permissionsOrOptions, services, resourceTypes, protocol, startsOn, expiresOn, ipRange, identifier, resource, cacheControl, contentDisposition, contentEncoding, contentLanguage, contentType, userDelegationKey, preauthorizedAgentObjectId, correlationId, encryptionScope) { - this.version = version; - this.signature = signature; - if (permissionsOrOptions !== void 0 && typeof permissionsOrOptions !== "string") { - this.permissions = permissionsOrOptions.permissions; - this.services = permissionsOrOptions.services; - this.resourceTypes = permissionsOrOptions.resourceTypes; - this.protocol = permissionsOrOptions.protocol; - this.startsOn = permissionsOrOptions.startsOn; - this.expiresOn = permissionsOrOptions.expiresOn; - this.ipRangeInner = permissionsOrOptions.ipRange; - this.identifier = permissionsOrOptions.identifier; - this.encryptionScope = permissionsOrOptions.encryptionScope; - this.resource = permissionsOrOptions.resource; - this.cacheControl = permissionsOrOptions.cacheControl; - this.contentDisposition = permissionsOrOptions.contentDisposition; - this.contentEncoding = permissionsOrOptions.contentEncoding; - this.contentLanguage = permissionsOrOptions.contentLanguage; - this.contentType = permissionsOrOptions.contentType; - if (permissionsOrOptions.userDelegationKey) { - this.signedOid = permissionsOrOptions.userDelegationKey.signedObjectId; - this.signedTenantId = permissionsOrOptions.userDelegationKey.signedTenantId; - this.signedStartsOn = permissionsOrOptions.userDelegationKey.signedStartsOn; - this.signedExpiresOn = permissionsOrOptions.userDelegationKey.signedExpiresOn; - this.signedService = permissionsOrOptions.userDelegationKey.signedService; - this.signedVersion = permissionsOrOptions.userDelegationKey.signedVersion; - this.preauthorizedAgentObjectId = permissionsOrOptions.preauthorizedAgentObjectId; - this.correlationId = permissionsOrOptions.correlationId; - } - } else { - this.services = services; - this.resourceTypes = resourceTypes; - this.expiresOn = expiresOn; - this.permissions = permissionsOrOptions; - this.protocol = protocol; - this.startsOn = startsOn; - this.ipRangeInner = ipRange; - this.encryptionScope = encryptionScope; - this.identifier = identifier; - this.resource = resource; - this.cacheControl = cacheControl; - this.contentDisposition = contentDisposition; - this.contentEncoding = contentEncoding; - this.contentLanguage = contentLanguage; - this.contentType = contentType; - if (userDelegationKey) { - this.signedOid = userDelegationKey.signedObjectId; - this.signedTenantId = userDelegationKey.signedTenantId; - this.signedStartsOn = userDelegationKey.signedStartsOn; - this.signedExpiresOn = userDelegationKey.signedExpiresOn; - this.signedService = userDelegationKey.signedService; - this.signedVersion = userDelegationKey.signedVersion; - this.preauthorizedAgentObjectId = preauthorizedAgentObjectId; - this.correlationId = correlationId; - } + }, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.restype2, + Parameters.comp10 + ], + urlParameters: [Parameters.url], + 
headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.action3, + Parameters.breakPeriod + ], + isXML: true, + serializer: xmlSerializer + }; + var changeLeaseOperationSpec = { + path: "/{containerName}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: Mappers.ContainerChangeLeaseHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ContainerChangeLeaseExceptionHeaders } - } - /** - * Encodes all SAS query parameters into a string that can be appended to a URL. - * - */ - toString() { - const params = [ - "sv", - "ss", - "srt", - "spr", - "st", - "se", - "sip", - "si", - "ses", - "skoid", - // Signed object ID - "sktid", - // Signed tenant ID - "skt", - // Signed key start time - "ske", - // Signed key expiry time - "sks", - // Signed key service - "skv", - // Signed key version - "sr", - "sp", - "sig", - "rscc", - "rscd", - "rsce", - "rscl", - "rsct", - "saoid", - "scid" - ]; - const queries = []; - for (const param of params) { - switch (param) { - case "sv": - this.tryAppendQueryParameter(queries, param, this.version); - break; - case "ss": - this.tryAppendQueryParameter(queries, param, this.services); - break; - case "srt": - this.tryAppendQueryParameter(queries, param, this.resourceTypes); - break; - case "spr": - this.tryAppendQueryParameter(queries, param, this.protocol); - break; - case "st": - this.tryAppendQueryParameter(queries, param, this.startsOn ? (0, utils_common_js_1.truncatedISO8061Date)(this.startsOn, false) : void 0); - break; - case "se": - this.tryAppendQueryParameter(queries, param, this.expiresOn ? (0, utils_common_js_1.truncatedISO8061Date)(this.expiresOn, false) : void 0); - break; - case "sip": - this.tryAppendQueryParameter(queries, param, this.ipRange ? (0, SasIPRange_js_1.ipRangeToString)(this.ipRange) : void 0); - break; - case "si": - this.tryAppendQueryParameter(queries, param, this.identifier); - break; - case "ses": - this.tryAppendQueryParameter(queries, param, this.encryptionScope); - break; - case "skoid": - this.tryAppendQueryParameter(queries, param, this.signedOid); - break; - case "sktid": - this.tryAppendQueryParameter(queries, param, this.signedTenantId); - break; - case "skt": - this.tryAppendQueryParameter(queries, param, this.signedStartsOn ? (0, utils_common_js_1.truncatedISO8061Date)(this.signedStartsOn, false) : void 0); - break; - case "ske": - this.tryAppendQueryParameter(queries, param, this.signedExpiresOn ? 
(0, utils_common_js_1.truncatedISO8061Date)(this.signedExpiresOn, false) : void 0); - break; - case "sks": - this.tryAppendQueryParameter(queries, param, this.signedService); - break; - case "skv": - this.tryAppendQueryParameter(queries, param, this.signedVersion); - break; - case "sr": - this.tryAppendQueryParameter(queries, param, this.resource); - break; - case "sp": - this.tryAppendQueryParameter(queries, param, this.permissions); - break; - case "sig": - this.tryAppendQueryParameter(queries, param, this.signature); - break; - case "rscc": - this.tryAppendQueryParameter(queries, param, this.cacheControl); - break; - case "rscd": - this.tryAppendQueryParameter(queries, param, this.contentDisposition); - break; - case "rsce": - this.tryAppendQueryParameter(queries, param, this.contentEncoding); - break; - case "rscl": - this.tryAppendQueryParameter(queries, param, this.contentLanguage); - break; - case "rsct": - this.tryAppendQueryParameter(queries, param, this.contentType); - break; - case "saoid": - this.tryAppendQueryParameter(queries, param, this.preauthorizedAgentObjectId); - break; - case "scid": - this.tryAppendQueryParameter(queries, param, this.correlationId); - break; - } + }, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.restype2, + Parameters.comp10 + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.leaseId1, + Parameters.action4, + Parameters.proposedLeaseId1 + ], + isXML: true, + serializer: xmlSerializer + }; + var listBlobFlatSegmentOperationSpec = { + path: "/{containerName}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.ListBlobsFlatSegmentResponse, + headersMapper: Mappers.ContainerListBlobFlatSegmentHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ContainerListBlobFlatSegmentExceptionHeaders } - return queries.join("&"); - } - /** - * A private helper method used to filter and append query key/value pairs into an array. 
- * - * @param queries - - * @param key - - * @param value - - */ - tryAppendQueryParameter(queries, key, value) { - if (!value) { - return; + }, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.comp2, + Parameters.prefix, + Parameters.marker, + Parameters.maxPageSize, + Parameters.restype2, + Parameters.include1 + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1 + ], + isXML: true, + serializer: xmlSerializer + }; + var listBlobHierarchySegmentOperationSpec = { + path: "/{containerName}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.ListBlobsHierarchySegmentResponse, + headersMapper: Mappers.ContainerListBlobHierarchySegmentHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ContainerListBlobHierarchySegmentExceptionHeaders } - key = encodeURIComponent(key); - value = encodeURIComponent(value); - if (key.length > 0 && value.length > 0) { - queries.push(`${key}=${value}`); + }, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.comp2, + Parameters.prefix, + Parameters.marker, + Parameters.maxPageSize, + Parameters.restype2, + Parameters.include1, + Parameters.delimiter + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1 + ], + isXML: true, + serializer: xmlSerializer + }; + var getAccountInfoOperationSpec = { + path: "/{containerName}", + httpMethod: "GET", + responses: { + 200: { + headersMapper: Mappers.ContainerGetAccountInfoHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ContainerGetAccountInfoExceptionHeaders } - } + }, + queryParameters: [ + Parameters.comp, + Parameters.timeoutInSeconds, + Parameters.restype1 + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1 + ], + isXML: true, + serializer: xmlSerializer }; - exports2.SASQueryParameters = SASQueryParameters; } }); -// node_modules/@azure/storage-blob/dist/commonjs/sas/BlobSASSignatureValues.js -var require_BlobSASSignatureValues = __commonJS({ - "node_modules/@azure/storage-blob/dist/commonjs/sas/BlobSASSignatureValues.js"(exports2) { +// node_modules/@azure/storage-blob/dist/commonjs/generated/src/operations/blob.js +var require_blob = __commonJS({ + "node_modules/@azure/storage-blob/dist/commonjs/generated/src/operations/blob.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.generateBlobSASQueryParameters = generateBlobSASQueryParameters; - exports2.generateBlobSASQueryParametersInternal = generateBlobSASQueryParametersInternal; - var BlobSASPermissions_js_1 = require_BlobSASPermissions(); - var ContainerSASPermissions_js_1 = require_ContainerSASPermissions(); - var StorageSharedKeyCredential_js_1 = require_StorageSharedKeyCredential(); - var UserDelegationKeyCredential_js_1 = require_UserDelegationKeyCredential(); - var SasIPRange_js_1 = require_SasIPRange(); - var SASQueryParameters_js_1 = require_SASQueryParameters(); - var constants_js_1 = require_constants15(); - var utils_common_js_1 = require_utils_common(); - function generateBlobSASQueryParameters(blobSASSignatureValues, sharedKeyCredentialOrUserDelegationKey, accountName) { - return generateBlobSASQueryParametersInternal(blobSASSignatureValues, sharedKeyCredentialOrUserDelegationKey, accountName).sasQueryParameters; - } - function 
generateBlobSASQueryParametersInternal(blobSASSignatureValues, sharedKeyCredentialOrUserDelegationKey, accountName) { - const version = blobSASSignatureValues.version ? blobSASSignatureValues.version : constants_js_1.SERVICE_VERSION; - const sharedKeyCredential = sharedKeyCredentialOrUserDelegationKey instanceof StorageSharedKeyCredential_js_1.StorageSharedKeyCredential ? sharedKeyCredentialOrUserDelegationKey : void 0; - let userDelegationKeyCredential; - if (sharedKeyCredential === void 0 && accountName !== void 0) { - userDelegationKeyCredential = new UserDelegationKeyCredential_js_1.UserDelegationKeyCredential(accountName, sharedKeyCredentialOrUserDelegationKey); - } - if (sharedKeyCredential === void 0 && userDelegationKeyCredential === void 0) { - throw TypeError("Invalid sharedKeyCredential, userDelegationKey or accountName."); - } - if (version >= "2020-12-06") { - if (sharedKeyCredential !== void 0) { - return generateBlobSASQueryParameters20201206(blobSASSignatureValues, sharedKeyCredential); - } else { - if (version >= "2025-07-05") { - return generateBlobSASQueryParametersUDK20250705(blobSASSignatureValues, userDelegationKeyCredential); - } else { - return generateBlobSASQueryParametersUDK20201206(blobSASSignatureValues, userDelegationKeyCredential); - } - } - } - if (version >= "2018-11-09") { - if (sharedKeyCredential !== void 0) { - return generateBlobSASQueryParameters20181109(blobSASSignatureValues, sharedKeyCredential); - } else { - if (version >= "2020-02-10") { - return generateBlobSASQueryParametersUDK20200210(blobSASSignatureValues, userDelegationKeyCredential); - } else { - return generateBlobSASQueryParametersUDK20181109(blobSASSignatureValues, userDelegationKeyCredential); - } - } - } - if (version >= "2015-04-05") { - if (sharedKeyCredential !== void 0) { - return generateBlobSASQueryParameters20150405(blobSASSignatureValues, sharedKeyCredential); - } else { - throw new RangeError("'version' must be >= '2018-11-09' when generating user delegation SAS using user delegation key."); - } - } - throw new RangeError("'version' must be >= '2015-04-05'."); - } - function generateBlobSASQueryParameters20150405(blobSASSignatureValues, sharedKeyCredential) { - blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); - if (!blobSASSignatureValues.identifier && !(blobSASSignatureValues.permissions && blobSASSignatureValues.expiresOn)) { - throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when 'identifier' is not provided."); - } - let resource = "c"; - if (blobSASSignatureValues.blobName) { - resource = "b"; - } - let verifiedPermissions; - if (blobSASSignatureValues.permissions) { - if (blobSASSignatureValues.blobName) { - verifiedPermissions = BlobSASPermissions_js_1.BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); - } else { - verifiedPermissions = ContainerSASPermissions_js_1.ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); - } - } - const stringToSign = [ - verifiedPermissions ? verifiedPermissions : "", - blobSASSignatureValues.startsOn ? (0, utils_common_js_1.truncatedISO8061Date)(blobSASSignatureValues.startsOn, false) : "", - blobSASSignatureValues.expiresOn ? 
(0, utils_common_js_1.truncatedISO8061Date)(blobSASSignatureValues.expiresOn, false) : "", - getCanonicalName(sharedKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), - blobSASSignatureValues.identifier, - blobSASSignatureValues.ipRange ? (0, SasIPRange_js_1.ipRangeToString)(blobSASSignatureValues.ipRange) : "", - blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : "", - blobSASSignatureValues.version, - blobSASSignatureValues.cacheControl ? blobSASSignatureValues.cacheControl : "", - blobSASSignatureValues.contentDisposition ? blobSASSignatureValues.contentDisposition : "", - blobSASSignatureValues.contentEncoding ? blobSASSignatureValues.contentEncoding : "", - blobSASSignatureValues.contentLanguage ? blobSASSignatureValues.contentLanguage : "", - blobSASSignatureValues.contentType ? blobSASSignatureValues.contentType : "" - ].join("\n"); - const signature = sharedKeyCredential.computeHMACSHA256(stringToSign); - return { - sasQueryParameters: new SASQueryParameters_js_1.SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, void 0, void 0, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType), - stringToSign - }; - } - function generateBlobSASQueryParameters20181109(blobSASSignatureValues, sharedKeyCredential) { - blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); - if (!blobSASSignatureValues.identifier && !(blobSASSignatureValues.permissions && blobSASSignatureValues.expiresOn)) { - throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when 'identifier' is not provided."); - } - let resource = "c"; - let timestamp2 = blobSASSignatureValues.snapshotTime; - if (blobSASSignatureValues.blobName) { - resource = "b"; - if (blobSASSignatureValues.snapshotTime) { - resource = "bs"; - } else if (blobSASSignatureValues.versionId) { - resource = "bv"; - timestamp2 = blobSASSignatureValues.versionId; - } - } - let verifiedPermissions; - if (blobSASSignatureValues.permissions) { - if (blobSASSignatureValues.blobName) { - verifiedPermissions = BlobSASPermissions_js_1.BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); - } else { - verifiedPermissions = ContainerSASPermissions_js_1.ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); - } - } - const stringToSign = [ - verifiedPermissions ? verifiedPermissions : "", - blobSASSignatureValues.startsOn ? (0, utils_common_js_1.truncatedISO8061Date)(blobSASSignatureValues.startsOn, false) : "", - blobSASSignatureValues.expiresOn ? (0, utils_common_js_1.truncatedISO8061Date)(blobSASSignatureValues.expiresOn, false) : "", - getCanonicalName(sharedKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), - blobSASSignatureValues.identifier, - blobSASSignatureValues.ipRange ? (0, SasIPRange_js_1.ipRangeToString)(blobSASSignatureValues.ipRange) : "", - blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : "", - blobSASSignatureValues.version, - resource, - timestamp2, - blobSASSignatureValues.cacheControl ? 
blobSASSignatureValues.cacheControl : "", - blobSASSignatureValues.contentDisposition ? blobSASSignatureValues.contentDisposition : "", - blobSASSignatureValues.contentEncoding ? blobSASSignatureValues.contentEncoding : "", - blobSASSignatureValues.contentLanguage ? blobSASSignatureValues.contentLanguage : "", - blobSASSignatureValues.contentType ? blobSASSignatureValues.contentType : "" - ].join("\n"); - const signature = sharedKeyCredential.computeHMACSHA256(stringToSign); - return { - sasQueryParameters: new SASQueryParameters_js_1.SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, void 0, void 0, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType), - stringToSign - }; - } - function generateBlobSASQueryParameters20201206(blobSASSignatureValues, sharedKeyCredential) { - blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); - if (!blobSASSignatureValues.identifier && !(blobSASSignatureValues.permissions && blobSASSignatureValues.expiresOn)) { - throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when 'identifier' is not provided."); - } - let resource = "c"; - let timestamp2 = blobSASSignatureValues.snapshotTime; - if (blobSASSignatureValues.blobName) { - resource = "b"; - if (blobSASSignatureValues.snapshotTime) { - resource = "bs"; - } else if (blobSASSignatureValues.versionId) { - resource = "bv"; - timestamp2 = blobSASSignatureValues.versionId; - } - } - let verifiedPermissions; - if (blobSASSignatureValues.permissions) { - if (blobSASSignatureValues.blobName) { - verifiedPermissions = BlobSASPermissions_js_1.BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); - } else { - verifiedPermissions = ContainerSASPermissions_js_1.ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); - } - } - const stringToSign = [ - verifiedPermissions ? verifiedPermissions : "", - blobSASSignatureValues.startsOn ? (0, utils_common_js_1.truncatedISO8061Date)(blobSASSignatureValues.startsOn, false) : "", - blobSASSignatureValues.expiresOn ? (0, utils_common_js_1.truncatedISO8061Date)(blobSASSignatureValues.expiresOn, false) : "", - getCanonicalName(sharedKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), - blobSASSignatureValues.identifier, - blobSASSignatureValues.ipRange ? (0, SasIPRange_js_1.ipRangeToString)(blobSASSignatureValues.ipRange) : "", - blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : "", - blobSASSignatureValues.version, - resource, - timestamp2, - blobSASSignatureValues.encryptionScope, - blobSASSignatureValues.cacheControl ? blobSASSignatureValues.cacheControl : "", - blobSASSignatureValues.contentDisposition ? blobSASSignatureValues.contentDisposition : "", - blobSASSignatureValues.contentEncoding ? blobSASSignatureValues.contentEncoding : "", - blobSASSignatureValues.contentLanguage ? blobSASSignatureValues.contentLanguage : "", - blobSASSignatureValues.contentType ? 
blobSASSignatureValues.contentType : "" - ].join("\n"); - const signature = sharedKeyCredential.computeHMACSHA256(stringToSign); - return { - sasQueryParameters: new SASQueryParameters_js_1.SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, void 0, void 0, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType, void 0, void 0, void 0, blobSASSignatureValues.encryptionScope), - stringToSign - }; - } - function generateBlobSASQueryParametersUDK20181109(blobSASSignatureValues, userDelegationKeyCredential) { - blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); - if (!blobSASSignatureValues.permissions || !blobSASSignatureValues.expiresOn) { - throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when generating user delegation SAS."); - } - let resource = "c"; - let timestamp2 = blobSASSignatureValues.snapshotTime; - if (blobSASSignatureValues.blobName) { - resource = "b"; - if (blobSASSignatureValues.snapshotTime) { - resource = "bs"; - } else if (blobSASSignatureValues.versionId) { - resource = "bv"; - timestamp2 = blobSASSignatureValues.versionId; - } - } - let verifiedPermissions; - if (blobSASSignatureValues.permissions) { - if (blobSASSignatureValues.blobName) { - verifiedPermissions = BlobSASPermissions_js_1.BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); - } else { - verifiedPermissions = ContainerSASPermissions_js_1.ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); - } - } - const stringToSign = [ - verifiedPermissions ? verifiedPermissions : "", - blobSASSignatureValues.startsOn ? (0, utils_common_js_1.truncatedISO8061Date)(blobSASSignatureValues.startsOn, false) : "", - blobSASSignatureValues.expiresOn ? (0, utils_common_js_1.truncatedISO8061Date)(blobSASSignatureValues.expiresOn, false) : "", - getCanonicalName(userDelegationKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), - userDelegationKeyCredential.userDelegationKey.signedObjectId, - userDelegationKeyCredential.userDelegationKey.signedTenantId, - userDelegationKeyCredential.userDelegationKey.signedStartsOn ? (0, utils_common_js_1.truncatedISO8061Date)(userDelegationKeyCredential.userDelegationKey.signedStartsOn, false) : "", - userDelegationKeyCredential.userDelegationKey.signedExpiresOn ? (0, utils_common_js_1.truncatedISO8061Date)(userDelegationKeyCredential.userDelegationKey.signedExpiresOn, false) : "", - userDelegationKeyCredential.userDelegationKey.signedService, - userDelegationKeyCredential.userDelegationKey.signedVersion, - blobSASSignatureValues.ipRange ? (0, SasIPRange_js_1.ipRangeToString)(blobSASSignatureValues.ipRange) : "", - blobSASSignatureValues.protocol ? 
blobSASSignatureValues.protocol : "", - blobSASSignatureValues.version, - resource, - timestamp2, - blobSASSignatureValues.cacheControl, - blobSASSignatureValues.contentDisposition, - blobSASSignatureValues.contentEncoding, - blobSASSignatureValues.contentLanguage, - blobSASSignatureValues.contentType - ].join("\n"); - const signature = userDelegationKeyCredential.computeHMACSHA256(stringToSign); - return { - sasQueryParameters: new SASQueryParameters_js_1.SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, void 0, void 0, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType, userDelegationKeyCredential.userDelegationKey), - stringToSign - }; - } - function generateBlobSASQueryParametersUDK20200210(blobSASSignatureValues, userDelegationKeyCredential) { - blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); - if (!blobSASSignatureValues.permissions || !blobSASSignatureValues.expiresOn) { - throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when generating user delegation SAS."); - } - let resource = "c"; - let timestamp2 = blobSASSignatureValues.snapshotTime; - if (blobSASSignatureValues.blobName) { - resource = "b"; - if (blobSASSignatureValues.snapshotTime) { - resource = "bs"; - } else if (blobSASSignatureValues.versionId) { - resource = "bv"; - timestamp2 = blobSASSignatureValues.versionId; - } - } - let verifiedPermissions; - if (blobSASSignatureValues.permissions) { - if (blobSASSignatureValues.blobName) { - verifiedPermissions = BlobSASPermissions_js_1.BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); - } else { - verifiedPermissions = ContainerSASPermissions_js_1.ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); - } - } - const stringToSign = [ - verifiedPermissions ? verifiedPermissions : "", - blobSASSignatureValues.startsOn ? (0, utils_common_js_1.truncatedISO8061Date)(blobSASSignatureValues.startsOn, false) : "", - blobSASSignatureValues.expiresOn ? (0, utils_common_js_1.truncatedISO8061Date)(blobSASSignatureValues.expiresOn, false) : "", - getCanonicalName(userDelegationKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), - userDelegationKeyCredential.userDelegationKey.signedObjectId, - userDelegationKeyCredential.userDelegationKey.signedTenantId, - userDelegationKeyCredential.userDelegationKey.signedStartsOn ? (0, utils_common_js_1.truncatedISO8061Date)(userDelegationKeyCredential.userDelegationKey.signedStartsOn, false) : "", - userDelegationKeyCredential.userDelegationKey.signedExpiresOn ? (0, utils_common_js_1.truncatedISO8061Date)(userDelegationKeyCredential.userDelegationKey.signedExpiresOn, false) : "", - userDelegationKeyCredential.userDelegationKey.signedService, - userDelegationKeyCredential.userDelegationKey.signedVersion, - blobSASSignatureValues.preauthorizedAgentObjectId, - void 0, - // agentObjectId - blobSASSignatureValues.correlationId, - blobSASSignatureValues.ipRange ? (0, SasIPRange_js_1.ipRangeToString)(blobSASSignatureValues.ipRange) : "", - blobSASSignatureValues.protocol ? 
blobSASSignatureValues.protocol : "", - blobSASSignatureValues.version, - resource, - timestamp2, - blobSASSignatureValues.cacheControl, - blobSASSignatureValues.contentDisposition, - blobSASSignatureValues.contentEncoding, - blobSASSignatureValues.contentLanguage, - blobSASSignatureValues.contentType - ].join("\n"); - const signature = userDelegationKeyCredential.computeHMACSHA256(stringToSign); - return { - sasQueryParameters: new SASQueryParameters_js_1.SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, void 0, void 0, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType, userDelegationKeyCredential.userDelegationKey, blobSASSignatureValues.preauthorizedAgentObjectId, blobSASSignatureValues.correlationId), - stringToSign - }; - } - function generateBlobSASQueryParametersUDK20201206(blobSASSignatureValues, userDelegationKeyCredential) { - blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); - if (!blobSASSignatureValues.permissions || !blobSASSignatureValues.expiresOn) { - throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when generating user delegation SAS."); + exports2.BlobImpl = void 0; + var tslib_1 = (init_tslib_es6(), __toCommonJS(tslib_es6_exports)); + var coreClient = tslib_1.__importStar(require_commonjs8()); + var Mappers = tslib_1.__importStar(require_mappers()); + var Parameters = tslib_1.__importStar(require_parameters()); + var BlobImpl = class { + client; + /** + * Initialize a new instance of the class Blob class. + * @param client Reference to the service client + */ + constructor(client) { + this.client = client; } - let resource = "c"; - let timestamp2 = blobSASSignatureValues.snapshotTime; - if (blobSASSignatureValues.blobName) { - resource = "b"; - if (blobSASSignatureValues.snapshotTime) { - resource = "bs"; - } else if (blobSASSignatureValues.versionId) { - resource = "bv"; - timestamp2 = blobSASSignatureValues.versionId; - } + /** + * The Download operation reads or downloads a blob from the system, including its metadata and + * properties. You can also call Download to read a snapshot. + * @param options The options parameters. + */ + download(options) { + return this.client.sendOperationRequest({ options }, downloadOperationSpec); } - let verifiedPermissions; - if (blobSASSignatureValues.permissions) { - if (blobSASSignatureValues.blobName) { - verifiedPermissions = BlobSASPermissions_js_1.BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); - } else { - verifiedPermissions = ContainerSASPermissions_js_1.ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); - } + /** + * The Get Properties operation returns all user-defined metadata, standard HTTP properties, and system + * properties for the blob. It does not return the content of the blob. + * @param options The options parameters. + */ + getProperties(options) { + return this.client.sendOperationRequest({ options }, getPropertiesOperationSpec); } - const stringToSign = [ - verifiedPermissions ? verifiedPermissions : "", - blobSASSignatureValues.startsOn ? 
(0, utils_common_js_1.truncatedISO8061Date)(blobSASSignatureValues.startsOn, false) : "", - blobSASSignatureValues.expiresOn ? (0, utils_common_js_1.truncatedISO8061Date)(blobSASSignatureValues.expiresOn, false) : "", - getCanonicalName(userDelegationKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), - userDelegationKeyCredential.userDelegationKey.signedObjectId, - userDelegationKeyCredential.userDelegationKey.signedTenantId, - userDelegationKeyCredential.userDelegationKey.signedStartsOn ? (0, utils_common_js_1.truncatedISO8061Date)(userDelegationKeyCredential.userDelegationKey.signedStartsOn, false) : "", - userDelegationKeyCredential.userDelegationKey.signedExpiresOn ? (0, utils_common_js_1.truncatedISO8061Date)(userDelegationKeyCredential.userDelegationKey.signedExpiresOn, false) : "", - userDelegationKeyCredential.userDelegationKey.signedService, - userDelegationKeyCredential.userDelegationKey.signedVersion, - blobSASSignatureValues.preauthorizedAgentObjectId, - void 0, - // agentObjectId - blobSASSignatureValues.correlationId, - blobSASSignatureValues.ipRange ? (0, SasIPRange_js_1.ipRangeToString)(blobSASSignatureValues.ipRange) : "", - blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : "", - blobSASSignatureValues.version, - resource, - timestamp2, - blobSASSignatureValues.encryptionScope, - blobSASSignatureValues.cacheControl, - blobSASSignatureValues.contentDisposition, - blobSASSignatureValues.contentEncoding, - blobSASSignatureValues.contentLanguage, - blobSASSignatureValues.contentType - ].join("\n"); - const signature = userDelegationKeyCredential.computeHMACSHA256(stringToSign); - return { - sasQueryParameters: new SASQueryParameters_js_1.SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, void 0, void 0, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType, userDelegationKeyCredential.userDelegationKey, blobSASSignatureValues.preauthorizedAgentObjectId, blobSASSignatureValues.correlationId, blobSASSignatureValues.encryptionScope), - stringToSign - }; - } - function generateBlobSASQueryParametersUDK20250705(blobSASSignatureValues, userDelegationKeyCredential) { - blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); - if (!blobSASSignatureValues.permissions || !blobSASSignatureValues.expiresOn) { - throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when generating user delegation SAS."); + /** + * If the storage account's soft delete feature is disabled then, when a blob is deleted, it is + * permanently removed from the storage account. If the storage account's soft delete feature is + * enabled, then, when a blob is deleted, it is marked for deletion and becomes inaccessible + * immediately. However, the blob service retains the blob or snapshot for the number of days specified + * by the DeleteRetentionPolicy section of [Storage service properties] + * (Set-Blob-Service-Properties.md). After the specified number of days has passed, the blob's data is + * permanently removed from the storage account. 
Note that you continue to be charged for the + * soft-deleted blob's storage until it is permanently removed. Use the List Blobs API and specify the + * "include=deleted" query parameter to discover which blobs and snapshots have been soft deleted. You + * can then use the Undelete Blob API to restore a soft-deleted blob. All other operations on a + * soft-deleted blob or snapshot causes the service to return an HTTP status code of 404 + * (ResourceNotFound). + * @param options The options parameters. + */ + delete(options) { + return this.client.sendOperationRequest({ options }, deleteOperationSpec); } - let resource = "c"; - let timestamp2 = blobSASSignatureValues.snapshotTime; - if (blobSASSignatureValues.blobName) { - resource = "b"; - if (blobSASSignatureValues.snapshotTime) { - resource = "bs"; - } else if (blobSASSignatureValues.versionId) { - resource = "bv"; - timestamp2 = blobSASSignatureValues.versionId; - } + /** + * Undelete a blob that was previously soft deleted + * @param options The options parameters. + */ + undelete(options) { + return this.client.sendOperationRequest({ options }, undeleteOperationSpec); } - let verifiedPermissions; - if (blobSASSignatureValues.permissions) { - if (blobSASSignatureValues.blobName) { - verifiedPermissions = BlobSASPermissions_js_1.BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); - } else { - verifiedPermissions = ContainerSASPermissions_js_1.ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); - } + /** + * Sets the time a blob will expire and be deleted. + * @param expiryOptions Required. Indicates mode of the expiry time + * @param options The options parameters. + */ + setExpiry(expiryOptions, options) { + return this.client.sendOperationRequest({ expiryOptions, options }, setExpiryOperationSpec); } - const stringToSign = [ - verifiedPermissions ? verifiedPermissions : "", - blobSASSignatureValues.startsOn ? (0, utils_common_js_1.truncatedISO8061Date)(blobSASSignatureValues.startsOn, false) : "", - blobSASSignatureValues.expiresOn ? (0, utils_common_js_1.truncatedISO8061Date)(blobSASSignatureValues.expiresOn, false) : "", - getCanonicalName(userDelegationKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), - userDelegationKeyCredential.userDelegationKey.signedObjectId, - userDelegationKeyCredential.userDelegationKey.signedTenantId, - userDelegationKeyCredential.userDelegationKey.signedStartsOn ? (0, utils_common_js_1.truncatedISO8061Date)(userDelegationKeyCredential.userDelegationKey.signedStartsOn, false) : "", - userDelegationKeyCredential.userDelegationKey.signedExpiresOn ? (0, utils_common_js_1.truncatedISO8061Date)(userDelegationKeyCredential.userDelegationKey.signedExpiresOn, false) : "", - userDelegationKeyCredential.userDelegationKey.signedService, - userDelegationKeyCredential.userDelegationKey.signedVersion, - blobSASSignatureValues.preauthorizedAgentObjectId, - void 0, - // agentObjectId - blobSASSignatureValues.correlationId, - void 0, - // SignedKeyDelegatedUserTenantId, will be added in a future release. - void 0, - // SignedDelegatedUserObjectId, will be added in future release. - blobSASSignatureValues.ipRange ? (0, SasIPRange_js_1.ipRangeToString)(blobSASSignatureValues.ipRange) : "", - blobSASSignatureValues.protocol ? 
blobSASSignatureValues.protocol : "", - blobSASSignatureValues.version, - resource, - timestamp2, - blobSASSignatureValues.encryptionScope, - blobSASSignatureValues.cacheControl, - blobSASSignatureValues.contentDisposition, - blobSASSignatureValues.contentEncoding, - blobSASSignatureValues.contentLanguage, - blobSASSignatureValues.contentType - ].join("\n"); - const signature = userDelegationKeyCredential.computeHMACSHA256(stringToSign); - return { - sasQueryParameters: new SASQueryParameters_js_1.SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, void 0, void 0, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType, userDelegationKeyCredential.userDelegationKey, blobSASSignatureValues.preauthorizedAgentObjectId, blobSASSignatureValues.correlationId, blobSASSignatureValues.encryptionScope), - stringToSign - }; - } - function getCanonicalName(accountName, containerName, blobName) { - const elements = [`/blob/${accountName}/${containerName}`]; - if (blobName) { - elements.push(`/${blobName}`); + /** + * The Set HTTP Headers operation sets system properties on the blob + * @param options The options parameters. + */ + setHttpHeaders(options) { + return this.client.sendOperationRequest({ options }, setHttpHeadersOperationSpec); } - return elements.join(""); - } - function SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues) { - const version = blobSASSignatureValues.version ? blobSASSignatureValues.version : constants_js_1.SERVICE_VERSION; - if (blobSASSignatureValues.snapshotTime && version < "2018-11-09") { - throw RangeError("'version' must be >= '2018-11-09' when providing 'snapshotTime'."); + /** + * The Set Immutability Policy operation sets the immutability policy on the blob + * @param options The options parameters. + */ + setImmutabilityPolicy(options) { + return this.client.sendOperationRequest({ options }, setImmutabilityPolicyOperationSpec); } - if (blobSASSignatureValues.blobName === void 0 && blobSASSignatureValues.snapshotTime) { - throw RangeError("Must provide 'blobName' when providing 'snapshotTime'."); + /** + * The Delete Immutability Policy operation deletes the immutability policy on the blob + * @param options The options parameters. + */ + deleteImmutabilityPolicy(options) { + return this.client.sendOperationRequest({ options }, deleteImmutabilityPolicyOperationSpec); } - if (blobSASSignatureValues.versionId && version < "2019-10-10") { - throw RangeError("'version' must be >= '2019-10-10' when providing 'versionId'."); + /** + * The Set Legal Hold operation sets a legal hold on the blob. + * @param legalHold Specified if a legal hold should be set on the blob. + * @param options The options parameters. + */ + setLegalHold(legalHold, options) { + return this.client.sendOperationRequest({ legalHold, options }, setLegalHoldOperationSpec); } - if (blobSASSignatureValues.blobName === void 0 && blobSASSignatureValues.versionId) { - throw RangeError("Must provide 'blobName' when providing 'versionId'."); + /** + * The Set Blob Metadata operation sets user-defined metadata for the specified blob as one or more + * name-value pairs + * @param options The options parameters. 
+ */ + setMetadata(options) { + return this.client.sendOperationRequest({ options }, setMetadataOperationSpec); } - if (blobSASSignatureValues.permissions && blobSASSignatureValues.permissions.setImmutabilityPolicy && version < "2020-08-04") { - throw RangeError("'version' must be >= '2020-08-04' when provided 'i' permission."); + /** + * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete + * operations + * @param options The options parameters. + */ + acquireLease(options) { + return this.client.sendOperationRequest({ options }, acquireLeaseOperationSpec); } - if (blobSASSignatureValues.permissions && blobSASSignatureValues.permissions.deleteVersion && version < "2019-10-10") { - throw RangeError("'version' must be >= '2019-10-10' when providing 'x' permission."); + /** + * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete + * operations + * @param leaseId Specifies the current lease ID on the resource. + * @param options The options parameters. + */ + releaseLease(leaseId, options) { + return this.client.sendOperationRequest({ leaseId, options }, releaseLeaseOperationSpec); } - if (blobSASSignatureValues.permissions && blobSASSignatureValues.permissions.permanentDelete && version < "2019-10-10") { - throw RangeError("'version' must be >= '2019-10-10' when providing 'y' permission."); + /** + * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete + * operations + * @param leaseId Specifies the current lease ID on the resource. + * @param options The options parameters. + */ + renewLease(leaseId, options) { + return this.client.sendOperationRequest({ leaseId, options }, renewLeaseOperationSpec); } - if (blobSASSignatureValues.permissions && blobSASSignatureValues.permissions.tag && version < "2019-12-12") { - throw RangeError("'version' must be >= '2019-12-12' when providing 't' permission."); + /** + * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete + * operations + * @param leaseId Specifies the current lease ID on the resource. + * @param proposedLeaseId Proposed lease ID, in a GUID string format. The Blob service returns 400 + * (Invalid request) if the proposed lease ID is not in the correct format. See Guid Constructor + * (String) for a list of valid GUID string formats. + * @param options The options parameters. + */ + changeLease(leaseId, proposedLeaseId, options) { + return this.client.sendOperationRequest({ leaseId, proposedLeaseId, options }, changeLeaseOperationSpec); } - if (version < "2020-02-10" && blobSASSignatureValues.permissions && (blobSASSignatureValues.permissions.move || blobSASSignatureValues.permissions.execute)) { - throw RangeError("'version' must be >= '2020-02-10' when providing the 'm' or 'e' permission."); + /** + * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete + * operations + * @param options The options parameters. + */ + breakLease(options) { + return this.client.sendOperationRequest({ options }, breakLeaseOperationSpec); } - if (version < "2021-04-10" && blobSASSignatureValues.permissions && blobSASSignatureValues.permissions.filterByTags) { - throw RangeError("'version' must be >= '2021-04-10' when providing the 'f' permission."); + /** + * The Create Snapshot operation creates a read-only snapshot of a blob + * @param options The options parameters. 
+ */ + createSnapshot(options) { + return this.client.sendOperationRequest({ options }, createSnapshotOperationSpec); } - if (version < "2020-02-10" && (blobSASSignatureValues.preauthorizedAgentObjectId || blobSASSignatureValues.correlationId)) { - throw RangeError("'version' must be >= '2020-02-10' when providing 'preauthorizedAgentObjectId' or 'correlationId'."); + /** + * The Start Copy From URL operation copies a blob or an internet resource to a new blob. + * @param copySource Specifies the name of the source page blob snapshot. This value is a URL of up to + * 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it would + * appear in a request URI. The source blob must either be public or must be authenticated via a shared + * access signature. + * @param options The options parameters. + */ + startCopyFromURL(copySource, options) { + return this.client.sendOperationRequest({ copySource, options }, startCopyFromURLOperationSpec); } - if (blobSASSignatureValues.encryptionScope && version < "2020-12-06") { - throw RangeError("'version' must be >= '2020-12-06' when provided 'encryptionScope' in SAS."); + /** + * The Copy From URL operation copies a blob or an internet resource to a new blob. It will not return + * a response until the copy is complete. + * @param copySource Specifies the name of the source page blob snapshot. This value is a URL of up to + * 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it would + * appear in a request URI. The source blob must either be public or must be authenticated via a shared + * access signature. + * @param options The options parameters. + */ + copyFromURL(copySource, options) { + return this.client.sendOperationRequest({ copySource, options }, copyFromURLOperationSpec); } - blobSASSignatureValues.version = version; - return blobSASSignatureValues; - } - } -}); - -// node_modules/@azure/storage-blob/dist/commonjs/BlobLeaseClient.js -var require_BlobLeaseClient = __commonJS({ - "node_modules/@azure/storage-blob/dist/commonjs/BlobLeaseClient.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.BlobLeaseClient = void 0; - var core_util_1 = require_commonjs4(); - var constants_js_1 = require_constants15(); - var tracing_js_1 = require_tracing(); - var utils_common_js_1 = require_utils_common(); - var BlobLeaseClient = class { - _leaseId; - _url; - _containerOrBlobOperation; - _isContainer; /** - * Gets the lease Id. - * - * @readonly + * The Abort Copy From URL operation aborts a pending Copy From URL operation, and leaves a destination + * blob with zero length and full metadata. + * @param copyId The copy identifier provided in the x-ms-copy-id header of the original Copy Blob + * operation. + * @param options The options parameters. */ - get leaseId() { - return this._leaseId; + abortCopyFromURL(copyId, options) { + return this.client.sendOperationRequest({ copyId, options }, abortCopyFromURLOperationSpec); } /** - * Gets the url. - * - * @readonly + * The Set Tier operation sets the tier on a blob. The operation is allowed on a page blob in a premium + * storage account and on a block blob in a blob storage account (locally redundant storage only). A + * premium page blob's tier determines the allowed size, IOPS, and bandwidth of the blob. A block + * blob's tier determines Hot/Cool/Archive storage type. This operation does not update the blob's + * ETag. + * @param tier Indicates the tier to be set on the blob. 
+ * @param options The options parameters. */ - get url() { - return this._url; + setTier(tier, options) { + return this.client.sendOperationRequest({ tier, options }, setTierOperationSpec); } /** - * Creates an instance of BlobLeaseClient. - * @param client - The client to make the lease operation requests. - * @param leaseId - Initial proposed lease id. + * Returns the sku name and account kind + * @param options The options parameters. */ - constructor(client, leaseId) { - const clientContext = client.storageClientContext; - this._url = client.url; - if (client.name === void 0) { - this._isContainer = true; - this._containerOrBlobOperation = clientContext.container; - } else { - this._isContainer = false; - this._containerOrBlobOperation = clientContext.blob; + getAccountInfo(options) { + return this.client.sendOperationRequest({ options }, getAccountInfoOperationSpec); + } + /** + * The Query operation enables users to select/project on blob data by providing simple query + * expressions. + * @param options The options parameters. + */ + query(options) { + return this.client.sendOperationRequest({ options }, queryOperationSpec); + } + /** + * The Get Tags operation enables users to get the tags associated with a blob. + * @param options The options parameters. + */ + getTags(options) { + return this.client.sendOperationRequest({ options }, getTagsOperationSpec); + } + /** + * The Set Tags operation enables users to set tags on a blob. + * @param options The options parameters. + */ + setTags(options) { + return this.client.sendOperationRequest({ options }, setTagsOperationSpec); + } + }; + exports2.BlobImpl = BlobImpl; + var xmlSerializer = coreClient.createSerializer( + Mappers, + /* isXml */ + true + ); + var downloadOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: { + type: { name: "Stream" }, + serializedName: "parsedResponse" + }, + headersMapper: Mappers.BlobDownloadHeaders + }, + 206: { + bodyMapper: { + type: { name: "Stream" }, + serializedName: "parsedResponse" + }, + headersMapper: Mappers.BlobDownloadHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlobDownloadExceptionHeaders + } + }, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.snapshot, + Parameters.versionId + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.range, + Parameters.rangeGetContentMD5, + Parameters.rangeGetContentCRC64, + Parameters.encryptionKey, + Parameters.encryptionKeySha256, + Parameters.encryptionAlgorithm, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags + ], + isXML: true, + serializer: xmlSerializer + }; + var getPropertiesOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "HEAD", + responses: { + 200: { + headersMapper: Mappers.BlobGetPropertiesHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlobGetPropertiesExceptionHeaders + } + }, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.snapshot, + Parameters.versionId + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.encryptionKey, + Parameters.encryptionKeySha256, + Parameters.encryptionAlgorithm, 
+ Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags + ], + isXML: true, + serializer: xmlSerializer + }; + var deleteOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "DELETE", + responses: { + 202: { + headersMapper: Mappers.BlobDeleteHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlobDeleteExceptionHeaders + } + }, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.snapshot, + Parameters.versionId, + Parameters.blobDeleteType + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags, + Parameters.deleteSnapshots + ], + isXML: true, + serializer: xmlSerializer + }; + var undeleteOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: Mappers.BlobUndeleteHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlobUndeleteExceptionHeaders + } + }, + queryParameters: [Parameters.timeoutInSeconds, Parameters.comp8], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1 + ], + isXML: true, + serializer: xmlSerializer + }; + var setExpiryOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: Mappers.BlobSetExpiryHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlobSetExpiryExceptionHeaders + } + }, + queryParameters: [Parameters.timeoutInSeconds, Parameters.comp11], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.expiryOptions, + Parameters.expiresOn + ], + isXML: true, + serializer: xmlSerializer + }; + var setHttpHeadersOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: Mappers.BlobSetHttpHeadersHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlobSetHttpHeadersExceptionHeaders + } + }, + queryParameters: [Parameters.comp, Parameters.timeoutInSeconds], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags, + Parameters.blobCacheControl, + Parameters.blobContentType, + Parameters.blobContentMD5, + Parameters.blobContentEncoding, + Parameters.blobContentLanguage, + Parameters.blobContentDisposition + ], + isXML: true, + serializer: xmlSerializer + }; + var setImmutabilityPolicyOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: Mappers.BlobSetImmutabilityPolicyHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlobSetImmutabilityPolicyExceptionHeaders + } + }, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.snapshot, + Parameters.versionId, + Parameters.comp12 + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.ifUnmodifiedSince, + Parameters.immutabilityPolicyExpiry, + Parameters.immutabilityPolicyMode + ], + isXML: true, + 
serializer: xmlSerializer + }; + var deleteImmutabilityPolicyOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "DELETE", + responses: { + 200: { + headersMapper: Mappers.BlobDeleteImmutabilityPolicyHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlobDeleteImmutabilityPolicyExceptionHeaders } - if (!leaseId) { - leaseId = (0, core_util_1.randomUUID)(); + }, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.snapshot, + Parameters.versionId, + Parameters.comp12 + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1 + ], + isXML: true, + serializer: xmlSerializer + }; + var setLegalHoldOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: Mappers.BlobSetLegalHoldHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlobSetLegalHoldExceptionHeaders } - this._leaseId = leaseId; - } - /** - * Establishes and manages a lock on a container for delete operations, or on a blob - * for write and delete operations. - * The lock duration can be 15 to 60 seconds, or can be infinite. - * @see https://learn.microsoft.com/rest/api/storageservices/lease-container - * and - * @see https://learn.microsoft.com/rest/api/storageservices/lease-blob - * - * @param duration - Must be between 15 to 60 seconds, or infinite (-1) - * @param options - option to configure lease management operations. - * @returns Response data for acquire lease operation. - */ - async acquireLease(duration, options = {}) { - if (this._isContainer && (options.conditions?.ifMatch && options.conditions?.ifMatch !== constants_js_1.ETagNone || options.conditions?.ifNoneMatch && options.conditions?.ifNoneMatch !== constants_js_1.ETagNone || options.conditions?.tagConditions)) { - throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable."); + }, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.snapshot, + Parameters.versionId, + Parameters.comp13 + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.legalHold + ], + isXML: true, + serializer: xmlSerializer + }; + var setMetadataOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: Mappers.BlobSetMetadataHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlobSetMetadataExceptionHeaders } - return tracing_js_1.tracingClient.withSpan("BlobLeaseClient-acquireLease", options, async (updatedOptions) => { - return (0, utils_common_js_1.assertResponse)(await this._containerOrBlobOperation.acquireLease({ - abortSignal: options.abortSignal, - duration, - modifiedAccessConditions: { - ...options.conditions, - ifTags: options.conditions?.tagConditions - }, - proposedLeaseId: this._leaseId, - tracingOptions: updatedOptions.tracingOptions - })); - }); - } - /** - * To change the ID of the lease. - * @see https://learn.microsoft.com/rest/api/storageservices/lease-container - * and - * @see https://learn.microsoft.com/rest/api/storageservices/lease-blob - * - * @param proposedLeaseId - the proposed new lease Id. - * @param options - option to configure lease management operations. - * @returns Response data for change lease operation. 
- */ - async changeLease(proposedLeaseId, options = {}) { - if (this._isContainer && (options.conditions?.ifMatch && options.conditions?.ifMatch !== constants_js_1.ETagNone || options.conditions?.ifNoneMatch && options.conditions?.ifNoneMatch !== constants_js_1.ETagNone || options.conditions?.tagConditions)) { - throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable."); + }, + queryParameters: [Parameters.timeoutInSeconds, Parameters.comp6], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.metadata, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.encryptionKey, + Parameters.encryptionKeySha256, + Parameters.encryptionAlgorithm, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags, + Parameters.encryptionScope + ], + isXML: true, + serializer: xmlSerializer + }; + var acquireLeaseOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: Mappers.BlobAcquireLeaseHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlobAcquireLeaseExceptionHeaders } - return tracing_js_1.tracingClient.withSpan("BlobLeaseClient-changeLease", options, async (updatedOptions) => { - const response = (0, utils_common_js_1.assertResponse)(await this._containerOrBlobOperation.changeLease(this._leaseId, proposedLeaseId, { - abortSignal: options.abortSignal, - modifiedAccessConditions: { - ...options.conditions, - ifTags: options.conditions?.tagConditions - }, - tracingOptions: updatedOptions.tracingOptions - })); - this._leaseId = proposedLeaseId; - return response; - }); - } - /** - * To free the lease if it is no longer needed so that another client may - * immediately acquire a lease against the container or the blob. - * @see https://learn.microsoft.com/rest/api/storageservices/lease-container - * and - * @see https://learn.microsoft.com/rest/api/storageservices/lease-blob - * - * @param options - option to configure lease management operations. - * @returns Response data for release lease operation. - */ - async releaseLease(options = {}) { - if (this._isContainer && (options.conditions?.ifMatch && options.conditions?.ifMatch !== constants_js_1.ETagNone || options.conditions?.ifNoneMatch && options.conditions?.ifNoneMatch !== constants_js_1.ETagNone || options.conditions?.tagConditions)) { - throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. 
Values other than undefined or their default values are not acceptable."); + }, + queryParameters: [Parameters.timeoutInSeconds, Parameters.comp10], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.action, + Parameters.duration, + Parameters.proposedLeaseId, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags + ], + isXML: true, + serializer: xmlSerializer + }; + var releaseLeaseOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: Mappers.BlobReleaseLeaseHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlobReleaseLeaseExceptionHeaders } - return tracing_js_1.tracingClient.withSpan("BlobLeaseClient-releaseLease", options, async (updatedOptions) => { - return (0, utils_common_js_1.assertResponse)(await this._containerOrBlobOperation.releaseLease(this._leaseId, { - abortSignal: options.abortSignal, - modifiedAccessConditions: { - ...options.conditions, - ifTags: options.conditions?.tagConditions - }, - tracingOptions: updatedOptions.tracingOptions - })); - }); - } - /** - * To renew the lease. - * @see https://learn.microsoft.com/rest/api/storageservices/lease-container - * and - * @see https://learn.microsoft.com/rest/api/storageservices/lease-blob - * - * @param options - Optional option to configure lease management operations. - * @returns Response data for renew lease operation. - */ - async renewLease(options = {}) { - if (this._isContainer && (options.conditions?.ifMatch && options.conditions?.ifMatch !== constants_js_1.ETagNone || options.conditions?.ifNoneMatch && options.conditions?.ifNoneMatch !== constants_js_1.ETagNone || options.conditions?.tagConditions)) { - throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable."); + }, + queryParameters: [Parameters.timeoutInSeconds, Parameters.comp10], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.action1, + Parameters.leaseId1, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags + ], + isXML: true, + serializer: xmlSerializer + }; + var renewLeaseOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: Mappers.BlobRenewLeaseHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlobRenewLeaseExceptionHeaders } - return tracing_js_1.tracingClient.withSpan("BlobLeaseClient-renewLease", options, async (updatedOptions) => { - return this._containerOrBlobOperation.renewLease(this._leaseId, { - abortSignal: options.abortSignal, - modifiedAccessConditions: { - ...options.conditions, - ifTags: options.conditions?.tagConditions - }, - tracingOptions: updatedOptions.tracingOptions - }); - }); - } - /** - * To end the lease but ensure that another client cannot acquire a new lease - * until the current lease period has expired. - * @see https://learn.microsoft.com/rest/api/storageservices/lease-container - * and - * @see https://learn.microsoft.com/rest/api/storageservices/lease-blob - * - * @param breakPeriod - Break period - * @param options - Optional options to configure lease management operations. 
- * @returns Response data for break lease operation. - */ - async breakLease(breakPeriod, options = {}) { - if (this._isContainer && (options.conditions?.ifMatch && options.conditions?.ifMatch !== constants_js_1.ETagNone || options.conditions?.ifNoneMatch && options.conditions?.ifNoneMatch !== constants_js_1.ETagNone || options.conditions?.tagConditions)) { - throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable."); + }, + queryParameters: [Parameters.timeoutInSeconds, Parameters.comp10], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.leaseId1, + Parameters.action2, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags + ], + isXML: true, + serializer: xmlSerializer + }; + var changeLeaseOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: Mappers.BlobChangeLeaseHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlobChangeLeaseExceptionHeaders } - return tracing_js_1.tracingClient.withSpan("BlobLeaseClient-breakLease", options, async (updatedOptions) => { - const operationOptions = { - abortSignal: options.abortSignal, - breakPeriod, - modifiedAccessConditions: { - ...options.conditions, - ifTags: options.conditions?.tagConditions - }, - tracingOptions: updatedOptions.tracingOptions - }; - return (0, utils_common_js_1.assertResponse)(await this._containerOrBlobOperation.breakLease(operationOptions)); - }); - } + }, + queryParameters: [Parameters.timeoutInSeconds, Parameters.comp10], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.leaseId1, + Parameters.action4, + Parameters.proposedLeaseId1, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags + ], + isXML: true, + serializer: xmlSerializer }; - exports2.BlobLeaseClient = BlobLeaseClient; - } -}); - -// node_modules/@azure/storage-blob/dist/commonjs/utils/RetriableReadableStream.js -var require_RetriableReadableStream = __commonJS({ - "node_modules/@azure/storage-blob/dist/commonjs/utils/RetriableReadableStream.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.RetriableReadableStream = void 0; - var abort_controller_1 = require_commonjs11(); - var node_stream_1 = require("node:stream"); - var RetriableReadableStream = class extends node_stream_1.Readable { - start; - offset; - end; - getter; - source; - retries = 0; - maxRetryRequests; - onProgress; - options; - /** - * Creates an instance of RetriableReadableStream. 
- * - * @param source - The current ReadableStream returned from getter - * @param getter - A method calling downloading request returning - * a new ReadableStream from specified offset - * @param offset - Offset position in original data source to read - * @param count - How much data in original data source to read - * @param options - - */ - constructor(source, getter, offset, count, options = {}) { - super({ highWaterMark: options.highWaterMark }); - this.getter = getter; - this.source = source; - this.start = offset; - this.offset = offset; - this.end = offset + count - 1; - this.maxRetryRequests = options.maxRetryRequests && options.maxRetryRequests >= 0 ? options.maxRetryRequests : 0; - this.onProgress = options.onProgress; - this.options = options; - this.setSourceEventHandlers(); - } - _read() { - this.source.resume(); - } - setSourceEventHandlers() { - this.source.on("data", this.sourceDataHandler); - this.source.on("end", this.sourceErrorOrEndHandler); - this.source.on("error", this.sourceErrorOrEndHandler); - this.source.on("aborted", this.sourceAbortedHandler); - } - removeSourceEventHandlers() { - this.source.removeListener("data", this.sourceDataHandler); - this.source.removeListener("end", this.sourceErrorOrEndHandler); - this.source.removeListener("error", this.sourceErrorOrEndHandler); - this.source.removeListener("aborted", this.sourceAbortedHandler); - } - sourceDataHandler = (data) => { - if (this.options.doInjectErrorOnce) { - this.options.doInjectErrorOnce = void 0; - this.source.pause(); - this.sourceErrorOrEndHandler(); - this.source.destroy(); - return; + var breakLeaseOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 202: { + headersMapper: Mappers.BlobBreakLeaseHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlobBreakLeaseExceptionHeaders + } + }, + queryParameters: [Parameters.timeoutInSeconds, Parameters.comp10], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.action3, + Parameters.breakPeriod, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags + ], + isXML: true, + serializer: xmlSerializer + }; + var createSnapshotOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: Mappers.BlobCreateSnapshotHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlobCreateSnapshotExceptionHeaders + } + }, + queryParameters: [Parameters.timeoutInSeconds, Parameters.comp14], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.metadata, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.encryptionKey, + Parameters.encryptionKeySha256, + Parameters.encryptionAlgorithm, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags, + Parameters.encryptionScope + ], + isXML: true, + serializer: xmlSerializer + }; + var startCopyFromURLOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 202: { + headersMapper: Mappers.BlobStartCopyFromURLHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlobStartCopyFromURLExceptionHeaders + } + }, + queryParameters: [Parameters.timeoutInSeconds], + urlParameters: [Parameters.url], + headerParameters: 
[ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.metadata, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags, + Parameters.immutabilityPolicyExpiry, + Parameters.immutabilityPolicyMode, + Parameters.tier, + Parameters.rehydratePriority, + Parameters.sourceIfModifiedSince, + Parameters.sourceIfUnmodifiedSince, + Parameters.sourceIfMatch, + Parameters.sourceIfNoneMatch, + Parameters.sourceIfTags, + Parameters.copySource, + Parameters.blobTagsString, + Parameters.sealBlob, + Parameters.legalHold1 + ], + isXML: true, + serializer: xmlSerializer + }; + var copyFromURLOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 202: { + headersMapper: Mappers.BlobCopyFromURLHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlobCopyFromURLExceptionHeaders } - this.offset += data.length; - if (this.onProgress) { - this.onProgress({ loadedBytes: this.offset - this.start }); + }, + queryParameters: [Parameters.timeoutInSeconds], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.metadata, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags, + Parameters.immutabilityPolicyExpiry, + Parameters.immutabilityPolicyMode, + Parameters.encryptionScope, + Parameters.tier, + Parameters.sourceIfModifiedSince, + Parameters.sourceIfUnmodifiedSince, + Parameters.sourceIfMatch, + Parameters.sourceIfNoneMatch, + Parameters.copySource, + Parameters.blobTagsString, + Parameters.legalHold1, + Parameters.xMsRequiresSync, + Parameters.sourceContentMD5, + Parameters.copySourceAuthorization, + Parameters.copySourceTags, + Parameters.fileRequestIntent + ], + isXML: true, + serializer: xmlSerializer + }; + var abortCopyFromURLOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 204: { + headersMapper: Mappers.BlobAbortCopyFromURLHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlobAbortCopyFromURLExceptionHeaders } - if (!this.push(data)) { - this.source.pause(); + }, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.comp15, + Parameters.copyId + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.leaseId, + Parameters.copyActionAbortConstant + ], + isXML: true, + serializer: xmlSerializer + }; + var setTierOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: Mappers.BlobSetTierHeaders + }, + 202: { + headersMapper: Mappers.BlobSetTierHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlobSetTierExceptionHeaders } - }; - sourceAbortedHandler = () => { - const abortError = new abort_controller_1.AbortError("The operation was aborted."); - this.destroy(abortError); - }; - sourceErrorOrEndHandler = (err) => { - if (err && err.name === "AbortError") { - this.destroy(err); - return; + }, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.snapshot, + Parameters.versionId, + Parameters.comp16 + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.leaseId, + Parameters.ifTags, 
+ Parameters.rehydratePriority, + Parameters.tier1 + ], + isXML: true, + serializer: xmlSerializer + }; + var getAccountInfoOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "GET", + responses: { + 200: { + headersMapper: Mappers.BlobGetAccountInfoHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlobGetAccountInfoExceptionHeaders } - this.removeSourceEventHandlers(); - if (this.offset - 1 === this.end) { - this.push(null); - } else if (this.offset <= this.end) { - if (this.retries < this.maxRetryRequests) { - this.retries += 1; - this.getter(this.offset).then((newSource) => { - this.source = newSource; - this.setSourceEventHandlers(); - return; - }).catch((error3) => { - this.destroy(error3); - }); - } else { - this.destroy(new Error(`Data corruption failure: received less data than required and reached maxRetires limitation. Received data offset: ${this.offset - 1}, data needed offset: ${this.end}, retries: ${this.retries}, max retries: ${this.maxRetryRequests}`)); - } - } else { - this.destroy(new Error(`Data corruption failure: Received more data than original request, data needed offset is ${this.end}, received offset: ${this.offset - 1}`)); + }, + queryParameters: [ + Parameters.comp, + Parameters.timeoutInSeconds, + Parameters.restype1 + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1 + ], + isXML: true, + serializer: xmlSerializer + }; + var queryOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "POST", + responses: { + 200: { + bodyMapper: { + type: { name: "Stream" }, + serializedName: "parsedResponse" + }, + headersMapper: Mappers.BlobQueryHeaders + }, + 206: { + bodyMapper: { + type: { name: "Stream" }, + serializedName: "parsedResponse" + }, + headersMapper: Mappers.BlobQueryHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlobQueryExceptionHeaders } - }; - _destroy(error3, callback) { - this.removeSourceEventHandlers(); - this.source.destroy(); - callback(error3 === null ? void 0 : error3); - } + }, + requestBody: Parameters.queryRequest, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.snapshot, + Parameters.comp17 + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.contentType, + Parameters.accept, + Parameters.version, + Parameters.requestId, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.encryptionKey, + Parameters.encryptionKeySha256, + Parameters.encryptionAlgorithm, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags + ], + isXML: true, + contentType: "application/xml; charset=utf-8", + mediaType: "xml", + serializer: xmlSerializer }; - exports2.RetriableReadableStream = RetriableReadableStream; - } -}); - -// node_modules/@azure/storage-blob/dist/commonjs/BlobDownloadResponse.js -var require_BlobDownloadResponse = __commonJS({ - "node_modules/@azure/storage-blob/dist/commonjs/BlobDownloadResponse.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.BlobDownloadResponse = void 0; - var core_util_1 = require_commonjs4(); - var RetriableReadableStream_js_1 = require_RetriableReadableStream(); - var BlobDownloadResponse = class { - /** - * Indicates that the service supports - * requests for partial file content. 
- * - * @readonly - */ - get acceptRanges() { - return this.originalResponse.acceptRanges; - } - /** - * Returns if it was previously specified - * for the file. - * - * @readonly - */ - get cacheControl() { - return this.originalResponse.cacheControl; - } - /** - * Returns the value that was specified - * for the 'x-ms-content-disposition' header and specifies how to process the - * response. - * - * @readonly - */ - get contentDisposition() { - return this.originalResponse.contentDisposition; - } - /** - * Returns the value that was specified - * for the Content-Encoding request header. - * - * @readonly - */ - get contentEncoding() { - return this.originalResponse.contentEncoding; - } - /** - * Returns the value that was specified - * for the Content-Language request header. - * - * @readonly - */ - get contentLanguage() { - return this.originalResponse.contentLanguage; - } - /** - * The current sequence number for a - * page blob. This header is not returned for block blobs or append blobs. - * - * @readonly - */ - get blobSequenceNumber() { - return this.originalResponse.blobSequenceNumber; - } - /** - * The blob's type. Possible values include: - * 'BlockBlob', 'PageBlob', 'AppendBlob'. - * - * @readonly - */ - get blobType() { - return this.originalResponse.blobType; - } - /** - * The number of bytes present in the - * response body. - * - * @readonly - */ - get contentLength() { - return this.originalResponse.contentLength; - } - /** - * If the file has an MD5 hash and the - * request is to read the full file, this response header is returned so that - * the client can check for message content integrity. If the request is to - * read a specified range and the 'x-ms-range-get-content-md5' is set to - * true, then the request returns an MD5 hash for the range, as long as the - * range size is less than or equal to 4 MB. If neither of these sets of - * conditions is true, then no value is returned for the 'Content-MD5' - * header. - * - * @readonly - */ - get contentMD5() { - return this.originalResponse.contentMD5; - } - /** - * Indicates the range of bytes returned if - * the client requested a subset of the file by setting the Range request - * header. - * - * @readonly - */ - get contentRange() { - return this.originalResponse.contentRange; - } - /** - * The content type specified for the file. - * The default content type is 'application/octet-stream' - * - * @readonly - */ - get contentType() { - return this.originalResponse.contentType; - } - /** - * Conclusion time of the last attempted - * Copy File operation where this file was the destination file. This value - * can specify the time of a completed, aborted, or failed copy attempt. - * - * @readonly - */ - get copyCompletedOn() { - return this.originalResponse.copyCompletedOn; - } - /** - * String identifier for the last attempted Copy - * File operation where this file was the destination file. - * - * @readonly - */ - get copyId() { - return this.originalResponse.copyId; - } - /** - * Contains the number of bytes copied and - * the total bytes in the source in the last attempted Copy File operation - * where this file was the destination file. Can show between 0 and - * Content-Length bytes copied. - * - * @readonly - */ - get copyProgress() { - return this.originalResponse.copyProgress; - } - /** - * URL up to 2KB in length that specifies the - * source file used in the last attempted Copy File operation where this file - * was the destination file. 
- * - * @readonly - */ - get copySource() { - return this.originalResponse.copySource; - } - /** - * State of the copy operation - * identified by 'x-ms-copy-id'. Possible values include: 'pending', - * 'success', 'aborted', 'failed' - * - * @readonly - */ - get copyStatus() { - return this.originalResponse.copyStatus; - } - /** - * Only appears when - * x-ms-copy-status is failed or pending. Describes cause of fatal or - * non-fatal copy operation failure. - * - * @readonly - */ - get copyStatusDescription() { - return this.originalResponse.copyStatusDescription; - } - /** - * When a blob is leased, - * specifies whether the lease is of infinite or fixed duration. Possible - * values include: 'infinite', 'fixed'. - * - * @readonly - */ - get leaseDuration() { - return this.originalResponse.leaseDuration; - } - /** - * Lease state of the blob. Possible - * values include: 'available', 'leased', 'expired', 'breaking', 'broken'. - * - * @readonly - */ - get leaseState() { - return this.originalResponse.leaseState; - } - /** - * The current lease status of the - * blob. Possible values include: 'locked', 'unlocked'. - * - * @readonly - */ - get leaseStatus() { - return this.originalResponse.leaseStatus; - } - /** - * A UTC date/time value generated by the service that - * indicates the time at which the response was initiated. - * - * @readonly - */ - get date() { - return this.originalResponse.date; - } - /** - * The number of committed blocks - * present in the blob. This header is returned only for append blobs. - * - * @readonly - */ - get blobCommittedBlockCount() { - return this.originalResponse.blobCommittedBlockCount; - } - /** - * The ETag contains a value that you can use to - * perform operations conditionally, in quotes. - * - * @readonly - */ - get etag() { - return this.originalResponse.etag; - } - /** - * The number of tags associated with the blob - * - * @readonly - */ - get tagCount() { - return this.originalResponse.tagCount; - } - /** - * The error code. - * - * @readonly - */ - get errorCode() { - return this.originalResponse.errorCode; - } - /** - * The value of this header is set to - * true if the file data and application metadata are completely encrypted - * using the specified algorithm. Otherwise, the value is set to false (when - * the file is unencrypted, or if only parts of the file/application metadata - * are encrypted). 
- * - * @readonly - */ - get isServerEncrypted() { - return this.originalResponse.isServerEncrypted; - } + var getTagsOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.BlobTags, + headersMapper: Mappers.BlobGetTagsHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlobGetTagsExceptionHeaders + } + }, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.snapshot, + Parameters.versionId, + Parameters.comp18 + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.leaseId, + Parameters.ifTags + ], + isXML: true, + serializer: xmlSerializer + }; + var setTagsOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 204: { + headersMapper: Mappers.BlobSetTagsHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlobSetTagsExceptionHeaders + } + }, + requestBody: Parameters.tags, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.versionId, + Parameters.comp18 + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.contentType, + Parameters.accept, + Parameters.version, + Parameters.requestId, + Parameters.leaseId, + Parameters.ifTags, + Parameters.transactionalContentMD5, + Parameters.transactionalContentCrc64 + ], + isXML: true, + contentType: "application/xml; charset=utf-8", + mediaType: "xml", + serializer: xmlSerializer + }; + } +}); + +// node_modules/@azure/storage-blob/dist/commonjs/generated/src/operations/pageBlob.js +var require_pageBlob = __commonJS({ + "node_modules/@azure/storage-blob/dist/commonjs/generated/src/operations/pageBlob.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.PageBlobImpl = void 0; + var tslib_1 = (init_tslib_es6(), __toCommonJS(tslib_es6_exports)); + var coreClient = tslib_1.__importStar(require_commonjs8()); + var Mappers = tslib_1.__importStar(require_mappers()); + var Parameters = tslib_1.__importStar(require_parameters()); + var PageBlobImpl = class { + client; /** - * If the blob has a MD5 hash, and if - * request contains range header (Range or x-ms-range), this response header - * is returned with the value of the whole blob's MD5 value. This value may - * or may not be equal to the value returned in Content-MD5 header, with the - * latter calculated from the requested range. - * - * @readonly + * Initialize a new instance of the class PageBlob class. + * @param client Reference to the service client */ - get blobContentMD5() { - return this.originalResponse.blobContentMD5; + constructor(client) { + this.client = client; } /** - * Returns the date and time the file was last - * modified. Any operation that modifies the file or its properties updates - * the last modified time. - * - * @readonly + * The Create operation creates a new page blob. + * @param contentLength The length of the request. + * @param blobContentLength This header specifies the maximum size for the page blob, up to 1 TB. The + * page blob size must be aligned to a 512-byte boundary. + * @param options The options parameters. 
*/ - get lastModified() { - return this.originalResponse.lastModified; + create(contentLength, blobContentLength, options) { + return this.client.sendOperationRequest({ contentLength, blobContentLength, options }, createOperationSpec); } /** - * Returns the UTC date and time generated by the service that indicates the time at which the blob was - * last read or written to. - * - * @readonly + * The Upload Pages operation writes a range of pages to a page blob + * @param contentLength The length of the request. + * @param body Initial data + * @param options The options parameters. */ - get lastAccessed() { - return this.originalResponse.lastAccessed; + uploadPages(contentLength, body, options) { + return this.client.sendOperationRequest({ contentLength, body, options }, uploadPagesOperationSpec); } /** - * Returns the date and time the blob was created. - * - * @readonly + * The Clear Pages operation clears a set of pages from a page blob + * @param contentLength The length of the request. + * @param options The options parameters. */ - get createdOn() { - return this.originalResponse.createdOn; + clearPages(contentLength, options) { + return this.client.sendOperationRequest({ contentLength, options }, clearPagesOperationSpec); } /** - * A name-value pair - * to associate with a file storage object. - * - * @readonly + * The Upload Pages operation writes a range of pages to a page blob where the contents are read from a + * URL + * @param sourceUrl Specify a URL to the copy source. + * @param sourceRange Bytes of source data in the specified range. The length of this range should + * match the ContentLength header and x-ms-range/Range destination range header. + * @param contentLength The length of the request. + * @param range The range of bytes to which the source range would be written. The range should be 512 + * aligned and range-end is required. + * @param options The options parameters. */ - get metadata() { - return this.originalResponse.metadata; + uploadPagesFromURL(sourceUrl, sourceRange, contentLength, range, options) { + return this.client.sendOperationRequest({ sourceUrl, sourceRange, contentLength, range, options }, uploadPagesFromURLOperationSpec); } /** - * This header uniquely identifies the request - * that was made and can be used for troubleshooting the request. - * - * @readonly + * The Get Page Ranges operation returns the list of valid page ranges for a page blob or snapshot of a + * page blob + * @param options The options parameters. */ - get requestId() { - return this.originalResponse.requestId; + getPageRanges(options) { + return this.client.sendOperationRequest({ options }, getPageRangesOperationSpec); } /** - * If a client request id header is sent in the request, this header will be present in the - * response with the same value. - * - * @readonly + * The Get Page Ranges Diff operation returns the list of valid page ranges for a page blob that were + * changed between target blob and previous snapshot. + * @param options The options parameters. */ - get clientRequestId() { - return this.originalResponse.clientRequestId; + getPageRangesDiff(options) { + return this.client.sendOperationRequest({ options }, getPageRangesDiffOperationSpec); } /** - * Indicates the version of the Blob service used - * to execute the request. - * - * @readonly + * Resize the Blob + * @param blobContentLength This header specifies the maximum size for the page blob, up to 1 TB. The + * page blob size must be aligned to a 512-byte boundary. 
+ * @param options The options parameters. */ - get version() { - return this.originalResponse.version; + resize(blobContentLength, options) { + return this.client.sendOperationRequest({ blobContentLength, options }, resizeOperationSpec); } /** - * Indicates the versionId of the downloaded blob version. - * - * @readonly + * Update the sequence number of the blob + * @param sequenceNumberAction Required if the x-ms-blob-sequence-number header is set for the request. + * This property applies to page blobs only. This property indicates how the service should modify the + * blob's sequence number + * @param options The options parameters. */ - get versionId() { - return this.originalResponse.versionId; + updateSequenceNumber(sequenceNumberAction, options) { + return this.client.sendOperationRequest({ sequenceNumberAction, options }, updateSequenceNumberOperationSpec); } /** - * Indicates whether version of this blob is a current version. - * - * @readonly + * The Copy Incremental operation copies a snapshot of the source page blob to a destination page blob. + * The snapshot is copied such that only the differential changes between the previously copied + * snapshot are transferred to the destination. The copied snapshots are complete copies of the + * original snapshot and can be read or copied from as usual. This API is supported since REST version + * 2016-05-31. + * @param copySource Specifies the name of the source page blob snapshot. This value is a URL of up to + * 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it would + * appear in a request URI. The source blob must either be public or must be authenticated via a shared + * access signature. + * @param options The options parameters. */ - get isCurrentVersion() { - return this.originalResponse.isCurrentVersion; + copyIncremental(copySource, options) { + return this.client.sendOperationRequest({ copySource, options }, copyIncrementalOperationSpec); } + }; + exports2.PageBlobImpl = PageBlobImpl; + var xmlSerializer = coreClient.createSerializer( + Mappers, + /* isXml */ + true + ); + var createOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: Mappers.PageBlobCreateHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.PageBlobCreateExceptionHeaders + } + }, + queryParameters: [Parameters.timeoutInSeconds], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.contentLength, + Parameters.metadata, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.encryptionKey, + Parameters.encryptionKeySha256, + Parameters.encryptionAlgorithm, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags, + Parameters.blobCacheControl, + Parameters.blobContentType, + Parameters.blobContentMD5, + Parameters.blobContentEncoding, + Parameters.blobContentLanguage, + Parameters.blobContentDisposition, + Parameters.immutabilityPolicyExpiry, + Parameters.immutabilityPolicyMode, + Parameters.encryptionScope, + Parameters.tier, + Parameters.blobTagsString, + Parameters.legalHold1, + Parameters.blobType, + Parameters.blobContentLength, + Parameters.blobSequenceNumber + ], + isXML: true, + serializer: xmlSerializer + }; + var uploadPagesOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: Mappers.PageBlobUploadPagesHeaders + }, + 
default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.PageBlobUploadPagesExceptionHeaders + } + }, + requestBody: Parameters.body1, + queryParameters: [Parameters.timeoutInSeconds, Parameters.comp19], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.contentLength, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.range, + Parameters.encryptionKey, + Parameters.encryptionKeySha256, + Parameters.encryptionAlgorithm, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags, + Parameters.encryptionScope, + Parameters.transactionalContentMD5, + Parameters.transactionalContentCrc64, + Parameters.contentType1, + Parameters.accept2, + Parameters.pageWrite, + Parameters.ifSequenceNumberLessThanOrEqualTo, + Parameters.ifSequenceNumberLessThan, + Parameters.ifSequenceNumberEqualTo + ], + isXML: true, + contentType: "application/xml; charset=utf-8", + mediaType: "binary", + serializer: xmlSerializer + }; + var clearPagesOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: Mappers.PageBlobClearPagesHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.PageBlobClearPagesExceptionHeaders + } + }, + queryParameters: [Parameters.timeoutInSeconds, Parameters.comp19], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.contentLength, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.range, + Parameters.encryptionKey, + Parameters.encryptionKeySha256, + Parameters.encryptionAlgorithm, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags, + Parameters.encryptionScope, + Parameters.ifSequenceNumberLessThanOrEqualTo, + Parameters.ifSequenceNumberLessThan, + Parameters.ifSequenceNumberEqualTo, + Parameters.pageWrite1 + ], + isXML: true, + serializer: xmlSerializer + }; + var uploadPagesFromURLOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: Mappers.PageBlobUploadPagesFromURLHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.PageBlobUploadPagesFromURLExceptionHeaders + } + }, + queryParameters: [Parameters.timeoutInSeconds, Parameters.comp19], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.contentLength, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.encryptionKey, + Parameters.encryptionKeySha256, + Parameters.encryptionAlgorithm, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags, + Parameters.encryptionScope, + Parameters.sourceIfModifiedSince, + Parameters.sourceIfUnmodifiedSince, + Parameters.sourceIfMatch, + Parameters.sourceIfNoneMatch, + Parameters.sourceContentMD5, + Parameters.copySourceAuthorization, + Parameters.fileRequestIntent, + Parameters.pageWrite, + Parameters.ifSequenceNumberLessThanOrEqualTo, + Parameters.ifSequenceNumberLessThan, + Parameters.ifSequenceNumberEqualTo, + Parameters.sourceUrl, + Parameters.sourceRange, + Parameters.sourceContentCrc64, + Parameters.range1 + ], + isXML: true, + serializer: xmlSerializer + }; + var getPageRangesOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: 
Mappers.PageList, + headersMapper: Mappers.PageBlobGetPageRangesHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.PageBlobGetPageRangesExceptionHeaders + } + }, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.marker, + Parameters.maxPageSize, + Parameters.snapshot, + Parameters.comp20 + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.range, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags + ], + isXML: true, + serializer: xmlSerializer + }; + var getPageRangesDiffOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.PageList, + headersMapper: Mappers.PageBlobGetPageRangesDiffHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.PageBlobGetPageRangesDiffExceptionHeaders + } + }, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.marker, + Parameters.maxPageSize, + Parameters.snapshot, + Parameters.comp20, + Parameters.prevsnapshot + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.range, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags, + Parameters.prevSnapshotUrl + ], + isXML: true, + serializer: xmlSerializer + }; + var resizeOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: Mappers.PageBlobResizeHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.PageBlobResizeExceptionHeaders + } + }, + queryParameters: [Parameters.comp, Parameters.timeoutInSeconds], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.encryptionKey, + Parameters.encryptionKeySha256, + Parameters.encryptionAlgorithm, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags, + Parameters.encryptionScope, + Parameters.blobContentLength + ], + isXML: true, + serializer: xmlSerializer + }; + var updateSequenceNumberOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: Mappers.PageBlobUpdateSequenceNumberHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.PageBlobUpdateSequenceNumberExceptionHeaders + } + }, + queryParameters: [Parameters.comp, Parameters.timeoutInSeconds], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags, + Parameters.blobSequenceNumber, + Parameters.sequenceNumberAction + ], + isXML: true, + serializer: xmlSerializer + }; + var copyIncrementalOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 202: { + headersMapper: Mappers.PageBlobCopyIncrementalHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.PageBlobCopyIncrementalExceptionHeaders + } + }, + queryParameters: 
[Parameters.timeoutInSeconds, Parameters.comp21], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags, + Parameters.copySource + ], + isXML: true, + serializer: xmlSerializer + }; + } +}); + +// node_modules/@azure/storage-blob/dist/commonjs/generated/src/operations/appendBlob.js +var require_appendBlob = __commonJS({ + "node_modules/@azure/storage-blob/dist/commonjs/generated/src/operations/appendBlob.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.AppendBlobImpl = void 0; + var tslib_1 = (init_tslib_es6(), __toCommonJS(tslib_es6_exports)); + var coreClient = tslib_1.__importStar(require_commonjs8()); + var Mappers = tslib_1.__importStar(require_mappers()); + var Parameters = tslib_1.__importStar(require_parameters()); + var AppendBlobImpl = class { + client; /** - * The SHA-256 hash of the encryption key used to encrypt the blob. This value is only returned - * when the blob was encrypted with a customer-provided key. - * - * @readonly + * Initialize a new instance of the class AppendBlob class. + * @param client Reference to the service client */ - get encryptionKeySha256() { - return this.originalResponse.encryptionKeySha256; + constructor(client) { + this.client = client; } /** - * If the request is to read a specified range and the x-ms-range-get-content-crc64 is set to - * true, then the request returns a crc64 for the range, as long as the range size is less than - * or equal to 4 MB. If both x-ms-range-get-content-crc64 & x-ms-range-get-content-md5 is - * specified in the same request, it will fail with 400(Bad Request) + * The Create Append Blob operation creates a new append blob. + * @param contentLength The length of the request. + * @param options The options parameters. */ - get contentCrc64() { - return this.originalResponse.contentCrc64; + create(contentLength, options) { + return this.client.sendOperationRequest({ contentLength, options }, createOperationSpec); } /** - * Object Replication Policy Id of the destination blob. - * - * @readonly + * The Append Block operation commits a new block of data to the end of an existing append blob. The + * Append Block operation is permitted only if the blob was created with x-ms-blob-type set to + * AppendBlob. Append Block is supported only on version 2015-02-21 version or later. + * @param contentLength The length of the request. + * @param body Initial data + * @param options The options parameters. */ - get objectReplicationDestinationPolicyId() { - return this.originalResponse.objectReplicationDestinationPolicyId; + appendBlock(contentLength, body, options) { + return this.client.sendOperationRequest({ contentLength, body, options }, appendBlockOperationSpec); } /** - * Parsed Object Replication Policy Id, Rule Id(s) and status of the source blob. - * - * @readonly + * The Append Block operation commits a new block of data to the end of an existing append blob where + * the contents are read from a source url. The Append Block operation is permitted only if the blob + * was created with x-ms-blob-type set to AppendBlob. Append Block is supported only on version + * 2015-02-21 version or later. + * @param sourceUrl Specify a URL to the copy source. + * @param contentLength The length of the request. + * @param options The options parameters. 
*/ - get objectReplicationSourceProperties() { - return this.originalResponse.objectReplicationSourceProperties; + appendBlockFromUrl(sourceUrl, contentLength, options) { + return this.client.sendOperationRequest({ sourceUrl, contentLength, options }, appendBlockFromUrlOperationSpec); } /** - * If this blob has been sealed. - * - * @readonly + * The Seal operation seals the Append Blob to make it read-only. Seal is supported only on version + * 2019-12-12 version or later. + * @param options The options parameters. */ - get isSealed() { - return this.originalResponse.isSealed; + seal(options) { + return this.client.sendOperationRequest({ options }, sealOperationSpec); } + }; + exports2.AppendBlobImpl = AppendBlobImpl; + var xmlSerializer = coreClient.createSerializer( + Mappers, + /* isXml */ + true + ); + var createOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: Mappers.AppendBlobCreateHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.AppendBlobCreateExceptionHeaders + } + }, + queryParameters: [Parameters.timeoutInSeconds], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.contentLength, + Parameters.metadata, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.encryptionKey, + Parameters.encryptionKeySha256, + Parameters.encryptionAlgorithm, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags, + Parameters.blobCacheControl, + Parameters.blobContentType, + Parameters.blobContentMD5, + Parameters.blobContentEncoding, + Parameters.blobContentLanguage, + Parameters.blobContentDisposition, + Parameters.immutabilityPolicyExpiry, + Parameters.immutabilityPolicyMode, + Parameters.encryptionScope, + Parameters.blobTagsString, + Parameters.legalHold1, + Parameters.blobType1 + ], + isXML: true, + serializer: xmlSerializer + }; + var appendBlockOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: Mappers.AppendBlobAppendBlockHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.AppendBlobAppendBlockExceptionHeaders + } + }, + requestBody: Parameters.body1, + queryParameters: [Parameters.timeoutInSeconds, Parameters.comp22], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.contentLength, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.encryptionKey, + Parameters.encryptionKeySha256, + Parameters.encryptionAlgorithm, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags, + Parameters.encryptionScope, + Parameters.transactionalContentMD5, + Parameters.transactionalContentCrc64, + Parameters.contentType1, + Parameters.accept2, + Parameters.maxSize, + Parameters.appendPosition + ], + isXML: true, + contentType: "application/xml; charset=utf-8", + mediaType: "binary", + serializer: xmlSerializer + }; + var appendBlockFromUrlOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: Mappers.AppendBlobAppendBlockFromUrlHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.AppendBlobAppendBlockFromUrlExceptionHeaders + } + }, + queryParameters: [Parameters.timeoutInSeconds, Parameters.comp22], + urlParameters: [Parameters.url], + headerParameters: [ + 
Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.contentLength, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.encryptionKey, + Parameters.encryptionKeySha256, + Parameters.encryptionAlgorithm, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags, + Parameters.encryptionScope, + Parameters.sourceIfModifiedSince, + Parameters.sourceIfUnmodifiedSince, + Parameters.sourceIfMatch, + Parameters.sourceIfNoneMatch, + Parameters.sourceContentMD5, + Parameters.copySourceAuthorization, + Parameters.fileRequestIntent, + Parameters.transactionalContentMD5, + Parameters.sourceUrl, + Parameters.sourceContentCrc64, + Parameters.maxSize, + Parameters.appendPosition, + Parameters.sourceRange1 + ], + isXML: true, + serializer: xmlSerializer + }; + var sealOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: Mappers.AppendBlobSealHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.AppendBlobSealExceptionHeaders + } + }, + queryParameters: [Parameters.timeoutInSeconds, Parameters.comp23], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.appendPosition + ], + isXML: true, + serializer: xmlSerializer + }; + } +}); + +// node_modules/@azure/storage-blob/dist/commonjs/generated/src/operations/blockBlob.js +var require_blockBlob = __commonJS({ + "node_modules/@azure/storage-blob/dist/commonjs/generated/src/operations/blockBlob.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.BlockBlobImpl = void 0; + var tslib_1 = (init_tslib_es6(), __toCommonJS(tslib_es6_exports)); + var coreClient = tslib_1.__importStar(require_commonjs8()); + var Mappers = tslib_1.__importStar(require_mappers()); + var Parameters = tslib_1.__importStar(require_parameters()); + var BlockBlobImpl = class { + client; /** - * UTC date/time value generated by the service that indicates the time at which the blob immutability policy will expire. - * - * @readonly + * Initialize a new instance of the class BlockBlob class. + * @param client Reference to the service client */ - get immutabilityPolicyExpiresOn() { - return this.originalResponse.immutabilityPolicyExpiresOn; + constructor(client) { + this.client = client; } /** - * Indicates immutability policy mode. - * - * @readonly + * The Upload Block Blob operation updates the content of an existing block blob. Updating an existing + * block blob overwrites any existing metadata on the blob. Partial updates are not supported with Put + * Blob; the content of the existing blob is overwritten with the content of the new blob. To perform a + * partial update of the content of a block blob, use the Put Block List operation. + * @param contentLength The length of the request. + * @param body Initial data + * @param options The options parameters. */ - get immutabilityPolicyMode() { - return this.originalResponse.immutabilityPolicyMode; + upload(contentLength, body, options) { + return this.client.sendOperationRequest({ contentLength, body, options }, uploadOperationSpec); } /** - * Indicates if a legal hold is present on the blob. 
- * - * @readonly + * The Put Blob from URL operation creates a new Block Blob where the contents of the blob are read + * from a given URL. This API is supported beginning with the 2020-04-08 version. Partial updates are + * not supported with Put Blob from URL; the content of an existing blob is overwritten with the + * content of the new blob. To perform partial updates to a block blob’s contents using a source URL, + * use the Put Block from URL API in conjunction with Put Block List. + * @param contentLength The length of the request. + * @param copySource Specifies the name of the source page blob snapshot. This value is a URL of up to + * 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it would + * appear in a request URI. The source blob must either be public or must be authenticated via a shared + * access signature. + * @param options The options parameters. */ - get legalHold() { - return this.originalResponse.legalHold; + putBlobFromUrl(contentLength, copySource, options) { + return this.client.sendOperationRequest({ contentLength, copySource, options }, putBlobFromUrlOperationSpec); } /** - * The response body as a browser Blob. - * Always undefined in node.js. - * - * @readonly + * The Stage Block operation creates a new block to be committed as part of a blob + * @param blockId A valid Base64 string value that identifies the block. Prior to encoding, the string + * must be less than or equal to 64 bytes in size. For a given blob, the length of the value specified + * for the blockid parameter must be the same size for each block. + * @param contentLength The length of the request. + * @param body Initial data + * @param options The options parameters. */ - get contentAsBlob() { - return this.originalResponse.blobBody; + stageBlock(blockId, contentLength, body, options) { + return this.client.sendOperationRequest({ blockId, contentLength, body, options }, stageBlockOperationSpec); } /** - * The response body as a node.js Readable stream. - * Always undefined in the browser. - * - * It will automatically retry when internal read stream unexpected ends. - * - * @readonly + * The Stage Block operation creates a new block to be committed as part of a blob where the contents + * are read from a URL. + * @param blockId A valid Base64 string value that identifies the block. Prior to encoding, the string + * must be less than or equal to 64 bytes in size. For a given blob, the length of the value specified + * for the blockid parameter must be the same size for each block. + * @param contentLength The length of the request. + * @param sourceUrl Specify a URL to the copy source. + * @param options The options parameters. */ - get readableStreamBody() { - return core_util_1.isNodeLike ? this.blobDownloadStream : void 0; + stageBlockFromURL(blockId, contentLength, sourceUrl, options) { + return this.client.sendOperationRequest({ blockId, contentLength, sourceUrl, options }, stageBlockFromURLOperationSpec); } /** - * The HTTP response. + * The Commit Block List operation writes a blob by specifying the list of block IDs that make up the + * blob. In order to be written as part of a blob, a block must have been successfully written to the + * server in a prior Put Block operation. You can call Put Block List to update a blob by uploading + * only those blocks that have changed, then committing the new and existing blocks together. 
You can + * do this by specifying whether to commit a block from the committed block list or from the + * uncommitted block list, or to commit the most recently uploaded version of the block, whichever list + * it may belong to. + * @param blocks Blob Blocks. + * @param options The options parameters. */ - get _response() { - return this.originalResponse._response; + commitBlockList(blocks, options) { + return this.client.sendOperationRequest({ blocks, options }, commitBlockListOperationSpec); } - originalResponse; - blobDownloadStream; /** - * Creates an instance of BlobDownloadResponse. - * - * @param originalResponse - - * @param getter - - * @param offset - - * @param count - - * @param options - + * The Get Block List operation retrieves the list of blocks that have been uploaded as part of a block + * blob + * @param listType Specifies whether to return the list of committed blocks, the list of uncommitted + * blocks, or both lists together. + * @param options The options parameters. */ - constructor(originalResponse, getter, offset, count, options = {}) { - this.originalResponse = originalResponse; - this.blobDownloadStream = new RetriableReadableStream_js_1.RetriableReadableStream(this.originalResponse.readableStreamBody, getter, offset, count, options); + getBlockList(listType, options) { + return this.client.sendOperationRequest({ listType, options }, getBlockListOperationSpec); } }; - exports2.BlobDownloadResponse = BlobDownloadResponse; + exports2.BlockBlobImpl = BlockBlobImpl; + var xmlSerializer = coreClient.createSerializer( + Mappers, + /* isXml */ + true + ); + var uploadOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: Mappers.BlockBlobUploadHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlockBlobUploadExceptionHeaders + } + }, + requestBody: Parameters.body1, + queryParameters: [Parameters.timeoutInSeconds], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.contentLength, + Parameters.metadata, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.encryptionKey, + Parameters.encryptionKeySha256, + Parameters.encryptionAlgorithm, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags, + Parameters.blobCacheControl, + Parameters.blobContentType, + Parameters.blobContentMD5, + Parameters.blobContentEncoding, + Parameters.blobContentLanguage, + Parameters.blobContentDisposition, + Parameters.immutabilityPolicyExpiry, + Parameters.immutabilityPolicyMode, + Parameters.encryptionScope, + Parameters.tier, + Parameters.blobTagsString, + Parameters.legalHold1, + Parameters.transactionalContentMD5, + Parameters.transactionalContentCrc64, + Parameters.contentType1, + Parameters.accept2, + Parameters.blobType2 + ], + isXML: true, + contentType: "application/xml; charset=utf-8", + mediaType: "binary", + serializer: xmlSerializer + }; + var putBlobFromUrlOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: Mappers.BlockBlobPutBlobFromUrlHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlockBlobPutBlobFromUrlExceptionHeaders + } + }, + queryParameters: [Parameters.timeoutInSeconds], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.contentLength, + Parameters.metadata, + 
Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.encryptionKey, + Parameters.encryptionKeySha256, + Parameters.encryptionAlgorithm, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags, + Parameters.blobCacheControl, + Parameters.blobContentType, + Parameters.blobContentMD5, + Parameters.blobContentEncoding, + Parameters.blobContentLanguage, + Parameters.blobContentDisposition, + Parameters.encryptionScope, + Parameters.tier, + Parameters.sourceIfModifiedSince, + Parameters.sourceIfUnmodifiedSince, + Parameters.sourceIfMatch, + Parameters.sourceIfNoneMatch, + Parameters.sourceIfTags, + Parameters.copySource, + Parameters.blobTagsString, + Parameters.sourceContentMD5, + Parameters.copySourceAuthorization, + Parameters.copySourceTags, + Parameters.fileRequestIntent, + Parameters.transactionalContentMD5, + Parameters.blobType2, + Parameters.copySourceBlobProperties + ], + isXML: true, + serializer: xmlSerializer + }; + var stageBlockOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: Mappers.BlockBlobStageBlockHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlockBlobStageBlockExceptionHeaders + } + }, + requestBody: Parameters.body1, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.comp24, + Parameters.blockId + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.contentLength, + Parameters.leaseId, + Parameters.encryptionKey, + Parameters.encryptionKeySha256, + Parameters.encryptionAlgorithm, + Parameters.encryptionScope, + Parameters.transactionalContentMD5, + Parameters.transactionalContentCrc64, + Parameters.contentType1, + Parameters.accept2 + ], + isXML: true, + contentType: "application/xml; charset=utf-8", + mediaType: "binary", + serializer: xmlSerializer + }; + var stageBlockFromURLOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: Mappers.BlockBlobStageBlockFromURLHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlockBlobStageBlockFromURLExceptionHeaders + } + }, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.comp24, + Parameters.blockId + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.contentLength, + Parameters.leaseId, + Parameters.encryptionKey, + Parameters.encryptionKeySha256, + Parameters.encryptionAlgorithm, + Parameters.encryptionScope, + Parameters.sourceIfModifiedSince, + Parameters.sourceIfUnmodifiedSince, + Parameters.sourceIfMatch, + Parameters.sourceIfNoneMatch, + Parameters.sourceContentMD5, + Parameters.copySourceAuthorization, + Parameters.fileRequestIntent, + Parameters.sourceUrl, + Parameters.sourceContentCrc64, + Parameters.sourceRange1 + ], + isXML: true, + serializer: xmlSerializer + }; + var commitBlockListOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: Mappers.BlockBlobCommitBlockListHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlockBlobCommitBlockListExceptionHeaders + } + }, + requestBody: Parameters.blocks, + queryParameters: [Parameters.timeoutInSeconds, Parameters.comp25], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.contentType, + Parameters.accept, + 
Parameters.version, + Parameters.requestId, + Parameters.metadata, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.encryptionKey, + Parameters.encryptionKeySha256, + Parameters.encryptionAlgorithm, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags, + Parameters.blobCacheControl, + Parameters.blobContentType, + Parameters.blobContentMD5, + Parameters.blobContentEncoding, + Parameters.blobContentLanguage, + Parameters.blobContentDisposition, + Parameters.immutabilityPolicyExpiry, + Parameters.immutabilityPolicyMode, + Parameters.encryptionScope, + Parameters.tier, + Parameters.blobTagsString, + Parameters.legalHold1, + Parameters.transactionalContentMD5, + Parameters.transactionalContentCrc64 + ], + isXML: true, + contentType: "application/xml; charset=utf-8", + mediaType: "xml", + serializer: xmlSerializer + }; + var getBlockListOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.BlockList, + headersMapper: Mappers.BlockBlobGetBlockListHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlockBlobGetBlockListExceptionHeaders + } + }, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.snapshot, + Parameters.comp25, + Parameters.listType + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.leaseId, + Parameters.ifTags + ], + isXML: true, + serializer: xmlSerializer + }; } }); -// node_modules/@azure/storage-blob/dist/commonjs/internal-avro/AvroConstants.js -var require_AvroConstants = __commonJS({ - "node_modules/@azure/storage-blob/dist/commonjs/internal-avro/AvroConstants.js"(exports2) { +// node_modules/@azure/storage-blob/dist/commonjs/generated/src/operations/index.js +var require_operations = __commonJS({ + "node_modules/@azure/storage-blob/dist/commonjs/generated/src/operations/index.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.AVRO_SCHEMA_KEY = exports2.AVRO_CODEC_KEY = exports2.AVRO_INIT_BYTES = exports2.AVRO_SYNC_MARKER_SIZE = void 0; - exports2.AVRO_SYNC_MARKER_SIZE = 16; - exports2.AVRO_INIT_BYTES = new Uint8Array([79, 98, 106, 1]); - exports2.AVRO_CODEC_KEY = "avro.codec"; - exports2.AVRO_SCHEMA_KEY = "avro.schema"; + var tslib_1 = (init_tslib_es6(), __toCommonJS(tslib_es6_exports)); + tslib_1.__exportStar(require_service(), exports2); + tslib_1.__exportStar(require_container(), exports2); + tslib_1.__exportStar(require_blob(), exports2); + tslib_1.__exportStar(require_pageBlob(), exports2); + tslib_1.__exportStar(require_appendBlob(), exports2); + tslib_1.__exportStar(require_blockBlob(), exports2); } }); -// node_modules/@azure/storage-blob/dist/commonjs/internal-avro/AvroParser.js -var require_AvroParser = __commonJS({ - "node_modules/@azure/storage-blob/dist/commonjs/internal-avro/AvroParser.js"(exports2) { +// node_modules/@azure/storage-blob/dist/commonjs/generated/src/storageClient.js +var require_storageClient = __commonJS({ + "node_modules/@azure/storage-blob/dist/commonjs/generated/src/storageClient.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.AvroType = exports2.AvroParser = void 0; - var AvroParser = class _AvroParser { - /** - * Reads a fixed number of bytes from the stream. 
- * - * @param stream - - * @param length - - * @param options - - */ - static async readFixedBytes(stream2, length, options = {}) { - const bytes = await stream2.read(length, { abortSignal: options.abortSignal }); - if (bytes.length !== length) { - throw new Error("Hit stream end."); - } - return bytes; - } + exports2.StorageClient = void 0; + var tslib_1 = (init_tslib_es6(), __toCommonJS(tslib_es6_exports)); + var coreHttpCompat = tslib_1.__importStar(require_commonjs9()); + var index_js_1 = require_operations(); + var StorageClient = class extends coreHttpCompat.ExtendedServiceClient { + url; + version; /** - * Reads a single byte from the stream. - * - * @param stream - - * @param options - + * Initializes a new instance of the StorageClient class. + * @param url The URL of the service account, container, or blob that is the target of the desired + * operation. + * @param options The parameter options */ - static async readByte(stream2, options = {}) { - const buf = await _AvroParser.readFixedBytes(stream2, 1, options); - return buf[0]; - } - // int and long are stored in variable-length zig-zag coding. - // variable-length: https://lucene.apache.org/core/3_5_0/fileformats.html#VInt - // zig-zag: https://developers.google.com/protocol-buffers/docs/encoding?csw=1#types - static async readZigZagLong(stream2, options = {}) { - let zigZagEncoded = 0; - let significanceInBit = 0; - let byte, haveMoreByte, significanceInFloat; - do { - byte = await _AvroParser.readByte(stream2, options); - haveMoreByte = byte & 128; - zigZagEncoded |= (byte & 127) << significanceInBit; - significanceInBit += 7; - } while (haveMoreByte && significanceInBit < 28); - if (haveMoreByte) { - zigZagEncoded = zigZagEncoded; - significanceInFloat = 268435456; - do { - byte = await _AvroParser.readByte(stream2, options); - zigZagEncoded += (byte & 127) * significanceInFloat; - significanceInFloat *= 128; - } while (byte & 128); - const res = (zigZagEncoded % 2 ? 
-(zigZagEncoded + 1) : zigZagEncoded) / 2; - if (res < Number.MIN_SAFE_INTEGER || res > Number.MAX_SAFE_INTEGER) { - throw new Error("Integer overflow."); - } - return res; - } - return zigZagEncoded >> 1 ^ -(zigZagEncoded & 1); - } - static async readLong(stream2, options = {}) { - return _AvroParser.readZigZagLong(stream2, options); - } - static async readInt(stream2, options = {}) { - return _AvroParser.readZigZagLong(stream2, options); - } - static async readNull() { - return null; - } - static async readBoolean(stream2, options = {}) { - const b = await _AvroParser.readByte(stream2, options); - if (b === 1) { - return true; - } else if (b === 0) { - return false; - } else { - throw new Error("Byte was not a boolean."); + constructor(url2, options) { + if (url2 === void 0) { + throw new Error("'url' cannot be null"); } - } - static async readFloat(stream2, options = {}) { - const u8arr = await _AvroParser.readFixedBytes(stream2, 4, options); - const view = new DataView(u8arr.buffer, u8arr.byteOffset, u8arr.byteLength); - return view.getFloat32(0, true); - } - static async readDouble(stream2, options = {}) { - const u8arr = await _AvroParser.readFixedBytes(stream2, 8, options); - const view = new DataView(u8arr.buffer, u8arr.byteOffset, u8arr.byteLength); - return view.getFloat64(0, true); - } - static async readBytes(stream2, options = {}) { - const size = await _AvroParser.readLong(stream2, options); - if (size < 0) { - throw new Error("Bytes size was negative."); + if (!options) { + options = {}; } - return stream2.read(size, { abortSignal: options.abortSignal }); - } - static async readString(stream2, options = {}) { - const u8arr = await _AvroParser.readBytes(stream2, options); - const utf8decoder = new TextDecoder(); - return utf8decoder.decode(u8arr); - } - static async readMapPair(stream2, readItemMethod, options = {}) { - const key = await _AvroParser.readString(stream2, options); - const value = await readItemMethod(stream2, options); - return { key, value }; - } - static async readMap(stream2, readItemMethod, options = {}) { - const readPairMethod = (s, opts = {}) => { - return _AvroParser.readMapPair(s, readItemMethod, opts); + const defaults = { + requestContentType: "application/json; charset=utf-8" }; - const pairs2 = await _AvroParser.readArray(stream2, readPairMethod, options); - const dict = {}; - for (const pair of pairs2) { - dict[pair.key] = pair.value; - } - return dict; - } - static async readArray(stream2, readItemMethod, options = {}) { - const items = []; - for (let count = await _AvroParser.readLong(stream2, options); count !== 0; count = await _AvroParser.readLong(stream2, options)) { - if (count < 0) { - await _AvroParser.readLong(stream2, options); - count = -count; - } - while (count--) { - const item = await readItemMethod(stream2, options); - items.push(item); - } - } - return items; - } - }; - exports2.AvroParser = AvroParser; - var AvroComplex; - (function(AvroComplex2) { - AvroComplex2["RECORD"] = "record"; - AvroComplex2["ENUM"] = "enum"; - AvroComplex2["ARRAY"] = "array"; - AvroComplex2["MAP"] = "map"; - AvroComplex2["UNION"] = "union"; - AvroComplex2["FIXED"] = "fixed"; - })(AvroComplex || (AvroComplex = {})); - var AvroPrimitive; - (function(AvroPrimitive2) { - AvroPrimitive2["NULL"] = "null"; - AvroPrimitive2["BOOLEAN"] = "boolean"; - AvroPrimitive2["INT"] = "int"; - AvroPrimitive2["LONG"] = "long"; - AvroPrimitive2["FLOAT"] = "float"; - AvroPrimitive2["DOUBLE"] = "double"; - AvroPrimitive2["BYTES"] = "bytes"; - AvroPrimitive2["STRING"] = 
"string"; - })(AvroPrimitive || (AvroPrimitive = {})); - var AvroType = class _AvroType { - /** - * Determines the AvroType from the Avro Schema. - */ - // eslint-disable-next-line @typescript-eslint/no-wrapper-object-types - static fromSchema(schema2) { - if (typeof schema2 === "string") { - return _AvroType.fromStringSchema(schema2); - } else if (Array.isArray(schema2)) { - return _AvroType.fromArraySchema(schema2); - } else { - return _AvroType.fromObjectSchema(schema2); - } - } - static fromStringSchema(schema2) { - switch (schema2) { - case AvroPrimitive.NULL: - case AvroPrimitive.BOOLEAN: - case AvroPrimitive.INT: - case AvroPrimitive.LONG: - case AvroPrimitive.FLOAT: - case AvroPrimitive.DOUBLE: - case AvroPrimitive.BYTES: - case AvroPrimitive.STRING: - return new AvroPrimitiveType(schema2); - default: - throw new Error(`Unexpected Avro type ${schema2}`); - } - } - static fromArraySchema(schema2) { - return new AvroUnionType(schema2.map(_AvroType.fromSchema)); - } - static fromObjectSchema(schema2) { - const type2 = schema2.type; - try { - return _AvroType.fromStringSchema(type2); - } catch { - } - switch (type2) { - case AvroComplex.RECORD: - if (schema2.aliases) { - throw new Error(`aliases currently is not supported, schema: ${schema2}`); - } - if (!schema2.name) { - throw new Error(`Required attribute 'name' doesn't exist on schema: ${schema2}`); - } - const fields = {}; - if (!schema2.fields) { - throw new Error(`Required attribute 'fields' doesn't exist on schema: ${schema2}`); - } - for (const field of schema2.fields) { - fields[field.name] = _AvroType.fromSchema(field.type); - } - return new AvroRecordType(fields, schema2.name); - case AvroComplex.ENUM: - if (schema2.aliases) { - throw new Error(`aliases currently is not supported, schema: ${schema2}`); - } - if (!schema2.symbols) { - throw new Error(`Required attribute 'symbols' doesn't exist on schema: ${schema2}`); - } - return new AvroEnumType(schema2.symbols); - case AvroComplex.MAP: - if (!schema2.values) { - throw new Error(`Required attribute 'values' doesn't exist on schema: ${schema2}`); - } - return new AvroMapType(_AvroType.fromSchema(schema2.values)); - case AvroComplex.ARRAY: - // Unused today - case AvroComplex.FIXED: - // Unused today - default: - throw new Error(`Unexpected Avro type ${type2} in ${schema2}`); - } - } - }; - exports2.AvroType = AvroType; - var AvroPrimitiveType = class extends AvroType { - _primitive; - constructor(primitive) { - super(); - this._primitive = primitive; - } - // eslint-disable-next-line @typescript-eslint/no-wrapper-object-types - read(stream2, options = {}) { - switch (this._primitive) { - case AvroPrimitive.NULL: - return AvroParser.readNull(); - case AvroPrimitive.BOOLEAN: - return AvroParser.readBoolean(stream2, options); - case AvroPrimitive.INT: - return AvroParser.readInt(stream2, options); - case AvroPrimitive.LONG: - return AvroParser.readLong(stream2, options); - case AvroPrimitive.FLOAT: - return AvroParser.readFloat(stream2, options); - case AvroPrimitive.DOUBLE: - return AvroParser.readDouble(stream2, options); - case AvroPrimitive.BYTES: - return AvroParser.readBytes(stream2, options); - case AvroPrimitive.STRING: - return AvroParser.readString(stream2, options); - default: - throw new Error("Unknown Avro Primitive"); - } - } - }; - var AvroEnumType = class extends AvroType { - _symbols; - constructor(symbols) { - super(); - this._symbols = symbols; - } - // eslint-disable-next-line @typescript-eslint/no-wrapper-object-types - async read(stream2, options = {}) { 
- const value = await AvroParser.readInt(stream2, options); - return this._symbols[value]; - } - }; - var AvroUnionType = class extends AvroType { - _types; - constructor(types) { - super(); - this._types = types; - } - async read(stream2, options = {}) { - const typeIndex = await AvroParser.readInt(stream2, options); - return this._types[typeIndex].read(stream2, options); - } - }; - var AvroMapType = class extends AvroType { - _itemType; - constructor(itemType) { - super(); - this._itemType = itemType; - } - // eslint-disable-next-line @typescript-eslint/no-wrapper-object-types - read(stream2, options = {}) { - const readItemMethod = (s, opts) => { - return this._itemType.read(s, opts); + const packageDetails = `azsdk-js-azure-storage-blob/12.29.1`; + const userAgentPrefix = options.userAgentOptions && options.userAgentOptions.userAgentPrefix ? `${options.userAgentOptions.userAgentPrefix} ${packageDetails}` : `${packageDetails}`; + const optionsWithDefaults = { + ...defaults, + ...options, + userAgentOptions: { + userAgentPrefix + }, + endpoint: options.endpoint ?? options.baseUri ?? "{url}" }; - return AvroParser.readMap(stream2, readItemMethod, options); - } - }; - var AvroRecordType = class extends AvroType { - _name; - _fields; - constructor(fields, name) { - super(); - this._fields = fields; - this._name = name; - } - // eslint-disable-next-line @typescript-eslint/no-wrapper-object-types - async read(stream2, options = {}) { - const record = {}; - record["$schema"] = this._name; - for (const key in this._fields) { - if (Object.prototype.hasOwnProperty.call(this._fields, key)) { - record[key] = await this._fields[key].read(stream2, options); - } - } - return record; + super(optionsWithDefaults); + this.url = url2; + this.version = options.version || "2025-11-05"; + this.service = new index_js_1.ServiceImpl(this); + this.container = new index_js_1.ContainerImpl(this); + this.blob = new index_js_1.BlobImpl(this); + this.pageBlob = new index_js_1.PageBlobImpl(this); + this.appendBlob = new index_js_1.AppendBlobImpl(this); + this.blockBlob = new index_js_1.BlockBlobImpl(this); } + service; + container; + blob; + pageBlob; + appendBlob; + blockBlob; }; + exports2.StorageClient = StorageClient; + } +}); + +// node_modules/@azure/storage-blob/dist/commonjs/generated/src/operationsInterfaces/service.js +var require_service2 = __commonJS({ + "node_modules/@azure/storage-blob/dist/commonjs/generated/src/operationsInterfaces/service.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); } }); -// node_modules/@azure/storage-blob/dist/commonjs/internal-avro/utils/utils.common.js -var require_utils_common3 = __commonJS({ - "node_modules/@azure/storage-blob/dist/commonjs/internal-avro/utils/utils.common.js"(exports2) { +// node_modules/@azure/storage-blob/dist/commonjs/generated/src/operationsInterfaces/container.js +var require_container2 = __commonJS({ + "node_modules/@azure/storage-blob/dist/commonjs/generated/src/operationsInterfaces/container.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.arraysEqual = arraysEqual; - function arraysEqual(a, b) { - if (a === b) - return true; - if (a == null || b == null) - return false; - if (a.length !== b.length) - return false; - for (let i = 0; i < a.length; ++i) { - if (a[i] !== b[i]) - return false; - } - return true; - } } }); -// node_modules/@azure/storage-blob/dist/commonjs/internal-avro/AvroReader.js -var require_AvroReader = __commonJS({ - 
"node_modules/@azure/storage-blob/dist/commonjs/internal-avro/AvroReader.js"(exports2) { +// node_modules/@azure/storage-blob/dist/commonjs/generated/src/operationsInterfaces/blob.js +var require_blob2 = __commonJS({ + "node_modules/@azure/storage-blob/dist/commonjs/generated/src/operationsInterfaces/blob.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.AvroReader = void 0; - var AvroConstants_js_1 = require_AvroConstants(); - var AvroParser_js_1 = require_AvroParser(); - var utils_common_js_1 = require_utils_common3(); - var AvroReader = class { - _dataStream; - _headerStream; - _syncMarker; - _metadata; - _itemType; - _itemsRemainingInBlock; - // Remembers where we started if partial data stream was provided. - _initialBlockOffset; - /// The byte offset within the Avro file (both header and data) - /// of the start of the current block. - _blockOffset; - get blockOffset() { - return this._blockOffset; - } - _objectIndex; - get objectIndex() { - return this._objectIndex; - } - _initialized; - constructor(dataStream, headerStream, currentBlockOffset, indexWithinCurrentBlock) { - this._dataStream = dataStream; - this._headerStream = headerStream || dataStream; - this._initialized = false; - this._blockOffset = currentBlockOffset || 0; - this._objectIndex = indexWithinCurrentBlock || 0; - this._initialBlockOffset = currentBlockOffset || 0; - } - async initialize(options = {}) { - const header = await AvroParser_js_1.AvroParser.readFixedBytes(this._headerStream, AvroConstants_js_1.AVRO_INIT_BYTES.length, { - abortSignal: options.abortSignal - }); - if (!(0, utils_common_js_1.arraysEqual)(header, AvroConstants_js_1.AVRO_INIT_BYTES)) { - throw new Error("Stream is not an Avro file."); - } - this._metadata = await AvroParser_js_1.AvroParser.readMap(this._headerStream, AvroParser_js_1.AvroParser.readString, { - abortSignal: options.abortSignal - }); - const codec = this._metadata[AvroConstants_js_1.AVRO_CODEC_KEY]; - if (!(codec === void 0 || codec === null || codec === "null")) { - throw new Error("Codecs are not supported"); - } - this._syncMarker = await AvroParser_js_1.AvroParser.readFixedBytes(this._headerStream, AvroConstants_js_1.AVRO_SYNC_MARKER_SIZE, { - abortSignal: options.abortSignal - }); - const schema2 = JSON.parse(this._metadata[AvroConstants_js_1.AVRO_SCHEMA_KEY]); - this._itemType = AvroParser_js_1.AvroType.fromSchema(schema2); - if (this._blockOffset === 0) { - this._blockOffset = this._initialBlockOffset + this._dataStream.position; - } - this._itemsRemainingInBlock = await AvroParser_js_1.AvroParser.readLong(this._dataStream, { - abortSignal: options.abortSignal - }); - await AvroParser_js_1.AvroParser.readLong(this._dataStream, { abortSignal: options.abortSignal }); - this._initialized = true; - if (this._objectIndex && this._objectIndex > 0) { - for (let i = 0; i < this._objectIndex; i++) { - await this._itemType.read(this._dataStream, { abortSignal: options.abortSignal }); - this._itemsRemainingInBlock--; - } - } - } - hasNext() { - return !this._initialized || this._itemsRemainingInBlock > 0; - } - async *parseObjects(options = {}) { - if (!this._initialized) { - await this.initialize(options); - } - while (this.hasNext()) { - const result = await this._itemType.read(this._dataStream, { - abortSignal: options.abortSignal - }); - this._itemsRemainingInBlock--; - this._objectIndex++; - if (this._itemsRemainingInBlock === 0) { - const marker = await AvroParser_js_1.AvroParser.readFixedBytes(this._dataStream, 
AvroConstants_js_1.AVRO_SYNC_MARKER_SIZE, { - abortSignal: options.abortSignal - }); - this._blockOffset = this._initialBlockOffset + this._dataStream.position; - this._objectIndex = 0; - if (!(0, utils_common_js_1.arraysEqual)(this._syncMarker, marker)) { - throw new Error("Stream is not a valid Avro file."); - } - try { - this._itemsRemainingInBlock = await AvroParser_js_1.AvroParser.readLong(this._dataStream, { - abortSignal: options.abortSignal - }); - } catch { - this._itemsRemainingInBlock = 0; - } - if (this._itemsRemainingInBlock > 0) { - await AvroParser_js_1.AvroParser.readLong(this._dataStream, { abortSignal: options.abortSignal }); - } - } - yield result; + } +}); + +// node_modules/@azure/storage-blob/dist/commonjs/generated/src/operationsInterfaces/pageBlob.js +var require_pageBlob2 = __commonJS({ + "node_modules/@azure/storage-blob/dist/commonjs/generated/src/operationsInterfaces/pageBlob.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + } +}); + +// node_modules/@azure/storage-blob/dist/commonjs/generated/src/operationsInterfaces/appendBlob.js +var require_appendBlob2 = __commonJS({ + "node_modules/@azure/storage-blob/dist/commonjs/generated/src/operationsInterfaces/appendBlob.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + } +}); + +// node_modules/@azure/storage-blob/dist/commonjs/generated/src/operationsInterfaces/blockBlob.js +var require_blockBlob2 = __commonJS({ + "node_modules/@azure/storage-blob/dist/commonjs/generated/src/operationsInterfaces/blockBlob.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + } +}); + +// node_modules/@azure/storage-blob/dist/commonjs/generated/src/operationsInterfaces/index.js +var require_operationsInterfaces = __commonJS({ + "node_modules/@azure/storage-blob/dist/commonjs/generated/src/operationsInterfaces/index.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + var tslib_1 = (init_tslib_es6(), __toCommonJS(tslib_es6_exports)); + tslib_1.__exportStar(require_service2(), exports2); + tslib_1.__exportStar(require_container2(), exports2); + tslib_1.__exportStar(require_blob2(), exports2); + tslib_1.__exportStar(require_pageBlob2(), exports2); + tslib_1.__exportStar(require_appendBlob2(), exports2); + tslib_1.__exportStar(require_blockBlob2(), exports2); + } +}); + +// node_modules/@azure/storage-blob/dist/commonjs/generated/src/index.js +var require_src2 = __commonJS({ + "node_modules/@azure/storage-blob/dist/commonjs/generated/src/index.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.StorageClient = void 0; + var tslib_1 = (init_tslib_es6(), __toCommonJS(tslib_es6_exports)); + tslib_1.__exportStar(require_models(), exports2); + var storageClient_js_1 = require_storageClient(); + Object.defineProperty(exports2, "StorageClient", { enumerable: true, get: function() { + return storageClient_js_1.StorageClient; + } }); + tslib_1.__exportStar(require_operationsInterfaces(), exports2); + } +}); + +// node_modules/@azure/storage-blob/dist/commonjs/StorageContextClient.js +var require_StorageContextClient = __commonJS({ + "node_modules/@azure/storage-blob/dist/commonjs/StorageContextClient.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.StorageContextClient = void 0; + var index_js_1 = require_src2(); + var StorageContextClient = 
class extends index_js_1.StorageClient { + async sendOperationRequest(operationArguments, operationSpec) { + const operationSpecToSend = { ...operationSpec }; + if (operationSpecToSend.path === "/{containerName}" || operationSpecToSend.path === "/{containerName}/{blob}") { + operationSpecToSend.path = ""; } + return super.sendOperationRequest(operationArguments, operationSpecToSend); } }; - exports2.AvroReader = AvroReader; + exports2.StorageContextClient = StorageContextClient; } }); -// node_modules/@azure/storage-blob/dist/commonjs/internal-avro/AvroReadable.js -var require_AvroReadable = __commonJS({ - "node_modules/@azure/storage-blob/dist/commonjs/internal-avro/AvroReadable.js"(exports2) { +// node_modules/@azure/storage-blob/dist/commonjs/StorageClient.js +var require_StorageClient = __commonJS({ + "node_modules/@azure/storage-blob/dist/commonjs/StorageClient.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.AvroReadable = void 0; - var AvroReadable = class { + exports2.StorageClient = void 0; + var StorageContextClient_js_1 = require_StorageContextClient(); + var Pipeline_js_1 = require_Pipeline(); + var utils_common_js_1 = require_utils_common(); + var StorageClient = class { + /** + * Encoded URL string value. + */ + url; + accountName; + /** + * Request policy pipeline. + * + * @internal + */ + pipeline; + /** + * Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used. + */ + credential; + /** + * StorageClient is a reference to protocol layer operations entry, which is + * generated by AutoRest generator. + */ + storageClientContext; + /** + */ + isHttps; + /** + * Creates an instance of StorageClient. + * @param url - url to resource + * @param pipeline - request policy pipeline. 
+ */ + constructor(url2, pipeline) { + this.url = (0, utils_common_js_1.escapeURLPath)(url2); + this.accountName = (0, utils_common_js_1.getAccountNameFromUrl)(url2); + this.pipeline = pipeline; + this.storageClientContext = new StorageContextClient_js_1.StorageContextClient(this.url, (0, Pipeline_js_1.getCoreClientOptions)(pipeline)); + this.isHttps = (0, utils_common_js_1.iEqual)((0, utils_common_js_1.getURLScheme)(this.url) || "", "https"); + this.credential = (0, Pipeline_js_1.getCredentialFromPipeline)(pipeline); + const storageClientContext = this.storageClientContext; + storageClientContext.requestContentType = void 0; + } }; - exports2.AvroReadable = AvroReadable; + exports2.StorageClient = StorageClient; } }); -// node_modules/@azure/storage-blob/dist/commonjs/internal-avro/AvroReadableFromStream.js -var require_AvroReadableFromStream = __commonJS({ - "node_modules/@azure/storage-blob/dist/commonjs/internal-avro/AvroReadableFromStream.js"(exports2) { +// node_modules/@azure/storage-blob/dist/commonjs/utils/tracing.js +var require_tracing = __commonJS({ + "node_modules/@azure/storage-blob/dist/commonjs/utils/tracing.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.AvroReadableFromStream = void 0; - var AvroReadable_js_1 = require_AvroReadable(); - var abort_controller_1 = require_commonjs11(); - var buffer_1 = require("buffer"); - var ABORT_ERROR = new abort_controller_1.AbortError("Reading from the avro stream was aborted."); - var AvroReadableFromStream = class extends AvroReadable_js_1.AvroReadable { - _position; - _readable; - toUint8Array(data) { - if (typeof data === "string") { - return buffer_1.Buffer.from(data); + exports2.tracingClient = void 0; + var core_tracing_1 = require_commonjs5(); + var constants_js_1 = require_constants15(); + exports2.tracingClient = (0, core_tracing_1.createTracingClient)({ + packageName: "@azure/storage-blob", + packageVersion: constants_js_1.SDK_VERSION, + namespace: "Microsoft.Storage" + }); + } +}); + +// node_modules/@azure/storage-blob/dist/commonjs/sas/BlobSASPermissions.js +var require_BlobSASPermissions = __commonJS({ + "node_modules/@azure/storage-blob/dist/commonjs/sas/BlobSASPermissions.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.BlobSASPermissions = void 0; + var BlobSASPermissions = class _BlobSASPermissions { + /** + * Creates a {@link BlobSASPermissions} from the specified permissions string. This method will throw an + * Error if it encounters a character that does not correspond to a valid permission. 
+ * + * @param permissions - + */ + static parse(permissions) { + const blobSASPermissions = new _BlobSASPermissions(); + for (const char of permissions) { + switch (char) { + case "r": + blobSASPermissions.read = true; + break; + case "a": + blobSASPermissions.add = true; + break; + case "c": + blobSASPermissions.create = true; + break; + case "w": + blobSASPermissions.write = true; + break; + case "d": + blobSASPermissions.delete = true; + break; + case "x": + blobSASPermissions.deleteVersion = true; + break; + case "t": + blobSASPermissions.tag = true; + break; + case "m": + blobSASPermissions.move = true; + break; + case "e": + blobSASPermissions.execute = true; + break; + case "i": + blobSASPermissions.setImmutabilityPolicy = true; + break; + case "y": + blobSASPermissions.permanentDelete = true; + break; + default: + throw new RangeError(`Invalid permission: ${char}`); + } } - return data; - } - constructor(readable) { - super(); - this._readable = readable; - this._position = 0; + return blobSASPermissions; } - get position() { - return this._position; + /** + * Creates a {@link BlobSASPermissions} from a raw object which contains same keys as it + * and boolean values for them. + * + * @param permissionLike - + */ + static from(permissionLike) { + const blobSASPermissions = new _BlobSASPermissions(); + if (permissionLike.read) { + blobSASPermissions.read = true; + } + if (permissionLike.add) { + blobSASPermissions.add = true; + } + if (permissionLike.create) { + blobSASPermissions.create = true; + } + if (permissionLike.write) { + blobSASPermissions.write = true; + } + if (permissionLike.delete) { + blobSASPermissions.delete = true; + } + if (permissionLike.deleteVersion) { + blobSASPermissions.deleteVersion = true; + } + if (permissionLike.tag) { + blobSASPermissions.tag = true; + } + if (permissionLike.move) { + blobSASPermissions.move = true; + } + if (permissionLike.execute) { + blobSASPermissions.execute = true; + } + if (permissionLike.setImmutabilityPolicy) { + blobSASPermissions.setImmutabilityPolicy = true; + } + if (permissionLike.permanentDelete) { + blobSASPermissions.permanentDelete = true; + } + return blobSASPermissions; } - async read(size, options = {}) { - if (options.abortSignal?.aborted) { - throw ABORT_ERROR; + /** + * Specifies Read access granted. + */ + read = false; + /** + * Specifies Add access granted. + */ + add = false; + /** + * Specifies Create access granted. + */ + create = false; + /** + * Specifies Write access granted. + */ + write = false; + /** + * Specifies Delete access granted. + */ + delete = false; + /** + * Specifies Delete version access granted. + */ + deleteVersion = false; + /** + * Specfies Tag access granted. + */ + tag = false; + /** + * Specifies Move access granted. + */ + move = false; + /** + * Specifies Execute access granted. + */ + execute = false; + /** + * Specifies SetImmutabilityPolicy access granted. + */ + setImmutabilityPolicy = false; + /** + * Specifies that Permanent Delete is permitted. + */ + permanentDelete = false; + /** + * Converts the given permissions to a string. Using this method will guarantee the permissions are in an + * order accepted by the service. 
+ * + * @returns A string which represents the BlobSASPermissions + */ + toString() { + const permissions = []; + if (this.read) { + permissions.push("r"); } - if (size < 0) { - throw new Error(`size parameter should be positive: ${size}`); + if (this.add) { + permissions.push("a"); } - if (size === 0) { - return new Uint8Array(); + if (this.create) { + permissions.push("c"); } - if (!this._readable.readable) { - throw new Error("Stream no longer readable."); + if (this.write) { + permissions.push("w"); } - const chunk = this._readable.read(size); - if (chunk) { - this._position += chunk.length; - return this.toUint8Array(chunk); - } else { - return new Promise((resolve6, reject) => { - const cleanUp = () => { - this._readable.removeListener("readable", readableCallback); - this._readable.removeListener("error", rejectCallback); - this._readable.removeListener("end", rejectCallback); - this._readable.removeListener("close", rejectCallback); - if (options.abortSignal) { - options.abortSignal.removeEventListener("abort", abortHandler); - } - }; - const readableCallback = () => { - const callbackChunk = this._readable.read(size); - if (callbackChunk) { - this._position += callbackChunk.length; - cleanUp(); - resolve6(this.toUint8Array(callbackChunk)); - } - }; - const rejectCallback = () => { - cleanUp(); - reject(); - }; - const abortHandler = () => { - cleanUp(); - reject(ABORT_ERROR); - }; - this._readable.on("readable", readableCallback); - this._readable.once("error", rejectCallback); - this._readable.once("end", rejectCallback); - this._readable.once("close", rejectCallback); - if (options.abortSignal) { - options.abortSignal.addEventListener("abort", abortHandler); - } - }); + if (this.delete) { + permissions.push("d"); + } + if (this.deleteVersion) { + permissions.push("x"); + } + if (this.tag) { + permissions.push("t"); + } + if (this.move) { + permissions.push("m"); + } + if (this.execute) { + permissions.push("e"); + } + if (this.setImmutabilityPolicy) { + permissions.push("i"); + } + if (this.permanentDelete) { + permissions.push("y"); } + return permissions.join(""); } }; - exports2.AvroReadableFromStream = AvroReadableFromStream; - } -}); - -// node_modules/@azure/storage-blob/dist/commonjs/internal-avro/index.js -var require_internal_avro = __commonJS({ - "node_modules/@azure/storage-blob/dist/commonjs/internal-avro/index.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.AvroReadableFromStream = exports2.AvroReadable = exports2.AvroReader = void 0; - var AvroReader_js_1 = require_AvroReader(); - Object.defineProperty(exports2, "AvroReader", { enumerable: true, get: function() { - return AvroReader_js_1.AvroReader; - } }); - var AvroReadable_js_1 = require_AvroReadable(); - Object.defineProperty(exports2, "AvroReadable", { enumerable: true, get: function() { - return AvroReadable_js_1.AvroReadable; - } }); - var AvroReadableFromStream_js_1 = require_AvroReadableFromStream(); - Object.defineProperty(exports2, "AvroReadableFromStream", { enumerable: true, get: function() { - return AvroReadableFromStream_js_1.AvroReadableFromStream; - } }); + exports2.BlobSASPermissions = BlobSASPermissions; } }); -// node_modules/@azure/storage-blob/dist/commonjs/utils/BlobQuickQueryStream.js -var require_BlobQuickQueryStream = __commonJS({ - "node_modules/@azure/storage-blob/dist/commonjs/utils/BlobQuickQueryStream.js"(exports2) { +// node_modules/@azure/storage-blob/dist/commonjs/sas/ContainerSASPermissions.js +var 
require_ContainerSASPermissions = __commonJS({ + "node_modules/@azure/storage-blob/dist/commonjs/sas/ContainerSASPermissions.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.BlobQuickQueryStream = void 0; - var node_stream_1 = require("node:stream"); - var index_js_1 = require_internal_avro(); - var BlobQuickQueryStream = class extends node_stream_1.Readable { - source; - avroReader; - avroIter; - avroPaused = true; - onProgress; - onError; + exports2.ContainerSASPermissions = void 0; + var ContainerSASPermissions = class _ContainerSASPermissions { /** - * Creates an instance of BlobQuickQueryStream. + * Creates an {@link ContainerSASPermissions} from the specified permissions string. This method will throw an + * Error if it encounters a character that does not correspond to a valid permission. * - * @param source - The current ReadableStream returned from getter - * @param options - + * @param permissions - */ - constructor(source, options = {}) { - super(); - this.source = source; - this.onProgress = options.onProgress; - this.onError = options.onError; - this.avroReader = new index_js_1.AvroReader(new index_js_1.AvroReadableFromStream(this.source)); - this.avroIter = this.avroReader.parseObjects({ abortSignal: options.abortSignal }); - } - _read() { - if (this.avroPaused) { - this.readInternal().catch((err) => { - this.emit("error", err); - }); - } - } - async readInternal() { - this.avroPaused = false; - let avroNext; - do { - avroNext = await this.avroIter.next(); - if (avroNext.done) { - break; - } - const obj = avroNext.value; - const schema2 = obj.$schema; - if (typeof schema2 !== "string") { - throw Error("Missing schema in avro record."); - } - switch (schema2) { - case "com.microsoft.azure.storage.queryBlobContents.resultData": - { - const data = obj.data; - if (data instanceof Uint8Array === false) { - throw Error("Invalid data in avro result record."); - } - if (!this.push(Buffer.from(data))) { - this.avroPaused = true; - } - } + static parse(permissions) { + const containerSASPermissions = new _ContainerSASPermissions(); + for (const char of permissions) { + switch (char) { + case "r": + containerSASPermissions.read = true; break; - case "com.microsoft.azure.storage.queryBlobContents.progress": - { - const bytesScanned = obj.bytesScanned; - if (typeof bytesScanned !== "number") { - throw Error("Invalid bytesScanned in avro progress record."); - } - if (this.onProgress) { - this.onProgress({ loadedBytes: bytesScanned }); - } - } + case "a": + containerSASPermissions.add = true; break; - case "com.microsoft.azure.storage.queryBlobContents.end": - if (this.onProgress) { - const totalBytes = obj.totalBytes; - if (typeof totalBytes !== "number") { - throw Error("Invalid totalBytes in avro end record."); - } - this.onProgress({ loadedBytes: totalBytes }); - } - this.push(null); + case "c": + containerSASPermissions.create = true; break; - case "com.microsoft.azure.storage.queryBlobContents.error": - if (this.onError) { - const fatal = obj.fatal; - if (typeof fatal !== "boolean") { - throw Error("Invalid fatal in avro error record."); - } - const name = obj.name; - if (typeof name !== "string") { - throw Error("Invalid name in avro error record."); - } - const description = obj.description; - if (typeof description !== "string") { - throw Error("Invalid description in avro error record."); - } - const position = obj.position; - if (typeof position !== "number") { - throw Error("Invalid position in avro error record."); - } - 
this.onError({ - position, - name, - isFatal: fatal, - description - }); - } + case "w": + containerSASPermissions.write = true; + break; + case "d": + containerSASPermissions.delete = true; + break; + case "l": + containerSASPermissions.list = true; + break; + case "t": + containerSASPermissions.tag = true; + break; + case "x": + containerSASPermissions.deleteVersion = true; + break; + case "m": + containerSASPermissions.move = true; + break; + case "e": + containerSASPermissions.execute = true; + break; + case "i": + containerSASPermissions.setImmutabilityPolicy = true; + break; + case "y": + containerSASPermissions.permanentDelete = true; + break; + case "f": + containerSASPermissions.filterByTags = true; break; default: - throw Error(`Unknown schema ${schema2} in avro progress record.`); + throw new RangeError(`Invalid permission ${char}`); } - } while (!avroNext.done && !this.avroPaused); + } + return containerSASPermissions; } - }; - exports2.BlobQuickQueryStream = BlobQuickQueryStream; - } -}); - -// node_modules/@azure/storage-blob/dist/commonjs/BlobQueryResponse.js -var require_BlobQueryResponse = __commonJS({ - "node_modules/@azure/storage-blob/dist/commonjs/BlobQueryResponse.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.BlobQueryResponse = void 0; - var core_util_1 = require_commonjs4(); - var BlobQuickQueryStream_js_1 = require_BlobQuickQueryStream(); - var BlobQueryResponse = class { /** - * Indicates that the service supports - * requests for partial file content. + * Creates a {@link ContainerSASPermissions} from a raw object which contains same keys as it + * and boolean values for them. * - * @readonly + * @param permissionLike - */ - get acceptRanges() { - return this.originalResponse.acceptRanges; + static from(permissionLike) { + const containerSASPermissions = new _ContainerSASPermissions(); + if (permissionLike.read) { + containerSASPermissions.read = true; + } + if (permissionLike.add) { + containerSASPermissions.add = true; + } + if (permissionLike.create) { + containerSASPermissions.create = true; + } + if (permissionLike.write) { + containerSASPermissions.write = true; + } + if (permissionLike.delete) { + containerSASPermissions.delete = true; + } + if (permissionLike.list) { + containerSASPermissions.list = true; + } + if (permissionLike.deleteVersion) { + containerSASPermissions.deleteVersion = true; + } + if (permissionLike.tag) { + containerSASPermissions.tag = true; + } + if (permissionLike.move) { + containerSASPermissions.move = true; + } + if (permissionLike.execute) { + containerSASPermissions.execute = true; + } + if (permissionLike.setImmutabilityPolicy) { + containerSASPermissions.setImmutabilityPolicy = true; + } + if (permissionLike.permanentDelete) { + containerSASPermissions.permanentDelete = true; + } + if (permissionLike.filterByTags) { + containerSASPermissions.filterByTags = true; + } + return containerSASPermissions; } /** - * Returns if it was previously specified - * for the file. - * - * @readonly + * Specifies Read access granted. */ - get cacheControl() { - return this.originalResponse.cacheControl; - } + read = false; /** - * Returns the value that was specified - * for the 'x-ms-content-disposition' header and specifies how to process the - * response. - * - * @readonly + * Specifies Add access granted. 
*/ - get contentDisposition() { - return this.originalResponse.contentDisposition; - } + add = false; /** - * Returns the value that was specified - * for the Content-Encoding request header. - * - * @readonly + * Specifies Create access granted. */ - get contentEncoding() { - return this.originalResponse.contentEncoding; - } + create = false; /** - * Returns the value that was specified - * for the Content-Language request header. - * - * @readonly + * Specifies Write access granted. */ - get contentLanguage() { - return this.originalResponse.contentLanguage; - } + write = false; /** - * The current sequence number for a - * page blob. This header is not returned for block blobs or append blobs. - * - * @readonly + * Specifies Delete access granted. */ - get blobSequenceNumber() { - return this.originalResponse.blobSequenceNumber; - } + delete = false; /** - * The blob's type. Possible values include: - * 'BlockBlob', 'PageBlob', 'AppendBlob'. - * - * @readonly + * Specifies Delete version access granted. */ - get blobType() { - return this.originalResponse.blobType; - } + deleteVersion = false; /** - * The number of bytes present in the - * response body. - * - * @readonly + * Specifies List access granted. */ - get contentLength() { - return this.originalResponse.contentLength; - } + list = false; /** - * If the file has an MD5 hash and the - * request is to read the full file, this response header is returned so that - * the client can check for message content integrity. If the request is to - * read a specified range and the 'x-ms-range-get-content-md5' is set to - * true, then the request returns an MD5 hash for the range, as long as the - * range size is less than or equal to 4 MB. If neither of these sets of - * conditions is true, then no value is returned for the 'Content-MD5' - * header. - * - * @readonly + * Specfies Tag access granted. */ - get contentMD5() { - return this.originalResponse.contentMD5; - } + tag = false; /** - * Indicates the range of bytes returned if - * the client requested a subset of the file by setting the Range request - * header. - * - * @readonly + * Specifies Move access granted. */ - get contentRange() { - return this.originalResponse.contentRange; - } + move = false; /** - * The content type specified for the file. - * The default content type is 'application/octet-stream' - * - * @readonly + * Specifies Execute access granted. */ - get contentType() { - return this.originalResponse.contentType; - } + execute = false; /** - * Conclusion time of the last attempted - * Copy File operation where this file was the destination file. This value - * can specify the time of a completed, aborted, or failed copy attempt. + * Specifies SetImmutabilityPolicy access granted. + */ + setImmutabilityPolicy = false; + /** + * Specifies that Permanent Delete is permitted. + */ + permanentDelete = false; + /** + * Specifies that Filter Blobs by Tags is permitted. + */ + filterByTags = false; + /** + * Converts the given permissions to a string. Using this method will guarantee the permissions are in an + * order accepted by the service. + * + * The order of the characters should be as specified here to ensure correctness. 
+ * @see https://learn.microsoft.com/rest/api/storageservices/constructing-a-service-sas * - * @readonly */ - get copyCompletedOn() { - return void 0; + toString() { + const permissions = []; + if (this.read) { + permissions.push("r"); + } + if (this.add) { + permissions.push("a"); + } + if (this.create) { + permissions.push("c"); + } + if (this.write) { + permissions.push("w"); + } + if (this.delete) { + permissions.push("d"); + } + if (this.deleteVersion) { + permissions.push("x"); + } + if (this.list) { + permissions.push("l"); + } + if (this.tag) { + permissions.push("t"); + } + if (this.move) { + permissions.push("m"); + } + if (this.execute) { + permissions.push("e"); + } + if (this.setImmutabilityPolicy) { + permissions.push("i"); + } + if (this.permanentDelete) { + permissions.push("y"); + } + if (this.filterByTags) { + permissions.push("f"); + } + return permissions.join(""); } + }; + exports2.ContainerSASPermissions = ContainerSASPermissions; + } +}); + +// node_modules/@azure/storage-blob/dist/commonjs/credentials/UserDelegationKeyCredential.js +var require_UserDelegationKeyCredential = __commonJS({ + "node_modules/@azure/storage-blob/dist/commonjs/credentials/UserDelegationKeyCredential.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.UserDelegationKeyCredential = void 0; + var node_crypto_1 = require("node:crypto"); + var UserDelegationKeyCredential = class { /** - * String identifier for the last attempted Copy - * File operation where this file was the destination file. - * - * @readonly + * Azure Storage account name; readonly. */ - get copyId() { - return this.originalResponse.copyId; + accountName; + /** + * Azure Storage user delegation key; readonly. + */ + userDelegationKey; + /** + * Key value in Buffer type. + */ + key; + /** + * Creates an instance of UserDelegationKeyCredential. + * @param accountName - + * @param userDelegationKey - + */ + constructor(accountName, userDelegationKey) { + this.accountName = accountName; + this.userDelegationKey = userDelegationKey; + this.key = Buffer.from(userDelegationKey.value, "base64"); } /** - * Contains the number of bytes copied and - * the total bytes in the source in the last attempted Copy File operation - * where this file was the destination file. Can show between 0 and - * Content-Length bytes copied. + * Generates a hash signature for an HTTP request or for a SAS. * - * @readonly + * @param stringToSign - */ - get copyProgress() { - return this.originalResponse.copyProgress; + computeHMACSHA256(stringToSign) { + return (0, node_crypto_1.createHmac)("sha256", this.key).update(stringToSign, "utf8").digest("base64"); } + }; + exports2.UserDelegationKeyCredential = UserDelegationKeyCredential; + } +}); + +// node_modules/@azure/storage-blob/dist/commonjs/sas/SasIPRange.js +var require_SasIPRange = __commonJS({ + "node_modules/@azure/storage-blob/dist/commonjs/sas/SasIPRange.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.ipRangeToString = ipRangeToString; + function ipRangeToString(ipRange) { + return ipRange.end ? 
`${ipRange.start}-${ipRange.end}` : ipRange.start; + } + } +}); + +// node_modules/@azure/storage-blob/dist/commonjs/sas/SASQueryParameters.js +var require_SASQueryParameters = __commonJS({ + "node_modules/@azure/storage-blob/dist/commonjs/sas/SASQueryParameters.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.SASQueryParameters = exports2.SASProtocol = void 0; + var SasIPRange_js_1 = require_SasIPRange(); + var utils_common_js_1 = require_utils_common(); + var SASProtocol; + (function(SASProtocol2) { + SASProtocol2["Https"] = "https"; + SASProtocol2["HttpsAndHttp"] = "https,http"; + })(SASProtocol || (exports2.SASProtocol = SASProtocol = {})); + var SASQueryParameters = class { /** - * URL up to 2KB in length that specifies the - * source file used in the last attempted Copy File operation where this file - * was the destination file. - * - * @readonly + * The storage API version. + */ + version; + /** + * Optional. The allowed HTTP protocol(s). + */ + protocol; + /** + * Optional. The start time for this SAS token. + */ + startsOn; + /** + * Optional only when identifier is provided. The expiry time for this SAS token. + */ + expiresOn; + /** + * Optional only when identifier is provided. + * Please refer to {@link AccountSASPermissions}, {@link BlobSASPermissions}, or {@link ContainerSASPermissions} for + * more details. + */ + permissions; + /** + * Optional. The storage services being accessed (only for Account SAS). Please refer to {@link AccountSASServices} + * for more details. */ - get copySource() { - return this.originalResponse.copySource; - } + services; /** - * State of the copy operation - * identified by 'x-ms-copy-id'. Possible values include: 'pending', - * 'success', 'aborted', 'failed' - * - * @readonly + * Optional. The storage resource types being accessed (only for Account SAS). Please refer to + * {@link AccountSASResourceTypes} for more details. */ - get copyStatus() { - return this.originalResponse.copyStatus; - } + resourceTypes; /** - * Only appears when - * x-ms-copy-status is failed or pending. Describes cause of fatal or - * non-fatal copy operation failure. + * Optional. The signed identifier (only for {@link BlobSASSignatureValues}). * - * @readonly + * @see https://learn.microsoft.com/rest/api/storageservices/establishing-a-stored-access-policy */ - get copyStatusDescription() { - return this.originalResponse.copyStatusDescription; - } + identifier; /** - * When a blob is leased, - * specifies whether the lease is of infinite or fixed duration. Possible - * values include: 'infinite', 'fixed'. - * - * @readonly + * Optional. Encryption scope to use when sending requests authorized with this SAS URI. */ - get leaseDuration() { - return this.originalResponse.leaseDuration; - } + encryptionScope; /** - * Lease state of the blob. Possible - * values include: 'available', 'leased', 'expired', 'breaking', 'broken'. - * - * @readonly + * Optional. Specifies which resources are accessible via the SAS (only for {@link BlobSASSignatureValues}). + * @see https://learn.microsoft.com/rest/api/storageservices/create-service-sas#specifying-the-signed-resource-blob-service-only */ - get leaseState() { - return this.originalResponse.leaseState; - } + resource; /** - * The current lease status of the - * blob. Possible values include: 'locked', 'unlocked'. - * - * @readonly + * The signature for the SAS token. 
*/ - get leaseStatus() { - return this.originalResponse.leaseStatus; - } + signature; /** - * A UTC date/time value generated by the service that - * indicates the time at which the response was initiated. - * - * @readonly + * Value for cache-control header in Blob/File Service SAS. */ - get date() { - return this.originalResponse.date; - } + cacheControl; /** - * The number of committed blocks - * present in the blob. This header is returned only for append blobs. - * - * @readonly + * Value for content-disposition header in Blob/File Service SAS. */ - get blobCommittedBlockCount() { - return this.originalResponse.blobCommittedBlockCount; - } + contentDisposition; /** - * The ETag contains a value that you can use to - * perform operations conditionally, in quotes. - * - * @readonly + * Value for content-encoding header in Blob/File Service SAS. */ - get etag() { - return this.originalResponse.etag; - } + contentEncoding; /** - * The error code. - * - * @readonly + * Value for content-length header in Blob/File Service SAS. */ - get errorCode() { - return this.originalResponse.errorCode; - } + contentLanguage; /** - * The value of this header is set to - * true if the file data and application metadata are completely encrypted - * using the specified algorithm. Otherwise, the value is set to false (when - * the file is unencrypted, or if only parts of the file/application metadata - * are encrypted). - * - * @readonly + * Value for content-type header in Blob/File Service SAS. */ - get isServerEncrypted() { - return this.originalResponse.isServerEncrypted; - } + contentType; /** - * If the blob has a MD5 hash, and if - * request contains range header (Range or x-ms-range), this response header - * is returned with the value of the whole blob's MD5 value. This value may - * or may not be equal to the value returned in Content-MD5 header, with the - * latter calculated from the requested range. - * - * @readonly + * Inner value of getter ipRange. */ - get blobContentMD5() { - return this.originalResponse.blobContentMD5; - } + ipRangeInner; /** - * Returns the date and time the file was last - * modified. Any operation that modifies the file or its properties updates - * the last modified time. - * - * @readonly + * The Azure Active Directory object ID in GUID format. + * Property of user delegation key. */ - get lastModified() { - return this.originalResponse.lastModified; - } + signedOid; /** - * A name-value pair - * to associate with a file storage object. - * - * @readonly + * The Azure Active Directory tenant ID in GUID format. + * Property of user delegation key. */ - get metadata() { - return this.originalResponse.metadata; - } + signedTenantId; /** - * This header uniquely identifies the request - * that was made and can be used for troubleshooting the request. - * - * @readonly + * The date-time the key is active. + * Property of user delegation key. */ - get requestId() { - return this.originalResponse.requestId; - } + signedStartsOn; /** - * If a client request id header is sent in the request, this header will be present in the - * response with the same value. - * - * @readonly + * The date-time the key expires. + * Property of user delegation key. */ - get clientRequestId() { - return this.originalResponse.clientRequestId; - } + signedExpiresOn; /** - * Indicates the version of the File service used - * to execute the request. - * - * @readonly + * Abbreviation of the Azure Storage service that accepts the user delegation key. + * Property of user delegation key. 
*/ - get version() { - return this.originalResponse.version; - } + signedService; /** - * The SHA-256 hash of the encryption key used to encrypt the blob. This value is only returned - * when the blob was encrypted with a customer-provided key. - * - * @readonly + * The service version that created the user delegation key. + * Property of user delegation key. */ - get encryptionKeySha256() { - return this.originalResponse.encryptionKeySha256; - } + signedVersion; /** - * If the request is to read a specified range and the x-ms-range-get-content-crc64 is set to - * true, then the request returns a crc64 for the range, as long as the range size is less than - * or equal to 4 MB. If both x-ms-range-get-content-crc64 & x-ms-range-get-content-md5 is - * specified in the same request, it will fail with 400(Bad Request) + * Authorized AAD Object ID in GUID format. The AAD Object ID of a user authorized by the owner of the User Delegation Key + * to perform the action granted by the SAS. The Azure Storage service will ensure that the owner of the user delegation key + * has the required permissions before granting access but no additional permission check for the user specified in + * this value will be performed. This is only used for User Delegation SAS. */ - get contentCrc64() { - return this.originalResponse.contentCrc64; - } + preauthorizedAgentObjectId; /** - * The response body as a browser Blob. - * Always undefined in node.js. - * - * @readonly + * A GUID value that will be logged in the storage diagnostic logs and can be used to correlate SAS generation with storage resource access. + * This is only used for User Delegation SAS. */ - get blobBody() { - return void 0; - } + correlationId; /** - * The response body as a node.js Readable stream. - * Always undefined in the browser. - * - * It will parse avor data returned by blob query. + * Optional. IP range allowed for this SAS. * * @readonly */ - get readableStreamBody() { - return core_util_1.isNodeLike ? this.blobDownloadStream : void 0; + get ipRange() { + if (this.ipRangeInner) { + return { + end: this.ipRangeInner.end, + start: this.ipRangeInner.start + }; + } + return void 0; } - /** - * The HTTP response. 
- */ - get _response() { - return this.originalResponse._response; + constructor(version, signature, permissionsOrOptions, services, resourceTypes, protocol, startsOn, expiresOn, ipRange, identifier, resource, cacheControl, contentDisposition, contentEncoding, contentLanguage, contentType, userDelegationKey, preauthorizedAgentObjectId, correlationId, encryptionScope) { + this.version = version; + this.signature = signature; + if (permissionsOrOptions !== void 0 && typeof permissionsOrOptions !== "string") { + this.permissions = permissionsOrOptions.permissions; + this.services = permissionsOrOptions.services; + this.resourceTypes = permissionsOrOptions.resourceTypes; + this.protocol = permissionsOrOptions.protocol; + this.startsOn = permissionsOrOptions.startsOn; + this.expiresOn = permissionsOrOptions.expiresOn; + this.ipRangeInner = permissionsOrOptions.ipRange; + this.identifier = permissionsOrOptions.identifier; + this.encryptionScope = permissionsOrOptions.encryptionScope; + this.resource = permissionsOrOptions.resource; + this.cacheControl = permissionsOrOptions.cacheControl; + this.contentDisposition = permissionsOrOptions.contentDisposition; + this.contentEncoding = permissionsOrOptions.contentEncoding; + this.contentLanguage = permissionsOrOptions.contentLanguage; + this.contentType = permissionsOrOptions.contentType; + if (permissionsOrOptions.userDelegationKey) { + this.signedOid = permissionsOrOptions.userDelegationKey.signedObjectId; + this.signedTenantId = permissionsOrOptions.userDelegationKey.signedTenantId; + this.signedStartsOn = permissionsOrOptions.userDelegationKey.signedStartsOn; + this.signedExpiresOn = permissionsOrOptions.userDelegationKey.signedExpiresOn; + this.signedService = permissionsOrOptions.userDelegationKey.signedService; + this.signedVersion = permissionsOrOptions.userDelegationKey.signedVersion; + this.preauthorizedAgentObjectId = permissionsOrOptions.preauthorizedAgentObjectId; + this.correlationId = permissionsOrOptions.correlationId; + } + } else { + this.services = services; + this.resourceTypes = resourceTypes; + this.expiresOn = expiresOn; + this.permissions = permissionsOrOptions; + this.protocol = protocol; + this.startsOn = startsOn; + this.ipRangeInner = ipRange; + this.encryptionScope = encryptionScope; + this.identifier = identifier; + this.resource = resource; + this.cacheControl = cacheControl; + this.contentDisposition = contentDisposition; + this.contentEncoding = contentEncoding; + this.contentLanguage = contentLanguage; + this.contentType = contentType; + if (userDelegationKey) { + this.signedOid = userDelegationKey.signedObjectId; + this.signedTenantId = userDelegationKey.signedTenantId; + this.signedStartsOn = userDelegationKey.signedStartsOn; + this.signedExpiresOn = userDelegationKey.signedExpiresOn; + this.signedService = userDelegationKey.signedService; + this.signedVersion = userDelegationKey.signedVersion; + this.preauthorizedAgentObjectId = preauthorizedAgentObjectId; + this.correlationId = correlationId; + } + } } - originalResponse; - blobDownloadStream; /** - * Creates an instance of BlobQueryResponse. + * Encodes all SAS query parameters into a string that can be appended to a URL. 
* - * @param originalResponse - - * @param options - */ - constructor(originalResponse, options = {}) { - this.originalResponse = originalResponse; - this.blobDownloadStream = new BlobQuickQueryStream_js_1.BlobQuickQueryStream(this.originalResponse.readableStreamBody, options); - } - }; - exports2.BlobQueryResponse = BlobQueryResponse; - } -}); - -// node_modules/@azure/storage-blob/dist/commonjs/models.js -var require_models2 = __commonJS({ - "node_modules/@azure/storage-blob/dist/commonjs/models.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.StorageBlobAudience = exports2.PremiumPageBlobTier = exports2.BlockBlobTier = void 0; - exports2.toAccessTier = toAccessTier; - exports2.ensureCpkIfSpecified = ensureCpkIfSpecified; - exports2.getBlobServiceAccountAudience = getBlobServiceAccountAudience; - var constants_js_1 = require_constants15(); - var BlockBlobTier; - (function(BlockBlobTier2) { - BlockBlobTier2["Hot"] = "Hot"; - BlockBlobTier2["Cool"] = "Cool"; - BlockBlobTier2["Cold"] = "Cold"; - BlockBlobTier2["Archive"] = "Archive"; - })(BlockBlobTier || (exports2.BlockBlobTier = BlockBlobTier = {})); - var PremiumPageBlobTier; - (function(PremiumPageBlobTier2) { - PremiumPageBlobTier2["P4"] = "P4"; - PremiumPageBlobTier2["P6"] = "P6"; - PremiumPageBlobTier2["P10"] = "P10"; - PremiumPageBlobTier2["P15"] = "P15"; - PremiumPageBlobTier2["P20"] = "P20"; - PremiumPageBlobTier2["P30"] = "P30"; - PremiumPageBlobTier2["P40"] = "P40"; - PremiumPageBlobTier2["P50"] = "P50"; - PremiumPageBlobTier2["P60"] = "P60"; - PremiumPageBlobTier2["P70"] = "P70"; - PremiumPageBlobTier2["P80"] = "P80"; - })(PremiumPageBlobTier || (exports2.PremiumPageBlobTier = PremiumPageBlobTier = {})); - function toAccessTier(tier) { - if (tier === void 0) { - return void 0; - } - return tier; - } - function ensureCpkIfSpecified(cpk, isHttps) { - if (cpk && !isHttps) { - throw new RangeError("Customer-provided encryption key must be used over HTTPS."); - } - if (cpk && !cpk.encryptionAlgorithm) { - cpk.encryptionAlgorithm = constants_js_1.EncryptionAlgorithmAES25; - } - } - var StorageBlobAudience; - (function(StorageBlobAudience2) { - StorageBlobAudience2["StorageOAuthScopes"] = "https://storage.azure.com/.default"; - StorageBlobAudience2["DiskComputeOAuthScopes"] = "https://disk.compute.azure.com/.default"; - })(StorageBlobAudience || (exports2.StorageBlobAudience = StorageBlobAudience = {})); - function getBlobServiceAccountAudience(storageAccountName) { - return `https://${storageAccountName}.blob.core.windows.net/.default`; - } - } -}); - -// node_modules/@azure/storage-blob/dist/commonjs/PageBlobRangeResponse.js -var require_PageBlobRangeResponse = __commonJS({ - "node_modules/@azure/storage-blob/dist/commonjs/PageBlobRangeResponse.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.rangeResponseFromModel = rangeResponseFromModel; - function rangeResponseFromModel(response) { - const pageRange = (response._response.parsedBody.pageRange || []).map((x) => ({ - offset: x.start, - count: x.end - x.start - })); - const clearRange = (response._response.parsedBody.clearRange || []).map((x) => ({ - offset: x.start, - count: x.end - x.start - })); - return { - ...response, - pageRange, - clearRange, - _response: { - ...response._response, - parsedBody: { - pageRange, - clearRange + toString() { + const params = [ + "sv", + "ss", + "srt", + "spr", + "st", + "se", + "sip", + "si", + "ses", + "skoid", + // Signed 
object ID + "sktid", + // Signed tenant ID + "skt", + // Signed key start time + "ske", + // Signed key expiry time + "sks", + // Signed key service + "skv", + // Signed key version + "sr", + "sp", + "sig", + "rscc", + "rscd", + "rsce", + "rscl", + "rsct", + "saoid", + "scid" + ]; + const queries = []; + for (const param of params) { + switch (param) { + case "sv": + this.tryAppendQueryParameter(queries, param, this.version); + break; + case "ss": + this.tryAppendQueryParameter(queries, param, this.services); + break; + case "srt": + this.tryAppendQueryParameter(queries, param, this.resourceTypes); + break; + case "spr": + this.tryAppendQueryParameter(queries, param, this.protocol); + break; + case "st": + this.tryAppendQueryParameter(queries, param, this.startsOn ? (0, utils_common_js_1.truncatedISO8061Date)(this.startsOn, false) : void 0); + break; + case "se": + this.tryAppendQueryParameter(queries, param, this.expiresOn ? (0, utils_common_js_1.truncatedISO8061Date)(this.expiresOn, false) : void 0); + break; + case "sip": + this.tryAppendQueryParameter(queries, param, this.ipRange ? (0, SasIPRange_js_1.ipRangeToString)(this.ipRange) : void 0); + break; + case "si": + this.tryAppendQueryParameter(queries, param, this.identifier); + break; + case "ses": + this.tryAppendQueryParameter(queries, param, this.encryptionScope); + break; + case "skoid": + this.tryAppendQueryParameter(queries, param, this.signedOid); + break; + case "sktid": + this.tryAppendQueryParameter(queries, param, this.signedTenantId); + break; + case "skt": + this.tryAppendQueryParameter(queries, param, this.signedStartsOn ? (0, utils_common_js_1.truncatedISO8061Date)(this.signedStartsOn, false) : void 0); + break; + case "ske": + this.tryAppendQueryParameter(queries, param, this.signedExpiresOn ? 
(0, utils_common_js_1.truncatedISO8061Date)(this.signedExpiresOn, false) : void 0); + break; + case "sks": + this.tryAppendQueryParameter(queries, param, this.signedService); + break; + case "skv": + this.tryAppendQueryParameter(queries, param, this.signedVersion); + break; + case "sr": + this.tryAppendQueryParameter(queries, param, this.resource); + break; + case "sp": + this.tryAppendQueryParameter(queries, param, this.permissions); + break; + case "sig": + this.tryAppendQueryParameter(queries, param, this.signature); + break; + case "rscc": + this.tryAppendQueryParameter(queries, param, this.cacheControl); + break; + case "rscd": + this.tryAppendQueryParameter(queries, param, this.contentDisposition); + break; + case "rsce": + this.tryAppendQueryParameter(queries, param, this.contentEncoding); + break; + case "rscl": + this.tryAppendQueryParameter(queries, param, this.contentLanguage); + break; + case "rsct": + this.tryAppendQueryParameter(queries, param, this.contentType); + break; + case "saoid": + this.tryAppendQueryParameter(queries, param, this.preauthorizedAgentObjectId); + break; + case "scid": + this.tryAppendQueryParameter(queries, param, this.correlationId); + break; } } - }; - } - } -}); - -// node_modules/@azure/core-lro/dist/commonjs/logger.js -var require_logger2 = __commonJS({ - "node_modules/@azure/core-lro/dist/commonjs/logger.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.logger = void 0; - var logger_1 = require_commonjs2(); - exports2.logger = (0, logger_1.createClientLogger)("core-lro"); - } -}); - -// node_modules/@azure/core-lro/dist/commonjs/poller/constants.js -var require_constants17 = __commonJS({ - "node_modules/@azure/core-lro/dist/commonjs/poller/constants.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.terminalStates = exports2.POLL_INTERVAL_IN_MS = void 0; - exports2.POLL_INTERVAL_IN_MS = 2e3; - exports2.terminalStates = ["succeeded", "canceled", "failed"]; + return queries.join("&"); + } + /** + * A private helper method used to filter and append query key/value pairs into an array. 
+ * + * @param queries - + * @param key - + * @param value - + */ + tryAppendQueryParameter(queries, key, value) { + if (!value) { + return; + } + key = encodeURIComponent(key); + value = encodeURIComponent(value); + if (key.length > 0 && value.length > 0) { + queries.push(`${key}=${value}`); + } + } + }; + exports2.SASQueryParameters = SASQueryParameters; } }); -// node_modules/@azure/core-lro/dist/commonjs/poller/operation.js -var require_operation = __commonJS({ - "node_modules/@azure/core-lro/dist/commonjs/poller/operation.js"(exports2) { +// node_modules/@azure/storage-blob/dist/commonjs/sas/BlobSASSignatureValues.js +var require_BlobSASSignatureValues = __commonJS({ + "node_modules/@azure/storage-blob/dist/commonjs/sas/BlobSASSignatureValues.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.pollOperation = exports2.initOperation = exports2.deserializeState = void 0; - var logger_js_1 = require_logger2(); - var constants_js_1 = require_constants17(); - function deserializeState(serializedState) { - try { - return JSON.parse(serializedState).state; - } catch (e) { - throw new Error(`Unable to deserialize input state: ${serializedState}`); + exports2.generateBlobSASQueryParameters = generateBlobSASQueryParameters; + exports2.generateBlobSASQueryParametersInternal = generateBlobSASQueryParametersInternal; + var BlobSASPermissions_js_1 = require_BlobSASPermissions(); + var ContainerSASPermissions_js_1 = require_ContainerSASPermissions(); + var StorageSharedKeyCredential_js_1 = require_StorageSharedKeyCredential(); + var UserDelegationKeyCredential_js_1 = require_UserDelegationKeyCredential(); + var SasIPRange_js_1 = require_SasIPRange(); + var SASQueryParameters_js_1 = require_SASQueryParameters(); + var constants_js_1 = require_constants15(); + var utils_common_js_1 = require_utils_common(); + function generateBlobSASQueryParameters(blobSASSignatureValues, sharedKeyCredentialOrUserDelegationKey, accountName) { + return generateBlobSASQueryParametersInternal(blobSASSignatureValues, sharedKeyCredentialOrUserDelegationKey, accountName).sasQueryParameters; + } + function generateBlobSASQueryParametersInternal(blobSASSignatureValues, sharedKeyCredentialOrUserDelegationKey, accountName) { + const version = blobSASSignatureValues.version ? blobSASSignatureValues.version : constants_js_1.SERVICE_VERSION; + const sharedKeyCredential = sharedKeyCredentialOrUserDelegationKey instanceof StorageSharedKeyCredential_js_1.StorageSharedKeyCredential ? 
sharedKeyCredentialOrUserDelegationKey : void 0; + let userDelegationKeyCredential; + if (sharedKeyCredential === void 0 && accountName !== void 0) { + userDelegationKeyCredential = new UserDelegationKeyCredential_js_1.UserDelegationKeyCredential(accountName, sharedKeyCredentialOrUserDelegationKey); + } + if (sharedKeyCredential === void 0 && userDelegationKeyCredential === void 0) { + throw TypeError("Invalid sharedKeyCredential, userDelegationKey or accountName."); + } + if (version >= "2020-12-06") { + if (sharedKeyCredential !== void 0) { + return generateBlobSASQueryParameters20201206(blobSASSignatureValues, sharedKeyCredential); + } else { + if (version >= "2025-07-05") { + return generateBlobSASQueryParametersUDK20250705(blobSASSignatureValues, userDelegationKeyCredential); + } else { + return generateBlobSASQueryParametersUDK20201206(blobSASSignatureValues, userDelegationKeyCredential); + } + } + } + if (version >= "2018-11-09") { + if (sharedKeyCredential !== void 0) { + return generateBlobSASQueryParameters20181109(blobSASSignatureValues, sharedKeyCredential); + } else { + if (version >= "2020-02-10") { + return generateBlobSASQueryParametersUDK20200210(blobSASSignatureValues, userDelegationKeyCredential); + } else { + return generateBlobSASQueryParametersUDK20181109(blobSASSignatureValues, userDelegationKeyCredential); + } + } + } + if (version >= "2015-04-05") { + if (sharedKeyCredential !== void 0) { + return generateBlobSASQueryParameters20150405(blobSASSignatureValues, sharedKeyCredential); + } else { + throw new RangeError("'version' must be >= '2018-11-09' when generating user delegation SAS using user delegation key."); + } } + throw new RangeError("'version' must be >= '2015-04-05'."); } - exports2.deserializeState = deserializeState; - function setStateError(inputs) { - const { state, stateProxy, isOperationError } = inputs; - return (error3) => { - if (isOperationError(error3)) { - stateProxy.setError(state, error3); - stateProxy.setFailed(state); + function generateBlobSASQueryParameters20150405(blobSASSignatureValues, sharedKeyCredential) { + blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); + if (!blobSASSignatureValues.identifier && !(blobSASSignatureValues.permissions && blobSASSignatureValues.expiresOn)) { + throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when 'identifier' is not provided."); + } + let resource = "c"; + if (blobSASSignatureValues.blobName) { + resource = "b"; + } + let verifiedPermissions; + if (blobSASSignatureValues.permissions) { + if (blobSASSignatureValues.blobName) { + verifiedPermissions = BlobSASPermissions_js_1.BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } else { + verifiedPermissions = ContainerSASPermissions_js_1.ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); } - throw error3; + } + const stringToSign = [ + verifiedPermissions ? verifiedPermissions : "", + blobSASSignatureValues.startsOn ? (0, utils_common_js_1.truncatedISO8061Date)(blobSASSignatureValues.startsOn, false) : "", + blobSASSignatureValues.expiresOn ? (0, utils_common_js_1.truncatedISO8061Date)(blobSASSignatureValues.expiresOn, false) : "", + getCanonicalName(sharedKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), + blobSASSignatureValues.identifier, + blobSASSignatureValues.ipRange ? 
(0, SasIPRange_js_1.ipRangeToString)(blobSASSignatureValues.ipRange) : "", + blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : "", + blobSASSignatureValues.version, + blobSASSignatureValues.cacheControl ? blobSASSignatureValues.cacheControl : "", + blobSASSignatureValues.contentDisposition ? blobSASSignatureValues.contentDisposition : "", + blobSASSignatureValues.contentEncoding ? blobSASSignatureValues.contentEncoding : "", + blobSASSignatureValues.contentLanguage ? blobSASSignatureValues.contentLanguage : "", + blobSASSignatureValues.contentType ? blobSASSignatureValues.contentType : "" + ].join("\n"); + const signature = sharedKeyCredential.computeHMACSHA256(stringToSign); + return { + sasQueryParameters: new SASQueryParameters_js_1.SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, void 0, void 0, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType), + stringToSign }; } - function appendReadableErrorMessage(currentMessage, innerMessage) { - let message = currentMessage; - if (message.slice(-1) !== ".") { - message = message + "."; + function generateBlobSASQueryParameters20181109(blobSASSignatureValues, sharedKeyCredential) { + blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); + if (!blobSASSignatureValues.identifier && !(blobSASSignatureValues.permissions && blobSASSignatureValues.expiresOn)) { + throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when 'identifier' is not provided."); } - return message + " " + innerMessage; - } - function simplifyError(err) { - let message = err.message; - let code = err.code; - let curErr = err; - while (curErr.innererror) { - curErr = curErr.innererror; - code = curErr.code; - message = appendReadableErrorMessage(message, curErr.message); + let resource = "c"; + let timestamp2 = blobSASSignatureValues.snapshotTime; + if (blobSASSignatureValues.blobName) { + resource = "b"; + if (blobSASSignatureValues.snapshotTime) { + resource = "bs"; + } else if (blobSASSignatureValues.versionId) { + resource = "bv"; + timestamp2 = blobSASSignatureValues.versionId; + } + } + let verifiedPermissions; + if (blobSASSignatureValues.permissions) { + if (blobSASSignatureValues.blobName) { + verifiedPermissions = BlobSASPermissions_js_1.BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } else { + verifiedPermissions = ContainerSASPermissions_js_1.ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } } + const stringToSign = [ + verifiedPermissions ? verifiedPermissions : "", + blobSASSignatureValues.startsOn ? (0, utils_common_js_1.truncatedISO8061Date)(blobSASSignatureValues.startsOn, false) : "", + blobSASSignatureValues.expiresOn ? (0, utils_common_js_1.truncatedISO8061Date)(blobSASSignatureValues.expiresOn, false) : "", + getCanonicalName(sharedKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), + blobSASSignatureValues.identifier, + blobSASSignatureValues.ipRange ? (0, SasIPRange_js_1.ipRangeToString)(blobSASSignatureValues.ipRange) : "", + blobSASSignatureValues.protocol ? 
blobSASSignatureValues.protocol : "", + blobSASSignatureValues.version, + resource, + timestamp2, + blobSASSignatureValues.cacheControl ? blobSASSignatureValues.cacheControl : "", + blobSASSignatureValues.contentDisposition ? blobSASSignatureValues.contentDisposition : "", + blobSASSignatureValues.contentEncoding ? blobSASSignatureValues.contentEncoding : "", + blobSASSignatureValues.contentLanguage ? blobSASSignatureValues.contentLanguage : "", + blobSASSignatureValues.contentType ? blobSASSignatureValues.contentType : "" + ].join("\n"); + const signature = sharedKeyCredential.computeHMACSHA256(stringToSign); return { - code, - message + sasQueryParameters: new SASQueryParameters_js_1.SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, void 0, void 0, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType), + stringToSign }; } - function processOperationStatus(result) { - const { state, stateProxy, status, isDone, processResult, getError, response, setErrorAsResult } = result; - switch (status) { - case "succeeded": { - stateProxy.setSucceeded(state); - break; + function generateBlobSASQueryParameters20201206(blobSASSignatureValues, sharedKeyCredential) { + blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); + if (!blobSASSignatureValues.identifier && !(blobSASSignatureValues.permissions && blobSASSignatureValues.expiresOn)) { + throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when 'identifier' is not provided."); + } + let resource = "c"; + let timestamp2 = blobSASSignatureValues.snapshotTime; + if (blobSASSignatureValues.blobName) { + resource = "b"; + if (blobSASSignatureValues.snapshotTime) { + resource = "bs"; + } else if (blobSASSignatureValues.versionId) { + resource = "bv"; + timestamp2 = blobSASSignatureValues.versionId; } - case "failed": { - const err = getError === null || getError === void 0 ? void 0 : getError(response); - let postfix = ""; - if (err) { - const { code, message } = simplifyError(err); - postfix = `. ${code}. ${message}`; - } - const errStr = `The long-running operation has failed${postfix}`; - stateProxy.setError(state, new Error(errStr)); - stateProxy.setFailed(state); - logger_js_1.logger.warning(errStr); - break; + } + let verifiedPermissions; + if (blobSASSignatureValues.permissions) { + if (blobSASSignatureValues.blobName) { + verifiedPermissions = BlobSASPermissions_js_1.BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } else { + verifiedPermissions = ContainerSASPermissions_js_1.ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); } - case "canceled": { - stateProxy.setCanceled(state); - break; + } + const stringToSign = [ + verifiedPermissions ? verifiedPermissions : "", + blobSASSignatureValues.startsOn ? (0, utils_common_js_1.truncatedISO8061Date)(blobSASSignatureValues.startsOn, false) : "", + blobSASSignatureValues.expiresOn ? 
(0, utils_common_js_1.truncatedISO8061Date)(blobSASSignatureValues.expiresOn, false) : "", + getCanonicalName(sharedKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), + blobSASSignatureValues.identifier, + blobSASSignatureValues.ipRange ? (0, SasIPRange_js_1.ipRangeToString)(blobSASSignatureValues.ipRange) : "", + blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : "", + blobSASSignatureValues.version, + resource, + timestamp2, + blobSASSignatureValues.encryptionScope, + blobSASSignatureValues.cacheControl ? blobSASSignatureValues.cacheControl : "", + blobSASSignatureValues.contentDisposition ? blobSASSignatureValues.contentDisposition : "", + blobSASSignatureValues.contentEncoding ? blobSASSignatureValues.contentEncoding : "", + blobSASSignatureValues.contentLanguage ? blobSASSignatureValues.contentLanguage : "", + blobSASSignatureValues.contentType ? blobSASSignatureValues.contentType : "" + ].join("\n"); + const signature = sharedKeyCredential.computeHMACSHA256(stringToSign); + return { + sasQueryParameters: new SASQueryParameters_js_1.SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, void 0, void 0, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType, void 0, void 0, void 0, blobSASSignatureValues.encryptionScope), + stringToSign + }; + } + function generateBlobSASQueryParametersUDK20181109(blobSASSignatureValues, userDelegationKeyCredential) { + blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); + if (!blobSASSignatureValues.permissions || !blobSASSignatureValues.expiresOn) { + throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when generating user delegation SAS."); + } + let resource = "c"; + let timestamp2 = blobSASSignatureValues.snapshotTime; + if (blobSASSignatureValues.blobName) { + resource = "b"; + if (blobSASSignatureValues.snapshotTime) { + resource = "bs"; + } else if (blobSASSignatureValues.versionId) { + resource = "bv"; + timestamp2 = blobSASSignatureValues.versionId; } } - if ((isDone === null || isDone === void 0 ? void 0 : isDone(response, state)) || isDone === void 0 && ["succeeded", "canceled"].concat(setErrorAsResult ? [] : ["failed"]).includes(status)) { - stateProxy.setResult(state, buildResult({ - response, - state, - processResult - })); + let verifiedPermissions; + if (blobSASSignatureValues.permissions) { + if (blobSASSignatureValues.blobName) { + verifiedPermissions = BlobSASPermissions_js_1.BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } else { + verifiedPermissions = ContainerSASPermissions_js_1.ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } } - } - function buildResult(inputs) { - const { processResult, response, state } = inputs; - return processResult ? 
processResult(response, state) : response; - } - async function initOperation(inputs) { - const { init, stateProxy, processResult, getOperationStatus, withOperationLocation, setErrorAsResult } = inputs; - const { operationLocation, resourceLocation, metadata, response } = await init(); - if (operationLocation) - withOperationLocation === null || withOperationLocation === void 0 ? void 0 : withOperationLocation(operationLocation, false); - const config = { - metadata, - operationLocation, - resourceLocation + const stringToSign = [ + verifiedPermissions ? verifiedPermissions : "", + blobSASSignatureValues.startsOn ? (0, utils_common_js_1.truncatedISO8061Date)(blobSASSignatureValues.startsOn, false) : "", + blobSASSignatureValues.expiresOn ? (0, utils_common_js_1.truncatedISO8061Date)(blobSASSignatureValues.expiresOn, false) : "", + getCanonicalName(userDelegationKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), + userDelegationKeyCredential.userDelegationKey.signedObjectId, + userDelegationKeyCredential.userDelegationKey.signedTenantId, + userDelegationKeyCredential.userDelegationKey.signedStartsOn ? (0, utils_common_js_1.truncatedISO8061Date)(userDelegationKeyCredential.userDelegationKey.signedStartsOn, false) : "", + userDelegationKeyCredential.userDelegationKey.signedExpiresOn ? (0, utils_common_js_1.truncatedISO8061Date)(userDelegationKeyCredential.userDelegationKey.signedExpiresOn, false) : "", + userDelegationKeyCredential.userDelegationKey.signedService, + userDelegationKeyCredential.userDelegationKey.signedVersion, + blobSASSignatureValues.ipRange ? (0, SasIPRange_js_1.ipRangeToString)(blobSASSignatureValues.ipRange) : "", + blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : "", + blobSASSignatureValues.version, + resource, + timestamp2, + blobSASSignatureValues.cacheControl, + blobSASSignatureValues.contentDisposition, + blobSASSignatureValues.contentEncoding, + blobSASSignatureValues.contentLanguage, + blobSASSignatureValues.contentType + ].join("\n"); + const signature = userDelegationKeyCredential.computeHMACSHA256(stringToSign); + return { + sasQueryParameters: new SASQueryParameters_js_1.SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, void 0, void 0, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType, userDelegationKeyCredential.userDelegationKey), + stringToSign }; - logger_js_1.logger.verbose(`LRO: Operation description:`, config); - const state = stateProxy.initState(config); - const status = getOperationStatus({ response, state, operationLocation }); - processOperationStatus({ state, status, stateProxy, response, setErrorAsResult, processResult }); - return state; } - exports2.initOperation = initOperation; - async function pollOperationHelper(inputs) { - const { poll, state, stateProxy, operationLocation, getOperationStatus, getResourceLocation, isOperationError, options } = inputs; - const response = await poll(operationLocation, options).catch(setStateError({ - state, - stateProxy, - isOperationError - })); - const status = getOperationStatus(response, state); - logger_js_1.logger.verbose(`LRO: Status: - Polling from: ${state.config.operationLocation} - 
Operation status: ${status} - Polling status: ${constants_js_1.terminalStates.includes(status) ? "Stopped" : "Running"}`); - if (status === "succeeded") { - const resourceLocation = getResourceLocation(response, state); - if (resourceLocation !== void 0) { - return { - response: await poll(resourceLocation).catch(setStateError({ state, stateProxy, isOperationError })), - status - }; + function generateBlobSASQueryParametersUDK20200210(blobSASSignatureValues, userDelegationKeyCredential) { + blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); + if (!blobSASSignatureValues.permissions || !blobSASSignatureValues.expiresOn) { + throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when generating user delegation SAS."); + } + let resource = "c"; + let timestamp2 = blobSASSignatureValues.snapshotTime; + if (blobSASSignatureValues.blobName) { + resource = "b"; + if (blobSASSignatureValues.snapshotTime) { + resource = "bs"; + } else if (blobSASSignatureValues.versionId) { + resource = "bv"; + timestamp2 = blobSASSignatureValues.versionId; } } - return { response, status }; - } - async function pollOperation(inputs) { - const { poll, state, stateProxy, options, getOperationStatus, getResourceLocation, getOperationLocation, isOperationError, withOperationLocation, getPollingInterval, processResult, getError, updateState, setDelay, isDone, setErrorAsResult } = inputs; - const { operationLocation } = state.config; - if (operationLocation !== void 0) { - const { response, status } = await pollOperationHelper({ - poll, - getOperationStatus, - state, - stateProxy, - operationLocation, - getResourceLocation, - isOperationError, - options - }); - processOperationStatus({ - status, - response, - state, - stateProxy, - isDone, - processResult, - getError, - setErrorAsResult - }); - if (!constants_js_1.terminalStates.includes(status)) { - const intervalInMs = getPollingInterval === null || getPollingInterval === void 0 ? void 0 : getPollingInterval(response); - if (intervalInMs) - setDelay(intervalInMs); - const location = getOperationLocation === null || getOperationLocation === void 0 ? void 0 : getOperationLocation(response, state); - if (location !== void 0) { - const isUpdated = operationLocation !== location; - state.config.operationLocation = location; - withOperationLocation === null || withOperationLocation === void 0 ? void 0 : withOperationLocation(location, isUpdated); - } else - withOperationLocation === null || withOperationLocation === void 0 ? void 0 : withOperationLocation(operationLocation, false); + let verifiedPermissions; + if (blobSASSignatureValues.permissions) { + if (blobSASSignatureValues.blobName) { + verifiedPermissions = BlobSASPermissions_js_1.BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } else { + verifiedPermissions = ContainerSASPermissions_js_1.ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); } - updateState === null || updateState === void 0 ? void 0 : updateState(state, response); } + const stringToSign = [ + verifiedPermissions ? verifiedPermissions : "", + blobSASSignatureValues.startsOn ? (0, utils_common_js_1.truncatedISO8061Date)(blobSASSignatureValues.startsOn, false) : "", + blobSASSignatureValues.expiresOn ? 
(0, utils_common_js_1.truncatedISO8061Date)(blobSASSignatureValues.expiresOn, false) : "", + getCanonicalName(userDelegationKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), + userDelegationKeyCredential.userDelegationKey.signedObjectId, + userDelegationKeyCredential.userDelegationKey.signedTenantId, + userDelegationKeyCredential.userDelegationKey.signedStartsOn ? (0, utils_common_js_1.truncatedISO8061Date)(userDelegationKeyCredential.userDelegationKey.signedStartsOn, false) : "", + userDelegationKeyCredential.userDelegationKey.signedExpiresOn ? (0, utils_common_js_1.truncatedISO8061Date)(userDelegationKeyCredential.userDelegationKey.signedExpiresOn, false) : "", + userDelegationKeyCredential.userDelegationKey.signedService, + userDelegationKeyCredential.userDelegationKey.signedVersion, + blobSASSignatureValues.preauthorizedAgentObjectId, + void 0, + // agentObjectId + blobSASSignatureValues.correlationId, + blobSASSignatureValues.ipRange ? (0, SasIPRange_js_1.ipRangeToString)(blobSASSignatureValues.ipRange) : "", + blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : "", + blobSASSignatureValues.version, + resource, + timestamp2, + blobSASSignatureValues.cacheControl, + blobSASSignatureValues.contentDisposition, + blobSASSignatureValues.contentEncoding, + blobSASSignatureValues.contentLanguage, + blobSASSignatureValues.contentType + ].join("\n"); + const signature = userDelegationKeyCredential.computeHMACSHA256(stringToSign); + return { + sasQueryParameters: new SASQueryParameters_js_1.SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, void 0, void 0, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType, userDelegationKeyCredential.userDelegationKey, blobSASSignatureValues.preauthorizedAgentObjectId, blobSASSignatureValues.correlationId), + stringToSign + }; } - exports2.pollOperation = pollOperation; - } -}); - -// node_modules/@azure/core-lro/dist/commonjs/http/operation.js -var require_operation2 = __commonJS({ - "node_modules/@azure/core-lro/dist/commonjs/http/operation.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.pollHttpOperation = exports2.isOperationError = exports2.getResourceLocation = exports2.getOperationStatus = exports2.getOperationLocation = exports2.initHttpOperation = exports2.getStatusFromInitialResponse = exports2.getErrorFromResponse = exports2.parseRetryAfter = exports2.inferLroMode = void 0; - var operation_js_1 = require_operation(); - var logger_js_1 = require_logger2(); - function getOperationLocationPollingUrl(inputs) { - const { azureAsyncOperation, operationLocation } = inputs; - return operationLocation !== null && operationLocation !== void 0 ? 
operationLocation : azureAsyncOperation; - } - function getLocationHeader(rawResponse) { - return rawResponse.headers["location"]; - } - function getOperationLocationHeader(rawResponse) { - return rawResponse.headers["operation-location"]; - } - function getAzureAsyncOperationHeader(rawResponse) { - return rawResponse.headers["azure-asyncoperation"]; - } - function findResourceLocation(inputs) { - var _a; - const { location, requestMethod, requestPath, resourceLocationConfig } = inputs; - switch (requestMethod) { - case "PUT": { - return requestPath; - } - case "DELETE": { - return void 0; + function generateBlobSASQueryParametersUDK20201206(blobSASSignatureValues, userDelegationKeyCredential) { + blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); + if (!blobSASSignatureValues.permissions || !blobSASSignatureValues.expiresOn) { + throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when generating user delegation SAS."); + } + let resource = "c"; + let timestamp2 = blobSASSignatureValues.snapshotTime; + if (blobSASSignatureValues.blobName) { + resource = "b"; + if (blobSASSignatureValues.snapshotTime) { + resource = "bs"; + } else if (blobSASSignatureValues.versionId) { + resource = "bv"; + timestamp2 = blobSASSignatureValues.versionId; } - case "PATCH": { - return (_a = getDefault()) !== null && _a !== void 0 ? _a : requestPath; + } + let verifiedPermissions; + if (blobSASSignatureValues.permissions) { + if (blobSASSignatureValues.blobName) { + verifiedPermissions = BlobSASPermissions_js_1.BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } else { + verifiedPermissions = ContainerSASPermissions_js_1.ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); } - default: { - return getDefault(); + } + const stringToSign = [ + verifiedPermissions ? verifiedPermissions : "", + blobSASSignatureValues.startsOn ? (0, utils_common_js_1.truncatedISO8061Date)(blobSASSignatureValues.startsOn, false) : "", + blobSASSignatureValues.expiresOn ? (0, utils_common_js_1.truncatedISO8061Date)(blobSASSignatureValues.expiresOn, false) : "", + getCanonicalName(userDelegationKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), + userDelegationKeyCredential.userDelegationKey.signedObjectId, + userDelegationKeyCredential.userDelegationKey.signedTenantId, + userDelegationKeyCredential.userDelegationKey.signedStartsOn ? (0, utils_common_js_1.truncatedISO8061Date)(userDelegationKeyCredential.userDelegationKey.signedStartsOn, false) : "", + userDelegationKeyCredential.userDelegationKey.signedExpiresOn ? (0, utils_common_js_1.truncatedISO8061Date)(userDelegationKeyCredential.userDelegationKey.signedExpiresOn, false) : "", + userDelegationKeyCredential.userDelegationKey.signedService, + userDelegationKeyCredential.userDelegationKey.signedVersion, + blobSASSignatureValues.preauthorizedAgentObjectId, + void 0, + // agentObjectId + blobSASSignatureValues.correlationId, + blobSASSignatureValues.ipRange ? (0, SasIPRange_js_1.ipRangeToString)(blobSASSignatureValues.ipRange) : "", + blobSASSignatureValues.protocol ? 
blobSASSignatureValues.protocol : "", + blobSASSignatureValues.version, + resource, + timestamp2, + blobSASSignatureValues.encryptionScope, + blobSASSignatureValues.cacheControl, + blobSASSignatureValues.contentDisposition, + blobSASSignatureValues.contentEncoding, + blobSASSignatureValues.contentLanguage, + blobSASSignatureValues.contentType + ].join("\n"); + const signature = userDelegationKeyCredential.computeHMACSHA256(stringToSign); + return { + sasQueryParameters: new SASQueryParameters_js_1.SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, void 0, void 0, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType, userDelegationKeyCredential.userDelegationKey, blobSASSignatureValues.preauthorizedAgentObjectId, blobSASSignatureValues.correlationId, blobSASSignatureValues.encryptionScope), + stringToSign + }; + } + function generateBlobSASQueryParametersUDK20250705(blobSASSignatureValues, userDelegationKeyCredential) { + blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); + if (!blobSASSignatureValues.permissions || !blobSASSignatureValues.expiresOn) { + throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when generating user delegation SAS."); + } + let resource = "c"; + let timestamp2 = blobSASSignatureValues.snapshotTime; + if (blobSASSignatureValues.blobName) { + resource = "b"; + if (blobSASSignatureValues.snapshotTime) { + resource = "bs"; + } else if (blobSASSignatureValues.versionId) { + resource = "bv"; + timestamp2 = blobSASSignatureValues.versionId; } } - function getDefault() { - switch (resourceLocationConfig) { - case "azure-async-operation": { - return void 0; - } - case "original-uri": { - return requestPath; - } - case "location": - default: { - return location; - } + let verifiedPermissions; + if (blobSASSignatureValues.permissions) { + if (blobSASSignatureValues.blobName) { + verifiedPermissions = BlobSASPermissions_js_1.BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } else { + verifiedPermissions = ContainerSASPermissions_js_1.ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); } } + const stringToSign = [ + verifiedPermissions ? verifiedPermissions : "", + blobSASSignatureValues.startsOn ? (0, utils_common_js_1.truncatedISO8061Date)(blobSASSignatureValues.startsOn, false) : "", + blobSASSignatureValues.expiresOn ? (0, utils_common_js_1.truncatedISO8061Date)(blobSASSignatureValues.expiresOn, false) : "", + getCanonicalName(userDelegationKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), + userDelegationKeyCredential.userDelegationKey.signedObjectId, + userDelegationKeyCredential.userDelegationKey.signedTenantId, + userDelegationKeyCredential.userDelegationKey.signedStartsOn ? (0, utils_common_js_1.truncatedISO8061Date)(userDelegationKeyCredential.userDelegationKey.signedStartsOn, false) : "", + userDelegationKeyCredential.userDelegationKey.signedExpiresOn ? 
(0, utils_common_js_1.truncatedISO8061Date)(userDelegationKeyCredential.userDelegationKey.signedExpiresOn, false) : "", + userDelegationKeyCredential.userDelegationKey.signedService, + userDelegationKeyCredential.userDelegationKey.signedVersion, + blobSASSignatureValues.preauthorizedAgentObjectId, + void 0, + // agentObjectId + blobSASSignatureValues.correlationId, + void 0, + // SignedKeyDelegatedUserTenantId, will be added in a future release. + void 0, + // SignedDelegatedUserObjectId, will be added in future release. + blobSASSignatureValues.ipRange ? (0, SasIPRange_js_1.ipRangeToString)(blobSASSignatureValues.ipRange) : "", + blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : "", + blobSASSignatureValues.version, + resource, + timestamp2, + blobSASSignatureValues.encryptionScope, + blobSASSignatureValues.cacheControl, + blobSASSignatureValues.contentDisposition, + blobSASSignatureValues.contentEncoding, + blobSASSignatureValues.contentLanguage, + blobSASSignatureValues.contentType + ].join("\n"); + const signature = userDelegationKeyCredential.computeHMACSHA256(stringToSign); + return { + sasQueryParameters: new SASQueryParameters_js_1.SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, void 0, void 0, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType, userDelegationKeyCredential.userDelegationKey, blobSASSignatureValues.preauthorizedAgentObjectId, blobSASSignatureValues.correlationId, blobSASSignatureValues.encryptionScope), + stringToSign + }; } - function inferLroMode(inputs) { - const { rawResponse, requestMethod, requestPath, resourceLocationConfig } = inputs; - const operationLocation = getOperationLocationHeader(rawResponse); - const azureAsyncOperation = getAzureAsyncOperationHeader(rawResponse); - const pollingUrl = getOperationLocationPollingUrl({ operationLocation, azureAsyncOperation }); - const location = getLocationHeader(rawResponse); - const normalizedRequestMethod = requestMethod === null || requestMethod === void 0 ? void 0 : requestMethod.toLocaleUpperCase(); - if (pollingUrl !== void 0) { - return { - mode: "OperationLocation", - operationLocation: pollingUrl, - resourceLocation: findResourceLocation({ - requestMethod: normalizedRequestMethod, - location, - requestPath, - resourceLocationConfig - }) - }; - } else if (location !== void 0) { - return { - mode: "ResourceLocation", - operationLocation: location - }; - } else if (normalizedRequestMethod === "PUT" && requestPath) { - return { - mode: "Body", - operationLocation: requestPath - }; - } else { - return void 0; + function getCanonicalName(accountName, containerName, blobName) { + const elements = [`/blob/${accountName}/${containerName}`]; + if (blobName) { + elements.push(`/${blobName}`); } + return elements.join(""); } - exports2.inferLroMode = inferLroMode; - function transformStatus(inputs) { - const { status, statusCode } = inputs; - if (typeof status !== "string" && status !== void 0) { - throw new Error(`Polling was unsuccessful. Expected status to have a string value or no value but it has instead: ${status}. This doesn't necessarily indicate the operation has failed. 
Check your Azure subscription or resource status for more information.`); + function SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues) { + const version = blobSASSignatureValues.version ? blobSASSignatureValues.version : constants_js_1.SERVICE_VERSION; + if (blobSASSignatureValues.snapshotTime && version < "2018-11-09") { + throw RangeError("'version' must be >= '2018-11-09' when providing 'snapshotTime'."); } - switch (status === null || status === void 0 ? void 0 : status.toLocaleLowerCase()) { - case void 0: - return toOperationStatus(statusCode); - case "succeeded": - return "succeeded"; - case "failed": - return "failed"; - case "running": - case "accepted": - case "started": - case "canceling": - case "cancelling": - return "running"; - case "canceled": - case "cancelled": - return "canceled"; - default: { - logger_js_1.logger.verbose(`LRO: unrecognized operation status: ${status}`); - return status; - } + if (blobSASSignatureValues.blobName === void 0 && blobSASSignatureValues.snapshotTime) { + throw RangeError("Must provide 'blobName' when providing 'snapshotTime'."); } - } - function getStatus(rawResponse) { - var _a; - const { status } = (_a = rawResponse.body) !== null && _a !== void 0 ? _a : {}; - return transformStatus({ status, statusCode: rawResponse.statusCode }); - } - function getProvisioningState(rawResponse) { - var _a, _b; - const { properties, provisioningState } = (_a = rawResponse.body) !== null && _a !== void 0 ? _a : {}; - const status = (_b = properties === null || properties === void 0 ? void 0 : properties.provisioningState) !== null && _b !== void 0 ? _b : provisioningState; - return transformStatus({ status, statusCode: rawResponse.statusCode }); - } - function toOperationStatus(statusCode) { - if (statusCode === 202) { - return "running"; - } else if (statusCode < 300) { - return "succeeded"; - } else { - return "failed"; + if (blobSASSignatureValues.versionId && version < "2019-10-10") { + throw RangeError("'version' must be >= '2019-10-10' when providing 'versionId'."); } - } - function parseRetryAfter({ rawResponse }) { - const retryAfter = rawResponse.headers["retry-after"]; - if (retryAfter !== void 0) { - const retryAfterInSeconds = parseInt(retryAfter); - return isNaN(retryAfterInSeconds) ? 
calculatePollingIntervalFromDate(new Date(retryAfter)) : retryAfterInSeconds * 1e3; + if (blobSASSignatureValues.blobName === void 0 && blobSASSignatureValues.versionId) { + throw RangeError("Must provide 'blobName' when providing 'versionId'."); } - return void 0; - } - exports2.parseRetryAfter = parseRetryAfter; - function getErrorFromResponse(response) { - const error3 = accessBodyProperty(response, "error"); - if (!error3) { - logger_js_1.logger.warning(`The long-running operation failed but there is no error property in the response's body`); - return; + if (blobSASSignatureValues.permissions && blobSASSignatureValues.permissions.setImmutabilityPolicy && version < "2020-08-04") { + throw RangeError("'version' must be >= '2020-08-04' when provided 'i' permission."); } - if (!error3.code || !error3.message) { - logger_js_1.logger.warning(`The long-running operation failed but the error property in the response's body doesn't contain code or message`); - return; + if (blobSASSignatureValues.permissions && blobSASSignatureValues.permissions.deleteVersion && version < "2019-10-10") { + throw RangeError("'version' must be >= '2019-10-10' when providing 'x' permission."); } - return error3; - } - exports2.getErrorFromResponse = getErrorFromResponse; - function calculatePollingIntervalFromDate(retryAfterDate) { - const timeNow = Math.floor((/* @__PURE__ */ new Date()).getTime()); - const retryAfterTime = retryAfterDate.getTime(); - if (timeNow < retryAfterTime) { - return retryAfterTime - timeNow; + if (blobSASSignatureValues.permissions && blobSASSignatureValues.permissions.permanentDelete && version < "2019-10-10") { + throw RangeError("'version' must be >= '2019-10-10' when providing 'y' permission."); } - return void 0; - } - function getStatusFromInitialResponse(inputs) { - const { response, state, operationLocation } = inputs; - function helper() { - var _a; - const mode = (_a = state.config.metadata) === null || _a === void 0 ? void 0 : _a["mode"]; - switch (mode) { - case void 0: - return toOperationStatus(response.rawResponse.statusCode); - case "Body": - return getOperationStatus(response, state); - default: - return "running"; - } + if (blobSASSignatureValues.permissions && blobSASSignatureValues.permissions.tag && version < "2019-12-12") { + throw RangeError("'version' must be >= '2019-12-12' when providing 't' permission."); } - const status = helper(); - return status === "running" && operationLocation === void 0 ? "succeeded" : status; - } - exports2.getStatusFromInitialResponse = getStatusFromInitialResponse; - async function initHttpOperation(inputs) { - const { stateProxy, resourceLocationConfig, processResult, lro, setErrorAsResult } = inputs; - return (0, operation_js_1.initOperation)({ - init: async () => { - const response = await lro.sendInitialRequest(); - const config = inferLroMode({ - rawResponse: response.rawResponse, - requestPath: lro.requestPath, - requestMethod: lro.requestMethod, - resourceLocationConfig - }); - return Object.assign({ response, operationLocation: config === null || config === void 0 ? void 0 : config.operationLocation, resourceLocation: config === null || config === void 0 ? void 0 : config.resourceLocation }, (config === null || config === void 0 ? void 0 : config.mode) ? { metadata: { mode: config.mode } } : {}); - }, - stateProxy, - processResult: processResult ? 
({ flatResponse }, state) => processResult(flatResponse, state) : ({ flatResponse }) => flatResponse, - getOperationStatus: getStatusFromInitialResponse, - setErrorAsResult - }); - } - exports2.initHttpOperation = initHttpOperation; - function getOperationLocation({ rawResponse }, state) { - var _a; - const mode = (_a = state.config.metadata) === null || _a === void 0 ? void 0 : _a["mode"]; - switch (mode) { - case "OperationLocation": { - return getOperationLocationPollingUrl({ - operationLocation: getOperationLocationHeader(rawResponse), - azureAsyncOperation: getAzureAsyncOperationHeader(rawResponse) - }); - } - case "ResourceLocation": { - return getLocationHeader(rawResponse); - } - case "Body": - default: { - return void 0; - } + if (version < "2020-02-10" && blobSASSignatureValues.permissions && (blobSASSignatureValues.permissions.move || blobSASSignatureValues.permissions.execute)) { + throw RangeError("'version' must be >= '2020-02-10' when providing the 'm' or 'e' permission."); } - } - exports2.getOperationLocation = getOperationLocation; - function getOperationStatus({ rawResponse }, state) { - var _a; - const mode = (_a = state.config.metadata) === null || _a === void 0 ? void 0 : _a["mode"]; - switch (mode) { - case "OperationLocation": { - return getStatus(rawResponse); - } - case "ResourceLocation": { - return toOperationStatus(rawResponse.statusCode); - } - case "Body": { - return getProvisioningState(rawResponse); - } - default: - throw new Error(`Internal error: Unexpected operation mode: ${mode}`); + if (version < "2021-04-10" && blobSASSignatureValues.permissions && blobSASSignatureValues.permissions.filterByTags) { + throw RangeError("'version' must be >= '2021-04-10' when providing the 'f' permission."); } - } - exports2.getOperationStatus = getOperationStatus; - function accessBodyProperty({ flatResponse, rawResponse }, prop) { - var _a, _b; - return (_a = flatResponse === null || flatResponse === void 0 ? void 0 : flatResponse[prop]) !== null && _a !== void 0 ? _a : (_b = rawResponse.body) === null || _b === void 0 ? void 0 : _b[prop]; - } - function getResourceLocation(res, state) { - const loc = accessBodyProperty(res, "resourceLocation"); - if (loc && typeof loc === "string") { - state.config.resourceLocation = loc; + if (version < "2020-02-10" && (blobSASSignatureValues.preauthorizedAgentObjectId || blobSASSignatureValues.correlationId)) { + throw RangeError("'version' must be >= '2020-02-10' when providing 'preauthorizedAgentObjectId' or 'correlationId'."); } - return state.config.resourceLocation; - } - exports2.getResourceLocation = getResourceLocation; - function isOperationError(e) { - return e.name === "RestError"; - } - exports2.isOperationError = isOperationError; - async function pollHttpOperation(inputs) { - const { lro, stateProxy, options, processResult, updateState, setDelay, state, setErrorAsResult } = inputs; - return (0, operation_js_1.pollOperation)({ - state, - stateProxy, - setDelay, - processResult: processResult ? ({ flatResponse }, inputState) => processResult(flatResponse, inputState) : ({ flatResponse }) => flatResponse, - getError: getErrorFromResponse, - updateState, - getPollingInterval: parseRetryAfter, - getOperationLocation, - getOperationStatus, - isOperationError, - getResourceLocation, - options, - /** - * The expansion here is intentional because `lro` could be an object that - * references an inner this, so we need to preserve a reference to it. 
- */ - poll: async (location, inputOptions) => lro.sendPollRequest(location, inputOptions), - setErrorAsResult - }); + if (blobSASSignatureValues.encryptionScope && version < "2020-12-06") { + throw RangeError("'version' must be >= '2020-12-06' when provided 'encryptionScope' in SAS."); + } + blobSASSignatureValues.version = version; + return blobSASSignatureValues; } - exports2.pollHttpOperation = pollHttpOperation; } }); -// node_modules/@azure/core-lro/dist/commonjs/poller/poller.js -var require_poller = __commonJS({ - "node_modules/@azure/core-lro/dist/commonjs/poller/poller.js"(exports2) { +// node_modules/@azure/storage-blob/dist/commonjs/BlobLeaseClient.js +var require_BlobLeaseClient = __commonJS({ + "node_modules/@azure/storage-blob/dist/commonjs/BlobLeaseClient.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.buildCreatePoller = void 0; - var operation_js_1 = require_operation(); - var constants_js_1 = require_constants17(); + exports2.BlobLeaseClient = void 0; var core_util_1 = require_commonjs4(); - var createStateProxy = () => ({ + var constants_js_1 = require_constants15(); + var tracing_js_1 = require_tracing(); + var utils_common_js_1 = require_utils_common(); + var BlobLeaseClient = class { + _leaseId; + _url; + _containerOrBlobOperation; + _isContainer; /** - * The state at this point is created to be of type OperationState. - * It will be updated later to be of type TState when the - * customer-provided callback, `updateState`, is called during polling. + * Gets the lease Id. + * + * @readonly */ - initState: (config) => ({ status: "running", config }), - setCanceled: (state) => state.status = "canceled", - setError: (state, error3) => state.error = error3, - setResult: (state, result) => state.result = result, - setRunning: (state) => state.status = "running", - setSucceeded: (state) => state.status = "succeeded", - setFailed: (state) => state.status = "failed", - getError: (state) => state.error, - getResult: (state) => state.result, - isCanceled: (state) => state.status === "canceled", - isFailed: (state) => state.status === "failed", - isRunning: (state) => state.status === "running", - isSucceeded: (state) => state.status === "succeeded" - }); - function buildCreatePoller(inputs) { - const { getOperationLocation, getStatusFromInitialResponse, getStatusFromPollResponse, isOperationError, getResourceLocation, getPollingInterval, getError, resolveOnUnsuccessful } = inputs; - return async ({ init, poll }, options) => { - const { processResult, updateState, withOperationLocation: withOperationLocationCallback, intervalInMs = constants_js_1.POLL_INTERVAL_IN_MS, restoreFrom } = options || {}; - const stateProxy = createStateProxy(); - const withOperationLocation = withOperationLocationCallback ? /* @__PURE__ */ (() => { - let called = false; - return (operationLocation, isUpdated) => { - if (isUpdated) - withOperationLocationCallback(operationLocation); - else if (!called) - withOperationLocationCallback(operationLocation); - called = true; + get leaseId() { + return this._leaseId; + } + /** + * Gets the url. + * + * @readonly + */ + get url() { + return this._url; + } + /** + * Creates an instance of BlobLeaseClient. + * @param client - The client to make the lease operation requests. + * @param leaseId - Initial proposed lease id. 
+ */ + constructor(client, leaseId) { + const clientContext = client.storageClientContext; + this._url = client.url; + if (client.name === void 0) { + this._isContainer = true; + this._containerOrBlobOperation = clientContext.container; + } else { + this._isContainer = false; + this._containerOrBlobOperation = clientContext.blob; + } + if (!leaseId) { + leaseId = (0, core_util_1.randomUUID)(); + } + this._leaseId = leaseId; + } + /** + * Establishes and manages a lock on a container for delete operations, or on a blob + * for write and delete operations. + * The lock duration can be 15 to 60 seconds, or can be infinite. + * @see https://learn.microsoft.com/rest/api/storageservices/lease-container + * and + * @see https://learn.microsoft.com/rest/api/storageservices/lease-blob + * + * @param duration - Must be between 15 to 60 seconds, or infinite (-1) + * @param options - option to configure lease management operations. + * @returns Response data for acquire lease operation. + */ + async acquireLease(duration, options = {}) { + if (this._isContainer && (options.conditions?.ifMatch && options.conditions?.ifMatch !== constants_js_1.ETagNone || options.conditions?.ifNoneMatch && options.conditions?.ifNoneMatch !== constants_js_1.ETagNone || options.conditions?.tagConditions)) { + throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable."); + } + return tracing_js_1.tracingClient.withSpan("BlobLeaseClient-acquireLease", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this._containerOrBlobOperation.acquireLease({ + abortSignal: options.abortSignal, + duration, + modifiedAccessConditions: { + ...options.conditions, + ifTags: options.conditions?.tagConditions + }, + proposedLeaseId: this._leaseId, + tracingOptions: updatedOptions.tracingOptions + })); + }); + } + /** + * To change the ID of the lease. + * @see https://learn.microsoft.com/rest/api/storageservices/lease-container + * and + * @see https://learn.microsoft.com/rest/api/storageservices/lease-blob + * + * @param proposedLeaseId - the proposed new lease Id. + * @param options - option to configure lease management operations. + * @returns Response data for change lease operation. + */ + async changeLease(proposedLeaseId, options = {}) { + if (this._isContainer && (options.conditions?.ifMatch && options.conditions?.ifMatch !== constants_js_1.ETagNone || options.conditions?.ifNoneMatch && options.conditions?.ifNoneMatch !== constants_js_1.ETagNone || options.conditions?.tagConditions)) { + throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable."); + } + return tracing_js_1.tracingClient.withSpan("BlobLeaseClient-changeLease", options, async (updatedOptions) => { + const response = (0, utils_common_js_1.assertResponse)(await this._containerOrBlobOperation.changeLease(this._leaseId, proposedLeaseId, { + abortSignal: options.abortSignal, + modifiedAccessConditions: { + ...options.conditions, + ifTags: options.conditions?.tagConditions + }, + tracingOptions: updatedOptions.tracingOptions + })); + this._leaseId = proposedLeaseId; + return response; + }); + } + /** + * To free the lease if it is no longer needed so that another client may + * immediately acquire a lease against the container or the blob. 
+ * @see https://learn.microsoft.com/rest/api/storageservices/lease-container + * and + * @see https://learn.microsoft.com/rest/api/storageservices/lease-blob + * + * @param options - option to configure lease management operations. + * @returns Response data for release lease operation. + */ + async releaseLease(options = {}) { + if (this._isContainer && (options.conditions?.ifMatch && options.conditions?.ifMatch !== constants_js_1.ETagNone || options.conditions?.ifNoneMatch && options.conditions?.ifNoneMatch !== constants_js_1.ETagNone || options.conditions?.tagConditions)) { + throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable."); + } + return tracing_js_1.tracingClient.withSpan("BlobLeaseClient-releaseLease", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this._containerOrBlobOperation.releaseLease(this._leaseId, { + abortSignal: options.abortSignal, + modifiedAccessConditions: { + ...options.conditions, + ifTags: options.conditions?.tagConditions + }, + tracingOptions: updatedOptions.tracingOptions + })); + }); + } + /** + * To renew the lease. + * @see https://learn.microsoft.com/rest/api/storageservices/lease-container + * and + * @see https://learn.microsoft.com/rest/api/storageservices/lease-blob + * + * @param options - Optional option to configure lease management operations. + * @returns Response data for renew lease operation. + */ + async renewLease(options = {}) { + if (this._isContainer && (options.conditions?.ifMatch && options.conditions?.ifMatch !== constants_js_1.ETagNone || options.conditions?.ifNoneMatch && options.conditions?.ifNoneMatch !== constants_js_1.ETagNone || options.conditions?.tagConditions)) { + throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable."); + } + return tracing_js_1.tracingClient.withSpan("BlobLeaseClient-renewLease", options, async (updatedOptions) => { + return this._containerOrBlobOperation.renewLease(this._leaseId, { + abortSignal: options.abortSignal, + modifiedAccessConditions: { + ...options.conditions, + ifTags: options.conditions?.tagConditions + }, + tracingOptions: updatedOptions.tracingOptions + }); + }); + } + /** + * To end the lease but ensure that another client cannot acquire a new lease + * until the current lease period has expired. + * @see https://learn.microsoft.com/rest/api/storageservices/lease-container + * and + * @see https://learn.microsoft.com/rest/api/storageservices/lease-blob + * + * @param breakPeriod - Break period + * @param options - Optional options to configure lease management operations. + * @returns Response data for break lease operation. + */ + async breakLease(breakPeriod, options = {}) { + if (this._isContainer && (options.conditions?.ifMatch && options.conditions?.ifMatch !== constants_js_1.ETagNone || options.conditions?.ifNoneMatch && options.conditions?.ifNoneMatch !== constants_js_1.ETagNone || options.conditions?.tagConditions)) { + throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. 
Values other than undefined or their default values are not acceptable."); + } + return tracing_js_1.tracingClient.withSpan("BlobLeaseClient-breakLease", options, async (updatedOptions) => { + const operationOptions = { + abortSignal: options.abortSignal, + breakPeriod, + modifiedAccessConditions: { + ...options.conditions, + ifTags: options.conditions?.tagConditions + }, + tracingOptions: updatedOptions.tracingOptions }; - })() : void 0; - const state = restoreFrom ? (0, operation_js_1.deserializeState)(restoreFrom) : await (0, operation_js_1.initOperation)({ - init, - stateProxy, - processResult, - getOperationStatus: getStatusFromInitialResponse, - withOperationLocation, - setErrorAsResult: !resolveOnUnsuccessful + return (0, utils_common_js_1.assertResponse)(await this._containerOrBlobOperation.breakLease(operationOptions)); }); - let resultPromise; - const abortController = new AbortController(); - const handlers = /* @__PURE__ */ new Map(); - const handleProgressEvents = async () => handlers.forEach((h) => h(state)); - const cancelErrMsg = "Operation was canceled"; - let currentPollIntervalInMs = intervalInMs; - const poller = { - getOperationState: () => state, - getResult: () => state.result, - isDone: () => ["succeeded", "failed", "canceled"].includes(state.status), - isStopped: () => resultPromise === void 0, - stopPolling: () => { - abortController.abort(); - }, - toString: () => JSON.stringify({ - state - }), - onProgress: (callback) => { - const s = /* @__PURE__ */ Symbol(); - handlers.set(s, callback); - return () => handlers.delete(s); - }, - pollUntilDone: (pollOptions) => resultPromise !== null && resultPromise !== void 0 ? resultPromise : resultPromise = (async () => { - const { abortSignal: inputAbortSignal } = pollOptions || {}; - function abortListener() { - abortController.abort(); - } - const abortSignal = abortController.signal; - if (inputAbortSignal === null || inputAbortSignal === void 0 ? void 0 : inputAbortSignal.aborted) { - abortController.abort(); - } else if (!abortSignal.aborted) { - inputAbortSignal === null || inputAbortSignal === void 0 ? void 0 : inputAbortSignal.addEventListener("abort", abortListener, { once: true }); - } - try { - if (!poller.isDone()) { - await poller.poll({ abortSignal }); - while (!poller.isDone()) { - await (0, core_util_1.delay)(currentPollIntervalInMs, { abortSignal }); - await poller.poll({ abortSignal }); - } - } - } finally { - inputAbortSignal === null || inputAbortSignal === void 0 ? 
void 0 : inputAbortSignal.removeEventListener("abort", abortListener); - } - if (resolveOnUnsuccessful) { - return poller.getResult(); - } else { - switch (state.status) { - case "succeeded": - return poller.getResult(); - case "canceled": - throw new Error(cancelErrMsg); - case "failed": - throw state.error; - case "notStarted": - case "running": - throw new Error(`Polling completed without succeeding or failing`); - } - } - })().finally(() => { - resultPromise = void 0; - }), - async poll(pollOptions) { - if (resolveOnUnsuccessful) { - if (poller.isDone()) - return; - } else { - switch (state.status) { - case "succeeded": - return; - case "canceled": - throw new Error(cancelErrMsg); - case "failed": - throw state.error; - } - } - await (0, operation_js_1.pollOperation)({ - poll, - state, - stateProxy, - getOperationLocation, - isOperationError, - withOperationLocation, - getPollingInterval, - getOperationStatus: getStatusFromPollResponse, - getResourceLocation, - processResult, - getError, - updateState, - options: pollOptions, - setDelay: (pollIntervalInMs) => { - currentPollIntervalInMs = pollIntervalInMs; - }, - setErrorAsResult: !resolveOnUnsuccessful - }); - await handleProgressEvents(); - if (!resolveOnUnsuccessful) { - switch (state.status) { - case "canceled": - throw new Error(cancelErrMsg); - case "failed": - throw state.error; - } - } - } - }; - return poller; - }; - } - exports2.buildCreatePoller = buildCreatePoller; + } + }; + exports2.BlobLeaseClient = BlobLeaseClient; } }); -// node_modules/@azure/core-lro/dist/commonjs/http/poller.js -var require_poller2 = __commonJS({ - "node_modules/@azure/core-lro/dist/commonjs/http/poller.js"(exports2) { +// node_modules/@azure/storage-blob/dist/commonjs/utils/RetriableReadableStream.js +var require_RetriableReadableStream = __commonJS({ + "node_modules/@azure/storage-blob/dist/commonjs/utils/RetriableReadableStream.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.createHttpPoller = void 0; - var operation_js_1 = require_operation2(); - var poller_js_1 = require_poller(); - async function createHttpPoller(lro, options) { - const { resourceLocationConfig, intervalInMs, processResult, restoreFrom, updateState, withOperationLocation, resolveOnUnsuccessful = false } = options || {}; - return (0, poller_js_1.buildCreatePoller)({ - getStatusFromInitialResponse: operation_js_1.getStatusFromInitialResponse, - getStatusFromPollResponse: operation_js_1.getOperationStatus, - isOperationError: operation_js_1.isOperationError, - getOperationLocation: operation_js_1.getOperationLocation, - getResourceLocation: operation_js_1.getResourceLocation, - getPollingInterval: operation_js_1.parseRetryAfter, - getError: operation_js_1.getErrorFromResponse, - resolveOnUnsuccessful - })({ - init: async () => { - const response = await lro.sendInitialRequest(); - const config = (0, operation_js_1.inferLroMode)({ - rawResponse: response.rawResponse, - requestPath: lro.requestPath, - requestMethod: lro.requestMethod, - resourceLocationConfig - }); - return Object.assign({ response, operationLocation: config === null || config === void 0 ? void 0 : config.operationLocation, resourceLocation: config === null || config === void 0 ? void 0 : config.resourceLocation }, (config === null || config === void 0 ? void 0 : config.mode) ? 
{ metadata: { mode: config.mode } } : {}); - }, - poll: lro.sendPollRequest - }, { - intervalInMs, - withOperationLocation, - restoreFrom, - updateState, - processResult: processResult ? ({ flatResponse }, state) => processResult(flatResponse, state) : ({ flatResponse }) => flatResponse - }); - } - exports2.createHttpPoller = createHttpPoller; + exports2.RetriableReadableStream = void 0; + var abort_controller_1 = require_commonjs11(); + var node_stream_1 = require("node:stream"); + var RetriableReadableStream = class extends node_stream_1.Readable { + start; + offset; + end; + getter; + source; + retries = 0; + maxRetryRequests; + onProgress; + options; + /** + * Creates an instance of RetriableReadableStream. + * + * @param source - The current ReadableStream returned from getter + * @param getter - A method calling downloading request returning + * a new ReadableStream from specified offset + * @param offset - Offset position in original data source to read + * @param count - How much data in original data source to read + * @param options - + */ + constructor(source, getter, offset, count, options = {}) { + super({ highWaterMark: options.highWaterMark }); + this.getter = getter; + this.source = source; + this.start = offset; + this.offset = offset; + this.end = offset + count - 1; + this.maxRetryRequests = options.maxRetryRequests && options.maxRetryRequests >= 0 ? options.maxRetryRequests : 0; + this.onProgress = options.onProgress; + this.options = options; + this.setSourceEventHandlers(); + } + _read() { + this.source.resume(); + } + setSourceEventHandlers() { + this.source.on("data", this.sourceDataHandler); + this.source.on("end", this.sourceErrorOrEndHandler); + this.source.on("error", this.sourceErrorOrEndHandler); + this.source.on("aborted", this.sourceAbortedHandler); + } + removeSourceEventHandlers() { + this.source.removeListener("data", this.sourceDataHandler); + this.source.removeListener("end", this.sourceErrorOrEndHandler); + this.source.removeListener("error", this.sourceErrorOrEndHandler); + this.source.removeListener("aborted", this.sourceAbortedHandler); + } + sourceDataHandler = (data) => { + if (this.options.doInjectErrorOnce) { + this.options.doInjectErrorOnce = void 0; + this.source.pause(); + this.sourceErrorOrEndHandler(); + this.source.destroy(); + return; + } + this.offset += data.length; + if (this.onProgress) { + this.onProgress({ loadedBytes: this.offset - this.start }); + } + if (!this.push(data)) { + this.source.pause(); + } + }; + sourceAbortedHandler = () => { + const abortError = new abort_controller_1.AbortError("The operation was aborted."); + this.destroy(abortError); + }; + sourceErrorOrEndHandler = (err) => { + if (err && err.name === "AbortError") { + this.destroy(err); + return; + } + this.removeSourceEventHandlers(); + if (this.offset - 1 === this.end) { + this.push(null); + } else if (this.offset <= this.end) { + if (this.retries < this.maxRetryRequests) { + this.retries += 1; + this.getter(this.offset).then((newSource) => { + this.source = newSource; + this.setSourceEventHandlers(); + return; + }).catch((error3) => { + this.destroy(error3); + }); + } else { + this.destroy(new Error(`Data corruption failure: received less data than required and reached maxRetires limitation. 
Received data offset: ${this.offset - 1}, data needed offset: ${this.end}, retries: ${this.retries}, max retries: ${this.maxRetryRequests}`)); + } + } else { + this.destroy(new Error(`Data corruption failure: Received more data than original request, data needed offset is ${this.end}, received offset: ${this.offset - 1}`)); + } + }; + _destroy(error3, callback) { + this.removeSourceEventHandlers(); + this.source.destroy(); + callback(error3 === null ? void 0 : error3); + } + }; + exports2.RetriableReadableStream = RetriableReadableStream; } }); -// node_modules/@azure/core-lro/dist/commonjs/legacy/lroEngine/operation.js -var require_operation3 = __commonJS({ - "node_modules/@azure/core-lro/dist/commonjs/legacy/lroEngine/operation.js"(exports2) { +// node_modules/@azure/storage-blob/dist/commonjs/BlobDownloadResponse.js +var require_BlobDownloadResponse = __commonJS({ + "node_modules/@azure/storage-blob/dist/commonjs/BlobDownloadResponse.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.GenericPollOperation = void 0; - var operation_js_1 = require_operation2(); - var logger_js_1 = require_logger2(); - var createStateProxy = () => ({ - initState: (config) => ({ config, isStarted: true }), - setCanceled: (state) => state.isCancelled = true, - setError: (state, error3) => state.error = error3, - setResult: (state, result) => state.result = result, - setRunning: (state) => state.isStarted = true, - setSucceeded: (state) => state.isCompleted = true, - setFailed: () => { - }, - getError: (state) => state.error, - getResult: (state) => state.result, - isCanceled: (state) => !!state.isCancelled, - isFailed: (state) => !!state.error, - isRunning: (state) => !!state.isStarted, - isSucceeded: (state) => Boolean(state.isCompleted && !state.isCancelled && !state.error) - }); - var GenericPollOperation = class { - constructor(state, lro, setErrorAsResult, lroResourceLocationConfig, processResult, updateState, isDone) { - this.state = state; - this.lro = lro; - this.setErrorAsResult = setErrorAsResult; - this.lroResourceLocationConfig = lroResourceLocationConfig; - this.processResult = processResult; - this.updateState = updateState; - this.isDone = isDone; + exports2.BlobDownloadResponse = void 0; + var core_util_1 = require_commonjs4(); + var RetriableReadableStream_js_1 = require_RetriableReadableStream(); + var BlobDownloadResponse = class { + /** + * Indicates that the service supports + * requests for partial file content. + * + * @readonly + */ + get acceptRanges() { + return this.originalResponse.acceptRanges; + } + /** + * Returns if it was previously specified + * for the file. + * + * @readonly + */ + get cacheControl() { + return this.originalResponse.cacheControl; + } + /** + * Returns the value that was specified + * for the 'x-ms-content-disposition' header and specifies how to process the + * response. + * + * @readonly + */ + get contentDisposition() { + return this.originalResponse.contentDisposition; + } + /** + * Returns the value that was specified + * for the Content-Encoding request header. + * + * @readonly + */ + get contentEncoding() { + return this.originalResponse.contentEncoding; + } + /** + * Returns the value that was specified + * for the Content-Language request header. + * + * @readonly + */ + get contentLanguage() { + return this.originalResponse.contentLanguage; + } + /** + * The current sequence number for a + * page blob. This header is not returned for block blobs or append blobs. 
+ * + * @readonly + */ + get blobSequenceNumber() { + return this.originalResponse.blobSequenceNumber; + } + /** + * The blob's type. Possible values include: + * 'BlockBlob', 'PageBlob', 'AppendBlob'. + * + * @readonly + */ + get blobType() { + return this.originalResponse.blobType; + } + /** + * The number of bytes present in the + * response body. + * + * @readonly + */ + get contentLength() { + return this.originalResponse.contentLength; + } + /** + * If the file has an MD5 hash and the + * request is to read the full file, this response header is returned so that + * the client can check for message content integrity. If the request is to + * read a specified range and the 'x-ms-range-get-content-md5' is set to + * true, then the request returns an MD5 hash for the range, as long as the + * range size is less than or equal to 4 MB. If neither of these sets of + * conditions is true, then no value is returned for the 'Content-MD5' + * header. + * + * @readonly + */ + get contentMD5() { + return this.originalResponse.contentMD5; + } + /** + * Indicates the range of bytes returned if + * the client requested a subset of the file by setting the Range request + * header. + * + * @readonly + */ + get contentRange() { + return this.originalResponse.contentRange; + } + /** + * The content type specified for the file. + * The default content type is 'application/octet-stream' + * + * @readonly + */ + get contentType() { + return this.originalResponse.contentType; + } + /** + * Conclusion time of the last attempted + * Copy File operation where this file was the destination file. This value + * can specify the time of a completed, aborted, or failed copy attempt. + * + * @readonly + */ + get copyCompletedOn() { + return this.originalResponse.copyCompletedOn; + } + /** + * String identifier for the last attempted Copy + * File operation where this file was the destination file. + * + * @readonly + */ + get copyId() { + return this.originalResponse.copyId; + } + /** + * Contains the number of bytes copied and + * the total bytes in the source in the last attempted Copy File operation + * where this file was the destination file. Can show between 0 and + * Content-Length bytes copied. + * + * @readonly + */ + get copyProgress() { + return this.originalResponse.copyProgress; + } + /** + * URL up to 2KB in length that specifies the + * source file used in the last attempted Copy File operation where this file + * was the destination file. + * + * @readonly + */ + get copySource() { + return this.originalResponse.copySource; + } + /** + * State of the copy operation + * identified by 'x-ms-copy-id'. Possible values include: 'pending', + * 'success', 'aborted', 'failed' + * + * @readonly + */ + get copyStatus() { + return this.originalResponse.copyStatus; + } + /** + * Only appears when + * x-ms-copy-status is failed or pending. Describes cause of fatal or + * non-fatal copy operation failure. + * + * @readonly + */ + get copyStatusDescription() { + return this.originalResponse.copyStatusDescription; + } + /** + * When a blob is leased, + * specifies whether the lease is of infinite or fixed duration. Possible + * values include: 'infinite', 'fixed'. + * + * @readonly + */ + get leaseDuration() { + return this.originalResponse.leaseDuration; + } + /** + * Lease state of the blob. Possible + * values include: 'available', 'leased', 'expired', 'breaking', 'broken'. 
+ * + * @readonly + */ + get leaseState() { + return this.originalResponse.leaseState; + } + /** + * The current lease status of the + * blob. Possible values include: 'locked', 'unlocked'. + * + * @readonly + */ + get leaseStatus() { + return this.originalResponse.leaseStatus; } - setPollerConfig(pollerConfig) { - this.pollerConfig = pollerConfig; + /** + * A UTC date/time value generated by the service that + * indicates the time at which the response was initiated. + * + * @readonly + */ + get date() { + return this.originalResponse.date; } - async update(options) { - var _a; - const stateProxy = createStateProxy(); - if (!this.state.isStarted) { - this.state = Object.assign(Object.assign({}, this.state), await (0, operation_js_1.initHttpOperation)({ - lro: this.lro, - stateProxy, - resourceLocationConfig: this.lroResourceLocationConfig, - processResult: this.processResult, - setErrorAsResult: this.setErrorAsResult - })); - } - const updateState = this.updateState; - const isDone = this.isDone; - if (!this.state.isCompleted && this.state.error === void 0) { - await (0, operation_js_1.pollHttpOperation)({ - lro: this.lro, - state: this.state, - stateProxy, - processResult: this.processResult, - updateState: updateState ? (state, { rawResponse }) => updateState(state, rawResponse) : void 0, - isDone: isDone ? ({ flatResponse }, state) => isDone(flatResponse, state) : void 0, - options, - setDelay: (intervalInMs) => { - this.pollerConfig.intervalInMs = intervalInMs; - }, - setErrorAsResult: this.setErrorAsResult - }); - } - (_a = options === null || options === void 0 ? void 0 : options.fireProgress) === null || _a === void 0 ? void 0 : _a.call(options, this.state); - return this; + /** + * The number of committed blocks + * present in the blob. This header is returned only for append blobs. + * + * @readonly + */ + get blobCommittedBlockCount() { + return this.originalResponse.blobCommittedBlockCount; } - async cancel() { - logger_js_1.logger.error("`cancelOperation` is deprecated because it wasn't implemented"); - return this; + /** + * The ETag contains a value that you can use to + * perform operations conditionally, in quotes. + * + * @readonly + */ + get etag() { + return this.originalResponse.etag; } /** - * Serializes the Poller operation. + * The number of tags associated with the blob + * + * @readonly */ - toString() { - return JSON.stringify({ - state: this.state - }); + get tagCount() { + return this.originalResponse.tagCount; } - }; - exports2.GenericPollOperation = GenericPollOperation; - } -}); - -// node_modules/@azure/core-lro/dist/commonjs/legacy/poller.js -var require_poller3 = __commonJS({ - "node_modules/@azure/core-lro/dist/commonjs/legacy/poller.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.Poller = exports2.PollerCancelledError = exports2.PollerStoppedError = void 0; - var PollerStoppedError = class _PollerStoppedError extends Error { - constructor(message) { - super(message); - this.name = "PollerStoppedError"; - Object.setPrototypeOf(this, _PollerStoppedError.prototype); + /** + * The error code. 
+ * + * @readonly + */ + get errorCode() { + return this.originalResponse.errorCode; } - }; - exports2.PollerStoppedError = PollerStoppedError; - var PollerCancelledError = class _PollerCancelledError extends Error { - constructor(message) { - super(message); - this.name = "PollerCancelledError"; - Object.setPrototypeOf(this, _PollerCancelledError.prototype); + /** + * The value of this header is set to + * true if the file data and application metadata are completely encrypted + * using the specified algorithm. Otherwise, the value is set to false (when + * the file is unencrypted, or if only parts of the file/application metadata + * are encrypted). + * + * @readonly + */ + get isServerEncrypted() { + return this.originalResponse.isServerEncrypted; } - }; - exports2.PollerCancelledError = PollerCancelledError; - var Poller = class { /** - * A poller needs to be initialized by passing in at least the basic properties of the `PollOperation`. + * If the blob has a MD5 hash, and if + * request contains range header (Range or x-ms-range), this response header + * is returned with the value of the whole blob's MD5 value. This value may + * or may not be equal to the value returned in Content-MD5 header, with the + * latter calculated from the requested range. * - * When writing an implementation of a Poller, this implementation needs to deal with the initialization - * of any custom state beyond the basic definition of the poller. The basic poller assumes that the poller's - * operation has already been defined, at least its basic properties. The code below shows how to approach - * the definition of the constructor of a new custom poller. + * @readonly + */ + get blobContentMD5() { + return this.originalResponse.blobContentMD5; + } + /** + * Returns the date and time the file was last + * modified. Any operation that modifies the file or its properties updates + * the last modified time. * - * ```ts - * export class MyPoller extends Poller { - * constructor({ - * // Anything you might need outside of the basics - * }) { - * let state: MyOperationState = { - * privateProperty: private, - * publicProperty: public, - * }; + * @readonly + */ + get lastModified() { + return this.originalResponse.lastModified; + } + /** + * Returns the UTC date and time generated by the service that indicates the time at which the blob was + * last read or written to. * - * const operation = { - * state, - * update, - * cancel, - * toString - * } + * @readonly + */ + get lastAccessed() { + return this.originalResponse.lastAccessed; + } + /** + * Returns the date and time the blob was created. * - * // Sending the operation to the parent's constructor. - * super(operation); + * @readonly + */ + get createdOn() { + return this.originalResponse.createdOn; + } + /** + * A name-value pair + * to associate with a file storage object. * - * // You can assign more local properties here. - * } - * } - * ``` + * @readonly + */ + get metadata() { + return this.originalResponse.metadata; + } + /** + * This header uniquely identifies the request + * that was made and can be used for troubleshooting the request. * - * Inside of this constructor, a new promise is created. This will be used to - * tell the user when the poller finishes (see `pollUntilDone()`). The promise's - * resolve and reject methods are also used internally to control when to resolve - * or reject anyone waiting for the poller to finish. 
+ * @readonly + */ + get requestId() { + return this.originalResponse.requestId; + } + /** + * If a client request id header is sent in the request, this header will be present in the + * response with the same value. * - * The constructor of a custom implementation of a poller is where any serialized version of - * a previous poller's operation should be deserialized into the operation sent to the - * base constructor. For example: + * @readonly + */ + get clientRequestId() { + return this.originalResponse.clientRequestId; + } + /** + * Indicates the version of the Blob service used + * to execute the request. * - * ```ts - * export class MyPoller extends Poller { - * constructor( - * baseOperation: string | undefined - * ) { - * let state: MyOperationState = {}; - * if (baseOperation) { - * state = { - * ...JSON.parse(baseOperation).state, - * ...state - * }; - * } - * const operation = { - * state, - * // ... - * } - * super(operation); - * } - * } - * ``` + * @readonly + */ + get version() { + return this.originalResponse.version; + } + /** + * Indicates the versionId of the downloaded blob version. * - * @param operation - Must contain the basic properties of `PollOperation`. + * @readonly */ - constructor(operation) { - this.resolveOnUnsuccessful = false; - this.stopped = true; - this.pollProgressCallbacks = []; - this.operation = operation; - this.promise = new Promise((resolve6, reject) => { - this.resolve = resolve6; - this.reject = reject; - }); - this.promise.catch(() => { - }); + get versionId() { + return this.originalResponse.versionId; } /** - * Starts a loop that will break only if the poller is done - * or if the poller is stopped. + * Indicates whether version of this blob is a current version. + * + * @readonly */ - async startPolling(pollOptions = {}) { - if (this.stopped) { - this.stopped = false; - } - while (!this.isStopped() && !this.isDone()) { - await this.poll(pollOptions); - await this.delay(); - } + get isCurrentVersion() { + return this.originalResponse.isCurrentVersion; } /** - * pollOnce does one polling, by calling to the update method of the underlying - * poll operation to make any relevant change effective. + * The SHA-256 hash of the encryption key used to encrypt the blob. This value is only returned + * when the blob was encrypted with a customer-provided key. * - * It only optionally receives an object with an abortSignal property, from \@azure/abort-controller's AbortSignalLike. + * @readonly + */ + get encryptionKeySha256() { + return this.originalResponse.encryptionKeySha256; + } + /** + * If the request is to read a specified range and the x-ms-range-get-content-crc64 is set to + * true, then the request returns a crc64 for the range, as long as the range size is less than + * or equal to 4 MB. If both x-ms-range-get-content-crc64 & x-ms-range-get-content-md5 is + * specified in the same request, it will fail with 400(Bad Request) + */ + get contentCrc64() { + return this.originalResponse.contentCrc64; + } + /** + * Object Replication Policy Id of the destination blob. * - * @param options - Optional properties passed to the operation's update method. 
+ * @readonly */ - async pollOnce(options = {}) { - if (!this.isDone()) { - this.operation = await this.operation.update({ - abortSignal: options.abortSignal, - fireProgress: this.fireProgress.bind(this) - }); - } - this.processUpdatedState(); + get objectReplicationDestinationPolicyId() { + return this.originalResponse.objectReplicationDestinationPolicyId; } /** - * fireProgress calls the functions passed in via onProgress the method of the poller. + * Parsed Object Replication Policy Id, Rule Id(s) and status of the source blob. * - * It loops over all of the callbacks received from onProgress, and executes them, sending them - * the current operation state. + * @readonly + */ + get objectReplicationSourceProperties() { + return this.originalResponse.objectReplicationSourceProperties; + } + /** + * If this blob has been sealed. * - * @param state - The current operation state. + * @readonly */ - fireProgress(state) { - for (const callback of this.pollProgressCallbacks) { - callback(state); - } + get isSealed() { + return this.originalResponse.isSealed; } /** - * Invokes the underlying operation's cancel method. + * UTC date/time value generated by the service that indicates the time at which the blob immutability policy will expire. + * + * @readonly */ - async cancelOnce(options = {}) { - this.operation = await this.operation.cancel(options); + get immutabilityPolicyExpiresOn() { + return this.originalResponse.immutabilityPolicyExpiresOn; } /** - * Returns a promise that will resolve once a single polling request finishes. - * It does this by calling the update method of the Poller's operation. + * Indicates immutability policy mode. * - * It only optionally receives an object with an abortSignal property, from \@azure/abort-controller's AbortSignalLike. + * @readonly + */ + get immutabilityPolicyMode() { + return this.originalResponse.immutabilityPolicyMode; + } + /** + * Indicates if a legal hold is present on the blob. * - * @param options - Optional properties passed to the operation's update method. + * @readonly */ - poll(options = {}) { - if (!this.pollOncePromise) { - this.pollOncePromise = this.pollOnce(options); - const clearPollOncePromise = () => { - this.pollOncePromise = void 0; - }; - this.pollOncePromise.then(clearPollOncePromise, clearPollOncePromise).catch(this.reject); + get legalHold() { + return this.originalResponse.legalHold; + } + /** + * The response body as a browser Blob. + * Always undefined in node.js. + * + * @readonly + */ + get contentAsBlob() { + return this.originalResponse.blobBody; + } + /** + * The response body as a node.js Readable stream. + * Always undefined in the browser. + * + * It will automatically retry when internal read stream unexpected ends. + * + * @readonly + */ + get readableStreamBody() { + return core_util_1.isNodeLike ? this.blobDownloadStream : void 0; + } + /** + * The HTTP response. + */ + get _response() { + return this.originalResponse._response; + } + originalResponse; + blobDownloadStream; + /** + * Creates an instance of BlobDownloadResponse. 
+ * + * @param originalResponse - + * @param getter - + * @param offset - + * @param count - + * @param options - + */ + constructor(originalResponse, getter, offset, count, options = {}) { + this.originalResponse = originalResponse; + this.blobDownloadStream = new RetriableReadableStream_js_1.RetriableReadableStream(this.originalResponse.readableStreamBody, getter, offset, count, options); + } + }; + exports2.BlobDownloadResponse = BlobDownloadResponse; + } +}); + +// node_modules/@azure/storage-blob/dist/commonjs/internal-avro/AvroConstants.js +var require_AvroConstants = __commonJS({ + "node_modules/@azure/storage-blob/dist/commonjs/internal-avro/AvroConstants.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.AVRO_SCHEMA_KEY = exports2.AVRO_CODEC_KEY = exports2.AVRO_INIT_BYTES = exports2.AVRO_SYNC_MARKER_SIZE = void 0; + exports2.AVRO_SYNC_MARKER_SIZE = 16; + exports2.AVRO_INIT_BYTES = new Uint8Array([79, 98, 106, 1]); + exports2.AVRO_CODEC_KEY = "avro.codec"; + exports2.AVRO_SCHEMA_KEY = "avro.schema"; + } +}); + +// node_modules/@azure/storage-blob/dist/commonjs/internal-avro/AvroParser.js +var require_AvroParser = __commonJS({ + "node_modules/@azure/storage-blob/dist/commonjs/internal-avro/AvroParser.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.AvroType = exports2.AvroParser = void 0; + var AvroParser = class _AvroParser { + /** + * Reads a fixed number of bytes from the stream. + * + * @param stream - + * @param length - + * @param options - + */ + static async readFixedBytes(stream2, length, options = {}) { + const bytes = await stream2.read(length, { abortSignal: options.abortSignal }); + if (bytes.length !== length) { + throw new Error("Hit stream end."); + } + return bytes; + } + /** + * Reads a single byte from the stream. + * + * @param stream - + * @param options - + */ + static async readByte(stream2, options = {}) { + const buf = await _AvroParser.readFixedBytes(stream2, 1, options); + return buf[0]; + } + // int and long are stored in variable-length zig-zag coding. + // variable-length: https://lucene.apache.org/core/3_5_0/fileformats.html#VInt + // zig-zag: https://developers.google.com/protocol-buffers/docs/encoding?csw=1#types + static async readZigZagLong(stream2, options = {}) { + let zigZagEncoded = 0; + let significanceInBit = 0; + let byte, haveMoreByte, significanceInFloat; + do { + byte = await _AvroParser.readByte(stream2, options); + haveMoreByte = byte & 128; + zigZagEncoded |= (byte & 127) << significanceInBit; + significanceInBit += 7; + } while (haveMoreByte && significanceInBit < 28); + if (haveMoreByte) { + zigZagEncoded = zigZagEncoded; + significanceInFloat = 268435456; + do { + byte = await _AvroParser.readByte(stream2, options); + zigZagEncoded += (byte & 127) * significanceInFloat; + significanceInFloat *= 128; + } while (byte & 128); + const res = (zigZagEncoded % 2 ? 
-(zigZagEncoded + 1) : zigZagEncoded) / 2; + if (res < Number.MIN_SAFE_INTEGER || res > Number.MAX_SAFE_INTEGER) { + throw new Error("Integer overflow."); + } + return res; + } + return zigZagEncoded >> 1 ^ -(zigZagEncoded & 1); + } + static async readLong(stream2, options = {}) { + return _AvroParser.readZigZagLong(stream2, options); + } + static async readInt(stream2, options = {}) { + return _AvroParser.readZigZagLong(stream2, options); + } + static async readNull() { + return null; + } + static async readBoolean(stream2, options = {}) { + const b = await _AvroParser.readByte(stream2, options); + if (b === 1) { + return true; + } else if (b === 0) { + return false; + } else { + throw new Error("Byte was not a boolean."); + } + } + static async readFloat(stream2, options = {}) { + const u8arr = await _AvroParser.readFixedBytes(stream2, 4, options); + const view = new DataView(u8arr.buffer, u8arr.byteOffset, u8arr.byteLength); + return view.getFloat32(0, true); + } + static async readDouble(stream2, options = {}) { + const u8arr = await _AvroParser.readFixedBytes(stream2, 8, options); + const view = new DataView(u8arr.buffer, u8arr.byteOffset, u8arr.byteLength); + return view.getFloat64(0, true); + } + static async readBytes(stream2, options = {}) { + const size = await _AvroParser.readLong(stream2, options); + if (size < 0) { + throw new Error("Bytes size was negative."); + } + return stream2.read(size, { abortSignal: options.abortSignal }); + } + static async readString(stream2, options = {}) { + const u8arr = await _AvroParser.readBytes(stream2, options); + const utf8decoder = new TextDecoder(); + return utf8decoder.decode(u8arr); + } + static async readMapPair(stream2, readItemMethod, options = {}) { + const key = await _AvroParser.readString(stream2, options); + const value = await readItemMethod(stream2, options); + return { key, value }; + } + static async readMap(stream2, readItemMethod, options = {}) { + const readPairMethod = (s, opts = {}) => { + return _AvroParser.readMapPair(s, readItemMethod, opts); + }; + const pairs2 = await _AvroParser.readArray(stream2, readPairMethod, options); + const dict = {}; + for (const pair of pairs2) { + dict[pair.key] = pair.value; } - return this.pollOncePromise; + return dict; } - processUpdatedState() { - if (this.operation.state.error) { - this.stopped = true; - if (!this.resolveOnUnsuccessful) { - this.reject(this.operation.state.error); - throw this.operation.state.error; + static async readArray(stream2, readItemMethod, options = {}) { + const items = []; + for (let count = await _AvroParser.readLong(stream2, options); count !== 0; count = await _AvroParser.readLong(stream2, options)) { + if (count < 0) { + await _AvroParser.readLong(stream2, options); + count = -count; } - } - if (this.operation.state.isCancelled) { - this.stopped = true; - if (!this.resolveOnUnsuccessful) { - const error3 = new PollerCancelledError("Operation was canceled"); - this.reject(error3); - throw error3; + while (count--) { + const item = await readItemMethod(stream2, options); + items.push(item); } } - if (this.isDone() && this.resolve) { - this.resolve(this.getResult()); - } + return items; } + }; + exports2.AvroParser = AvroParser; + var AvroComplex; + (function(AvroComplex2) { + AvroComplex2["RECORD"] = "record"; + AvroComplex2["ENUM"] = "enum"; + AvroComplex2["ARRAY"] = "array"; + AvroComplex2["MAP"] = "map"; + AvroComplex2["UNION"] = "union"; + AvroComplex2["FIXED"] = "fixed"; + })(AvroComplex || (AvroComplex = {})); + var AvroPrimitive; + 
(function(AvroPrimitive2) { + AvroPrimitive2["NULL"] = "null"; + AvroPrimitive2["BOOLEAN"] = "boolean"; + AvroPrimitive2["INT"] = "int"; + AvroPrimitive2["LONG"] = "long"; + AvroPrimitive2["FLOAT"] = "float"; + AvroPrimitive2["DOUBLE"] = "double"; + AvroPrimitive2["BYTES"] = "bytes"; + AvroPrimitive2["STRING"] = "string"; + })(AvroPrimitive || (AvroPrimitive = {})); + var AvroType = class _AvroType { /** - * Returns a promise that will resolve once the underlying operation is completed. + * Determines the AvroType from the Avro Schema. */ - async pollUntilDone(pollOptions = {}) { - if (this.stopped) { - this.startPolling(pollOptions).catch(this.reject); + // eslint-disable-next-line @typescript-eslint/no-wrapper-object-types + static fromSchema(schema2) { + if (typeof schema2 === "string") { + return _AvroType.fromStringSchema(schema2); + } else if (Array.isArray(schema2)) { + return _AvroType.fromArraySchema(schema2); + } else { + return _AvroType.fromObjectSchema(schema2); } - this.processUpdatedState(); - return this.promise; } - /** - * Invokes the provided callback after each polling is completed, - * sending the current state of the poller's operation. - * - * It returns a method that can be used to stop receiving updates on the given callback function. - */ - onProgress(callback) { - this.pollProgressCallbacks.push(callback); - return () => { - this.pollProgressCallbacks = this.pollProgressCallbacks.filter((c) => c !== callback); - }; + static fromStringSchema(schema2) { + switch (schema2) { + case AvroPrimitive.NULL: + case AvroPrimitive.BOOLEAN: + case AvroPrimitive.INT: + case AvroPrimitive.LONG: + case AvroPrimitive.FLOAT: + case AvroPrimitive.DOUBLE: + case AvroPrimitive.BYTES: + case AvroPrimitive.STRING: + return new AvroPrimitiveType(schema2); + default: + throw new Error(`Unexpected Avro type ${schema2}`); + } } - /** - * Returns true if the poller has finished polling. - */ - isDone() { - const state = this.operation.state; - return Boolean(state.isCompleted || state.isCancelled || state.error); + static fromArraySchema(schema2) { + return new AvroUnionType(schema2.map(_AvroType.fromSchema)); } - /** - * Stops the poller from continuing to poll. 
- */ - stopPolling() { - if (!this.stopped) { - this.stopped = true; - if (this.reject) { - this.reject(new PollerStoppedError("This poller is already stopped")); - } + static fromObjectSchema(schema2) { + const type2 = schema2.type; + try { + return _AvroType.fromStringSchema(type2); + } catch { + } + switch (type2) { + case AvroComplex.RECORD: + if (schema2.aliases) { + throw new Error(`aliases currently is not supported, schema: ${schema2}`); + } + if (!schema2.name) { + throw new Error(`Required attribute 'name' doesn't exist on schema: ${schema2}`); + } + const fields = {}; + if (!schema2.fields) { + throw new Error(`Required attribute 'fields' doesn't exist on schema: ${schema2}`); + } + for (const field of schema2.fields) { + fields[field.name] = _AvroType.fromSchema(field.type); + } + return new AvroRecordType(fields, schema2.name); + case AvroComplex.ENUM: + if (schema2.aliases) { + throw new Error(`aliases currently is not supported, schema: ${schema2}`); + } + if (!schema2.symbols) { + throw new Error(`Required attribute 'symbols' doesn't exist on schema: ${schema2}`); + } + return new AvroEnumType(schema2.symbols); + case AvroComplex.MAP: + if (!schema2.values) { + throw new Error(`Required attribute 'values' doesn't exist on schema: ${schema2}`); + } + return new AvroMapType(_AvroType.fromSchema(schema2.values)); + case AvroComplex.ARRAY: + // Unused today + case AvroComplex.FIXED: + // Unused today + default: + throw new Error(`Unexpected Avro type ${type2} in ${schema2}`); } } - /** - * Returns true if the poller is stopped. - */ - isStopped() { - return this.stopped; + }; + exports2.AvroType = AvroType; + var AvroPrimitiveType = class extends AvroType { + _primitive; + constructor(primitive) { + super(); + this._primitive = primitive; } - /** - * Attempts to cancel the underlying operation. - * - * It only optionally receives an object with an abortSignal property, from \@azure/abort-controller's AbortSignalLike. - * - * If it's called again before it finishes, it will throw an error. - * - * @param options - Optional properties passed to the operation's update method. - */ - cancelOperation(options = {}) { - if (!this.cancelPromise) { - this.cancelPromise = this.cancelOnce(options); - } else if (options.abortSignal) { - throw new Error("A cancel request is currently pending"); + // eslint-disable-next-line @typescript-eslint/no-wrapper-object-types + read(stream2, options = {}) { + switch (this._primitive) { + case AvroPrimitive.NULL: + return AvroParser.readNull(); + case AvroPrimitive.BOOLEAN: + return AvroParser.readBoolean(stream2, options); + case AvroPrimitive.INT: + return AvroParser.readInt(stream2, options); + case AvroPrimitive.LONG: + return AvroParser.readLong(stream2, options); + case AvroPrimitive.FLOAT: + return AvroParser.readFloat(stream2, options); + case AvroPrimitive.DOUBLE: + return AvroParser.readDouble(stream2, options); + case AvroPrimitive.BYTES: + return AvroParser.readBytes(stream2, options); + case AvroPrimitive.STRING: + return AvroParser.readString(stream2, options); + default: + throw new Error("Unknown Avro Primitive"); } - return this.cancelPromise; } - /** - * Returns the state of the operation. 
- * - * Even though TState will be the same type inside any of the methods of any extension of the Poller class, - * implementations of the pollers can customize what's shared with the public by writing their own - * version of the `getOperationState` method, and by defining two types, one representing the internal state of the poller - * and a public type representing a safe to share subset of the properties of the internal state. - * Their definition of getOperationState can then return their public type. - * - * Example: - * - * ```ts - * // Let's say we have our poller's operation state defined as: - * interface MyOperationState extends PollOperationState { - * privateProperty?: string; - * publicProperty?: string; - * } - * - * // To allow us to have a true separation of public and private state, we have to define another interface: - * interface PublicState extends PollOperationState { - * publicProperty?: string; - * } - * - * // Then, we define our Poller as follows: - * export class MyPoller extends Poller { - * // ... More content is needed here ... - * - * public getOperationState(): PublicState { - * const state: PublicState = this.operation.state; - * return { - * // Properties from PollOperationState - * isStarted: state.isStarted, - * isCompleted: state.isCompleted, - * isCancelled: state.isCancelled, - * error: state.error, - * result: state.result, - * - * // The only other property needed by PublicState. - * publicProperty: state.publicProperty - * } - * } - * } - * ``` - * - * You can see this in the tests of this repository, go to the file: - * `../test/utils/testPoller.ts` - * and look for the getOperationState implementation. - */ - getOperationState() { - return this.operation.state; + }; + var AvroEnumType = class extends AvroType { + _symbols; + constructor(symbols) { + super(); + this._symbols = symbols; } - /** - * Returns the result value of the operation, - * regardless of the state of the poller. - * It can return undefined or an incomplete form of the final TResult value - * depending on the implementation. - */ - getResult() { - const state = this.operation.state; - return state.result; + // eslint-disable-next-line @typescript-eslint/no-wrapper-object-types + async read(stream2, options = {}) { + const value = await AvroParser.readInt(stream2, options); + return this._symbols[value]; } - /** - * Returns a serialized version of the poller's operation - * by invoking the operation's toString method. 
- */ - toString() { - return this.operation.toString(); + }; + var AvroUnionType = class extends AvroType { + _types; + constructor(types) { + super(); + this._types = types; + } + async read(stream2, options = {}) { + const typeIndex = await AvroParser.readInt(stream2, options); + return this._types[typeIndex].read(stream2, options); } }; - exports2.Poller = Poller; - } -}); - -// node_modules/@azure/core-lro/dist/commonjs/legacy/lroEngine/lroEngine.js -var require_lroEngine = __commonJS({ - "node_modules/@azure/core-lro/dist/commonjs/legacy/lroEngine/lroEngine.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.LroEngine = void 0; - var operation_js_1 = require_operation3(); - var constants_js_1 = require_constants17(); - var poller_js_1 = require_poller3(); - var operation_js_2 = require_operation(); - var LroEngine = class extends poller_js_1.Poller { - constructor(lro, options) { - const { intervalInMs = constants_js_1.POLL_INTERVAL_IN_MS, resumeFrom, resolveOnUnsuccessful = false, isDone, lroResourceLocationConfig, processResult, updateState } = options || {}; - const state = resumeFrom ? (0, operation_js_2.deserializeState)(resumeFrom) : {}; - const operation = new operation_js_1.GenericPollOperation(state, lro, !resolveOnUnsuccessful, lroResourceLocationConfig, processResult, updateState, isDone); - super(operation); - this.resolveOnUnsuccessful = resolveOnUnsuccessful; - this.config = { intervalInMs }; - operation.setPollerConfig(this.config); + var AvroMapType = class extends AvroType { + _itemType; + constructor(itemType) { + super(); + this._itemType = itemType; } - /** - * The method used by the poller to wait before attempting to update its operation. - */ - delay() { - return new Promise((resolve6) => setTimeout(() => resolve6(), this.config.intervalInMs)); + // eslint-disable-next-line @typescript-eslint/no-wrapper-object-types + read(stream2, options = {}) { + const readItemMethod = (s, opts) => { + return this._itemType.read(s, opts); + }; + return AvroParser.readMap(stream2, readItemMethod, options); + } + }; + var AvroRecordType = class extends AvroType { + _name; + _fields; + constructor(fields, name) { + super(); + this._fields = fields; + this._name = name; + } + // eslint-disable-next-line @typescript-eslint/no-wrapper-object-types + async read(stream2, options = {}) { + const record = {}; + record["$schema"] = this._name; + for (const key in this._fields) { + if (Object.prototype.hasOwnProperty.call(this._fields, key)) { + record[key] = await this._fields[key].read(stream2, options); + } + } + return record; } }; - exports2.LroEngine = LroEngine; - } -}); - -// node_modules/@azure/core-lro/dist/commonjs/legacy/lroEngine/index.js -var require_lroEngine2 = __commonJS({ - "node_modules/@azure/core-lro/dist/commonjs/legacy/lroEngine/index.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.LroEngine = void 0; - var lroEngine_js_1 = require_lroEngine(); - Object.defineProperty(exports2, "LroEngine", { enumerable: true, get: function() { - return lroEngine_js_1.LroEngine; - } }); - } -}); - -// node_modules/@azure/core-lro/dist/commonjs/legacy/pollOperation.js -var require_pollOperation = __commonJS({ - "node_modules/@azure/core-lro/dist/commonjs/legacy/pollOperation.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); } }); -// node_modules/@azure/core-lro/dist/commonjs/index.js -var require_commonjs14 = 
__commonJS({ - "node_modules/@azure/core-lro/dist/commonjs/index.js"(exports2) { +// node_modules/@azure/storage-blob/dist/commonjs/internal-avro/utils/utils.common.js +var require_utils_common3 = __commonJS({ + "node_modules/@azure/storage-blob/dist/commonjs/internal-avro/utils/utils.common.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.createHttpPoller = void 0; - var tslib_1 = (init_tslib_es6(), __toCommonJS(tslib_es6_exports)); - var poller_js_1 = require_poller2(); - Object.defineProperty(exports2, "createHttpPoller", { enumerable: true, get: function() { - return poller_js_1.createHttpPoller; - } }); - tslib_1.__exportStar(require_lroEngine2(), exports2); - tslib_1.__exportStar(require_poller3(), exports2); - tslib_1.__exportStar(require_pollOperation(), exports2); + exports2.arraysEqual = arraysEqual; + function arraysEqual(a, b) { + if (a === b) + return true; + if (a == null || b == null) + return false; + if (a.length !== b.length) + return false; + for (let i = 0; i < a.length; ++i) { + if (a[i] !== b[i]) + return false; + } + return true; + } } }); -// node_modules/@azure/storage-blob/dist/commonjs/pollers/BlobStartCopyFromUrlPoller.js -var require_BlobStartCopyFromUrlPoller = __commonJS({ - "node_modules/@azure/storage-blob/dist/commonjs/pollers/BlobStartCopyFromUrlPoller.js"(exports2) { +// node_modules/@azure/storage-blob/dist/commonjs/internal-avro/AvroReader.js +var require_AvroReader = __commonJS({ + "node_modules/@azure/storage-blob/dist/commonjs/internal-avro/AvroReader.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.BlobBeginCopyFromUrlPoller = void 0; - var core_util_1 = require_commonjs4(); - var core_lro_1 = require_commonjs14(); - var BlobBeginCopyFromUrlPoller = class extends core_lro_1.Poller { - intervalInMs; - constructor(options) { - const { blobClient, copySource, intervalInMs = 15e3, onProgress, resumeFrom, startCopyFromURLOptions } = options; - let state; - if (resumeFrom) { - state = JSON.parse(resumeFrom).state; - } - const operation = makeBlobBeginCopyFromURLPollOperation({ - ...state, - blobClient, - copySource, - startCopyFromURLOptions - }); - super(operation); - if (typeof onProgress === "function") { - this.onProgress(onProgress); - } - this.intervalInMs = intervalInMs; - } - delay() { - return (0, core_util_1.delay)(this.intervalInMs); + exports2.AvroReader = void 0; + var AvroConstants_js_1 = require_AvroConstants(); + var AvroParser_js_1 = require_AvroParser(); + var utils_common_js_1 = require_utils_common3(); + var AvroReader = class { + _dataStream; + _headerStream; + _syncMarker; + _metadata; + _itemType; + _itemsRemainingInBlock; + // Remembers where we started if partial data stream was provided. + _initialBlockOffset; + /// The byte offset within the Avro file (both header and data) + /// of the start of the current block. 
+ _blockOffset; + get blockOffset() { + return this._blockOffset; } - }; - exports2.BlobBeginCopyFromUrlPoller = BlobBeginCopyFromUrlPoller; - var cancel = async function cancel2(options = {}) { - const state = this.state; - const { copyId } = state; - if (state.isCompleted) { - return makeBlobBeginCopyFromURLPollOperation(state); + _objectIndex; + get objectIndex() { + return this._objectIndex; } - if (!copyId) { - state.isCancelled = true; - return makeBlobBeginCopyFromURLPollOperation(state); + _initialized; + constructor(dataStream, headerStream, currentBlockOffset, indexWithinCurrentBlock) { + this._dataStream = dataStream; + this._headerStream = headerStream || dataStream; + this._initialized = false; + this._blockOffset = currentBlockOffset || 0; + this._objectIndex = indexWithinCurrentBlock || 0; + this._initialBlockOffset = currentBlockOffset || 0; } - await state.blobClient.abortCopyFromURL(copyId, { - abortSignal: options.abortSignal - }); - state.isCancelled = true; - return makeBlobBeginCopyFromURLPollOperation(state); - }; - var update = async function update2(options = {}) { - const state = this.state; - const { blobClient, copySource, startCopyFromURLOptions } = state; - if (!state.isStarted) { - state.isStarted = true; - const result = await blobClient.startCopyFromURL(copySource, startCopyFromURLOptions); - state.copyId = result.copyId; - if (result.copyStatus === "success") { - state.result = result; - state.isCompleted = true; + async initialize(options = {}) { + const header = await AvroParser_js_1.AvroParser.readFixedBytes(this._headerStream, AvroConstants_js_1.AVRO_INIT_BYTES.length, { + abortSignal: options.abortSignal + }); + if (!(0, utils_common_js_1.arraysEqual)(header, AvroConstants_js_1.AVRO_INIT_BYTES)) { + throw new Error("Stream is not an Avro file."); + } + this._metadata = await AvroParser_js_1.AvroParser.readMap(this._headerStream, AvroParser_js_1.AvroParser.readString, { + abortSignal: options.abortSignal + }); + const codec = this._metadata[AvroConstants_js_1.AVRO_CODEC_KEY]; + if (!(codec === void 0 || codec === null || codec === "null")) { + throw new Error("Codecs are not supported"); + } + this._syncMarker = await AvroParser_js_1.AvroParser.readFixedBytes(this._headerStream, AvroConstants_js_1.AVRO_SYNC_MARKER_SIZE, { + abortSignal: options.abortSignal + }); + const schema2 = JSON.parse(this._metadata[AvroConstants_js_1.AVRO_SCHEMA_KEY]); + this._itemType = AvroParser_js_1.AvroType.fromSchema(schema2); + if (this._blockOffset === 0) { + this._blockOffset = this._initialBlockOffset + this._dataStream.position; } - } else if (!state.isCompleted) { - try { - const result = await state.blobClient.getProperties({ abortSignal: options.abortSignal }); - const { copyStatus, copyProgress } = result; - const prevCopyProgress = state.copyProgress; - if (copyProgress) { - state.copyProgress = copyProgress; - } - if (copyStatus === "pending" && copyProgress !== prevCopyProgress && typeof options.fireProgress === "function") { - options.fireProgress(state); - } else if (copyStatus === "success") { - state.result = result; - state.isCompleted = true; - } else if (copyStatus === "failed") { - state.error = new Error(`Blob copy failed with reason: "${result.copyStatusDescription || "unknown"}"`); - state.isCompleted = true; + this._itemsRemainingInBlock = await AvroParser_js_1.AvroParser.readLong(this._dataStream, { + abortSignal: options.abortSignal + }); + await AvroParser_js_1.AvroParser.readLong(this._dataStream, { abortSignal: options.abortSignal }); + 
this._initialized = true; + if (this._objectIndex && this._objectIndex > 0) { + for (let i = 0; i < this._objectIndex; i++) { + await this._itemType.read(this._dataStream, { abortSignal: options.abortSignal }); + this._itemsRemainingInBlock--; } - } catch (err) { - state.error = err; - state.isCompleted = true; } } - return makeBlobBeginCopyFromURLPollOperation(state); - }; - var toString3 = function toString4() { - return JSON.stringify({ state: this.state }, (key, value) => { - if (key === "blobClient") { - return void 0; + hasNext() { + return !this._initialized || this._itemsRemainingInBlock > 0; + } + async *parseObjects(options = {}) { + if (!this._initialized) { + await this.initialize(options); } - return value; - }); + while (this.hasNext()) { + const result = await this._itemType.read(this._dataStream, { + abortSignal: options.abortSignal + }); + this._itemsRemainingInBlock--; + this._objectIndex++; + if (this._itemsRemainingInBlock === 0) { + const marker = await AvroParser_js_1.AvroParser.readFixedBytes(this._dataStream, AvroConstants_js_1.AVRO_SYNC_MARKER_SIZE, { + abortSignal: options.abortSignal + }); + this._blockOffset = this._initialBlockOffset + this._dataStream.position; + this._objectIndex = 0; + if (!(0, utils_common_js_1.arraysEqual)(this._syncMarker, marker)) { + throw new Error("Stream is not a valid Avro file."); + } + try { + this._itemsRemainingInBlock = await AvroParser_js_1.AvroParser.readLong(this._dataStream, { + abortSignal: options.abortSignal + }); + } catch { + this._itemsRemainingInBlock = 0; + } + if (this._itemsRemainingInBlock > 0) { + await AvroParser_js_1.AvroParser.readLong(this._dataStream, { abortSignal: options.abortSignal }); + } + } + yield result; + } + } }; - function makeBlobBeginCopyFromURLPollOperation(state) { - return { - state: { ...state }, - cancel, - toString: toString3, - update - }; - } + exports2.AvroReader = AvroReader; } }); -// node_modules/@azure/storage-blob/dist/commonjs/Range.js -var require_Range = __commonJS({ - "node_modules/@azure/storage-blob/dist/commonjs/Range.js"(exports2) { +// node_modules/@azure/storage-blob/dist/commonjs/internal-avro/AvroReadable.js +var require_AvroReadable = __commonJS({ + "node_modules/@azure/storage-blob/dist/commonjs/internal-avro/AvroReadable.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.rangeToString = rangeToString; - function rangeToString(iRange) { - if (iRange.offset < 0) { - throw new RangeError(`Range.offset cannot be smaller than 0.`); - } - if (iRange.count && iRange.count <= 0) { - throw new RangeError(`Range.count must be larger than 0. Leave it undefined if you want a range from offset to the end.`); - } - return iRange.count ? 
`bytes=${iRange.offset}-${iRange.offset + iRange.count - 1}` : `bytes=${iRange.offset}-`; - } + exports2.AvroReadable = void 0; + var AvroReadable = class { + }; + exports2.AvroReadable = AvroReadable; } }); -// node_modules/@azure/storage-blob/dist/commonjs/utils/Batch.js -var require_Batch = __commonJS({ - "node_modules/@azure/storage-blob/dist/commonjs/utils/Batch.js"(exports2) { +// node_modules/@azure/storage-blob/dist/commonjs/internal-avro/AvroReadableFromStream.js +var require_AvroReadableFromStream = __commonJS({ + "node_modules/@azure/storage-blob/dist/commonjs/internal-avro/AvroReadableFromStream.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.Batch = void 0; - var events_1 = require("events"); - var BatchStates; - (function(BatchStates2) { - BatchStates2[BatchStates2["Good"] = 0] = "Good"; - BatchStates2[BatchStates2["Error"] = 1] = "Error"; - })(BatchStates || (BatchStates = {})); - var Batch = class { - /** - * Concurrency. Must be lager than 0. - */ - concurrency; - /** - * Number of active operations under execution. - */ - actives = 0; - /** - * Number of completed operations under execution. - */ - completed = 0; - /** - * Offset of next operation to be executed. - */ - offset = 0; - /** - * Operation array to be executed. - */ - operations = []; - /** - * States of Batch. When an error happens, state will turn into error. - * Batch will stop execute left operations. - */ - state = BatchStates.Good; - /** - * A private emitter used to pass events inside this class. - */ - emitter; - /** - * Creates an instance of Batch. - * @param concurrency - - */ - constructor(concurrency = 5) { - if (concurrency < 1) { - throw new RangeError("concurrency must be larger than 0"); + exports2.AvroReadableFromStream = void 0; + var AvroReadable_js_1 = require_AvroReadable(); + var abort_controller_1 = require_commonjs11(); + var buffer_1 = require("buffer"); + var ABORT_ERROR = new abort_controller_1.AbortError("Reading from the avro stream was aborted."); + var AvroReadableFromStream = class extends AvroReadable_js_1.AvroReadable { + _position; + _readable; + toUint8Array(data) { + if (typeof data === "string") { + return buffer_1.Buffer.from(data); } - this.concurrency = concurrency; - this.emitter = new events_1.EventEmitter(); + return data; } - /** - * Add a operation into queue. - * - * @param operation - - */ - addOperation(operation) { - this.operations.push(async () => { - try { - this.actives++; - await operation(); - this.actives--; - this.completed++; - this.parallelExecute(); - } catch (error3) { - this.emitter.emit("error", error3); - } - }); + constructor(readable) { + super(); + this._readable = readable; + this._position = 0; } - /** - * Start execute operations in the queue. - * - */ - async do() { - if (this.operations.length === 0) { - return Promise.resolve(); - } - this.parallelExecute(); - return new Promise((resolve6, reject) => { - this.emitter.on("finish", resolve6); - this.emitter.on("error", (error3) => { - this.state = BatchStates.Error; - reject(error3); - }); - }); + get position() { + return this._position; } - /** - * Get next operation to be executed. Return null when reaching ends. - * - */ - nextOperation() { - if (this.offset < this.operations.length) { - return this.operations[this.offset++]; + async read(size, options = {}) { + if (options.abortSignal?.aborted) { + throw ABORT_ERROR; } - return null; - } - /** - * Start execute operations. 
One one the most important difference between - * this method with do() is that do() wraps as an sync method. - * - */ - parallelExecute() { - if (this.state === BatchStates.Error) { - return; + if (size < 0) { + throw new Error(`size parameter should be positive: ${size}`); } - if (this.completed >= this.operations.length) { - this.emitter.emit("finish"); - return; + if (size === 0) { + return new Uint8Array(); } - while (this.actives < this.concurrency) { - const operation = this.nextOperation(); - if (operation) { - operation(); - } else { - return; - } + if (!this._readable.readable) { + throw new Error("Stream no longer readable."); + } + const chunk = this._readable.read(size); + if (chunk) { + this._position += chunk.length; + return this.toUint8Array(chunk); + } else { + return new Promise((resolve6, reject) => { + const cleanUp = () => { + this._readable.removeListener("readable", readableCallback); + this._readable.removeListener("error", rejectCallback); + this._readable.removeListener("end", rejectCallback); + this._readable.removeListener("close", rejectCallback); + if (options.abortSignal) { + options.abortSignal.removeEventListener("abort", abortHandler); + } + }; + const readableCallback = () => { + const callbackChunk = this._readable.read(size); + if (callbackChunk) { + this._position += callbackChunk.length; + cleanUp(); + resolve6(this.toUint8Array(callbackChunk)); + } + }; + const rejectCallback = () => { + cleanUp(); + reject(); + }; + const abortHandler = () => { + cleanUp(); + reject(ABORT_ERROR); + }; + this._readable.on("readable", readableCallback); + this._readable.once("error", rejectCallback); + this._readable.once("end", rejectCallback); + this._readable.once("close", rejectCallback); + if (options.abortSignal) { + options.abortSignal.addEventListener("abort", abortHandler); + } + }); } } }; - exports2.Batch = Batch; + exports2.AvroReadableFromStream = AvroReadableFromStream; } }); -// node_modules/@azure/storage-blob/dist/commonjs/utils/utils.js -var require_utils7 = __commonJS({ - "node_modules/@azure/storage-blob/dist/commonjs/utils/utils.js"(exports2) { +// node_modules/@azure/storage-blob/dist/commonjs/internal-avro/index.js +var require_internal_avro = __commonJS({ + "node_modules/@azure/storage-blob/dist/commonjs/internal-avro/index.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.fsCreateReadStream = exports2.fsStat = void 0; - exports2.streamToBuffer = streamToBuffer; - exports2.streamToBuffer2 = streamToBuffer2; - exports2.streamToBuffer3 = streamToBuffer3; - exports2.readStreamToLocalFile = readStreamToLocalFile; - var tslib_1 = (init_tslib_es6(), __toCommonJS(tslib_es6_exports)); - var node_fs_1 = tslib_1.__importDefault(require("node:fs")); - var node_util_1 = tslib_1.__importDefault(require("node:util")); - var constants_js_1 = require_constants15(); - async function streamToBuffer(stream2, buffer, offset, end, encoding) { - let pos = 0; - const count = end - offset; - return new Promise((resolve6, reject) => { - const timeout = setTimeout(() => reject(new Error(`The operation cannot be completed in timeout.`)), constants_js_1.REQUEST_TIMEOUT); - stream2.on("readable", () => { - if (pos >= count) { - clearTimeout(timeout); - resolve6(); - return; - } - let chunk = stream2.read(); - if (!chunk) { - return; - } - if (typeof chunk === "string") { - chunk = Buffer.from(chunk, encoding); - } - const chunkLength = pos + chunk.length > count ? 
count - pos : chunk.length; - buffer.fill(chunk.slice(0, chunkLength), offset + pos, offset + pos + chunkLength); - pos += chunkLength; - }); - stream2.on("end", () => { - clearTimeout(timeout); - if (pos < count) { - reject(new Error(`Stream drains before getting enough data needed. Data read: ${pos}, data need: ${count}`)); - } - resolve6(); - }); - stream2.on("error", (msg) => { - clearTimeout(timeout); - reject(msg); - }); - }); - } - async function streamToBuffer2(stream2, buffer, encoding) { - let pos = 0; - const bufferSize = buffer.length; - return new Promise((resolve6, reject) => { - stream2.on("readable", () => { - let chunk = stream2.read(); - if (!chunk) { - return; + exports2.AvroReadableFromStream = exports2.AvroReadable = exports2.AvroReader = void 0; + var AvroReader_js_1 = require_AvroReader(); + Object.defineProperty(exports2, "AvroReader", { enumerable: true, get: function() { + return AvroReader_js_1.AvroReader; + } }); + var AvroReadable_js_1 = require_AvroReadable(); + Object.defineProperty(exports2, "AvroReadable", { enumerable: true, get: function() { + return AvroReadable_js_1.AvroReadable; + } }); + var AvroReadableFromStream_js_1 = require_AvroReadableFromStream(); + Object.defineProperty(exports2, "AvroReadableFromStream", { enumerable: true, get: function() { + return AvroReadableFromStream_js_1.AvroReadableFromStream; + } }); + } +}); + +// node_modules/@azure/storage-blob/dist/commonjs/utils/BlobQuickQueryStream.js +var require_BlobQuickQueryStream = __commonJS({ + "node_modules/@azure/storage-blob/dist/commonjs/utils/BlobQuickQueryStream.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.BlobQuickQueryStream = void 0; + var node_stream_1 = require("node:stream"); + var index_js_1 = require_internal_avro(); + var BlobQuickQueryStream = class extends node_stream_1.Readable { + source; + avroReader; + avroIter; + avroPaused = true; + onProgress; + onError; + /** + * Creates an instance of BlobQuickQueryStream. + * + * @param source - The current ReadableStream returned from getter + * @param options - + */ + constructor(source, options = {}) { + super(); + this.source = source; + this.onProgress = options.onProgress; + this.onError = options.onError; + this.avroReader = new index_js_1.AvroReader(new index_js_1.AvroReadableFromStream(this.source)); + this.avroIter = this.avroReader.parseObjects({ abortSignal: options.abortSignal }); + } + _read() { + if (this.avroPaused) { + this.readInternal().catch((err) => { + this.emit("error", err); + }); + } + } + async readInternal() { + this.avroPaused = false; + let avroNext; + do { + avroNext = await this.avroIter.next(); + if (avroNext.done) { + break; } - if (typeof chunk === "string") { - chunk = Buffer.from(chunk, encoding); + const obj = avroNext.value; + const schema2 = obj.$schema; + if (typeof schema2 !== "string") { + throw Error("Missing schema in avro record."); } - if (pos + chunk.length > bufferSize) { - reject(new Error(`Stream exceeds buffer size. 
Buffer size: ${bufferSize}`)); - return; + switch (schema2) { + case "com.microsoft.azure.storage.queryBlobContents.resultData": + { + const data = obj.data; + if (data instanceof Uint8Array === false) { + throw Error("Invalid data in avro result record."); + } + if (!this.push(Buffer.from(data))) { + this.avroPaused = true; + } + } + break; + case "com.microsoft.azure.storage.queryBlobContents.progress": + { + const bytesScanned = obj.bytesScanned; + if (typeof bytesScanned !== "number") { + throw Error("Invalid bytesScanned in avro progress record."); + } + if (this.onProgress) { + this.onProgress({ loadedBytes: bytesScanned }); + } + } + break; + case "com.microsoft.azure.storage.queryBlobContents.end": + if (this.onProgress) { + const totalBytes = obj.totalBytes; + if (typeof totalBytes !== "number") { + throw Error("Invalid totalBytes in avro end record."); + } + this.onProgress({ loadedBytes: totalBytes }); + } + this.push(null); + break; + case "com.microsoft.azure.storage.queryBlobContents.error": + if (this.onError) { + const fatal = obj.fatal; + if (typeof fatal !== "boolean") { + throw Error("Invalid fatal in avro error record."); + } + const name = obj.name; + if (typeof name !== "string") { + throw Error("Invalid name in avro error record."); + } + const description = obj.description; + if (typeof description !== "string") { + throw Error("Invalid description in avro error record."); + } + const position = obj.position; + if (typeof position !== "number") { + throw Error("Invalid position in avro error record."); + } + this.onError({ + position, + name, + isFatal: fatal, + description + }); + } + break; + default: + throw Error(`Unknown schema ${schema2} in avro progress record.`); } - buffer.fill(chunk, pos, pos + chunk.length); - pos += chunk.length; - }); - stream2.on("end", () => { - resolve6(pos); - }); - stream2.on("error", reject); - }); - } - async function streamToBuffer3(readableStream, encoding) { - return new Promise((resolve6, reject) => { - const chunks = []; - readableStream.on("data", (data) => { - chunks.push(typeof data === "string" ? 
Buffer.from(data, encoding) : data); - }); - readableStream.on("end", () => { - resolve6(Buffer.concat(chunks)); - }); - readableStream.on("error", reject); - }); - } - async function readStreamToLocalFile(rs, file) { - return new Promise((resolve6, reject) => { - const ws = node_fs_1.default.createWriteStream(file); - rs.on("error", (err) => { - reject(err); - }); - ws.on("error", (err) => { - reject(err); - }); - ws.on("close", resolve6); - rs.pipe(ws); - }); - } - exports2.fsStat = node_util_1.default.promisify(node_fs_1.default.stat); - exports2.fsCreateReadStream = node_fs_1.default.createReadStream; + } while (!avroNext.done && !this.avroPaused); + } + }; + exports2.BlobQuickQueryStream = BlobQuickQueryStream; } }); -// node_modules/@azure/storage-blob/dist/commonjs/Clients.js -var require_Clients = __commonJS({ - "node_modules/@azure/storage-blob/dist/commonjs/Clients.js"(exports2) { +// node_modules/@azure/storage-blob/dist/commonjs/BlobQueryResponse.js +var require_BlobQueryResponse = __commonJS({ + "node_modules/@azure/storage-blob/dist/commonjs/BlobQueryResponse.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.PageBlobClient = exports2.BlockBlobClient = exports2.AppendBlobClient = exports2.BlobClient = void 0; - var core_rest_pipeline_1 = require_commonjs6(); - var core_auth_1 = require_commonjs7(); + exports2.BlobQueryResponse = void 0; var core_util_1 = require_commonjs4(); - var core_util_2 = require_commonjs4(); - var BlobDownloadResponse_js_1 = require_BlobDownloadResponse(); - var BlobQueryResponse_js_1 = require_BlobQueryResponse(); - var AnonymousCredential_js_1 = require_AnonymousCredential(); - var StorageSharedKeyCredential_js_1 = require_StorageSharedKeyCredential(); - var models_js_1 = require_models2(); - var PageBlobRangeResponse_js_1 = require_PageBlobRangeResponse(); - var Pipeline_js_1 = require_Pipeline(); - var BlobStartCopyFromUrlPoller_js_1 = require_BlobStartCopyFromUrlPoller(); - var Range_js_1 = require_Range(); - var StorageClient_js_1 = require_StorageClient(); - var Batch_js_1 = require_Batch(); - var storage_common_1 = require_commonjs13(); - var constants_js_1 = require_constants15(); - var tracing_js_1 = require_tracing(); - var utils_common_js_1 = require_utils_common(); - var utils_js_1 = require_utils7(); - var BlobSASSignatureValues_js_1 = require_BlobSASSignatureValues(); - var BlobLeaseClient_js_1 = require_BlobLeaseClient(); - var BlobClient = class _BlobClient extends StorageClient_js_1.StorageClient { + var BlobQuickQueryStream_js_1 = require_BlobQuickQueryStream(); + var BlobQueryResponse = class { /** - * blobContext provided by protocol layer. + * Indicates that the service supports + * requests for partial file content. + * + * @readonly */ - blobContext; - _name; - _containerName; - _versionId; - _snapshot; + get acceptRanges() { + return this.originalResponse.acceptRanges; + } /** - * The name of the blob. + * Returns if it was previously specified + * for the file. + * + * @readonly */ - get name() { - return this._name; + get cacheControl() { + return this.originalResponse.cacheControl; } /** - * The name of the storage container the blob is associated with. + * Returns the value that was specified + * for the 'x-ms-content-disposition' header and specifies how to process the + * response. 
+ * + * @readonly */ - get containerName() { - return this._containerName; + get contentDisposition() { + return this.originalResponse.contentDisposition; } - constructor(urlOrConnectionString, credentialOrPipelineOrContainerName, blobNameOrOptions, options) { - options = options || {}; - let pipeline; - let url2; - if ((0, Pipeline_js_1.isPipelineLike)(credentialOrPipelineOrContainerName)) { - url2 = urlOrConnectionString; - pipeline = credentialOrPipelineOrContainerName; - } else if (core_util_1.isNodeLike && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential_js_1.StorageSharedKeyCredential || credentialOrPipelineOrContainerName instanceof AnonymousCredential_js_1.AnonymousCredential || (0, core_auth_1.isTokenCredential)(credentialOrPipelineOrContainerName)) { - url2 = urlOrConnectionString; - options = blobNameOrOptions; - pipeline = (0, Pipeline_js_1.newPipeline)(credentialOrPipelineOrContainerName, options); - } else if (!credentialOrPipelineOrContainerName && typeof credentialOrPipelineOrContainerName !== "string") { - url2 = urlOrConnectionString; - if (blobNameOrOptions && typeof blobNameOrOptions !== "string") { - options = blobNameOrOptions; - } - pipeline = (0, Pipeline_js_1.newPipeline)(new AnonymousCredential_js_1.AnonymousCredential(), options); - } else if (credentialOrPipelineOrContainerName && typeof credentialOrPipelineOrContainerName === "string" && blobNameOrOptions && typeof blobNameOrOptions === "string") { - const containerName = credentialOrPipelineOrContainerName; - const blobName = blobNameOrOptions; - const extractedCreds = (0, utils_common_js_1.extractConnectionStringParts)(urlOrConnectionString); - if (extractedCreds.kind === "AccountConnString") { - if (core_util_1.isNodeLike) { - const sharedKeyCredential = new StorageSharedKeyCredential_js_1.StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); - url2 = (0, utils_common_js_1.appendToURLPath)((0, utils_common_js_1.appendToURLPath)(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)); - if (!options.proxyOptions) { - options.proxyOptions = (0, core_rest_pipeline_1.getDefaultProxySettings)(extractedCreds.proxyUri); - } - pipeline = (0, Pipeline_js_1.newPipeline)(sharedKeyCredential, options); - } else { - throw new Error("Account connection string is only supported in Node.js environment"); - } - } else if (extractedCreds.kind === "SASConnString") { - url2 = (0, utils_common_js_1.appendToURLPath)((0, utils_common_js_1.appendToURLPath)(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)) + "?" + extractedCreds.accountSas; - pipeline = (0, Pipeline_js_1.newPipeline)(new AnonymousCredential_js_1.AnonymousCredential(), options); - } else { - throw new Error("Connection string must be either an Account connection string or a SAS connection string"); - } - } else { - throw new Error("Expecting non-empty strings for containerName and blobName parameters"); - } - super(url2, pipeline); - ({ blobName: this._name, containerName: this._containerName } = this.getBlobAndContainerNamesFromUrl()); - this.blobContext = this.storageClientContext.blob; - this._snapshot = (0, utils_common_js_1.getURLParameter)(this.url, constants_js_1.URLConstants.Parameters.SNAPSHOT); - this._versionId = (0, utils_common_js_1.getURLParameter)(this.url, constants_js_1.URLConstants.Parameters.VERSIONID); + /** + * Returns the value that was specified + * for the Content-Encoding request header. 
+ * + * @readonly + */ + get contentEncoding() { + return this.originalResponse.contentEncoding; } /** - * Creates a new BlobClient object identical to the source but with the specified snapshot timestamp. - * Provide "" will remove the snapshot and return a Client to the base blob. + * Returns the value that was specified + * for the Content-Language request header. * - * @param snapshot - The snapshot timestamp. - * @returns A new BlobClient object identical to the source but with the specified snapshot timestamp + * @readonly */ - withSnapshot(snapshot) { - return new _BlobClient((0, utils_common_js_1.setURLParameter)(this.url, constants_js_1.URLConstants.Parameters.SNAPSHOT, snapshot.length === 0 ? void 0 : snapshot), this.pipeline); + get contentLanguage() { + return this.originalResponse.contentLanguage; } /** - * Creates a new BlobClient object pointing to a version of this blob. - * Provide "" will remove the versionId and return a Client to the base blob. + * The current sequence number for a + * page blob. This header is not returned for block blobs or append blobs. * - * @param versionId - The versionId. - * @returns A new BlobClient object pointing to the version of this blob. + * @readonly */ - withVersion(versionId) { - return new _BlobClient((0, utils_common_js_1.setURLParameter)(this.url, constants_js_1.URLConstants.Parameters.VERSIONID, versionId.length === 0 ? void 0 : versionId), this.pipeline); + get blobSequenceNumber() { + return this.originalResponse.blobSequenceNumber; } /** - * Creates a AppendBlobClient object. + * The blob's type. Possible values include: + * 'BlockBlob', 'PageBlob', 'AppendBlob'. * + * @readonly */ - getAppendBlobClient() { - return new AppendBlobClient(this.url, this.pipeline); + get blobType() { + return this.originalResponse.blobType; } /** - * Creates a BlockBlobClient object. + * The number of bytes present in the + * response body. * + * @readonly */ - getBlockBlobClient() { - return new BlockBlobClient(this.url, this.pipeline); + get contentLength() { + return this.originalResponse.contentLength; } /** - * Creates a PageBlobClient object. + * If the file has an MD5 hash and the + * request is to read the full file, this response header is returned so that + * the client can check for message content integrity. If the request is to + * read a specified range and the 'x-ms-range-get-content-md5' is set to + * true, then the request returns an MD5 hash for the range, as long as the + * range size is less than or equal to 4 MB. If neither of these sets of + * conditions is true, then no value is returned for the 'Content-MD5' + * header. * + * @readonly */ - getPageBlobClient() { - return new PageBlobClient(this.url, this.pipeline); + get contentMD5() { + return this.originalResponse.contentMD5; } /** - * Reads or downloads a blob from the system, including its metadata and properties. - * You can also call Get Blob to read a snapshot. + * Indicates the range of bytes returned if + * the client requested a subset of the file by setting the Range request + * header. * - * * In Node.js, data returns in a Readable stream readableStreamBody - * * In browsers, data returns in a promise blobBody + * @readonly + */ + get contentRange() { + return this.originalResponse.contentRange; + } + /** + * The content type specified for the file. 
+ * The default content type is 'application/octet-stream' * - * @see https://learn.microsoft.com/rest/api/storageservices/get-blob + * @readonly + */ + get contentType() { + return this.originalResponse.contentType; + } + /** + * Conclusion time of the last attempted + * Copy File operation where this file was the destination file. This value + * can specify the time of a completed, aborted, or failed copy attempt. * - * @param offset - From which position of the blob to download, greater than or equal to 0 - * @param count - How much data to be downloaded, greater than 0. Will download to the end when undefined - * @param options - Optional options to Blob Download operation. + * @readonly + */ + get copyCompletedOn() { + return void 0; + } + /** + * String identifier for the last attempted Copy + * File operation where this file was the destination file. * + * @readonly + */ + get copyId() { + return this.originalResponse.copyId; + } + /** + * Contains the number of bytes copied and + * the total bytes in the source in the last attempted Copy File operation + * where this file was the destination file. Can show between 0 and + * Content-Length bytes copied. * - * Example usage (Node.js): + * @readonly + */ + get copyProgress() { + return this.originalResponse.copyProgress; + } + /** + * URL up to 2KB in length that specifies the + * source file used in the last attempted Copy File operation where this file + * was the destination file. * - * ```ts snippet:ReadmeSampleDownloadBlob_Node - * import { BlobServiceClient } from "@azure/storage-blob"; - * import { DefaultAzureCredential } from "@azure/identity"; + * @readonly + */ + get copySource() { + return this.originalResponse.copySource; + } + /** + * State of the copy operation + * identified by 'x-ms-copy-id'. Possible values include: 'pending', + * 'success', 'aborted', 'failed' * - * const account = ""; - * const blobServiceClient = new BlobServiceClient( - * `https://${account}.blob.core.windows.net`, - * new DefaultAzureCredential(), - * ); + * @readonly + */ + get copyStatus() { + return this.originalResponse.copyStatus; + } + /** + * Only appears when + * x-ms-copy-status is failed or pending. Describes cause of fatal or + * non-fatal copy operation failure. * - * const containerName = ""; - * const blobName = ""; - * const containerClient = blobServiceClient.getContainerClient(containerName); - * const blobClient = containerClient.getBlobClient(blobName); + * @readonly + */ + get copyStatusDescription() { + return this.originalResponse.copyStatusDescription; + } + /** + * When a blob is leased, + * specifies whether the lease is of infinite or fixed duration. Possible + * values include: 'infinite', 'fixed'. * - * // Get blob content from position 0 to the end - * // In Node.js, get downloaded data by accessing downloadBlockBlobResponse.readableStreamBody - * const downloadBlockBlobResponse = await blobClient.download(); - * if (downloadBlockBlobResponse.readableStreamBody) { - * const downloaded = await streamToString(downloadBlockBlobResponse.readableStreamBody); - * console.log(`Downloaded blob content: ${downloaded}`); - * } + * @readonly + */ + get leaseDuration() { + return this.originalResponse.leaseDuration; + } + /** + * Lease state of the blob. Possible + * values include: 'available', 'leased', 'expired', 'breaking', 'broken'. 
* - * async function streamToString(stream: NodeJS.ReadableStream): Promise { - * const result = await new Promise>((resolve, reject) => { - * const chunks: Buffer[] = []; - * stream.on("data", (data) => { - * chunks.push(Buffer.isBuffer(data) ? data : Buffer.from(data)); - * }); - * stream.on("end", () => { - * resolve(Buffer.concat(chunks)); - * }); - * stream.on("error", reject); - * }); - * return result.toString(); - * } - * ``` + * @readonly + */ + get leaseState() { + return this.originalResponse.leaseState; + } + /** + * The current lease status of the + * blob. Possible values include: 'locked', 'unlocked'. * - * Example usage (browser): + * @readonly + */ + get leaseStatus() { + return this.originalResponse.leaseStatus; + } + /** + * A UTC date/time value generated by the service that + * indicates the time at which the response was initiated. * - * ```ts snippet:ReadmeSampleDownloadBlob_Browser - * import { BlobServiceClient } from "@azure/storage-blob"; - * import { DefaultAzureCredential } from "@azure/identity"; + * @readonly + */ + get date() { + return this.originalResponse.date; + } + /** + * The number of committed blocks + * present in the blob. This header is returned only for append blobs. * - * const account = ""; - * const blobServiceClient = new BlobServiceClient( - * `https://${account}.blob.core.windows.net`, - * new DefaultAzureCredential(), - * ); + * @readonly + */ + get blobCommittedBlockCount() { + return this.originalResponse.blobCommittedBlockCount; + } + /** + * The ETag contains a value that you can use to + * perform operations conditionally, in quotes. * - * const containerName = ""; - * const blobName = ""; - * const containerClient = blobServiceClient.getContainerClient(containerName); - * const blobClient = containerClient.getBlobClient(blobName); + * @readonly + */ + get etag() { + return this.originalResponse.etag; + } + /** + * The error code. * - * // Get blob content from position 0 to the end - * // In browsers, get downloaded data by accessing downloadBlockBlobResponse.blobBody - * const downloadBlockBlobResponse = await blobClient.download(); - * const blobBody = await downloadBlockBlobResponse.blobBody; - * if (blobBody) { - * const downloaded = await blobBody.text(); - * console.log(`Downloaded blob content: ${downloaded}`); - * } - * ``` + * @readonly */ - async download(offset = 0, count, options = {}) { - options.conditions = options.conditions || {}; - options.conditions = options.conditions || {}; - (0, models_js_1.ensureCpkIfSpecified)(options.customerProvidedKey, this.isHttps); - return tracing_js_1.tracingClient.withSpan("BlobClient-download", options, async (updatedOptions) => { - const res = (0, utils_common_js_1.assertResponse)(await this.blobContext.download({ - abortSignal: options.abortSignal, - leaseAccessConditions: options.conditions, - modifiedAccessConditions: { - ...options.conditions, - ifTags: options.conditions?.tagConditions - }, - requestOptions: { - onDownloadProgress: core_util_1.isNodeLike ? void 0 : options.onProgress - // for Node.js, progress is reported by RetriableReadableStream - }, - range: offset === 0 && !count ? 
void 0 : (0, Range_js_1.rangeToString)({ offset, count }), - rangeGetContentMD5: options.rangeGetContentMD5, - rangeGetContentCRC64: options.rangeGetContentCrc64, - snapshot: options.snapshot, - cpkInfo: options.customerProvidedKey, - tracingOptions: updatedOptions.tracingOptions - })); - const wrappedRes = { - ...res, - _response: res._response, - // _response is made non-enumerable - objectReplicationDestinationPolicyId: res.objectReplicationPolicyId, - objectReplicationSourceProperties: (0, utils_common_js_1.parseObjectReplicationRecord)(res.objectReplicationRules) - }; - if (!core_util_1.isNodeLike) { - return wrappedRes; - } - if (options.maxRetryRequests === void 0 || options.maxRetryRequests < 0) { - options.maxRetryRequests = constants_js_1.DEFAULT_MAX_DOWNLOAD_RETRY_REQUESTS; - } - if (res.contentLength === void 0) { - throw new RangeError(`File download response doesn't contain valid content length header`); - } - if (!res.etag) { - throw new RangeError(`File download response doesn't contain valid etag header`); - } - return new BlobDownloadResponse_js_1.BlobDownloadResponse(wrappedRes, async (start) => { - const updatedDownloadOptions = { - leaseAccessConditions: options.conditions, - modifiedAccessConditions: { - ifMatch: options.conditions.ifMatch || res.etag, - ifModifiedSince: options.conditions.ifModifiedSince, - ifNoneMatch: options.conditions.ifNoneMatch, - ifUnmodifiedSince: options.conditions.ifUnmodifiedSince, - ifTags: options.conditions?.tagConditions - }, - range: (0, Range_js_1.rangeToString)({ - count: offset + res.contentLength - start, - offset: start - }), - rangeGetContentMD5: options.rangeGetContentMD5, - rangeGetContentCRC64: options.rangeGetContentCrc64, - snapshot: options.snapshot, - cpkInfo: options.customerProvidedKey - }; - return (await this.blobContext.download({ - abortSignal: options.abortSignal, - ...updatedDownloadOptions - })).readableStreamBody; - }, offset, res.contentLength, { - maxRetryRequests: options.maxRetryRequests, - onProgress: options.onProgress - }); - }); + get errorCode() { + return this.originalResponse.errorCode; + } + /** + * The value of this header is set to + * true if the file data and application metadata are completely encrypted + * using the specified algorithm. Otherwise, the value is set to false (when + * the file is unencrypted, or if only parts of the file/application metadata + * are encrypted). + * + * @readonly + */ + get isServerEncrypted() { + return this.originalResponse.isServerEncrypted; + } + /** + * If the blob has a MD5 hash, and if + * request contains range header (Range or x-ms-range), this response header + * is returned with the value of the whole blob's MD5 value. This value may + * or may not be equal to the value returned in Content-MD5 header, with the + * latter calculated from the requested range. + * + * @readonly + */ + get blobContentMD5() { + return this.originalResponse.blobContentMD5; + } + /** + * Returns the date and time the file was last + * modified. Any operation that modifies the file or its properties updates + * the last modified time. + * + * @readonly + */ + get lastModified() { + return this.originalResponse.lastModified; + } + /** + * A name-value pair + * to associate with a file storage object. + * + * @readonly + */ + get metadata() { + return this.originalResponse.metadata; + } + /** + * This header uniquely identifies the request + * that was made and can be used for troubleshooting the request. 
+ * + * @readonly + */ + get requestId() { + return this.originalResponse.requestId; + } + /** + * If a client request id header is sent in the request, this header will be present in the + * response with the same value. + * + * @readonly + */ + get clientRequestId() { + return this.originalResponse.clientRequestId; + } + /** + * Indicates the version of the File service used + * to execute the request. + * + * @readonly + */ + get version() { + return this.originalResponse.version; + } + /** + * The SHA-256 hash of the encryption key used to encrypt the blob. This value is only returned + * when the blob was encrypted with a customer-provided key. + * + * @readonly + */ + get encryptionKeySha256() { + return this.originalResponse.encryptionKeySha256; + } + /** + * If the request is to read a specified range and the x-ms-range-get-content-crc64 is set to + * true, then the request returns a crc64 for the range, as long as the range size is less than + * or equal to 4 MB. If both x-ms-range-get-content-crc64 & x-ms-range-get-content-md5 is + * specified in the same request, it will fail with 400(Bad Request) + */ + get contentCrc64() { + return this.originalResponse.contentCrc64; } /** - * Returns true if the Azure blob resource represented by this client exists; false otherwise. - * - * NOTE: use this function with care since an existing blob might be deleted by other clients or - * applications. Vice versa new blobs might be added by other clients or applications after this - * function completes. + * The response body as a browser Blob. + * Always undefined in node.js. * - * @param options - options to Exists operation. + * @readonly */ - async exists(options = {}) { - return tracing_js_1.tracingClient.withSpan("BlobClient-exists", options, async (updatedOptions) => { - try { - (0, models_js_1.ensureCpkIfSpecified)(options.customerProvidedKey, this.isHttps); - await this.getProperties({ - abortSignal: options.abortSignal, - customerProvidedKey: options.customerProvidedKey, - conditions: options.conditions, - tracingOptions: updatedOptions.tracingOptions - }); - return true; - } catch (e) { - if (e.statusCode === 404) { - return false; - } else if (e.statusCode === 409 && (e.details.errorCode === constants_js_1.BlobUsesCustomerSpecifiedEncryptionMsg || e.details.errorCode === constants_js_1.BlobDoesNotUseCustomerSpecifiedEncryption)) { - return true; - } - throw e; - } - }); + get blobBody() { + return void 0; } /** - * Returns all user-defined metadata, standard HTTP properties, and system properties - * for the blob. It does not return the content of the blob. - * @see https://learn.microsoft.com/rest/api/storageservices/get-blob-properties + * The response body as a node.js Readable stream. + * Always undefined in the browser. * - * WARNING: The `metadata` object returned in the response will have its keys in lowercase, even if - * they originally contained uppercase characters. This differs from the metadata keys returned by - * the methods of {@link ContainerClient} that list blobs using the `includeMetadata` option, which - * will retain their original casing. + * It will parse avor data returned by blob query. * - * @param options - Optional options to Get Properties operation. 
+ * @readonly */ - async getProperties(options = {}) { - options.conditions = options.conditions || {}; - (0, models_js_1.ensureCpkIfSpecified)(options.customerProvidedKey, this.isHttps); - return tracing_js_1.tracingClient.withSpan("BlobClient-getProperties", options, async (updatedOptions) => { - const res = (0, utils_common_js_1.assertResponse)(await this.blobContext.getProperties({ - abortSignal: options.abortSignal, - leaseAccessConditions: options.conditions, - modifiedAccessConditions: { - ...options.conditions, - ifTags: options.conditions?.tagConditions - }, - cpkInfo: options.customerProvidedKey, - tracingOptions: updatedOptions.tracingOptions - })); - return { - ...res, - _response: res._response, - // _response is made non-enumerable - objectReplicationDestinationPolicyId: res.objectReplicationPolicyId, - objectReplicationSourceProperties: (0, utils_common_js_1.parseObjectReplicationRecord)(res.objectReplicationRules) - }; - }); + get readableStreamBody() { + return core_util_1.isNodeLike ? this.blobDownloadStream : void 0; } /** - * Marks the specified blob or snapshot for deletion. The blob is later deleted - * during garbage collection. Note that in order to delete a blob, you must delete - * all of its snapshots. You can delete both at the same time with the Delete - * Blob operation. - * @see https://learn.microsoft.com/rest/api/storageservices/delete-blob - * - * @param options - Optional options to Blob Delete operation. + * The HTTP response. */ - async delete(options = {}) { - options.conditions = options.conditions || {}; - return tracing_js_1.tracingClient.withSpan("BlobClient-delete", options, async (updatedOptions) => { - return (0, utils_common_js_1.assertResponse)(await this.blobContext.delete({ - abortSignal: options.abortSignal, - deleteSnapshots: options.deleteSnapshots, - leaseAccessConditions: options.conditions, - modifiedAccessConditions: { - ...options.conditions, - ifTags: options.conditions?.tagConditions - }, - tracingOptions: updatedOptions.tracingOptions - })); - }); + get _response() { + return this.originalResponse._response; } + originalResponse; + blobDownloadStream; /** - * Marks the specified blob or snapshot for deletion if it exists. The blob is later deleted - * during garbage collection. Note that in order to delete a blob, you must delete - * all of its snapshots. You can delete both at the same time with the Delete - * Blob operation. - * @see https://learn.microsoft.com/rest/api/storageservices/delete-blob + * Creates an instance of BlobQueryResponse. * - * @param options - Optional options to Blob Delete operation. 
+ * @param originalResponse - + * @param options - */ - async deleteIfExists(options = {}) { - return tracing_js_1.tracingClient.withSpan("BlobClient-deleteIfExists", options, async (updatedOptions) => { - try { - const res = (0, utils_common_js_1.assertResponse)(await this.delete(updatedOptions)); - return { - succeeded: true, - ...res, - _response: res._response - // _response is made non-enumerable - }; - } catch (e) { - if (e.details?.errorCode === "BlobNotFound") { - return { - succeeded: false, - ...e.response?.parsedHeaders, - _response: e.response - }; - } - throw e; + constructor(originalResponse, options = {}) { + this.originalResponse = originalResponse; + this.blobDownloadStream = new BlobQuickQueryStream_js_1.BlobQuickQueryStream(this.originalResponse.readableStreamBody, options); + } + }; + exports2.BlobQueryResponse = BlobQueryResponse; + } +}); + +// node_modules/@azure/storage-blob/dist/commonjs/models.js +var require_models2 = __commonJS({ + "node_modules/@azure/storage-blob/dist/commonjs/models.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.StorageBlobAudience = exports2.PremiumPageBlobTier = exports2.BlockBlobTier = void 0; + exports2.toAccessTier = toAccessTier; + exports2.ensureCpkIfSpecified = ensureCpkIfSpecified; + exports2.getBlobServiceAccountAudience = getBlobServiceAccountAudience; + var constants_js_1 = require_constants15(); + var BlockBlobTier; + (function(BlockBlobTier2) { + BlockBlobTier2["Hot"] = "Hot"; + BlockBlobTier2["Cool"] = "Cool"; + BlockBlobTier2["Cold"] = "Cold"; + BlockBlobTier2["Archive"] = "Archive"; + })(BlockBlobTier || (exports2.BlockBlobTier = BlockBlobTier = {})); + var PremiumPageBlobTier; + (function(PremiumPageBlobTier2) { + PremiumPageBlobTier2["P4"] = "P4"; + PremiumPageBlobTier2["P6"] = "P6"; + PremiumPageBlobTier2["P10"] = "P10"; + PremiumPageBlobTier2["P15"] = "P15"; + PremiumPageBlobTier2["P20"] = "P20"; + PremiumPageBlobTier2["P30"] = "P30"; + PremiumPageBlobTier2["P40"] = "P40"; + PremiumPageBlobTier2["P50"] = "P50"; + PremiumPageBlobTier2["P60"] = "P60"; + PremiumPageBlobTier2["P70"] = "P70"; + PremiumPageBlobTier2["P80"] = "P80"; + })(PremiumPageBlobTier || (exports2.PremiumPageBlobTier = PremiumPageBlobTier = {})); + function toAccessTier(tier) { + if (tier === void 0) { + return void 0; + } + return tier; + } + function ensureCpkIfSpecified(cpk, isHttps) { + if (cpk && !isHttps) { + throw new RangeError("Customer-provided encryption key must be used over HTTPS."); + } + if (cpk && !cpk.encryptionAlgorithm) { + cpk.encryptionAlgorithm = constants_js_1.EncryptionAlgorithmAES25; + } + } + var StorageBlobAudience; + (function(StorageBlobAudience2) { + StorageBlobAudience2["StorageOAuthScopes"] = "https://storage.azure.com/.default"; + StorageBlobAudience2["DiskComputeOAuthScopes"] = "https://disk.compute.azure.com/.default"; + })(StorageBlobAudience || (exports2.StorageBlobAudience = StorageBlobAudience = {})); + function getBlobServiceAccountAudience(storageAccountName) { + return `https://${storageAccountName}.blob.core.windows.net/.default`; + } + } +}); + +// node_modules/@azure/storage-blob/dist/commonjs/PageBlobRangeResponse.js +var require_PageBlobRangeResponse = __commonJS({ + "node_modules/@azure/storage-blob/dist/commonjs/PageBlobRangeResponse.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.rangeResponseFromModel = rangeResponseFromModel; + function rangeResponseFromModel(response) { + const 
pageRange = (response._response.parsedBody.pageRange || []).map((x) => ({ + offset: x.start, + count: x.end - x.start + })); + const clearRange = (response._response.parsedBody.clearRange || []).map((x) => ({ + offset: x.start, + count: x.end - x.start + })); + return { + ...response, + pageRange, + clearRange, + _response: { + ...response._response, + parsedBody: { + pageRange, + clearRange } - }); + } + }; + } + } +}); + +// node_modules/@azure/core-lro/dist/commonjs/logger.js +var require_logger2 = __commonJS({ + "node_modules/@azure/core-lro/dist/commonjs/logger.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.logger = void 0; + var logger_1 = require_commonjs2(); + exports2.logger = (0, logger_1.createClientLogger)("core-lro"); + } +}); + +// node_modules/@azure/core-lro/dist/commonjs/poller/constants.js +var require_constants17 = __commonJS({ + "node_modules/@azure/core-lro/dist/commonjs/poller/constants.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.terminalStates = exports2.POLL_INTERVAL_IN_MS = void 0; + exports2.POLL_INTERVAL_IN_MS = 2e3; + exports2.terminalStates = ["succeeded", "canceled", "failed"]; + } +}); + +// node_modules/@azure/core-lro/dist/commonjs/poller/operation.js +var require_operation = __commonJS({ + "node_modules/@azure/core-lro/dist/commonjs/poller/operation.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.pollOperation = exports2.initOperation = exports2.deserializeState = void 0; + var logger_js_1 = require_logger2(); + var constants_js_1 = require_constants17(); + function deserializeState(serializedState) { + try { + return JSON.parse(serializedState).state; + } catch (e) { + throw new Error(`Unable to deserialize input state: ${serializedState}`); } - /** - * Restores the contents and metadata of soft deleted blob and any associated - * soft deleted snapshots. Undelete Blob is supported only on version 2017-07-29 - * or later. - * @see https://learn.microsoft.com/rest/api/storageservices/undelete-blob - * - * @param options - Optional options to Blob Undelete operation. 
- */ - async undelete(options = {}) { - return tracing_js_1.tracingClient.withSpan("BlobClient-undelete", options, async (updatedOptions) => { - return (0, utils_common_js_1.assertResponse)(await this.blobContext.undelete({ - abortSignal: options.abortSignal, - tracingOptions: updatedOptions.tracingOptions - })); + } + exports2.deserializeState = deserializeState; + function setStateError(inputs) { + const { state, stateProxy, isOperationError } = inputs; + return (error3) => { + if (isOperationError(error3)) { + stateProxy.setError(state, error3); + stateProxy.setFailed(state); + } + throw error3; + }; + } + function appendReadableErrorMessage(currentMessage, innerMessage) { + let message = currentMessage; + if (message.slice(-1) !== ".") { + message = message + "."; + } + return message + " " + innerMessage; + } + function simplifyError(err) { + let message = err.message; + let code = err.code; + let curErr = err; + while (curErr.innererror) { + curErr = curErr.innererror; + code = curErr.code; + message = appendReadableErrorMessage(message, curErr.message); + } + return { + code, + message + }; + } + function processOperationStatus(result) { + const { state, stateProxy, status, isDone, processResult, getError, response, setErrorAsResult } = result; + switch (status) { + case "succeeded": { + stateProxy.setSucceeded(state); + break; + } + case "failed": { + const err = getError === null || getError === void 0 ? void 0 : getError(response); + let postfix = ""; + if (err) { + const { code, message } = simplifyError(err); + postfix = `. ${code}. ${message}`; + } + const errStr = `The long-running operation has failed${postfix}`; + stateProxy.setError(state, new Error(errStr)); + stateProxy.setFailed(state); + logger_js_1.logger.warning(errStr); + break; + } + case "canceled": { + stateProxy.setCanceled(state); + break; + } + } + if ((isDone === null || isDone === void 0 ? void 0 : isDone(response, state)) || isDone === void 0 && ["succeeded", "canceled"].concat(setErrorAsResult ? [] : ["failed"]).includes(status)) { + stateProxy.setResult(state, buildResult({ + response, + state, + processResult + })); + } + } + function buildResult(inputs) { + const { processResult, response, state } = inputs; + return processResult ? processResult(response, state) : response; + } + async function initOperation(inputs) { + const { init, stateProxy, processResult, getOperationStatus, withOperationLocation, setErrorAsResult } = inputs; + const { operationLocation, resourceLocation, metadata, response } = await init(); + if (operationLocation) + withOperationLocation === null || withOperationLocation === void 0 ? 
void 0 : withOperationLocation(operationLocation, false); + const config = { + metadata, + operationLocation, + resourceLocation + }; + logger_js_1.logger.verbose(`LRO: Operation description:`, config); + const state = stateProxy.initState(config); + const status = getOperationStatus({ response, state, operationLocation }); + processOperationStatus({ state, status, stateProxy, response, setErrorAsResult, processResult }); + return state; + } + exports2.initOperation = initOperation; + async function pollOperationHelper(inputs) { + const { poll, state, stateProxy, operationLocation, getOperationStatus, getResourceLocation, isOperationError, options } = inputs; + const response = await poll(operationLocation, options).catch(setStateError({ + state, + stateProxy, + isOperationError + })); + const status = getOperationStatus(response, state); + logger_js_1.logger.verbose(`LRO: Status: + Polling from: ${state.config.operationLocation} + Operation status: ${status} + Polling status: ${constants_js_1.terminalStates.includes(status) ? "Stopped" : "Running"}`); + if (status === "succeeded") { + const resourceLocation = getResourceLocation(response, state); + if (resourceLocation !== void 0) { + return { + response: await poll(resourceLocation).catch(setStateError({ state, stateProxy, isOperationError })), + status + }; + } + } + return { response, status }; + } + async function pollOperation(inputs) { + const { poll, state, stateProxy, options, getOperationStatus, getResourceLocation, getOperationLocation, isOperationError, withOperationLocation, getPollingInterval, processResult, getError, updateState, setDelay, isDone, setErrorAsResult } = inputs; + const { operationLocation } = state.config; + if (operationLocation !== void 0) { + const { response, status } = await pollOperationHelper({ + poll, + getOperationStatus, + state, + stateProxy, + operationLocation, + getResourceLocation, + isOperationError, + options + }); + processOperationStatus({ + status, + response, + state, + stateProxy, + isDone, + processResult, + getError, + setErrorAsResult }); + if (!constants_js_1.terminalStates.includes(status)) { + const intervalInMs = getPollingInterval === null || getPollingInterval === void 0 ? void 0 : getPollingInterval(response); + if (intervalInMs) + setDelay(intervalInMs); + const location = getOperationLocation === null || getOperationLocation === void 0 ? void 0 : getOperationLocation(response, state); + if (location !== void 0) { + const isUpdated = operationLocation !== location; + state.config.operationLocation = location; + withOperationLocation === null || withOperationLocation === void 0 ? void 0 : withOperationLocation(location, isUpdated); + } else + withOperationLocation === null || withOperationLocation === void 0 ? void 0 : withOperationLocation(operationLocation, false); + } + updateState === null || updateState === void 0 ? 
void 0 : updateState(state, response); + } + } + exports2.pollOperation = pollOperation; + } +}); + +// node_modules/@azure/core-lro/dist/commonjs/http/operation.js +var require_operation2 = __commonJS({ + "node_modules/@azure/core-lro/dist/commonjs/http/operation.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.pollHttpOperation = exports2.isOperationError = exports2.getResourceLocation = exports2.getOperationStatus = exports2.getOperationLocation = exports2.initHttpOperation = exports2.getStatusFromInitialResponse = exports2.getErrorFromResponse = exports2.parseRetryAfter = exports2.inferLroMode = void 0; + var operation_js_1 = require_operation(); + var logger_js_1 = require_logger2(); + function getOperationLocationPollingUrl(inputs) { + const { azureAsyncOperation, operationLocation } = inputs; + return operationLocation !== null && operationLocation !== void 0 ? operationLocation : azureAsyncOperation; + } + function getLocationHeader(rawResponse) { + return rawResponse.headers["location"]; + } + function getOperationLocationHeader(rawResponse) { + return rawResponse.headers["operation-location"]; + } + function getAzureAsyncOperationHeader(rawResponse) { + return rawResponse.headers["azure-asyncoperation"]; + } + function findResourceLocation(inputs) { + var _a; + const { location, requestMethod, requestPath, resourceLocationConfig } = inputs; + switch (requestMethod) { + case "PUT": { + return requestPath; + } + case "DELETE": { + return void 0; + } + case "PATCH": { + return (_a = getDefault()) !== null && _a !== void 0 ? _a : requestPath; + } + default: { + return getDefault(); + } + } + function getDefault() { + switch (resourceLocationConfig) { + case "azure-async-operation": { + return void 0; + } + case "original-uri": { + return requestPath; + } + case "location": + default: { + return location; + } + } + } + } + function inferLroMode(inputs) { + const { rawResponse, requestMethod, requestPath, resourceLocationConfig } = inputs; + const operationLocation = getOperationLocationHeader(rawResponse); + const azureAsyncOperation = getAzureAsyncOperationHeader(rawResponse); + const pollingUrl = getOperationLocationPollingUrl({ operationLocation, azureAsyncOperation }); + const location = getLocationHeader(rawResponse); + const normalizedRequestMethod = requestMethod === null || requestMethod === void 0 ? void 0 : requestMethod.toLocaleUpperCase(); + if (pollingUrl !== void 0) { + return { + mode: "OperationLocation", + operationLocation: pollingUrl, + resourceLocation: findResourceLocation({ + requestMethod: normalizedRequestMethod, + location, + requestPath, + resourceLocationConfig + }) + }; + } else if (location !== void 0) { + return { + mode: "ResourceLocation", + operationLocation: location + }; + } else if (normalizedRequestMethod === "PUT" && requestPath) { + return { + mode: "Body", + operationLocation: requestPath + }; + } else { + return void 0; + } + } + exports2.inferLroMode = inferLroMode; + function transformStatus(inputs) { + const { status, statusCode } = inputs; + if (typeof status !== "string" && status !== void 0) { + throw new Error(`Polling was unsuccessful. Expected status to have a string value or no value but it has instead: ${status}. This doesn't necessarily indicate the operation has failed. Check your Azure subscription or resource status for more information.`); } - /** - * Sets system properties on the blob. 
- * - * If no value provided, or no value provided for the specified blob HTTP headers, - * these blob HTTP headers without a value will be cleared. - * @see https://learn.microsoft.com/rest/api/storageservices/set-blob-properties - * - * @param blobHTTPHeaders - If no value provided, or no value provided for - * the specified blob HTTP headers, these blob HTTP - * headers without a value will be cleared. - * A common header to set is `blobContentType` - * enabling the browser to provide functionality - * based on file type. - * @param options - Optional options to Blob Set HTTP Headers operation. - */ - async setHTTPHeaders(blobHTTPHeaders, options = {}) { - options.conditions = options.conditions || {}; - (0, models_js_1.ensureCpkIfSpecified)(options.customerProvidedKey, this.isHttps); - return tracing_js_1.tracingClient.withSpan("BlobClient-setHTTPHeaders", options, async (updatedOptions) => { - return (0, utils_common_js_1.assertResponse)(await this.blobContext.setHttpHeaders({ - abortSignal: options.abortSignal, - blobHttpHeaders: blobHTTPHeaders, - leaseAccessConditions: options.conditions, - modifiedAccessConditions: { - ...options.conditions, - ifTags: options.conditions?.tagConditions - }, - // cpkInfo: options.customerProvidedKey, // CPK is not included in Swagger, should change this back when this issue is fixed in Swagger. - tracingOptions: updatedOptions.tracingOptions - })); - }); + switch (status === null || status === void 0 ? void 0 : status.toLocaleLowerCase()) { + case void 0: + return toOperationStatus(statusCode); + case "succeeded": + return "succeeded"; + case "failed": + return "failed"; + case "running": + case "accepted": + case "started": + case "canceling": + case "cancelling": + return "running"; + case "canceled": + case "cancelled": + return "canceled"; + default: { + logger_js_1.logger.verbose(`LRO: unrecognized operation status: ${status}`); + return status; + } } - /** - * Sets user-defined metadata for the specified blob as one or more name-value pairs. - * - * If no option provided, or no metadata defined in the parameter, the blob - * metadata will be removed. - * @see https://learn.microsoft.com/rest/api/storageservices/set-blob-metadata - * - * @param metadata - Replace existing metadata with this value. - * If no value provided the existing metadata will be removed. - * @param options - Optional options to Set Metadata operation. - */ - async setMetadata(metadata, options = {}) { - options.conditions = options.conditions || {}; - (0, models_js_1.ensureCpkIfSpecified)(options.customerProvidedKey, this.isHttps); - return tracing_js_1.tracingClient.withSpan("BlobClient-setMetadata", options, async (updatedOptions) => { - return (0, utils_common_js_1.assertResponse)(await this.blobContext.setMetadata({ - abortSignal: options.abortSignal, - leaseAccessConditions: options.conditions, - metadata, - modifiedAccessConditions: { - ...options.conditions, - ifTags: options.conditions?.tagConditions - }, - cpkInfo: options.customerProvidedKey, - encryptionScope: options.encryptionScope, - tracingOptions: updatedOptions.tracingOptions - })); - }); + } + function getStatus(rawResponse) { + var _a; + const { status } = (_a = rawResponse.body) !== null && _a !== void 0 ? _a : {}; + return transformStatus({ status, statusCode: rawResponse.statusCode }); + } + function getProvisioningState(rawResponse) { + var _a, _b; + const { properties, provisioningState } = (_a = rawResponse.body) !== null && _a !== void 0 ? 
_a : {}; + const status = (_b = properties === null || properties === void 0 ? void 0 : properties.provisioningState) !== null && _b !== void 0 ? _b : provisioningState; + return transformStatus({ status, statusCode: rawResponse.statusCode }); + } + function toOperationStatus(statusCode) { + if (statusCode === 202) { + return "running"; + } else if (statusCode < 300) { + return "succeeded"; + } else { + return "failed"; } - /** - * Sets tags on the underlying blob. - * A blob can have up to 10 tags. Tag keys must be between 1 and 128 characters. Tag values must be between 0 and 256 characters. - * Valid tag key and value characters include lower and upper case letters, digits (0-9), - * space (' '), plus ('+'), minus ('-'), period ('.'), foward slash ('/'), colon (':'), equals ('='), and underscore ('_'). - * - * @param tags - - * @param options - - */ - async setTags(tags, options = {}) { - return tracing_js_1.tracingClient.withSpan("BlobClient-setTags", options, async (updatedOptions) => { - return (0, utils_common_js_1.assertResponse)(await this.blobContext.setTags({ - abortSignal: options.abortSignal, - leaseAccessConditions: options.conditions, - modifiedAccessConditions: { - ...options.conditions, - ifTags: options.conditions?.tagConditions - }, - tracingOptions: updatedOptions.tracingOptions, - tags: (0, utils_common_js_1.toBlobTags)(tags) - })); - }); + } + function parseRetryAfter({ rawResponse }) { + const retryAfter = rawResponse.headers["retry-after"]; + if (retryAfter !== void 0) { + const retryAfterInSeconds = parseInt(retryAfter); + return isNaN(retryAfterInSeconds) ? calculatePollingIntervalFromDate(new Date(retryAfter)) : retryAfterInSeconds * 1e3; } - /** - * Gets the tags associated with the underlying blob. - * - * @param options - - */ - async getTags(options = {}) { - return tracing_js_1.tracingClient.withSpan("BlobClient-getTags", options, async (updatedOptions) => { - const response = (0, utils_common_js_1.assertResponse)(await this.blobContext.getTags({ - abortSignal: options.abortSignal, - leaseAccessConditions: options.conditions, - modifiedAccessConditions: { - ...options.conditions, - ifTags: options.conditions?.tagConditions - }, - tracingOptions: updatedOptions.tracingOptions - })); - const wrappedResponse = { - ...response, - _response: response._response, - // _response is made non-enumerable - tags: (0, utils_common_js_1.toTags)({ blobTagSet: response.blobTagSet }) || {} - }; - return wrappedResponse; - }); + return void 0; + } + exports2.parseRetryAfter = parseRetryAfter; + function getErrorFromResponse(response) { + const error3 = accessBodyProperty(response, "error"); + if (!error3) { + logger_js_1.logger.warning(`The long-running operation failed but there is no error property in the response's body`); + return; } - /** - * Get a {@link BlobLeaseClient} that manages leases on the blob. - * - * @param proposeLeaseId - Initial proposed lease Id. - * @returns A new BlobLeaseClient object for managing leases on the blob. - */ - getBlobLeaseClient(proposeLeaseId) { - return new BlobLeaseClient_js_1.BlobLeaseClient(this, proposeLeaseId); + if (!error3.code || !error3.message) { + logger_js_1.logger.warning(`The long-running operation failed but the error property in the response's body doesn't contain code or message`); + return; } - /** - * Creates a read-only snapshot of a blob. - * @see https://learn.microsoft.com/rest/api/storageservices/snapshot-blob - * - * @param options - Optional options to the Blob Create Snapshot operation. 
- */ - async createSnapshot(options = {}) { - options.conditions = options.conditions || {}; - (0, models_js_1.ensureCpkIfSpecified)(options.customerProvidedKey, this.isHttps); - return tracing_js_1.tracingClient.withSpan("BlobClient-createSnapshot", options, async (updatedOptions) => { - return (0, utils_common_js_1.assertResponse)(await this.blobContext.createSnapshot({ - abortSignal: options.abortSignal, - leaseAccessConditions: options.conditions, - metadata: options.metadata, - modifiedAccessConditions: { - ...options.conditions, - ifTags: options.conditions?.tagConditions - }, - cpkInfo: options.customerProvidedKey, - encryptionScope: options.encryptionScope, - tracingOptions: updatedOptions.tracingOptions - })); - }); + return error3; + } + exports2.getErrorFromResponse = getErrorFromResponse; + function calculatePollingIntervalFromDate(retryAfterDate) { + const timeNow = Math.floor((/* @__PURE__ */ new Date()).getTime()); + const retryAfterTime = retryAfterDate.getTime(); + if (timeNow < retryAfterTime) { + return retryAfterTime - timeNow; } - /** - * Asynchronously copies a blob to a destination within the storage account. - * This method returns a long running operation poller that allows you to wait - * indefinitely until the copy is completed. - * You can also cancel a copy before it is completed by calling `cancelOperation` on the poller. - * Note that the onProgress callback will not be invoked if the operation completes in the first - * request, and attempting to cancel a completed copy will result in an error being thrown. - * - * In version 2012-02-12 and later, the source for a Copy Blob operation can be - * a committed blob in any Azure storage account. - * Beginning with version 2015-02-21, the source for a Copy Blob operation can be - * an Azure file in any Azure storage account. - * Only storage accounts created on or after June 7th, 2012 allow the Copy Blob - * operation to copy from another storage account. 
- * @see https://learn.microsoft.com/rest/api/storageservices/copy-blob - * - * ```ts snippet:ClientsBeginCopyFromURL - * import { BlobServiceClient } from "@azure/storage-blob"; - * import { DefaultAzureCredential } from "@azure/identity"; - * - * const account = ""; - * const blobServiceClient = new BlobServiceClient( - * `https://${account}.blob.core.windows.net`, - * new DefaultAzureCredential(), - * ); - * - * const containerName = ""; - * const blobName = ""; - * const containerClient = blobServiceClient.getContainerClient(containerName); - * const blobClient = containerClient.getBlobClient(blobName); - * - * // Example using automatic polling - * const automaticCopyPoller = await blobClient.beginCopyFromURL("url"); - * const automaticResult = await automaticCopyPoller.pollUntilDone(); - * - * // Example using manual polling - * const manualCopyPoller = await blobClient.beginCopyFromURL("url"); - * while (!manualCopyPoller.isDone()) { - * await manualCopyPoller.poll(); - * } - * const manualResult = manualCopyPoller.getResult(); - * - * // Example using progress updates - * const progressUpdatesCopyPoller = await blobClient.beginCopyFromURL("url", { - * onProgress(state) { - * console.log(`Progress: ${state.copyProgress}`); - * }, - * }); - * const progressUpdatesResult = await progressUpdatesCopyPoller.pollUntilDone(); - * - * // Example using a changing polling interval (default 15 seconds) - * const pollingIntervalCopyPoller = await blobClient.beginCopyFromURL("url", { - * intervalInMs: 1000, // poll blob every 1 second for copy progress - * }); - * const pollingIntervalResult = await pollingIntervalCopyPoller.pollUntilDone(); - * - * // Example using copy cancellation: - * const cancelCopyPoller = await blobClient.beginCopyFromURL("url"); - * // cancel operation after starting it. - * try { - * await cancelCopyPoller.cancelOperation(); - * // calls to get the result now throw PollerCancelledError - * cancelCopyPoller.getResult(); - * } catch (err: any) { - * if (err.name === "PollerCancelledError") { - * console.log("The copy was cancelled."); - * } - * } - * ``` - * - * @param copySource - url to the source Azure Blob/File. - * @param options - Optional options to the Blob Start Copy From URL operation. - */ - async beginCopyFromURL(copySource, options = {}) { - const client = { - abortCopyFromURL: (...args) => this.abortCopyFromURL(...args), - getProperties: (...args) => this.getProperties(...args), - startCopyFromURL: (...args) => this.startCopyFromURL(...args) - }; - const poller = new BlobStartCopyFromUrlPoller_js_1.BlobBeginCopyFromUrlPoller({ - blobClient: client, - copySource, - intervalInMs: options.intervalInMs, - onProgress: options.onProgress, - resumeFrom: options.resumeFrom, - startCopyFromURLOptions: options - }); - await poller.poll(); - return poller; + return void 0; + } + function getStatusFromInitialResponse(inputs) { + const { response, state, operationLocation } = inputs; + function helper() { + var _a; + const mode = (_a = state.config.metadata) === null || _a === void 0 ? void 0 : _a["mode"]; + switch (mode) { + case void 0: + return toOperationStatus(response.rawResponse.statusCode); + case "Body": + return getOperationStatus(response, state); + default: + return "running"; + } } - /** - * Aborts a pending asynchronous Copy Blob operation, and leaves a destination blob with zero - * length and full metadata. Version 2012-02-12 and newer. 
- * @see https://learn.microsoft.com/rest/api/storageservices/abort-copy-blob - * - * @param copyId - Id of the Copy From URL operation. - * @param options - Optional options to the Blob Abort Copy From URL operation. - */ - async abortCopyFromURL(copyId, options = {}) { - return tracing_js_1.tracingClient.withSpan("BlobClient-abortCopyFromURL", options, async (updatedOptions) => { - return (0, utils_common_js_1.assertResponse)(await this.blobContext.abortCopyFromURL(copyId, { - abortSignal: options.abortSignal, - leaseAccessConditions: options.conditions, - tracingOptions: updatedOptions.tracingOptions - })); - }); + const status = helper(); + return status === "running" && operationLocation === void 0 ? "succeeded" : status; + } + exports2.getStatusFromInitialResponse = getStatusFromInitialResponse; + async function initHttpOperation(inputs) { + const { stateProxy, resourceLocationConfig, processResult, lro, setErrorAsResult } = inputs; + return (0, operation_js_1.initOperation)({ + init: async () => { + const response = await lro.sendInitialRequest(); + const config = inferLroMode({ + rawResponse: response.rawResponse, + requestPath: lro.requestPath, + requestMethod: lro.requestMethod, + resourceLocationConfig + }); + return Object.assign({ response, operationLocation: config === null || config === void 0 ? void 0 : config.operationLocation, resourceLocation: config === null || config === void 0 ? void 0 : config.resourceLocation }, (config === null || config === void 0 ? void 0 : config.mode) ? { metadata: { mode: config.mode } } : {}); + }, + stateProxy, + processResult: processResult ? ({ flatResponse }, state) => processResult(flatResponse, state) : ({ flatResponse }) => flatResponse, + getOperationStatus: getStatusFromInitialResponse, + setErrorAsResult + }); + } + exports2.initHttpOperation = initHttpOperation; + function getOperationLocation({ rawResponse }, state) { + var _a; + const mode = (_a = state.config.metadata) === null || _a === void 0 ? void 0 : _a["mode"]; + switch (mode) { + case "OperationLocation": { + return getOperationLocationPollingUrl({ + operationLocation: getOperationLocationHeader(rawResponse), + azureAsyncOperation: getAzureAsyncOperationHeader(rawResponse) + }); + } + case "ResourceLocation": { + return getLocationHeader(rawResponse); + } + case "Body": + default: { + return void 0; + } } - /** - * The synchronous Copy From URL operation copies a blob or an internet resource to a new blob. It will not - * return a response until the copy is complete. 
- * @see https://learn.microsoft.com/rest/api/storageservices/copy-blob-from-url - * - * @param copySource - The source URL to copy from, Shared Access Signature(SAS) maybe needed for authentication - * @param options - - */ - async syncCopyFromURL(copySource, options = {}) { - options.conditions = options.conditions || {}; - options.sourceConditions = options.sourceConditions || {}; - return tracing_js_1.tracingClient.withSpan("BlobClient-syncCopyFromURL", options, async (updatedOptions) => { - return (0, utils_common_js_1.assertResponse)(await this.blobContext.copyFromURL(copySource, { - abortSignal: options.abortSignal, - metadata: options.metadata, - leaseAccessConditions: options.conditions, - modifiedAccessConditions: { - ...options.conditions, - ifTags: options.conditions?.tagConditions - }, - sourceModifiedAccessConditions: { - sourceIfMatch: options.sourceConditions?.ifMatch, - sourceIfModifiedSince: options.sourceConditions?.ifModifiedSince, - sourceIfNoneMatch: options.sourceConditions?.ifNoneMatch, - sourceIfUnmodifiedSince: options.sourceConditions?.ifUnmodifiedSince - }, - sourceContentMD5: options.sourceContentMD5, - copySourceAuthorization: (0, utils_common_js_1.httpAuthorizationToString)(options.sourceAuthorization), - tier: (0, models_js_1.toAccessTier)(options.tier), - blobTagsString: (0, utils_common_js_1.toBlobTagsString)(options.tags), - immutabilityPolicyExpiry: options.immutabilityPolicy?.expiriesOn, - immutabilityPolicyMode: options.immutabilityPolicy?.policyMode, - legalHold: options.legalHold, - encryptionScope: options.encryptionScope, - copySourceTags: options.copySourceTags, - fileRequestIntent: options.sourceShareTokenIntent, - tracingOptions: updatedOptions.tracingOptions - })); - }); + } + exports2.getOperationLocation = getOperationLocation; + function getOperationStatus({ rawResponse }, state) { + var _a; + const mode = (_a = state.config.metadata) === null || _a === void 0 ? void 0 : _a["mode"]; + switch (mode) { + case "OperationLocation": { + return getStatus(rawResponse); + } + case "ResourceLocation": { + return toOperationStatus(rawResponse.statusCode); + } + case "Body": { + return getProvisioningState(rawResponse); + } + default: + throw new Error(`Internal error: Unexpected operation mode: ${mode}`); + } + } + exports2.getOperationStatus = getOperationStatus; + function accessBodyProperty({ flatResponse, rawResponse }, prop) { + var _a, _b; + return (_a = flatResponse === null || flatResponse === void 0 ? void 0 : flatResponse[prop]) !== null && _a !== void 0 ? _a : (_b = rawResponse.body) === null || _b === void 0 ? void 0 : _b[prop]; + } + function getResourceLocation(res, state) { + const loc = accessBodyProperty(res, "resourceLocation"); + if (loc && typeof loc === "string") { + state.config.resourceLocation = loc; } + return state.config.resourceLocation; + } + exports2.getResourceLocation = getResourceLocation; + function isOperationError(e) { + return e.name === "RestError"; + } + exports2.isOperationError = isOperationError; + async function pollHttpOperation(inputs) { + const { lro, stateProxy, options, processResult, updateState, setDelay, state, setErrorAsResult } = inputs; + return (0, operation_js_1.pollOperation)({ + state, + stateProxy, + setDelay, + processResult: processResult ? 
({ flatResponse }, inputState) => processResult(flatResponse, inputState) : ({ flatResponse }) => flatResponse, + getError: getErrorFromResponse, + updateState, + getPollingInterval: parseRetryAfter, + getOperationLocation, + getOperationStatus, + isOperationError, + getResourceLocation, + options, + /** + * The expansion here is intentional because `lro` could be an object that + * references an inner this, so we need to preserve a reference to it. + */ + poll: async (location, inputOptions) => lro.sendPollRequest(location, inputOptions), + setErrorAsResult + }); + } + exports2.pollHttpOperation = pollHttpOperation; + } +}); + +// node_modules/@azure/core-lro/dist/commonjs/poller/poller.js +var require_poller = __commonJS({ + "node_modules/@azure/core-lro/dist/commonjs/poller/poller.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.buildCreatePoller = void 0; + var operation_js_1 = require_operation(); + var constants_js_1 = require_constants17(); + var core_util_1 = require_commonjs4(); + var createStateProxy = () => ({ /** - * Sets the tier on a blob. The operation is allowed on a page blob in a premium - * storage account and on a block blob in a blob storage account (locally redundant - * storage only). A premium page blob's tier determines the allowed size, IOPS, - * and bandwidth of the blob. A block blob's tier determines Hot/Cool/Archive - * storage type. This operation does not update the blob's ETag. - * @see https://learn.microsoft.com/rest/api/storageservices/set-blob-tier - * - * @param tier - The tier to be set on the blob. Valid values are Hot, Cool, or Archive. - * @param options - Optional options to the Blob Set Tier operation. + * The state at this point is created to be of type OperationState. + * It will be updated later to be of type TState when the + * customer-provided callback, `updateState`, is called during polling. 
*/ - async setAccessTier(tier, options = {}) { - return tracing_js_1.tracingClient.withSpan("BlobClient-setAccessTier", options, async (updatedOptions) => { - return (0, utils_common_js_1.assertResponse)(await this.blobContext.setTier((0, models_js_1.toAccessTier)(tier), { - abortSignal: options.abortSignal, - leaseAccessConditions: options.conditions, - modifiedAccessConditions: { - ...options.conditions, - ifTags: options.conditions?.tagConditions - }, - rehydratePriority: options.rehydratePriority, - tracingOptions: updatedOptions.tracingOptions - })); + initState: (config) => ({ status: "running", config }), + setCanceled: (state) => state.status = "canceled", + setError: (state, error3) => state.error = error3, + setResult: (state, result) => state.result = result, + setRunning: (state) => state.status = "running", + setSucceeded: (state) => state.status = "succeeded", + setFailed: (state) => state.status = "failed", + getError: (state) => state.error, + getResult: (state) => state.result, + isCanceled: (state) => state.status === "canceled", + isFailed: (state) => state.status === "failed", + isRunning: (state) => state.status === "running", + isSucceeded: (state) => state.status === "succeeded" + }); + function buildCreatePoller(inputs) { + const { getOperationLocation, getStatusFromInitialResponse, getStatusFromPollResponse, isOperationError, getResourceLocation, getPollingInterval, getError, resolveOnUnsuccessful } = inputs; + return async ({ init, poll }, options) => { + const { processResult, updateState, withOperationLocation: withOperationLocationCallback, intervalInMs = constants_js_1.POLL_INTERVAL_IN_MS, restoreFrom } = options || {}; + const stateProxy = createStateProxy(); + const withOperationLocation = withOperationLocationCallback ? /* @__PURE__ */ (() => { + let called = false; + return (operationLocation, isUpdated) => { + if (isUpdated) + withOperationLocationCallback(operationLocation); + else if (!called) + withOperationLocationCallback(operationLocation); + called = true; + }; + })() : void 0; + const state = restoreFrom ? (0, operation_js_1.deserializeState)(restoreFrom) : await (0, operation_js_1.initOperation)({ + init, + stateProxy, + processResult, + getOperationStatus: getStatusFromInitialResponse, + withOperationLocation, + setErrorAsResult: !resolveOnUnsuccessful }); - } - async downloadToBuffer(param1, param2, param3, param4 = {}) { - let buffer; - let offset = 0; - let count = 0; - let options = param4; - if (param1 instanceof Buffer) { - buffer = param1; - offset = param2 || 0; - count = typeof param3 === "number" ? param3 : 0; - } else { - offset = typeof param1 === "number" ? param1 : 0; - count = typeof param2 === "number" ? param2 : 0; - options = param3 || {}; - } - let blockSize = options.blockSize ?? 
0; - if (blockSize < 0) { - throw new RangeError("blockSize option must be >= 0"); - } - if (blockSize === 0) { - blockSize = constants_js_1.DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES; - } - if (offset < 0) { - throw new RangeError("offset option must be >= 0"); - } - if (count && count <= 0) { - throw new RangeError("count option must be greater than 0"); - } - if (!options.conditions) { - options.conditions = {}; - } - return tracing_js_1.tracingClient.withSpan("BlobClient-downloadToBuffer", options, async (updatedOptions) => { - if (!count) { - const response = await this.getProperties({ - ...options, - tracingOptions: updatedOptions.tracingOptions - }); - count = response.contentLength - offset; - if (count < 0) { - throw new RangeError(`offset ${offset} shouldn't be larger than blob size ${response.contentLength}`); + let resultPromise; + const abortController = new AbortController(); + const handlers = /* @__PURE__ */ new Map(); + const handleProgressEvents = async () => handlers.forEach((h) => h(state)); + const cancelErrMsg = "Operation was canceled"; + let currentPollIntervalInMs = intervalInMs; + const poller = { + getOperationState: () => state, + getResult: () => state.result, + isDone: () => ["succeeded", "failed", "canceled"].includes(state.status), + isStopped: () => resultPromise === void 0, + stopPolling: () => { + abortController.abort(); + }, + toString: () => JSON.stringify({ + state + }), + onProgress: (callback) => { + const s = /* @__PURE__ */ Symbol(); + handlers.set(s, callback); + return () => handlers.delete(s); + }, + pollUntilDone: (pollOptions) => resultPromise !== null && resultPromise !== void 0 ? resultPromise : resultPromise = (async () => { + const { abortSignal: inputAbortSignal } = pollOptions || {}; + function abortListener() { + abortController.abort(); + } + const abortSignal = abortController.signal; + if (inputAbortSignal === null || inputAbortSignal === void 0 ? void 0 : inputAbortSignal.aborted) { + abortController.abort(); + } else if (!abortSignal.aborted) { + inputAbortSignal === null || inputAbortSignal === void 0 ? void 0 : inputAbortSignal.addEventListener("abort", abortListener, { once: true }); } - } - if (!buffer) { try { - buffer = Buffer.alloc(count); - } catch (error3) { - throw new Error(`Unable to allocate the buffer of size: ${count}(in bytes). Please try passing your own buffer to the "downloadToBuffer" method or try using other methods like "download" or "downloadToFile". ${error3.message}`); + if (!poller.isDone()) { + await poller.poll({ abortSignal }); + while (!poller.isDone()) { + await (0, core_util_1.delay)(currentPollIntervalInMs, { abortSignal }); + await poller.poll({ abortSignal }); + } + } + } finally { + inputAbortSignal === null || inputAbortSignal === void 0 ? 
void 0 : inputAbortSignal.removeEventListener("abort", abortListener); } - } - if (buffer.length < count) { - throw new RangeError(`The buffer's size should be equal to or larger than the request count of bytes: ${count}`); - } - let transferProgress = 0; - const batch = new Batch_js_1.Batch(options.concurrency); - for (let off = offset; off < offset + count; off = off + blockSize) { - batch.addOperation(async () => { - let chunkEnd = offset + count; - if (off + blockSize < chunkEnd) { - chunkEnd = off + blockSize; + if (resolveOnUnsuccessful) { + return poller.getResult(); + } else { + switch (state.status) { + case "succeeded": + return poller.getResult(); + case "canceled": + throw new Error(cancelErrMsg); + case "failed": + throw state.error; + case "notStarted": + case "running": + throw new Error(`Polling completed without succeeding or failing`); } - const response = await this.download(off, chunkEnd - off, { - abortSignal: options.abortSignal, - conditions: options.conditions, - maxRetryRequests: options.maxRetryRequestsPerBlock, - customerProvidedKey: options.customerProvidedKey, - tracingOptions: updatedOptions.tracingOptions - }); - const stream2 = response.readableStreamBody; - await (0, utils_js_1.streamToBuffer)(stream2, buffer, off - offset, chunkEnd - offset); - transferProgress += chunkEnd - off; - if (options.onProgress) { - options.onProgress({ loadedBytes: transferProgress }); + } + })().finally(() => { + resultPromise = void 0; + }), + async poll(pollOptions) { + if (resolveOnUnsuccessful) { + if (poller.isDone()) + return; + } else { + switch (state.status) { + case "succeeded": + return; + case "canceled": + throw new Error(cancelErrMsg); + case "failed": + throw state.error; } + } + await (0, operation_js_1.pollOperation)({ + poll, + state, + stateProxy, + getOperationLocation, + isOperationError, + withOperationLocation, + getPollingInterval, + getOperationStatus: getStatusFromPollResponse, + getResourceLocation, + processResult, + getError, + updateState, + options: pollOptions, + setDelay: (pollIntervalInMs) => { + currentPollIntervalInMs = pollIntervalInMs; + }, + setErrorAsResult: !resolveOnUnsuccessful }); + await handleProgressEvents(); + if (!resolveOnUnsuccessful) { + switch (state.status) { + case "canceled": + throw new Error(cancelErrMsg); + case "failed": + throw state.error; + } + } } - await batch.do(); - return buffer; - }); - } - /** - * ONLY AVAILABLE IN NODE.JS RUNTIME. - * - * Downloads an Azure Blob to a local file. - * Fails if the the given file path already exits. - * Offset and count are optional, pass 0 and undefined respectively to download the entire blob. - * - * @param filePath - - * @param offset - From which position of the block blob to download. - * @param count - How much data to be downloaded. Will download to the end when passing undefined. - * @param options - Options to Blob download options. - * @returns The response data for blob download operation, - * but with readableStreamBody set to undefined since its - * content is already read and written into a local file - * at the specified path. 
- */ - async downloadToFile(filePath, offset = 0, count, options = {}) { - return tracing_js_1.tracingClient.withSpan("BlobClient-downloadToFile", options, async (updatedOptions) => { - const response = await this.download(offset, count, { - ...options, - tracingOptions: updatedOptions.tracingOptions + }; + return poller; + }; + } + exports2.buildCreatePoller = buildCreatePoller; + } +}); + +// node_modules/@azure/core-lro/dist/commonjs/http/poller.js +var require_poller2 = __commonJS({ + "node_modules/@azure/core-lro/dist/commonjs/http/poller.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.createHttpPoller = void 0; + var operation_js_1 = require_operation2(); + var poller_js_1 = require_poller(); + async function createHttpPoller(lro, options) { + const { resourceLocationConfig, intervalInMs, processResult, restoreFrom, updateState, withOperationLocation, resolveOnUnsuccessful = false } = options || {}; + return (0, poller_js_1.buildCreatePoller)({ + getStatusFromInitialResponse: operation_js_1.getStatusFromInitialResponse, + getStatusFromPollResponse: operation_js_1.getOperationStatus, + isOperationError: operation_js_1.isOperationError, + getOperationLocation: operation_js_1.getOperationLocation, + getResourceLocation: operation_js_1.getResourceLocation, + getPollingInterval: operation_js_1.parseRetryAfter, + getError: operation_js_1.getErrorFromResponse, + resolveOnUnsuccessful + })({ + init: async () => { + const response = await lro.sendInitialRequest(); + const config = (0, operation_js_1.inferLroMode)({ + rawResponse: response.rawResponse, + requestPath: lro.requestPath, + requestMethod: lro.requestMethod, + resourceLocationConfig }); - if (response.readableStreamBody) { - await (0, utils_js_1.readStreamToLocalFile)(response.readableStreamBody, filePath); - } - response.blobDownloadStream = void 0; - return response; - }); + return Object.assign({ response, operationLocation: config === null || config === void 0 ? void 0 : config.operationLocation, resourceLocation: config === null || config === void 0 ? void 0 : config.resourceLocation }, (config === null || config === void 0 ? void 0 : config.mode) ? { metadata: { mode: config.mode } } : {}); + }, + poll: lro.sendPollRequest + }, { + intervalInMs, + withOperationLocation, + restoreFrom, + updateState, + processResult: processResult ? 
({ flatResponse }, state) => processResult(flatResponse, state) : ({ flatResponse }) => flatResponse + }); + } + exports2.createHttpPoller = createHttpPoller; + } +}); + +// node_modules/@azure/core-lro/dist/commonjs/legacy/lroEngine/operation.js +var require_operation3 = __commonJS({ + "node_modules/@azure/core-lro/dist/commonjs/legacy/lroEngine/operation.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.GenericPollOperation = void 0; + var operation_js_1 = require_operation2(); + var logger_js_1 = require_logger2(); + var createStateProxy = () => ({ + initState: (config) => ({ config, isStarted: true }), + setCanceled: (state) => state.isCancelled = true, + setError: (state, error3) => state.error = error3, + setResult: (state, result) => state.result = result, + setRunning: (state) => state.isStarted = true, + setSucceeded: (state) => state.isCompleted = true, + setFailed: () => { + }, + getError: (state) => state.error, + getResult: (state) => state.result, + isCanceled: (state) => !!state.isCancelled, + isFailed: (state) => !!state.error, + isRunning: (state) => !!state.isStarted, + isSucceeded: (state) => Boolean(state.isCompleted && !state.isCancelled && !state.error) + }); + var GenericPollOperation = class { + constructor(state, lro, setErrorAsResult, lroResourceLocationConfig, processResult, updateState, isDone) { + this.state = state; + this.lro = lro; + this.setErrorAsResult = setErrorAsResult; + this.lroResourceLocationConfig = lroResourceLocationConfig; + this.processResult = processResult; + this.updateState = updateState; + this.isDone = isDone; } - getBlobAndContainerNamesFromUrl() { - let containerName; - let blobName; - try { - const parsedUrl = new URL(this.url); - if (parsedUrl.host.split(".")[1] === "blob") { - const pathComponents = parsedUrl.pathname.match("/([^/]*)(/(.*))?"); - containerName = pathComponents[1]; - blobName = pathComponents[3]; - } else if ((0, utils_common_js_1.isIpEndpointStyle)(parsedUrl)) { - const pathComponents = parsedUrl.pathname.match("/([^/]*)/([^/]*)(/(.*))?"); - containerName = pathComponents[2]; - blobName = pathComponents[4]; - } else { - const pathComponents = parsedUrl.pathname.match("/([^/]*)(/(.*))?"); - containerName = pathComponents[1]; - blobName = pathComponents[3]; - } - containerName = decodeURIComponent(containerName); - blobName = decodeURIComponent(blobName); - blobName = blobName.replace(/\\/g, "/"); - if (!containerName) { - throw new Error("Provided containerName is invalid."); - } - return { blobName, containerName }; - } catch (error3) { - throw new Error("Unable to extract blobName and containerName with provided information."); - } + setPollerConfig(pollerConfig) { + this.pollerConfig = pollerConfig; } - /** - * Asynchronously copies a blob to a destination within the storage account. - * In version 2012-02-12 and later, the source for a Copy Blob operation can be - * a committed blob in any Azure storage account. - * Beginning with version 2015-02-21, the source for a Copy Blob operation can be - * an Azure file in any Azure storage account. - * Only storage accounts created on or after June 7th, 2012 allow the Copy Blob - * operation to copy from another storage account. - * @see https://learn.microsoft.com/rest/api/storageservices/copy-blob - * - * @param copySource - url to the source Azure Blob/File. - * @param options - Optional options to the Blob Start Copy From URL operation. 
- */ - async startCopyFromURL(copySource, options = {}) { - return tracing_js_1.tracingClient.withSpan("BlobClient-startCopyFromURL", options, async (updatedOptions) => { - options.conditions = options.conditions || {}; - options.sourceConditions = options.sourceConditions || {}; - return (0, utils_common_js_1.assertResponse)(await this.blobContext.startCopyFromURL(copySource, { - abortSignal: options.abortSignal, - leaseAccessConditions: options.conditions, - metadata: options.metadata, - modifiedAccessConditions: { - ...options.conditions, - ifTags: options.conditions?.tagConditions - }, - sourceModifiedAccessConditions: { - sourceIfMatch: options.sourceConditions.ifMatch, - sourceIfModifiedSince: options.sourceConditions.ifModifiedSince, - sourceIfNoneMatch: options.sourceConditions.ifNoneMatch, - sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince, - sourceIfTags: options.sourceConditions.tagConditions - }, - immutabilityPolicyExpiry: options.immutabilityPolicy?.expiriesOn, - immutabilityPolicyMode: options.immutabilityPolicy?.policyMode, - legalHold: options.legalHold, - rehydratePriority: options.rehydratePriority, - tier: (0, models_js_1.toAccessTier)(options.tier), - blobTagsString: (0, utils_common_js_1.toBlobTagsString)(options.tags), - sealBlob: options.sealBlob, - tracingOptions: updatedOptions.tracingOptions + async update(options) { + var _a; + const stateProxy = createStateProxy(); + if (!this.state.isStarted) { + this.state = Object.assign(Object.assign({}, this.state), await (0, operation_js_1.initHttpOperation)({ + lro: this.lro, + stateProxy, + resourceLocationConfig: this.lroResourceLocationConfig, + processResult: this.processResult, + setErrorAsResult: this.setErrorAsResult })); - }); - } - /** - * Only available for BlobClient constructed with a shared key credential. - * - * Generates a Blob Service Shared Access Signature (SAS) URI based on the client properties - * and parameters passed in. The SAS is signed by the shared key credential of the client. - * - * @see https://learn.microsoft.com/rest/api/storageservices/constructing-a-service-sas - * - * @param options - Optional parameters. - * @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. - */ - generateSasUrl(options) { - return new Promise((resolve6) => { - if (!(this.credential instanceof StorageSharedKeyCredential_js_1.StorageSharedKeyCredential)) { - throw new RangeError("Can only generate the SAS when the client is initialized with a shared key credential"); - } - const sas = (0, BlobSASSignatureValues_js_1.generateBlobSASQueryParameters)({ - containerName: this._containerName, - blobName: this._name, - snapshotTime: this._snapshot, - versionId: this._versionId, - ...options - }, this.credential).toString(); - resolve6((0, utils_common_js_1.appendToURLQuery)(this.url, sas)); - }); - } - /** - * Only available for BlobClient constructed with a shared key credential. - * - * Generates string to sign for a Blob Service Shared Access Signature (SAS) URI based on - * the client properties and parameters passed in. The SAS is signed by the shared key credential of the client. - * - * @see https://learn.microsoft.com/rest/api/storageservices/constructing-a-service-sas - * - * @param options - Optional parameters. - * @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. 
- */ - /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ - generateSasStringToSign(options) { - if (!(this.credential instanceof StorageSharedKeyCredential_js_1.StorageSharedKeyCredential)) { - throw new RangeError("Can only generate the SAS when the client is initialized with a shared key credential"); } - return (0, BlobSASSignatureValues_js_1.generateBlobSASQueryParametersInternal)({ - containerName: this._containerName, - blobName: this._name, - snapshotTime: this._snapshot, - versionId: this._versionId, - ...options - }, this.credential).stringToSign; - } - /** - * - * Generates a Blob Service Shared Access Signature (SAS) URI based on - * the client properties and parameters passed in. The SAS is signed by the input user delegation key. - * - * @see https://learn.microsoft.com/rest/api/storageservices/constructing-a-service-sas - * - * @param options - Optional parameters. - * @param userDelegationKey - Return value of `blobServiceClient.getUserDelegationKey()` - * @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. - */ - generateUserDelegationSasUrl(options, userDelegationKey) { - return new Promise((resolve6) => { - const sas = (0, BlobSASSignatureValues_js_1.generateBlobSASQueryParameters)({ - containerName: this._containerName, - blobName: this._name, - snapshotTime: this._snapshot, - versionId: this._versionId, - ...options - }, userDelegationKey, this.accountName).toString(); - resolve6((0, utils_common_js_1.appendToURLQuery)(this.url, sas)); - }); - } - /** - * Only available for BlobClient constructed with a shared key credential. - * - * Generates string to sign for a Blob Service Shared Access Signature (SAS) URI based on - * the client properties and parameters passed in. The SAS is signed by the input user delegation key. - * - * @see https://learn.microsoft.com/rest/api/storageservices/constructing-a-service-sas - * - * @param options - Optional parameters. - * @param userDelegationKey - Return value of `blobServiceClient.getUserDelegationKey()` - * @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. - */ - generateUserDelegationSasStringToSign(options, userDelegationKey) { - return (0, BlobSASSignatureValues_js_1.generateBlobSASQueryParametersInternal)({ - containerName: this._containerName, - blobName: this._name, - snapshotTime: this._snapshot, - versionId: this._versionId, - ...options - }, userDelegationKey, this.accountName).stringToSign; - } - /** - * Delete the immutablility policy on the blob. - * - * @param options - Optional options to delete immutability policy on the blob. - */ - async deleteImmutabilityPolicy(options = {}) { - return tracing_js_1.tracingClient.withSpan("BlobClient-deleteImmutabilityPolicy", options, async (updatedOptions) => { - return (0, utils_common_js_1.assertResponse)(await this.blobContext.deleteImmutabilityPolicy({ - tracingOptions: updatedOptions.tracingOptions - })); - }); - } - /** - * Set immutability policy on the blob. - * - * @param options - Optional options to set immutability policy on the blob. 
- */ - async setImmutabilityPolicy(immutabilityPolicy, options = {}) { - return tracing_js_1.tracingClient.withSpan("BlobClient-setImmutabilityPolicy", options, async (updatedOptions) => { - return (0, utils_common_js_1.assertResponse)(await this.blobContext.setImmutabilityPolicy({ - immutabilityPolicyExpiry: immutabilityPolicy.expiriesOn, - immutabilityPolicyMode: immutabilityPolicy.policyMode, - tracingOptions: updatedOptions.tracingOptions - })); - }); + const updateState = this.updateState; + const isDone = this.isDone; + if (!this.state.isCompleted && this.state.error === void 0) { + await (0, operation_js_1.pollHttpOperation)({ + lro: this.lro, + state: this.state, + stateProxy, + processResult: this.processResult, + updateState: updateState ? (state, { rawResponse }) => updateState(state, rawResponse) : void 0, + isDone: isDone ? ({ flatResponse }, state) => isDone(flatResponse, state) : void 0, + options, + setDelay: (intervalInMs) => { + this.pollerConfig.intervalInMs = intervalInMs; + }, + setErrorAsResult: this.setErrorAsResult + }); + } + (_a = options === null || options === void 0 ? void 0 : options.fireProgress) === null || _a === void 0 ? void 0 : _a.call(options, this.state); + return this; } - /** - * Set legal hold on the blob. - * - * @param options - Optional options to set legal hold on the blob. - */ - async setLegalHold(legalHoldEnabled, options = {}) { - return tracing_js_1.tracingClient.withSpan("BlobClient-setLegalHold", options, async (updatedOptions) => { - return (0, utils_common_js_1.assertResponse)(await this.blobContext.setLegalHold(legalHoldEnabled, { - tracingOptions: updatedOptions.tracingOptions - })); - }); + async cancel() { + logger_js_1.logger.error("`cancelOperation` is deprecated because it wasn't implemented"); + return this; } /** - * The Get Account Information operation returns the sku name and account kind - * for the specified account. - * The Get Account Information operation is available on service versions beginning - * with version 2018-03-28. - * @see https://learn.microsoft.com/rest/api/storageservices/get-account-information - * - * @param options - Options to the Service Get Account Info operation. - * @returns Response data for the Service Get Account Info operation. + * Serializes the Poller operation. */ - async getAccountInfo(options = {}) { - return tracing_js_1.tracingClient.withSpan("BlobClient-getAccountInfo", options, async (updatedOptions) => { - return (0, utils_common_js_1.assertResponse)(await this.blobContext.getAccountInfo({ - abortSignal: options.abortSignal, - tracingOptions: updatedOptions.tracingOptions - })); + toString() { + return JSON.stringify({ + state: this.state }); } }; - exports2.BlobClient = BlobClient; - var AppendBlobClient = class _AppendBlobClient extends BlobClient { - /** - * appendBlobsContext provided by protocol layer. 
- */ - appendBlobContext; - constructor(urlOrConnectionString, credentialOrPipelineOrContainerName, blobNameOrOptions, options) { - let pipeline; - let url2; - options = options || {}; - if ((0, Pipeline_js_1.isPipelineLike)(credentialOrPipelineOrContainerName)) { - url2 = urlOrConnectionString; - pipeline = credentialOrPipelineOrContainerName; - } else if (core_util_1.isNodeLike && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential_js_1.StorageSharedKeyCredential || credentialOrPipelineOrContainerName instanceof AnonymousCredential_js_1.AnonymousCredential || (0, core_auth_1.isTokenCredential)(credentialOrPipelineOrContainerName)) { - url2 = urlOrConnectionString; - options = blobNameOrOptions; - pipeline = (0, Pipeline_js_1.newPipeline)(credentialOrPipelineOrContainerName, options); - } else if (!credentialOrPipelineOrContainerName && typeof credentialOrPipelineOrContainerName !== "string") { - url2 = urlOrConnectionString; - pipeline = (0, Pipeline_js_1.newPipeline)(new AnonymousCredential_js_1.AnonymousCredential(), options); - } else if (credentialOrPipelineOrContainerName && typeof credentialOrPipelineOrContainerName === "string" && blobNameOrOptions && typeof blobNameOrOptions === "string") { - const containerName = credentialOrPipelineOrContainerName; - const blobName = blobNameOrOptions; - const extractedCreds = (0, utils_common_js_1.extractConnectionStringParts)(urlOrConnectionString); - if (extractedCreds.kind === "AccountConnString") { - if (core_util_1.isNodeLike) { - const sharedKeyCredential = new StorageSharedKeyCredential_js_1.StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); - url2 = (0, utils_common_js_1.appendToURLPath)((0, utils_common_js_1.appendToURLPath)(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)); - if (!options.proxyOptions) { - options.proxyOptions = (0, core_rest_pipeline_1.getDefaultProxySettings)(extractedCreds.proxyUri); - } - pipeline = (0, Pipeline_js_1.newPipeline)(sharedKeyCredential, options); - } else { - throw new Error("Account connection string is only supported in Node.js environment"); - } - } else if (extractedCreds.kind === "SASConnString") { - url2 = (0, utils_common_js_1.appendToURLPath)((0, utils_common_js_1.appendToURLPath)(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)) + "?" 
+ extractedCreds.accountSas; - pipeline = (0, Pipeline_js_1.newPipeline)(new AnonymousCredential_js_1.AnonymousCredential(), options); - } else { - throw new Error("Connection string must be either an Account connection string or a SAS connection string"); - } - } else { - throw new Error("Expecting non-empty strings for containerName and blobName parameters"); - } - super(url2, pipeline); - this.appendBlobContext = this.storageClientContext.appendBlob; + exports2.GenericPollOperation = GenericPollOperation; + } +}); + +// node_modules/@azure/core-lro/dist/commonjs/legacy/poller.js +var require_poller3 = __commonJS({ + "node_modules/@azure/core-lro/dist/commonjs/legacy/poller.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.Poller = exports2.PollerCancelledError = exports2.PollerStoppedError = void 0; + var PollerStoppedError = class _PollerStoppedError extends Error { + constructor(message) { + super(message); + this.name = "PollerStoppedError"; + Object.setPrototypeOf(this, _PollerStoppedError.prototype); } - /** - * Creates a new AppendBlobClient object identical to the source but with the - * specified snapshot timestamp. - * Provide "" will remove the snapshot and return a Client to the base blob. - * - * @param snapshot - The snapshot timestamp. - * @returns A new AppendBlobClient object identical to the source but with the specified snapshot timestamp. - */ - withSnapshot(snapshot) { - return new _AppendBlobClient((0, utils_common_js_1.setURLParameter)(this.url, constants_js_1.URLConstants.Parameters.SNAPSHOT, snapshot.length === 0 ? void 0 : snapshot), this.pipeline); + }; + exports2.PollerStoppedError = PollerStoppedError; + var PollerCancelledError = class _PollerCancelledError extends Error { + constructor(message) { + super(message); + this.name = "PollerCancelledError"; + Object.setPrototypeOf(this, _PollerCancelledError.prototype); } + }; + exports2.PollerCancelledError = PollerCancelledError; + var Poller = class { /** - * Creates a 0-length append blob. Call AppendBlock to append data to an append blob. - * @see https://learn.microsoft.com/rest/api/storageservices/put-blob + * A poller needs to be initialized by passing in at least the basic properties of the `PollOperation`. * - * @param options - Options to the Append Block Create operation. + * When writing an implementation of a Poller, this implementation needs to deal with the initialization + * of any custom state beyond the basic definition of the poller. The basic poller assumes that the poller's + * operation has already been defined, at least its basic properties. The code below shows how to approach + * the definition of the constructor of a new custom poller. * + * ```ts + * export class MyPoller extends Poller { + * constructor({ + * // Anything you might need outside of the basics + * }) { + * let state: MyOperationState = { + * privateProperty: private, + * publicProperty: public, + * }; * - * Example usage: + * const operation = { + * state, + * update, + * cancel, + * toString + * } * - * ```ts snippet:ClientsCreateAppendBlob - * import { BlobServiceClient } from "@azure/storage-blob"; - * import { DefaultAzureCredential } from "@azure/identity"; + * // Sending the operation to the parent's constructor. + * super(operation); * - * const account = ""; - * const blobServiceClient = new BlobServiceClient( - * `https://${account}.blob.core.windows.net`, - * new DefaultAzureCredential(), - * ); + * // You can assign more local properties here. 
+ * } + * } + * ``` * - * const containerName = ""; - * const blobName = ""; - * const containerClient = blobServiceClient.getContainerClient(containerName); + * Inside of this constructor, a new promise is created. This will be used to + * tell the user when the poller finishes (see `pollUntilDone()`). The promise's + * resolve and reject methods are also used internally to control when to resolve + * or reject anyone waiting for the poller to finish. * - * const appendBlobClient = containerClient.getAppendBlobClient(blobName); - * await appendBlobClient.create(); + * The constructor of a custom implementation of a poller is where any serialized version of + * a previous poller's operation should be deserialized into the operation sent to the + * base constructor. For example: + * + * ```ts + * export class MyPoller extends Poller { + * constructor( + * baseOperation: string | undefined + * ) { + * let state: MyOperationState = {}; + * if (baseOperation) { + * state = { + * ...JSON.parse(baseOperation).state, + * ...state + * }; + * } + * const operation = { + * state, + * // ... + * } + * super(operation); + * } + * } * ``` + * + * @param operation - Must contain the basic properties of `PollOperation`. */ - async create(options = {}) { - options.conditions = options.conditions || {}; - (0, models_js_1.ensureCpkIfSpecified)(options.customerProvidedKey, this.isHttps); - return tracing_js_1.tracingClient.withSpan("AppendBlobClient-create", options, async (updatedOptions) => { - return (0, utils_common_js_1.assertResponse)(await this.appendBlobContext.create(0, { - abortSignal: options.abortSignal, - blobHttpHeaders: options.blobHTTPHeaders, - leaseAccessConditions: options.conditions, - metadata: options.metadata, - modifiedAccessConditions: { - ...options.conditions, - ifTags: options.conditions?.tagConditions - }, - cpkInfo: options.customerProvidedKey, - encryptionScope: options.encryptionScope, - immutabilityPolicyExpiry: options.immutabilityPolicy?.expiriesOn, - immutabilityPolicyMode: options.immutabilityPolicy?.policyMode, - legalHold: options.legalHold, - blobTagsString: (0, utils_common_js_1.toBlobTagsString)(options.tags), - tracingOptions: updatedOptions.tracingOptions - })); + constructor(operation) { + this.resolveOnUnsuccessful = false; + this.stopped = true; + this.pollProgressCallbacks = []; + this.operation = operation; + this.promise = new Promise((resolve6, reject) => { + this.resolve = resolve6; + this.reject = reject; + }); + this.promise.catch(() => { }); } /** - * Creates a 0-length append blob. Call AppendBlock to append data to an append blob. - * If the blob with the same name already exists, the content of the existing blob will remain unchanged. - * @see https://learn.microsoft.com/rest/api/storageservices/put-blob - * - * @param options - + * Starts a loop that will break only if the poller is done + * or if the poller is stopped. 
*/ - async createIfNotExists(options = {}) { - const conditions = { ifNoneMatch: constants_js_1.ETagAny }; - return tracing_js_1.tracingClient.withSpan("AppendBlobClient-createIfNotExists", options, async (updatedOptions) => { - try { - const res = (0, utils_common_js_1.assertResponse)(await this.create({ - ...updatedOptions, - conditions - })); - return { - succeeded: true, - ...res, - _response: res._response - // _response is made non-enumerable - }; - } catch (e) { - if (e.details?.errorCode === "BlobAlreadyExists") { - return { - succeeded: false, - ...e.response?.parsedHeaders, - _response: e.response - }; - } - throw e; - } - }); + async startPolling(pollOptions = {}) { + if (this.stopped) { + this.stopped = false; + } + while (!this.isStopped() && !this.isDone()) { + await this.poll(pollOptions); + await this.delay(); + } } /** - * Seals the append blob, making it read only. + * pollOnce does one polling, by calling to the update method of the underlying + * poll operation to make any relevant change effective. * - * @param options - + * It only optionally receives an object with an abortSignal property, from \@azure/abort-controller's AbortSignalLike. + * + * @param options - Optional properties passed to the operation's update method. */ - async seal(options = {}) { - options.conditions = options.conditions || {}; - return tracing_js_1.tracingClient.withSpan("AppendBlobClient-seal", options, async (updatedOptions) => { - return (0, utils_common_js_1.assertResponse)(await this.appendBlobContext.seal({ + async pollOnce(options = {}) { + if (!this.isDone()) { + this.operation = await this.operation.update({ abortSignal: options.abortSignal, - appendPositionAccessConditions: options.conditions, - leaseAccessConditions: options.conditions, - modifiedAccessConditions: { - ...options.conditions, - ifTags: options.conditions?.tagConditions - }, - tracingOptions: updatedOptions.tracingOptions - })); - }); + fireProgress: this.fireProgress.bind(this) + }); + } + this.processUpdatedState(); } /** - * Commits a new block of data to the end of the existing append blob. - * @see https://learn.microsoft.com/rest/api/storageservices/append-block - * - * @param body - Data to be appended. - * @param contentLength - Length of the body in bytes. - * @param options - Options to the Append Block operation. - * - * - * Example usage: - * - * ```ts snippet:ClientsAppendBlock - * import { BlobServiceClient } from "@azure/storage-blob"; - * import { DefaultAzureCredential } from "@azure/identity"; - * - * const account = ""; - * const blobServiceClient = new BlobServiceClient( - * `https://${account}.blob.core.windows.net`, - * new DefaultAzureCredential(), - * ); + * fireProgress calls the functions passed in via onProgress the method of the poller. * - * const containerName = ""; - * const blobName = ""; - * const containerClient = blobServiceClient.getContainerClient(containerName); + * It loops over all of the callbacks received from onProgress, and executes them, sending them + * the current operation state. * - * const content = "Hello World!"; + * @param state - The current operation state. + */ + fireProgress(state) { + for (const callback of this.pollProgressCallbacks) { + callback(state); + } + } + /** + * Invokes the underlying operation's cancel method. + */ + async cancelOnce(options = {}) { + this.operation = await this.operation.cancel(options); + } + /** + * Returns a promise that will resolve once a single polling request finishes. 
+ * It does this by calling the update method of the Poller's operation. * - * // Create a new append blob and append data to the blob. - * const newAppendBlobClient = containerClient.getAppendBlobClient(blobName); - * await newAppendBlobClient.create(); - * await newAppendBlobClient.appendBlock(content, content.length); + * It only optionally receives an object with an abortSignal property, from \@azure/abort-controller's AbortSignalLike. * - * // Append data to an existing append blob. - * const existingAppendBlobClient = containerClient.getAppendBlobClient(blobName); - * await existingAppendBlobClient.appendBlock(content, content.length); - * ``` + * @param options - Optional properties passed to the operation's update method. */ - async appendBlock(body, contentLength, options = {}) { - options.conditions = options.conditions || {}; - (0, models_js_1.ensureCpkIfSpecified)(options.customerProvidedKey, this.isHttps); - return tracing_js_1.tracingClient.withSpan("AppendBlobClient-appendBlock", options, async (updatedOptions) => { - return (0, utils_common_js_1.assertResponse)(await this.appendBlobContext.appendBlock(contentLength, body, { - abortSignal: options.abortSignal, - appendPositionAccessConditions: options.conditions, - leaseAccessConditions: options.conditions, - modifiedAccessConditions: { - ...options.conditions, - ifTags: options.conditions?.tagConditions - }, - requestOptions: { - onUploadProgress: options.onProgress - }, - transactionalContentMD5: options.transactionalContentMD5, - transactionalContentCrc64: options.transactionalContentCrc64, - cpkInfo: options.customerProvidedKey, - encryptionScope: options.encryptionScope, - tracingOptions: updatedOptions.tracingOptions - })); - }); + poll(options = {}) { + if (!this.pollOncePromise) { + this.pollOncePromise = this.pollOnce(options); + const clearPollOncePromise = () => { + this.pollOncePromise = void 0; + }; + this.pollOncePromise.then(clearPollOncePromise, clearPollOncePromise).catch(this.reject); + } + return this.pollOncePromise; + } + processUpdatedState() { + if (this.operation.state.error) { + this.stopped = true; + if (!this.resolveOnUnsuccessful) { + this.reject(this.operation.state.error); + throw this.operation.state.error; + } + } + if (this.operation.state.isCancelled) { + this.stopped = true; + if (!this.resolveOnUnsuccessful) { + const error3 = new PollerCancelledError("Operation was canceled"); + this.reject(error3); + throw error3; + } + } + if (this.isDone() && this.resolve) { + this.resolve(this.getResult()); + } } /** - * The Append Block operation commits a new block of data to the end of an existing append blob - * where the contents are read from a source url. - * @see https://learn.microsoft.com/rest/api/storageservices/append-block-from-url - * - * @param sourceURL - - * The url to the blob that will be the source of the copy. A source blob in the same storage account can - * be authenticated via Shared Key. However, if the source is a blob in another account, the source blob - * must either be public or must be authenticated via a shared access signature. If the source blob is - * public, no authentication is required to perform the operation. - * @param sourceOffset - Offset in source to be appended - * @param count - Number of bytes to be appended as a block - * @param options - + * Returns a promise that will resolve once the underlying operation is completed. 
*/ - async appendBlockFromURL(sourceURL, sourceOffset, count, options = {}) { - options.conditions = options.conditions || {}; - options.sourceConditions = options.sourceConditions || {}; - (0, models_js_1.ensureCpkIfSpecified)(options.customerProvidedKey, this.isHttps); - return tracing_js_1.tracingClient.withSpan("AppendBlobClient-appendBlockFromURL", options, async (updatedOptions) => { - return (0, utils_common_js_1.assertResponse)(await this.appendBlobContext.appendBlockFromUrl(sourceURL, 0, { - abortSignal: options.abortSignal, - sourceRange: (0, Range_js_1.rangeToString)({ offset: sourceOffset, count }), - sourceContentMD5: options.sourceContentMD5, - sourceContentCrc64: options.sourceContentCrc64, - leaseAccessConditions: options.conditions, - appendPositionAccessConditions: options.conditions, - modifiedAccessConditions: { - ...options.conditions, - ifTags: options.conditions?.tagConditions - }, - sourceModifiedAccessConditions: { - sourceIfMatch: options.sourceConditions?.ifMatch, - sourceIfModifiedSince: options.sourceConditions?.ifModifiedSince, - sourceIfNoneMatch: options.sourceConditions?.ifNoneMatch, - sourceIfUnmodifiedSince: options.sourceConditions?.ifUnmodifiedSince - }, - copySourceAuthorization: (0, utils_common_js_1.httpAuthorizationToString)(options.sourceAuthorization), - cpkInfo: options.customerProvidedKey, - encryptionScope: options.encryptionScope, - fileRequestIntent: options.sourceShareTokenIntent, - tracingOptions: updatedOptions.tracingOptions - })); - }); + async pollUntilDone(pollOptions = {}) { + if (this.stopped) { + this.startPolling(pollOptions).catch(this.reject); + } + this.processUpdatedState(); + return this.promise; } - }; - exports2.AppendBlobClient = AppendBlobClient; - var BlockBlobClient = class _BlockBlobClient extends BlobClient { /** - * blobContext provided by protocol layer. + * Invokes the provided callback after each polling is completed, + * sending the current state of the poller's operation. * - * Note. Ideally BlobClient should set BlobClient.blobContext to protected. However, API - * extractor has issue blocking that. Here we redecelare _blobContext in BlockBlobClient. + * It returns a method that can be used to stop receiving updates on the given callback function. */ - _blobContext; + onProgress(callback) { + this.pollProgressCallbacks.push(callback); + return () => { + this.pollProgressCallbacks = this.pollProgressCallbacks.filter((c) => c !== callback); + }; + } /** - * blockBlobContext provided by protocol layer. + * Returns true if the poller has finished polling. 
*/ - blockBlobContext; - constructor(urlOrConnectionString, credentialOrPipelineOrContainerName, blobNameOrOptions, options) { - let pipeline; - let url2; - options = options || {}; - if ((0, Pipeline_js_1.isPipelineLike)(credentialOrPipelineOrContainerName)) { - url2 = urlOrConnectionString; - pipeline = credentialOrPipelineOrContainerName; - } else if (core_util_1.isNodeLike && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential_js_1.StorageSharedKeyCredential || credentialOrPipelineOrContainerName instanceof AnonymousCredential_js_1.AnonymousCredential || (0, core_auth_1.isTokenCredential)(credentialOrPipelineOrContainerName)) { - url2 = urlOrConnectionString; - options = blobNameOrOptions; - pipeline = (0, Pipeline_js_1.newPipeline)(credentialOrPipelineOrContainerName, options); - } else if (!credentialOrPipelineOrContainerName && typeof credentialOrPipelineOrContainerName !== "string") { - url2 = urlOrConnectionString; - if (blobNameOrOptions && typeof blobNameOrOptions !== "string") { - options = blobNameOrOptions; - } - pipeline = (0, Pipeline_js_1.newPipeline)(new AnonymousCredential_js_1.AnonymousCredential(), options); - } else if (credentialOrPipelineOrContainerName && typeof credentialOrPipelineOrContainerName === "string" && blobNameOrOptions && typeof blobNameOrOptions === "string") { - const containerName = credentialOrPipelineOrContainerName; - const blobName = blobNameOrOptions; - const extractedCreds = (0, utils_common_js_1.extractConnectionStringParts)(urlOrConnectionString); - if (extractedCreds.kind === "AccountConnString") { - if (core_util_1.isNodeLike) { - const sharedKeyCredential = new StorageSharedKeyCredential_js_1.StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); - url2 = (0, utils_common_js_1.appendToURLPath)((0, utils_common_js_1.appendToURLPath)(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)); - if (!options.proxyOptions) { - options.proxyOptions = (0, core_rest_pipeline_1.getDefaultProxySettings)(extractedCreds.proxyUri); - } - pipeline = (0, Pipeline_js_1.newPipeline)(sharedKeyCredential, options); - } else { - throw new Error("Account connection string is only supported in Node.js environment"); - } - } else if (extractedCreds.kind === "SASConnString") { - url2 = (0, utils_common_js_1.appendToURLPath)((0, utils_common_js_1.appendToURLPath)(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)) + "?" + extractedCreds.accountSas; - pipeline = (0, Pipeline_js_1.newPipeline)(new AnonymousCredential_js_1.AnonymousCredential(), options); - } else { - throw new Error("Connection string must be either an Account connection string or a SAS connection string"); - } - } else { - throw new Error("Expecting non-empty strings for containerName and blobName parameters"); + isDone() { + const state = this.operation.state; + return Boolean(state.isCompleted || state.isCancelled || state.error); + } + /** + * Stops the poller from continuing to poll. + */ + stopPolling() { + if (!this.stopped) { + this.stopped = true; + if (this.reject) { + this.reject(new PollerStoppedError("This poller is already stopped")); + } } - super(url2, pipeline); - this.blockBlobContext = this.storageClientContext.blockBlob; - this._blobContext = this.storageClientContext.blob; } /** - * Creates a new BlockBlobClient object identical to the source but with the - * specified snapshot timestamp. - * Provide "" will remove the snapshot and return a URL to the base blob. 
- * - * @param snapshot - The snapshot timestamp. - * @returns A new BlockBlobClient object identical to the source but with the specified snapshot timestamp. + * Returns true if the poller is stopped. */ - withSnapshot(snapshot) { - return new _BlockBlobClient((0, utils_common_js_1.setURLParameter)(this.url, constants_js_1.URLConstants.Parameters.SNAPSHOT, snapshot.length === 0 ? void 0 : snapshot), this.pipeline); + isStopped() { + return this.stopped; } /** - * ONLY AVAILABLE IN NODE.JS RUNTIME. - * - * Quick query for a JSON or CSV formatted blob. - * - * Example usage (Node.js): - * - * ```ts snippet:ClientsQuery - * import { BlobServiceClient } from "@azure/storage-blob"; - * import { DefaultAzureCredential } from "@azure/identity"; - * - * const account = ""; - * const blobServiceClient = new BlobServiceClient( - * `https://${account}.blob.core.windows.net`, - * new DefaultAzureCredential(), - * ); - * - * const containerName = ""; - * const blobName = ""; - * const containerClient = blobServiceClient.getContainerClient(containerName); - * const blockBlobClient = containerClient.getBlockBlobClient(blobName); + * Attempts to cancel the underlying operation. * - * // Query and convert a blob to a string - * const queryBlockBlobResponse = await blockBlobClient.query("select from BlobStorage"); - * if (queryBlockBlobResponse.readableStreamBody) { - * const downloadedBuffer = await streamToBuffer(queryBlockBlobResponse.readableStreamBody); - * const downloaded = downloadedBuffer.toString(); - * console.log(`Query blob content: ${downloaded}`); - * } + * It only optionally receives an object with an abortSignal property, from \@azure/abort-controller's AbortSignalLike. * - * async function streamToBuffer(readableStream: NodeJS.ReadableStream): Promise { - * return new Promise((resolve, reject) => { - * const chunks: Buffer[] = []; - * readableStream.on("data", (data) => { - * chunks.push(data instanceof Buffer ? data : Buffer.from(data)); - * }); - * readableStream.on("end", () => { - * resolve(Buffer.concat(chunks)); - * }); - * readableStream.on("error", reject); - * }); - * } - * ``` + * If it's called again before it finishes, it will throw an error. * - * @param query - - * @param options - + * @param options - Optional properties passed to the operation's update method. 
*/ - async query(query, options = {}) { - (0, models_js_1.ensureCpkIfSpecified)(options.customerProvidedKey, this.isHttps); - if (!core_util_1.isNodeLike) { - throw new Error("This operation currently is only supported in Node.js."); + cancelOperation(options = {}) { + if (!this.cancelPromise) { + this.cancelPromise = this.cancelOnce(options); + } else if (options.abortSignal) { + throw new Error("A cancel request is currently pending"); } - return tracing_js_1.tracingClient.withSpan("BlockBlobClient-query", options, async (updatedOptions) => { - const response = (0, utils_common_js_1.assertResponse)(await this._blobContext.query({ - abortSignal: options.abortSignal, - queryRequest: { - queryType: "SQL", - expression: query, - inputSerialization: (0, utils_common_js_1.toQuerySerialization)(options.inputTextConfiguration), - outputSerialization: (0, utils_common_js_1.toQuerySerialization)(options.outputTextConfiguration) - }, - leaseAccessConditions: options.conditions, - modifiedAccessConditions: { - ...options.conditions, - ifTags: options.conditions?.tagConditions - }, - cpkInfo: options.customerProvidedKey, - tracingOptions: updatedOptions.tracingOptions - })); - return new BlobQueryResponse_js_1.BlobQueryResponse(response, { - abortSignal: options.abortSignal, - onProgress: options.onProgress, - onError: options.onError - }); - }); + return this.cancelPromise; } /** - * Creates a new block blob, or updates the content of an existing block blob. - * Updating an existing block blob overwrites any existing metadata on the blob. - * Partial updates are not supported; the content of the existing blob is - * overwritten with the new content. To perform a partial update of a block blob's, - * use {@link stageBlock} and {@link commitBlockList}. - * - * This is a non-parallel uploading method, please use {@link uploadFile}, - * {@link uploadStream} or {@link uploadBrowserData} for better performance - * with concurrency uploading. + * Returns the state of the operation. * - * @see https://learn.microsoft.com/rest/api/storageservices/put-blob + * Even though TState will be the same type inside any of the methods of any extension of the Poller class, + * implementations of the pollers can customize what's shared with the public by writing their own + * version of the `getOperationState` method, and by defining two types, one representing the internal state of the poller + * and a public type representing a safe to share subset of the properties of the internal state. + * Their definition of getOperationState can then return their public type. * - * @param body - Blob, string, ArrayBuffer, ArrayBufferView or a function - * which returns a new Readable stream whose offset is from data source beginning. - * @param contentLength - Length of body in bytes. Use Buffer.byteLength() to calculate body length for a - * string including non non-Base64/Hex-encoded characters. - * @param options - Options to the Block Blob Upload operation. - * @returns Response data for the Block Blob Upload operation. 
+ * Example: * - * Example usage: + * ```ts + * // Let's say we have our poller's operation state defined as: + * interface MyOperationState extends PollOperationState { + * privateProperty?: string; + * publicProperty?: string; + * } * - * ```ts snippet:ClientsUpload - * import { BlobServiceClient } from "@azure/storage-blob"; - * import { DefaultAzureCredential } from "@azure/identity"; + * // To allow us to have a true separation of public and private state, we have to define another interface: + * interface PublicState extends PollOperationState { + * publicProperty?: string; + * } * - * const account = ""; - * const blobServiceClient = new BlobServiceClient( - * `https://${account}.blob.core.windows.net`, - * new DefaultAzureCredential(), - * ); + * // Then, we define our Poller as follows: + * export class MyPoller extends Poller { + * // ... More content is needed here ... * - * const containerName = ""; - * const blobName = ""; - * const containerClient = blobServiceClient.getContainerClient(containerName); - * const blockBlobClient = containerClient.getBlockBlobClient(blobName); + * public getOperationState(): PublicState { + * const state: PublicState = this.operation.state; + * return { + * // Properties from PollOperationState + * isStarted: state.isStarted, + * isCompleted: state.isCompleted, + * isCancelled: state.isCancelled, + * error: state.error, + * result: state.result, * - * const content = "Hello world!"; - * const uploadBlobResponse = await blockBlobClient.upload(content, content.length); + * // The only other property needed by PublicState. + * publicProperty: state.publicProperty + * } + * } + * } * ``` + * + * You can see this in the tests of this repository, go to the file: + * `../test/utils/testPoller.ts` + * and look for the getOperationState implementation. */ - async upload(body, contentLength, options = {}) { - options.conditions = options.conditions || {}; - (0, models_js_1.ensureCpkIfSpecified)(options.customerProvidedKey, this.isHttps); - return tracing_js_1.tracingClient.withSpan("BlockBlobClient-upload", options, async (updatedOptions) => { - return (0, utils_common_js_1.assertResponse)(await this.blockBlobContext.upload(contentLength, body, { - abortSignal: options.abortSignal, - blobHttpHeaders: options.blobHTTPHeaders, - leaseAccessConditions: options.conditions, - metadata: options.metadata, - modifiedAccessConditions: { - ...options.conditions, - ifTags: options.conditions?.tagConditions - }, - requestOptions: { - onUploadProgress: options.onProgress - }, - cpkInfo: options.customerProvidedKey, - encryptionScope: options.encryptionScope, - immutabilityPolicyExpiry: options.immutabilityPolicy?.expiriesOn, - immutabilityPolicyMode: options.immutabilityPolicy?.policyMode, - legalHold: options.legalHold, - tier: (0, models_js_1.toAccessTier)(options.tier), - blobTagsString: (0, utils_common_js_1.toBlobTagsString)(options.tags), - tracingOptions: updatedOptions.tracingOptions - })); - }); + getOperationState() { + return this.operation.state; } /** - * Creates a new Block Blob where the contents of the blob are read from a given URL. - * This API is supported beginning with the 2020-04-08 version. Partial updates - * are not supported with Put Blob from URL; the content of an existing blob is overwritten with - * the content of the new blob. To perform partial updates to a block blob’s contents using a - * source URL, use {@link stageBlockFromURL} and {@link commitBlockList}. - * - * @param sourceURL - Specifies the URL of the blob. 
The value - * may be a URL of up to 2 KB in length that specifies a blob. - * The value should be URL-encoded as it would appear - * in a request URI. The source blob must either be public - * or must be authenticated via a shared access signature. - * If the source blob is public, no authentication is required - * to perform the operation. Here are some examples of source object URLs: - * - https://myaccount.blob.core.windows.net/mycontainer/myblob - * - https://myaccount.blob.core.windows.net/mycontainer/myblob?snapshot= - * @param options - Optional parameters. + * Returns the result value of the operation, + * regardless of the state of the poller. + * It can return undefined or an incomplete form of the final TResult value + * depending on the implementation. */ - async syncUploadFromURL(sourceURL, options = {}) { - options.conditions = options.conditions || {}; - (0, models_js_1.ensureCpkIfSpecified)(options.customerProvidedKey, this.isHttps); - return tracing_js_1.tracingClient.withSpan("BlockBlobClient-syncUploadFromURL", options, async (updatedOptions) => { - return (0, utils_common_js_1.assertResponse)(await this.blockBlobContext.putBlobFromUrl(0, sourceURL, { - ...options, - blobHttpHeaders: options.blobHTTPHeaders, - leaseAccessConditions: options.conditions, - modifiedAccessConditions: { - ...options.conditions, - ifTags: options.conditions?.tagConditions - }, - sourceModifiedAccessConditions: { - sourceIfMatch: options.sourceConditions?.ifMatch, - sourceIfModifiedSince: options.sourceConditions?.ifModifiedSince, - sourceIfNoneMatch: options.sourceConditions?.ifNoneMatch, - sourceIfUnmodifiedSince: options.sourceConditions?.ifUnmodifiedSince, - sourceIfTags: options.sourceConditions?.tagConditions - }, - cpkInfo: options.customerProvidedKey, - copySourceAuthorization: (0, utils_common_js_1.httpAuthorizationToString)(options.sourceAuthorization), - tier: (0, models_js_1.toAccessTier)(options.tier), - blobTagsString: (0, utils_common_js_1.toBlobTagsString)(options.tags), - copySourceTags: options.copySourceTags, - fileRequestIntent: options.sourceShareTokenIntent, - tracingOptions: updatedOptions.tracingOptions - })); - }); + getResult() { + const state = this.operation.state; + return state.result; } /** - * Uploads the specified block to the block blob's "staging area" to be later - * committed by a call to commitBlockList. - * @see https://learn.microsoft.com/rest/api/storageservices/put-block - * - * @param blockId - A 64-byte value that is base64-encoded - * @param body - Data to upload to the staging area. - * @param contentLength - Number of bytes to upload. - * @param options - Options to the Block Blob Stage Block operation. - * @returns Response data for the Block Blob Stage Block operation. + * Returns a serialized version of the poller's operation + * by invoking the operation's toString method. 
*/ - async stageBlock(blockId, body, contentLength, options = {}) { - (0, models_js_1.ensureCpkIfSpecified)(options.customerProvidedKey, this.isHttps); - return tracing_js_1.tracingClient.withSpan("BlockBlobClient-stageBlock", options, async (updatedOptions) => { - return (0, utils_common_js_1.assertResponse)(await this.blockBlobContext.stageBlock(blockId, contentLength, body, { - abortSignal: options.abortSignal, - leaseAccessConditions: options.conditions, - requestOptions: { - onUploadProgress: options.onProgress - }, - transactionalContentMD5: options.transactionalContentMD5, - transactionalContentCrc64: options.transactionalContentCrc64, - cpkInfo: options.customerProvidedKey, - encryptionScope: options.encryptionScope, - tracingOptions: updatedOptions.tracingOptions - })); + toString() { + return this.operation.toString(); + } + }; + exports2.Poller = Poller; + } +}); + +// node_modules/@azure/core-lro/dist/commonjs/legacy/lroEngine/lroEngine.js +var require_lroEngine = __commonJS({ + "node_modules/@azure/core-lro/dist/commonjs/legacy/lroEngine/lroEngine.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.LroEngine = void 0; + var operation_js_1 = require_operation3(); + var constants_js_1 = require_constants17(); + var poller_js_1 = require_poller3(); + var operation_js_2 = require_operation(); + var LroEngine = class extends poller_js_1.Poller { + constructor(lro, options) { + const { intervalInMs = constants_js_1.POLL_INTERVAL_IN_MS, resumeFrom, resolveOnUnsuccessful = false, isDone, lroResourceLocationConfig, processResult, updateState } = options || {}; + const state = resumeFrom ? (0, operation_js_2.deserializeState)(resumeFrom) : {}; + const operation = new operation_js_1.GenericPollOperation(state, lro, !resolveOnUnsuccessful, lroResourceLocationConfig, processResult, updateState, isDone); + super(operation); + this.resolveOnUnsuccessful = resolveOnUnsuccessful; + this.config = { intervalInMs }; + operation.setPollerConfig(this.config); + } + /** + * The method used by the poller to wait before attempting to update its operation. 
+ */ + delay() { + return new Promise((resolve6) => setTimeout(() => resolve6(), this.config.intervalInMs)); + } + }; + exports2.LroEngine = LroEngine; + } +}); + +// node_modules/@azure/core-lro/dist/commonjs/legacy/lroEngine/index.js +var require_lroEngine2 = __commonJS({ + "node_modules/@azure/core-lro/dist/commonjs/legacy/lroEngine/index.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.LroEngine = void 0; + var lroEngine_js_1 = require_lroEngine(); + Object.defineProperty(exports2, "LroEngine", { enumerable: true, get: function() { + return lroEngine_js_1.LroEngine; + } }); + } +}); + +// node_modules/@azure/core-lro/dist/commonjs/legacy/pollOperation.js +var require_pollOperation = __commonJS({ + "node_modules/@azure/core-lro/dist/commonjs/legacy/pollOperation.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + } +}); + +// node_modules/@azure/core-lro/dist/commonjs/index.js +var require_commonjs14 = __commonJS({ + "node_modules/@azure/core-lro/dist/commonjs/index.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.createHttpPoller = void 0; + var tslib_1 = (init_tslib_es6(), __toCommonJS(tslib_es6_exports)); + var poller_js_1 = require_poller2(); + Object.defineProperty(exports2, "createHttpPoller", { enumerable: true, get: function() { + return poller_js_1.createHttpPoller; + } }); + tslib_1.__exportStar(require_lroEngine2(), exports2); + tslib_1.__exportStar(require_poller3(), exports2); + tslib_1.__exportStar(require_pollOperation(), exports2); + } +}); + +// node_modules/@azure/storage-blob/dist/commonjs/pollers/BlobStartCopyFromUrlPoller.js +var require_BlobStartCopyFromUrlPoller = __commonJS({ + "node_modules/@azure/storage-blob/dist/commonjs/pollers/BlobStartCopyFromUrlPoller.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.BlobBeginCopyFromUrlPoller = void 0; + var core_util_1 = require_commonjs4(); + var core_lro_1 = require_commonjs14(); + var BlobBeginCopyFromUrlPoller = class extends core_lro_1.Poller { + intervalInMs; + constructor(options) { + const { blobClient, copySource, intervalInMs = 15e3, onProgress, resumeFrom, startCopyFromURLOptions } = options; + let state; + if (resumeFrom) { + state = JSON.parse(resumeFrom).state; + } + const operation = makeBlobBeginCopyFromURLPollOperation({ + ...state, + blobClient, + copySource, + startCopyFromURLOptions }); + super(operation); + if (typeof onProgress === "function") { + this.onProgress(onProgress); + } + this.intervalInMs = intervalInMs; + } + delay() { + return (0, core_util_1.delay)(this.intervalInMs); + } + }; + exports2.BlobBeginCopyFromUrlPoller = BlobBeginCopyFromUrlPoller; + var cancel = async function cancel2(options = {}) { + const state = this.state; + const { copyId } = state; + if (state.isCompleted) { + return makeBlobBeginCopyFromURLPollOperation(state); + } + if (!copyId) { + state.isCancelled = true; + return makeBlobBeginCopyFromURLPollOperation(state); + } + await state.blobClient.abortCopyFromURL(copyId, { + abortSignal: options.abortSignal + }); + state.isCancelled = true; + return makeBlobBeginCopyFromURLPollOperation(state); + }; + var update = async function update2(options = {}) { + const state = this.state; + const { blobClient, copySource, startCopyFromURLOptions } = state; + if (!state.isStarted) { + state.isStarted = true; + const result = await 
blobClient.startCopyFromURL(copySource, startCopyFromURLOptions); + state.copyId = result.copyId; + if (result.copyStatus === "success") { + state.result = result; + state.isCompleted = true; + } + } else if (!state.isCompleted) { + try { + const result = await state.blobClient.getProperties({ abortSignal: options.abortSignal }); + const { copyStatus, copyProgress } = result; + const prevCopyProgress = state.copyProgress; + if (copyProgress) { + state.copyProgress = copyProgress; + } + if (copyStatus === "pending" && copyProgress !== prevCopyProgress && typeof options.fireProgress === "function") { + options.fireProgress(state); + } else if (copyStatus === "success") { + state.result = result; + state.isCompleted = true; + } else if (copyStatus === "failed") { + state.error = new Error(`Blob copy failed with reason: "${result.copyStatusDescription || "unknown"}"`); + state.isCompleted = true; + } + } catch (err) { + state.error = err; + state.isCompleted = true; + } + } + return makeBlobBeginCopyFromURLPollOperation(state); + }; + var toString3 = function toString4() { + return JSON.stringify({ state: this.state }, (key, value) => { + if (key === "blobClient") { + return void 0; + } + return value; + }); + }; + function makeBlobBeginCopyFromURLPollOperation(state) { + return { + state: { ...state }, + cancel, + toString: toString3, + update + }; + } + } +}); + +// node_modules/@azure/storage-blob/dist/commonjs/Range.js +var require_Range = __commonJS({ + "node_modules/@azure/storage-blob/dist/commonjs/Range.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.rangeToString = rangeToString; + function rangeToString(iRange) { + if (iRange.offset < 0) { + throw new RangeError(`Range.offset cannot be smaller than 0.`); } + if (iRange.count && iRange.count <= 0) { + throw new RangeError(`Range.count must be larger than 0. Leave it undefined if you want a range from offset to the end.`); + } + return iRange.count ? `bytes=${iRange.offset}-${iRange.offset + iRange.count - 1}` : `bytes=${iRange.offset}-`; + } + } +}); + +// node_modules/@azure/storage-blob/dist/commonjs/utils/Batch.js +var require_Batch = __commonJS({ + "node_modules/@azure/storage-blob/dist/commonjs/utils/Batch.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.Batch = void 0; + var events_1 = require("events"); + var BatchStates; + (function(BatchStates2) { + BatchStates2[BatchStates2["Good"] = 0] = "Good"; + BatchStates2[BatchStates2["Error"] = 1] = "Error"; + })(BatchStates || (BatchStates = {})); + var Batch = class { /** - * The Stage Block From URL operation creates a new block to be committed as part - * of a blob where the contents are read from a URL. - * This API is available starting in version 2018-03-28. - * @see https://learn.microsoft.com/rest/api/storageservices/put-block-from-url - * - * @param blockId - A 64-byte value that is base64-encoded - * @param sourceURL - Specifies the URL of the blob. The value - * may be a URL of up to 2 KB in length that specifies a blob. - * The value should be URL-encoded as it would appear - * in a request URI. The source blob must either be public - * or must be authenticated via a shared access signature. - * If the source blob is public, no authentication is required - * to perform the operation. 
Here are some examples of source object URLs: - * - https://myaccount.blob.core.windows.net/mycontainer/myblob - * - https://myaccount.blob.core.windows.net/mycontainer/myblob?snapshot= - * @param offset - From which position of the blob to download, greater than or equal to 0 - * @param count - How much data to be downloaded, greater than 0. Will download to the end when undefined - * @param options - Options to the Block Blob Stage Block From URL operation. - * @returns Response data for the Block Blob Stage Block From URL operation. + * Concurrency. Must be lager than 0. + */ + concurrency; + /** + * Number of active operations under execution. + */ + actives = 0; + /** + * Number of completed operations under execution. + */ + completed = 0; + /** + * Offset of next operation to be executed. + */ + offset = 0; + /** + * Operation array to be executed. */ - async stageBlockFromURL(blockId, sourceURL, offset = 0, count, options = {}) { - (0, models_js_1.ensureCpkIfSpecified)(options.customerProvidedKey, this.isHttps); - return tracing_js_1.tracingClient.withSpan("BlockBlobClient-stageBlockFromURL", options, async (updatedOptions) => { - return (0, utils_common_js_1.assertResponse)(await this.blockBlobContext.stageBlockFromURL(blockId, 0, sourceURL, { - abortSignal: options.abortSignal, - leaseAccessConditions: options.conditions, - sourceContentMD5: options.sourceContentMD5, - sourceContentCrc64: options.sourceContentCrc64, - sourceRange: offset === 0 && !count ? void 0 : (0, Range_js_1.rangeToString)({ offset, count }), - cpkInfo: options.customerProvidedKey, - encryptionScope: options.encryptionScope, - copySourceAuthorization: (0, utils_common_js_1.httpAuthorizationToString)(options.sourceAuthorization), - fileRequestIntent: options.sourceShareTokenIntent, - tracingOptions: updatedOptions.tracingOptions - })); - }); - } + operations = []; /** - * Writes a blob by specifying the list of block IDs that make up the blob. - * In order to be written as part of a blob, a block must have been successfully written - * to the server in a prior {@link stageBlock} operation. You can call {@link commitBlockList} to - * update a blob by uploading only those blocks that have changed, then committing the new and existing - * blocks together. Any blocks not specified in the block list and permanently deleted. - * @see https://learn.microsoft.com/rest/api/storageservices/put-block-list - * - * @param blocks - Array of 64-byte value that is base64-encoded - * @param options - Options to the Block Blob Commit Block List operation. - * @returns Response data for the Block Blob Commit Block List operation. + * States of Batch. When an error happens, state will turn into error. + * Batch will stop execute left operations. 
*/ - async commitBlockList(blocks, options = {}) { - options.conditions = options.conditions || {}; - (0, models_js_1.ensureCpkIfSpecified)(options.customerProvidedKey, this.isHttps); - return tracing_js_1.tracingClient.withSpan("BlockBlobClient-commitBlockList", options, async (updatedOptions) => { - return (0, utils_common_js_1.assertResponse)(await this.blockBlobContext.commitBlockList({ latest: blocks }, { - abortSignal: options.abortSignal, - blobHttpHeaders: options.blobHTTPHeaders, - leaseAccessConditions: options.conditions, - metadata: options.metadata, - modifiedAccessConditions: { - ...options.conditions, - ifTags: options.conditions?.tagConditions - }, - cpkInfo: options.customerProvidedKey, - encryptionScope: options.encryptionScope, - immutabilityPolicyExpiry: options.immutabilityPolicy?.expiriesOn, - immutabilityPolicyMode: options.immutabilityPolicy?.policyMode, - legalHold: options.legalHold, - tier: (0, models_js_1.toAccessTier)(options.tier), - blobTagsString: (0, utils_common_js_1.toBlobTagsString)(options.tags), - tracingOptions: updatedOptions.tracingOptions - })); - }); + state = BatchStates.Good; + /** + * A private emitter used to pass events inside this class. + */ + emitter; + /** + * Creates an instance of Batch. + * @param concurrency - + */ + constructor(concurrency = 5) { + if (concurrency < 1) { + throw new RangeError("concurrency must be larger than 0"); + } + this.concurrency = concurrency; + this.emitter = new events_1.EventEmitter(); } /** - * Returns the list of blocks that have been uploaded as part of a block blob - * using the specified block list filter. - * @see https://learn.microsoft.com/rest/api/storageservices/get-block-list + * Add a operation into queue. * - * @param listType - Specifies whether to return the list of committed blocks, - * the list of uncommitted blocks, or both lists together. - * @param options - Options to the Block Blob Get Block List operation. - * @returns Response data for the Block Blob Get Block List operation. + * @param operation - */ - async getBlockList(listType, options = {}) { - return tracing_js_1.tracingClient.withSpan("BlockBlobClient-getBlockList", options, async (updatedOptions) => { - const res = (0, utils_common_js_1.assertResponse)(await this.blockBlobContext.getBlockList(listType, { - abortSignal: options.abortSignal, - leaseAccessConditions: options.conditions, - modifiedAccessConditions: { - ...options.conditions, - ifTags: options.conditions?.tagConditions - }, - tracingOptions: updatedOptions.tracingOptions - })); - if (!res.committedBlocks) { - res.committedBlocks = []; - } - if (!res.uncommittedBlocks) { - res.uncommittedBlocks = []; + addOperation(operation) { + this.operations.push(async () => { + try { + this.actives++; + await operation(); + this.actives--; + this.completed++; + this.parallelExecute(); + } catch (error3) { + this.emitter.emit("error", error3); } - return res; }); } - // High level functions /** - * Uploads a Buffer(Node.js)/Blob(browsers)/ArrayBuffer/ArrayBufferView object to a BlockBlob. - * - * When data length is no more than the specifiled {@link BlockBlobParallelUploadOptions.maxSingleShotSize} (default is - * {@link BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES}), this method will use 1 {@link upload} call to finish the upload. - * Otherwise, this method will call {@link stageBlock} to upload blocks, and finally call {@link commitBlockList} - * to commit the block list. 
- * - * A common {@link BlockBlobParallelUploadOptions.blobHTTPHeaders} option to set is - * `blobContentType`, enabling the browser to provide - * functionality based on file type. + * Start execute operations in the queue. * - * @param data - Buffer(Node.js), Blob, ArrayBuffer or ArrayBufferView - * @param options - */ - async uploadData(data, options = {}) { - return tracing_js_1.tracingClient.withSpan("BlockBlobClient-uploadData", options, async (updatedOptions) => { - if (core_util_1.isNodeLike) { - let buffer; - if (data instanceof Buffer) { - buffer = data; - } else if (data instanceof ArrayBuffer) { - buffer = Buffer.from(data); - } else { - data = data; - buffer = Buffer.from(data.buffer, data.byteOffset, data.byteLength); - } - return this.uploadSeekableInternal((offset, size) => buffer.slice(offset, offset + size), buffer.byteLength, updatedOptions); - } else { - const browserBlob = new Blob([data]); - return this.uploadSeekableInternal((offset, size) => browserBlob.slice(offset, offset + size), browserBlob.size, updatedOptions); - } + async do() { + if (this.operations.length === 0) { + return Promise.resolve(); + } + this.parallelExecute(); + return new Promise((resolve6, reject) => { + this.emitter.on("finish", resolve6); + this.emitter.on("error", (error3) => { + this.state = BatchStates.Error; + reject(error3); + }); }); } /** - * ONLY AVAILABLE IN BROWSERS. - * - * Uploads a browser Blob/File/ArrayBuffer/ArrayBufferView object to block blob. - * - * When buffer length lesser than or equal to 256MB, this method will use 1 upload call to finish the upload. - * Otherwise, this method will call {@link stageBlock} to upload blocks, and finally call - * {@link commitBlockList} to commit the block list. - * - * A common {@link BlockBlobParallelUploadOptions.blobHTTPHeaders} option to set is - * `blobContentType`, enabling the browser to provide - * functionality based on file type. - * - * @deprecated Use {@link uploadData} instead. + * Get next operation to be executed. Return null when reaching ends. * - * @param browserData - Blob, File, ArrayBuffer or ArrayBufferView - * @param options - Options to upload browser data. - * @returns Response data for the Blob Upload operation. */ - async uploadBrowserData(browserData, options = {}) { - return tracing_js_1.tracingClient.withSpan("BlockBlobClient-uploadBrowserData", options, async (updatedOptions) => { - const browserBlob = new Blob([browserData]); - return this.uploadSeekableInternal((offset, size) => browserBlob.slice(offset, offset + size), browserBlob.size, updatedOptions); - }); + nextOperation() { + if (this.offset < this.operations.length) { + return this.operations[this.offset++]; + } + return null; } /** + * Start execute operations. One one the most important difference between + * this method with do() is that do() wraps as an sync method. * - * Uploads data to block blob. Requires a bodyFactory as the data source, - * which need to return a {@link HttpRequestBody} object with the offset and size provided. - * - * When data length is no more than the specified {@link BlockBlobParallelUploadOptions.maxSingleShotSize} (default is - * {@link BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES}), this method will use 1 {@link upload} call to finish the upload. - * Otherwise, this method will call {@link stageBlock} to upload blocks, and finally call {@link commitBlockList} - * to commit the block list. - * - * @param bodyFactory - - * @param size - size of the data to upload. - * @param options - Options to Upload to Block Blob operation. 
- * @returns Response data for the Blob Upload operation. */ - async uploadSeekableInternal(bodyFactory, size, options = {}) { - let blockSize = options.blockSize ?? 0; - if (blockSize < 0 || blockSize > constants_js_1.BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES) { - throw new RangeError(`blockSize option must be >= 0 and <= ${constants_js_1.BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES}`); + parallelExecute() { + if (this.state === BatchStates.Error) { + return; } - const maxSingleShotSize = options.maxSingleShotSize ?? constants_js_1.BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES; - if (maxSingleShotSize < 0 || maxSingleShotSize > constants_js_1.BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES) { - throw new RangeError(`maxSingleShotSize option must be >= 0 and <= ${constants_js_1.BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES}`); + if (this.completed >= this.operations.length) { + this.emitter.emit("finish"); + return; } - if (blockSize === 0) { - if (size > constants_js_1.BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES * constants_js_1.BLOCK_BLOB_MAX_BLOCKS) { - throw new RangeError(`${size} is too larger to upload to a block blob.`); - } - if (size > maxSingleShotSize) { - blockSize = Math.ceil(size / constants_js_1.BLOCK_BLOB_MAX_BLOCKS); - if (blockSize < constants_js_1.DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES) { - blockSize = constants_js_1.DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES; - } + while (this.actives < this.concurrency) { + const operation = this.nextOperation(); + if (operation) { + operation(); + } else { + return; } } - if (!options.blobHTTPHeaders) { - options.blobHTTPHeaders = {}; - } - if (!options.conditions) { - options.conditions = {}; - } - return tracing_js_1.tracingClient.withSpan("BlockBlobClient-uploadSeekableInternal", options, async (updatedOptions) => { - if (size <= maxSingleShotSize) { - return (0, utils_common_js_1.assertResponse)(await this.upload(bodyFactory(0, size), size, updatedOptions)); + } + }; + exports2.Batch = Batch; + } +}); + +// node_modules/@azure/storage-blob/dist/commonjs/utils/utils.js +var require_utils7 = __commonJS({ + "node_modules/@azure/storage-blob/dist/commonjs/utils/utils.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.fsCreateReadStream = exports2.fsStat = void 0; + exports2.streamToBuffer = streamToBuffer; + exports2.streamToBuffer2 = streamToBuffer2; + exports2.streamToBuffer3 = streamToBuffer3; + exports2.readStreamToLocalFile = readStreamToLocalFile; + var tslib_1 = (init_tslib_es6(), __toCommonJS(tslib_es6_exports)); + var node_fs_1 = tslib_1.__importDefault(require("node:fs")); + var node_util_1 = tslib_1.__importDefault(require("node:util")); + var constants_js_1 = require_constants15(); + async function streamToBuffer(stream2, buffer, offset, end, encoding) { + let pos = 0; + const count = end - offset; + return new Promise((resolve6, reject) => { + const timeout = setTimeout(() => reject(new Error(`The operation cannot be completed in timeout.`)), constants_js_1.REQUEST_TIMEOUT); + stream2.on("readable", () => { + if (pos >= count) { + clearTimeout(timeout); + resolve6(); + return; } - const numBlocks = Math.floor((size - 1) / blockSize) + 1; - if (numBlocks > constants_js_1.BLOCK_BLOB_MAX_BLOCKS) { - throw new RangeError(`The buffer's size is too big or the BlockSize is too small;the number of blocks must be <= ${constants_js_1.BLOCK_BLOB_MAX_BLOCKS}`); + let chunk = stream2.read(); + if (!chunk) { + return; } - const blockList = []; - const blockIDPrefix = (0, core_util_2.randomUUID)(); - let transferProgress = 0; - const batch = new 
Batch_js_1.Batch(options.concurrency); - for (let i = 0; i < numBlocks; i++) { - batch.addOperation(async () => { - const blockID = (0, utils_common_js_1.generateBlockID)(blockIDPrefix, i); - const start = blockSize * i; - const end = i === numBlocks - 1 ? size : start + blockSize; - const contentLength = end - start; - blockList.push(blockID); - await this.stageBlock(blockID, bodyFactory(start, contentLength), contentLength, { - abortSignal: options.abortSignal, - conditions: options.conditions, - encryptionScope: options.encryptionScope, - tracingOptions: updatedOptions.tracingOptions - }); - transferProgress += contentLength; - if (options.onProgress) { - options.onProgress({ - loadedBytes: transferProgress - }); - } - }); + if (typeof chunk === "string") { + chunk = Buffer.from(chunk, encoding); } - await batch.do(); - return this.commitBlockList(blockList, updatedOptions); + const chunkLength = pos + chunk.length > count ? count - pos : chunk.length; + buffer.fill(chunk.slice(0, chunkLength), offset + pos, offset + pos + chunkLength); + pos += chunkLength; }); - } + stream2.on("end", () => { + clearTimeout(timeout); + if (pos < count) { + reject(new Error(`Stream drains before getting enough data needed. Data read: ${pos}, data need: ${count}`)); + } + resolve6(); + }); + stream2.on("error", (msg) => { + clearTimeout(timeout); + reject(msg); + }); + }); + } + async function streamToBuffer2(stream2, buffer, encoding) { + let pos = 0; + const bufferSize = buffer.length; + return new Promise((resolve6, reject) => { + stream2.on("readable", () => { + let chunk = stream2.read(); + if (!chunk) { + return; + } + if (typeof chunk === "string") { + chunk = Buffer.from(chunk, encoding); + } + if (pos + chunk.length > bufferSize) { + reject(new Error(`Stream exceeds buffer size. Buffer size: ${bufferSize}`)); + return; + } + buffer.fill(chunk, pos, pos + chunk.length); + pos += chunk.length; + }); + stream2.on("end", () => { + resolve6(pos); + }); + stream2.on("error", reject); + }); + } + async function streamToBuffer3(readableStream, encoding) { + return new Promise((resolve6, reject) => { + const chunks = []; + readableStream.on("data", (data) => { + chunks.push(typeof data === "string" ? 
Buffer.from(data, encoding) : data); + }); + readableStream.on("end", () => { + resolve6(Buffer.concat(chunks)); + }); + readableStream.on("error", reject); + }); + } + async function readStreamToLocalFile(rs, file) { + return new Promise((resolve6, reject) => { + const ws = node_fs_1.default.createWriteStream(file); + rs.on("error", (err) => { + reject(err); + }); + ws.on("error", (err) => { + reject(err); + }); + ws.on("close", resolve6); + rs.pipe(ws); + }); + } + exports2.fsStat = node_util_1.default.promisify(node_fs_1.default.stat); + exports2.fsCreateReadStream = node_fs_1.default.createReadStream; + } +}); + +// node_modules/@azure/storage-blob/dist/commonjs/Clients.js +var require_Clients = __commonJS({ + "node_modules/@azure/storage-blob/dist/commonjs/Clients.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.PageBlobClient = exports2.BlockBlobClient = exports2.AppendBlobClient = exports2.BlobClient = void 0; + var core_rest_pipeline_1 = require_commonjs6(); + var core_auth_1 = require_commonjs7(); + var core_util_1 = require_commonjs4(); + var core_util_2 = require_commonjs4(); + var BlobDownloadResponse_js_1 = require_BlobDownloadResponse(); + var BlobQueryResponse_js_1 = require_BlobQueryResponse(); + var AnonymousCredential_js_1 = require_AnonymousCredential(); + var StorageSharedKeyCredential_js_1 = require_StorageSharedKeyCredential(); + var models_js_1 = require_models2(); + var PageBlobRangeResponse_js_1 = require_PageBlobRangeResponse(); + var Pipeline_js_1 = require_Pipeline(); + var BlobStartCopyFromUrlPoller_js_1 = require_BlobStartCopyFromUrlPoller(); + var Range_js_1 = require_Range(); + var StorageClient_js_1 = require_StorageClient(); + var Batch_js_1 = require_Batch(); + var storage_common_1 = require_commonjs13(); + var constants_js_1 = require_constants15(); + var tracing_js_1 = require_tracing(); + var utils_common_js_1 = require_utils_common(); + var utils_js_1 = require_utils7(); + var BlobSASSignatureValues_js_1 = require_BlobSASSignatureValues(); + var BlobLeaseClient_js_1 = require_BlobLeaseClient(); + var BlobClient = class _BlobClient extends StorageClient_js_1.StorageClient { /** - * ONLY AVAILABLE IN NODE.JS RUNTIME. - * - * Uploads a local file in blocks to a block blob. - * - * When file size lesser than or equal to 256MB, this method will use 1 upload call to finish the upload. - * Otherwise, this method will call stageBlock to upload blocks, and finally call commitBlockList - * to commit the block list. - * - * @param filePath - Full path of local file - * @param options - Options to Upload to Block Blob operation. - * @returns Response data for the Blob Upload operation. + * blobContext provided by protocol layer. */ - async uploadFile(filePath, options = {}) { - return tracing_js_1.tracingClient.withSpan("BlockBlobClient-uploadFile", options, async (updatedOptions) => { - const size = (await (0, utils_js_1.fsStat)(filePath)).size; - return this.uploadSeekableInternal((offset, count) => { - return () => (0, utils_js_1.fsCreateReadStream)(filePath, { - autoClose: true, - end: count ? offset + count - 1 : Infinity, - start: offset - }); - }, size, { - ...options, - tracingOptions: updatedOptions.tracingOptions - }); - }); - } + blobContext; + _name; + _containerName; + _versionId; + _snapshot; /** - * ONLY AVAILABLE IN NODE.JS RUNTIME. - * - * Uploads a Node.js Readable stream into block blob. 
- * - * PERFORMANCE IMPROVEMENT TIPS: - * * Input stream highWaterMark is better to set a same value with bufferSize - * parameter, which will avoid Buffer.concat() operations. - * - * @param stream - Node.js Readable stream - * @param bufferSize - Size of every buffer allocated, also the block size in the uploaded block blob. Default value is 8MB - * @param maxConcurrency - Max concurrency indicates the max number of buffers that can be allocated, - * positive correlation with max uploading concurrency. Default value is 5 - * @param options - Options to Upload Stream to Block Blob operation. - * @returns Response data for the Blob Upload operation. + * The name of the blob. */ - async uploadStream(stream2, bufferSize = constants_js_1.DEFAULT_BLOCK_BUFFER_SIZE_BYTES, maxConcurrency = 5, options = {}) { - if (!options.blobHTTPHeaders) { - options.blobHTTPHeaders = {}; - } - if (!options.conditions) { - options.conditions = {}; - } - return tracing_js_1.tracingClient.withSpan("BlockBlobClient-uploadStream", options, async (updatedOptions) => { - let blockNum = 0; - const blockIDPrefix = (0, core_util_2.randomUUID)(); - let transferProgress = 0; - const blockList = []; - const scheduler = new storage_common_1.BufferScheduler( - stream2, - bufferSize, - maxConcurrency, - async (body, length) => { - const blockID = (0, utils_common_js_1.generateBlockID)(blockIDPrefix, blockNum); - blockList.push(blockID); - blockNum++; - await this.stageBlock(blockID, body, length, { - customerProvidedKey: options.customerProvidedKey, - conditions: options.conditions, - encryptionScope: options.encryptionScope, - tracingOptions: updatedOptions.tracingOptions - }); - transferProgress += length; - if (options.onProgress) { - options.onProgress({ loadedBytes: transferProgress }); - } - }, - // concurrency should set a smaller value than maxConcurrency, which is helpful to - // reduce the possibility when a outgoing handler waits for stream data, in - // this situation, outgoing handlers are blocked. - // Outgoing queue shouldn't be empty. - Math.ceil(maxConcurrency / 4 * 3) - ); - await scheduler.do(); - return (0, utils_common_js_1.assertResponse)(await this.commitBlockList(blockList, { - ...options, - tracingOptions: updatedOptions.tracingOptions - })); - }); + get name() { + return this._name; } - }; - exports2.BlockBlobClient = BlockBlobClient; - var PageBlobClient = class _PageBlobClient extends BlobClient { /** - * pageBlobsContext provided by protocol layer. + * The name of the storage container the blob is associated with. 
*/ - pageBlobContext; + get containerName() { + return this._containerName; + } constructor(urlOrConnectionString, credentialOrPipelineOrContainerName, blobNameOrOptions, options) { + options = options || {}; let pipeline; let url2; - options = options || {}; if ((0, Pipeline_js_1.isPipelineLike)(credentialOrPipelineOrContainerName)) { url2 = urlOrConnectionString; pipeline = credentialOrPipelineOrContainerName; @@ -86422,6 +85808,9 @@ var require_Clients = __commonJS({ pipeline = (0, Pipeline_js_1.newPipeline)(credentialOrPipelineOrContainerName, options); } else if (!credentialOrPipelineOrContainerName && typeof credentialOrPipelineOrContainerName !== "string") { url2 = urlOrConnectionString; + if (blobNameOrOptions && typeof blobNameOrOptions !== "string") { + options = blobNameOrOptions; + } pipeline = (0, Pipeline_js_1.newPipeline)(new AnonymousCredential_js_1.AnonymousCredential(), options); } else if (credentialOrPipelineOrContainerName && typeof credentialOrPipelineOrContainerName === "string" && blobNameOrOptions && typeof blobNameOrOptions === "string") { const containerName = credentialOrPipelineOrContainerName; @@ -86448,487 +85837,499 @@ var require_Clients = __commonJS({ throw new Error("Expecting non-empty strings for containerName and blobName parameters"); } super(url2, pipeline); - this.pageBlobContext = this.storageClientContext.pageBlob; + ({ blobName: this._name, containerName: this._containerName } = this.getBlobAndContainerNamesFromUrl()); + this.blobContext = this.storageClientContext.blob; + this._snapshot = (0, utils_common_js_1.getURLParameter)(this.url, constants_js_1.URLConstants.Parameters.SNAPSHOT); + this._versionId = (0, utils_common_js_1.getURLParameter)(this.url, constants_js_1.URLConstants.Parameters.VERSIONID); + } + /** + * Creates a new BlobClient object identical to the source but with the specified snapshot timestamp. + * Provide "" will remove the snapshot and return a Client to the base blob. + * + * @param snapshot - The snapshot timestamp. + * @returns A new BlobClient object identical to the source but with the specified snapshot timestamp + */ + withSnapshot(snapshot) { + return new _BlobClient((0, utils_common_js_1.setURLParameter)(this.url, constants_js_1.URLConstants.Parameters.SNAPSHOT, snapshot.length === 0 ? void 0 : snapshot), this.pipeline); + } + /** + * Creates a new BlobClient object pointing to a version of this blob. + * Provide "" will remove the versionId and return a Client to the base blob. + * + * @param versionId - The versionId. + * @returns A new BlobClient object pointing to the version of this blob. + */ + withVersion(versionId) { + return new _BlobClient((0, utils_common_js_1.setURLParameter)(this.url, constants_js_1.URLConstants.Parameters.VERSIONID, versionId.length === 0 ? void 0 : versionId), this.pipeline); } /** - * Creates a new PageBlobClient object identical to the source but with the - * specified snapshot timestamp. - * Provide "" will remove the snapshot and return a Client to the base blob. + * Creates a AppendBlobClient object. * - * @param snapshot - The snapshot timestamp. - * @returns A new PageBlobClient object identical to the source but with the specified snapshot timestamp. */ - withSnapshot(snapshot) { - return new _PageBlobClient((0, utils_common_js_1.setURLParameter)(this.url, constants_js_1.URLConstants.Parameters.SNAPSHOT, snapshot.length === 0 ? 
void 0 : snapshot), this.pipeline); + getAppendBlobClient() { + return new AppendBlobClient(this.url, this.pipeline); } /** - * Creates a page blob of the specified length. Call uploadPages to upload data - * data to a page blob. - * @see https://learn.microsoft.com/rest/api/storageservices/put-blob + * Creates a BlockBlobClient object. * - * @param size - size of the page blob. - * @param options - Options to the Page Blob Create operation. - * @returns Response data for the Page Blob Create operation. */ - async create(size, options = {}) { + getBlockBlobClient() { + return new BlockBlobClient(this.url, this.pipeline); + } + /** + * Creates a PageBlobClient object. + * + */ + getPageBlobClient() { + return new PageBlobClient(this.url, this.pipeline); + } + /** + * Reads or downloads a blob from the system, including its metadata and properties. + * You can also call Get Blob to read a snapshot. + * + * * In Node.js, data returns in a Readable stream readableStreamBody + * * In browsers, data returns in a promise blobBody + * + * @see https://learn.microsoft.com/rest/api/storageservices/get-blob + * + * @param offset - From which position of the blob to download, greater than or equal to 0 + * @param count - How much data to be downloaded, greater than 0. Will download to the end when undefined + * @param options - Optional options to Blob Download operation. + * + * + * Example usage (Node.js): + * + * ```ts snippet:ReadmeSampleDownloadBlob_Node + * import { BlobServiceClient } from "@azure/storage-blob"; + * import { DefaultAzureCredential } from "@azure/identity"; + * + * const account = ""; + * const blobServiceClient = new BlobServiceClient( + * `https://${account}.blob.core.windows.net`, + * new DefaultAzureCredential(), + * ); + * + * const containerName = ""; + * const blobName = ""; + * const containerClient = blobServiceClient.getContainerClient(containerName); + * const blobClient = containerClient.getBlobClient(blobName); + * + * // Get blob content from position 0 to the end + * // In Node.js, get downloaded data by accessing downloadBlockBlobResponse.readableStreamBody + * const downloadBlockBlobResponse = await blobClient.download(); + * if (downloadBlockBlobResponse.readableStreamBody) { + * const downloaded = await streamToString(downloadBlockBlobResponse.readableStreamBody); + * console.log(`Downloaded blob content: ${downloaded}`); + * } + * + * async function streamToString(stream: NodeJS.ReadableStream): Promise { + * const result = await new Promise>((resolve, reject) => { + * const chunks: Buffer[] = []; + * stream.on("data", (data) => { + * chunks.push(Buffer.isBuffer(data) ? 
data : Buffer.from(data)); + * }); + * stream.on("end", () => { + * resolve(Buffer.concat(chunks)); + * }); + * stream.on("error", reject); + * }); + * return result.toString(); + * } + * ``` + * + * Example usage (browser): + * + * ```ts snippet:ReadmeSampleDownloadBlob_Browser + * import { BlobServiceClient } from "@azure/storage-blob"; + * import { DefaultAzureCredential } from "@azure/identity"; + * + * const account = ""; + * const blobServiceClient = new BlobServiceClient( + * `https://${account}.blob.core.windows.net`, + * new DefaultAzureCredential(), + * ); + * + * const containerName = ""; + * const blobName = ""; + * const containerClient = blobServiceClient.getContainerClient(containerName); + * const blobClient = containerClient.getBlobClient(blobName); + * + * // Get blob content from position 0 to the end + * // In browsers, get downloaded data by accessing downloadBlockBlobResponse.blobBody + * const downloadBlockBlobResponse = await blobClient.download(); + * const blobBody = await downloadBlockBlobResponse.blobBody; + * if (blobBody) { + * const downloaded = await blobBody.text(); + * console.log(`Downloaded blob content: ${downloaded}`); + * } + * ``` + */ + async download(offset = 0, count, options = {}) { + options.conditions = options.conditions || {}; options.conditions = options.conditions || {}; (0, models_js_1.ensureCpkIfSpecified)(options.customerProvidedKey, this.isHttps); - return tracing_js_1.tracingClient.withSpan("PageBlobClient-create", options, async (updatedOptions) => { - return (0, utils_common_js_1.assertResponse)(await this.pageBlobContext.create(0, size, { + return tracing_js_1.tracingClient.withSpan("BlobClient-download", options, async (updatedOptions) => { + const res = (0, utils_common_js_1.assertResponse)(await this.blobContext.download({ abortSignal: options.abortSignal, - blobHttpHeaders: options.blobHTTPHeaders, - blobSequenceNumber: options.blobSequenceNumber, leaseAccessConditions: options.conditions, - metadata: options.metadata, modifiedAccessConditions: { ...options.conditions, ifTags: options.conditions?.tagConditions }, + requestOptions: { + onDownloadProgress: core_util_1.isNodeLike ? void 0 : options.onProgress + // for Node.js, progress is reported by RetriableReadableStream + }, + range: offset === 0 && !count ? 
void 0 : (0, Range_js_1.rangeToString)({ offset, count }), + rangeGetContentMD5: options.rangeGetContentMD5, + rangeGetContentCRC64: options.rangeGetContentCrc64, + snapshot: options.snapshot, cpkInfo: options.customerProvidedKey, - encryptionScope: options.encryptionScope, - immutabilityPolicyExpiry: options.immutabilityPolicy?.expiriesOn, - immutabilityPolicyMode: options.immutabilityPolicy?.policyMode, - legalHold: options.legalHold, - tier: (0, models_js_1.toAccessTier)(options.tier), - blobTagsString: (0, utils_common_js_1.toBlobTagsString)(options.tags), tracingOptions: updatedOptions.tracingOptions })); + const wrappedRes = { + ...res, + _response: res._response, + // _response is made non-enumerable + objectReplicationDestinationPolicyId: res.objectReplicationPolicyId, + objectReplicationSourceProperties: (0, utils_common_js_1.parseObjectReplicationRecord)(res.objectReplicationRules) + }; + if (!core_util_1.isNodeLike) { + return wrappedRes; + } + if (options.maxRetryRequests === void 0 || options.maxRetryRequests < 0) { + options.maxRetryRequests = constants_js_1.DEFAULT_MAX_DOWNLOAD_RETRY_REQUESTS; + } + if (res.contentLength === void 0) { + throw new RangeError(`File download response doesn't contain valid content length header`); + } + if (!res.etag) { + throw new RangeError(`File download response doesn't contain valid etag header`); + } + return new BlobDownloadResponse_js_1.BlobDownloadResponse(wrappedRes, async (start) => { + const updatedDownloadOptions = { + leaseAccessConditions: options.conditions, + modifiedAccessConditions: { + ifMatch: options.conditions.ifMatch || res.etag, + ifModifiedSince: options.conditions.ifModifiedSince, + ifNoneMatch: options.conditions.ifNoneMatch, + ifUnmodifiedSince: options.conditions.ifUnmodifiedSince, + ifTags: options.conditions?.tagConditions + }, + range: (0, Range_js_1.rangeToString)({ + count: offset + res.contentLength - start, + offset: start + }), + rangeGetContentMD5: options.rangeGetContentMD5, + rangeGetContentCRC64: options.rangeGetContentCrc64, + snapshot: options.snapshot, + cpkInfo: options.customerProvidedKey + }; + return (await this.blobContext.download({ + abortSignal: options.abortSignal, + ...updatedDownloadOptions + })).readableStreamBody; + }, offset, res.contentLength, { + maxRetryRequests: options.maxRetryRequests, + onProgress: options.onProgress + }); }); } /** - * Creates a page blob of the specified length. Call uploadPages to upload data - * data to a page blob. If the blob with the same name already exists, the content - * of the existing blob will remain unchanged. - * @see https://learn.microsoft.com/rest/api/storageservices/put-blob + * Returns true if the Azure blob resource represented by this client exists; false otherwise. * - * @param size - size of the page blob. - * @param options - + * NOTE: use this function with care since an existing blob might be deleted by other clients or + * applications. Vice versa new blobs might be added by other clients or applications after this + * function completes. + * + * @param options - options to Exists operation. 
*/ - async createIfNotExists(size, options = {}) { - return tracing_js_1.tracingClient.withSpan("PageBlobClient-createIfNotExists", options, async (updatedOptions) => { + async exists(options = {}) { + return tracing_js_1.tracingClient.withSpan("BlobClient-exists", options, async (updatedOptions) => { try { - const conditions = { ifNoneMatch: constants_js_1.ETagAny }; - const res = (0, utils_common_js_1.assertResponse)(await this.create(size, { - ...options, - conditions, + (0, models_js_1.ensureCpkIfSpecified)(options.customerProvidedKey, this.isHttps); + await this.getProperties({ + abortSignal: options.abortSignal, + customerProvidedKey: options.customerProvidedKey, + conditions: options.conditions, tracingOptions: updatedOptions.tracingOptions - })); - return { - succeeded: true, - ...res, - _response: res._response - // _response is made non-enumerable - }; + }); + return true; } catch (e) { - if (e.details?.errorCode === "BlobAlreadyExists") { - return { - succeeded: false, - ...e.response?.parsedHeaders, - _response: e.response - }; + if (e.statusCode === 404) { + return false; + } else if (e.statusCode === 409 && (e.details.errorCode === constants_js_1.BlobUsesCustomerSpecifiedEncryptionMsg || e.details.errorCode === constants_js_1.BlobDoesNotUseCustomerSpecifiedEncryption)) { + return true; } throw e; } }); } /** - * Writes 1 or more pages to the page blob. The start and end offsets must be a multiple of 512. - * @see https://learn.microsoft.com/rest/api/storageservices/put-page + * Returns all user-defined metadata, standard HTTP properties, and system properties + * for the blob. It does not return the content of the blob. + * @see https://learn.microsoft.com/rest/api/storageservices/get-blob-properties * - * @param body - Data to upload - * @param offset - Offset of destination page blob - * @param count - Content length of the body, also number of bytes to be uploaded - * @param options - Options to the Page Blob Upload Pages operation. - * @returns Response data for the Page Blob Upload Pages operation. - */ - async uploadPages(body, offset, count, options = {}) { - options.conditions = options.conditions || {}; - (0, models_js_1.ensureCpkIfSpecified)(options.customerProvidedKey, this.isHttps); - return tracing_js_1.tracingClient.withSpan("PageBlobClient-uploadPages", options, async (updatedOptions) => { - return (0, utils_common_js_1.assertResponse)(await this.pageBlobContext.uploadPages(count, body, { - abortSignal: options.abortSignal, - leaseAccessConditions: options.conditions, - modifiedAccessConditions: { - ...options.conditions, - ifTags: options.conditions?.tagConditions - }, - requestOptions: { - onUploadProgress: options.onProgress - }, - range: (0, Range_js_1.rangeToString)({ offset, count }), - sequenceNumberAccessConditions: options.conditions, - transactionalContentMD5: options.transactionalContentMD5, - transactionalContentCrc64: options.transactionalContentCrc64, - cpkInfo: options.customerProvidedKey, - encryptionScope: options.encryptionScope, - tracingOptions: updatedOptions.tracingOptions - })); - }); - } - /** - * The Upload Pages operation writes a range of pages to a page blob where the - * contents are read from a URL. - * @see https://learn.microsoft.com/rest/api/storageservices/put-page-from-url + * WARNING: The `metadata` object returned in the response will have its keys in lowercase, even if + * they originally contained uppercase characters. 
This differs from the metadata keys returned by + * the methods of {@link ContainerClient} that list blobs using the `includeMetadata` option, which + * will retain their original casing. * - * @param sourceURL - Specify a URL to the copy source, Shared Access Signature(SAS) maybe needed for authentication - * @param sourceOffset - The source offset to copy from. Pass 0 to copy from the beginning of source page blob - * @param destOffset - Offset of destination page blob - * @param count - Number of bytes to be uploaded from source page blob - * @param options - + * @param options - Optional options to Get Properties operation. */ - async uploadPagesFromURL(sourceURL, sourceOffset, destOffset, count, options = {}) { + async getProperties(options = {}) { options.conditions = options.conditions || {}; - options.sourceConditions = options.sourceConditions || {}; (0, models_js_1.ensureCpkIfSpecified)(options.customerProvidedKey, this.isHttps); - return tracing_js_1.tracingClient.withSpan("PageBlobClient-uploadPagesFromURL", options, async (updatedOptions) => { - return (0, utils_common_js_1.assertResponse)(await this.pageBlobContext.uploadPagesFromURL(sourceURL, (0, Range_js_1.rangeToString)({ offset: sourceOffset, count }), 0, (0, Range_js_1.rangeToString)({ offset: destOffset, count }), { + return tracing_js_1.tracingClient.withSpan("BlobClient-getProperties", options, async (updatedOptions) => { + const res = (0, utils_common_js_1.assertResponse)(await this.blobContext.getProperties({ abortSignal: options.abortSignal, - sourceContentMD5: options.sourceContentMD5, - sourceContentCrc64: options.sourceContentCrc64, leaseAccessConditions: options.conditions, - sequenceNumberAccessConditions: options.conditions, modifiedAccessConditions: { ...options.conditions, ifTags: options.conditions?.tagConditions }, - sourceModifiedAccessConditions: { - sourceIfMatch: options.sourceConditions?.ifMatch, - sourceIfModifiedSince: options.sourceConditions?.ifModifiedSince, - sourceIfNoneMatch: options.sourceConditions?.ifNoneMatch, - sourceIfUnmodifiedSince: options.sourceConditions?.ifUnmodifiedSince - }, cpkInfo: options.customerProvidedKey, - encryptionScope: options.encryptionScope, - copySourceAuthorization: (0, utils_common_js_1.httpAuthorizationToString)(options.sourceAuthorization), - fileRequestIntent: options.sourceShareTokenIntent, tracingOptions: updatedOptions.tracingOptions })); + return { + ...res, + _response: res._response, + // _response is made non-enumerable + objectReplicationDestinationPolicyId: res.objectReplicationPolicyId, + objectReplicationSourceProperties: (0, utils_common_js_1.parseObjectReplicationRecord)(res.objectReplicationRules) + }; }); } /** - * Frees the specified pages from the page blob. - * @see https://learn.microsoft.com/rest/api/storageservices/put-page + * Marks the specified blob or snapshot for deletion. The blob is later deleted + * during garbage collection. Note that in order to delete a blob, you must delete + * all of its snapshots. You can delete both at the same time with the Delete + * Blob operation. + * @see https://learn.microsoft.com/rest/api/storageservices/delete-blob * - * @param offset - Starting byte position of the pages to clear. - * @param count - Number of bytes to clear. - * @param options - Options to the Page Blob Clear Pages operation. - * @returns Response data for the Page Blob Clear Pages operation. + * @param options - Optional options to Blob Delete operation. 
*/ - async clearPages(offset = 0, count, options = {}) { + async delete(options = {}) { options.conditions = options.conditions || {}; - return tracing_js_1.tracingClient.withSpan("PageBlobClient-clearPages", options, async (updatedOptions) => { - return (0, utils_common_js_1.assertResponse)(await this.pageBlobContext.clearPages(0, { + return tracing_js_1.tracingClient.withSpan("BlobClient-delete", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this.blobContext.delete({ abortSignal: options.abortSignal, + deleteSnapshots: options.deleteSnapshots, leaseAccessConditions: options.conditions, modifiedAccessConditions: { ...options.conditions, ifTags: options.conditions?.tagConditions }, - range: (0, Range_js_1.rangeToString)({ offset, count }), - sequenceNumberAccessConditions: options.conditions, - cpkInfo: options.customerProvidedKey, - encryptionScope: options.encryptionScope, tracingOptions: updatedOptions.tracingOptions })); }); } /** - * Returns the list of valid page ranges for a page blob or snapshot of a page blob. - * @see https://learn.microsoft.com/rest/api/storageservices/get-page-ranges + * Marks the specified blob or snapshot for deletion if it exists. The blob is later deleted + * during garbage collection. Note that in order to delete a blob, you must delete + * all of its snapshots. You can delete both at the same time with the Delete + * Blob operation. + * @see https://learn.microsoft.com/rest/api/storageservices/delete-blob * - * @param offset - Starting byte position of the page ranges. - * @param count - Number of bytes to get. - * @param options - Options to the Page Blob Get Ranges operation. - * @returns Response data for the Page Blob Get Ranges operation. + * @param options - Optional options to Blob Delete operation. */ - async getPageRanges(offset = 0, count, options = {}) { - options.conditions = options.conditions || {}; - return tracing_js_1.tracingClient.withSpan("PageBlobClient-getPageRanges", options, async (updatedOptions) => { - const response = (0, utils_common_js_1.assertResponse)(await this.pageBlobContext.getPageRanges({ - abortSignal: options.abortSignal, - leaseAccessConditions: options.conditions, - modifiedAccessConditions: { - ...options.conditions, - ifTags: options.conditions?.tagConditions - }, - range: (0, Range_js_1.rangeToString)({ offset, count }), - tracingOptions: updatedOptions.tracingOptions - })); - return (0, PageBlobRangeResponse_js_1.rangeResponseFromModel)(response); + async deleteIfExists(options = {}) { + return tracing_js_1.tracingClient.withSpan("BlobClient-deleteIfExists", options, async (updatedOptions) => { + try { + const res = (0, utils_common_js_1.assertResponse)(await this.delete(updatedOptions)); + return { + succeeded: true, + ...res, + _response: res._response + // _response is made non-enumerable + }; + } catch (e) { + if (e.details?.errorCode === "BlobNotFound") { + return { + succeeded: false, + ...e.response?.parsedHeaders, + _response: e.response + }; + } + throw e; + } }); } /** - * getPageRangesSegment returns a single segment of page ranges starting from the - * specified Marker. Use an empty Marker to start enumeration from the beginning. - * After getting a segment, process it, and then call getPageRangesSegment again - * (passing the the previously-returned Marker) to get the next segment. 
- * @see https://learn.microsoft.com/rest/api/storageservices/get-page-ranges + * Restores the contents and metadata of soft deleted blob and any associated + * soft deleted snapshots. Undelete Blob is supported only on version 2017-07-29 + * or later. + * @see https://learn.microsoft.com/rest/api/storageservices/undelete-blob * - * @param offset - Starting byte position of the page ranges. - * @param count - Number of bytes to get. - * @param marker - A string value that identifies the portion of the list to be returned with the next list operation. - * @param options - Options to PageBlob Get Page Ranges Segment operation. + * @param options - Optional options to Blob Undelete operation. */ - async listPageRangesSegment(offset = 0, count, marker, options = {}) { - return tracing_js_1.tracingClient.withSpan("PageBlobClient-getPageRangesSegment", options, async (updatedOptions) => { - return (0, utils_common_js_1.assertResponse)(await this.pageBlobContext.getPageRanges({ + async undelete(options = {}) { + return tracing_js_1.tracingClient.withSpan("BlobClient-undelete", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this.blobContext.undelete({ abortSignal: options.abortSignal, - leaseAccessConditions: options.conditions, - modifiedAccessConditions: { - ...options.conditions, - ifTags: options.conditions?.tagConditions - }, - range: (0, Range_js_1.rangeToString)({ offset, count }), - marker, - maxPageSize: options.maxPageSize, tracingOptions: updatedOptions.tracingOptions })); }); } /** - * Returns an AsyncIterableIterator for {@link PageBlobGetPageRangesResponseModel} - * - * @param offset - Starting byte position of the page ranges. - * @param count - Number of bytes to get. - * @param marker - A string value that identifies the portion of - * the get of page ranges to be returned with the next getting operation. The - * operation returns the ContinuationToken value within the response body if the - * getting operation did not return all page ranges remaining within the current page. - * The ContinuationToken value can be used as the value for - * the marker parameter in a subsequent call to request the next page of get - * items. The marker value is opaque to the client. - * @param options - Options to List Page Ranges operation. - */ - async *listPageRangeItemSegments(offset = 0, count, marker, options = {}) { - let getPageRangeItemSegmentsResponse; - if (!!marker || marker === void 0) { - do { - getPageRangeItemSegmentsResponse = await this.listPageRangesSegment(offset, count, marker, options); - marker = getPageRangeItemSegmentsResponse.continuationToken; - yield await getPageRangeItemSegmentsResponse; - } while (marker); - } - } - /** - * Returns an AsyncIterableIterator of {@link PageRangeInfo} objects - * - * @param offset - Starting byte position of the page ranges. - * @param count - Number of bytes to get. - * @param options - Options to List Page Ranges operation. - */ - async *listPageRangeItems(offset = 0, count, options = {}) { - let marker; - for await (const getPageRangesSegment of this.listPageRangeItemSegments(offset, count, marker, options)) { - yield* (0, utils_common_js_1.ExtractPageRangeInfoItems)(getPageRangesSegment); - } - } - /** - * Returns an async iterable iterator to list of page ranges for a page blob. - * @see https://learn.microsoft.com/rest/api/storageservices/get-page-ranges - * - * .byPage() returns an async iterable iterator to list of page ranges for a page blob. 
- * - * ```ts snippet:ClientsListPageBlobs - * import { BlobServiceClient } from "@azure/storage-blob"; - * import { DefaultAzureCredential } from "@azure/identity"; - * - * const account = ""; - * const blobServiceClient = new BlobServiceClient( - * `https://${account}.blob.core.windows.net`, - * new DefaultAzureCredential(), - * ); - * - * const containerName = ""; - * const blobName = ""; - * const containerClient = blobServiceClient.getContainerClient(containerName); - * const pageBlobClient = containerClient.getPageBlobClient(blobName); - * - * // Example using `for await` syntax - * let i = 1; - * for await (const pageRange of pageBlobClient.listPageRanges()) { - * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); - * } - * - * // Example using `iter.next()` syntax - * i = 1; - * const iter = pageBlobClient.listPageRanges(); - * let { value, done } = await iter.next(); - * while (!done) { - * console.log(`Page range ${i++}: ${value.start} - ${value.end}`); - * ({ value, done } = await iter.next()); - * } - * - * // Example using `byPage()` syntax - * i = 1; - * for await (const page of pageBlobClient.listPageRanges().byPage({ maxPageSize: 20 })) { - * for (const pageRange of page.pageRange || []) { - * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); - * } - * } + * Sets system properties on the blob. * - * // Example using paging with a marker - * i = 1; - * let iterator = pageBlobClient.listPageRanges().byPage({ maxPageSize: 2 }); - * let response = (await iterator.next()).value; - * // Prints 2 page ranges - * if (response.pageRange) { - * for (const pageRange of response.pageRange) { - * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); - * } - * } - * // Gets next marker - * let marker = response.continuationToken; - * // Passing next marker as continuationToken - * iterator = pageBlobClient.listPageRanges().byPage({ continuationToken: marker, maxPageSize: 10 }); - * response = (await iterator.next()).value; - * // Prints 10 page ranges - * if (response.pageRange) { - * for (const pageRange of response.pageRange) { - * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); - * } - * } - * ``` + * If no value provided, or no value provided for the specified blob HTTP headers, + * these blob HTTP headers without a value will be cleared. + * @see https://learn.microsoft.com/rest/api/storageservices/set-blob-properties * - * @param offset - Starting byte position of the page ranges. - * @param count - Number of bytes to get. - * @param options - Options to the Page Blob Get Ranges operation. - * @returns An asyncIterableIterator that supports paging. + * @param blobHTTPHeaders - If no value provided, or no value provided for + * the specified blob HTTP headers, these blob HTTP + * headers without a value will be cleared. + * A common header to set is `blobContentType` + * enabling the browser to provide functionality + * based on file type. + * @param options - Optional options to Blob Set HTTP Headers operation. 
*/ - listPageRanges(offset = 0, count, options = {}) { + async setHTTPHeaders(blobHTTPHeaders, options = {}) { options.conditions = options.conditions || {}; - const iter = this.listPageRangeItems(offset, count, options); - return { - /** - * The next method, part of the iteration protocol - */ - next() { - return iter.next(); - }, - /** - * The connection to the async iterator, part of the iteration protocol - */ - [Symbol.asyncIterator]() { - return this; - }, - /** - * Return an AsyncIterableIterator that works a page at a time - */ - byPage: (settings = {}) => { - return this.listPageRangeItemSegments(offset, count, settings.continuationToken, { - maxPageSize: settings.maxPageSize, - ...options - }); - } - }; + (0, models_js_1.ensureCpkIfSpecified)(options.customerProvidedKey, this.isHttps); + return tracing_js_1.tracingClient.withSpan("BlobClient-setHTTPHeaders", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this.blobContext.setHttpHeaders({ + abortSignal: options.abortSignal, + blobHttpHeaders: blobHTTPHeaders, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: { + ...options.conditions, + ifTags: options.conditions?.tagConditions + }, + // cpkInfo: options.customerProvidedKey, // CPK is not included in Swagger, should change this back when this issue is fixed in Swagger. + tracingOptions: updatedOptions.tracingOptions + })); + }); } /** - * Gets the collection of page ranges that differ between a specified snapshot and this page blob. - * @see https://learn.microsoft.com/rest/api/storageservices/get-page-ranges + * Sets user-defined metadata for the specified blob as one or more name-value pairs. * - * @param offset - Starting byte position of the page blob - * @param count - Number of bytes to get ranges diff. - * @param prevSnapshot - Timestamp of snapshot to retrieve the difference. - * @param options - Options to the Page Blob Get Page Ranges Diff operation. - * @returns Response data for the Page Blob Get Page Range Diff operation. + * If no option provided, or no metadata defined in the parameter, the blob + * metadata will be removed. + * @see https://learn.microsoft.com/rest/api/storageservices/set-blob-metadata + * + * @param metadata - Replace existing metadata with this value. + * If no value provided the existing metadata will be removed. + * @param options - Optional options to Set Metadata operation. 
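A minimal usage sketch for the header and metadata setters documented above, assuming a `BlobClient` built the same way as in the other snippets in this file; the account, container, and blob names are placeholders. As the comments note, headers without a value are cleared, and calling `setMetadata` with no argument removes existing metadata.

```ts
import { BlobServiceClient } from "@azure/storage-blob";
import { DefaultAzureCredential } from "@azure/identity";

const account = "<storage account>";
const blobServiceClient = new BlobServiceClient(
  `https://${account}.blob.core.windows.net`,
  new DefaultAzureCredential(),
);
const blobClient = blobServiceClient
  .getContainerClient("<container>")
  .getBlobClient("<blob>");

// Only blobContentType is supplied, so the other blob HTTP headers are cleared.
await blobClient.setHTTPHeaders({ blobContentType: "application/json" });

// Replaces all user-defined metadata; calling setMetadata() with no argument removes it instead.
await blobClient.setMetadata({ project: "demo" });
```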
*/ - async getPageRangesDiff(offset, count, prevSnapshot, options = {}) { + async setMetadata(metadata, options = {}) { options.conditions = options.conditions || {}; - return tracing_js_1.tracingClient.withSpan("PageBlobClient-getPageRangesDiff", options, async (updatedOptions) => { - const result = (0, utils_common_js_1.assertResponse)(await this.pageBlobContext.getPageRangesDiff({ + (0, models_js_1.ensureCpkIfSpecified)(options.customerProvidedKey, this.isHttps); + return tracing_js_1.tracingClient.withSpan("BlobClient-setMetadata", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this.blobContext.setMetadata({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, + metadata, modifiedAccessConditions: { ...options.conditions, ifTags: options.conditions?.tagConditions }, - prevsnapshot: prevSnapshot, - range: (0, Range_js_1.rangeToString)({ offset, count }), + cpkInfo: options.customerProvidedKey, + encryptionScope: options.encryptionScope, tracingOptions: updatedOptions.tracingOptions })); - return (0, PageBlobRangeResponse_js_1.rangeResponseFromModel)(result); }); } /** - * getPageRangesDiffSegment returns a single segment of page ranges starting from the - * specified Marker for difference between previous snapshot and the target page blob. - * Use an empty Marker to start enumeration from the beginning. - * After getting a segment, process it, and then call getPageRangesDiffSegment again - * (passing the the previously-returned Marker) to get the next segment. - * @see https://learn.microsoft.com/rest/api/storageservices/get-page-ranges + * Sets tags on the underlying blob. + * A blob can have up to 10 tags. Tag keys must be between 1 and 128 characters. Tag values must be between 0 and 256 characters. + * Valid tag key and value characters include lower and upper case letters, digits (0-9), + * space (' '), plus ('+'), minus ('-'), period ('.'), foward slash ('/'), colon (':'), equals ('='), and underscore ('_'). * - * @param offset - Starting byte position of the page ranges. - * @param count - Number of bytes to get. - * @param prevSnapshotOrUrl - Timestamp of snapshot to retrieve the difference or URL of snapshot to retrieve the difference. - * @param marker - A string value that identifies the portion of the get to be returned with the next get operation. - * @param options - Options to the Page Blob Get Page Ranges Diff operation. 
+ * @param tags - + * @param options - */ - async listPageRangesDiffSegment(offset, count, prevSnapshotOrUrl, marker, options = {}) { - return tracing_js_1.tracingClient.withSpan("PageBlobClient-getPageRangesDiffSegment", options, async (updatedOptions) => { - return (0, utils_common_js_1.assertResponse)(await this.pageBlobContext.getPageRangesDiff({ - abortSignal: options?.abortSignal, - leaseAccessConditions: options?.conditions, + async setTags(tags, options = {}) { + return tracing_js_1.tracingClient.withSpan("BlobClient-setTags", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this.blobContext.setTags({ + abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, modifiedAccessConditions: { - ...options?.conditions, - ifTags: options?.conditions?.tagConditions + ...options.conditions, + ifTags: options.conditions?.tagConditions }, - prevsnapshot: prevSnapshotOrUrl, - range: (0, Range_js_1.rangeToString)({ - offset, - count - }), - marker, - maxPageSize: options?.maxPageSize, - tracingOptions: updatedOptions.tracingOptions + tracingOptions: updatedOptions.tracingOptions, + tags: (0, utils_common_js_1.toBlobTags)(tags) })); }); } /** - * Returns an AsyncIterableIterator for {@link PageBlobGetPageRangesDiffResponseModel} + * Gets the tags associated with the underlying blob. * + * @param options - + */ + async getTags(options = {}) { + return tracing_js_1.tracingClient.withSpan("BlobClient-getTags", options, async (updatedOptions) => { + const response = (0, utils_common_js_1.assertResponse)(await this.blobContext.getTags({ + abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: { + ...options.conditions, + ifTags: options.conditions?.tagConditions + }, + tracingOptions: updatedOptions.tracingOptions + })); + const wrappedResponse = { + ...response, + _response: response._response, + // _response is made non-enumerable + tags: (0, utils_common_js_1.toTags)({ blobTagSet: response.blobTagSet }) || {} + }; + return wrappedResponse; + }); + } + /** + * Get a {@link BlobLeaseClient} that manages leases on the blob. * - * @param offset - Starting byte position of the page ranges. - * @param count - Number of bytes to get. - * @param prevSnapshotOrUrl - Timestamp of snapshot to retrieve the difference or URL of snapshot to retrieve the difference. - * @param marker - A string value that identifies the portion of - * the get of page ranges to be returned with the next getting operation. The - * operation returns the ContinuationToken value within the response body if the - * getting operation did not return all page ranges remaining within the current page. - * The ContinuationToken value can be used as the value for - * the marker parameter in a subsequent call to request the next page of get - * items. The marker value is opaque to the client. - * @param options - Options to the Page Blob Get Page Ranges Diff operation. + * @param proposeLeaseId - Initial proposed lease Id. + * @returns A new BlobLeaseClient object for managing leases on the blob. 
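A short sketch of the tag round trip provided by `setTags` and `getTags`, under the same placeholder client setup as above; the tag keys and values are illustrative and must respect the limits described in the comment.

```ts
import { BlobServiceClient } from "@azure/storage-blob";
import { DefaultAzureCredential } from "@azure/identity";

const account = "<storage account>";
const blobServiceClient = new BlobServiceClient(
  `https://${account}.blob.core.windows.net`,
  new DefaultAzureCredential(),
);
const blobClient = blobServiceClient
  .getContainerClient("<container>")
  .getBlobClient("<blob>");

// At most 10 tags per blob; keys are 1-128 characters, values 0-256 characters.
await blobClient.setTags({ project: "demo", reviewed: "true" });

const { tags } = await blobClient.getTags();
console.log(tags.project); // "demo"
```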
*/ - async *listPageRangeDiffItemSegments(offset, count, prevSnapshotOrUrl, marker, options) { - let getPageRangeItemSegmentsResponse; - if (!!marker || marker === void 0) { - do { - getPageRangeItemSegmentsResponse = await this.listPageRangesDiffSegment(offset, count, prevSnapshotOrUrl, marker, options); - marker = getPageRangeItemSegmentsResponse.continuationToken; - yield await getPageRangeItemSegmentsResponse; - } while (marker); - } + getBlobLeaseClient(proposeLeaseId) { + return new BlobLeaseClient_js_1.BlobLeaseClient(this, proposeLeaseId); } /** - * Returns an AsyncIterableIterator of {@link PageRangeInfo} objects + * Creates a read-only snapshot of a blob. + * @see https://learn.microsoft.com/rest/api/storageservices/snapshot-blob * - * @param offset - Starting byte position of the page ranges. - * @param count - Number of bytes to get. - * @param prevSnapshotOrUrl - Timestamp of snapshot to retrieve the difference or URL of snapshot to retrieve the difference. - * @param options - Options to the Page Blob Get Page Ranges Diff operation. + * @param options - Optional options to the Blob Create Snapshot operation. */ - async *listPageRangeDiffItems(offset, count, prevSnapshotOrUrl, options) { - let marker; - for await (const getPageRangesSegment of this.listPageRangeDiffItemSegments(offset, count, prevSnapshotOrUrl, marker, options)) { - yield* (0, utils_common_js_1.ExtractPageRangeInfoItems)(getPageRangesSegment); - } + async createSnapshot(options = {}) { + options.conditions = options.conditions || {}; + (0, models_js_1.ensureCpkIfSpecified)(options.customerProvidedKey, this.isHttps); + return tracing_js_1.tracingClient.withSpan("BlobClient-createSnapshot", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this.blobContext.createSnapshot({ + abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + metadata: options.metadata, + modifiedAccessConditions: { + ...options.conditions, + ifTags: options.conditions?.tagConditions + }, + cpkInfo: options.customerProvidedKey, + encryptionScope: options.encryptionScope, + tracingOptions: updatedOptions.tracingOptions + })); + }); } /** - * Returns an async iterable iterator to list of page ranges that differ between a specified snapshot and this page blob. - * @see https://learn.microsoft.com/rest/api/storageservices/get-page-ranges + * Asynchronously copies a blob to a destination within the storage account. + * This method returns a long running operation poller that allows you to wait + * indefinitely until the copy is completed. + * You can also cancel a copy before it is completed by calling `cancelOperation` on the poller. + * Note that the onProgress callback will not be invoked if the operation completes in the first + * request, and attempting to cancel a completed copy will result in an error being thrown. * - * .byPage() returns an async iterable iterator to list of page ranges that differ between a specified snapshot and this page blob. + * In version 2012-02-12 and later, the source for a Copy Blob operation can be + * a committed blob in any Azure storage account. + * Beginning with version 2015-02-21, the source for a Copy Blob operation can be + * an Azure file in any Azure storage account. + * Only storage accounts created on or after June 7th, 2012 allow the Copy Blob + * operation to copy from another storage account. 
+ * @see https://learn.microsoft.com/rest/api/storageservices/copy-blob * - * ```ts snippet:ClientsListPageBlobsDiff + * ```ts snippet:ClientsBeginCopyFromURL * import { BlobServiceClient } from "@azure/storage-blob"; * import { DefaultAzureCredential } from "@azure/identity"; * @@ -86941,843 +86342,759 @@ var require_Clients = __commonJS({ * const containerName = ""; * const blobName = ""; * const containerClient = blobServiceClient.getContainerClient(containerName); - * const pageBlobClient = containerClient.getPageBlobClient(blobName); + * const blobClient = containerClient.getBlobClient(blobName); * - * const offset = 0; - * const count = 1024; - * const previousSnapshot = ""; - * // Example using `for await` syntax - * let i = 1; - * for await (const pageRange of pageBlobClient.listPageRangesDiff(offset, count, previousSnapshot)) { - * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); - * } + * // Example using automatic polling + * const automaticCopyPoller = await blobClient.beginCopyFromURL("url"); + * const automaticResult = await automaticCopyPoller.pollUntilDone(); * - * // Example using `iter.next()` syntax - * i = 1; - * const iter = pageBlobClient.listPageRangesDiff(offset, count, previousSnapshot); - * let { value, done } = await iter.next(); - * while (!done) { - * console.log(`Page range ${i++}: ${value.start} - ${value.end}`); - * ({ value, done } = await iter.next()); + * // Example using manual polling + * const manualCopyPoller = await blobClient.beginCopyFromURL("url"); + * while (!manualCopyPoller.isDone()) { + * await manualCopyPoller.poll(); * } + * const manualResult = manualCopyPoller.getResult(); * - * // Example using `byPage()` syntax - * i = 1; - * for await (const page of pageBlobClient - * .listPageRangesDiff(offset, count, previousSnapshot) - * .byPage({ maxPageSize: 20 })) { - * for (const pageRange of page.pageRange || []) { - * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); - * } - * } + * // Example using progress updates + * const progressUpdatesCopyPoller = await blobClient.beginCopyFromURL("url", { + * onProgress(state) { + * console.log(`Progress: ${state.copyProgress}`); + * }, + * }); + * const progressUpdatesResult = await progressUpdatesCopyPoller.pollUntilDone(); * - * // Example using paging with a marker - * i = 1; - * let iterator = pageBlobClient - * .listPageRangesDiff(offset, count, previousSnapshot) - * .byPage({ maxPageSize: 2 }); - * let response = (await iterator.next()).value; - * // Prints 2 page ranges - * if (response.pageRange) { - * for (const pageRange of response.pageRange) { - * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); - * } - * } - * // Gets next marker - * let marker = response.continuationToken; - * // Passing next marker as continuationToken - * iterator = pageBlobClient - * .listPageRangesDiff(offset, count, previousSnapshot) - * .byPage({ continuationToken: marker, maxPageSize: 10 }); - * response = (await iterator.next()).value; - * // Prints 10 page ranges - * if (response.pageRange) { - * for (const pageRange of response.pageRange) { - * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); + * // Example using a changing polling interval (default 15 seconds) + * const pollingIntervalCopyPoller = await blobClient.beginCopyFromURL("url", { + * intervalInMs: 1000, // poll blob every 1 second for copy progress + * }); + * const pollingIntervalResult = await pollingIntervalCopyPoller.pollUntilDone(); + * + * // Example 
using copy cancellation: + * const cancelCopyPoller = await blobClient.beginCopyFromURL("url"); + * // cancel operation after starting it. + * try { + * await cancelCopyPoller.cancelOperation(); + * // calls to get the result now throw PollerCancelledError + * cancelCopyPoller.getResult(); + * } catch (err: any) { + * if (err.name === "PollerCancelledError") { + * console.log("The copy was cancelled."); * } * } * ``` * - * @param offset - Starting byte position of the page ranges. - * @param count - Number of bytes to get. - * @param prevSnapshot - Timestamp of snapshot to retrieve the difference. - * @param options - Options to the Page Blob Get Ranges operation. - * @returns An asyncIterableIterator that supports paging. + * @param copySource - url to the source Azure Blob/File. + * @param options - Optional options to the Blob Start Copy From URL operation. */ - listPageRangesDiff(offset, count, prevSnapshot, options = {}) { - options.conditions = options.conditions || {}; - const iter = this.listPageRangeDiffItems(offset, count, prevSnapshot, { - ...options - }); - return { - /** - * The next method, part of the iteration protocol - */ - next() { - return iter.next(); - }, - /** - * The connection to the async iterator, part of the iteration protocol - */ - [Symbol.asyncIterator]() { - return this; - }, - /** - * Return an AsyncIterableIterator that works a page at a time - */ - byPage: (settings = {}) => { - return this.listPageRangeDiffItemSegments(offset, count, prevSnapshot, settings.continuationToken, { - maxPageSize: settings.maxPageSize, - ...options - }); - } + async beginCopyFromURL(copySource, options = {}) { + const client = { + abortCopyFromURL: (...args) => this.abortCopyFromURL(...args), + getProperties: (...args) => this.getProperties(...args), + startCopyFromURL: (...args) => this.startCopyFromURL(...args) }; + const poller = new BlobStartCopyFromUrlPoller_js_1.BlobBeginCopyFromUrlPoller({ + blobClient: client, + copySource, + intervalInMs: options.intervalInMs, + onProgress: options.onProgress, + resumeFrom: options.resumeFrom, + startCopyFromURLOptions: options + }); + await poller.poll(); + return poller; } /** - * Gets the collection of page ranges that differ between a specified snapshot and this page blob for managed disks. - * @see https://learn.microsoft.com/rest/api/storageservices/get-page-ranges + * Aborts a pending asynchronous Copy Blob operation, and leaves a destination blob with zero + * length and full metadata. Version 2012-02-12 and newer. + * @see https://learn.microsoft.com/rest/api/storageservices/abort-copy-blob * - * @param offset - Starting byte position of the page blob - * @param count - Number of bytes to get ranges diff. - * @param prevSnapshotUrl - URL of snapshot to retrieve the difference. - * @param options - Options to the Page Blob Get Page Ranges Diff operation. - * @returns Response data for the Page Blob Get Page Range Diff operation. + * @param copyId - Id of the Copy From URL operation. + * @param options - Optional options to the Blob Abort Copy From URL operation. 
*/ - async getPageRangesDiffForManagedDisks(offset, count, prevSnapshotUrl, options = {}) { - options.conditions = options.conditions || {}; - return tracing_js_1.tracingClient.withSpan("PageBlobClient-GetPageRangesDiffForManagedDisks", options, async (updatedOptions) => { - const response = (0, utils_common_js_1.assertResponse)(await this.pageBlobContext.getPageRangesDiff({ + async abortCopyFromURL(copyId, options = {}) { + return tracing_js_1.tracingClient.withSpan("BlobClient-abortCopyFromURL", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this.blobContext.abortCopyFromURL(copyId, { abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, - modifiedAccessConditions: { - ...options.conditions, - ifTags: options.conditions?.tagConditions - }, - prevSnapshotUrl, - range: (0, Range_js_1.rangeToString)({ offset, count }), tracingOptions: updatedOptions.tracingOptions })); - return (0, PageBlobRangeResponse_js_1.rangeResponseFromModel)(response); }); } /** - * Resizes the page blob to the specified size (which must be a multiple of 512). - * @see https://learn.microsoft.com/rest/api/storageservices/set-blob-properties + * The synchronous Copy From URL operation copies a blob or an internet resource to a new blob. It will not + * return a response until the copy is complete. + * @see https://learn.microsoft.com/rest/api/storageservices/copy-blob-from-url * - * @param size - Target size - * @param options - Options to the Page Blob Resize operation. - * @returns Response data for the Page Blob Resize operation. + * @param copySource - The source URL to copy from, Shared Access Signature(SAS) maybe needed for authentication + * @param options - */ - async resize(size, options = {}) { + async syncCopyFromURL(copySource, options = {}) { options.conditions = options.conditions || {}; - return tracing_js_1.tracingClient.withSpan("PageBlobClient-resize", options, async (updatedOptions) => { - return (0, utils_common_js_1.assertResponse)(await this.pageBlobContext.resize(size, { + options.sourceConditions = options.sourceConditions || {}; + return tracing_js_1.tracingClient.withSpan("BlobClient-syncCopyFromURL", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this.blobContext.copyFromURL(copySource, { abortSignal: options.abortSignal, + metadata: options.metadata, leaseAccessConditions: options.conditions, modifiedAccessConditions: { ...options.conditions, ifTags: options.conditions?.tagConditions }, + sourceModifiedAccessConditions: { + sourceIfMatch: options.sourceConditions?.ifMatch, + sourceIfModifiedSince: options.sourceConditions?.ifModifiedSince, + sourceIfNoneMatch: options.sourceConditions?.ifNoneMatch, + sourceIfUnmodifiedSince: options.sourceConditions?.ifUnmodifiedSince + }, + sourceContentMD5: options.sourceContentMD5, + copySourceAuthorization: (0, utils_common_js_1.httpAuthorizationToString)(options.sourceAuthorization), + tier: (0, models_js_1.toAccessTier)(options.tier), + blobTagsString: (0, utils_common_js_1.toBlobTagsString)(options.tags), + immutabilityPolicyExpiry: options.immutabilityPolicy?.expiriesOn, + immutabilityPolicyMode: options.immutabilityPolicy?.policyMode, + legalHold: options.legalHold, encryptionScope: options.encryptionScope, + copySourceTags: options.copySourceTags, + fileRequestIntent: options.sourceShareTokenIntent, tracingOptions: updatedOptions.tracingOptions })); }); } /** - * Sets a page blob's sequence number. 
- * @see https://learn.microsoft.com/rest/api/storageservices/set-blob-properties + * Sets the tier on a blob. The operation is allowed on a page blob in a premium + * storage account and on a block blob in a blob storage account (locally redundant + * storage only). A premium page blob's tier determines the allowed size, IOPS, + * and bandwidth of the blob. A block blob's tier determines Hot/Cool/Archive + * storage type. This operation does not update the blob's ETag. + * @see https://learn.microsoft.com/rest/api/storageservices/set-blob-tier * - * @param sequenceNumberAction - Indicates how the service should modify the blob's sequence number. - * @param sequenceNumber - Required if sequenceNumberAction is max or update - * @param options - Options to the Page Blob Update Sequence Number operation. - * @returns Response data for the Page Blob Update Sequence Number operation. + * @param tier - The tier to be set on the blob. Valid values are Hot, Cool, or Archive. + * @param options - Optional options to the Blob Set Tier operation. */ - async updateSequenceNumber(sequenceNumberAction, sequenceNumber, options = {}) { - options.conditions = options.conditions || {}; - return tracing_js_1.tracingClient.withSpan("PageBlobClient-updateSequenceNumber", options, async (updatedOptions) => { - return (0, utils_common_js_1.assertResponse)(await this.pageBlobContext.updateSequenceNumber(sequenceNumberAction, { + async setAccessTier(tier, options = {}) { + return tracing_js_1.tracingClient.withSpan("BlobClient-setAccessTier", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this.blobContext.setTier((0, models_js_1.toAccessTier)(tier), { abortSignal: options.abortSignal, - blobSequenceNumber: sequenceNumber, leaseAccessConditions: options.conditions, modifiedAccessConditions: { ...options.conditions, ifTags: options.conditions?.tagConditions }, + rehydratePriority: options.rehydratePriority, tracingOptions: updatedOptions.tracingOptions })); }); } - /** - * Begins an operation to start an incremental copy from one page blob's snapshot to this page blob. - * The snapshot is copied such that only the differential changes between the previously - * copied snapshot are transferred to the destination. - * The copied snapshots are complete copies of the original snapshot and can be read or copied from as usual. - * @see https://learn.microsoft.com/rest/api/storageservices/incremental-copy-blob - * @see https://learn.microsoft.com/azure/virtual-machines/windows/incremental-snapshots - * - * @param copySource - Specifies the name of the source page blob snapshot. For example, - * https://myaccount.blob.core.windows.net/mycontainer/myblob?snapshot= - * @param options - Options to the Page Blob Copy Incremental operation. - * @returns Response data for the Page Blob Copy Incremental operation. 
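A brief sketch of `setAccessTier` as documented above, again with a placeholder client; the tier names and the optional rehydrate priority are standard values used here only for illustration.

```ts
import { BlobServiceClient } from "@azure/storage-blob";
import { DefaultAzureCredential } from "@azure/identity";

const account = "<storage account>";
const blobServiceClient = new BlobServiceClient(
  `https://${account}.blob.core.windows.net`,
  new DefaultAzureCredential(),
);
const blobClient = blobServiceClient
  .getContainerClient("<container>")
  .getBlobClient("<blob>");

// Move a block blob to the Cool tier; this operation does not update the blob's ETag.
await blobClient.setAccessTier("Cool");

// When rehydrating out of Archive, an optional priority can be supplied.
await blobClient.setAccessTier("Hot", { rehydratePriority: "Standard" });
```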
- */ - async startCopyIncremental(copySource, options = {}) { - return tracing_js_1.tracingClient.withSpan("PageBlobClient-startCopyIncremental", options, async (updatedOptions) => { - return (0, utils_common_js_1.assertResponse)(await this.pageBlobContext.copyIncremental(copySource, { - abortSignal: options.abortSignal, - modifiedAccessConditions: { - ...options.conditions, - ifTags: options.conditions?.tagConditions - }, - tracingOptions: updatedOptions.tracingOptions - })); - }); - } - }; - exports2.PageBlobClient = PageBlobClient; - } -}); - -// node_modules/@azure/storage-blob/dist/commonjs/BatchUtils.js -var require_BatchUtils = __commonJS({ - "node_modules/@azure/storage-blob/dist/commonjs/BatchUtils.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.getBodyAsText = getBodyAsText; - exports2.utf8ByteLength = utf8ByteLength; - var utils_js_1 = require_utils7(); - var constants_js_1 = require_constants15(); - async function getBodyAsText(batchResponse) { - let buffer = Buffer.alloc(constants_js_1.BATCH_MAX_PAYLOAD_IN_BYTES); - const responseLength = await (0, utils_js_1.streamToBuffer2)(batchResponse.readableStreamBody, buffer); - buffer = buffer.slice(0, responseLength); - return buffer.toString(); - } - function utf8ByteLength(str2) { - return Buffer.byteLength(str2); - } - } -}); - -// node_modules/@azure/storage-blob/dist/commonjs/BatchResponseParser.js -var require_BatchResponseParser = __commonJS({ - "node_modules/@azure/storage-blob/dist/commonjs/BatchResponseParser.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.BatchResponseParser = void 0; - var core_rest_pipeline_1 = require_commonjs6(); - var core_http_compat_1 = require_commonjs9(); - var constants_js_1 = require_constants15(); - var BatchUtils_js_1 = require_BatchUtils(); - var log_js_1 = require_log5(); - var HTTP_HEADER_DELIMITER = ": "; - var SPACE_DELIMITER = " "; - var NOT_FOUND = -1; - var BatchResponseParser = class { - batchResponse; - responseBatchBoundary; - perResponsePrefix; - batchResponseEnding; - subRequests; - constructor(batchResponse, subRequests) { - if (!batchResponse || !batchResponse.contentType) { - throw new RangeError("batchResponse is malformed or doesn't contain valid content-type."); + async downloadToBuffer(param1, param2, param3, param4 = {}) { + let buffer; + let offset = 0; + let count = 0; + let options = param4; + if (param1 instanceof Buffer) { + buffer = param1; + offset = param2 || 0; + count = typeof param3 === "number" ? param3 : 0; + } else { + offset = typeof param1 === "number" ? param1 : 0; + count = typeof param2 === "number" ? param2 : 0; + options = param3 || {}; } - if (!subRequests || subRequests.size === 0) { - throw new RangeError("Invalid state: subRequests is not provided or size is 0."); + let blockSize = options.blockSize ?? 
0; + if (blockSize < 0) { + throw new RangeError("blockSize option must be >= 0"); } - this.batchResponse = batchResponse; - this.subRequests = subRequests; - this.responseBatchBoundary = this.batchResponse.contentType.split("=")[1]; - this.perResponsePrefix = `--${this.responseBatchBoundary}${constants_js_1.HTTP_LINE_ENDING}`; - this.batchResponseEnding = `--${this.responseBatchBoundary}--`; - } - // For example of response, please refer to https://learn.microsoft.com/rest/api/storageservices/blob-batch#response - async parseBatchResponse() { - if (this.batchResponse._response.status !== constants_js_1.HTTPURLConnection.HTTP_ACCEPTED) { - throw new Error(`Invalid state: batch request failed with status: '${this.batchResponse._response.status}'.`); + if (blockSize === 0) { + blockSize = constants_js_1.DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES; } - const responseBodyAsText = await (0, BatchUtils_js_1.getBodyAsText)(this.batchResponse); - const subResponses = responseBodyAsText.split(this.batchResponseEnding)[0].split(this.perResponsePrefix).slice(1); - const subResponseCount = subResponses.length; - if (subResponseCount !== this.subRequests.size && subResponseCount !== 1) { - throw new Error("Invalid state: sub responses' count is not equal to sub requests' count."); + if (offset < 0) { + throw new RangeError("offset option must be >= 0"); } - const deserializedSubResponses = new Array(subResponseCount); - let subResponsesSucceededCount = 0; - let subResponsesFailedCount = 0; - for (let index = 0; index < subResponseCount; index++) { - const subResponse = subResponses[index]; - const deserializedSubResponse = {}; - deserializedSubResponse.headers = (0, core_http_compat_1.toHttpHeadersLike)((0, core_rest_pipeline_1.createHttpHeaders)()); - const responseLines = subResponse.split(`${constants_js_1.HTTP_LINE_ENDING}`); - let subRespHeaderStartFound = false; - let subRespHeaderEndFound = false; - let subRespFailed = false; - let contentId = NOT_FOUND; - for (const responseLine of responseLines) { - if (!subRespHeaderStartFound) { - if (responseLine.startsWith(constants_js_1.HeaderConstants.CONTENT_ID)) { - contentId = parseInt(responseLine.split(HTTP_HEADER_DELIMITER)[1]); - } - if (responseLine.startsWith(constants_js_1.HTTP_VERSION_1_1)) { - subRespHeaderStartFound = true; - const tokens = responseLine.split(SPACE_DELIMITER); - deserializedSubResponse.status = parseInt(tokens[1]); - deserializedSubResponse.statusMessage = tokens.slice(2).join(SPACE_DELIMITER); - } - continue; - } - if (responseLine.trim() === "") { - if (!subRespHeaderEndFound) { - subRespHeaderEndFound = true; - } - continue; - } - if (!subRespHeaderEndFound) { - if (responseLine.indexOf(HTTP_HEADER_DELIMITER) === -1) { - throw new Error(`Invalid state: find non-empty line '${responseLine}' without HTTP header delimiter '${HTTP_HEADER_DELIMITER}'.`); - } - const tokens = responseLine.split(HTTP_HEADER_DELIMITER); - deserializedSubResponse.headers.set(tokens[0], tokens[1]); - if (tokens[0] === constants_js_1.HeaderConstants.X_MS_ERROR_CODE) { - deserializedSubResponse.errorCode = tokens[1]; - subRespFailed = true; - } - } else { - if (!deserializedSubResponse.bodyAsText) { - deserializedSubResponse.bodyAsText = ""; - } - deserializedSubResponse.bodyAsText += responseLine; + if (count && count <= 0) { + throw new RangeError("count option must be greater than 0"); + } + if (!options.conditions) { + options.conditions = {}; + } + return tracing_js_1.tracingClient.withSpan("BlobClient-downloadToBuffer", options, async (updatedOptions) => 
{ + if (!count) { + const response = await this.getProperties({ + ...options, + tracingOptions: updatedOptions.tracingOptions + }); + count = response.contentLength - offset; + if (count < 0) { + throw new RangeError(`offset ${offset} shouldn't be larger than blob size ${response.contentLength}`); } } - if (contentId !== NOT_FOUND && Number.isInteger(contentId) && contentId >= 0 && contentId < this.subRequests.size && deserializedSubResponses[contentId] === void 0) { - deserializedSubResponse._request = this.subRequests.get(contentId); - deserializedSubResponses[contentId] = deserializedSubResponse; - } else { - log_js_1.logger.error(`subResponses[${index}] is dropped as the Content-ID is not found or invalid, Content-ID: ${contentId}`); + if (!buffer) { + try { + buffer = Buffer.alloc(count); + } catch (error3) { + throw new Error(`Unable to allocate the buffer of size: ${count}(in bytes). Please try passing your own buffer to the "downloadToBuffer" method or try using other methods like "download" or "downloadToFile". ${error3.message}`); + } } - if (subRespFailed) { - subResponsesFailedCount++; - } else { - subResponsesSucceededCount++; + if (buffer.length < count) { + throw new RangeError(`The buffer's size should be equal to or larger than the request count of bytes: ${count}`); } - } - return { - subResponses: deserializedSubResponses, - subResponsesSucceededCount, - subResponsesFailedCount - }; - } - }; - exports2.BatchResponseParser = BatchResponseParser; - } -}); - -// node_modules/@azure/storage-blob/dist/commonjs/utils/Mutex.js -var require_Mutex = __commonJS({ - "node_modules/@azure/storage-blob/dist/commonjs/utils/Mutex.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.Mutex = void 0; - var MutexLockStatus; - (function(MutexLockStatus2) { - MutexLockStatus2[MutexLockStatus2["LOCKED"] = 0] = "LOCKED"; - MutexLockStatus2[MutexLockStatus2["UNLOCKED"] = 1] = "UNLOCKED"; - })(MutexLockStatus || (MutexLockStatus = {})); - var Mutex = class { - /** - * Lock for a specific key. If the lock has been acquired by another customer, then - * will wait until getting the lock. - * - * @param key - lock key - */ - static async lock(key) { - return new Promise((resolve6) => { - if (this.keys[key] === void 0 || this.keys[key] === MutexLockStatus.UNLOCKED) { - this.keys[key] = MutexLockStatus.LOCKED; - resolve6(); - } else { - this.onUnlockEvent(key, () => { - this.keys[key] = MutexLockStatus.LOCKED; - resolve6(); + let transferProgress = 0; + const batch = new Batch_js_1.Batch(options.concurrency); + for (let off = offset; off < offset + count; off = off + blockSize) { + batch.addOperation(async () => { + let chunkEnd = offset + count; + if (off + blockSize < chunkEnd) { + chunkEnd = off + blockSize; + } + const response = await this.download(off, chunkEnd - off, { + abortSignal: options.abortSignal, + conditions: options.conditions, + maxRetryRequests: options.maxRetryRequestsPerBlock, + customerProvidedKey: options.customerProvidedKey, + tracingOptions: updatedOptions.tracingOptions + }); + const stream2 = response.readableStreamBody; + await (0, utils_js_1.streamToBuffer)(stream2, buffer, off - offset, chunkEnd - offset); + transferProgress += chunkEnd - off; + if (options.onProgress) { + options.onProgress({ loadedBytes: transferProgress }); + } }); } + await batch.do(); + return buffer; }); } /** - * Unlock a key. + * ONLY AVAILABLE IN NODE.JS RUNTIME. * - * @param key - + * Downloads an Azure Blob to a local file. 
+ * Fails if the the given file path already exits. + * Offset and count are optional, pass 0 and undefined respectively to download the entire blob. + * + * @param filePath - + * @param offset - From which position of the block blob to download. + * @param count - How much data to be downloaded. Will download to the end when passing undefined. + * @param options - Options to Blob download options. + * @returns The response data for blob download operation, + * but with readableStreamBody set to undefined since its + * content is already read and written into a local file + * at the specified path. */ - static async unlock(key) { - return new Promise((resolve6) => { - if (this.keys[key] === MutexLockStatus.LOCKED) { - this.emitUnlockEvent(key); + async downloadToFile(filePath, offset = 0, count, options = {}) { + return tracing_js_1.tracingClient.withSpan("BlobClient-downloadToFile", options, async (updatedOptions) => { + const response = await this.download(offset, count, { + ...options, + tracingOptions: updatedOptions.tracingOptions + }); + if (response.readableStreamBody) { + await (0, utils_js_1.readStreamToLocalFile)(response.readableStreamBody, filePath); } - delete this.keys[key]; - resolve6(); + response.blobDownloadStream = void 0; + return response; }); } - static keys = {}; - static listeners = {}; - static onUnlockEvent(key, handler2) { - if (this.listeners[key] === void 0) { - this.listeners[key] = [handler2]; - } else { - this.listeners[key].push(handler2); - } - } - static emitUnlockEvent(key) { - if (this.listeners[key] !== void 0 && this.listeners[key].length > 0) { - const handler2 = this.listeners[key].shift(); - setImmediate(() => { - handler2.call(this); - }); + getBlobAndContainerNamesFromUrl() { + let containerName; + let blobName; + try { + const parsedUrl = new URL(this.url); + if (parsedUrl.host.split(".")[1] === "blob") { + const pathComponents = parsedUrl.pathname.match("/([^/]*)(/(.*))?"); + containerName = pathComponents[1]; + blobName = pathComponents[3]; + } else if ((0, utils_common_js_1.isIpEndpointStyle)(parsedUrl)) { + const pathComponents = parsedUrl.pathname.match("/([^/]*)/([^/]*)(/(.*))?"); + containerName = pathComponents[2]; + blobName = pathComponents[4]; + } else { + const pathComponents = parsedUrl.pathname.match("/([^/]*)(/(.*))?"); + containerName = pathComponents[1]; + blobName = pathComponents[3]; + } + containerName = decodeURIComponent(containerName); + blobName = decodeURIComponent(blobName); + blobName = blobName.replace(/\\/g, "/"); + if (!containerName) { + throw new Error("Provided containerName is invalid."); + } + return { blobName, containerName }; + } catch (error3) { + throw new Error("Unable to extract blobName and containerName with provided information."); } } - }; - exports2.Mutex = Mutex; - } -}); - -// node_modules/@azure/storage-blob/dist/commonjs/BlobBatch.js -var require_BlobBatch = __commonJS({ - "node_modules/@azure/storage-blob/dist/commonjs/BlobBatch.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.BlobBatch = void 0; - var core_util_1 = require_commonjs4(); - var core_auth_1 = require_commonjs7(); - var core_rest_pipeline_1 = require_commonjs6(); - var core_util_2 = require_commonjs4(); - var AnonymousCredential_js_1 = require_AnonymousCredential(); - var Clients_js_1 = require_Clients(); - var Mutex_js_1 = require_Mutex(); - var Pipeline_js_1 = require_Pipeline(); - var utils_common_js_1 = require_utils_common(); - var core_xml_1 = require_commonjs10(); - 
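A possible call to the download helpers implemented above, with placeholder names; the block size, concurrency, and progress callback are illustrative choices rather than required settings, and `downloadToFile` is available only in Node.js.

```ts
import { BlobServiceClient } from "@azure/storage-blob";
import { DefaultAzureCredential } from "@azure/identity";

const account = "<storage account>";
const blobServiceClient = new BlobServiceClient(
  `https://${account}.blob.core.windows.net`,
  new DefaultAzureCredential(),
);
const blobClient = blobServiceClient
  .getContainerClient("<container>")
  .getBlobClient("<blob>");

// Download the whole blob into memory, 4 MiB per block, with up to 4 parallel requests.
const buffer = await blobClient.downloadToBuffer(0, undefined, {
  blockSize: 4 * 1024 * 1024,
  concurrency: 4,
  onProgress: (progress) => console.log(`loaded ${progress.loadedBytes} bytes`),
});
console.log(buffer.length);

// Or stream the blob straight to a local file (Node.js only).
await blobClient.downloadToFile("./blob-copy.bin");
```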
var constants_js_1 = require_constants15(); - var StorageSharedKeyCredential_js_1 = require_StorageSharedKeyCredential(); - var tracing_js_1 = require_tracing(); - var core_client_1 = require_commonjs8(); - var StorageSharedKeyCredentialPolicyV2_js_1 = require_StorageSharedKeyCredentialPolicyV22(); - var BlobBatch = class { - batchRequest; - batch = "batch"; - batchType; - constructor() { - this.batchRequest = new InnerBatchRequest(); - } - /** - * Get the value of Content-Type for a batch request. - * The value must be multipart/mixed with a batch boundary. - * Example: multipart/mixed; boundary=batch_a81786c8-e301-4e42-a729-a32ca24ae252 - */ - getMultiPartContentType() { - return this.batchRequest.getMultipartContentType(); - } /** - * Get assembled HTTP request body for sub requests. + * Asynchronously copies a blob to a destination within the storage account. + * In version 2012-02-12 and later, the source for a Copy Blob operation can be + * a committed blob in any Azure storage account. + * Beginning with version 2015-02-21, the source for a Copy Blob operation can be + * an Azure file in any Azure storage account. + * Only storage accounts created on or after June 7th, 2012 allow the Copy Blob + * operation to copy from another storage account. + * @see https://learn.microsoft.com/rest/api/storageservices/copy-blob + * + * @param copySource - url to the source Azure Blob/File. + * @param options - Optional options to the Blob Start Copy From URL operation. */ - getHttpRequestBody() { - return this.batchRequest.getHttpRequestBody(); + async startCopyFromURL(copySource, options = {}) { + return tracing_js_1.tracingClient.withSpan("BlobClient-startCopyFromURL", options, async (updatedOptions) => { + options.conditions = options.conditions || {}; + options.sourceConditions = options.sourceConditions || {}; + return (0, utils_common_js_1.assertResponse)(await this.blobContext.startCopyFromURL(copySource, { + abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + metadata: options.metadata, + modifiedAccessConditions: { + ...options.conditions, + ifTags: options.conditions?.tagConditions + }, + sourceModifiedAccessConditions: { + sourceIfMatch: options.sourceConditions.ifMatch, + sourceIfModifiedSince: options.sourceConditions.ifModifiedSince, + sourceIfNoneMatch: options.sourceConditions.ifNoneMatch, + sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince, + sourceIfTags: options.sourceConditions.tagConditions + }, + immutabilityPolicyExpiry: options.immutabilityPolicy?.expiriesOn, + immutabilityPolicyMode: options.immutabilityPolicy?.policyMode, + legalHold: options.legalHold, + rehydratePriority: options.rehydratePriority, + tier: (0, models_js_1.toAccessTier)(options.tier), + blobTagsString: (0, utils_common_js_1.toBlobTagsString)(options.tags), + sealBlob: options.sealBlob, + tracingOptions: updatedOptions.tracingOptions + })); + }); } /** - * Get sub requests that are added into the batch request. + * Only available for BlobClient constructed with a shared key credential. + * + * Generates a Blob Service Shared Access Signature (SAS) URI based on the client properties + * and parameters passed in. The SAS is signed by the shared key credential of the client. + * + * @see https://learn.microsoft.com/rest/api/storageservices/constructing-a-service-sas + * + * @param options - Optional parameters. + * @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. 
*/ - getSubRequests() { - return this.batchRequest.getSubRequests(); - } - async addSubRequestInternal(subRequest, assembleSubRequestFunc) { - await Mutex_js_1.Mutex.lock(this.batch); - try { - this.batchRequest.preAddSubRequest(subRequest); - await assembleSubRequestFunc(); - this.batchRequest.postAddSubRequest(subRequest); - } finally { - await Mutex_js_1.Mutex.unlock(this.batch); - } - } - setBatchType(batchType) { - if (!this.batchType) { - this.batchType = batchType; - } - if (this.batchType !== batchType) { - throw new RangeError(`BlobBatch only supports one operation type per batch and it already is being used for ${this.batchType} operations.`); - } - } - async deleteBlob(urlOrBlobClient, credentialOrOptions, options) { - let url2; - let credential; - if (typeof urlOrBlobClient === "string" && (core_util_2.isNodeLike && credentialOrOptions instanceof StorageSharedKeyCredential_js_1.StorageSharedKeyCredential || credentialOrOptions instanceof AnonymousCredential_js_1.AnonymousCredential || (0, core_auth_1.isTokenCredential)(credentialOrOptions))) { - url2 = urlOrBlobClient; - credential = credentialOrOptions; - } else if (urlOrBlobClient instanceof Clients_js_1.BlobClient) { - url2 = urlOrBlobClient.url; - credential = urlOrBlobClient.credential; - options = credentialOrOptions; - } else { - throw new RangeError("Invalid arguments. Either url and credential, or BlobClient need be provided."); - } - if (!options) { - options = {}; - } - return tracing_js_1.tracingClient.withSpan("BatchDeleteRequest-addSubRequest", options, async (updatedOptions) => { - this.setBatchType("delete"); - await this.addSubRequestInternal({ - url: url2, - credential - }, async () => { - await new Clients_js_1.BlobClient(url2, this.batchRequest.createPipeline(credential)).delete(updatedOptions); - }); - }); - } - async setBlobAccessTier(urlOrBlobClient, credentialOrTier, tierOrOptions, options) { - let url2; - let credential; - let tier; - if (typeof urlOrBlobClient === "string" && (core_util_2.isNodeLike && credentialOrTier instanceof StorageSharedKeyCredential_js_1.StorageSharedKeyCredential || credentialOrTier instanceof AnonymousCredential_js_1.AnonymousCredential || (0, core_auth_1.isTokenCredential)(credentialOrTier))) { - url2 = urlOrBlobClient; - credential = credentialOrTier; - tier = tierOrOptions; - } else if (urlOrBlobClient instanceof Clients_js_1.BlobClient) { - url2 = urlOrBlobClient.url; - credential = urlOrBlobClient.credential; - tier = credentialOrTier; - options = tierOrOptions; - } else { - throw new RangeError("Invalid arguments. 
Either url and credential, or BlobClient need be provided."); - } - if (!options) { - options = {}; - } - return tracing_js_1.tracingClient.withSpan("BatchSetTierRequest-addSubRequest", options, async (updatedOptions) => { - this.setBatchType("setAccessTier"); - await this.addSubRequestInternal({ - url: url2, - credential - }, async () => { - await new Clients_js_1.BlobClient(url2, this.batchRequest.createPipeline(credential)).setAccessTier(tier, updatedOptions); - }); + generateSasUrl(options) { + return new Promise((resolve6) => { + if (!(this.credential instanceof StorageSharedKeyCredential_js_1.StorageSharedKeyCredential)) { + throw new RangeError("Can only generate the SAS when the client is initialized with a shared key credential"); + } + const sas = (0, BlobSASSignatureValues_js_1.generateBlobSASQueryParameters)({ + containerName: this._containerName, + blobName: this._name, + snapshotTime: this._snapshot, + versionId: this._versionId, + ...options + }, this.credential).toString(); + resolve6((0, utils_common_js_1.appendToURLQuery)(this.url, sas)); }); } - }; - exports2.BlobBatch = BlobBatch; - var InnerBatchRequest = class { - operationCount; - body; - subRequests; - boundary; - subRequestPrefix; - multipartContentType; - batchRequestEnding; - constructor() { - this.operationCount = 0; - this.body = ""; - const tempGuid = (0, core_util_1.randomUUID)(); - this.boundary = `batch_${tempGuid}`; - this.subRequestPrefix = `--${this.boundary}${constants_js_1.HTTP_LINE_ENDING}${constants_js_1.HeaderConstants.CONTENT_TYPE}: application/http${constants_js_1.HTTP_LINE_ENDING}${constants_js_1.HeaderConstants.CONTENT_TRANSFER_ENCODING}: binary`; - this.multipartContentType = `multipart/mixed; boundary=${this.boundary}`; - this.batchRequestEnding = `--${this.boundary}--`; - this.subRequests = /* @__PURE__ */ new Map(); - } /** - * Create pipeline to assemble sub requests. The idea here is to use existing - * credential and serialization/deserialization components, with additional policies to - * filter unnecessary headers, assemble sub requests into request's body - * and intercept request from going to wire. - * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used. + * Only available for BlobClient constructed with a shared key credential. + * + * Generates string to sign for a Blob Service Shared Access Signature (SAS) URI based on + * the client properties and parameters passed in. The SAS is signed by the shared key credential of the client. + * + * @see https://learn.microsoft.com/rest/api/storageservices/constructing-a-service-sas + * + * @param options - Optional parameters. + * @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. 
*/ - createPipeline(credential) { - const corePipeline = (0, core_rest_pipeline_1.createEmptyPipeline)(); - corePipeline.addPolicy((0, core_client_1.serializationPolicy)({ - stringifyXML: core_xml_1.stringifyXML, - serializerOptions: { - xml: { - xmlCharKey: "#" - } - } - }), { phase: "Serialize" }); - corePipeline.addPolicy(batchHeaderFilterPolicy()); - corePipeline.addPolicy(batchRequestAssemblePolicy(this), { afterPhase: "Sign" }); - if ((0, core_auth_1.isTokenCredential)(credential)) { - corePipeline.addPolicy((0, core_rest_pipeline_1.bearerTokenAuthenticationPolicy)({ - credential, - scopes: constants_js_1.StorageOAuthScopes, - challengeCallbacks: { authorizeRequestOnChallenge: core_client_1.authorizeRequestOnTenantChallenge } - }), { phase: "Sign" }); - } else if (credential instanceof StorageSharedKeyCredential_js_1.StorageSharedKeyCredential) { - corePipeline.addPolicy((0, StorageSharedKeyCredentialPolicyV2_js_1.storageSharedKeyCredentialPolicy)({ - accountName: credential.accountName, - accountKey: credential.accountKey - }), { phase: "Sign" }); + /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ + generateSasStringToSign(options) { + if (!(this.credential instanceof StorageSharedKeyCredential_js_1.StorageSharedKeyCredential)) { + throw new RangeError("Can only generate the SAS when the client is initialized with a shared key credential"); } - const pipeline = new Pipeline_js_1.Pipeline([]); - pipeline._credential = credential; - pipeline._corePipeline = corePipeline; - return pipeline; + return (0, BlobSASSignatureValues_js_1.generateBlobSASQueryParametersInternal)({ + containerName: this._containerName, + blobName: this._name, + snapshotTime: this._snapshot, + versionId: this._versionId, + ...options + }, this.credential).stringToSign; } - appendSubRequestToBody(request2) { - this.body += [ - this.subRequestPrefix, - // sub request constant prefix - `${constants_js_1.HeaderConstants.CONTENT_ID}: ${this.operationCount}`, - // sub request's content ID - "", - // empty line after sub request's content ID - `${request2.method.toString()} ${(0, utils_common_js_1.getURLPathAndQuery)(request2.url)} ${constants_js_1.HTTP_VERSION_1_1}${constants_js_1.HTTP_LINE_ENDING}` - // sub request start line with method - ].join(constants_js_1.HTTP_LINE_ENDING); - for (const [name, value] of request2.headers) { - this.body += `${name}: ${value}${constants_js_1.HTTP_LINE_ENDING}`; - } - this.body += constants_js_1.HTTP_LINE_ENDING; + /** + * + * Generates a Blob Service Shared Access Signature (SAS) URI based on + * the client properties and parameters passed in. The SAS is signed by the input user delegation key. + * + * @see https://learn.microsoft.com/rest/api/storageservices/constructing-a-service-sas + * + * @param options - Optional parameters. + * @param userDelegationKey - Return value of `blobServiceClient.getUserDelegationKey()` + * @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. 
+ */ + generateUserDelegationSasUrl(options, userDelegationKey) { + return new Promise((resolve6) => { + const sas = (0, BlobSASSignatureValues_js_1.generateBlobSASQueryParameters)({ + containerName: this._containerName, + blobName: this._name, + snapshotTime: this._snapshot, + versionId: this._versionId, + ...options + }, userDelegationKey, this.accountName).toString(); + resolve6((0, utils_common_js_1.appendToURLQuery)(this.url, sas)); + }); } - preAddSubRequest(subRequest) { - if (this.operationCount >= constants_js_1.BATCH_MAX_REQUEST) { - throw new RangeError(`Cannot exceed ${constants_js_1.BATCH_MAX_REQUEST} sub requests in a single batch`); - } - const path13 = (0, utils_common_js_1.getURLPath)(subRequest.url); - if (!path13 || path13 === "") { - throw new RangeError(`Invalid url for sub request: '${subRequest.url}'`); - } + /** + * Only available for BlobClient constructed with a shared key credential. + * + * Generates string to sign for a Blob Service Shared Access Signature (SAS) URI based on + * the client properties and parameters passed in. The SAS is signed by the input user delegation key. + * + * @see https://learn.microsoft.com/rest/api/storageservices/constructing-a-service-sas + * + * @param options - Optional parameters. + * @param userDelegationKey - Return value of `blobServiceClient.getUserDelegationKey()` + * @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. + */ + generateUserDelegationSasStringToSign(options, userDelegationKey) { + return (0, BlobSASSignatureValues_js_1.generateBlobSASQueryParametersInternal)({ + containerName: this._containerName, + blobName: this._name, + snapshotTime: this._snapshot, + versionId: this._versionId, + ...options + }, userDelegationKey, this.accountName).stringToSign; } - postAddSubRequest(subRequest) { - this.subRequests.set(this.operationCount, subRequest); - this.operationCount++; + /** + * Delete the immutablility policy on the blob. + * + * @param options - Optional options to delete immutability policy on the blob. + */ + async deleteImmutabilityPolicy(options = {}) { + return tracing_js_1.tracingClient.withSpan("BlobClient-deleteImmutabilityPolicy", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this.blobContext.deleteImmutabilityPolicy({ + tracingOptions: updatedOptions.tracingOptions + })); + }); } - // Return the http request body with assembling the ending line to the sub request body. - getHttpRequestBody() { - return `${this.body}${this.batchRequestEnding}${constants_js_1.HTTP_LINE_ENDING}`; + /** + * Set immutability policy on the blob. + * + * @param options - Optional options to set immutability policy on the blob. + */ + async setImmutabilityPolicy(immutabilityPolicy, options = {}) { + return tracing_js_1.tracingClient.withSpan("BlobClient-setImmutabilityPolicy", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this.blobContext.setImmutabilityPolicy({ + immutabilityPolicyExpiry: immutabilityPolicy.expiriesOn, + immutabilityPolicyMode: immutabilityPolicy.policyMode, + tracingOptions: updatedOptions.tracingOptions + })); + }); } - getMultipartContentType() { - return this.multipartContentType; + /** + * Set legal hold on the blob. + * + * @param options - Optional options to set legal hold on the blob. 
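A sketch of `generateSasUrl` under the shared key requirement stated above; the account name, key, container, and blob are placeholders, and the permission string and expiry are example values.

```ts
import {
  BlobServiceClient,
  StorageSharedKeyCredential,
  BlobSASPermissions,
} from "@azure/storage-blob";

const account = "<storage account>";
const accountKey = "<account key>";
const sharedKeyCredential = new StorageSharedKeyCredential(account, accountKey);
const blobServiceClient = new BlobServiceClient(
  `https://${account}.blob.core.windows.net`,
  sharedKeyCredential,
);
const blobClient = blobServiceClient
  .getContainerClient("<container>")
  .getBlobClient("<blob>");

// Read-only SAS URL valid for one hour; throws if the client was not built with a shared key credential.
const sasUrl = await blobClient.generateSasUrl({
  permissions: BlobSASPermissions.parse("r"),
  expiresOn: new Date(Date.now() + 60 * 60 * 1000),
});
console.log(sasUrl);
```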
+ */ + async setLegalHold(legalHoldEnabled, options = {}) { + return tracing_js_1.tracingClient.withSpan("BlobClient-setLegalHold", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this.blobContext.setLegalHold(legalHoldEnabled, { + tracingOptions: updatedOptions.tracingOptions + })); + }); } - getSubRequests() { - return this.subRequests; + /** + * The Get Account Information operation returns the sku name and account kind + * for the specified account. + * The Get Account Information operation is available on service versions beginning + * with version 2018-03-28. + * @see https://learn.microsoft.com/rest/api/storageservices/get-account-information + * + * @param options - Options to the Service Get Account Info operation. + * @returns Response data for the Service Get Account Info operation. + */ + async getAccountInfo(options = {}) { + return tracing_js_1.tracingClient.withSpan("BlobClient-getAccountInfo", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this.blobContext.getAccountInfo({ + abortSignal: options.abortSignal, + tracingOptions: updatedOptions.tracingOptions + })); + }); } }; - function batchRequestAssemblePolicy(batchRequest) { - return { - name: "batchRequestAssemblePolicy", - async sendRequest(request2) { - batchRequest.appendSubRequestToBody(request2); - return { - request: request2, - status: 200, - headers: (0, core_rest_pipeline_1.createHttpHeaders)() - }; - } - }; - } - function batchHeaderFilterPolicy() { - return { - name: "batchHeaderFilterPolicy", - async sendRequest(request2, next) { - let xMsHeaderName = ""; - for (const [name] of request2.headers) { - if ((0, utils_common_js_1.iEqual)(name, constants_js_1.HeaderConstants.X_MS_VERSION)) { - xMsHeaderName = name; - } - } - if (xMsHeaderName !== "") { - request2.headers.delete(xMsHeaderName); - } - return next(request2); - } - }; - } - } -}); - -// node_modules/@azure/storage-blob/dist/commonjs/BlobBatchClient.js -var require_BlobBatchClient = __commonJS({ - "node_modules/@azure/storage-blob/dist/commonjs/BlobBatchClient.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.BlobBatchClient = void 0; - var BatchResponseParser_js_1 = require_BatchResponseParser(); - var BatchUtils_js_1 = require_BatchUtils(); - var BlobBatch_js_1 = require_BlobBatch(); - var tracing_js_1 = require_tracing(); - var AnonymousCredential_js_1 = require_AnonymousCredential(); - var StorageContextClient_js_1 = require_StorageContextClient(); - var Pipeline_js_1 = require_Pipeline(); - var utils_common_js_1 = require_utils_common(); - var BlobBatchClient = class { - serviceOrContainerContext; - constructor(url2, credentialOrPipeline, options) { + exports2.BlobClient = BlobClient; + var AppendBlobClient = class _AppendBlobClient extends BlobClient { + /** + * appendBlobsContext provided by protocol layer. 
+ */ + appendBlobContext; + constructor(urlOrConnectionString, credentialOrPipelineOrContainerName, blobNameOrOptions, options) { let pipeline; - if ((0, Pipeline_js_1.isPipelineLike)(credentialOrPipeline)) { - pipeline = credentialOrPipeline; - } else if (!credentialOrPipeline) { + let url2; + options = options || {}; + if ((0, Pipeline_js_1.isPipelineLike)(credentialOrPipelineOrContainerName)) { + url2 = urlOrConnectionString; + pipeline = credentialOrPipelineOrContainerName; + } else if (core_util_1.isNodeLike && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential_js_1.StorageSharedKeyCredential || credentialOrPipelineOrContainerName instanceof AnonymousCredential_js_1.AnonymousCredential || (0, core_auth_1.isTokenCredential)(credentialOrPipelineOrContainerName)) { + url2 = urlOrConnectionString; + options = blobNameOrOptions; + pipeline = (0, Pipeline_js_1.newPipeline)(credentialOrPipelineOrContainerName, options); + } else if (!credentialOrPipelineOrContainerName && typeof credentialOrPipelineOrContainerName !== "string") { + url2 = urlOrConnectionString; pipeline = (0, Pipeline_js_1.newPipeline)(new AnonymousCredential_js_1.AnonymousCredential(), options); + } else if (credentialOrPipelineOrContainerName && typeof credentialOrPipelineOrContainerName === "string" && blobNameOrOptions && typeof blobNameOrOptions === "string") { + const containerName = credentialOrPipelineOrContainerName; + const blobName = blobNameOrOptions; + const extractedCreds = (0, utils_common_js_1.extractConnectionStringParts)(urlOrConnectionString); + if (extractedCreds.kind === "AccountConnString") { + if (core_util_1.isNodeLike) { + const sharedKeyCredential = new StorageSharedKeyCredential_js_1.StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); + url2 = (0, utils_common_js_1.appendToURLPath)((0, utils_common_js_1.appendToURLPath)(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)); + if (!options.proxyOptions) { + options.proxyOptions = (0, core_rest_pipeline_1.getDefaultProxySettings)(extractedCreds.proxyUri); + } + pipeline = (0, Pipeline_js_1.newPipeline)(sharedKeyCredential, options); + } else { + throw new Error("Account connection string is only supported in Node.js environment"); + } + } else if (extractedCreds.kind === "SASConnString") { + url2 = (0, utils_common_js_1.appendToURLPath)((0, utils_common_js_1.appendToURLPath)(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)) + "?" + extractedCreds.accountSas; + pipeline = (0, Pipeline_js_1.newPipeline)(new AnonymousCredential_js_1.AnonymousCredential(), options); + } else { + throw new Error("Connection string must be either an Account connection string or a SAS connection string"); + } } else { - pipeline = (0, Pipeline_js_1.newPipeline)(credentialOrPipeline, options); - } - const storageClientContext = new StorageContextClient_js_1.StorageContextClient(url2, (0, Pipeline_js_1.getCoreClientOptions)(pipeline)); - const path13 = (0, utils_common_js_1.getURLPath)(url2); - if (path13 && path13 !== "/") { - this.serviceOrContainerContext = storageClientContext.container; - } else { - this.serviceOrContainerContext = storageClientContext.service; + throw new Error("Expecting non-empty strings for containerName and blobName parameters"); } + super(url2, pipeline); + this.appendBlobContext = this.storageClientContext.appendBlob; } /** - * Creates a {@link BlobBatch}. - * A BlobBatch represents an aggregated set of operations on blobs. 
+ * Creates a new AppendBlobClient object identical to the source but with the + * specified snapshot timestamp. + * Provide "" will remove the snapshot and return a Client to the base blob. + * + * @param snapshot - The snapshot timestamp. + * @returns A new AppendBlobClient object identical to the source but with the specified snapshot timestamp. */ - createBatch() { - return new BlobBatch_js_1.BlobBatch(); - } - async deleteBlobs(urlsOrBlobClients, credentialOrOptions, options) { - const batch = new BlobBatch_js_1.BlobBatch(); - for (const urlOrBlobClient of urlsOrBlobClients) { - if (typeof urlOrBlobClient === "string") { - await batch.deleteBlob(urlOrBlobClient, credentialOrOptions, options); - } else { - await batch.deleteBlob(urlOrBlobClient, credentialOrOptions); - } - } - return this.submitBatch(batch); - } - async setBlobsAccessTier(urlsOrBlobClients, credentialOrTier, tierOrOptions, options) { - const batch = new BlobBatch_js_1.BlobBatch(); - for (const urlOrBlobClient of urlsOrBlobClients) { - if (typeof urlOrBlobClient === "string") { - await batch.setBlobAccessTier(urlOrBlobClient, credentialOrTier, tierOrOptions, options); - } else { - await batch.setBlobAccessTier(urlOrBlobClient, credentialOrTier, tierOrOptions); - } - } - return this.submitBatch(batch); + withSnapshot(snapshot) { + return new _AppendBlobClient((0, utils_common_js_1.setURLParameter)(this.url, constants_js_1.URLConstants.Parameters.SNAPSHOT, snapshot.length === 0 ? void 0 : snapshot), this.pipeline); } /** - * Submit batch request which consists of multiple subrequests. + * Creates a 0-length append blob. Call AppendBlock to append data to an append blob. + * @see https://learn.microsoft.com/rest/api/storageservices/put-blob + * + * @param options - Options to the Append Block Create operation. * - * Get `blobBatchClient` and other details before running the snippets. 
- * `blobServiceClient.getBlobBatchClient()` gives the `blobBatchClient` * * Example usage: * - * ```ts snippet:BlobBatchClientSubmitBatch + * ```ts snippet:ClientsCreateAppendBlob + * import { BlobServiceClient } from "@azure/storage-blob"; * import { DefaultAzureCredential } from "@azure/identity"; - * import { BlobServiceClient, BlobBatch } from "@azure/storage-blob"; * * const account = ""; - * const credential = new DefaultAzureCredential(); * const blobServiceClient = new BlobServiceClient( * `https://${account}.blob.core.windows.net`, - * credential, + * new DefaultAzureCredential(), * ); * * const containerName = ""; + * const blobName = ""; * const containerClient = blobServiceClient.getContainerClient(containerName); - * const blobBatchClient = containerClient.getBlobBatchClient(); * - * const batchRequest = new BlobBatch(); - * await batchRequest.deleteBlob("", credential); - * await batchRequest.deleteBlob("", credential, { - * deleteSnapshots: "include", - * }); - * const batchResp = await blobBatchClient.submitBatch(batchRequest); - * console.log(batchResp.subResponsesSucceededCount); + * const appendBlobClient = containerClient.getAppendBlobClient(blobName); + * await appendBlobClient.create(); * ``` + */ + async create(options = {}) { + options.conditions = options.conditions || {}; + (0, models_js_1.ensureCpkIfSpecified)(options.customerProvidedKey, this.isHttps); + return tracing_js_1.tracingClient.withSpan("AppendBlobClient-create", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this.appendBlobContext.create(0, { + abortSignal: options.abortSignal, + blobHttpHeaders: options.blobHTTPHeaders, + leaseAccessConditions: options.conditions, + metadata: options.metadata, + modifiedAccessConditions: { + ...options.conditions, + ifTags: options.conditions?.tagConditions + }, + cpkInfo: options.customerProvidedKey, + encryptionScope: options.encryptionScope, + immutabilityPolicyExpiry: options.immutabilityPolicy?.expiriesOn, + immutabilityPolicyMode: options.immutabilityPolicy?.policyMode, + legalHold: options.legalHold, + blobTagsString: (0, utils_common_js_1.toBlobTagsString)(options.tags), + tracingOptions: updatedOptions.tracingOptions + })); + }); + } + /** + * Creates a 0-length append blob. Call AppendBlock to append data to an append blob. + * If the blob with the same name already exists, the content of the existing blob will remain unchanged. + * @see https://learn.microsoft.com/rest/api/storageservices/put-blob * - * Example using a lease: + * @param options - + */ + async createIfNotExists(options = {}) { + const conditions = { ifNoneMatch: constants_js_1.ETagAny }; + return tracing_js_1.tracingClient.withSpan("AppendBlobClient-createIfNotExists", options, async (updatedOptions) => { + try { + const res = (0, utils_common_js_1.assertResponse)(await this.create({ + ...updatedOptions, + conditions + })); + return { + succeeded: true, + ...res, + _response: res._response + // _response is made non-enumerable + }; + } catch (e) { + if (e.details?.errorCode === "BlobAlreadyExists") { + return { + succeeded: false, + ...e.response?.parsedHeaders, + _response: e.response + }; + } + throw e; + } + }); + } + /** + * Seals the append blob, making it read only. 
* - * ```ts snippet:BlobBatchClientSubmitBatchWithLease + * @param options - + */ + async seal(options = {}) { + options.conditions = options.conditions || {}; + return tracing_js_1.tracingClient.withSpan("AppendBlobClient-seal", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this.appendBlobContext.seal({ + abortSignal: options.abortSignal, + appendPositionAccessConditions: options.conditions, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: { + ...options.conditions, + ifTags: options.conditions?.tagConditions + }, + tracingOptions: updatedOptions.tracingOptions + })); + }); + } + /** + * Commits a new block of data to the end of the existing append blob. + * @see https://learn.microsoft.com/rest/api/storageservices/append-block + * + * @param body - Data to be appended. + * @param contentLength - Length of the body in bytes. + * @param options - Options to the Append Block operation. + * + * + * Example usage: + * + * ```ts snippet:ClientsAppendBlock + * import { BlobServiceClient } from "@azure/storage-blob"; * import { DefaultAzureCredential } from "@azure/identity"; - * import { BlobServiceClient, BlobBatch } from "@azure/storage-blob"; * * const account = ""; - * const credential = new DefaultAzureCredential(); * const blobServiceClient = new BlobServiceClient( * `https://${account}.blob.core.windows.net`, - * credential, + * new DefaultAzureCredential(), * ); * * const containerName = ""; + * const blobName = ""; * const containerClient = blobServiceClient.getContainerClient(containerName); - * const blobBatchClient = containerClient.getBlobBatchClient(); - * const blobClient = containerClient.getBlobClient(""); * - * const batchRequest = new BlobBatch(); - * await batchRequest.setBlobAccessTier(blobClient, "Cool"); - * await batchRequest.setBlobAccessTier(blobClient, "Cool", { - * conditions: { leaseId: "" }, - * }); - * const batchResp = await blobBatchClient.submitBatch(batchRequest); - * console.log(batchResp.subResponsesSucceededCount); - * ``` + * const content = "Hello World!"; * - * @see https://learn.microsoft.com/rest/api/storageservices/blob-batch + * // Create a new append blob and append data to the blob. + * const newAppendBlobClient = containerClient.getAppendBlobClient(blobName); + * await newAppendBlobClient.create(); + * await newAppendBlobClient.appendBlock(content, content.length); * - * @param batchRequest - A set of Delete or SetTier operations. + * // Append data to an existing append blob. 
+ * const existingAppendBlobClient = containerClient.getAppendBlobClient(blobName); + * await existingAppendBlobClient.appendBlock(content, content.length); + * ``` + */ + async appendBlock(body, contentLength, options = {}) { + options.conditions = options.conditions || {}; + (0, models_js_1.ensureCpkIfSpecified)(options.customerProvidedKey, this.isHttps); + return tracing_js_1.tracingClient.withSpan("AppendBlobClient-appendBlock", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this.appendBlobContext.appendBlock(contentLength, body, { + abortSignal: options.abortSignal, + appendPositionAccessConditions: options.conditions, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: { + ...options.conditions, + ifTags: options.conditions?.tagConditions + }, + requestOptions: { + onUploadProgress: options.onProgress + }, + transactionalContentMD5: options.transactionalContentMD5, + transactionalContentCrc64: options.transactionalContentCrc64, + cpkInfo: options.customerProvidedKey, + encryptionScope: options.encryptionScope, + tracingOptions: updatedOptions.tracingOptions + })); + }); + } + /** + * The Append Block operation commits a new block of data to the end of an existing append blob + * where the contents are read from a source url. + * @see https://learn.microsoft.com/rest/api/storageservices/append-block-from-url + * + * @param sourceURL - + * The url to the blob that will be the source of the copy. A source blob in the same storage account can + * be authenticated via Shared Key. However, if the source is a blob in another account, the source blob + * must either be public or must be authenticated via a shared access signature. If the source blob is + * public, no authentication is required to perform the operation. 
+ * @param sourceOffset - Offset in source to be appended + * @param count - Number of bytes to be appended as a block * @param options - */ - async submitBatch(batchRequest, options = {}) { - if (!batchRequest || batchRequest.getSubRequests().size === 0) { - throw new RangeError("Batch request should contain one or more sub requests."); - } - return tracing_js_1.tracingClient.withSpan("BlobBatchClient-submitBatch", options, async (updatedOptions) => { - const batchRequestBody = batchRequest.getHttpRequestBody(); - const rawBatchResponse = (0, utils_common_js_1.assertResponse)(await this.serviceOrContainerContext.submitBatch((0, BatchUtils_js_1.utf8ByteLength)(batchRequestBody), batchRequest.getMultiPartContentType(), batchRequestBody, { - ...updatedOptions + async appendBlockFromURL(sourceURL, sourceOffset, count, options = {}) { + options.conditions = options.conditions || {}; + options.sourceConditions = options.sourceConditions || {}; + (0, models_js_1.ensureCpkIfSpecified)(options.customerProvidedKey, this.isHttps); + return tracing_js_1.tracingClient.withSpan("AppendBlobClient-appendBlockFromURL", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this.appendBlobContext.appendBlockFromUrl(sourceURL, 0, { + abortSignal: options.abortSignal, + sourceRange: (0, Range_js_1.rangeToString)({ offset: sourceOffset, count }), + sourceContentMD5: options.sourceContentMD5, + sourceContentCrc64: options.sourceContentCrc64, + leaseAccessConditions: options.conditions, + appendPositionAccessConditions: options.conditions, + modifiedAccessConditions: { + ...options.conditions, + ifTags: options.conditions?.tagConditions + }, + sourceModifiedAccessConditions: { + sourceIfMatch: options.sourceConditions?.ifMatch, + sourceIfModifiedSince: options.sourceConditions?.ifModifiedSince, + sourceIfNoneMatch: options.sourceConditions?.ifNoneMatch, + sourceIfUnmodifiedSince: options.sourceConditions?.ifUnmodifiedSince + }, + copySourceAuthorization: (0, utils_common_js_1.httpAuthorizationToString)(options.sourceAuthorization), + cpkInfo: options.customerProvidedKey, + encryptionScope: options.encryptionScope, + fileRequestIntent: options.sourceShareTokenIntent, + tracingOptions: updatedOptions.tracingOptions })); - const batchResponseParser = new BatchResponseParser_js_1.BatchResponseParser(rawBatchResponse, batchRequest.getSubRequests()); - const responseSummary = await batchResponseParser.parseBatchResponse(); - const res = { - _response: rawBatchResponse._response, - contentType: rawBatchResponse.contentType, - errorCode: rawBatchResponse.errorCode, - requestId: rawBatchResponse.requestId, - clientRequestId: rawBatchResponse.clientRequestId, - version: rawBatchResponse.version, - subResponses: responseSummary.subResponses, - subResponsesSucceededCount: responseSummary.subResponsesSucceededCount, - subResponsesFailedCount: responseSummary.subResponsesFailedCount - }; - return res; }); } }; - exports2.BlobBatchClient = BlobBatchClient; - } -}); - -// node_modules/@azure/storage-blob/dist/commonjs/ContainerClient.js -var require_ContainerClient = __commonJS({ - "node_modules/@azure/storage-blob/dist/commonjs/ContainerClient.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.ContainerClient = void 0; - var core_rest_pipeline_1 = require_commonjs6(); - var core_util_1 = require_commonjs4(); - var core_auth_1 = require_commonjs7(); - var AnonymousCredential_js_1 = require_AnonymousCredential(); - var 
StorageSharedKeyCredential_js_1 = require_StorageSharedKeyCredential(); - var Pipeline_js_1 = require_Pipeline(); - var StorageClient_js_1 = require_StorageClient(); - var tracing_js_1 = require_tracing(); - var utils_common_js_1 = require_utils_common(); - var BlobSASSignatureValues_js_1 = require_BlobSASSignatureValues(); - var BlobLeaseClient_js_1 = require_BlobLeaseClient(); - var Clients_js_1 = require_Clients(); - var BlobBatchClient_js_1 = require_BlobBatchClient(); - var ContainerClient = class extends StorageClient_js_1.StorageClient { + exports2.AppendBlobClient = AppendBlobClient; + var BlockBlobClient = class _BlockBlobClient extends BlobClient { /** - * containerContext provided by protocol layer. + * blobContext provided by protocol layer. + * + * Note. Ideally BlobClient should set BlobClient.blobContext to protected. However, API + * extractor has issue blocking that. Here we redecelare _blobContext in BlockBlobClient. */ - containerContext; - _containerName; + _blobContext; /** - * The name of the container. + * blockBlobContext provided by protocol layer. */ - get containerName() { - return this._containerName; - } - constructor(urlOrConnectionString, credentialOrPipelineOrContainerName, options) { + blockBlobContext; + constructor(urlOrConnectionString, credentialOrPipelineOrContainerName, blobNameOrOptions, options) { let pipeline; let url2; options = options || {}; @@ -87786,17 +87103,22 @@ var require_ContainerClient = __commonJS({ pipeline = credentialOrPipelineOrContainerName; } else if (core_util_1.isNodeLike && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential_js_1.StorageSharedKeyCredential || credentialOrPipelineOrContainerName instanceof AnonymousCredential_js_1.AnonymousCredential || (0, core_auth_1.isTokenCredential)(credentialOrPipelineOrContainerName)) { url2 = urlOrConnectionString; + options = blobNameOrOptions; pipeline = (0, Pipeline_js_1.newPipeline)(credentialOrPipelineOrContainerName, options); } else if (!credentialOrPipelineOrContainerName && typeof credentialOrPipelineOrContainerName !== "string") { url2 = urlOrConnectionString; + if (blobNameOrOptions && typeof blobNameOrOptions !== "string") { + options = blobNameOrOptions; + } pipeline = (0, Pipeline_js_1.newPipeline)(new AnonymousCredential_js_1.AnonymousCredential(), options); - } else if (credentialOrPipelineOrContainerName && typeof credentialOrPipelineOrContainerName === "string") { + } else if (credentialOrPipelineOrContainerName && typeof credentialOrPipelineOrContainerName === "string" && blobNameOrOptions && typeof blobNameOrOptions === "string") { const containerName = credentialOrPipelineOrContainerName; + const blobName = blobNameOrOptions; const extractedCreds = (0, utils_common_js_1.extractConnectionStringParts)(urlOrConnectionString); if (extractedCreds.kind === "AccountConnString") { if (core_util_1.isNodeLike) { const sharedKeyCredential = new StorageSharedKeyCredential_js_1.StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); - url2 = (0, utils_common_js_1.appendToURLPath)(extractedCreds.url, encodeURIComponent(containerName)); + url2 = (0, utils_common_js_1.appendToURLPath)((0, utils_common_js_1.appendToURLPath)(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)); if (!options.proxyOptions) { options.proxyOptions = (0, core_rest_pipeline_1.getDefaultProxySettings)(extractedCreds.proxyUri); } @@ -87805,30 +87127,37 @@ var require_ContainerClient = __commonJS({ throw new Error("Account 
connection string is only supported in Node.js environment"); } } else if (extractedCreds.kind === "SASConnString") { - url2 = (0, utils_common_js_1.appendToURLPath)(extractedCreds.url, encodeURIComponent(containerName)) + "?" + extractedCreds.accountSas; + url2 = (0, utils_common_js_1.appendToURLPath)((0, utils_common_js_1.appendToURLPath)(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)) + "?" + extractedCreds.accountSas; pipeline = (0, Pipeline_js_1.newPipeline)(new AnonymousCredential_js_1.AnonymousCredential(), options); } else { throw new Error("Connection string must be either an Account connection string or a SAS connection string"); } } else { - throw new Error("Expecting non-empty strings for containerName parameter"); + throw new Error("Expecting non-empty strings for containerName and blobName parameters"); } super(url2, pipeline); - this._containerName = this.getContainerNameFromUrl(); - this.containerContext = this.storageClientContext.container; + this.blockBlobContext = this.storageClientContext.blockBlob; + this._blobContext = this.storageClientContext.blob; } /** - * Creates a new container under the specified account. If the container with - * the same name already exists, the operation fails. - * @see https://learn.microsoft.com/rest/api/storageservices/create-container - * Naming rules: @see https://learn.microsoft.com/rest/api/storageservices/naming-and-referencing-containers--blobs--and-metadata + * Creates a new BlockBlobClient object identical to the source but with the + * specified snapshot timestamp. + * Provide "" will remove the snapshot and return a URL to the base blob. * - * @param options - Options to Container Create operation. + * @param snapshot - The snapshot timestamp. + * @returns A new BlockBlobClient object identical to the source but with the specified snapshot timestamp. + */ + withSnapshot(snapshot) { + return new _BlockBlobClient((0, utils_common_js_1.setURLParameter)(this.url, constants_js_1.URLConstants.Parameters.SNAPSHOT, snapshot.length === 0 ? void 0 : snapshot), this.pipeline); + } + /** + * ONLY AVAILABLE IN NODE.JS RUNTIME. * + * Quick query for a JSON or CSV formatted blob. * - * Example usage: + * Example usage (Node.js): * - * ```ts snippet:ContainerClientCreate + * ```ts snippet:ClientsQuery * import { BlobServiceClient } from "@azure/storage-blob"; * import { DefaultAzureCredential } from "@azure/identity"; * @@ -87839,94 +87168,83 @@ var require_ContainerClient = __commonJS({ * ); * * const containerName = ""; + * const blobName = ""; * const containerClient = blobServiceClient.getContainerClient(containerName); - * const createContainerResponse = await containerClient.create(); - * console.log("Container was created successfully", createContainerResponse.requestId); - * ``` - */ - async create(options = {}) { - return tracing_js_1.tracingClient.withSpan("ContainerClient-create", options, async (updatedOptions) => { - return (0, utils_common_js_1.assertResponse)(await this.containerContext.create(updatedOptions)); - }); - } - /** - * Creates a new container under the specified account. If the container with - * the same name already exists, it is not changed. 
- * @see https://learn.microsoft.com/rest/api/storageservices/create-container - * Naming rules: @see https://learn.microsoft.com/rest/api/storageservices/naming-and-referencing-containers--blobs--and-metadata + * const blockBlobClient = containerClient.getBlockBlobClient(blobName); * - * @param options - - */ - async createIfNotExists(options = {}) { - return tracing_js_1.tracingClient.withSpan("ContainerClient-createIfNotExists", options, async (updatedOptions) => { - try { - const res = await this.create(updatedOptions); - return { - succeeded: true, - ...res, - _response: res._response - // _response is made non-enumerable - }; - } catch (e) { - if (e.details?.errorCode === "ContainerAlreadyExists") { - return { - succeeded: false, - ...e.response?.parsedHeaders, - _response: e.response - }; - } else { - throw e; - } - } - }); - } - /** - * Returns true if the Azure container resource represented by this client exists; false otherwise. + * // Query and convert a blob to a string + * const queryBlockBlobResponse = await blockBlobClient.query("select from BlobStorage"); + * if (queryBlockBlobResponse.readableStreamBody) { + * const downloadedBuffer = await streamToBuffer(queryBlockBlobResponse.readableStreamBody); + * const downloaded = downloadedBuffer.toString(); + * console.log(`Query blob content: ${downloaded}`); + * } * - * NOTE: use this function with care since an existing container might be deleted by other clients or - * applications. Vice versa new containers with the same name might be added by other clients or - * applications after this function completes. + * async function streamToBuffer(readableStream: NodeJS.ReadableStream): Promise { + * return new Promise((resolve, reject) => { + * const chunks: Buffer[] = []; + * readableStream.on("data", (data) => { + * chunks.push(data instanceof Buffer ? 
data : Buffer.from(data)); + * }); + * readableStream.on("end", () => { + * resolve(Buffer.concat(chunks)); + * }); + * readableStream.on("error", reject); + * }); + * } + * ``` * + * @param query - * @param options - */ - async exists(options = {}) { - return tracing_js_1.tracingClient.withSpan("ContainerClient-exists", options, async (updatedOptions) => { - try { - await this.getProperties({ - abortSignal: options.abortSignal, - tracingOptions: updatedOptions.tracingOptions - }); - return true; - } catch (e) { - if (e.statusCode === 404) { - return false; - } - throw e; - } + async query(query, options = {}) { + (0, models_js_1.ensureCpkIfSpecified)(options.customerProvidedKey, this.isHttps); + if (!core_util_1.isNodeLike) { + throw new Error("This operation currently is only supported in Node.js."); + } + return tracing_js_1.tracingClient.withSpan("BlockBlobClient-query", options, async (updatedOptions) => { + const response = (0, utils_common_js_1.assertResponse)(await this._blobContext.query({ + abortSignal: options.abortSignal, + queryRequest: { + queryType: "SQL", + expression: query, + inputSerialization: (0, utils_common_js_1.toQuerySerialization)(options.inputTextConfiguration), + outputSerialization: (0, utils_common_js_1.toQuerySerialization)(options.outputTextConfiguration) + }, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: { + ...options.conditions, + ifTags: options.conditions?.tagConditions + }, + cpkInfo: options.customerProvidedKey, + tracingOptions: updatedOptions.tracingOptions + })); + return new BlobQueryResponse_js_1.BlobQueryResponse(response, { + abortSignal: options.abortSignal, + onProgress: options.onProgress, + onError: options.onError + }); }); } /** - * Creates a {@link BlobClient} - * - * @param blobName - A blob name - * @returns A new BlobClient object for the given blob name. - */ - getBlobClient(blobName) { - return new Clients_js_1.BlobClient((0, utils_common_js_1.appendToURLPath)(this.url, (0, utils_common_js_1.EscapePath)(blobName)), this.pipeline); - } - /** - * Creates an {@link AppendBlobClient} + * Creates a new block blob, or updates the content of an existing block blob. + * Updating an existing block blob overwrites any existing metadata on the blob. + * Partial updates are not supported; the content of the existing blob is + * overwritten with the new content. To perform a partial update of a block blob's, + * use {@link stageBlock} and {@link commitBlockList}. * - * @param blobName - An append blob name - */ - getAppendBlobClient(blobName) { - return new Clients_js_1.AppendBlobClient((0, utils_common_js_1.appendToURLPath)(this.url, (0, utils_common_js_1.EscapePath)(blobName)), this.pipeline); - } - /** - * Creates a {@link BlockBlobClient} + * This is a non-parallel uploading method, please use {@link uploadFile}, + * {@link uploadStream} or {@link uploadBrowserData} for better performance + * with concurrency uploading. * - * @param blobName - A block blob name + * @see https://learn.microsoft.com/rest/api/storageservices/put-blob * + * @param body - Blob, string, ArrayBuffer, ArrayBufferView or a function + * which returns a new Readable stream whose offset is from data source beginning. + * @param contentLength - Length of body in bytes. Use Buffer.byteLength() to calculate body length for a + * string including non non-Base64/Hex-encoded characters. + * @param options - Options to the Block Blob Upload operation. + * @returns Response data for the Block Blob Upload operation. 
* * Example usage: * @@ -87949,79 +87267,557 @@ var require_ContainerClient = __commonJS({ * const uploadBlobResponse = await blockBlobClient.upload(content, content.length); * ``` */ - getBlockBlobClient(blobName) { - return new Clients_js_1.BlockBlobClient((0, utils_common_js_1.appendToURLPath)(this.url, (0, utils_common_js_1.EscapePath)(blobName)), this.pipeline); + async upload(body, contentLength, options = {}) { + options.conditions = options.conditions || {}; + (0, models_js_1.ensureCpkIfSpecified)(options.customerProvidedKey, this.isHttps); + return tracing_js_1.tracingClient.withSpan("BlockBlobClient-upload", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this.blockBlobContext.upload(contentLength, body, { + abortSignal: options.abortSignal, + blobHttpHeaders: options.blobHTTPHeaders, + leaseAccessConditions: options.conditions, + metadata: options.metadata, + modifiedAccessConditions: { + ...options.conditions, + ifTags: options.conditions?.tagConditions + }, + requestOptions: { + onUploadProgress: options.onProgress + }, + cpkInfo: options.customerProvidedKey, + encryptionScope: options.encryptionScope, + immutabilityPolicyExpiry: options.immutabilityPolicy?.expiriesOn, + immutabilityPolicyMode: options.immutabilityPolicy?.policyMode, + legalHold: options.legalHold, + tier: (0, models_js_1.toAccessTier)(options.tier), + blobTagsString: (0, utils_common_js_1.toBlobTagsString)(options.tags), + tracingOptions: updatedOptions.tracingOptions + })); + }); + } + /** + * Creates a new Block Blob where the contents of the blob are read from a given URL. + * This API is supported beginning with the 2020-04-08 version. Partial updates + * are not supported with Put Blob from URL; the content of an existing blob is overwritten with + * the content of the new blob. To perform partial updates to a block blob’s contents using a + * source URL, use {@link stageBlockFromURL} and {@link commitBlockList}. + * + * @param sourceURL - Specifies the URL of the blob. The value + * may be a URL of up to 2 KB in length that specifies a blob. + * The value should be URL-encoded as it would appear + * in a request URI. The source blob must either be public + * or must be authenticated via a shared access signature. + * If the source blob is public, no authentication is required + * to perform the operation. Here are some examples of source object URLs: + * - https://myaccount.blob.core.windows.net/mycontainer/myblob + * - https://myaccount.blob.core.windows.net/mycontainer/myblob?snapshot= + * @param options - Optional parameters. 
+ */ + async syncUploadFromURL(sourceURL, options = {}) { + options.conditions = options.conditions || {}; + (0, models_js_1.ensureCpkIfSpecified)(options.customerProvidedKey, this.isHttps); + return tracing_js_1.tracingClient.withSpan("BlockBlobClient-syncUploadFromURL", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this.blockBlobContext.putBlobFromUrl(0, sourceURL, { + ...options, + blobHttpHeaders: options.blobHTTPHeaders, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: { + ...options.conditions, + ifTags: options.conditions?.tagConditions + }, + sourceModifiedAccessConditions: { + sourceIfMatch: options.sourceConditions?.ifMatch, + sourceIfModifiedSince: options.sourceConditions?.ifModifiedSince, + sourceIfNoneMatch: options.sourceConditions?.ifNoneMatch, + sourceIfUnmodifiedSince: options.sourceConditions?.ifUnmodifiedSince, + sourceIfTags: options.sourceConditions?.tagConditions + }, + cpkInfo: options.customerProvidedKey, + copySourceAuthorization: (0, utils_common_js_1.httpAuthorizationToString)(options.sourceAuthorization), + tier: (0, models_js_1.toAccessTier)(options.tier), + blobTagsString: (0, utils_common_js_1.toBlobTagsString)(options.tags), + copySourceTags: options.copySourceTags, + fileRequestIntent: options.sourceShareTokenIntent, + tracingOptions: updatedOptions.tracingOptions + })); + }); + } + /** + * Uploads the specified block to the block blob's "staging area" to be later + * committed by a call to commitBlockList. + * @see https://learn.microsoft.com/rest/api/storageservices/put-block + * + * @param blockId - A 64-byte value that is base64-encoded + * @param body - Data to upload to the staging area. + * @param contentLength - Number of bytes to upload. + * @param options - Options to the Block Blob Stage Block operation. + * @returns Response data for the Block Blob Stage Block operation. + */ + async stageBlock(blockId, body, contentLength, options = {}) { + (0, models_js_1.ensureCpkIfSpecified)(options.customerProvidedKey, this.isHttps); + return tracing_js_1.tracingClient.withSpan("BlockBlobClient-stageBlock", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this.blockBlobContext.stageBlock(blockId, contentLength, body, { + abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + requestOptions: { + onUploadProgress: options.onProgress + }, + transactionalContentMD5: options.transactionalContentMD5, + transactionalContentCrc64: options.transactionalContentCrc64, + cpkInfo: options.customerProvidedKey, + encryptionScope: options.encryptionScope, + tracingOptions: updatedOptions.tracingOptions + })); + }); + } + /** + * The Stage Block From URL operation creates a new block to be committed as part + * of a blob where the contents are read from a URL. + * This API is available starting in version 2018-03-28. + * @see https://learn.microsoft.com/rest/api/storageservices/put-block-from-url + * + * @param blockId - A 64-byte value that is base64-encoded + * @param sourceURL - Specifies the URL of the blob. The value + * may be a URL of up to 2 KB in length that specifies a blob. + * The value should be URL-encoded as it would appear + * in a request URI. The source blob must either be public + * or must be authenticated via a shared access signature. + * If the source blob is public, no authentication is required + * to perform the operation. 
Here are some examples of source object URLs: + * - https://myaccount.blob.core.windows.net/mycontainer/myblob + * - https://myaccount.blob.core.windows.net/mycontainer/myblob?snapshot= + * @param offset - From which position of the blob to download, greater than or equal to 0 + * @param count - How much data to be downloaded, greater than 0. Will download to the end when undefined + * @param options - Options to the Block Blob Stage Block From URL operation. + * @returns Response data for the Block Blob Stage Block From URL operation. + */ + async stageBlockFromURL(blockId, sourceURL, offset = 0, count, options = {}) { + (0, models_js_1.ensureCpkIfSpecified)(options.customerProvidedKey, this.isHttps); + return tracing_js_1.tracingClient.withSpan("BlockBlobClient-stageBlockFromURL", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this.blockBlobContext.stageBlockFromURL(blockId, 0, sourceURL, { + abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + sourceContentMD5: options.sourceContentMD5, + sourceContentCrc64: options.sourceContentCrc64, + sourceRange: offset === 0 && !count ? void 0 : (0, Range_js_1.rangeToString)({ offset, count }), + cpkInfo: options.customerProvidedKey, + encryptionScope: options.encryptionScope, + copySourceAuthorization: (0, utils_common_js_1.httpAuthorizationToString)(options.sourceAuthorization), + fileRequestIntent: options.sourceShareTokenIntent, + tracingOptions: updatedOptions.tracingOptions + })); + }); + } + /** + * Writes a blob by specifying the list of block IDs that make up the blob. + * In order to be written as part of a blob, a block must have been successfully written + * to the server in a prior {@link stageBlock} operation. You can call {@link commitBlockList} to + * update a blob by uploading only those blocks that have changed, then committing the new and existing + * blocks together. Any blocks not specified in the block list and permanently deleted. + * @see https://learn.microsoft.com/rest/api/storageservices/put-block-list + * + * @param blocks - Array of 64-byte value that is base64-encoded + * @param options - Options to the Block Blob Commit Block List operation. + * @returns Response data for the Block Blob Commit Block List operation. + */ + async commitBlockList(blocks, options = {}) { + options.conditions = options.conditions || {}; + (0, models_js_1.ensureCpkIfSpecified)(options.customerProvidedKey, this.isHttps); + return tracing_js_1.tracingClient.withSpan("BlockBlobClient-commitBlockList", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this.blockBlobContext.commitBlockList({ latest: blocks }, { + abortSignal: options.abortSignal, + blobHttpHeaders: options.blobHTTPHeaders, + leaseAccessConditions: options.conditions, + metadata: options.metadata, + modifiedAccessConditions: { + ...options.conditions, + ifTags: options.conditions?.tagConditions + }, + cpkInfo: options.customerProvidedKey, + encryptionScope: options.encryptionScope, + immutabilityPolicyExpiry: options.immutabilityPolicy?.expiriesOn, + immutabilityPolicyMode: options.immutabilityPolicy?.policyMode, + legalHold: options.legalHold, + tier: (0, models_js_1.toAccessTier)(options.tier), + blobTagsString: (0, utils_common_js_1.toBlobTagsString)(options.tags), + tracingOptions: updatedOptions.tracingOptions + })); + }); + } + /** + * Returns the list of blocks that have been uploaded as part of a block blob + * using the specified block list filter. 
+ * @see https://learn.microsoft.com/rest/api/storageservices/get-block-list + * + * @param listType - Specifies whether to return the list of committed blocks, + * the list of uncommitted blocks, or both lists together. + * @param options - Options to the Block Blob Get Block List operation. + * @returns Response data for the Block Blob Get Block List operation. + */ + async getBlockList(listType, options = {}) { + return tracing_js_1.tracingClient.withSpan("BlockBlobClient-getBlockList", options, async (updatedOptions) => { + const res = (0, utils_common_js_1.assertResponse)(await this.blockBlobContext.getBlockList(listType, { + abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: { + ...options.conditions, + ifTags: options.conditions?.tagConditions + }, + tracingOptions: updatedOptions.tracingOptions + })); + if (!res.committedBlocks) { + res.committedBlocks = []; + } + if (!res.uncommittedBlocks) { + res.uncommittedBlocks = []; + } + return res; + }); + } + // High level functions + /** + * Uploads a Buffer(Node.js)/Blob(browsers)/ArrayBuffer/ArrayBufferView object to a BlockBlob. + * + * When data length is no more than the specifiled {@link BlockBlobParallelUploadOptions.maxSingleShotSize} (default is + * {@link BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES}), this method will use 1 {@link upload} call to finish the upload. + * Otherwise, this method will call {@link stageBlock} to upload blocks, and finally call {@link commitBlockList} + * to commit the block list. + * + * A common {@link BlockBlobParallelUploadOptions.blobHTTPHeaders} option to set is + * `blobContentType`, enabling the browser to provide + * functionality based on file type. + * + * @param data - Buffer(Node.js), Blob, ArrayBuffer or ArrayBufferView + * @param options - + */ + async uploadData(data, options = {}) { + return tracing_js_1.tracingClient.withSpan("BlockBlobClient-uploadData", options, async (updatedOptions) => { + if (core_util_1.isNodeLike) { + let buffer; + if (data instanceof Buffer) { + buffer = data; + } else if (data instanceof ArrayBuffer) { + buffer = Buffer.from(data); + } else { + data = data; + buffer = Buffer.from(data.buffer, data.byteOffset, data.byteLength); + } + return this.uploadSeekableInternal((offset, size) => buffer.slice(offset, offset + size), buffer.byteLength, updatedOptions); + } else { + const browserBlob = new Blob([data]); + return this.uploadSeekableInternal((offset, size) => browserBlob.slice(offset, offset + size), browserBlob.size, updatedOptions); + } + }); + } + /** + * ONLY AVAILABLE IN BROWSERS. + * + * Uploads a browser Blob/File/ArrayBuffer/ArrayBufferView object to block blob. + * + * When buffer length lesser than or equal to 256MB, this method will use 1 upload call to finish the upload. + * Otherwise, this method will call {@link stageBlock} to upload blocks, and finally call + * {@link commitBlockList} to commit the block list. + * + * A common {@link BlockBlobParallelUploadOptions.blobHTTPHeaders} option to set is + * `blobContentType`, enabling the browser to provide + * functionality based on file type. + * + * @deprecated Use {@link uploadData} instead. + * + * @param browserData - Blob, File, ArrayBuffer or ArrayBufferView + * @param options - Options to upload browser data. + * @returns Response data for the Blob Upload operation. 
+ */ + async uploadBrowserData(browserData, options = {}) { + return tracing_js_1.tracingClient.withSpan("BlockBlobClient-uploadBrowserData", options, async (updatedOptions) => { + const browserBlob = new Blob([browserData]); + return this.uploadSeekableInternal((offset, size) => browserBlob.slice(offset, offset + size), browserBlob.size, updatedOptions); + }); + } + /** + * + * Uploads data to block blob. Requires a bodyFactory as the data source, + * which need to return a {@link HttpRequestBody} object with the offset and size provided. + * + * When data length is no more than the specified {@link BlockBlobParallelUploadOptions.maxSingleShotSize} (default is + * {@link BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES}), this method will use 1 {@link upload} call to finish the upload. + * Otherwise, this method will call {@link stageBlock} to upload blocks, and finally call {@link commitBlockList} + * to commit the block list. + * + * @param bodyFactory - + * @param size - size of the data to upload. + * @param options - Options to Upload to Block Blob operation. + * @returns Response data for the Blob Upload operation. + */ + async uploadSeekableInternal(bodyFactory, size, options = {}) { + let blockSize = options.blockSize ?? 0; + if (blockSize < 0 || blockSize > constants_js_1.BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES) { + throw new RangeError(`blockSize option must be >= 0 and <= ${constants_js_1.BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES}`); + } + const maxSingleShotSize = options.maxSingleShotSize ?? constants_js_1.BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES; + if (maxSingleShotSize < 0 || maxSingleShotSize > constants_js_1.BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES) { + throw new RangeError(`maxSingleShotSize option must be >= 0 and <= ${constants_js_1.BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES}`); + } + if (blockSize === 0) { + if (size > constants_js_1.BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES * constants_js_1.BLOCK_BLOB_MAX_BLOCKS) { + throw new RangeError(`${size} is too larger to upload to a block blob.`); + } + if (size > maxSingleShotSize) { + blockSize = Math.ceil(size / constants_js_1.BLOCK_BLOB_MAX_BLOCKS); + if (blockSize < constants_js_1.DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES) { + blockSize = constants_js_1.DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES; + } + } + } + if (!options.blobHTTPHeaders) { + options.blobHTTPHeaders = {}; + } + if (!options.conditions) { + options.conditions = {}; + } + return tracing_js_1.tracingClient.withSpan("BlockBlobClient-uploadSeekableInternal", options, async (updatedOptions) => { + if (size <= maxSingleShotSize) { + return (0, utils_common_js_1.assertResponse)(await this.upload(bodyFactory(0, size), size, updatedOptions)); + } + const numBlocks = Math.floor((size - 1) / blockSize) + 1; + if (numBlocks > constants_js_1.BLOCK_BLOB_MAX_BLOCKS) { + throw new RangeError(`The buffer's size is too big or the BlockSize is too small;the number of blocks must be <= ${constants_js_1.BLOCK_BLOB_MAX_BLOCKS}`); + } + const blockList = []; + const blockIDPrefix = (0, core_util_2.randomUUID)(); + let transferProgress = 0; + const batch = new Batch_js_1.Batch(options.concurrency); + for (let i = 0; i < numBlocks; i++) { + batch.addOperation(async () => { + const blockID = (0, utils_common_js_1.generateBlockID)(blockIDPrefix, i); + const start = blockSize * i; + const end = i === numBlocks - 1 ? 
size : start + blockSize; + const contentLength = end - start; + blockList.push(blockID); + await this.stageBlock(blockID, bodyFactory(start, contentLength), contentLength, { + abortSignal: options.abortSignal, + conditions: options.conditions, + encryptionScope: options.encryptionScope, + tracingOptions: updatedOptions.tracingOptions + }); + transferProgress += contentLength; + if (options.onProgress) { + options.onProgress({ + loadedBytes: transferProgress + }); + } + }); + } + await batch.do(); + return this.commitBlockList(blockList, updatedOptions); + }); } /** - * Creates a {@link PageBlobClient} + * ONLY AVAILABLE IN NODE.JS RUNTIME. * - * @param blobName - A page blob name + * Uploads a local file in blocks to a block blob. + * + * When file size lesser than or equal to 256MB, this method will use 1 upload call to finish the upload. + * Otherwise, this method will call stageBlock to upload blocks, and finally call commitBlockList + * to commit the block list. + * + * @param filePath - Full path of local file + * @param options - Options to Upload to Block Blob operation. + * @returns Response data for the Blob Upload operation. */ - getPageBlobClient(blobName) { - return new Clients_js_1.PageBlobClient((0, utils_common_js_1.appendToURLPath)(this.url, (0, utils_common_js_1.EscapePath)(blobName)), this.pipeline); + async uploadFile(filePath, options = {}) { + return tracing_js_1.tracingClient.withSpan("BlockBlobClient-uploadFile", options, async (updatedOptions) => { + const size = (await (0, utils_js_1.fsStat)(filePath)).size; + return this.uploadSeekableInternal((offset, count) => { + return () => (0, utils_js_1.fsCreateReadStream)(filePath, { + autoClose: true, + end: count ? offset + count - 1 : Infinity, + start: offset + }); + }, size, { + ...options, + tracingOptions: updatedOptions.tracingOptions + }); + }); } /** - * Returns all user-defined metadata and system properties for the specified - * container. The data returned does not include the container's list of blobs. - * @see https://learn.microsoft.com/rest/api/storageservices/get-container-properties + * ONLY AVAILABLE IN NODE.JS RUNTIME. * - * WARNING: The `metadata` object returned in the response will have its keys in lowercase, even if - * they originally contained uppercase characters. This differs from the metadata keys returned by - * the `listContainers` method of {@link BlobServiceClient} using the `includeMetadata` option, which - * will retain their original casing. + * Uploads a Node.js Readable stream into block blob. * - * @param options - Options to Container Get Properties operation. + * PERFORMANCE IMPROVEMENT TIPS: + * * Input stream highWaterMark is better to set a same value with bufferSize + * parameter, which will avoid Buffer.concat() operations. + * + * @param stream - Node.js Readable stream + * @param bufferSize - Size of every buffer allocated, also the block size in the uploaded block blob. Default value is 8MB + * @param maxConcurrency - Max concurrency indicates the max number of buffers that can be allocated, + * positive correlation with max uploading concurrency. Default value is 5 + * @param options - Options to Upload Stream to Block Blob operation. + * @returns Response data for the Blob Upload operation. 
*/ - async getProperties(options = {}) { + async uploadStream(stream2, bufferSize = constants_js_1.DEFAULT_BLOCK_BUFFER_SIZE_BYTES, maxConcurrency = 5, options = {}) { + if (!options.blobHTTPHeaders) { + options.blobHTTPHeaders = {}; + } if (!options.conditions) { options.conditions = {}; } - return tracing_js_1.tracingClient.withSpan("ContainerClient-getProperties", options, async (updatedOptions) => { - return (0, utils_common_js_1.assertResponse)(await this.containerContext.getProperties({ - abortSignal: options.abortSignal, - ...options.conditions, + return tracing_js_1.tracingClient.withSpan("BlockBlobClient-uploadStream", options, async (updatedOptions) => { + let blockNum = 0; + const blockIDPrefix = (0, core_util_2.randomUUID)(); + let transferProgress = 0; + const blockList = []; + const scheduler = new storage_common_1.BufferScheduler( + stream2, + bufferSize, + maxConcurrency, + async (body, length) => { + const blockID = (0, utils_common_js_1.generateBlockID)(blockIDPrefix, blockNum); + blockList.push(blockID); + blockNum++; + await this.stageBlock(blockID, body, length, { + customerProvidedKey: options.customerProvidedKey, + conditions: options.conditions, + encryptionScope: options.encryptionScope, + tracingOptions: updatedOptions.tracingOptions + }); + transferProgress += length; + if (options.onProgress) { + options.onProgress({ loadedBytes: transferProgress }); + } + }, + // concurrency should set a smaller value than maxConcurrency, which is helpful to + // reduce the possibility when a outgoing handler waits for stream data, in + // this situation, outgoing handlers are blocked. + // Outgoing queue shouldn't be empty. + Math.ceil(maxConcurrency / 4 * 3) + ); + await scheduler.do(); + return (0, utils_common_js_1.assertResponse)(await this.commitBlockList(blockList, { + ...options, tracingOptions: updatedOptions.tracingOptions })); }); } + }; + exports2.BlockBlobClient = BlockBlobClient; + var PageBlobClient = class _PageBlobClient extends BlobClient { /** - * Marks the specified container for deletion. The container and any blobs - * contained within it are later deleted during garbage collection. - * @see https://learn.microsoft.com/rest/api/storageservices/delete-container - * - * @param options - Options to Container Delete operation. + * pageBlobsContext provided by protocol layer. 
*/ - async delete(options = {}) { - if (!options.conditions) { - options.conditions = {}; + pageBlobContext; + constructor(urlOrConnectionString, credentialOrPipelineOrContainerName, blobNameOrOptions, options) { + let pipeline; + let url2; + options = options || {}; + if ((0, Pipeline_js_1.isPipelineLike)(credentialOrPipelineOrContainerName)) { + url2 = urlOrConnectionString; + pipeline = credentialOrPipelineOrContainerName; + } else if (core_util_1.isNodeLike && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential_js_1.StorageSharedKeyCredential || credentialOrPipelineOrContainerName instanceof AnonymousCredential_js_1.AnonymousCredential || (0, core_auth_1.isTokenCredential)(credentialOrPipelineOrContainerName)) { + url2 = urlOrConnectionString; + options = blobNameOrOptions; + pipeline = (0, Pipeline_js_1.newPipeline)(credentialOrPipelineOrContainerName, options); + } else if (!credentialOrPipelineOrContainerName && typeof credentialOrPipelineOrContainerName !== "string") { + url2 = urlOrConnectionString; + pipeline = (0, Pipeline_js_1.newPipeline)(new AnonymousCredential_js_1.AnonymousCredential(), options); + } else if (credentialOrPipelineOrContainerName && typeof credentialOrPipelineOrContainerName === "string" && blobNameOrOptions && typeof blobNameOrOptions === "string") { + const containerName = credentialOrPipelineOrContainerName; + const blobName = blobNameOrOptions; + const extractedCreds = (0, utils_common_js_1.extractConnectionStringParts)(urlOrConnectionString); + if (extractedCreds.kind === "AccountConnString") { + if (core_util_1.isNodeLike) { + const sharedKeyCredential = new StorageSharedKeyCredential_js_1.StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); + url2 = (0, utils_common_js_1.appendToURLPath)((0, utils_common_js_1.appendToURLPath)(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)); + if (!options.proxyOptions) { + options.proxyOptions = (0, core_rest_pipeline_1.getDefaultProxySettings)(extractedCreds.proxyUri); + } + pipeline = (0, Pipeline_js_1.newPipeline)(sharedKeyCredential, options); + } else { + throw new Error("Account connection string is only supported in Node.js environment"); + } + } else if (extractedCreds.kind === "SASConnString") { + url2 = (0, utils_common_js_1.appendToURLPath)((0, utils_common_js_1.appendToURLPath)(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)) + "?" + extractedCreds.accountSas; + pipeline = (0, Pipeline_js_1.newPipeline)(new AnonymousCredential_js_1.AnonymousCredential(), options); + } else { + throw new Error("Connection string must be either an Account connection string or a SAS connection string"); + } + } else { + throw new Error("Expecting non-empty strings for containerName and blobName parameters"); } - return tracing_js_1.tracingClient.withSpan("ContainerClient-delete", options, async (updatedOptions) => { - return (0, utils_common_js_1.assertResponse)(await this.containerContext.delete({ + super(url2, pipeline); + this.pageBlobContext = this.storageClientContext.pageBlob; + } + /** + * Creates a new PageBlobClient object identical to the source but with the + * specified snapshot timestamp. + * Provide "" will remove the snapshot and return a Client to the base blob. + * + * @param snapshot - The snapshot timestamp. + * @returns A new PageBlobClient object identical to the source but with the specified snapshot timestamp. 
+ */ + withSnapshot(snapshot) { + return new _PageBlobClient((0, utils_common_js_1.setURLParameter)(this.url, constants_js_1.URLConstants.Parameters.SNAPSHOT, snapshot.length === 0 ? void 0 : snapshot), this.pipeline); + } + /** + * Creates a page blob of the specified length. Call uploadPages to upload data + * data to a page blob. + * @see https://learn.microsoft.com/rest/api/storageservices/put-blob + * + * @param size - size of the page blob. + * @param options - Options to the Page Blob Create operation. + * @returns Response data for the Page Blob Create operation. + */ + async create(size, options = {}) { + options.conditions = options.conditions || {}; + (0, models_js_1.ensureCpkIfSpecified)(options.customerProvidedKey, this.isHttps); + return tracing_js_1.tracingClient.withSpan("PageBlobClient-create", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this.pageBlobContext.create(0, size, { abortSignal: options.abortSignal, + blobHttpHeaders: options.blobHTTPHeaders, + blobSequenceNumber: options.blobSequenceNumber, leaseAccessConditions: options.conditions, - modifiedAccessConditions: options.conditions, + metadata: options.metadata, + modifiedAccessConditions: { + ...options.conditions, + ifTags: options.conditions?.tagConditions + }, + cpkInfo: options.customerProvidedKey, + encryptionScope: options.encryptionScope, + immutabilityPolicyExpiry: options.immutabilityPolicy?.expiriesOn, + immutabilityPolicyMode: options.immutabilityPolicy?.policyMode, + legalHold: options.legalHold, + tier: (0, models_js_1.toAccessTier)(options.tier), + blobTagsString: (0, utils_common_js_1.toBlobTagsString)(options.tags), tracingOptions: updatedOptions.tracingOptions })); }); } /** - * Marks the specified container for deletion if it exists. The container and any blobs - * contained within it are later deleted during garbage collection. - * @see https://learn.microsoft.com/rest/api/storageservices/delete-container + * Creates a page blob of the specified length. Call uploadPages to upload data + * data to a page blob. If the blob with the same name already exists, the content + * of the existing blob will remain unchanged. + * @see https://learn.microsoft.com/rest/api/storageservices/put-blob * - * @param options - Options to Container Delete operation. + * @param size - size of the page blob. + * @param options - */ - async deleteIfExists(options = {}) { - return tracing_js_1.tracingClient.withSpan("ContainerClient-deleteIfExists", options, async (updatedOptions) => { + async createIfNotExists(size, options = {}) { + return tracing_js_1.tracingClient.withSpan("PageBlobClient-createIfNotExists", options, async (updatedOptions) => { try { - const res = await this.delete(updatedOptions); + const conditions = { ifNoneMatch: constants_js_1.ETagAny }; + const res = (0, utils_common_js_1.assertResponse)(await this.create(size, { + ...options, + conditions, + tracingOptions: updatedOptions.tracingOptions + })); return { succeeded: true, ...res, _response: res._response + // _response is made non-enumerable }; } catch (e) { - if (e.details?.errorCode === "ContainerNotFound") { + if (e.details?.errorCode === "BlobAlreadyExists") { return { succeeded: false, ...e.response?.parsedHeaders, @@ -88033,318 +87829,203 @@ var require_ContainerClient = __commonJS({ }); } /** - * Sets one or more user-defined name-value pairs for the specified container. - * - * If no option provided, or no metadata defined in the parameter, the container - * metadata will be removed. 
- * - * @see https://learn.microsoft.com/rest/api/storageservices/set-container-metadata + * Writes 1 or more pages to the page blob. The start and end offsets must be a multiple of 512. + * @see https://learn.microsoft.com/rest/api/storageservices/put-page * - * @param metadata - Replace existing metadata with this value. - * If no value provided the existing metadata will be removed. - * @param options - Options to Container Set Metadata operation. + * @param body - Data to upload + * @param offset - Offset of destination page blob + * @param count - Content length of the body, also number of bytes to be uploaded + * @param options - Options to the Page Blob Upload Pages operation. + * @returns Response data for the Page Blob Upload Pages operation. */ - async setMetadata(metadata, options = {}) { - if (!options.conditions) { - options.conditions = {}; - } - if (options.conditions.ifUnmodifiedSince) { - throw new RangeError("the IfUnmodifiedSince must have their default values because they are ignored by the blob service"); - } - return tracing_js_1.tracingClient.withSpan("ContainerClient-setMetadata", options, async (updatedOptions) => { - return (0, utils_common_js_1.assertResponse)(await this.containerContext.setMetadata({ + async uploadPages(body, offset, count, options = {}) { + options.conditions = options.conditions || {}; + (0, models_js_1.ensureCpkIfSpecified)(options.customerProvidedKey, this.isHttps); + return tracing_js_1.tracingClient.withSpan("PageBlobClient-uploadPages", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this.pageBlobContext.uploadPages(count, body, { abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, - metadata, - modifiedAccessConditions: options.conditions, + modifiedAccessConditions: { + ...options.conditions, + ifTags: options.conditions?.tagConditions + }, + requestOptions: { + onUploadProgress: options.onProgress + }, + range: (0, Range_js_1.rangeToString)({ offset, count }), + sequenceNumberAccessConditions: options.conditions, + transactionalContentMD5: options.transactionalContentMD5, + transactionalContentCrc64: options.transactionalContentCrc64, + cpkInfo: options.customerProvidedKey, + encryptionScope: options.encryptionScope, tracingOptions: updatedOptions.tracingOptions })); }); } /** - * Gets the permissions for the specified container. The permissions indicate - * whether container data may be accessed publicly. - * - * WARNING: JavaScript Date will potentially lose precision when parsing startsOn and expiresOn strings. - * For example, new Date("2018-12-31T03:44:23.8827891Z").toISOString() will get "2018-12-31T03:44:23.882Z". - * - * @see https://learn.microsoft.com/rest/api/storageservices/get-container-acl + * The Upload Pages operation writes a range of pages to a page blob where the + * contents are read from a URL. + * @see https://learn.microsoft.com/rest/api/storageservices/put-page-from-url * - * @param options - Options to Container Get Access Policy operation. + * @param sourceURL - Specify a URL to the copy source, Shared Access Signature(SAS) maybe needed for authentication + * @param sourceOffset - The source offset to copy from. 
Pass 0 to copy from the beginning of source page blob + * @param destOffset - Offset of destination page blob + * @param count - Number of bytes to be uploaded from source page blob + * @param options - */ - async getAccessPolicy(options = {}) { - if (!options.conditions) { - options.conditions = {}; - } - return tracing_js_1.tracingClient.withSpan("ContainerClient-getAccessPolicy", options, async (updatedOptions) => { - const response = (0, utils_common_js_1.assertResponse)(await this.containerContext.getAccessPolicy({ + async uploadPagesFromURL(sourceURL, sourceOffset, destOffset, count, options = {}) { + options.conditions = options.conditions || {}; + options.sourceConditions = options.sourceConditions || {}; + (0, models_js_1.ensureCpkIfSpecified)(options.customerProvidedKey, this.isHttps); + return tracing_js_1.tracingClient.withSpan("PageBlobClient-uploadPagesFromURL", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this.pageBlobContext.uploadPagesFromURL(sourceURL, (0, Range_js_1.rangeToString)({ offset: sourceOffset, count }), 0, (0, Range_js_1.rangeToString)({ offset: destOffset, count }), { abortSignal: options.abortSignal, + sourceContentMD5: options.sourceContentMD5, + sourceContentCrc64: options.sourceContentCrc64, leaseAccessConditions: options.conditions, + sequenceNumberAccessConditions: options.conditions, + modifiedAccessConditions: { + ...options.conditions, + ifTags: options.conditions?.tagConditions + }, + sourceModifiedAccessConditions: { + sourceIfMatch: options.sourceConditions?.ifMatch, + sourceIfModifiedSince: options.sourceConditions?.ifModifiedSince, + sourceIfNoneMatch: options.sourceConditions?.ifNoneMatch, + sourceIfUnmodifiedSince: options.sourceConditions?.ifUnmodifiedSince + }, + cpkInfo: options.customerProvidedKey, + encryptionScope: options.encryptionScope, + copySourceAuthorization: (0, utils_common_js_1.httpAuthorizationToString)(options.sourceAuthorization), + fileRequestIntent: options.sourceShareTokenIntent, tracingOptions: updatedOptions.tracingOptions })); - const res = { - _response: response._response, - blobPublicAccess: response.blobPublicAccess, - date: response.date, - etag: response.etag, - errorCode: response.errorCode, - lastModified: response.lastModified, - requestId: response.requestId, - clientRequestId: response.clientRequestId, - signedIdentifiers: [], - version: response.version - }; - for (const identifier of response) { - let accessPolicy = void 0; - if (identifier.accessPolicy) { - accessPolicy = { - permissions: identifier.accessPolicy.permissions - }; - if (identifier.accessPolicy.expiresOn) { - accessPolicy.expiresOn = new Date(identifier.accessPolicy.expiresOn); - } - if (identifier.accessPolicy.startsOn) { - accessPolicy.startsOn = new Date(identifier.accessPolicy.startsOn); - } - } - res.signedIdentifiers.push({ - accessPolicy, - id: identifier.id - }); - } - return res; }); } /** - * Sets the permissions for the specified container. The permissions indicate - * whether blobs in a container may be accessed publicly. - * - * When you set permissions for a container, the existing permissions are replaced. - * If no access or containerAcl provided, the existing container ACL will be - * removed. - * - * When you establish a stored access policy on a container, it may take up to 30 seconds to take effect. 
- * During this interval, a shared access signature that is associated with the stored access policy will - * fail with status code 403 (Forbidden), until the access policy becomes active. - * @see https://learn.microsoft.com/rest/api/storageservices/set-container-acl + * Frees the specified pages from the page blob. + * @see https://learn.microsoft.com/rest/api/storageservices/put-page * - * @param access - The level of public access to data in the container. - * @param containerAcl - Array of elements each having a unique Id and details of the access policy. - * @param options - Options to Container Set Access Policy operation. + * @param offset - Starting byte position of the pages to clear. + * @param count - Number of bytes to clear. + * @param options - Options to the Page Blob Clear Pages operation. + * @returns Response data for the Page Blob Clear Pages operation. */ - async setAccessPolicy(access, containerAcl, options = {}) { + async clearPages(offset = 0, count, options = {}) { options.conditions = options.conditions || {}; - return tracing_js_1.tracingClient.withSpan("ContainerClient-setAccessPolicy", options, async (updatedOptions) => { - const acl = []; - for (const identifier of containerAcl || []) { - acl.push({ - accessPolicy: { - expiresOn: identifier.accessPolicy.expiresOn ? (0, utils_common_js_1.truncatedISO8061Date)(identifier.accessPolicy.expiresOn) : "", - permissions: identifier.accessPolicy.permissions, - startsOn: identifier.accessPolicy.startsOn ? (0, utils_common_js_1.truncatedISO8061Date)(identifier.accessPolicy.startsOn) : "" - }, - id: identifier.id - }); - } - return (0, utils_common_js_1.assertResponse)(await this.containerContext.setAccessPolicy({ + return tracing_js_1.tracingClient.withSpan("PageBlobClient-clearPages", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this.pageBlobContext.clearPages(0, { abortSignal: options.abortSignal, - access, - containerAcl: acl, leaseAccessConditions: options.conditions, - modifiedAccessConditions: options.conditions, + modifiedAccessConditions: { + ...options.conditions, + ifTags: options.conditions?.tagConditions + }, + range: (0, Range_js_1.rangeToString)({ offset, count }), + sequenceNumberAccessConditions: options.conditions, + cpkInfo: options.customerProvidedKey, + encryptionScope: options.encryptionScope, tracingOptions: updatedOptions.tracingOptions })); }); } /** - * Get a {@link BlobLeaseClient} that manages leases on the container. - * - * @param proposeLeaseId - Initial proposed lease Id. - * @returns A new BlobLeaseClient object for managing leases on the container. - */ - getBlobLeaseClient(proposeLeaseId) { - return new BlobLeaseClient_js_1.BlobLeaseClient(this, proposeLeaseId); - } - /** - * Creates a new block blob, or updates the content of an existing block blob. - * - * Updating an existing block blob overwrites any existing metadata on the blob. - * Partial updates are not supported; the content of the existing blob is - * overwritten with the new content. To perform a partial update of a block blob's, - * use {@link BlockBlobClient.stageBlock} and {@link BlockBlobClient.commitBlockList}. - * - * This is a non-parallel uploading method, please use {@link BlockBlobClient.uploadFile}, - * {@link BlockBlobClient.uploadStream} or {@link BlockBlobClient.uploadBrowserData} for better - * performance with concurrency uploading. 
- * - * @see https://learn.microsoft.com/rest/api/storageservices/put-blob - * - * @param blobName - Name of the block blob to create or update. - * @param body - Blob, string, ArrayBuffer, ArrayBufferView or a function - * which returns a new Readable stream whose offset is from data source beginning. - * @param contentLength - Length of body in bytes. Use Buffer.byteLength() to calculate body length for a - * string including non non-Base64/Hex-encoded characters. - * @param options - Options to configure the Block Blob Upload operation. - * @returns Block Blob upload response data and the corresponding BlockBlobClient instance. - */ - async uploadBlockBlob(blobName, body, contentLength, options = {}) { - return tracing_js_1.tracingClient.withSpan("ContainerClient-uploadBlockBlob", options, async (updatedOptions) => { - const blockBlobClient = this.getBlockBlobClient(blobName); - const response = await blockBlobClient.upload(body, contentLength, updatedOptions); - return { - blockBlobClient, - response - }; - }); - } - /** - * Marks the specified blob or snapshot for deletion. The blob is later deleted - * during garbage collection. Note that in order to delete a blob, you must delete - * all of its snapshots. You can delete both at the same time with the Delete - * Blob operation. - * @see https://learn.microsoft.com/rest/api/storageservices/delete-blob - * - * @param blobName - - * @param options - Options to Blob Delete operation. - * @returns Block blob deletion response data. - */ - async deleteBlob(blobName, options = {}) { - return tracing_js_1.tracingClient.withSpan("ContainerClient-deleteBlob", options, async (updatedOptions) => { - let blobClient = this.getBlobClient(blobName); - if (options.versionId) { - blobClient = blobClient.withVersion(options.versionId); - } - return blobClient.delete(updatedOptions); - }); - } - /** - * listBlobFlatSegment returns a single segment of blobs starting from the - * specified Marker. Use an empty Marker to start enumeration from the beginning. - * After getting a segment, process it, and then call listBlobsFlatSegment again - * (passing the the previously-returned Marker) to get the next segment. - * @see https://learn.microsoft.com/rest/api/storageservices/list-blobs + * Returns the list of valid page ranges for a page blob or snapshot of a page blob. + * @see https://learn.microsoft.com/rest/api/storageservices/get-page-ranges * - * @param marker - A string value that identifies the portion of the list to be returned with the next list operation. - * @param options - Options to Container List Blob Flat Segment operation. + * @param offset - Starting byte position of the page ranges. + * @param count - Number of bytes to get. + * @param options - Options to the Page Blob Get Ranges operation. + * @returns Response data for the Page Blob Get Ranges operation. 
*/ - async listBlobFlatSegment(marker, options = {}) { - return tracing_js_1.tracingClient.withSpan("ContainerClient-listBlobFlatSegment", options, async (updatedOptions) => { - const response = (0, utils_common_js_1.assertResponse)(await this.containerContext.listBlobFlatSegment({ - marker, - ...options, + async getPageRanges(offset = 0, count, options = {}) { + options.conditions = options.conditions || {}; + return tracing_js_1.tracingClient.withSpan("PageBlobClient-getPageRanges", options, async (updatedOptions) => { + const response = (0, utils_common_js_1.assertResponse)(await this.pageBlobContext.getPageRanges({ + abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: { + ...options.conditions, + ifTags: options.conditions?.tagConditions + }, + range: (0, Range_js_1.rangeToString)({ offset, count }), tracingOptions: updatedOptions.tracingOptions })); - const wrappedResponse = { - ...response, - _response: { - ...response._response, - parsedBody: (0, utils_common_js_1.ConvertInternalResponseOfListBlobFlat)(response._response.parsedBody) - }, - // _response is made non-enumerable - segment: { - ...response.segment, - blobItems: response.segment.blobItems.map((blobItemInternal) => { - const blobItem = { - ...blobItemInternal, - name: (0, utils_common_js_1.BlobNameToString)(blobItemInternal.name), - tags: (0, utils_common_js_1.toTags)(blobItemInternal.blobTags), - objectReplicationSourceProperties: (0, utils_common_js_1.parseObjectReplicationRecord)(blobItemInternal.objectReplicationMetadata) - }; - return blobItem; - }) - } - }; - return wrappedResponse; + return (0, PageBlobRangeResponse_js_1.rangeResponseFromModel)(response); }); } /** - * listBlobHierarchySegment returns a single segment of blobs starting from - * the specified Marker. Use an empty Marker to start enumeration from the - * beginning. After getting a segment, process it, and then call listBlobsHierarchicalSegment - * again (passing the the previously-returned Marker) to get the next segment. - * @see https://learn.microsoft.com/rest/api/storageservices/list-blobs + * getPageRangesSegment returns a single segment of page ranges starting from the + * specified Marker. Use an empty Marker to start enumeration from the beginning. + * After getting a segment, process it, and then call getPageRangesSegment again + * (passing the the previously-returned Marker) to get the next segment. + * @see https://learn.microsoft.com/rest/api/storageservices/get-page-ranges * - * @param delimiter - The character or string used to define the virtual hierarchy + * @param offset - Starting byte position of the page ranges. + * @param count - Number of bytes to get. * @param marker - A string value that identifies the portion of the list to be returned with the next list operation. - * @param options - Options to Container List Blob Hierarchy Segment operation. + * @param options - Options to PageBlob Get Page Ranges Segment operation. 
*/ - async listBlobHierarchySegment(delimiter, marker, options = {}) { - return tracing_js_1.tracingClient.withSpan("ContainerClient-listBlobHierarchySegment", options, async (updatedOptions) => { - const response = (0, utils_common_js_1.assertResponse)(await this.containerContext.listBlobHierarchySegment(delimiter, { + async listPageRangesSegment(offset = 0, count, marker, options = {}) { + return tracing_js_1.tracingClient.withSpan("PageBlobClient-getPageRangesSegment", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this.pageBlobContext.getPageRanges({ + abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: { + ...options.conditions, + ifTags: options.conditions?.tagConditions + }, + range: (0, Range_js_1.rangeToString)({ offset, count }), marker, - ...options, + maxPageSize: options.maxPageSize, tracingOptions: updatedOptions.tracingOptions })); - const wrappedResponse = { - ...response, - _response: { - ...response._response, - parsedBody: (0, utils_common_js_1.ConvertInternalResponseOfListBlobHierarchy)(response._response.parsedBody) - }, - // _response is made non-enumerable - segment: { - ...response.segment, - blobItems: response.segment.blobItems.map((blobItemInternal) => { - const blobItem = { - ...blobItemInternal, - name: (0, utils_common_js_1.BlobNameToString)(blobItemInternal.name), - tags: (0, utils_common_js_1.toTags)(blobItemInternal.blobTags), - objectReplicationSourceProperties: (0, utils_common_js_1.parseObjectReplicationRecord)(blobItemInternal.objectReplicationMetadata) - }; - return blobItem; - }), - blobPrefixes: response.segment.blobPrefixes?.map((blobPrefixInternal) => { - const blobPrefix = { - ...blobPrefixInternal, - name: (0, utils_common_js_1.BlobNameToString)(blobPrefixInternal.name) - }; - return blobPrefix; - }) - } - }; - return wrappedResponse; }); } /** - * Returns an AsyncIterableIterator for ContainerListBlobFlatSegmentResponse + * Returns an AsyncIterableIterator for {@link PageBlobGetPageRangesResponseModel} * + * @param offset - Starting byte position of the page ranges. + * @param count - Number of bytes to get. * @param marker - A string value that identifies the portion of - * the list of blobs to be returned with the next listing operation. The + * the get of page ranges to be returned with the next getting operation. The * operation returns the ContinuationToken value within the response body if the - * listing operation did not return all blobs remaining to be listed - * with the current page. The ContinuationToken value can be used as the value for - * the marker parameter in a subsequent call to request the next page of list + * getting operation did not return all page ranges remaining within the current page. + * The ContinuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of get * items. The marker value is opaque to the client. - * @param options - Options to list blobs operation. + * @param options - Options to List Page Ranges operation. 
*/ - async *listSegments(marker, options = {}) { - let listBlobsFlatSegmentResponse; + async *listPageRangeItemSegments(offset = 0, count, marker, options = {}) { + let getPageRangeItemSegmentsResponse; if (!!marker || marker === void 0) { do { - listBlobsFlatSegmentResponse = await this.listBlobFlatSegment(marker, options); - marker = listBlobsFlatSegmentResponse.continuationToken; - yield await listBlobsFlatSegmentResponse; + getPageRangeItemSegmentsResponse = await this.listPageRangesSegment(offset, count, marker, options); + marker = getPageRangeItemSegmentsResponse.continuationToken; + yield await getPageRangeItemSegmentsResponse; } while (marker); } } /** - * Returns an AsyncIterableIterator of {@link BlobItem} objects + * Returns an AsyncIterableIterator of {@link PageRangeInfo} objects * - * @param options - Options to list blobs operation. + * @param offset - Starting byte position of the page ranges. + * @param count - Number of bytes to get. + * @param options - Options to List Page Ranges operation. */ - async *listItems(options = {}) { + async *listPageRangeItems(offset = 0, count, options = {}) { let marker; - for await (const listBlobsFlatSegmentResponse of this.listSegments(marker, options)) { - yield* listBlobsFlatSegmentResponse.segment.blobItems; + for await (const getPageRangesSegment of this.listPageRangeItemSegments(offset, count, marker, options)) { + yield* (0, utils_common_js_1.ExtractPageRangeInfoItems)(getPageRangesSegment); } } /** - * Returns an async iterable iterator to list all the blobs - * under the specified account. + * Returns an async iterable iterator to list of page ranges for a page blob. + * @see https://learn.microsoft.com/rest/api/storageservices/get-page-ranges * - * .byPage() returns an async iterable iterator to list the blobs in pages. + * .byPage() returns an async iterable iterator to list of page ranges for a page blob. 
* - * ```ts snippet:ReadmeSampleListBlobs_Multiple + * ```ts snippet:ClientsListPageBlobs * import { BlobServiceClient } from "@azure/storage-blob"; * import { DefaultAzureCredential } from "@azure/identity"; * @@ -88355,98 +88036,64 @@ var require_ContainerClient = __commonJS({ * ); * * const containerName = ""; + * const blobName = ""; * const containerClient = blobServiceClient.getContainerClient(containerName); + * const pageBlobClient = containerClient.getPageBlobClient(blobName); * * // Example using `for await` syntax * let i = 1; - * const blobs = containerClient.listBlobsFlat(); - * for await (const blob of blobs) { - * console.log(`Blob ${i++}: ${blob.name}`); + * for await (const pageRange of pageBlobClient.listPageRanges()) { + * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); * } * * // Example using `iter.next()` syntax * i = 1; - * const iter = containerClient.listBlobsFlat(); + * const iter = pageBlobClient.listPageRanges(); * let { value, done } = await iter.next(); * while (!done) { - * console.log(`Blob ${i++}: ${value.name}`); + * console.log(`Page range ${i++}: ${value.start} - ${value.end}`); * ({ value, done } = await iter.next()); * } * * // Example using `byPage()` syntax * i = 1; - * for await (const page of containerClient.listBlobsFlat().byPage({ maxPageSize: 20 })) { - * for (const blob of page.segment.blobItems) { - * console.log(`Blob ${i++}: ${blob.name}`); + * for await (const page of pageBlobClient.listPageRanges().byPage({ maxPageSize: 20 })) { + * for (const pageRange of page.pageRange || []) { + * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); * } * } * * // Example using paging with a marker * i = 1; - * let iterator = containerClient.listBlobsFlat().byPage({ maxPageSize: 2 }); + * let iterator = pageBlobClient.listPageRanges().byPage({ maxPageSize: 2 }); * let response = (await iterator.next()).value; - * // Prints 2 blob names - * if (response.segment.blobItems) { - * for (const blob of response.segment.blobItems) { - * console.log(`Blob ${i++}: ${blob.name}`); + * // Prints 2 page ranges + * if (response.pageRange) { + * for (const pageRange of response.pageRange) { + * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); * } * } * // Gets next marker * let marker = response.continuationToken; * // Passing next marker as continuationToken - * iterator = containerClient.listBlobsFlat().byPage({ continuationToken: marker, maxPageSize: 10 }); + * iterator = pageBlobClient.listPageRanges().byPage({ continuationToken: marker, maxPageSize: 10 }); * response = (await iterator.next()).value; - * // Prints 10 blob names - * if (response.segment.blobItems) { - * for (const blob of response.segment.blobItems) { - * console.log(`Blob ${i++}: ${blob.name}`); + * // Prints 10 page ranges + * if (response.pageRange) { + * for (const pageRange of response.pageRange) { + * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); * } * } * ``` * - * @param options - Options to list blobs. + * @param offset - Starting byte position of the page ranges. + * @param count - Number of bytes to get. + * @param options - Options to the Page Blob Get Ranges operation. * @returns An asyncIterableIterator that supports paging. 
*/ - listBlobsFlat(options = {}) { - const include = []; - if (options.includeCopy) { - include.push("copy"); - } - if (options.includeDeleted) { - include.push("deleted"); - } - if (options.includeMetadata) { - include.push("metadata"); - } - if (options.includeSnapshots) { - include.push("snapshots"); - } - if (options.includeVersions) { - include.push("versions"); - } - if (options.includeUncommitedBlobs) { - include.push("uncommittedblobs"); - } - if (options.includeTags) { - include.push("tags"); - } - if (options.includeDeletedWithVersions) { - include.push("deletedwithversions"); - } - if (options.includeImmutabilityPolicy) { - include.push("immutabilitypolicy"); - } - if (options.includeLegalHold) { - include.push("legalhold"); - } - if (options.prefix === "") { - options.prefix = void 0; - } - const updatedOptions = { - ...options, - ...include.length > 0 ? { include } : {} - }; - const iter = this.listItems(updatedOptions); + listPageRanges(offset = 0, count, options = {}) { + options.conditions = options.conditions || {}; + const iter = this.listPageRangeItems(offset, count, options); return { /** * The next method, part of the iteration protocol @@ -88464,313 +88111,121 @@ var require_ContainerClient = __commonJS({ * Return an AsyncIterableIterator that works a page at a time */ byPage: (settings = {}) => { - return this.listSegments(settings.continuationToken, { + return this.listPageRangeItemSegments(offset, count, settings.continuationToken, { maxPageSize: settings.maxPageSize, - ...updatedOptions + ...options }); } }; } /** - * Returns an AsyncIterableIterator for ContainerListBlobHierarchySegmentResponse - * - * @param delimiter - The character or string used to define the virtual hierarchy - * @param marker - A string value that identifies the portion of - * the list of blobs to be returned with the next listing operation. The - * operation returns the ContinuationToken value within the response body if the - * listing operation did not return all blobs remaining to be listed - * with the current page. The ContinuationToken value can be used as the value for - * the marker parameter in a subsequent call to request the next page of list - * items. The marker value is opaque to the client. - * @param options - Options to list blobs operation. - */ - async *listHierarchySegments(delimiter, marker, options = {}) { - let listBlobsHierarchySegmentResponse; - if (!!marker || marker === void 0) { - do { - listBlobsHierarchySegmentResponse = await this.listBlobHierarchySegment(delimiter, marker, options); - marker = listBlobsHierarchySegmentResponse.continuationToken; - yield await listBlobsHierarchySegmentResponse; - } while (marker); - } - } - /** - * Returns an AsyncIterableIterator for {@link BlobPrefix} and {@link BlobItem} objects. - * - * @param delimiter - The character or string used to define the virtual hierarchy - * @param options - Options to list blobs operation. - */ - async *listItemsByHierarchy(delimiter, options = {}) { - let marker; - for await (const listBlobsHierarchySegmentResponse of this.listHierarchySegments(delimiter, marker, options)) { - const segment = listBlobsHierarchySegmentResponse.segment; - if (segment.blobPrefixes) { - for (const prefix of segment.blobPrefixes) { - yield { - kind: "prefix", - ...prefix - }; - } - } - for (const blob of segment.blobItems) { - yield { kind: "blob", ...blob }; - } - } - } - /** - * Returns an async iterable iterator to list all the blobs by hierarchy. - * under the specified account. 
- * - * .byPage() returns an async iterable iterator to list the blobs by hierarchy in pages. - * - * ```ts snippet:ReadmeSampleListBlobsByHierarchy - * import { BlobServiceClient } from "@azure/storage-blob"; - * import { DefaultAzureCredential } from "@azure/identity"; - * - * const account = ""; - * const blobServiceClient = new BlobServiceClient( - * `https://${account}.blob.core.windows.net`, - * new DefaultAzureCredential(), - * ); - * - * const containerName = ""; - * const containerClient = blobServiceClient.getContainerClient(containerName); - * - * // Example using `for await` syntax - * let i = 1; - * const blobs = containerClient.listBlobsByHierarchy("/"); - * for await (const blob of blobs) { - * if (blob.kind === "prefix") { - * console.log(`\tBlobPrefix: ${blob.name}`); - * } else { - * console.log(`\tBlobItem: name - ${blob.name}`); - * } - * } - * - * // Example using `iter.next()` syntax - * i = 1; - * const iter = containerClient.listBlobsByHierarchy("/"); - * let { value, done } = await iter.next(); - * while (!done) { - * if (value.kind === "prefix") { - * console.log(`\tBlobPrefix: ${value.name}`); - * } else { - * console.log(`\tBlobItem: name - ${value.name}`); - * } - * ({ value, done } = await iter.next()); - * } - * - * // Example using `byPage()` syntax - * i = 1; - * for await (const page of containerClient.listBlobsByHierarchy("/").byPage({ maxPageSize: 20 })) { - * const segment = page.segment; - * if (segment.blobPrefixes) { - * for (const prefix of segment.blobPrefixes) { - * console.log(`\tBlobPrefix: ${prefix.name}`); - * } - * } - * for (const blob of page.segment.blobItems) { - * console.log(`\tBlobItem: name - ${blob.name}`); - * } - * } - * - * // Example using paging with a marker - * i = 1; - * let iterator = containerClient.listBlobsByHierarchy("/").byPage({ maxPageSize: 2 }); - * let response = (await iterator.next()).value; - * // Prints 2 blob names - * if (response.blobPrefixes) { - * for (const prefix of response.blobPrefixes) { - * console.log(`\tBlobPrefix: ${prefix.name}`); - * } - * } - * if (response.segment.blobItems) { - * for (const blob of response.segment.blobItems) { - * console.log(`\tBlobItem: name - ${blob.name}`); - * } - * } - * // Gets next marker - * let marker = response.continuationToken; - * // Passing next marker as continuationToken - * iterator = containerClient - * .listBlobsByHierarchy("/") - * .byPage({ continuationToken: marker, maxPageSize: 10 }); - * response = (await iterator.next()).value; - * // Prints 10 blob names - * if (response.blobPrefixes) { - * for (const prefix of response.blobPrefixes) { - * console.log(`\tBlobPrefix: ${prefix.name}`); - * } - * } - * if (response.segment.blobItems) { - * for (const blob of response.segment.blobItems) { - * console.log(`Blob ${i++}: ${blob.name}`); - * } - * } - * ``` + * Gets the collection of page ranges that differ between a specified snapshot and this page blob. + * @see https://learn.microsoft.com/rest/api/storageservices/get-page-ranges * - * @param delimiter - The character or string used to define the virtual hierarchy - * @param options - Options to list blobs operation. + * @param offset - Starting byte position of the page blob + * @param count - Number of bytes to get ranges diff. + * @param prevSnapshot - Timestamp of snapshot to retrieve the difference. + * @param options - Options to the Page Blob Get Page Ranges Diff operation. + * @returns Response data for the Page Blob Get Page Range Diff operation. 
*/ - listBlobsByHierarchy(delimiter, options = {}) { - if (delimiter === "") { - throw new RangeError("delimiter should contain one or more characters"); - } - const include = []; - if (options.includeCopy) { - include.push("copy"); - } - if (options.includeDeleted) { - include.push("deleted"); - } - if (options.includeMetadata) { - include.push("metadata"); - } - if (options.includeSnapshots) { - include.push("snapshots"); - } - if (options.includeVersions) { - include.push("versions"); - } - if (options.includeUncommitedBlobs) { - include.push("uncommittedblobs"); - } - if (options.includeTags) { - include.push("tags"); - } - if (options.includeDeletedWithVersions) { - include.push("deletedwithversions"); - } - if (options.includeImmutabilityPolicy) { - include.push("immutabilitypolicy"); - } - if (options.includeLegalHold) { - include.push("legalhold"); - } - if (options.prefix === "") { - options.prefix = void 0; - } - const updatedOptions = { - ...options, - ...include.length > 0 ? { include } : {} - }; - const iter = this.listItemsByHierarchy(delimiter, updatedOptions); - return { - /** - * The next method, part of the iteration protocol - */ - async next() { - return iter.next(); - }, - /** - * The connection to the async iterator, part of the iteration protocol - */ - [Symbol.asyncIterator]() { - return this; - }, - /** - * Return an AsyncIterableIterator that works a page at a time - */ - byPage: (settings = {}) => { - return this.listHierarchySegments(delimiter, settings.continuationToken, { - maxPageSize: settings.maxPageSize, - ...updatedOptions - }); - } - }; + async getPageRangesDiff(offset, count, prevSnapshot, options = {}) { + options.conditions = options.conditions || {}; + return tracing_js_1.tracingClient.withSpan("PageBlobClient-getPageRangesDiff", options, async (updatedOptions) => { + const result = (0, utils_common_js_1.assertResponse)(await this.pageBlobContext.getPageRangesDiff({ + abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: { + ...options.conditions, + ifTags: options.conditions?.tagConditions + }, + prevsnapshot: prevSnapshot, + range: (0, Range_js_1.rangeToString)({ offset, count }), + tracingOptions: updatedOptions.tracingOptions + })); + return (0, PageBlobRangeResponse_js_1.rangeResponseFromModel)(result); + }); } /** - * The Filter Blobs operation enables callers to list blobs in the container whose tags - * match a given search expression. + * getPageRangesDiffSegment returns a single segment of page ranges starting from the + * specified Marker for difference between previous snapshot and the target page blob. + * Use an empty Marker to start enumeration from the beginning. + * After getting a segment, process it, and then call getPageRangesDiffSegment again + * (passing the the previously-returned Marker) to get the next segment. + * @see https://learn.microsoft.com/rest/api/storageservices/get-page-ranges * - * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. - * The given expression must evaluate to true for a blob to be returned in the results. - * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; - * however, only a subset of the OData filter syntax is supported in the Blob service. - * @param marker - A string value that identifies the portion of - * the list of blobs to be returned with the next listing operation. 
The - * operation returns the continuationToken value within the response body if the - * listing operation did not return all blobs remaining to be listed - * with the current page. The continuationToken value can be used as the value for - * the marker parameter in a subsequent call to request the next page of list - * items. The marker value is opaque to the client. - * @param options - Options to find blobs by tags. + * @param offset - Starting byte position of the page ranges. + * @param count - Number of bytes to get. + * @param prevSnapshotOrUrl - Timestamp of snapshot to retrieve the difference or URL of snapshot to retrieve the difference. + * @param marker - A string value that identifies the portion of the get to be returned with the next get operation. + * @param options - Options to the Page Blob Get Page Ranges Diff operation. */ - async findBlobsByTagsSegment(tagFilterSqlExpression, marker, options = {}) { - return tracing_js_1.tracingClient.withSpan("ContainerClient-findBlobsByTagsSegment", options, async (updatedOptions) => { - const response = (0, utils_common_js_1.assertResponse)(await this.containerContext.filterBlobs({ - abortSignal: options.abortSignal, - where: tagFilterSqlExpression, + async listPageRangesDiffSegment(offset, count, prevSnapshotOrUrl, marker, options = {}) { + return tracing_js_1.tracingClient.withSpan("PageBlobClient-getPageRangesDiffSegment", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this.pageBlobContext.getPageRangesDiff({ + abortSignal: options?.abortSignal, + leaseAccessConditions: options?.conditions, + modifiedAccessConditions: { + ...options?.conditions, + ifTags: options?.conditions?.tagConditions + }, + prevsnapshot: prevSnapshotOrUrl, + range: (0, Range_js_1.rangeToString)({ + offset, + count + }), marker, - maxPageSize: options.maxPageSize, + maxPageSize: options?.maxPageSize, tracingOptions: updatedOptions.tracingOptions })); - const wrappedResponse = { - ...response, - _response: response._response, - // _response is made non-enumerable - blobs: response.blobs.map((blob) => { - let tagValue = ""; - if (blob.tags?.blobTagSet.length === 1) { - tagValue = blob.tags.blobTagSet[0].value; - } - return { ...blob, tags: (0, utils_common_js_1.toTags)(blob.tags), tagValue }; - }) - }; - return wrappedResponse; }); } /** - * Returns an AsyncIterableIterator for ContainerFindBlobsByTagsSegmentResponse. + * Returns an AsyncIterableIterator for {@link PageBlobGetPageRangesDiffResponseModel} * - * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. - * The given expression must evaluate to true for a blob to be returned in the results. - * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; - * however, only a subset of the OData filter syntax is supported in the Blob service. + * + * @param offset - Starting byte position of the page ranges. + * @param count - Number of bytes to get. + * @param prevSnapshotOrUrl - Timestamp of snapshot to retrieve the difference or URL of snapshot to retrieve the difference. * @param marker - A string value that identifies the portion of - * the list of blobs to be returned with the next listing operation. The - * operation returns the continuationToken value within the response body if the - * listing operation did not return all blobs remaining to be listed - * with the current page. 
The continuationToken value can be used as the value for - * the marker parameter in a subsequent call to request the next page of list + * the get of page ranges to be returned with the next getting operation. The + * operation returns the ContinuationToken value within the response body if the + * getting operation did not return all page ranges remaining within the current page. + * The ContinuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of get * items. The marker value is opaque to the client. - * @param options - Options to find blobs by tags. + * @param options - Options to the Page Blob Get Page Ranges Diff operation. */ - async *findBlobsByTagsSegments(tagFilterSqlExpression, marker, options = {}) { - let response; + async *listPageRangeDiffItemSegments(offset, count, prevSnapshotOrUrl, marker, options) { + let getPageRangeItemSegmentsResponse; if (!!marker || marker === void 0) { do { - response = await this.findBlobsByTagsSegment(tagFilterSqlExpression, marker, options); - response.blobs = response.blobs || []; - marker = response.continuationToken; - yield response; + getPageRangeItemSegmentsResponse = await this.listPageRangesDiffSegment(offset, count, prevSnapshotOrUrl, marker, options); + marker = getPageRangeItemSegmentsResponse.continuationToken; + yield await getPageRangeItemSegmentsResponse; } while (marker); } } /** - * Returns an AsyncIterableIterator for blobs. + * Returns an AsyncIterableIterator of {@link PageRangeInfo} objects * - * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. - * The given expression must evaluate to true for a blob to be returned in the results. - * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; - * however, only a subset of the OData filter syntax is supported in the Blob service. - * @param options - Options to findBlobsByTagsItems. + * @param offset - Starting byte position of the page ranges. + * @param count - Number of bytes to get. + * @param prevSnapshotOrUrl - Timestamp of snapshot to retrieve the difference or URL of snapshot to retrieve the difference. + * @param options - Options to the Page Blob Get Page Ranges Diff operation. */ - async *findBlobsByTagsItems(tagFilterSqlExpression, options = {}) { + async *listPageRangeDiffItems(offset, count, prevSnapshotOrUrl, options) { let marker; - for await (const segment of this.findBlobsByTagsSegments(tagFilterSqlExpression, marker, options)) { - yield* segment.blobs; + for await (const getPageRangesSegment of this.listPageRangeDiffItemSegments(offset, count, prevSnapshotOrUrl, marker, options)) { + yield* (0, utils_common_js_1.ExtractPageRangeInfoItems)(getPageRangesSegment); } } /** - * Returns an async iterable iterator to find all blobs with specified tag - * under the specified container. - * - * .byPage() returns an async iterable iterator to list the blobs in pages. + * Returns an async iterable iterator to list of page ranges that differ between a specified snapshot and this page blob. + * @see https://learn.microsoft.com/rest/api/storageservices/get-page-ranges * - * Example using `for await` syntax: + * .byPage() returns an async iterable iterator to list of page ranges that differ between a specified snapshot and this page blob. 
* - * ```ts snippet:ReadmeSampleFindBlobsByTags + * ```ts snippet:ClientsListPageBlobsDiff * import { BlobServiceClient } from "@azure/storage-blob"; * import { DefaultAzureCredential } from "@azure/identity"; * @@ -88781,69 +88236,76 @@ var require_ContainerClient = __commonJS({ * ); * * const containerName = ""; + * const blobName = ""; * const containerClient = blobServiceClient.getContainerClient(containerName); + * const pageBlobClient = containerClient.getPageBlobClient(blobName); * + * const offset = 0; + * const count = 1024; + * const previousSnapshot = ""; * // Example using `for await` syntax * let i = 1; - * for await (const blob of containerClient.findBlobsByTags("tagkey='tagvalue'")) { - * console.log(`Blob ${i++}: ${blob.name}`); + * for await (const pageRange of pageBlobClient.listPageRangesDiff(offset, count, previousSnapshot)) { + * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); * } * * // Example using `iter.next()` syntax * i = 1; - * const iter = containerClient.findBlobsByTags("tagkey='tagvalue'"); + * const iter = pageBlobClient.listPageRangesDiff(offset, count, previousSnapshot); * let { value, done } = await iter.next(); * while (!done) { - * console.log(`Blob ${i++}: ${value.name}`); + * console.log(`Page range ${i++}: ${value.start} - ${value.end}`); * ({ value, done } = await iter.next()); * } * * // Example using `byPage()` syntax * i = 1; - * for await (const page of containerClient - * .findBlobsByTags("tagkey='tagvalue'") + * for await (const page of pageBlobClient + * .listPageRangesDiff(offset, count, previousSnapshot) * .byPage({ maxPageSize: 20 })) { - * for (const blob of page.blobs) { - * console.log(`Blob ${i++}: ${blob.name}`); + * for (const pageRange of page.pageRange || []) { + * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); * } * } * * // Example using paging with a marker * i = 1; - * let iterator = containerClient.findBlobsByTags("tagkey='tagvalue'").byPage({ maxPageSize: 2 }); + * let iterator = pageBlobClient + * .listPageRangesDiff(offset, count, previousSnapshot) + * .byPage({ maxPageSize: 2 }); * let response = (await iterator.next()).value; - * // Prints 2 blob names - * if (response.blobs) { - * for (const blob of response.blobs) { - * console.log(`Blob ${i++}: ${blob.name}`); + * // Prints 2 page ranges + * if (response.pageRange) { + * for (const pageRange of response.pageRange) { + * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); * } * } * // Gets next marker * let marker = response.continuationToken; * // Passing next marker as continuationToken - * iterator = containerClient - * .findBlobsByTags("tagkey='tagvalue'") + * iterator = pageBlobClient + * .listPageRangesDiff(offset, count, previousSnapshot) * .byPage({ continuationToken: marker, maxPageSize: 10 }); * response = (await iterator.next()).value; - * // Prints 10 blob names - * if (response.blobs) { - * for (const blob of response.blobs) { - * console.log(`Blob ${i++}: ${blob.name}`); + * // Prints 10 page ranges + * if (response.pageRange) { + * for (const pageRange of response.pageRange) { + * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); * } * } * ``` * - * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. - * The given expression must evaluate to true for a blob to be returned in the results. 
- * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; - * however, only a subset of the OData filter syntax is supported in the Blob service. - * @param options - Options to find blobs by tags. + * @param offset - Starting byte position of the page ranges. + * @param count - Number of bytes to get. + * @param prevSnapshot - Timestamp of snapshot to retrieve the difference. + * @param options - Options to the Page Blob Get Ranges operation. + * @returns An asyncIterableIterator that supports paging. */ - findBlobsByTags(tagFilterSqlExpression, options = {}) { - const listSegmentOptions = { + listPageRangesDiff(offset, count, prevSnapshot, options = {}) { + options.conditions = options.conditions || {}; + const iter = this.listPageRangeDiffItems(offset, count, prevSnapshot, { ...options - }; - const iter = this.findBlobsByTagsItems(tagFilterSqlExpression, listSegmentOptions); + }); return { /** * The next method, part of the iteration protocol @@ -88861,669 +88323,809 @@ var require_ContainerClient = __commonJS({ * Return an AsyncIterableIterator that works a page at a time */ byPage: (settings = {}) => { - return this.findBlobsByTagsSegments(tagFilterSqlExpression, settings.continuationToken, { + return this.listPageRangeDiffItemSegments(offset, count, prevSnapshot, settings.continuationToken, { maxPageSize: settings.maxPageSize, - ...listSegmentOptions + ...options }); } }; } /** - * The Get Account Information operation returns the sku name and account kind - * for the specified account. - * The Get Account Information operation is available on service versions beginning - * with version 2018-03-28. - * @see https://learn.microsoft.com/rest/api/storageservices/get-account-information + * Gets the collection of page ranges that differ between a specified snapshot and this page blob for managed disks. + * @see https://learn.microsoft.com/rest/api/storageservices/get-page-ranges * - * @param options - Options to the Service Get Account Info operation. - * @returns Response data for the Service Get Account Info operation. + * @param offset - Starting byte position of the page blob + * @param count - Number of bytes to get ranges diff. + * @param prevSnapshotUrl - URL of snapshot to retrieve the difference. + * @param options - Options to the Page Blob Get Page Ranges Diff operation. + * @returns Response data for the Page Blob Get Page Range Diff operation. 
*/ - async getAccountInfo(options = {}) { - return tracing_js_1.tracingClient.withSpan("ContainerClient-getAccountInfo", options, async (updatedOptions) => { - return (0, utils_common_js_1.assertResponse)(await this.containerContext.getAccountInfo({ + async getPageRangesDiffForManagedDisks(offset, count, prevSnapshotUrl, options = {}) { + options.conditions = options.conditions || {}; + return tracing_js_1.tracingClient.withSpan("PageBlobClient-GetPageRangesDiffForManagedDisks", options, async (updatedOptions) => { + const response = (0, utils_common_js_1.assertResponse)(await this.pageBlobContext.getPageRangesDiff({ abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: { + ...options.conditions, + ifTags: options.conditions?.tagConditions + }, + prevSnapshotUrl, + range: (0, Range_js_1.rangeToString)({ offset, count }), tracingOptions: updatedOptions.tracingOptions })); + return (0, PageBlobRangeResponse_js_1.rangeResponseFromModel)(response); }); } - getContainerNameFromUrl() { - let containerName; - try { - const parsedUrl = new URL(this.url); - if (parsedUrl.hostname.split(".")[1] === "blob") { - containerName = parsedUrl.pathname.split("/")[1]; - } else if ((0, utils_common_js_1.isIpEndpointStyle)(parsedUrl)) { - containerName = parsedUrl.pathname.split("/")[2]; - } else { - containerName = parsedUrl.pathname.split("/")[1]; - } - containerName = decodeURIComponent(containerName); - if (!containerName) { - throw new Error("Provided containerName is invalid."); - } - return containerName; - } catch (error3) { - throw new Error("Unable to extract containerName with provided information."); - } - } /** - * Only available for ContainerClient constructed with a shared key credential. - * - * Generates a Blob Container Service Shared Access Signature (SAS) URI based on the client properties - * and parameters passed in. The SAS is signed by the shared key credential of the client. - * - * @see https://learn.microsoft.com/rest/api/storageservices/constructing-a-service-sas + * Resizes the page blob to the specified size (which must be a multiple of 512). + * @see https://learn.microsoft.com/rest/api/storageservices/set-blob-properties * - * @param options - Optional parameters. - * @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. + * @param size - Target size + * @param options - Options to the Page Blob Resize operation. + * @returns Response data for the Page Blob Resize operation. 
*/ - generateSasUrl(options) { - return new Promise((resolve6) => { - if (!(this.credential instanceof StorageSharedKeyCredential_js_1.StorageSharedKeyCredential)) { - throw new RangeError("Can only generate the SAS when the client is initialized with a shared key credential"); - } - const sas = (0, BlobSASSignatureValues_js_1.generateBlobSASQueryParameters)({ - containerName: this._containerName, - ...options - }, this.credential).toString(); - resolve6((0, utils_common_js_1.appendToURLQuery)(this.url, sas)); + async resize(size, options = {}) { + options.conditions = options.conditions || {}; + return tracing_js_1.tracingClient.withSpan("PageBlobClient-resize", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this.pageBlobContext.resize(size, { + abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: { + ...options.conditions, + ifTags: options.conditions?.tagConditions + }, + encryptionScope: options.encryptionScope, + tracingOptions: updatedOptions.tracingOptions + })); }); } /** - * Only available for ContainerClient constructed with a shared key credential. - * - * Generates string to sign for a Blob Container Service Shared Access Signature (SAS) URI - * based on the client properties and parameters passed in. The SAS is signed by the shared key credential of the client. - * - * @see https://learn.microsoft.com/rest/api/storageservices/constructing-a-service-sas - * - * @param options - Optional parameters. - * @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. - */ - /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ - generateSasStringToSign(options) { - if (!(this.credential instanceof StorageSharedKeyCredential_js_1.StorageSharedKeyCredential)) { - throw new RangeError("Can only generate the SAS when the client is initialized with a shared key credential"); - } - return (0, BlobSASSignatureValues_js_1.generateBlobSASQueryParametersInternal)({ - containerName: this._containerName, - ...options - }, this.credential).stringToSign; - } - /** - * Generates a Blob Container Service Shared Access Signature (SAS) URI based on the client properties - * and parameters passed in. The SAS is signed by the input user delegation key. - * - * @see https://learn.microsoft.com/rest/api/storageservices/constructing-a-service-sas + * Sets a page blob's sequence number. + * @see https://learn.microsoft.com/rest/api/storageservices/set-blob-properties * - * @param options - Optional parameters. - * @param userDelegationKey - Return value of `blobServiceClient.getUserDelegationKey()` - * @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. + * @param sequenceNumberAction - Indicates how the service should modify the blob's sequence number. + * @param sequenceNumber - Required if sequenceNumberAction is max or update + * @param options - Options to the Page Blob Update Sequence Number operation. + * @returns Response data for the Page Blob Update Sequence Number operation. 
*/ - generateUserDelegationSasUrl(options, userDelegationKey) { - return new Promise((resolve6) => { - const sas = (0, BlobSASSignatureValues_js_1.generateBlobSASQueryParameters)({ - containerName: this._containerName, - ...options - }, userDelegationKey, this.accountName).toString(); - resolve6((0, utils_common_js_1.appendToURLQuery)(this.url, sas)); + async updateSequenceNumber(sequenceNumberAction, sequenceNumber, options = {}) { + options.conditions = options.conditions || {}; + return tracing_js_1.tracingClient.withSpan("PageBlobClient-updateSequenceNumber", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this.pageBlobContext.updateSequenceNumber(sequenceNumberAction, { + abortSignal: options.abortSignal, + blobSequenceNumber: sequenceNumber, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: { + ...options.conditions, + ifTags: options.conditions?.tagConditions + }, + tracingOptions: updatedOptions.tracingOptions + })); }); } /** - * Generates string to sign for a Blob Container Service Shared Access Signature (SAS) URI - * based on the client properties and parameters passed in. The SAS is signed by the input user delegation key. - * - * @see https://learn.microsoft.com/rest/api/storageservices/constructing-a-service-sas - * - * @param options - Optional parameters. - * @param userDelegationKey - Return value of `blobServiceClient.getUserDelegationKey()` - * @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. - */ - generateUserDelegationSasStringToSign(options, userDelegationKey) { - return (0, BlobSASSignatureValues_js_1.generateBlobSASQueryParametersInternal)({ - containerName: this._containerName, - ...options - }, userDelegationKey, this.accountName).stringToSign; - } - /** - * Creates a BlobBatchClient object to conduct batch operations. - * - * @see https://learn.microsoft.com/rest/api/storageservices/blob-batch + * Begins an operation to start an incremental copy from one page blob's snapshot to this page blob. + * The snapshot is copied such that only the differential changes between the previously + * copied snapshot are transferred to the destination. + * The copied snapshots are complete copies of the original snapshot and can be read or copied from as usual. + * @see https://learn.microsoft.com/rest/api/storageservices/incremental-copy-blob + * @see https://learn.microsoft.com/azure/virtual-machines/windows/incremental-snapshots * - * @returns A new BlobBatchClient object for this container. + * @param copySource - Specifies the name of the source page blob snapshot. For example, + * https://myaccount.blob.core.windows.net/mycontainer/myblob?snapshot= + * @param options - Options to the Page Blob Copy Incremental operation. + * @returns Response data for the Page Blob Copy Incremental operation. 
*/ - getBlobBatchClient() { - return new BlobBatchClient_js_1.BlobBatchClient(this.url, this.pipeline); + async startCopyIncremental(copySource, options = {}) { + return tracing_js_1.tracingClient.withSpan("PageBlobClient-startCopyIncremental", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this.pageBlobContext.copyIncremental(copySource, { + abortSignal: options.abortSignal, + modifiedAccessConditions: { + ...options.conditions, + ifTags: options.conditions?.tagConditions + }, + tracingOptions: updatedOptions.tracingOptions + })); + }); } }; - exports2.ContainerClient = ContainerClient; + exports2.PageBlobClient = PageBlobClient; } }); -// node_modules/@azure/storage-blob/dist/commonjs/sas/AccountSASPermissions.js -var require_AccountSASPermissions = __commonJS({ - "node_modules/@azure/storage-blob/dist/commonjs/sas/AccountSASPermissions.js"(exports2) { +// node_modules/@azure/storage-blob/dist/commonjs/BatchUtils.js +var require_BatchUtils = __commonJS({ + "node_modules/@azure/storage-blob/dist/commonjs/BatchUtils.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.AccountSASPermissions = void 0; - var AccountSASPermissions = class _AccountSASPermissions { - /** - * Parse initializes the AccountSASPermissions fields from a string. - * - * @param permissions - - */ - static parse(permissions) { - const accountSASPermissions = new _AccountSASPermissions(); - for (const c of permissions) { - switch (c) { - case "r": - accountSASPermissions.read = true; - break; - case "w": - accountSASPermissions.write = true; - break; - case "d": - accountSASPermissions.delete = true; - break; - case "x": - accountSASPermissions.deleteVersion = true; - break; - case "l": - accountSASPermissions.list = true; - break; - case "a": - accountSASPermissions.add = true; - break; - case "c": - accountSASPermissions.create = true; - break; - case "u": - accountSASPermissions.update = true; - break; - case "p": - accountSASPermissions.process = true; - break; - case "t": - accountSASPermissions.tag = true; - break; - case "f": - accountSASPermissions.filter = true; - break; - case "i": - accountSASPermissions.setImmutabilityPolicy = true; - break; - case "y": - accountSASPermissions.permanentDelete = true; - break; - default: - throw new RangeError(`Invalid permission character: ${c}`); - } - } - return accountSASPermissions; - } - /** - * Creates a {@link AccountSASPermissions} from a raw object which contains same keys as it - * and boolean values for them. 
- * - * @param permissionLike - - */ - static from(permissionLike) { - const accountSASPermissions = new _AccountSASPermissions(); - if (permissionLike.read) { - accountSASPermissions.read = true; - } - if (permissionLike.write) { - accountSASPermissions.write = true; - } - if (permissionLike.delete) { - accountSASPermissions.delete = true; - } - if (permissionLike.deleteVersion) { - accountSASPermissions.deleteVersion = true; - } - if (permissionLike.filter) { - accountSASPermissions.filter = true; - } - if (permissionLike.tag) { - accountSASPermissions.tag = true; - } - if (permissionLike.list) { - accountSASPermissions.list = true; - } - if (permissionLike.add) { - accountSASPermissions.add = true; - } - if (permissionLike.create) { - accountSASPermissions.create = true; - } - if (permissionLike.update) { - accountSASPermissions.update = true; - } - if (permissionLike.process) { - accountSASPermissions.process = true; - } - if (permissionLike.setImmutabilityPolicy) { - accountSASPermissions.setImmutabilityPolicy = true; - } - if (permissionLike.permanentDelete) { - accountSASPermissions.permanentDelete = true; - } - return accountSASPermissions; - } - /** - * Permission to read resources and list queues and tables granted. - */ - read = false; - /** - * Permission to write resources granted. - */ - write = false; - /** - * Permission to delete blobs and files granted. - */ - delete = false; - /** - * Permission to delete versions granted. - */ - deleteVersion = false; - /** - * Permission to list blob containers, blobs, shares, directories, and files granted. - */ - list = false; - /** - * Permission to add messages, table entities, and append to blobs granted. - */ - add = false; - /** - * Permission to create blobs and files granted. - */ - create = false; - /** - * Permissions to update messages and table entities granted. - */ - update = false; - /** - * Permission to get and delete messages granted. - */ - process = false; - /** - * Specfies Tag access granted. - */ - tag = false; - /** - * Permission to filter blobs. - */ - filter = false; - /** - * Permission to set immutability policy. - */ - setImmutabilityPolicy = false; - /** - * Specifies that Permanent Delete is permitted. - */ - permanentDelete = false; - /** - * Produces the SAS permissions string for an Azure Storage account. - * Call this method to set AccountSASSignatureValues Permissions field. - * - * Using this method will guarantee the resource types are in - * an order accepted by the service. 
- * - * @see https://learn.microsoft.com/rest/api/storageservices/constructing-an-account-sas - * - */ - toString() { - const permissions = []; - if (this.read) { - permissions.push("r"); - } - if (this.write) { - permissions.push("w"); - } - if (this.delete) { - permissions.push("d"); - } - if (this.deleteVersion) { - permissions.push("x"); - } - if (this.filter) { - permissions.push("f"); - } - if (this.tag) { - permissions.push("t"); - } - if (this.list) { - permissions.push("l"); - } - if (this.add) { - permissions.push("a"); - } - if (this.create) { - permissions.push("c"); + exports2.getBodyAsText = getBodyAsText; + exports2.utf8ByteLength = utf8ByteLength; + var utils_js_1 = require_utils7(); + var constants_js_1 = require_constants15(); + async function getBodyAsText(batchResponse) { + let buffer = Buffer.alloc(constants_js_1.BATCH_MAX_PAYLOAD_IN_BYTES); + const responseLength = await (0, utils_js_1.streamToBuffer2)(batchResponse.readableStreamBody, buffer); + buffer = buffer.slice(0, responseLength); + return buffer.toString(); + } + function utf8ByteLength(str2) { + return Buffer.byteLength(str2); + } + } +}); + +// node_modules/@azure/storage-blob/dist/commonjs/BatchResponseParser.js +var require_BatchResponseParser = __commonJS({ + "node_modules/@azure/storage-blob/dist/commonjs/BatchResponseParser.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.BatchResponseParser = void 0; + var core_rest_pipeline_1 = require_commonjs6(); + var core_http_compat_1 = require_commonjs9(); + var constants_js_1 = require_constants15(); + var BatchUtils_js_1 = require_BatchUtils(); + var log_js_1 = require_log5(); + var HTTP_HEADER_DELIMITER = ": "; + var SPACE_DELIMITER = " "; + var NOT_FOUND = -1; + var BatchResponseParser = class { + batchResponse; + responseBatchBoundary; + perResponsePrefix; + batchResponseEnding; + subRequests; + constructor(batchResponse, subRequests) { + if (!batchResponse || !batchResponse.contentType) { + throw new RangeError("batchResponse is malformed or doesn't contain valid content-type."); } - if (this.update) { - permissions.push("u"); + if (!subRequests || subRequests.size === 0) { + throw new RangeError("Invalid state: subRequests is not provided or size is 0."); } - if (this.process) { - permissions.push("p"); + this.batchResponse = batchResponse; + this.subRequests = subRequests; + this.responseBatchBoundary = this.batchResponse.contentType.split("=")[1]; + this.perResponsePrefix = `--${this.responseBatchBoundary}${constants_js_1.HTTP_LINE_ENDING}`; + this.batchResponseEnding = `--${this.responseBatchBoundary}--`; + } + // For example of response, please refer to https://learn.microsoft.com/rest/api/storageservices/blob-batch#response + async parseBatchResponse() { + if (this.batchResponse._response.status !== constants_js_1.HTTPURLConnection.HTTP_ACCEPTED) { + throw new Error(`Invalid state: batch request failed with status: '${this.batchResponse._response.status}'.`); } - if (this.setImmutabilityPolicy) { - permissions.push("i"); + const responseBodyAsText = await (0, BatchUtils_js_1.getBodyAsText)(this.batchResponse); + const subResponses = responseBodyAsText.split(this.batchResponseEnding)[0].split(this.perResponsePrefix).slice(1); + const subResponseCount = subResponses.length; + if (subResponseCount !== this.subRequests.size && subResponseCount !== 1) { + throw new Error("Invalid state: sub responses' count is not equal to sub requests' count."); } - if (this.permanentDelete) { - 
permissions.push("y"); + const deserializedSubResponses = new Array(subResponseCount); + let subResponsesSucceededCount = 0; + let subResponsesFailedCount = 0; + for (let index = 0; index < subResponseCount; index++) { + const subResponse = subResponses[index]; + const deserializedSubResponse = {}; + deserializedSubResponse.headers = (0, core_http_compat_1.toHttpHeadersLike)((0, core_rest_pipeline_1.createHttpHeaders)()); + const responseLines = subResponse.split(`${constants_js_1.HTTP_LINE_ENDING}`); + let subRespHeaderStartFound = false; + let subRespHeaderEndFound = false; + let subRespFailed = false; + let contentId = NOT_FOUND; + for (const responseLine of responseLines) { + if (!subRespHeaderStartFound) { + if (responseLine.startsWith(constants_js_1.HeaderConstants.CONTENT_ID)) { + contentId = parseInt(responseLine.split(HTTP_HEADER_DELIMITER)[1]); + } + if (responseLine.startsWith(constants_js_1.HTTP_VERSION_1_1)) { + subRespHeaderStartFound = true; + const tokens = responseLine.split(SPACE_DELIMITER); + deserializedSubResponse.status = parseInt(tokens[1]); + deserializedSubResponse.statusMessage = tokens.slice(2).join(SPACE_DELIMITER); + } + continue; + } + if (responseLine.trim() === "") { + if (!subRespHeaderEndFound) { + subRespHeaderEndFound = true; + } + continue; + } + if (!subRespHeaderEndFound) { + if (responseLine.indexOf(HTTP_HEADER_DELIMITER) === -1) { + throw new Error(`Invalid state: find non-empty line '${responseLine}' without HTTP header delimiter '${HTTP_HEADER_DELIMITER}'.`); + } + const tokens = responseLine.split(HTTP_HEADER_DELIMITER); + deserializedSubResponse.headers.set(tokens[0], tokens[1]); + if (tokens[0] === constants_js_1.HeaderConstants.X_MS_ERROR_CODE) { + deserializedSubResponse.errorCode = tokens[1]; + subRespFailed = true; + } + } else { + if (!deserializedSubResponse.bodyAsText) { + deserializedSubResponse.bodyAsText = ""; + } + deserializedSubResponse.bodyAsText += responseLine; + } + } + if (contentId !== NOT_FOUND && Number.isInteger(contentId) && contentId >= 0 && contentId < this.subRequests.size && deserializedSubResponses[contentId] === void 0) { + deserializedSubResponse._request = this.subRequests.get(contentId); + deserializedSubResponses[contentId] = deserializedSubResponse; + } else { + log_js_1.logger.error(`subResponses[${index}] is dropped as the Content-ID is not found or invalid, Content-ID: ${contentId}`); + } + if (subRespFailed) { + subResponsesFailedCount++; + } else { + subResponsesSucceededCount++; + } } - return permissions.join(""); + return { + subResponses: deserializedSubResponses, + subResponsesSucceededCount, + subResponsesFailedCount + }; } }; - exports2.AccountSASPermissions = AccountSASPermissions; + exports2.BatchResponseParser = BatchResponseParser; } }); -// node_modules/@azure/storage-blob/dist/commonjs/sas/AccountSASResourceTypes.js -var require_AccountSASResourceTypes = __commonJS({ - "node_modules/@azure/storage-blob/dist/commonjs/sas/AccountSASResourceTypes.js"(exports2) { +// node_modules/@azure/storage-blob/dist/commonjs/utils/Mutex.js +var require_Mutex = __commonJS({ + "node_modules/@azure/storage-blob/dist/commonjs/utils/Mutex.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.AccountSASResourceTypes = void 0; - var AccountSASResourceTypes = class _AccountSASResourceTypes { + exports2.Mutex = void 0; + var MutexLockStatus; + (function(MutexLockStatus2) { + MutexLockStatus2[MutexLockStatus2["LOCKED"] = 0] = "LOCKED"; + 
MutexLockStatus2[MutexLockStatus2["UNLOCKED"] = 1] = "UNLOCKED"; + })(MutexLockStatus || (MutexLockStatus = {})); + var Mutex = class { /** - * Creates an {@link AccountSASResourceTypes} from the specified resource types string. This method will throw an - * Error if it encounters a character that does not correspond to a valid resource type. + * Lock for a specific key. If the lock has been acquired by another customer, then + * will wait until getting the lock. * - * @param resourceTypes - + * @param key - lock key */ - static parse(resourceTypes) { - const accountSASResourceTypes = new _AccountSASResourceTypes(); - for (const c of resourceTypes) { - switch (c) { - case "s": - accountSASResourceTypes.service = true; - break; - case "c": - accountSASResourceTypes.container = true; - break; - case "o": - accountSASResourceTypes.object = true; - break; - default: - throw new RangeError(`Invalid resource type: ${c}`); + static async lock(key) { + return new Promise((resolve6) => { + if (this.keys[key] === void 0 || this.keys[key] === MutexLockStatus.UNLOCKED) { + this.keys[key] = MutexLockStatus.LOCKED; + resolve6(); + } else { + this.onUnlockEvent(key, () => { + this.keys[key] = MutexLockStatus.LOCKED; + resolve6(); + }); + } + }); + } + /** + * Unlock a key. + * + * @param key - + */ + static async unlock(key) { + return new Promise((resolve6) => { + if (this.keys[key] === MutexLockStatus.LOCKED) { + this.emitUnlockEvent(key); } + delete this.keys[key]; + resolve6(); + }); + } + static keys = {}; + static listeners = {}; + static onUnlockEvent(key, handler2) { + if (this.listeners[key] === void 0) { + this.listeners[key] = [handler2]; + } else { + this.listeners[key].push(handler2); } - return accountSASResourceTypes; + } + static emitUnlockEvent(key) { + if (this.listeners[key] !== void 0 && this.listeners[key].length > 0) { + const handler2 = this.listeners[key].shift(); + setImmediate(() => { + handler2.call(this); + }); + } + } + }; + exports2.Mutex = Mutex; + } +}); + +// node_modules/@azure/storage-blob/dist/commonjs/BlobBatch.js +var require_BlobBatch = __commonJS({ + "node_modules/@azure/storage-blob/dist/commonjs/BlobBatch.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.BlobBatch = void 0; + var core_util_1 = require_commonjs4(); + var core_auth_1 = require_commonjs7(); + var core_rest_pipeline_1 = require_commonjs6(); + var core_util_2 = require_commonjs4(); + var AnonymousCredential_js_1 = require_AnonymousCredential(); + var Clients_js_1 = require_Clients(); + var Mutex_js_1 = require_Mutex(); + var Pipeline_js_1 = require_Pipeline(); + var utils_common_js_1 = require_utils_common(); + var core_xml_1 = require_commonjs10(); + var constants_js_1 = require_constants15(); + var StorageSharedKeyCredential_js_1 = require_StorageSharedKeyCredential(); + var tracing_js_1 = require_tracing(); + var core_client_1 = require_commonjs8(); + var StorageSharedKeyCredentialPolicyV2_js_1 = require_StorageSharedKeyCredentialPolicyV22(); + var BlobBatch = class { + batchRequest; + batch = "batch"; + batchType; + constructor() { + this.batchRequest = new InnerBatchRequest(); } /** - * Permission to access service level APIs granted. + * Get the value of Content-Type for a batch request. + * The value must be multipart/mixed with a batch boundary. 
+ * Example: multipart/mixed; boundary=batch_a81786c8-e301-4e42-a729-a32ca24ae252 */ - service = false; + getMultiPartContentType() { + return this.batchRequest.getMultipartContentType(); + } /** - * Permission to access container level APIs (Blob Containers, Tables, Queues, File Shares) granted. + * Get assembled HTTP request body for sub requests. */ - container = false; + getHttpRequestBody() { + return this.batchRequest.getHttpRequestBody(); + } /** - * Permission to access object level APIs (Blobs, Table Entities, Queue Messages, Files) granted. + * Get sub requests that are added into the batch request. */ - object = false; + getSubRequests() { + return this.batchRequest.getSubRequests(); + } + async addSubRequestInternal(subRequest, assembleSubRequestFunc) { + await Mutex_js_1.Mutex.lock(this.batch); + try { + this.batchRequest.preAddSubRequest(subRequest); + await assembleSubRequestFunc(); + this.batchRequest.postAddSubRequest(subRequest); + } finally { + await Mutex_js_1.Mutex.unlock(this.batch); + } + } + setBatchType(batchType) { + if (!this.batchType) { + this.batchType = batchType; + } + if (this.batchType !== batchType) { + throw new RangeError(`BlobBatch only supports one operation type per batch and it already is being used for ${this.batchType} operations.`); + } + } + async deleteBlob(urlOrBlobClient, credentialOrOptions, options) { + let url2; + let credential; + if (typeof urlOrBlobClient === "string" && (core_util_2.isNodeLike && credentialOrOptions instanceof StorageSharedKeyCredential_js_1.StorageSharedKeyCredential || credentialOrOptions instanceof AnonymousCredential_js_1.AnonymousCredential || (0, core_auth_1.isTokenCredential)(credentialOrOptions))) { + url2 = urlOrBlobClient; + credential = credentialOrOptions; + } else if (urlOrBlobClient instanceof Clients_js_1.BlobClient) { + url2 = urlOrBlobClient.url; + credential = urlOrBlobClient.credential; + options = credentialOrOptions; + } else { + throw new RangeError("Invalid arguments. Either url and credential, or BlobClient need be provided."); + } + if (!options) { + options = {}; + } + return tracing_js_1.tracingClient.withSpan("BatchDeleteRequest-addSubRequest", options, async (updatedOptions) => { + this.setBatchType("delete"); + await this.addSubRequestInternal({ + url: url2, + credential + }, async () => { + await new Clients_js_1.BlobClient(url2, this.batchRequest.createPipeline(credential)).delete(updatedOptions); + }); + }); + } + async setBlobAccessTier(urlOrBlobClient, credentialOrTier, tierOrOptions, options) { + let url2; + let credential; + let tier; + if (typeof urlOrBlobClient === "string" && (core_util_2.isNodeLike && credentialOrTier instanceof StorageSharedKeyCredential_js_1.StorageSharedKeyCredential || credentialOrTier instanceof AnonymousCredential_js_1.AnonymousCredential || (0, core_auth_1.isTokenCredential)(credentialOrTier))) { + url2 = urlOrBlobClient; + credential = credentialOrTier; + tier = tierOrOptions; + } else if (urlOrBlobClient instanceof Clients_js_1.BlobClient) { + url2 = urlOrBlobClient.url; + credential = urlOrBlobClient.credential; + tier = credentialOrTier; + options = tierOrOptions; + } else { + throw new RangeError("Invalid arguments. 
Either url and credential, or BlobClient need be provided."); + } + if (!options) { + options = {}; + } + return tracing_js_1.tracingClient.withSpan("BatchSetTierRequest-addSubRequest", options, async (updatedOptions) => { + this.setBatchType("setAccessTier"); + await this.addSubRequestInternal({ + url: url2, + credential + }, async () => { + await new Clients_js_1.BlobClient(url2, this.batchRequest.createPipeline(credential)).setAccessTier(tier, updatedOptions); + }); + }); + } + }; + exports2.BlobBatch = BlobBatch; + var InnerBatchRequest = class { + operationCount; + body; + subRequests; + boundary; + subRequestPrefix; + multipartContentType; + batchRequestEnding; + constructor() { + this.operationCount = 0; + this.body = ""; + const tempGuid = (0, core_util_1.randomUUID)(); + this.boundary = `batch_${tempGuid}`; + this.subRequestPrefix = `--${this.boundary}${constants_js_1.HTTP_LINE_ENDING}${constants_js_1.HeaderConstants.CONTENT_TYPE}: application/http${constants_js_1.HTTP_LINE_ENDING}${constants_js_1.HeaderConstants.CONTENT_TRANSFER_ENCODING}: binary`; + this.multipartContentType = `multipart/mixed; boundary=${this.boundary}`; + this.batchRequestEnding = `--${this.boundary}--`; + this.subRequests = /* @__PURE__ */ new Map(); + } /** - * Converts the given resource types to a string. - * - * @see https://learn.microsoft.com/rest/api/storageservices/constructing-an-account-sas - * + * Create pipeline to assemble sub requests. The idea here is to use existing + * credential and serialization/deserialization components, with additional policies to + * filter unnecessary headers, assemble sub requests into request's body + * and intercept request from going to wire. + * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used. 
*/ - toString() { - const resourceTypes = []; - if (this.service) { - resourceTypes.push("s"); + createPipeline(credential) { + const corePipeline = (0, core_rest_pipeline_1.createEmptyPipeline)(); + corePipeline.addPolicy((0, core_client_1.serializationPolicy)({ + stringifyXML: core_xml_1.stringifyXML, + serializerOptions: { + xml: { + xmlCharKey: "#" + } + } + }), { phase: "Serialize" }); + corePipeline.addPolicy(batchHeaderFilterPolicy()); + corePipeline.addPolicy(batchRequestAssemblePolicy(this), { afterPhase: "Sign" }); + if ((0, core_auth_1.isTokenCredential)(credential)) { + corePipeline.addPolicy((0, core_rest_pipeline_1.bearerTokenAuthenticationPolicy)({ + credential, + scopes: constants_js_1.StorageOAuthScopes, + challengeCallbacks: { authorizeRequestOnChallenge: core_client_1.authorizeRequestOnTenantChallenge } + }), { phase: "Sign" }); + } else if (credential instanceof StorageSharedKeyCredential_js_1.StorageSharedKeyCredential) { + corePipeline.addPolicy((0, StorageSharedKeyCredentialPolicyV2_js_1.storageSharedKeyCredentialPolicy)({ + accountName: credential.accountName, + accountKey: credential.accountKey + }), { phase: "Sign" }); } - if (this.container) { - resourceTypes.push("c"); + const pipeline = new Pipeline_js_1.Pipeline([]); + pipeline._credential = credential; + pipeline._corePipeline = corePipeline; + return pipeline; + } + appendSubRequestToBody(request2) { + this.body += [ + this.subRequestPrefix, + // sub request constant prefix + `${constants_js_1.HeaderConstants.CONTENT_ID}: ${this.operationCount}`, + // sub request's content ID + "", + // empty line after sub request's content ID + `${request2.method.toString()} ${(0, utils_common_js_1.getURLPathAndQuery)(request2.url)} ${constants_js_1.HTTP_VERSION_1_1}${constants_js_1.HTTP_LINE_ENDING}` + // sub request start line with method + ].join(constants_js_1.HTTP_LINE_ENDING); + for (const [name, value] of request2.headers) { + this.body += `${name}: ${value}${constants_js_1.HTTP_LINE_ENDING}`; } - if (this.object) { - resourceTypes.push("o"); + this.body += constants_js_1.HTTP_LINE_ENDING; + } + preAddSubRequest(subRequest) { + if (this.operationCount >= constants_js_1.BATCH_MAX_REQUEST) { + throw new RangeError(`Cannot exceed ${constants_js_1.BATCH_MAX_REQUEST} sub requests in a single batch`); } - return resourceTypes.join(""); + const path13 = (0, utils_common_js_1.getURLPath)(subRequest.url); + if (!path13 || path13 === "") { + throw new RangeError(`Invalid url for sub request: '${subRequest.url}'`); + } + } + postAddSubRequest(subRequest) { + this.subRequests.set(this.operationCount, subRequest); + this.operationCount++; + } + // Return the http request body with assembling the ending line to the sub request body. 
+ getHttpRequestBody() { + return `${this.body}${this.batchRequestEnding}${constants_js_1.HTTP_LINE_ENDING}`; + } + getMultipartContentType() { + return this.multipartContentType; + } + getSubRequests() { + return this.subRequests; } }; - exports2.AccountSASResourceTypes = AccountSASResourceTypes; + function batchRequestAssemblePolicy(batchRequest) { + return { + name: "batchRequestAssemblePolicy", + async sendRequest(request2) { + batchRequest.appendSubRequestToBody(request2); + return { + request: request2, + status: 200, + headers: (0, core_rest_pipeline_1.createHttpHeaders)() + }; + } + }; + } + function batchHeaderFilterPolicy() { + return { + name: "batchHeaderFilterPolicy", + async sendRequest(request2, next) { + let xMsHeaderName = ""; + for (const [name] of request2.headers) { + if ((0, utils_common_js_1.iEqual)(name, constants_js_1.HeaderConstants.X_MS_VERSION)) { + xMsHeaderName = name; + } + } + if (xMsHeaderName !== "") { + request2.headers.delete(xMsHeaderName); + } + return next(request2); + } + }; + } } }); -// node_modules/@azure/storage-blob/dist/commonjs/sas/AccountSASServices.js -var require_AccountSASServices = __commonJS({ - "node_modules/@azure/storage-blob/dist/commonjs/sas/AccountSASServices.js"(exports2) { +// node_modules/@azure/storage-blob/dist/commonjs/BlobBatchClient.js +var require_BlobBatchClient = __commonJS({ + "node_modules/@azure/storage-blob/dist/commonjs/BlobBatchClient.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.AccountSASServices = void 0; - var AccountSASServices = class _AccountSASServices { + exports2.BlobBatchClient = void 0; + var BatchResponseParser_js_1 = require_BatchResponseParser(); + var BatchUtils_js_1 = require_BatchUtils(); + var BlobBatch_js_1 = require_BlobBatch(); + var tracing_js_1 = require_tracing(); + var AnonymousCredential_js_1 = require_AnonymousCredential(); + var StorageContextClient_js_1 = require_StorageContextClient(); + var Pipeline_js_1 = require_Pipeline(); + var utils_common_js_1 = require_utils_common(); + var BlobBatchClient = class { + serviceOrContainerContext; + constructor(url2, credentialOrPipeline, options) { + let pipeline; + if ((0, Pipeline_js_1.isPipelineLike)(credentialOrPipeline)) { + pipeline = credentialOrPipeline; + } else if (!credentialOrPipeline) { + pipeline = (0, Pipeline_js_1.newPipeline)(new AnonymousCredential_js_1.AnonymousCredential(), options); + } else { + pipeline = (0, Pipeline_js_1.newPipeline)(credentialOrPipeline, options); + } + const storageClientContext = new StorageContextClient_js_1.StorageContextClient(url2, (0, Pipeline_js_1.getCoreClientOptions)(pipeline)); + const path13 = (0, utils_common_js_1.getURLPath)(url2); + if (path13 && path13 !== "/") { + this.serviceOrContainerContext = storageClientContext.container; + } else { + this.serviceOrContainerContext = storageClientContext.service; + } + } /** - * Creates an {@link AccountSASServices} from the specified services string. This method will throw an - * Error if it encounters a character that does not correspond to a valid service. - * - * @param services - + * Creates a {@link BlobBatch}. + * A BlobBatch represents an aggregated set of operations on blobs. 
*/ - static parse(services) { - const accountSASServices = new _AccountSASServices(); - for (const c of services) { - switch (c) { - case "b": - accountSASServices.blob = true; - break; - case "f": - accountSASServices.file = true; - break; - case "q": - accountSASServices.queue = true; - break; - case "t": - accountSASServices.table = true; - break; - default: - throw new RangeError(`Invalid service character: ${c}`); + createBatch() { + return new BlobBatch_js_1.BlobBatch(); + } + async deleteBlobs(urlsOrBlobClients, credentialOrOptions, options) { + const batch = new BlobBatch_js_1.BlobBatch(); + for (const urlOrBlobClient of urlsOrBlobClients) { + if (typeof urlOrBlobClient === "string") { + await batch.deleteBlob(urlOrBlobClient, credentialOrOptions, options); + } else { + await batch.deleteBlob(urlOrBlobClient, credentialOrOptions); } } - return accountSASServices; + return this.submitBatch(batch); + } + async setBlobsAccessTier(urlsOrBlobClients, credentialOrTier, tierOrOptions, options) { + const batch = new BlobBatch_js_1.BlobBatch(); + for (const urlOrBlobClient of urlsOrBlobClients) { + if (typeof urlOrBlobClient === "string") { + await batch.setBlobAccessTier(urlOrBlobClient, credentialOrTier, tierOrOptions, options); + } else { + await batch.setBlobAccessTier(urlOrBlobClient, credentialOrTier, tierOrOptions); + } + } + return this.submitBatch(batch); } /** - * Permission to access blob resources granted. - */ - blob = false; - /** - * Permission to access file resources granted. - */ - file = false; - /** - * Permission to access queue resources granted. - */ - queue = false; - /** - * Permission to access table resources granted. - */ - table = false; - /** - * Converts the given services to a string. + * Submit batch request which consists of multiple subrequests. + * + * Get `blobBatchClient` and other details before running the snippets. 
+ * `blobServiceClient.getBlobBatchClient()` gives the `blobBatchClient` + * + * Example usage: + * + * ```ts snippet:BlobBatchClientSubmitBatch + * import { DefaultAzureCredential } from "@azure/identity"; + * import { BlobServiceClient, BlobBatch } from "@azure/storage-blob"; + * + * const account = ""; + * const credential = new DefaultAzureCredential(); + * const blobServiceClient = new BlobServiceClient( + * `https://${account}.blob.core.windows.net`, + * credential, + * ); + * + * const containerName = ""; + * const containerClient = blobServiceClient.getContainerClient(containerName); + * const blobBatchClient = containerClient.getBlobBatchClient(); + * + * const batchRequest = new BlobBatch(); + * await batchRequest.deleteBlob("", credential); + * await batchRequest.deleteBlob("", credential, { + * deleteSnapshots: "include", + * }); + * const batchResp = await blobBatchClient.submitBatch(batchRequest); + * console.log(batchResp.subResponsesSucceededCount); + * ``` + * + * Example using a lease: + * + * ```ts snippet:BlobBatchClientSubmitBatchWithLease + * import { DefaultAzureCredential } from "@azure/identity"; + * import { BlobServiceClient, BlobBatch } from "@azure/storage-blob"; + * + * const account = ""; + * const credential = new DefaultAzureCredential(); + * const blobServiceClient = new BlobServiceClient( + * `https://${account}.blob.core.windows.net`, + * credential, + * ); + * + * const containerName = ""; + * const containerClient = blobServiceClient.getContainerClient(containerName); + * const blobBatchClient = containerClient.getBlobBatchClient(); + * const blobClient = containerClient.getBlobClient(""); + * + * const batchRequest = new BlobBatch(); + * await batchRequest.setBlobAccessTier(blobClient, "Cool"); + * await batchRequest.setBlobAccessTier(blobClient, "Cool", { + * conditions: { leaseId: "" }, + * }); + * const batchResp = await blobBatchClient.submitBatch(batchRequest); + * console.log(batchResp.subResponsesSucceededCount); + * ``` + * + * @see https://learn.microsoft.com/rest/api/storageservices/blob-batch * + * @param batchRequest - A set of Delete or SetTier operations. 
+ * @param options - */ - toString() { - const services = []; - if (this.blob) { - services.push("b"); - } - if (this.table) { - services.push("t"); - } - if (this.queue) { - services.push("q"); - } - if (this.file) { - services.push("f"); + async submitBatch(batchRequest, options = {}) { + if (!batchRequest || batchRequest.getSubRequests().size === 0) { + throw new RangeError("Batch request should contain one or more sub requests."); } - return services.join(""); + return tracing_js_1.tracingClient.withSpan("BlobBatchClient-submitBatch", options, async (updatedOptions) => { + const batchRequestBody = batchRequest.getHttpRequestBody(); + const rawBatchResponse = (0, utils_common_js_1.assertResponse)(await this.serviceOrContainerContext.submitBatch((0, BatchUtils_js_1.utf8ByteLength)(batchRequestBody), batchRequest.getMultiPartContentType(), batchRequestBody, { + ...updatedOptions + })); + const batchResponseParser = new BatchResponseParser_js_1.BatchResponseParser(rawBatchResponse, batchRequest.getSubRequests()); + const responseSummary = await batchResponseParser.parseBatchResponse(); + const res = { + _response: rawBatchResponse._response, + contentType: rawBatchResponse.contentType, + errorCode: rawBatchResponse.errorCode, + requestId: rawBatchResponse.requestId, + clientRequestId: rawBatchResponse.clientRequestId, + version: rawBatchResponse.version, + subResponses: responseSummary.subResponses, + subResponsesSucceededCount: responseSummary.subResponsesSucceededCount, + subResponsesFailedCount: responseSummary.subResponsesFailedCount + }; + return res; + }); } }; - exports2.AccountSASServices = AccountSASServices; - } -}); - -// node_modules/@azure/storage-blob/dist/commonjs/sas/AccountSASSignatureValues.js -var require_AccountSASSignatureValues = __commonJS({ - "node_modules/@azure/storage-blob/dist/commonjs/sas/AccountSASSignatureValues.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.generateAccountSASQueryParameters = generateAccountSASQueryParameters; - exports2.generateAccountSASQueryParametersInternal = generateAccountSASQueryParametersInternal; - var AccountSASPermissions_js_1 = require_AccountSASPermissions(); - var AccountSASResourceTypes_js_1 = require_AccountSASResourceTypes(); - var AccountSASServices_js_1 = require_AccountSASServices(); - var SasIPRange_js_1 = require_SasIPRange(); - var SASQueryParameters_js_1 = require_SASQueryParameters(); - var constants_js_1 = require_constants15(); - var utils_common_js_1 = require_utils_common(); - function generateAccountSASQueryParameters(accountSASSignatureValues, sharedKeyCredential) { - return generateAccountSASQueryParametersInternal(accountSASSignatureValues, sharedKeyCredential).sasQueryParameters; - } - function generateAccountSASQueryParametersInternal(accountSASSignatureValues, sharedKeyCredential) { - const version = accountSASSignatureValues.version ? 
accountSASSignatureValues.version : constants_js_1.SERVICE_VERSION; - if (accountSASSignatureValues.permissions && accountSASSignatureValues.permissions.setImmutabilityPolicy && version < "2020-08-04") { - throw RangeError("'version' must be >= '2020-08-04' when provided 'i' permission."); - } - if (accountSASSignatureValues.permissions && accountSASSignatureValues.permissions.deleteVersion && version < "2019-10-10") { - throw RangeError("'version' must be >= '2019-10-10' when provided 'x' permission."); - } - if (accountSASSignatureValues.permissions && accountSASSignatureValues.permissions.permanentDelete && version < "2019-10-10") { - throw RangeError("'version' must be >= '2019-10-10' when provided 'y' permission."); - } - if (accountSASSignatureValues.permissions && accountSASSignatureValues.permissions.tag && version < "2019-12-12") { - throw RangeError("'version' must be >= '2019-12-12' when provided 't' permission."); - } - if (accountSASSignatureValues.permissions && accountSASSignatureValues.permissions.filter && version < "2019-12-12") { - throw RangeError("'version' must be >= '2019-12-12' when provided 'f' permission."); - } - if (accountSASSignatureValues.encryptionScope && version < "2020-12-06") { - throw RangeError("'version' must be >= '2020-12-06' when provided 'encryptionScope' in SAS."); - } - const parsedPermissions = AccountSASPermissions_js_1.AccountSASPermissions.parse(accountSASSignatureValues.permissions.toString()); - const parsedServices = AccountSASServices_js_1.AccountSASServices.parse(accountSASSignatureValues.services).toString(); - const parsedResourceTypes = AccountSASResourceTypes_js_1.AccountSASResourceTypes.parse(accountSASSignatureValues.resourceTypes).toString(); - let stringToSign; - if (version >= "2020-12-06") { - stringToSign = [ - sharedKeyCredential.accountName, - parsedPermissions, - parsedServices, - parsedResourceTypes, - accountSASSignatureValues.startsOn ? (0, utils_common_js_1.truncatedISO8061Date)(accountSASSignatureValues.startsOn, false) : "", - (0, utils_common_js_1.truncatedISO8061Date)(accountSASSignatureValues.expiresOn, false), - accountSASSignatureValues.ipRange ? (0, SasIPRange_js_1.ipRangeToString)(accountSASSignatureValues.ipRange) : "", - accountSASSignatureValues.protocol ? accountSASSignatureValues.protocol : "", - version, - accountSASSignatureValues.encryptionScope ? accountSASSignatureValues.encryptionScope : "", - "" - // Account SAS requires an additional newline character - ].join("\n"); - } else { - stringToSign = [ - sharedKeyCredential.accountName, - parsedPermissions, - parsedServices, - parsedResourceTypes, - accountSASSignatureValues.startsOn ? (0, utils_common_js_1.truncatedISO8061Date)(accountSASSignatureValues.startsOn, false) : "", - (0, utils_common_js_1.truncatedISO8061Date)(accountSASSignatureValues.expiresOn, false), - accountSASSignatureValues.ipRange ? (0, SasIPRange_js_1.ipRangeToString)(accountSASSignatureValues.ipRange) : "", - accountSASSignatureValues.protocol ? 
accountSASSignatureValues.protocol : "", - version, - "" - // Account SAS requires an additional newline character - ].join("\n"); - } - const signature = sharedKeyCredential.computeHMACSHA256(stringToSign); - return { - sasQueryParameters: new SASQueryParameters_js_1.SASQueryParameters(version, signature, parsedPermissions.toString(), parsedServices, parsedResourceTypes, accountSASSignatureValues.protocol, accountSASSignatureValues.startsOn, accountSASSignatureValues.expiresOn, accountSASSignatureValues.ipRange, void 0, void 0, void 0, void 0, void 0, void 0, void 0, void 0, void 0, void 0, accountSASSignatureValues.encryptionScope), - stringToSign - }; - } + exports2.BlobBatchClient = BlobBatchClient; } }); -// node_modules/@azure/storage-blob/dist/commonjs/BlobServiceClient.js -var require_BlobServiceClient = __commonJS({ - "node_modules/@azure/storage-blob/dist/commonjs/BlobServiceClient.js"(exports2) { +// node_modules/@azure/storage-blob/dist/commonjs/ContainerClient.js +var require_ContainerClient = __commonJS({ + "node_modules/@azure/storage-blob/dist/commonjs/ContainerClient.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.BlobServiceClient = void 0; - var core_auth_1 = require_commonjs7(); + exports2.ContainerClient = void 0; var core_rest_pipeline_1 = require_commonjs6(); var core_util_1 = require_commonjs4(); - var Pipeline_js_1 = require_Pipeline(); - var ContainerClient_js_1 = require_ContainerClient(); - var utils_common_js_1 = require_utils_common(); - var StorageSharedKeyCredential_js_1 = require_StorageSharedKeyCredential(); + var core_auth_1 = require_commonjs7(); var AnonymousCredential_js_1 = require_AnonymousCredential(); - var utils_common_js_2 = require_utils_common(); + var StorageSharedKeyCredential_js_1 = require_StorageSharedKeyCredential(); + var Pipeline_js_1 = require_Pipeline(); + var StorageClient_js_1 = require_StorageClient(); var tracing_js_1 = require_tracing(); + var utils_common_js_1 = require_utils_common(); + var BlobSASSignatureValues_js_1 = require_BlobSASSignatureValues(); + var BlobLeaseClient_js_1 = require_BlobLeaseClient(); + var Clients_js_1 = require_Clients(); var BlobBatchClient_js_1 = require_BlobBatchClient(); - var StorageClient_js_1 = require_StorageClient(); - var AccountSASPermissions_js_1 = require_AccountSASPermissions(); - var AccountSASSignatureValues_js_1 = require_AccountSASSignatureValues(); - var AccountSASServices_js_1 = require_AccountSASServices(); - var BlobServiceClient = class _BlobServiceClient extends StorageClient_js_1.StorageClient { + var ContainerClient = class extends StorageClient_js_1.StorageClient { /** - * serviceContext provided by protocol layer. + * containerContext provided by protocol layer. */ - serviceContext; + containerContext; + _containerName; /** - * - * Creates an instance of BlobServiceClient from connection string. - * - * @param connectionString - Account connection string or a SAS connection string of an Azure storage account. - * [ Note - Account connection string can only be used in NODE.JS runtime. 
] - * Account connection string example - - * `DefaultEndpointsProtocol=https;AccountName=myaccount;AccountKey=accountKey;EndpointSuffix=core.windows.net` - * SAS connection string example - - * `BlobEndpoint=https://myaccount.blob.core.windows.net/;QueueEndpoint=https://myaccount.queue.core.windows.net/;FileEndpoint=https://myaccount.file.core.windows.net/;TableEndpoint=https://myaccount.table.core.windows.net/;SharedAccessSignature=sasString` - * @param options - Optional. Options to configure the HTTP pipeline. + * The name of the container. */ - static fromConnectionString(connectionString, options) { + get containerName() { + return this._containerName; + } + constructor(urlOrConnectionString, credentialOrPipelineOrContainerName, options) { + let pipeline; + let url2; options = options || {}; - const extractedCreds = (0, utils_common_js_1.extractConnectionStringParts)(connectionString); - if (extractedCreds.kind === "AccountConnString") { - if (core_util_1.isNodeLike) { - const sharedKeyCredential = new StorageSharedKeyCredential_js_1.StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); - if (!options.proxyOptions) { - options.proxyOptions = (0, core_rest_pipeline_1.getDefaultProxySettings)(extractedCreds.proxyUri); + if ((0, Pipeline_js_1.isPipelineLike)(credentialOrPipelineOrContainerName)) { + url2 = urlOrConnectionString; + pipeline = credentialOrPipelineOrContainerName; + } else if (core_util_1.isNodeLike && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential_js_1.StorageSharedKeyCredential || credentialOrPipelineOrContainerName instanceof AnonymousCredential_js_1.AnonymousCredential || (0, core_auth_1.isTokenCredential)(credentialOrPipelineOrContainerName)) { + url2 = urlOrConnectionString; + pipeline = (0, Pipeline_js_1.newPipeline)(credentialOrPipelineOrContainerName, options); + } else if (!credentialOrPipelineOrContainerName && typeof credentialOrPipelineOrContainerName !== "string") { + url2 = urlOrConnectionString; + pipeline = (0, Pipeline_js_1.newPipeline)(new AnonymousCredential_js_1.AnonymousCredential(), options); + } else if (credentialOrPipelineOrContainerName && typeof credentialOrPipelineOrContainerName === "string") { + const containerName = credentialOrPipelineOrContainerName; + const extractedCreds = (0, utils_common_js_1.extractConnectionStringParts)(urlOrConnectionString); + if (extractedCreds.kind === "AccountConnString") { + if (core_util_1.isNodeLike) { + const sharedKeyCredential = new StorageSharedKeyCredential_js_1.StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); + url2 = (0, utils_common_js_1.appendToURLPath)(extractedCreds.url, encodeURIComponent(containerName)); + if (!options.proxyOptions) { + options.proxyOptions = (0, core_rest_pipeline_1.getDefaultProxySettings)(extractedCreds.proxyUri); + } + pipeline = (0, Pipeline_js_1.newPipeline)(sharedKeyCredential, options); + } else { + throw new Error("Account connection string is only supported in Node.js environment"); } - const pipeline = (0, Pipeline_js_1.newPipeline)(sharedKeyCredential, options); - return new _BlobServiceClient(extractedCreds.url, pipeline); + } else if (extractedCreds.kind === "SASConnString") { + url2 = (0, utils_common_js_1.appendToURLPath)(extractedCreds.url, encodeURIComponent(containerName)) + "?" 
+ extractedCreds.accountSas; + pipeline = (0, Pipeline_js_1.newPipeline)(new AnonymousCredential_js_1.AnonymousCredential(), options); } else { - throw new Error("Account connection string is only supported in Node.js environment"); + throw new Error("Connection string must be either an Account connection string or a SAS connection string"); } - } else if (extractedCreds.kind === "SASConnString") { - const pipeline = (0, Pipeline_js_1.newPipeline)(new AnonymousCredential_js_1.AnonymousCredential(), options); - return new _BlobServiceClient(extractedCreds.url + "?" + extractedCreds.accountSas, pipeline); - } else { - throw new Error("Connection string must be either an Account connection string or a SAS connection string"); - } - } - constructor(url2, credentialOrPipeline, options) { - let pipeline; - if ((0, Pipeline_js_1.isPipelineLike)(credentialOrPipeline)) { - pipeline = credentialOrPipeline; - } else if (core_util_1.isNodeLike && credentialOrPipeline instanceof StorageSharedKeyCredential_js_1.StorageSharedKeyCredential || credentialOrPipeline instanceof AnonymousCredential_js_1.AnonymousCredential || (0, core_auth_1.isTokenCredential)(credentialOrPipeline)) { - pipeline = (0, Pipeline_js_1.newPipeline)(credentialOrPipeline, options); } else { - pipeline = (0, Pipeline_js_1.newPipeline)(new AnonymousCredential_js_1.AnonymousCredential(), options); + throw new Error("Expecting non-empty strings for containerName parameter"); } super(url2, pipeline); - this.serviceContext = this.storageClientContext.service; + this._containerName = this.getContainerNameFromUrl(); + this.containerContext = this.storageClientContext.container; } /** - * Creates a {@link ContainerClient} object + * Creates a new container under the specified account. If the container with + * the same name already exists, the operation fails. + * @see https://learn.microsoft.com/rest/api/storageservices/create-container + * Naming rules: @see https://learn.microsoft.com/rest/api/storageservices/naming-and-referencing-containers--blobs--and-metadata + * + * @param options - Options to Container Create operation. * - * @param containerName - A container name - * @returns A new ContainerClient object for the given container name. * * Example usage: * - * ```ts snippet:BlobServiceClientGetContainerClient + * ```ts snippet:ContainerClientCreate * import { BlobServiceClient } from "@azure/storage-blob"; * import { DefaultAzureCredential } from "@azure/identity"; * @@ -89533,249 +89135,513 @@ var require_BlobServiceClient = __commonJS({ * new DefaultAzureCredential(), * ); * - * const containerClient = blobServiceClient.getContainerClient(""); + * const containerName = ""; + * const containerClient = blobServiceClient.getContainerClient(containerName); + * const createContainerResponse = await containerClient.create(); + * console.log("Container was created successfully", createContainerResponse.requestId); * ``` */ - getContainerClient(containerName) { - return new ContainerClient_js_1.ContainerClient((0, utils_common_js_1.appendToURLPath)(this.url, encodeURIComponent(containerName)), this.pipeline); + async create(options = {}) { + return tracing_js_1.tracingClient.withSpan("ContainerClient-create", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this.containerContext.create(updatedOptions)); + }); } /** - * Create a Blob container. @see https://learn.microsoft.com/rest/api/storageservices/create-container + * Creates a new container under the specified account. 
If the container with + * the same name already exists, it is not changed. + * @see https://learn.microsoft.com/rest/api/storageservices/create-container + * Naming rules: @see https://learn.microsoft.com/rest/api/storageservices/naming-and-referencing-containers--blobs--and-metadata * - * @param containerName - Name of the container to create. - * @param options - Options to configure Container Create operation. - * @returns Container creation response and the corresponding container client. + * @param options - */ - async createContainer(containerName, options = {}) { - return tracing_js_1.tracingClient.withSpan("BlobServiceClient-createContainer", options, async (updatedOptions) => { - const containerClient = this.getContainerClient(containerName); - const containerCreateResponse = await containerClient.create(updatedOptions); - return { - containerClient, - containerCreateResponse - }; + async createIfNotExists(options = {}) { + return tracing_js_1.tracingClient.withSpan("ContainerClient-createIfNotExists", options, async (updatedOptions) => { + try { + const res = await this.create(updatedOptions); + return { + succeeded: true, + ...res, + _response: res._response + // _response is made non-enumerable + }; + } catch (e) { + if (e.details?.errorCode === "ContainerAlreadyExists") { + return { + succeeded: false, + ...e.response?.parsedHeaders, + _response: e.response + }; + } else { + throw e; + } + } }); } /** - * Deletes a Blob container. + * Returns true if the Azure container resource represented by this client exists; false otherwise. * - * @param containerName - Name of the container to delete. - * @param options - Options to configure Container Delete operation. - * @returns Container deletion response. + * NOTE: use this function with care since an existing container might be deleted by other clients or + * applications. Vice versa new containers with the same name might be added by other clients or + * applications after this function completes. + * + * @param options - */ - async deleteContainer(containerName, options = {}) { - return tracing_js_1.tracingClient.withSpan("BlobServiceClient-deleteContainer", options, async (updatedOptions) => { - const containerClient = this.getContainerClient(containerName); - return containerClient.delete(updatedOptions); + async exists(options = {}) { + return tracing_js_1.tracingClient.withSpan("ContainerClient-exists", options, async (updatedOptions) => { + try { + await this.getProperties({ + abortSignal: options.abortSignal, + tracingOptions: updatedOptions.tracingOptions + }); + return true; + } catch (e) { + if (e.statusCode === 404) { + return false; + } + throw e; + } }); } /** - * Restore a previously deleted Blob container. - * This API is only functional if Container Soft Delete is enabled for the storage account associated with the container. + * Creates a {@link BlobClient} * - * @param deletedContainerName - Name of the previously deleted container. - * @param deletedContainerVersion - Version of the previously deleted container, used to uniquely identify the deleted container. - * @param options - Options to configure Container Restore operation. - * @returns Container deletion response. + * @param blobName - A blob name + * @returns A new BlobClient object for the given blob name. 
*/ - async undeleteContainer(deletedContainerName, deletedContainerVersion, options = {}) { - return tracing_js_1.tracingClient.withSpan("BlobServiceClient-undeleteContainer", options, async (updatedOptions) => { - const containerClient = this.getContainerClient(options.destinationContainerName || deletedContainerName); - const containerContext = containerClient["storageClientContext"].container; - const containerUndeleteResponse = (0, utils_common_js_2.assertResponse)(await containerContext.restore({ - deletedContainerName, - deletedContainerVersion, + getBlobClient(blobName) { + return new Clients_js_1.BlobClient((0, utils_common_js_1.appendToURLPath)(this.url, (0, utils_common_js_1.EscapePath)(blobName)), this.pipeline); + } + /** + * Creates an {@link AppendBlobClient} + * + * @param blobName - An append blob name + */ + getAppendBlobClient(blobName) { + return new Clients_js_1.AppendBlobClient((0, utils_common_js_1.appendToURLPath)(this.url, (0, utils_common_js_1.EscapePath)(blobName)), this.pipeline); + } + /** + * Creates a {@link BlockBlobClient} + * + * @param blobName - A block blob name + * + * + * Example usage: + * + * ```ts snippet:ClientsUpload + * import { BlobServiceClient } from "@azure/storage-blob"; + * import { DefaultAzureCredential } from "@azure/identity"; + * + * const account = ""; + * const blobServiceClient = new BlobServiceClient( + * `https://${account}.blob.core.windows.net`, + * new DefaultAzureCredential(), + * ); + * + * const containerName = ""; + * const blobName = ""; + * const containerClient = blobServiceClient.getContainerClient(containerName); + * const blockBlobClient = containerClient.getBlockBlobClient(blobName); + * + * const content = "Hello world!"; + * const uploadBlobResponse = await blockBlobClient.upload(content, content.length); + * ``` + */ + getBlockBlobClient(blobName) { + return new Clients_js_1.BlockBlobClient((0, utils_common_js_1.appendToURLPath)(this.url, (0, utils_common_js_1.EscapePath)(blobName)), this.pipeline); + } + /** + * Creates a {@link PageBlobClient} + * + * @param blobName - A page blob name + */ + getPageBlobClient(blobName) { + return new Clients_js_1.PageBlobClient((0, utils_common_js_1.appendToURLPath)(this.url, (0, utils_common_js_1.EscapePath)(blobName)), this.pipeline); + } + /** + * Returns all user-defined metadata and system properties for the specified + * container. The data returned does not include the container's list of blobs. + * @see https://learn.microsoft.com/rest/api/storageservices/get-container-properties + * + * WARNING: The `metadata` object returned in the response will have its keys in lowercase, even if + * they originally contained uppercase characters. This differs from the metadata keys returned by + * the `listContainers` method of {@link BlobServiceClient} using the `includeMetadata` option, which + * will retain their original casing. + * + * @param options - Options to Container Get Properties operation. 
+ */ + async getProperties(options = {}) { + if (!options.conditions) { + options.conditions = {}; + } + return tracing_js_1.tracingClient.withSpan("ContainerClient-getProperties", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this.containerContext.getProperties({ + abortSignal: options.abortSignal, + ...options.conditions, tracingOptions: updatedOptions.tracingOptions })); - return { containerClient, containerUndeleteResponse }; }); } /** - * Gets the properties of a storage account’s Blob service, including properties - * for Storage Analytics and CORS (Cross-Origin Resource Sharing) rules. - * @see https://learn.microsoft.com/rest/api/storageservices/get-blob-service-properties + * Marks the specified container for deletion. The container and any blobs + * contained within it are later deleted during garbage collection. + * @see https://learn.microsoft.com/rest/api/storageservices/delete-container + * + * @param options - Options to Container Delete operation. + */ + async delete(options = {}) { + if (!options.conditions) { + options.conditions = {}; + } + return tracing_js_1.tracingClient.withSpan("ContainerClient-delete", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this.containerContext.delete({ + abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: options.conditions, + tracingOptions: updatedOptions.tracingOptions + })); + }); + } + /** + * Marks the specified container for deletion if it exists. The container and any blobs + * contained within it are later deleted during garbage collection. + * @see https://learn.microsoft.com/rest/api/storageservices/delete-container + * + * @param options - Options to Container Delete operation. + */ + async deleteIfExists(options = {}) { + return tracing_js_1.tracingClient.withSpan("ContainerClient-deleteIfExists", options, async (updatedOptions) => { + try { + const res = await this.delete(updatedOptions); + return { + succeeded: true, + ...res, + _response: res._response + }; + } catch (e) { + if (e.details?.errorCode === "ContainerNotFound") { + return { + succeeded: false, + ...e.response?.parsedHeaders, + _response: e.response + }; + } + throw e; + } + }); + } + /** + * Sets one or more user-defined name-value pairs for the specified container. * - * @param options - Options to the Service Get Properties operation. - * @returns Response data for the Service Get Properties operation. + * If no option provided, or no metadata defined in the parameter, the container + * metadata will be removed. + * + * @see https://learn.microsoft.com/rest/api/storageservices/set-container-metadata + * + * @param metadata - Replace existing metadata with this value. + * If no value provided the existing metadata will be removed. + * @param options - Options to Container Set Metadata operation. 
*/ - async getProperties(options = {}) { - return tracing_js_1.tracingClient.withSpan("BlobServiceClient-getProperties", options, async (updatedOptions) => { - return (0, utils_common_js_2.assertResponse)(await this.serviceContext.getProperties({ + async setMetadata(metadata, options = {}) { + if (!options.conditions) { + options.conditions = {}; + } + if (options.conditions.ifUnmodifiedSince) { + throw new RangeError("the IfUnmodifiedSince must have their default values because they are ignored by the blob service"); + } + return tracing_js_1.tracingClient.withSpan("ContainerClient-setMetadata", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this.containerContext.setMetadata({ abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + metadata, + modifiedAccessConditions: options.conditions, tracingOptions: updatedOptions.tracingOptions })); }); } /** - * Sets properties for a storage account’s Blob service endpoint, including properties - * for Storage Analytics, CORS (Cross-Origin Resource Sharing) rules and soft delete settings. - * @see https://learn.microsoft.com/rest/api/storageservices/set-blob-service-properties + * Gets the permissions for the specified container. The permissions indicate + * whether container data may be accessed publicly. * - * @param properties - - * @param options - Options to the Service Set Properties operation. - * @returns Response data for the Service Set Properties operation. + * WARNING: JavaScript Date will potentially lose precision when parsing startsOn and expiresOn strings. + * For example, new Date("2018-12-31T03:44:23.8827891Z").toISOString() will get "2018-12-31T03:44:23.882Z". + * + * @see https://learn.microsoft.com/rest/api/storageservices/get-container-acl + * + * @param options - Options to Container Get Access Policy operation. */ - async setProperties(properties, options = {}) { - return tracing_js_1.tracingClient.withSpan("BlobServiceClient-setProperties", options, async (updatedOptions) => { - return (0, utils_common_js_2.assertResponse)(await this.serviceContext.setProperties(properties, { + async getAccessPolicy(options = {}) { + if (!options.conditions) { + options.conditions = {}; + } + return tracing_js_1.tracingClient.withSpan("ContainerClient-getAccessPolicy", options, async (updatedOptions) => { + const response = (0, utils_common_js_1.assertResponse)(await this.containerContext.getAccessPolicy({ abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, tracingOptions: updatedOptions.tracingOptions })); + const res = { + _response: response._response, + blobPublicAccess: response.blobPublicAccess, + date: response.date, + etag: response.etag, + errorCode: response.errorCode, + lastModified: response.lastModified, + requestId: response.requestId, + clientRequestId: response.clientRequestId, + signedIdentifiers: [], + version: response.version + }; + for (const identifier of response) { + let accessPolicy = void 0; + if (identifier.accessPolicy) { + accessPolicy = { + permissions: identifier.accessPolicy.permissions + }; + if (identifier.accessPolicy.expiresOn) { + accessPolicy.expiresOn = new Date(identifier.accessPolicy.expiresOn); + } + if (identifier.accessPolicy.startsOn) { + accessPolicy.startsOn = new Date(identifier.accessPolicy.startsOn); + } + } + res.signedIdentifiers.push({ + accessPolicy, + id: identifier.id + }); + } + return res; }); } /** - * Retrieves statistics related to replication for the Blob service. 
It is only - * available on the secondary location endpoint when read-access geo-redundant - * replication is enabled for the storage account. - * @see https://learn.microsoft.com/rest/api/storageservices/get-blob-service-stats + * Sets the permissions for the specified container. The permissions indicate + * whether blobs in a container may be accessed publicly. * - * @param options - Options to the Service Get Statistics operation. - * @returns Response data for the Service Get Statistics operation. + * When you set permissions for a container, the existing permissions are replaced. + * If no access or containerAcl provided, the existing container ACL will be + * removed. + * + * When you establish a stored access policy on a container, it may take up to 30 seconds to take effect. + * During this interval, a shared access signature that is associated with the stored access policy will + * fail with status code 403 (Forbidden), until the access policy becomes active. + * @see https://learn.microsoft.com/rest/api/storageservices/set-container-acl + * + * @param access - The level of public access to data in the container. + * @param containerAcl - Array of elements each having a unique Id and details of the access policy. + * @param options - Options to Container Set Access Policy operation. */ - async getStatistics(options = {}) { - return tracing_js_1.tracingClient.withSpan("BlobServiceClient-getStatistics", options, async (updatedOptions) => { - return (0, utils_common_js_2.assertResponse)(await this.serviceContext.getStatistics({ + async setAccessPolicy(access, containerAcl, options = {}) { + options.conditions = options.conditions || {}; + return tracing_js_1.tracingClient.withSpan("ContainerClient-setAccessPolicy", options, async (updatedOptions) => { + const acl = []; + for (const identifier of containerAcl || []) { + acl.push({ + accessPolicy: { + expiresOn: identifier.accessPolicy.expiresOn ? (0, utils_common_js_1.truncatedISO8061Date)(identifier.accessPolicy.expiresOn) : "", + permissions: identifier.accessPolicy.permissions, + startsOn: identifier.accessPolicy.startsOn ? (0, utils_common_js_1.truncatedISO8061Date)(identifier.accessPolicy.startsOn) : "" + }, + id: identifier.id + }); + } + return (0, utils_common_js_1.assertResponse)(await this.containerContext.setAccessPolicy({ abortSignal: options.abortSignal, + access, + containerAcl: acl, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: options.conditions, tracingOptions: updatedOptions.tracingOptions })); }); } /** - * The Get Account Information operation returns the sku name and account kind - * for the specified account. - * The Get Account Information operation is available on service versions beginning - * with version 2018-03-28. - * @see https://learn.microsoft.com/rest/api/storageservices/get-account-information + * Get a {@link BlobLeaseClient} that manages leases on the container. * - * @param options - Options to the Service Get Account Info operation. - * @returns Response data for the Service Get Account Info operation. + * @param proposeLeaseId - Initial proposed lease Id. + * @returns A new BlobLeaseClient object for managing leases on the container. 
*/ - async getAccountInfo(options = {}) { - return tracing_js_1.tracingClient.withSpan("BlobServiceClient-getAccountInfo", options, async (updatedOptions) => { - return (0, utils_common_js_2.assertResponse)(await this.serviceContext.getAccountInfo({ - abortSignal: options.abortSignal, - tracingOptions: updatedOptions.tracingOptions - })); + getBlobLeaseClient(proposeLeaseId) { + return new BlobLeaseClient_js_1.BlobLeaseClient(this, proposeLeaseId); + } + /** + * Creates a new block blob, or updates the content of an existing block blob. + * + * Updating an existing block blob overwrites any existing metadata on the blob. + * Partial updates are not supported; the content of the existing blob is + * overwritten with the new content. To perform a partial update of a block blob's, + * use {@link BlockBlobClient.stageBlock} and {@link BlockBlobClient.commitBlockList}. + * + * This is a non-parallel uploading method, please use {@link BlockBlobClient.uploadFile}, + * {@link BlockBlobClient.uploadStream} or {@link BlockBlobClient.uploadBrowserData} for better + * performance with concurrency uploading. + * + * @see https://learn.microsoft.com/rest/api/storageservices/put-blob + * + * @param blobName - Name of the block blob to create or update. + * @param body - Blob, string, ArrayBuffer, ArrayBufferView or a function + * which returns a new Readable stream whose offset is from data source beginning. + * @param contentLength - Length of body in bytes. Use Buffer.byteLength() to calculate body length for a + * string including non non-Base64/Hex-encoded characters. + * @param options - Options to configure the Block Blob Upload operation. + * @returns Block Blob upload response data and the corresponding BlockBlobClient instance. + */ + async uploadBlockBlob(blobName, body, contentLength, options = {}) { + return tracing_js_1.tracingClient.withSpan("ContainerClient-uploadBlockBlob", options, async (updatedOptions) => { + const blockBlobClient = this.getBlockBlobClient(blobName); + const response = await blockBlobClient.upload(body, contentLength, updatedOptions); + return { + blockBlobClient, + response + }; }); } /** - * Returns a list of the containers under the specified account. - * @see https://learn.microsoft.com/rest/api/storageservices/list-containers2 + * Marks the specified blob or snapshot for deletion. The blob is later deleted + * during garbage collection. Note that in order to delete a blob, you must delete + * all of its snapshots. You can delete both at the same time with the Delete + * Blob operation. + * @see https://learn.microsoft.com/rest/api/storageservices/delete-blob * - * @param marker - A string value that identifies the portion of - * the list of containers to be returned with the next listing operation. The - * operation returns the continuationToken value within the response body if the - * listing operation did not return all containers remaining to be listed - * with the current page. The continuationToken value can be used as the value for - * the marker parameter in a subsequent call to request the next page of list - * items. The marker value is opaque to the client. - * @param options - Options to the Service List Container Segment operation. - * @returns Response data for the Service List Container Segment operation. + * @param blobName - + * @param options - Options to Blob Delete operation. + * @returns Block blob deletion response data. 
*/ - async listContainersSegment(marker, options = {}) { - return tracing_js_1.tracingClient.withSpan("BlobServiceClient-listContainersSegment", options, async (updatedOptions) => { - return (0, utils_common_js_2.assertResponse)(await this.serviceContext.listContainersSegment({ - abortSignal: options.abortSignal, + async deleteBlob(blobName, options = {}) { + return tracing_js_1.tracingClient.withSpan("ContainerClient-deleteBlob", options, async (updatedOptions) => { + let blobClient = this.getBlobClient(blobName); + if (options.versionId) { + blobClient = blobClient.withVersion(options.versionId); + } + return blobClient.delete(updatedOptions); + }); + } + /** + * listBlobFlatSegment returns a single segment of blobs starting from the + * specified Marker. Use an empty Marker to start enumeration from the beginning. + * After getting a segment, process it, and then call listBlobsFlatSegment again + * (passing the the previously-returned Marker) to get the next segment. + * @see https://learn.microsoft.com/rest/api/storageservices/list-blobs + * + * @param marker - A string value that identifies the portion of the list to be returned with the next list operation. + * @param options - Options to Container List Blob Flat Segment operation. + */ + async listBlobFlatSegment(marker, options = {}) { + return tracing_js_1.tracingClient.withSpan("ContainerClient-listBlobFlatSegment", options, async (updatedOptions) => { + const response = (0, utils_common_js_1.assertResponse)(await this.containerContext.listBlobFlatSegment({ marker, ...options, - include: typeof options.include === "string" ? [options.include] : options.include, tracingOptions: updatedOptions.tracingOptions })); + const wrappedResponse = { + ...response, + _response: { + ...response._response, + parsedBody: (0, utils_common_js_1.ConvertInternalResponseOfListBlobFlat)(response._response.parsedBody) + }, + // _response is made non-enumerable + segment: { + ...response.segment, + blobItems: response.segment.blobItems.map((blobItemInternal) => { + const blobItem = { + ...blobItemInternal, + name: (0, utils_common_js_1.BlobNameToString)(blobItemInternal.name), + tags: (0, utils_common_js_1.toTags)(blobItemInternal.blobTags), + objectReplicationSourceProperties: (0, utils_common_js_1.parseObjectReplicationRecord)(blobItemInternal.objectReplicationMetadata) + }; + return blobItem; + }) + } + }; + return wrappedResponse; }); } /** - * The Filter Blobs operation enables callers to list blobs across all containers whose tags - * match a given search expression. Filter blobs searches across all containers within a - * storage account but can be scoped within the expression to a single container. + * listBlobHierarchySegment returns a single segment of blobs starting from + * the specified Marker. Use an empty Marker to start enumeration from the + * beginning. After getting a segment, process it, and then call listBlobsHierarchicalSegment + * again (passing the the previously-returned Marker) to get the next segment. + * @see https://learn.microsoft.com/rest/api/storageservices/list-blobs * - * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. - * The given expression must evaluate to true for a blob to be returned in the results. - * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; - * however, only a subset of the OData filter syntax is supported in the Blob service. 
- * @param marker - A string value that identifies the portion of - * the list of blobs to be returned with the next listing operation. The - * operation returns the continuationToken value within the response body if the - * listing operation did not return all blobs remaining to be listed - * with the current page. The continuationToken value can be used as the value for - * the marker parameter in a subsequent call to request the next page of list - * items. The marker value is opaque to the client. - * @param options - Options to find blobs by tags. + * @param delimiter - The character or string used to define the virtual hierarchy + * @param marker - A string value that identifies the portion of the list to be returned with the next list operation. + * @param options - Options to Container List Blob Hierarchy Segment operation. */ - async findBlobsByTagsSegment(tagFilterSqlExpression, marker, options = {}) { - return tracing_js_1.tracingClient.withSpan("BlobServiceClient-findBlobsByTagsSegment", options, async (updatedOptions) => { - const response = (0, utils_common_js_2.assertResponse)(await this.serviceContext.filterBlobs({ - abortSignal: options.abortSignal, - where: tagFilterSqlExpression, + async listBlobHierarchySegment(delimiter, marker, options = {}) { + return tracing_js_1.tracingClient.withSpan("ContainerClient-listBlobHierarchySegment", options, async (updatedOptions) => { + const response = (0, utils_common_js_1.assertResponse)(await this.containerContext.listBlobHierarchySegment(delimiter, { marker, - maxPageSize: options.maxPageSize, + ...options, tracingOptions: updatedOptions.tracingOptions })); const wrappedResponse = { ...response, - _response: response._response, + _response: { + ...response._response, + parsedBody: (0, utils_common_js_1.ConvertInternalResponseOfListBlobHierarchy)(response._response.parsedBody) + }, // _response is made non-enumerable - blobs: response.blobs.map((blob) => { - let tagValue = ""; - if (blob.tags?.blobTagSet.length === 1) { - tagValue = blob.tags.blobTagSet[0].value; - } - return { ...blob, tags: (0, utils_common_js_1.toTags)(blob.tags), tagValue }; - }) + segment: { + ...response.segment, + blobItems: response.segment.blobItems.map((blobItemInternal) => { + const blobItem = { + ...blobItemInternal, + name: (0, utils_common_js_1.BlobNameToString)(blobItemInternal.name), + tags: (0, utils_common_js_1.toTags)(blobItemInternal.blobTags), + objectReplicationSourceProperties: (0, utils_common_js_1.parseObjectReplicationRecord)(blobItemInternal.objectReplicationMetadata) + }; + return blobItem; + }), + blobPrefixes: response.segment.blobPrefixes?.map((blobPrefixInternal) => { + const blobPrefix = { + ...blobPrefixInternal, + name: (0, utils_common_js_1.BlobNameToString)(blobPrefixInternal.name) + }; + return blobPrefix; + }) + } }; return wrappedResponse; }); } /** - * Returns an AsyncIterableIterator for ServiceFindBlobsByTagsSegmentResponse. + * Returns an AsyncIterableIterator for ContainerListBlobFlatSegmentResponse * - * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. - * The given expression must evaluate to true for a blob to be returned in the results. - * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; - * however, only a subset of the OData filter syntax is supported in the Blob service. 
* @param marker - A string value that identifies the portion of * the list of blobs to be returned with the next listing operation. The - * operation returns the continuationToken value within the response body if the + * operation returns the ContinuationToken value within the response body if the * listing operation did not return all blobs remaining to be listed - * with the current page. The continuationToken value can be used as the value for + * with the current page. The ContinuationToken value can be used as the value for * the marker parameter in a subsequent call to request the next page of list * items. The marker value is opaque to the client. - * @param options - Options to find blobs by tags. + * @param options - Options to list blobs operation. */ - async *findBlobsByTagsSegments(tagFilterSqlExpression, marker, options = {}) { - let response; + async *listSegments(marker, options = {}) { + let listBlobsFlatSegmentResponse; if (!!marker || marker === void 0) { do { - response = await this.findBlobsByTagsSegment(tagFilterSqlExpression, marker, options); - response.blobs = response.blobs || []; - marker = response.continuationToken; - yield response; + listBlobsFlatSegmentResponse = await this.listBlobFlatSegment(marker, options); + marker = listBlobsFlatSegmentResponse.continuationToken; + yield await listBlobsFlatSegmentResponse; } while (marker); } } /** - * Returns an AsyncIterableIterator for blobs. + * Returns an AsyncIterableIterator of {@link BlobItem} objects * - * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. - * The given expression must evaluate to true for a blob to be returned in the results. - * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; - * however, only a subset of the OData filter syntax is supported in the Blob service. - * @param options - Options to findBlobsByTagsItems. + * @param options - Options to list blobs operation. */ - async *findBlobsByTagsItems(tagFilterSqlExpression, options = {}) { + async *listItems(options = {}) { let marker; - for await (const segment of this.findBlobsByTagsSegments(tagFilterSqlExpression, marker, options)) { - yield* segment.blobs; + for await (const listBlobsFlatSegmentResponse of this.listSegments(marker, options)) { + yield* listBlobsFlatSegmentResponse.segment.blobItems; } } /** - * Returns an async iterable iterator to find all blobs with specified tag + * Returns an async iterable iterator to list all the blobs * under the specified account. * * .byPage() returns an async iterable iterator to list the blobs in pages. 
* - * @see https://learn.microsoft.com/rest/api/storageservices/get-blob-service-properties - * - * ```ts snippet:BlobServiceClientFindBlobsByTags + * ```ts snippet:ReadmeSampleListBlobs_Multiple * import { BlobServiceClient } from "@azure/storage-blob"; * import { DefaultAzureCredential } from "@azure/identity"; * @@ -89785,68 +89651,99 @@ var require_BlobServiceClient = __commonJS({ * new DefaultAzureCredential(), * ); * - * // Use for await to iterate the blobs + * const containerName = ""; + * const containerClient = blobServiceClient.getContainerClient(containerName); + * + * // Example using `for await` syntax * let i = 1; - * for await (const blob of blobServiceClient.findBlobsByTags("tagkey='tagvalue'")) { + * const blobs = containerClient.listBlobsFlat(); + * for await (const blob of blobs) { * console.log(`Blob ${i++}: ${blob.name}`); * } * - * // Use iter.next() to iterate the blobs + * // Example using `iter.next()` syntax * i = 1; - * const iter = blobServiceClient.findBlobsByTags("tagkey='tagvalue'"); + * const iter = containerClient.listBlobsFlat(); * let { value, done } = await iter.next(); * while (!done) { * console.log(`Blob ${i++}: ${value.name}`); * ({ value, done } = await iter.next()); * } * - * // Use byPage() to iterate the blobs + * // Example using `byPage()` syntax * i = 1; - * for await (const page of blobServiceClient - * .findBlobsByTags("tagkey='tagvalue'") - * .byPage({ maxPageSize: 20 })) { - * for (const blob of page.blobs) { + * for await (const page of containerClient.listBlobsFlat().byPage({ maxPageSize: 20 })) { + * for (const blob of page.segment.blobItems) { * console.log(`Blob ${i++}: ${blob.name}`); * } * } * - * // Use paging with a marker + * // Example using paging with a marker * i = 1; - * let iterator = blobServiceClient.findBlobsByTags("tagkey='tagvalue'").byPage({ maxPageSize: 2 }); + * let iterator = containerClient.listBlobsFlat().byPage({ maxPageSize: 2 }); * let response = (await iterator.next()).value; * // Prints 2 blob names - * if (response.blobs) { - * for (const blob of response.blobs) { + * if (response.segment.blobItems) { + * for (const blob of response.segment.blobItems) { * console.log(`Blob ${i++}: ${blob.name}`); * } * } * // Gets next marker * let marker = response.continuationToken; * // Passing next marker as continuationToken - * iterator = blobServiceClient - * .findBlobsByTags("tagkey='tagvalue'") - * .byPage({ continuationToken: marker, maxPageSize: 10 }); + * iterator = containerClient.listBlobsFlat().byPage({ continuationToken: marker, maxPageSize: 10 }); * response = (await iterator.next()).value; - * - * // Prints blob names - * if (response.blobs) { - * for (const blob of response.blobs) { + * // Prints 10 blob names + * if (response.segment.blobItems) { + * for (const blob of response.segment.blobItems) { * console.log(`Blob ${i++}: ${blob.name}`); * } * } * ``` * - * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. - * The given expression must evaluate to true for a blob to be returned in the results. - * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; - * however, only a subset of the OData filter syntax is supported in the Blob service. - * @param options - Options to find blobs by tags. + * @param options - Options to list blobs. + * @returns An asyncIterableIterator that supports paging. 
*/ - findBlobsByTags(tagFilterSqlExpression, options = {}) { - const listSegmentOptions = { - ...options + listBlobsFlat(options = {}) { + const include = []; + if (options.includeCopy) { + include.push("copy"); + } + if (options.includeDeleted) { + include.push("deleted"); + } + if (options.includeMetadata) { + include.push("metadata"); + } + if (options.includeSnapshots) { + include.push("snapshots"); + } + if (options.includeVersions) { + include.push("versions"); + } + if (options.includeUncommitedBlobs) { + include.push("uncommittedblobs"); + } + if (options.includeTags) { + include.push("tags"); + } + if (options.includeDeletedWithVersions) { + include.push("deletedwithversions"); + } + if (options.includeImmutabilityPolicy) { + include.push("immutabilitypolicy"); + } + if (options.includeLegalHold) { + include.push("legalhold"); + } + if (options.prefix === "") { + options.prefix = void 0; + } + const updatedOptions = { + ...options, + ...include.length > 0 ? { include } : {} }; - const iter = this.findBlobsByTagsItems(tagFilterSqlExpression, listSegmentOptions); + const iter = this.listItems(updatedOptions); return { /** * The next method, part of the iteration protocol @@ -89864,54 +89761,66 @@ var require_BlobServiceClient = __commonJS({ * Return an AsyncIterableIterator that works a page at a time */ byPage: (settings = {}) => { - return this.findBlobsByTagsSegments(tagFilterSqlExpression, settings.continuationToken, { + return this.listSegments(settings.continuationToken, { maxPageSize: settings.maxPageSize, - ...listSegmentOptions + ...updatedOptions }); } }; } /** - * Returns an AsyncIterableIterator for ServiceListContainersSegmentResponses + * Returns an AsyncIterableIterator for ContainerListBlobHierarchySegmentResponse * + * @param delimiter - The character or string used to define the virtual hierarchy * @param marker - A string value that identifies the portion of - * the list of containers to be returned with the next listing operation. The - * operation returns the continuationToken value within the response body if the - * listing operation did not return all containers remaining to be listed - * with the current page. The continuationToken value can be used as the value for - * the marker parameter in a subsequent call to request the next page of list - * items. The marker value is opaque to the client. - * @param options - Options to list containers operation. + * the list of blobs to be returned with the next listing operation. The + * operation returns the ContinuationToken value within the response body if the + * listing operation did not return all blobs remaining to be listed + * with the current page. The ContinuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to list blobs operation. 
*/ - async *listSegments(marker, options = {}) { - let listContainersSegmentResponse; + async *listHierarchySegments(delimiter, marker, options = {}) { + let listBlobsHierarchySegmentResponse; if (!!marker || marker === void 0) { do { - listContainersSegmentResponse = await this.listContainersSegment(marker, options); - listContainersSegmentResponse.containerItems = listContainersSegmentResponse.containerItems || []; - marker = listContainersSegmentResponse.continuationToken; - yield await listContainersSegmentResponse; + listBlobsHierarchySegmentResponse = await this.listBlobHierarchySegment(delimiter, marker, options); + marker = listBlobsHierarchySegmentResponse.continuationToken; + yield await listBlobsHierarchySegmentResponse; } while (marker); } } /** - * Returns an AsyncIterableIterator for Container Items + * Returns an AsyncIterableIterator for {@link BlobPrefix} and {@link BlobItem} objects. * - * @param options - Options to list containers operation. + * @param delimiter - The character or string used to define the virtual hierarchy + * @param options - Options to list blobs operation. */ - async *listItems(options = {}) { + async *listItemsByHierarchy(delimiter, options = {}) { let marker; - for await (const segment of this.listSegments(marker, options)) { - yield* segment.containerItems; + for await (const listBlobsHierarchySegmentResponse of this.listHierarchySegments(delimiter, marker, options)) { + const segment = listBlobsHierarchySegmentResponse.segment; + if (segment.blobPrefixes) { + for (const prefix of segment.blobPrefixes) { + yield { + kind: "prefix", + ...prefix + }; + } + } + for (const blob of segment.blobItems) { + yield { kind: "blob", ...blob }; + } } } /** - * Returns an async iterable iterator to list all the containers + * Returns an async iterable iterator to list all the blobs by hierarchy. * under the specified account. * - * .byPage() returns an async iterable iterator to list the containers in pages. + * .byPage() returns an async iterable iterator to list the blobs by hierarchy in pages. 
* - * ```ts snippet:BlobServiceClientListContainers + * ```ts snippet:ReadmeSampleListBlobsByHierarchy * import { BlobServiceClient } from "@azure/storage-blob"; * import { DefaultAzureCredential } from "@azure/identity"; * @@ -89921,84 +89830,133 @@ var require_BlobServiceClient = __commonJS({ * new DefaultAzureCredential(), * ); * - * // Use for await to iterate the containers + * const containerName = ""; + * const containerClient = blobServiceClient.getContainerClient(containerName); + * + * // Example using `for await` syntax * let i = 1; - * for await (const container of blobServiceClient.listContainers()) { - * console.log(`Container ${i++}: ${container.name}`); + * const blobs = containerClient.listBlobsByHierarchy("/"); + * for await (const blob of blobs) { + * if (blob.kind === "prefix") { + * console.log(`\tBlobPrefix: ${blob.name}`); + * } else { + * console.log(`\tBlobItem: name - ${blob.name}`); + * } * } * - * // Use iter.next() to iterate the containers + * // Example using `iter.next()` syntax * i = 1; - * const iter = blobServiceClient.listContainers(); + * const iter = containerClient.listBlobsByHierarchy("/"); * let { value, done } = await iter.next(); * while (!done) { - * console.log(`Container ${i++}: ${value.name}`); + * if (value.kind === "prefix") { + * console.log(`\tBlobPrefix: ${value.name}`); + * } else { + * console.log(`\tBlobItem: name - ${value.name}`); + * } * ({ value, done } = await iter.next()); * } * - * // Use byPage() to iterate the containers + * // Example using `byPage()` syntax * i = 1; - * for await (const page of blobServiceClient.listContainers().byPage({ maxPageSize: 20 })) { - * for (const container of page.containerItems) { - * console.log(`Container ${i++}: ${container.name}`); + * for await (const page of containerClient.listBlobsByHierarchy("/").byPage({ maxPageSize: 20 })) { + * const segment = page.segment; + * if (segment.blobPrefixes) { + * for (const prefix of segment.blobPrefixes) { + * console.log(`\tBlobPrefix: ${prefix.name}`); + * } + * } + * for (const blob of page.segment.blobItems) { + * console.log(`\tBlobItem: name - ${blob.name}`); * } * } * - * // Use paging with a marker + * // Example using paging with a marker * i = 1; - * let iterator = blobServiceClient.listContainers().byPage({ maxPageSize: 2 }); + * let iterator = containerClient.listBlobsByHierarchy("/").byPage({ maxPageSize: 2 }); * let response = (await iterator.next()).value; - * - * // Prints 2 container names - * if (response.containerItems) { - * for (const container of response.containerItems) { - * console.log(`Container ${i++}: ${container.name}`); + * // Prints 2 blob names + * if (response.blobPrefixes) { + * for (const prefix of response.blobPrefixes) { + * console.log(`\tBlobPrefix: ${prefix.name}`); + * } + * } + * if (response.segment.blobItems) { + * for (const blob of response.segment.blobItems) { + * console.log(`\tBlobItem: name - ${blob.name}`); * } * } - * * // Gets next marker * let marker = response.continuationToken; * // Passing next marker as continuationToken - * iterator = blobServiceClient - * .listContainers() + * iterator = containerClient + * .listBlobsByHierarchy("/") * .byPage({ continuationToken: marker, maxPageSize: 10 }); * response = (await iterator.next()).value; - * - * // Prints 10 container names - * if (response.containerItems) { - * for (const container of response.containerItems) { - * console.log(`Container ${i++}: ${container.name}`); + * // Prints 10 blob names + * if (response.blobPrefixes) { + * for (const prefix 
of response.blobPrefixes) { + * console.log(`\tBlobPrefix: ${prefix.name}`); + * } + * } + * if (response.segment.blobItems) { + * for (const blob of response.segment.blobItems) { + * console.log(`Blob ${i++}: ${blob.name}`); * } * } * ``` * - * @param options - Options to list containers. - * @returns An asyncIterableIterator that supports paging. + * @param delimiter - The character or string used to define the virtual hierarchy + * @param options - Options to list blobs operation. */ - listContainers(options = {}) { - if (options.prefix === "") { - options.prefix = void 0; + listBlobsByHierarchy(delimiter, options = {}) { + if (delimiter === "") { + throw new RangeError("delimiter should contain one or more characters"); } const include = []; + if (options.includeCopy) { + include.push("copy"); + } if (options.includeDeleted) { include.push("deleted"); } if (options.includeMetadata) { include.push("metadata"); } - if (options.includeSystem) { - include.push("system"); + if (options.includeSnapshots) { + include.push("snapshots"); } - const listSegmentOptions = { + if (options.includeVersions) { + include.push("versions"); + } + if (options.includeUncommitedBlobs) { + include.push("uncommittedblobs"); + } + if (options.includeTags) { + include.push("tags"); + } + if (options.includeDeletedWithVersions) { + include.push("deletedwithversions"); + } + if (options.includeImmutabilityPolicy) { + include.push("immutabilitypolicy"); + } + if (options.includeLegalHold) { + include.push("legalhold"); + } + if (options.prefix === "") { + options.prefix = void 0; + } + const updatedOptions = { ...options, ...include.length > 0 ? { include } : {} }; - const iter = this.listItems(listSegmentOptions); + const iter = this.listItemsByHierarchy(delimiter, updatedOptions); return { /** * The next method, part of the iteration protocol */ - next() { + async next() { return iter.next(); }, /** @@ -90011,6661 +89969,6505 @@ var require_BlobServiceClient = __commonJS({ * Return an AsyncIterableIterator that works a page at a time */ byPage: (settings = {}) => { - return this.listSegments(settings.continuationToken, { + return this.listHierarchySegments(delimiter, settings.continuationToken, { maxPageSize: settings.maxPageSize, - ...listSegmentOptions + ...updatedOptions }); } }; } /** - * ONLY AVAILABLE WHEN USING BEARER TOKEN AUTHENTICATION (TokenCredential). - * - * Retrieves a user delegation key for the Blob service. This is only a valid operation when using - * bearer token authentication. - * - * @see https://learn.microsoft.com/rest/api/storageservices/get-user-delegation-key - * - * @param startsOn - The start time for the user delegation SAS. Must be within 7 days of the current time - * @param expiresOn - The end time for the user delegation SAS. 
Must be within 7 days of the current time - */ - async getUserDelegationKey(startsOn, expiresOn, options = {}) { - return tracing_js_1.tracingClient.withSpan("BlobServiceClient-getUserDelegationKey", options, async (updatedOptions) => { - const response = (0, utils_common_js_2.assertResponse)(await this.serviceContext.getUserDelegationKey({ - startsOn: (0, utils_common_js_2.truncatedISO8061Date)(startsOn, false), - expiresOn: (0, utils_common_js_2.truncatedISO8061Date)(expiresOn, false) - }, { - abortSignal: options.abortSignal, - tracingOptions: updatedOptions.tracingOptions - })); - const userDelegationKey = { - signedObjectId: response.signedObjectId, - signedTenantId: response.signedTenantId, - signedStartsOn: new Date(response.signedStartsOn), - signedExpiresOn: new Date(response.signedExpiresOn), - signedService: response.signedService, - signedVersion: response.signedVersion, - value: response.value - }; - const res = { - _response: response._response, - requestId: response.requestId, - clientRequestId: response.clientRequestId, - version: response.version, - date: response.date, - errorCode: response.errorCode, - ...userDelegationKey - }; - return res; - }); - } - /** - * Creates a BlobBatchClient object to conduct batch operations. - * - * @see https://learn.microsoft.com/rest/api/storageservices/blob-batch - * - * @returns A new BlobBatchClient object for this service. - */ - getBlobBatchClient() { - return new BlobBatchClient_js_1.BlobBatchClient(this.url, this.pipeline); - } - /** - * Only available for BlobServiceClient constructed with a shared key credential. - * - * Generates a Blob account Shared Access Signature (SAS) URI based on the client properties - * and parameters passed in. The SAS is signed by the shared key credential of the client. - * - * @see https://learn.microsoft.com/rest/api/storageservices/create-account-sas - * - * @param expiresOn - Optional. The time at which the shared access signature becomes invalid. Default to an hour later if not provided. - * @param permissions - Specifies the list of permissions to be associated with the SAS. - * @param resourceTypes - Specifies the resource types associated with the shared access signature. - * @param options - Optional parameters. - * @returns An account SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. - */ - generateAccountSasUrl(expiresOn, permissions = AccountSASPermissions_js_1.AccountSASPermissions.parse("r"), resourceTypes = "sco", options = {}) { - if (!(this.credential instanceof StorageSharedKeyCredential_js_1.StorageSharedKeyCredential)) { - throw RangeError("Can only generate the account SAS when the client is initialized with a shared key credential"); - } - if (expiresOn === void 0) { - const now = /* @__PURE__ */ new Date(); - expiresOn = new Date(now.getTime() + 3600 * 1e3); - } - const sas = (0, AccountSASSignatureValues_js_1.generateAccountSASQueryParameters)({ - permissions, - expiresOn, - resourceTypes, - services: AccountSASServices_js_1.AccountSASServices.parse("b").toString(), - ...options - }, this.credential).toString(); - return (0, utils_common_js_1.appendToURLQuery)(this.url, sas); - } - /** - * Only available for BlobServiceClient constructed with a shared key credential. - * - * Generates string to sign for a Blob account Shared Access Signature (SAS) URI based on - * the client properties and parameters passed in. The SAS is signed by the shared key credential of the client. 
- * - * @see https://learn.microsoft.com/rest/api/storageservices/create-account-sas - * - * @param expiresOn - Optional. The time at which the shared access signature becomes invalid. Default to an hour later if not provided. - * @param permissions - Specifies the list of permissions to be associated with the SAS. - * @param resourceTypes - Specifies the resource types associated with the shared access signature. - * @param options - Optional parameters. - * @returns An account SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. - */ - generateSasStringToSign(expiresOn, permissions = AccountSASPermissions_js_1.AccountSASPermissions.parse("r"), resourceTypes = "sco", options = {}) { - if (!(this.credential instanceof StorageSharedKeyCredential_js_1.StorageSharedKeyCredential)) { - throw RangeError("Can only generate the account SAS when the client is initialized with a shared key credential"); - } - if (expiresOn === void 0) { - const now = /* @__PURE__ */ new Date(); - expiresOn = new Date(now.getTime() + 3600 * 1e3); - } - return (0, AccountSASSignatureValues_js_1.generateAccountSASQueryParametersInternal)({ - permissions, - expiresOn, - resourceTypes, - services: AccountSASServices_js_1.AccountSASServices.parse("b").toString(), - ...options - }, this.credential).stringToSign; - } - }; - exports2.BlobServiceClient = BlobServiceClient; - } -}); - -// node_modules/@azure/storage-blob/dist/commonjs/BatchResponse.js -var require_BatchResponse = __commonJS({ - "node_modules/@azure/storage-blob/dist/commonjs/BatchResponse.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - } -}); - -// node_modules/@azure/storage-blob/dist/commonjs/generatedModels.js -var require_generatedModels = __commonJS({ - "node_modules/@azure/storage-blob/dist/commonjs/generatedModels.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.KnownEncryptionAlgorithmType = void 0; - var KnownEncryptionAlgorithmType; - (function(KnownEncryptionAlgorithmType2) { - KnownEncryptionAlgorithmType2["AES256"] = "AES256"; - })(KnownEncryptionAlgorithmType || (exports2.KnownEncryptionAlgorithmType = KnownEncryptionAlgorithmType = {})); - } -}); - -// node_modules/@azure/storage-blob/dist/commonjs/index.js -var require_commonjs15 = __commonJS({ - "node_modules/@azure/storage-blob/dist/commonjs/index.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.logger = exports2.RestError = exports2.BaseRequestPolicy = exports2.StorageOAuthScopes = exports2.newPipeline = exports2.isPipelineLike = exports2.Pipeline = exports2.getBlobServiceAccountAudience = exports2.StorageBlobAudience = exports2.PremiumPageBlobTier = exports2.BlockBlobTier = exports2.generateBlobSASQueryParameters = exports2.generateAccountSASQueryParameters = void 0; - var tslib_1 = (init_tslib_es6(), __toCommonJS(tslib_es6_exports)); - var core_rest_pipeline_1 = require_commonjs6(); - Object.defineProperty(exports2, "RestError", { enumerable: true, get: function() { - return core_rest_pipeline_1.RestError; - } }); - tslib_1.__exportStar(require_BlobServiceClient(), exports2); - tslib_1.__exportStar(require_Clients(), exports2); - tslib_1.__exportStar(require_ContainerClient(), exports2); - tslib_1.__exportStar(require_BlobLeaseClient(), exports2); - tslib_1.__exportStar(require_AccountSASPermissions(), exports2); - 
tslib_1.__exportStar(require_AccountSASResourceTypes(), exports2); - tslib_1.__exportStar(require_AccountSASServices(), exports2); - var AccountSASSignatureValues_js_1 = require_AccountSASSignatureValues(); - Object.defineProperty(exports2, "generateAccountSASQueryParameters", { enumerable: true, get: function() { - return AccountSASSignatureValues_js_1.generateAccountSASQueryParameters; - } }); - tslib_1.__exportStar(require_BlobBatch(), exports2); - tslib_1.__exportStar(require_BlobBatchClient(), exports2); - tslib_1.__exportStar(require_BatchResponse(), exports2); - tslib_1.__exportStar(require_BlobSASPermissions(), exports2); - var BlobSASSignatureValues_js_1 = require_BlobSASSignatureValues(); - Object.defineProperty(exports2, "generateBlobSASQueryParameters", { enumerable: true, get: function() { - return BlobSASSignatureValues_js_1.generateBlobSASQueryParameters; - } }); - tslib_1.__exportStar(require_StorageBrowserPolicyFactory2(), exports2); - tslib_1.__exportStar(require_ContainerSASPermissions(), exports2); - tslib_1.__exportStar(require_AnonymousCredential(), exports2); - tslib_1.__exportStar(require_Credential(), exports2); - tslib_1.__exportStar(require_StorageSharedKeyCredential(), exports2); - var models_js_1 = require_models2(); - Object.defineProperty(exports2, "BlockBlobTier", { enumerable: true, get: function() { - return models_js_1.BlockBlobTier; - } }); - Object.defineProperty(exports2, "PremiumPageBlobTier", { enumerable: true, get: function() { - return models_js_1.PremiumPageBlobTier; - } }); - Object.defineProperty(exports2, "StorageBlobAudience", { enumerable: true, get: function() { - return models_js_1.StorageBlobAudience; - } }); - Object.defineProperty(exports2, "getBlobServiceAccountAudience", { enumerable: true, get: function() { - return models_js_1.getBlobServiceAccountAudience; - } }); - var Pipeline_js_1 = require_Pipeline(); - Object.defineProperty(exports2, "Pipeline", { enumerable: true, get: function() { - return Pipeline_js_1.Pipeline; - } }); - Object.defineProperty(exports2, "isPipelineLike", { enumerable: true, get: function() { - return Pipeline_js_1.isPipelineLike; - } }); - Object.defineProperty(exports2, "newPipeline", { enumerable: true, get: function() { - return Pipeline_js_1.newPipeline; - } }); - Object.defineProperty(exports2, "StorageOAuthScopes", { enumerable: true, get: function() { - return Pipeline_js_1.StorageOAuthScopes; - } }); - tslib_1.__exportStar(require_StorageRetryPolicyFactory(), exports2); - var RequestPolicy_js_1 = require_RequestPolicy(); - Object.defineProperty(exports2, "BaseRequestPolicy", { enumerable: true, get: function() { - return RequestPolicy_js_1.BaseRequestPolicy; - } }); - tslib_1.__exportStar(require_AnonymousCredentialPolicy(), exports2); - tslib_1.__exportStar(require_CredentialPolicy(), exports2); - tslib_1.__exportStar(require_StorageRetryPolicyFactory(), exports2); - tslib_1.__exportStar(require_StorageSharedKeyCredentialPolicy(), exports2); - tslib_1.__exportStar(require_SASQueryParameters(), exports2); - tslib_1.__exportStar(require_generatedModels(), exports2); - var log_js_1 = require_log5(); - Object.defineProperty(exports2, "logger", { enumerable: true, get: function() { - return log_js_1.logger; - } }); - } -}); - -// node_modules/@actions/cache/lib/internal/shared/errors.js -var require_errors3 = __commonJS({ - "node_modules/@actions/cache/lib/internal/shared/errors.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.RateLimitError = 
exports2.UsageError = exports2.NetworkError = exports2.GHESNotSupportedError = exports2.CacheNotFoundError = exports2.InvalidResponseError = exports2.FilesNotFoundError = void 0; - var FilesNotFoundError = class extends Error { - constructor(files = []) { - let message = "No files were found to upload"; - if (files.length > 0) { - message += `: ${files.join(", ")}`; - } - super(message); - this.files = files; - this.name = "FilesNotFoundError"; - } - }; - exports2.FilesNotFoundError = FilesNotFoundError; - var InvalidResponseError = class extends Error { - constructor(message) { - super(message); - this.name = "InvalidResponseError"; - } - }; - exports2.InvalidResponseError = InvalidResponseError; - var CacheNotFoundError = class extends Error { - constructor(message = "Cache not found") { - super(message); - this.name = "CacheNotFoundError"; - } - }; - exports2.CacheNotFoundError = CacheNotFoundError; - var GHESNotSupportedError = class extends Error { - constructor(message = "@actions/cache v4.1.4+, actions/cache/save@v4+ and actions/cache/restore@v4+ are not currently supported on GHES.") { - super(message); - this.name = "GHESNotSupportedError"; - } - }; - exports2.GHESNotSupportedError = GHESNotSupportedError; - var NetworkError = class extends Error { - constructor(code) { - const message = `Unable to make request: ${code} -If you are using self-hosted runners, please make sure your runner has access to all GitHub endpoints: https://docs.github.com/en/actions/hosting-your-own-runners/managing-self-hosted-runners/about-self-hosted-runners#communication-between-self-hosted-runners-and-github`; - super(message); - this.code = code; - this.name = "NetworkError"; - } - }; - exports2.NetworkError = NetworkError; - NetworkError.isNetworkErrorCode = (code) => { - if (!code) - return false; - return [ - "ECONNRESET", - "ENOTFOUND", - "ETIMEDOUT", - "ECONNREFUSED", - "EHOSTUNREACH" - ].includes(code); - }; - var UsageError = class extends Error { - constructor() { - const message = `Cache storage quota has been hit. Unable to upload any new cache entries. Usage is recalculated every 6-12 hours. -More info on storage limits: https://docs.github.com/en/billing/managing-billing-for-github-actions/about-billing-for-github-actions#calculating-minute-and-storage-spending`; - super(message); - this.name = "UsageError"; - } - }; - exports2.UsageError = UsageError; - UsageError.isUsageErrorMessage = (msg) => { - if (!msg) - return false; - return msg.includes("insufficient usage"); - }; - var RateLimitError = class extends Error { - constructor(message) { - super(message); - this.name = "RateLimitError"; - } - }; - exports2.RateLimitError = RateLimitError; - } -}); - -// node_modules/@actions/cache/lib/internal/uploadUtils.js -var require_uploadUtils = __commonJS({ - "node_modules/@actions/cache/lib/internal/uploadUtils.js"(exports2) { - "use strict"; - var __createBinding2 = exports2 && exports2.__createBinding || (Object.create ? (function(o, m, k, k2) { - if (k2 === void 0) k2 = k; - var desc = Object.getOwnPropertyDescriptor(m, k); - if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { - desc = { enumerable: true, get: function() { - return m[k]; - } }; - } - Object.defineProperty(o, k2, desc); - }) : (function(o, m, k, k2) { - if (k2 === void 0) k2 = k; - o[k2] = m[k]; - })); - var __setModuleDefault2 = exports2 && exports2.__setModuleDefault || (Object.create ? 
(function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); - }) : function(o, v) { - o["default"] = v; - }); - var __importStar2 = exports2 && exports2.__importStar || /* @__PURE__ */ (function() { - var ownKeys2 = function(o) { - ownKeys2 = Object.getOwnPropertyNames || function(o2) { - var ar = []; - for (var k in o2) if (Object.prototype.hasOwnProperty.call(o2, k)) ar[ar.length] = k; - return ar; - }; - return ownKeys2(o); - }; - return function(mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) { - for (var k = ownKeys2(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding2(result, mod, k[i]); - } - __setModuleDefault2(result, mod); - return result; - }; - })(); - var __awaiter2 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { - function adopt(value) { - return value instanceof P ? value : new P(function(resolve6) { - resolve6(value); - }); - } - return new (P || (P = Promise))(function(resolve6, reject) { - function fulfilled(value) { - try { - step(generator.next(value)); - } catch (e) { - reject(e); - } - } - function rejected(value) { - try { - step(generator["throw"](value)); - } catch (e) { - reject(e); - } - } - function step(result) { - result.done ? resolve6(result.value) : adopt(result.value).then(fulfilled, rejected); - } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); - }; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.UploadProgress = void 0; - exports2.uploadCacheArchiveSDK = uploadCacheArchiveSDK; - var core14 = __importStar2(require_core()); - var storage_blob_1 = require_commonjs15(); - var errors_1 = require_errors3(); - var UploadProgress = class { - constructor(contentLength) { - this.contentLength = contentLength; - this.sentBytes = 0; - this.displayedComplete = false; - this.startTime = Date.now(); - } - /** - * Sets the number of bytes sent + * The Filter Blobs operation enables callers to list blobs in the container whose tags + * match a given search expression. * - * @param sentBytes the number of bytes sent - */ - setSentBytes(sentBytes) { - this.sentBytes = sentBytes; - } - /** - * Returns the total number of bytes transferred. - */ - getTransferredBytes() { - return this.sentBytes; - } - /** - * Returns true if the upload is complete. + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param marker - A string value that identifies the portion of + * the list of blobs to be returned with the next listing operation. The + * operation returns the continuationToken value within the response body if the + * listing operation did not return all blobs remaining to be listed + * with the current page. The continuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to find blobs by tags. 
*/ - isDone() { - return this.getTransferredBytes() === this.contentLength; + async findBlobsByTagsSegment(tagFilterSqlExpression, marker, options = {}) { + return tracing_js_1.tracingClient.withSpan("ContainerClient-findBlobsByTagsSegment", options, async (updatedOptions) => { + const response = (0, utils_common_js_1.assertResponse)(await this.containerContext.filterBlobs({ + abortSignal: options.abortSignal, + where: tagFilterSqlExpression, + marker, + maxPageSize: options.maxPageSize, + tracingOptions: updatedOptions.tracingOptions + })); + const wrappedResponse = { + ...response, + _response: response._response, + // _response is made non-enumerable + blobs: response.blobs.map((blob) => { + let tagValue = ""; + if (blob.tags?.blobTagSet.length === 1) { + tagValue = blob.tags.blobTagSet[0].value; + } + return { ...blob, tags: (0, utils_common_js_1.toTags)(blob.tags), tagValue }; + }) + }; + return wrappedResponse; + }); } /** - * Prints the current upload stats. Once the upload completes, this will print one - * last line and then stop. + * Returns an AsyncIterableIterator for ContainerFindBlobsByTagsSegmentResponse. + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param marker - A string value that identifies the portion of + * the list of blobs to be returned with the next listing operation. The + * operation returns the continuationToken value within the response body if the + * listing operation did not return all blobs remaining to be listed + * with the current page. The continuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to find blobs by tags. */ - display() { - if (this.displayedComplete) { - return; - } - const transferredBytes = this.sentBytes; - const percentage = (100 * (transferredBytes / this.contentLength)).toFixed(1); - const elapsedTime = Date.now() - this.startTime; - const uploadSpeed = (transferredBytes / (1024 * 1024) / (elapsedTime / 1e3)).toFixed(1); - core14.info(`Sent ${transferredBytes} of ${this.contentLength} (${percentage}%), ${uploadSpeed} MBs/sec`); - if (this.isDone()) { - this.displayedComplete = true; + async *findBlobsByTagsSegments(tagFilterSqlExpression, marker, options = {}) { + let response; + if (!!marker || marker === void 0) { + do { + response = await this.findBlobsByTagsSegment(tagFilterSqlExpression, marker, options); + response.blobs = response.blobs || []; + marker = response.continuationToken; + yield response; + } while (marker); } } /** - * Returns a function used to handle TransferProgressEvents. + * Returns an AsyncIterableIterator for blobs. + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param options - Options to findBlobsByTagsItems. 
*/ - onProgress() { - return (progress) => { - this.setSentBytes(progress.loadedBytes); - }; + async *findBlobsByTagsItems(tagFilterSqlExpression, options = {}) { + let marker; + for await (const segment of this.findBlobsByTagsSegments(tagFilterSqlExpression, marker, options)) { + yield* segment.blobs; + } } /** - * Starts the timer that displays the stats. + * Returns an async iterable iterator to find all blobs with specified tag + * under the specified container. * - * @param delayInMs the delay between each write + * .byPage() returns an async iterable iterator to list the blobs in pages. + * + * Example using `for await` syntax: + * + * ```ts snippet:ReadmeSampleFindBlobsByTags + * import { BlobServiceClient } from "@azure/storage-blob"; + * import { DefaultAzureCredential } from "@azure/identity"; + * + * const account = ""; + * const blobServiceClient = new BlobServiceClient( + * `https://${account}.blob.core.windows.net`, + * new DefaultAzureCredential(), + * ); + * + * const containerName = ""; + * const containerClient = blobServiceClient.getContainerClient(containerName); + * + * // Example using `for await` syntax + * let i = 1; + * for await (const blob of containerClient.findBlobsByTags("tagkey='tagvalue'")) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * + * // Example using `iter.next()` syntax + * i = 1; + * const iter = containerClient.findBlobsByTags("tagkey='tagvalue'"); + * let { value, done } = await iter.next(); + * while (!done) { + * console.log(`Blob ${i++}: ${value.name}`); + * ({ value, done } = await iter.next()); + * } + * + * // Example using `byPage()` syntax + * i = 1; + * for await (const page of containerClient + * .findBlobsByTags("tagkey='tagvalue'") + * .byPage({ maxPageSize: 20 })) { + * for (const blob of page.blobs) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * } + * + * // Example using paging with a marker + * i = 1; + * let iterator = containerClient.findBlobsByTags("tagkey='tagvalue'").byPage({ maxPageSize: 2 }); + * let response = (await iterator.next()).value; + * // Prints 2 blob names + * if (response.blobs) { + * for (const blob of response.blobs) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * } + * // Gets next marker + * let marker = response.continuationToken; + * // Passing next marker as continuationToken + * iterator = containerClient + * .findBlobsByTags("tagkey='tagvalue'") + * .byPage({ continuationToken: marker, maxPageSize: 10 }); + * response = (await iterator.next()).value; + * // Prints 10 blob names + * if (response.blobs) { + * for (const blob of response.blobs) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * } + * ``` + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param options - Options to find blobs by tags. 
*/ - startDisplayTimer(delayInMs = 1e3) { - const displayCallback = () => { - this.display(); - if (!this.isDone()) { - this.timeoutHandle = setTimeout(displayCallback, delayInMs); + findBlobsByTags(tagFilterSqlExpression, options = {}) { + const listSegmentOptions = { + ...options + }; + const iter = this.findBlobsByTagsItems(tagFilterSqlExpression, listSegmentOptions); + return { + /** + * The next method, part of the iteration protocol + */ + next() { + return iter.next(); + }, + /** + * The connection to the async iterator, part of the iteration protocol + */ + [Symbol.asyncIterator]() { + return this; + }, + /** + * Return an AsyncIterableIterator that works a page at a time + */ + byPage: (settings = {}) => { + return this.findBlobsByTagsSegments(tagFilterSqlExpression, settings.continuationToken, { + maxPageSize: settings.maxPageSize, + ...listSegmentOptions + }); } }; - this.timeoutHandle = setTimeout(displayCallback, delayInMs); } /** - * Stops the timer that displays the stats. As this typically indicates the upload - * is complete, this will display one last line, unless the last line has already - * been written. + * The Get Account Information operation returns the sku name and account kind + * for the specified account. + * The Get Account Information operation is available on service versions beginning + * with version 2018-03-28. + * @see https://learn.microsoft.com/rest/api/storageservices/get-account-information + * + * @param options - Options to the Service Get Account Info operation. + * @returns Response data for the Service Get Account Info operation. */ - stopDisplayTimer() { - if (this.timeoutHandle) { - clearTimeout(this.timeoutHandle); - this.timeoutHandle = void 0; - } - this.display(); + async getAccountInfo(options = {}) { + return tracing_js_1.tracingClient.withSpan("ContainerClient-getAccountInfo", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this.containerContext.getAccountInfo({ + abortSignal: options.abortSignal, + tracingOptions: updatedOptions.tracingOptions + })); + }); } - }; - exports2.UploadProgress = UploadProgress; - function uploadCacheArchiveSDK(signedUploadURL, archivePath, options) { - return __awaiter2(this, void 0, void 0, function* () { - var _a; - const blobClient = new storage_blob_1.BlobClient(signedUploadURL); - const blockBlobClient = blobClient.getBlockBlobClient(); - const uploadProgress = new UploadProgress((_a = options === null || options === void 0 ? void 0 : options.archiveSizeBytes) !== null && _a !== void 0 ? _a : 0); - const uploadOptions = { - blockSize: options === null || options === void 0 ? void 0 : options.uploadChunkSize, - concurrency: options === null || options === void 0 ? 
void 0 : options.uploadConcurrency, - // maximum number of parallel transfer workers - maxSingleShotSize: 128 * 1024 * 1024, - // 128 MiB initial transfer size - onProgress: uploadProgress.onProgress() - }; + getContainerNameFromUrl() { + let containerName; try { - uploadProgress.startDisplayTimer(); - core14.debug(`BlobClient: ${blobClient.name}:${blobClient.accountName}:${blobClient.containerName}`); - const response = yield blockBlobClient.uploadFile(archivePath, uploadOptions); - if (response._response.status >= 400) { - throw new errors_1.InvalidResponseError(`uploadCacheArchiveSDK: upload failed with status code ${response._response.status}`); + const parsedUrl = new URL(this.url); + if (parsedUrl.hostname.split(".")[1] === "blob") { + containerName = parsedUrl.pathname.split("/")[1]; + } else if ((0, utils_common_js_1.isIpEndpointStyle)(parsedUrl)) { + containerName = parsedUrl.pathname.split("/")[2]; + } else { + containerName = parsedUrl.pathname.split("/")[1]; } - return response; + containerName = decodeURIComponent(containerName); + if (!containerName) { + throw new Error("Provided containerName is invalid."); + } + return containerName; } catch (error3) { - core14.warning(`uploadCacheArchiveSDK: internal error uploading cache archive: ${error3.message}`); - throw error3; - } finally { - uploadProgress.stopDisplayTimer(); + throw new Error("Unable to extract containerName with provided information."); } - }); - } - } -}); - -// node_modules/@actions/cache/lib/internal/requestUtils.js -var require_requestUtils = __commonJS({ - "node_modules/@actions/cache/lib/internal/requestUtils.js"(exports2) { - "use strict"; - var __createBinding2 = exports2 && exports2.__createBinding || (Object.create ? (function(o, m, k, k2) { - if (k2 === void 0) k2 = k; - var desc = Object.getOwnPropertyDescriptor(m, k); - if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { - desc = { enumerable: true, get: function() { - return m[k]; - } }; } - Object.defineProperty(o, k2, desc); - }) : (function(o, m, k, k2) { - if (k2 === void 0) k2 = k; - o[k2] = m[k]; - })); - var __setModuleDefault2 = exports2 && exports2.__setModuleDefault || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); - }) : function(o, v) { - o["default"] = v; - }); - var __importStar2 = exports2 && exports2.__importStar || /* @__PURE__ */ (function() { - var ownKeys2 = function(o) { - ownKeys2 = Object.getOwnPropertyNames || function(o2) { - var ar = []; - for (var k in o2) if (Object.prototype.hasOwnProperty.call(o2, k)) ar[ar.length] = k; - return ar; - }; - return ownKeys2(o); - }; - return function(mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) { - for (var k = ownKeys2(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding2(result, mod, k[i]); - } - __setModuleDefault2(result, mod); - return result; - }; - })(); - var __awaiter2 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { - function adopt(value) { - return value instanceof P ? value : new P(function(resolve6) { - resolve6(value); + /** + * Only available for ContainerClient constructed with a shared key credential. + * + * Generates a Blob Container Service Shared Access Signature (SAS) URI based on the client properties + * and parameters passed in. The SAS is signed by the shared key credential of the client. 
+ * + * @see https://learn.microsoft.com/rest/api/storageservices/constructing-a-service-sas + * + * @param options - Optional parameters. + * @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. + */ + generateSasUrl(options) { + return new Promise((resolve6) => { + if (!(this.credential instanceof StorageSharedKeyCredential_js_1.StorageSharedKeyCredential)) { + throw new RangeError("Can only generate the SAS when the client is initialized with a shared key credential"); + } + const sas = (0, BlobSASSignatureValues_js_1.generateBlobSASQueryParameters)({ + containerName: this._containerName, + ...options + }, this.credential).toString(); + resolve6((0, utils_common_js_1.appendToURLQuery)(this.url, sas)); }); } - return new (P || (P = Promise))(function(resolve6, reject) { - function fulfilled(value) { - try { - step(generator.next(value)); - } catch (e) { - reject(e); - } - } - function rejected(value) { - try { - step(generator["throw"](value)); - } catch (e) { - reject(e); - } - } - function step(result) { - result.done ? resolve6(result.value) : adopt(result.value).then(fulfilled, rejected); + /** + * Only available for ContainerClient constructed with a shared key credential. + * + * Generates string to sign for a Blob Container Service Shared Access Signature (SAS) URI + * based on the client properties and parameters passed in. The SAS is signed by the shared key credential of the client. + * + * @see https://learn.microsoft.com/rest/api/storageservices/constructing-a-service-sas + * + * @param options - Optional parameters. + * @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. + */ + /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ + generateSasStringToSign(options) { + if (!(this.credential instanceof StorageSharedKeyCredential_js_1.StorageSharedKeyCredential)) { + throw new RangeError("Can only generate the SAS when the client is initialized with a shared key credential"); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); - }; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.isSuccessStatusCode = isSuccessStatusCode; - exports2.isServerErrorStatusCode = isServerErrorStatusCode; - exports2.isRetryableStatusCode = isRetryableStatusCode; - exports2.retry = retry2; - exports2.retryTypedResponse = retryTypedResponse; - exports2.retryHttpClientResponse = retryHttpClientResponse; - var core14 = __importStar2(require_core()); - var http_client_1 = require_lib(); - var constants_1 = require_constants12(); - function isSuccessStatusCode(statusCode) { - if (!statusCode) { - return false; - } - return statusCode >= 200 && statusCode < 300; - } - function isServerErrorStatusCode(statusCode) { - if (!statusCode) { - return true; - } - return statusCode >= 500; - } - function isRetryableStatusCode(statusCode) { - if (!statusCode) { - return false; + return (0, BlobSASSignatureValues_js_1.generateBlobSASQueryParametersInternal)({ + containerName: this._containerName, + ...options + }, this.credential).stringToSign; } - const retryableStatusCodes = [ - http_client_1.HttpCodes.BadGateway, - http_client_1.HttpCodes.ServiceUnavailable, - http_client_1.HttpCodes.GatewayTimeout - ]; - return retryableStatusCodes.includes(statusCode); - } - function sleep(milliseconds) { - return __awaiter2(this, void 0, void 0, function* () { - return new Promise((resolve6) => setTimeout(resolve6, 
milliseconds)); - }); - } - function retry2(name_1, method_1, getStatusCode_1) { - return __awaiter2(this, arguments, void 0, function* (name, method, getStatusCode, maxAttempts = constants_1.DefaultRetryAttempts, delay2 = constants_1.DefaultRetryDelay, onError = void 0) { - let errorMessage = ""; - let attempt = 1; - while (attempt <= maxAttempts) { - let response = void 0; - let statusCode = void 0; - let isRetryable = false; - try { - response = yield method(); - } catch (error3) { - if (onError) { - response = onError(error3); - } - isRetryable = true; - errorMessage = error3.message; - } - if (response) { - statusCode = getStatusCode(response); - if (!isServerErrorStatusCode(statusCode)) { - return response; - } - } - if (statusCode) { - isRetryable = isRetryableStatusCode(statusCode); - errorMessage = `Cache service responded with ${statusCode}`; - } - core14.debug(`${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`); - if (!isRetryable) { - core14.debug(`${name} - Error is not retryable`); - break; - } - yield sleep(delay2); - attempt++; - } - throw Error(`${name} failed: ${errorMessage}`); - }); - } - function retryTypedResponse(name_1, method_1) { - return __awaiter2(this, arguments, void 0, function* (name, method, maxAttempts = constants_1.DefaultRetryAttempts, delay2 = constants_1.DefaultRetryDelay) { - return yield retry2( - name, - method, - (response) => response.statusCode, - maxAttempts, - delay2, - // If the error object contains the statusCode property, extract it and return - // an TypedResponse so it can be processed by the retry logic. - (error3) => { - if (error3 instanceof http_client_1.HttpClientError) { - return { - statusCode: error3.statusCode, - result: null, - headers: {}, - error: error3 - }; - } else { - return void 0; - } - } - ); - }); - } - function retryHttpClientResponse(name_1, method_1) { - return __awaiter2(this, arguments, void 0, function* (name, method, maxAttempts = constants_1.DefaultRetryAttempts, delay2 = constants_1.DefaultRetryDelay) { - return yield retry2(name, method, (response) => response.message.statusCode, maxAttempts, delay2); - }); - } - } -}); - -// node_modules/@azure/abort-controller/dist/index.js -var require_dist4 = __commonJS({ - "node_modules/@azure/abort-controller/dist/index.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - var listenersMap = /* @__PURE__ */ new WeakMap(); - var abortedMap = /* @__PURE__ */ new WeakMap(); - var AbortSignal2 = class _AbortSignal { - constructor() { - this.onabort = null; - listenersMap.set(this, []); - abortedMap.set(this, false); + /** + * Generates a Blob Container Service Shared Access Signature (SAS) URI based on the client properties + * and parameters passed in. The SAS is signed by the input user delegation key. + * + * @see https://learn.microsoft.com/rest/api/storageservices/constructing-a-service-sas + * + * @param options - Optional parameters. + * @param userDelegationKey - Return value of `blobServiceClient.getUserDelegationKey()` + * @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. 
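A rough sketch of the user-delegation variant described above, assuming an Azure AD credential and placeholder account and container names; as the `@param` note says, the key comes from `blobServiceClient.getUserDelegationKey()`:

```ts
import { BlobServiceClient, ContainerSASPermissions } from "@azure/storage-blob";
import { DefaultAzureCredential } from "@azure/identity";

const serviceClient = new BlobServiceClient(
  "https://myaccount.blob.core.windows.net",
  new DefaultAzureCredential(),
);

// A user delegation key is only valid for an explicit time window.
const startsOn = new Date();
const expiresOn = new Date(Date.now() + 60 * 60 * 1000);
const userDelegationKey = await serviceClient.getUserDelegationKey(startsOn, expiresOn);

const containerClient = serviceClient.getContainerClient("mycontainer");
const sasUrl = await containerClient.generateUserDelegationSasUrl(
  { permissions: ContainerSASPermissions.parse("r"), expiresOn },
  userDelegationKey,
);
```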
+ */ + generateUserDelegationSasUrl(options, userDelegationKey) { + return new Promise((resolve6) => { + const sas = (0, BlobSASSignatureValues_js_1.generateBlobSASQueryParameters)({ + containerName: this._containerName, + ...options + }, userDelegationKey, this.accountName).toString(); + resolve6((0, utils_common_js_1.appendToURLQuery)(this.url, sas)); + }); } /** - * Status of whether aborted or not. + * Generates string to sign for a Blob Container Service Shared Access Signature (SAS) URI + * based on the client properties and parameters passed in. The SAS is signed by the input user delegation key. + * + * @see https://learn.microsoft.com/rest/api/storageservices/constructing-a-service-sas * - * @readonly + * @param options - Optional parameters. + * @param userDelegationKey - Return value of `blobServiceClient.getUserDelegationKey()` + * @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. */ - get aborted() { - if (!abortedMap.has(this)) { - throw new TypeError("Expected `this` to be an instance of AbortSignal."); - } - return abortedMap.get(this); + generateUserDelegationSasStringToSign(options, userDelegationKey) { + return (0, BlobSASSignatureValues_js_1.generateBlobSASQueryParametersInternal)({ + containerName: this._containerName, + ...options + }, userDelegationKey, this.accountName).stringToSign; } /** - * Creates a new AbortSignal instance that will never be aborted. + * Creates a BlobBatchClient object to conduct batch operations. * - * @readonly + * @see https://learn.microsoft.com/rest/api/storageservices/blob-batch + * + * @returns A new BlobBatchClient object for this container. */ - static get none() { - return new _AbortSignal(); + getBlobBatchClient() { + return new BlobBatchClient_js_1.BlobBatchClient(this.url, this.pipeline); } + }; + exports2.ContainerClient = ContainerClient; + } +}); + +// node_modules/@azure/storage-blob/dist/commonjs/sas/AccountSASPermissions.js +var require_AccountSASPermissions = __commonJS({ + "node_modules/@azure/storage-blob/dist/commonjs/sas/AccountSASPermissions.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.AccountSASPermissions = void 0; + var AccountSASPermissions = class _AccountSASPermissions { /** - * Added new "abort" event listener, only support "abort" event. + * Parse initializes the AccountSASPermissions fields from a string. 
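To illustrate the parsing behaviour documented here: each character toggles one flag, unknown characters raise a `RangeError`, and `toString()` re-serialises the flags in the fixed order the service accepts. A small sketch:

```ts
import { AccountSASPermissions } from "@azure/storage-blob";

// "rwdl" -> read, write, delete and list granted.
const permissions = AccountSASPermissions.parse("rwdl");
console.log(permissions.read, permissions.write, permissions.delete, permissions.list);

// Flags can also be set from a plain object and re-serialised.
const fromObject = AccountSASPermissions.from({ read: true, list: true });
console.log(fromObject.toString()); // "rl"

// AccountSASPermissions.parse("z") would throw:
// RangeError: Invalid permission character: z
```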
* - * @param _type - Only support "abort" event - * @param listener - The listener to be added + * @param permissions - */ - addEventListener(_type, listener) { - if (!listenersMap.has(this)) { - throw new TypeError("Expected `this` to be an instance of AbortSignal."); + static parse(permissions) { + const accountSASPermissions = new _AccountSASPermissions(); + for (const c of permissions) { + switch (c) { + case "r": + accountSASPermissions.read = true; + break; + case "w": + accountSASPermissions.write = true; + break; + case "d": + accountSASPermissions.delete = true; + break; + case "x": + accountSASPermissions.deleteVersion = true; + break; + case "l": + accountSASPermissions.list = true; + break; + case "a": + accountSASPermissions.add = true; + break; + case "c": + accountSASPermissions.create = true; + break; + case "u": + accountSASPermissions.update = true; + break; + case "p": + accountSASPermissions.process = true; + break; + case "t": + accountSASPermissions.tag = true; + break; + case "f": + accountSASPermissions.filter = true; + break; + case "i": + accountSASPermissions.setImmutabilityPolicy = true; + break; + case "y": + accountSASPermissions.permanentDelete = true; + break; + default: + throw new RangeError(`Invalid permission character: ${c}`); + } } - const listeners = listenersMap.get(this); - listeners.push(listener); + return accountSASPermissions; } /** - * Remove "abort" event listener, only support "abort" event. + * Creates a {@link AccountSASPermissions} from a raw object which contains same keys as it + * and boolean values for them. * - * @param _type - Only support "abort" event - * @param listener - The listener to be removed + * @param permissionLike - */ - removeEventListener(_type, listener) { - if (!listenersMap.has(this)) { - throw new TypeError("Expected `this` to be an instance of AbortSignal."); + static from(permissionLike) { + const accountSASPermissions = new _AccountSASPermissions(); + if (permissionLike.read) { + accountSASPermissions.read = true; } - const listeners = listenersMap.get(this); - const index = listeners.indexOf(listener); - if (index > -1) { - listeners.splice(index, 1); + if (permissionLike.write) { + accountSASPermissions.write = true; } - } - /** - * Dispatches a synthetic event to the AbortSignal. - */ - dispatchEvent(_event) { - throw new Error("This is a stub dispatchEvent implementation that should not be used. 
It only exists for type-checking purposes."); - } - }; - function abortSignal(signal) { - if (signal.aborted) { - return; - } - if (signal.onabort) { - signal.onabort.call(signal); - } - const listeners = listenersMap.get(signal); - if (listeners) { - listeners.slice().forEach((listener) => { - listener.call(signal, { type: "abort" }); - }); - } - abortedMap.set(signal, true); - } - var AbortError = class extends Error { - constructor(message) { - super(message); - this.name = "AbortError"; - } - }; - var AbortController2 = class { - // eslint-disable-next-line @typescript-eslint/explicit-module-boundary-types - constructor(parentSignals) { - this._signal = new AbortSignal2(); - if (!parentSignals) { - return; + if (permissionLike.delete) { + accountSASPermissions.delete = true; } - if (!Array.isArray(parentSignals)) { - parentSignals = arguments; + if (permissionLike.deleteVersion) { + accountSASPermissions.deleteVersion = true; } - for (const parentSignal of parentSignals) { - if (parentSignal.aborted) { - this.abort(); - } else { - parentSignal.addEventListener("abort", () => { - this.abort(); - }); - } + if (permissionLike.filter) { + accountSASPermissions.filter = true; + } + if (permissionLike.tag) { + accountSASPermissions.tag = true; + } + if (permissionLike.list) { + accountSASPermissions.list = true; + } + if (permissionLike.add) { + accountSASPermissions.add = true; + } + if (permissionLike.create) { + accountSASPermissions.create = true; + } + if (permissionLike.update) { + accountSASPermissions.update = true; + } + if (permissionLike.process) { + accountSASPermissions.process = true; + } + if (permissionLike.setImmutabilityPolicy) { + accountSASPermissions.setImmutabilityPolicy = true; } + if (permissionLike.permanentDelete) { + accountSASPermissions.permanentDelete = true; + } + return accountSASPermissions; } /** - * The AbortSignal associated with this controller that will signal aborted - * when the abort method is called on this controller. - * - * @readonly + * Permission to read resources and list queues and tables granted. */ - get signal() { - return this._signal; - } + read = false; /** - * Signal that any operations passed this controller's associated abort signal - * to cancel any remaining work and throw an `AbortError`. + * Permission to write resources granted. */ - abort() { - abortSignal(this._signal); - } + write = false; /** - * Creates a new AbortSignal instance that will abort after the provided ms. - * @param ms - Elapsed time in milliseconds to trigger an abort. + * Permission to delete blobs and files granted. */ - static timeout(ms) { - const signal = new AbortSignal2(); - const timer = setTimeout(abortSignal, ms, signal); - if (typeof timer.unref === "function") { - timer.unref(); - } - return signal; - } - }; - exports2.AbortController = AbortController2; - exports2.AbortError = AbortError; - exports2.AbortSignal = AbortSignal2; - } -}); - -// node_modules/@actions/cache/lib/internal/downloadUtils.js -var require_downloadUtils = __commonJS({ - "node_modules/@actions/cache/lib/internal/downloadUtils.js"(exports2) { - "use strict"; - var __createBinding2 = exports2 && exports2.__createBinding || (Object.create ? (function(o, m, k, k2) { - if (k2 === void 0) k2 = k; - var desc = Object.getOwnPropertyDescriptor(m, k); - if (!desc || ("get" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) { - desc = { enumerable: true, get: function() { - return m[k]; - } }; - } - Object.defineProperty(o, k2, desc); - }) : (function(o, m, k, k2) { - if (k2 === void 0) k2 = k; - o[k2] = m[k]; - })); - var __setModuleDefault2 = exports2 && exports2.__setModuleDefault || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); - }) : function(o, v) { - o["default"] = v; - }); - var __importStar2 = exports2 && exports2.__importStar || /* @__PURE__ */ (function() { - var ownKeys2 = function(o) { - ownKeys2 = Object.getOwnPropertyNames || function(o2) { - var ar = []; - for (var k in o2) if (Object.prototype.hasOwnProperty.call(o2, k)) ar[ar.length] = k; - return ar; - }; - return ownKeys2(o); - }; - return function(mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) { - for (var k = ownKeys2(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding2(result, mod, k[i]); - } - __setModuleDefault2(result, mod); - return result; - }; - })(); - var __awaiter2 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { - function adopt(value) { - return value instanceof P ? value : new P(function(resolve6) { - resolve6(value); - }); - } - return new (P || (P = Promise))(function(resolve6, reject) { - function fulfilled(value) { - try { - step(generator.next(value)); - } catch (e) { - reject(e); - } - } - function rejected(value) { - try { - step(generator["throw"](value)); - } catch (e) { - reject(e); - } - } - function step(result) { - result.done ? resolve6(result.value) : adopt(result.value).then(fulfilled, rejected); - } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); - }; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.DownloadProgress = void 0; - exports2.downloadCacheHttpClient = downloadCacheHttpClient; - exports2.downloadCacheHttpClientConcurrent = downloadCacheHttpClientConcurrent; - exports2.downloadCacheStorageSDK = downloadCacheStorageSDK; - var core14 = __importStar2(require_core()); - var http_client_1 = require_lib(); - var storage_blob_1 = require_commonjs15(); - var buffer = __importStar2(require("buffer")); - var fs13 = __importStar2(require("fs")); - var stream2 = __importStar2(require("stream")); - var util = __importStar2(require("util")); - var utils = __importStar2(require_cacheUtils()); - var constants_1 = require_constants12(); - var requestUtils_1 = require_requestUtils(); - var abort_controller_1 = require_dist4(); - function pipeResponseToStream(response, output) { - return __awaiter2(this, void 0, void 0, function* () { - const pipeline = util.promisify(stream2.pipeline); - yield pipeline(response.message, output); - }); - } - var DownloadProgress = class { - constructor(contentLength) { - this.contentLength = contentLength; - this.segmentIndex = 0; - this.segmentSize = 0; - this.segmentOffset = 0; - this.receivedBytes = 0; - this.displayedComplete = false; - this.startTime = Date.now(); - } + delete = false; /** - * Progress to the next segment. Only call this method when the previous segment - * is complete. - * - * @param segmentSize the length of the next segment + * Permission to delete versions granted. 
*/ - nextSegment(segmentSize) { - this.segmentOffset = this.segmentOffset + this.segmentSize; - this.segmentIndex = this.segmentIndex + 1; - this.segmentSize = segmentSize; - this.receivedBytes = 0; - core14.debug(`Downloading segment at offset ${this.segmentOffset} with length ${this.segmentSize}...`); - } + deleteVersion = false; /** - * Sets the number of bytes received for the current segment. - * - * @param receivedBytes the number of bytes received + * Permission to list blob containers, blobs, shares, directories, and files granted. */ - setReceivedBytes(receivedBytes) { - this.receivedBytes = receivedBytes; - } + list = false; /** - * Returns the total number of bytes transferred. + * Permission to add messages, table entities, and append to blobs granted. */ - getTransferredBytes() { - return this.segmentOffset + this.receivedBytes; - } + add = false; /** - * Returns true if the download is complete. + * Permission to create blobs and files granted. */ - isDone() { - return this.getTransferredBytes() === this.contentLength; - } + create = false; /** - * Prints the current download stats. Once the download completes, this will print one - * last line and then stop. + * Permissions to update messages and table entities granted. */ - display() { - if (this.displayedComplete) { - return; - } - const transferredBytes = this.segmentOffset + this.receivedBytes; - const percentage = (100 * (transferredBytes / this.contentLength)).toFixed(1); - const elapsedTime = Date.now() - this.startTime; - const downloadSpeed = (transferredBytes / (1024 * 1024) / (elapsedTime / 1e3)).toFixed(1); - core14.info(`Received ${transferredBytes} of ${this.contentLength} (${percentage}%), ${downloadSpeed} MBs/sec`); - if (this.isDone()) { - this.displayedComplete = true; - } - } + update = false; /** - * Returns a function used to handle TransferProgressEvents. + * Permission to get and delete messages granted. */ - onProgress() { - return (progress) => { - this.setReceivedBytes(progress.loadedBytes); - }; - } + process = false; /** - * Starts the timer that displays the stats. - * - * @param delayInMs the delay between each write + * Specfies Tag access granted. */ - startDisplayTimer(delayInMs = 1e3) { - const displayCallback = () => { - this.display(); - if (!this.isDone()) { - this.timeoutHandle = setTimeout(displayCallback, delayInMs); - } - }; - this.timeoutHandle = setTimeout(displayCallback, delayInMs); - } + tag = false; /** - * Stops the timer that displays the stats. As this typically indicates the download - * is complete, this will display one last line, unless the last line has already - * been written. + * Permission to filter blobs. 
*/ - stopDisplayTimer() { - if (this.timeoutHandle) { - clearTimeout(this.timeoutHandle); - this.timeoutHandle = void 0; - } - this.display(); - } - }; - exports2.DownloadProgress = DownloadProgress; - function downloadCacheHttpClient(archiveLocation, archivePath) { - return __awaiter2(this, void 0, void 0, function* () { - const writeStream = fs13.createWriteStream(archivePath); - const httpClient = new http_client_1.HttpClient("actions/cache"); - const downloadResponse = yield (0, requestUtils_1.retryHttpClientResponse)("downloadCache", () => __awaiter2(this, void 0, void 0, function* () { - return httpClient.get(archiveLocation); - })); - downloadResponse.message.socket.setTimeout(constants_1.SocketTimeout, () => { - downloadResponse.message.destroy(); - core14.debug(`Aborting download, socket timed out after ${constants_1.SocketTimeout} ms`); - }); - yield pipeResponseToStream(downloadResponse, writeStream); - const contentLengthHeader = downloadResponse.message.headers["content-length"]; - if (contentLengthHeader) { - const expectedLength = parseInt(contentLengthHeader); - const actualLength = utils.getArchiveFileSizeInBytes(archivePath); - if (actualLength !== expectedLength) { - throw new Error(`Incomplete download. Expected file size: ${expectedLength}, actual file size: ${actualLength}`); - } - } else { - core14.debug("Unable to validate download, no Content-Length header"); - } - }); - } - function downloadCacheHttpClientConcurrent(archiveLocation, archivePath, options) { - return __awaiter2(this, void 0, void 0, function* () { - var _a; - const archiveDescriptor = yield fs13.promises.open(archivePath, "w"); - const httpClient = new http_client_1.HttpClient("actions/cache", void 0, { - socketTimeout: options.timeoutInMs, - keepAlive: true - }); - try { - const res = yield (0, requestUtils_1.retryHttpClientResponse)("downloadCacheMetadata", () => __awaiter2(this, void 0, void 0, function* () { - return yield httpClient.request("HEAD", archiveLocation, null, {}); - })); - const lengthHeader = res.message.headers["content-length"]; - if (lengthHeader === void 0 || lengthHeader === null) { - throw new Error("Content-Length not found on blob response"); - } - const length = parseInt(lengthHeader); - if (Number.isNaN(length)) { - throw new Error(`Could not interpret Content-Length: ${length}`); - } - const downloads = []; - const blockSize = 4 * 1024 * 1024; - for (let offset = 0; offset < length; offset += blockSize) { - const count = Math.min(blockSize, length - offset); - downloads.push({ - offset, - promiseGetter: () => __awaiter2(this, void 0, void 0, function* () { - return yield downloadSegmentRetry(httpClient, archiveLocation, offset, count); - }) - }); - } - downloads.reverse(); - let actives = 0; - let bytesDownloaded = 0; - const progress = new DownloadProgress(length); - progress.startDisplayTimer(); - const progressFn = progress.onProgress(); - const activeDownloads = []; - let nextDownload; - const waitAndWrite = () => __awaiter2(this, void 0, void 0, function* () { - const segment = yield Promise.race(Object.values(activeDownloads)); - yield archiveDescriptor.write(segment.buffer, 0, segment.count, segment.offset); - actives--; - delete activeDownloads[segment.offset]; - bytesDownloaded += segment.count; - progressFn({ loadedBytes: bytesDownloaded }); - }); - while (nextDownload = downloads.pop()) { - activeDownloads[nextDownload.offset] = nextDownload.promiseGetter(); - actives++; - if (actives >= ((_a = options.downloadConcurrency) !== null && _a !== void 0 ? 
_a : 10)) { - yield waitAndWrite(); - } - } - while (actives > 0) { - yield waitAndWrite(); - } - } finally { - httpClient.dispose(); - yield archiveDescriptor.close(); + filter = false; + /** + * Permission to set immutability policy. + */ + setImmutabilityPolicy = false; + /** + * Specifies that Permanent Delete is permitted. + */ + permanentDelete = false; + /** + * Produces the SAS permissions string for an Azure Storage account. + * Call this method to set AccountSASSignatureValues Permissions field. + * + * Using this method will guarantee the resource types are in + * an order accepted by the service. + * + * @see https://learn.microsoft.com/rest/api/storageservices/constructing-an-account-sas + * + */ + toString() { + const permissions = []; + if (this.read) { + permissions.push("r"); } - }); - } - function downloadSegmentRetry(httpClient, archiveLocation, offset, count) { - return __awaiter2(this, void 0, void 0, function* () { - const retries = 5; - let failures = 0; - while (true) { - try { - const timeout = 3e4; - const result = yield promiseWithTimeout(timeout, downloadSegment(httpClient, archiveLocation, offset, count)); - if (typeof result === "string") { - throw new Error("downloadSegmentRetry failed due to timeout"); - } - return result; - } catch (err) { - if (failures >= retries) { - throw err; - } - failures++; - } + if (this.write) { + permissions.push("w"); } - }); - } - function downloadSegment(httpClient, archiveLocation, offset, count) { - return __awaiter2(this, void 0, void 0, function* () { - const partRes = yield (0, requestUtils_1.retryHttpClientResponse)("downloadCachePart", () => __awaiter2(this, void 0, void 0, function* () { - return yield httpClient.get(archiveLocation, { - Range: `bytes=${offset}-${offset + count - 1}` - }); - })); - if (!partRes.readBodyBuffer) { - throw new Error("Expected HttpClientResponse to implement readBodyBuffer"); + if (this.delete) { + permissions.push("d"); } - return { - offset, - count, - buffer: yield partRes.readBodyBuffer() - }; - }); - } - function downloadCacheStorageSDK(archiveLocation, archivePath, options) { - return __awaiter2(this, void 0, void 0, function* () { - var _a; - const client = new storage_blob_1.BlockBlobClient(archiveLocation, void 0, { - retryOptions: { - // Override the timeout used when downloading each 4 MB chunk - // The default is 2 min / MB, which is way too slow - tryTimeoutInMs: options.timeoutInMs - } - }); - const properties = yield client.getProperties(); - const contentLength = (_a = properties.contentLength) !== null && _a !== void 0 ? 
_a : -1; - if (contentLength < 0) { - core14.debug("Unable to determine content length, downloading file with http-client..."); - yield downloadCacheHttpClient(archiveLocation, archivePath); - } else { - const maxSegmentSize = Math.min(134217728, buffer.constants.MAX_LENGTH); - const downloadProgress = new DownloadProgress(contentLength); - const fd = fs13.openSync(archivePath, "w"); - try { - downloadProgress.startDisplayTimer(); - const controller = new abort_controller_1.AbortController(); - const abortSignal = controller.signal; - while (!downloadProgress.isDone()) { - const segmentStart = downloadProgress.segmentOffset + downloadProgress.segmentSize; - const segmentSize = Math.min(maxSegmentSize, contentLength - segmentStart); - downloadProgress.nextSegment(segmentSize); - const result = yield promiseWithTimeout(options.segmentTimeoutInMs || 36e5, client.downloadToBuffer(segmentStart, segmentSize, { - abortSignal, - concurrency: options.downloadConcurrency, - onProgress: downloadProgress.onProgress() - })); - if (result === "timeout") { - controller.abort(); - throw new Error("Aborting cache download as the download time exceeded the timeout."); - } else if (Buffer.isBuffer(result)) { - fs13.writeFileSync(fd, result); - } - } - } finally { - downloadProgress.stopDisplayTimer(); - fs13.closeSync(fd); - } + if (this.deleteVersion) { + permissions.push("x"); } - }); - } - var promiseWithTimeout = (timeoutMs, promise) => __awaiter2(void 0, void 0, void 0, function* () { - let timeoutHandle; - const timeoutPromise = new Promise((resolve6) => { - timeoutHandle = setTimeout(() => resolve6("timeout"), timeoutMs); - }); - return Promise.race([promise, timeoutPromise]).then((result) => { - clearTimeout(timeoutHandle); - return result; - }); - }); + if (this.filter) { + permissions.push("f"); + } + if (this.tag) { + permissions.push("t"); + } + if (this.list) { + permissions.push("l"); + } + if (this.add) { + permissions.push("a"); + } + if (this.create) { + permissions.push("c"); + } + if (this.update) { + permissions.push("u"); + } + if (this.process) { + permissions.push("p"); + } + if (this.setImmutabilityPolicy) { + permissions.push("i"); + } + if (this.permanentDelete) { + permissions.push("y"); + } + return permissions.join(""); + } + }; + exports2.AccountSASPermissions = AccountSASPermissions; } }); -// node_modules/@actions/cache/lib/options.js -var require_options = __commonJS({ - "node_modules/@actions/cache/lib/options.js"(exports2) { +// node_modules/@azure/storage-blob/dist/commonjs/sas/AccountSASResourceTypes.js +var require_AccountSASResourceTypes = __commonJS({ + "node_modules/@azure/storage-blob/dist/commonjs/sas/AccountSASResourceTypes.js"(exports2) { "use strict"; - var __createBinding2 = exports2 && exports2.__createBinding || (Object.create ? (function(o, m, k, k2) { - if (k2 === void 0) k2 = k; - var desc = Object.getOwnPropertyDescriptor(m, k); - if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { - desc = { enumerable: true, get: function() { - return m[k]; - } }; - } - Object.defineProperty(o, k2, desc); - }) : (function(o, m, k, k2) { - if (k2 === void 0) k2 = k; - o[k2] = m[k]; - })); - var __setModuleDefault2 = exports2 && exports2.__setModuleDefault || (Object.create ? 
(function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); - }) : function(o, v) { - o["default"] = v; - }); - var __importStar2 = exports2 && exports2.__importStar || /* @__PURE__ */ (function() { - var ownKeys2 = function(o) { - ownKeys2 = Object.getOwnPropertyNames || function(o2) { - var ar = []; - for (var k in o2) if (Object.prototype.hasOwnProperty.call(o2, k)) ar[ar.length] = k; - return ar; - }; - return ownKeys2(o); - }; - return function(mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) { - for (var k = ownKeys2(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding2(result, mod, k[i]); - } - __setModuleDefault2(result, mod); - return result; - }; - })(); Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.getUploadOptions = getUploadOptions; - exports2.getDownloadOptions = getDownloadOptions; - var core14 = __importStar2(require_core()); - function getUploadOptions(copy) { - const result = { - useAzureSdk: false, - uploadConcurrency: 4, - uploadChunkSize: 32 * 1024 * 1024 - }; - if (copy) { - if (typeof copy.useAzureSdk === "boolean") { - result.useAzureSdk = copy.useAzureSdk; + exports2.AccountSASResourceTypes = void 0; + var AccountSASResourceTypes = class _AccountSASResourceTypes { + /** + * Creates an {@link AccountSASResourceTypes} from the specified resource types string. This method will throw an + * Error if it encounters a character that does not correspond to a valid resource type. + * + * @param resourceTypes - + */ + static parse(resourceTypes) { + const accountSASResourceTypes = new _AccountSASResourceTypes(); + for (const c of resourceTypes) { + switch (c) { + case "s": + accountSASResourceTypes.service = true; + break; + case "c": + accountSASResourceTypes.container = true; + break; + case "o": + accountSASResourceTypes.object = true; + break; + default: + throw new RangeError(`Invalid resource type: ${c}`); + } } - if (typeof copy.uploadConcurrency === "number") { - result.uploadConcurrency = copy.uploadConcurrency; + return accountSASResourceTypes; + } + /** + * Permission to access service level APIs granted. + */ + service = false; + /** + * Permission to access container level APIs (Blob Containers, Tables, Queues, File Shares) granted. + */ + container = false; + /** + * Permission to access object level APIs (Blobs, Table Entities, Queue Messages, Files) granted. + */ + object = false; + /** + * Converts the given resource types to a string. + * + * @see https://learn.microsoft.com/rest/api/storageservices/constructing-an-account-sas + * + */ + toString() { + const resourceTypes = []; + if (this.service) { + resourceTypes.push("s"); } - if (typeof copy.uploadChunkSize === "number") { - result.uploadChunkSize = copy.uploadChunkSize; + if (this.container) { + resourceTypes.push("c"); } - } - result.uploadConcurrency = !isNaN(Number(process.env["CACHE_UPLOAD_CONCURRENCY"])) ? Math.min(32, Number(process.env["CACHE_UPLOAD_CONCURRENCY"])) : result.uploadConcurrency; - result.uploadChunkSize = !isNaN(Number(process.env["CACHE_UPLOAD_CHUNK_SIZE"])) ? 
Math.min(128 * 1024 * 1024, Number(process.env["CACHE_UPLOAD_CHUNK_SIZE"]) * 1024 * 1024) : result.uploadChunkSize; - core14.debug(`Use Azure SDK: ${result.useAzureSdk}`); - core14.debug(`Upload concurrency: ${result.uploadConcurrency}`); - core14.debug(`Upload chunk size: ${result.uploadChunkSize}`); - return result; - } - function getDownloadOptions(copy) { - const result = { - useAzureSdk: false, - concurrentBlobDownloads: true, - downloadConcurrency: 8, - timeoutInMs: 3e4, - segmentTimeoutInMs: 6e5, - lookupOnly: false - }; - if (copy) { - if (typeof copy.useAzureSdk === "boolean") { - result.useAzureSdk = copy.useAzureSdk; + if (this.object) { + resourceTypes.push("o"); } - if (typeof copy.concurrentBlobDownloads === "boolean") { - result.concurrentBlobDownloads = copy.concurrentBlobDownloads; + return resourceTypes.join(""); + } + }; + exports2.AccountSASResourceTypes = AccountSASResourceTypes; + } +}); + +// node_modules/@azure/storage-blob/dist/commonjs/sas/AccountSASServices.js +var require_AccountSASServices = __commonJS({ + "node_modules/@azure/storage-blob/dist/commonjs/sas/AccountSASServices.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.AccountSASServices = void 0; + var AccountSASServices = class _AccountSASServices { + /** + * Creates an {@link AccountSASServices} from the specified services string. This method will throw an + * Error if it encounters a character that does not correspond to a valid service. + * + * @param services - + */ + static parse(services) { + const accountSASServices = new _AccountSASServices(); + for (const c of services) { + switch (c) { + case "b": + accountSASServices.blob = true; + break; + case "f": + accountSASServices.file = true; + break; + case "q": + accountSASServices.queue = true; + break; + case "t": + accountSASServices.table = true; + break; + default: + throw new RangeError(`Invalid service character: ${c}`); + } } - if (typeof copy.downloadConcurrency === "number") { - result.downloadConcurrency = copy.downloadConcurrency; + return accountSASServices; + } + /** + * Permission to access blob resources granted. + */ + blob = false; + /** + * Permission to access file resources granted. + */ + file = false; + /** + * Permission to access queue resources granted. + */ + queue = false; + /** + * Permission to access table resources granted. + */ + table = false; + /** + * Converts the given services to a string. 
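`AccountSASServices` follows the same pattern for the services field of an account SAS: one character per service, with `toString()` writing the enabled flags back out. A sketch:

```ts
import { AccountSASServices } from "@azure/storage-blob";

// "bq" -> the SAS covers the Blob and Queue services only.
const services = AccountSASServices.parse("bq");
console.log(services.blob, services.queue, services.file, services.table); // true true false false
console.log(services.toString()); // "bq"
```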
+ * + */ + toString() { + const services = []; + if (this.blob) { + services.push("b"); } - if (typeof copy.timeoutInMs === "number") { - result.timeoutInMs = copy.timeoutInMs; + if (this.table) { + services.push("t"); } - if (typeof copy.segmentTimeoutInMs === "number") { - result.segmentTimeoutInMs = copy.segmentTimeoutInMs; + if (this.queue) { + services.push("q"); } - if (typeof copy.lookupOnly === "boolean") { - result.lookupOnly = copy.lookupOnly; + if (this.file) { + services.push("f"); } + return services.join(""); } - const segmentDownloadTimeoutMins = process.env["SEGMENT_DOWNLOAD_TIMEOUT_MINS"]; - if (segmentDownloadTimeoutMins && !isNaN(Number(segmentDownloadTimeoutMins)) && isFinite(Number(segmentDownloadTimeoutMins))) { - result.segmentTimeoutInMs = Number(segmentDownloadTimeoutMins) * 60 * 1e3; - } - core14.debug(`Use Azure SDK: ${result.useAzureSdk}`); - core14.debug(`Download concurrency: ${result.downloadConcurrency}`); - core14.debug(`Request timeout (ms): ${result.timeoutInMs}`); - core14.debug(`Cache segment download timeout mins env var: ${process.env["SEGMENT_DOWNLOAD_TIMEOUT_MINS"]}`); - core14.debug(`Segment download timeout (ms): ${result.segmentTimeoutInMs}`); - core14.debug(`Lookup only: ${result.lookupOnly}`); - return result; - } + }; + exports2.AccountSASServices = AccountSASServices; } }); -// node_modules/@actions/cache/lib/internal/config.js -var require_config = __commonJS({ - "node_modules/@actions/cache/lib/internal/config.js"(exports2) { +// node_modules/@azure/storage-blob/dist/commonjs/sas/AccountSASSignatureValues.js +var require_AccountSASSignatureValues = __commonJS({ + "node_modules/@azure/storage-blob/dist/commonjs/sas/AccountSASSignatureValues.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.isGhes = isGhes; - exports2.getCacheServiceVersion = getCacheServiceVersion; - exports2.getCacheServiceURL = getCacheServiceURL; - function isGhes() { - const ghUrl = new URL(process.env["GITHUB_SERVER_URL"] || "https://github.com"); - const hostname = ghUrl.hostname.trimEnd().toUpperCase(); - const isGitHubHost = hostname === "GITHUB.COM"; - const isGheHost = hostname.endsWith(".GHE.COM"); - const isLocalHost = hostname.endsWith(".LOCALHOST"); - return !isGitHubHost && !isGheHost && !isLocalHost; - } - function getCacheServiceVersion() { - if (isGhes()) - return "v1"; - return process.env["ACTIONS_CACHE_SERVICE_V2"] ? 
"v2" : "v1"; + exports2.generateAccountSASQueryParameters = generateAccountSASQueryParameters; + exports2.generateAccountSASQueryParametersInternal = generateAccountSASQueryParametersInternal; + var AccountSASPermissions_js_1 = require_AccountSASPermissions(); + var AccountSASResourceTypes_js_1 = require_AccountSASResourceTypes(); + var AccountSASServices_js_1 = require_AccountSASServices(); + var SasIPRange_js_1 = require_SasIPRange(); + var SASQueryParameters_js_1 = require_SASQueryParameters(); + var constants_js_1 = require_constants15(); + var utils_common_js_1 = require_utils_common(); + function generateAccountSASQueryParameters(accountSASSignatureValues, sharedKeyCredential) { + return generateAccountSASQueryParametersInternal(accountSASSignatureValues, sharedKeyCredential).sasQueryParameters; } - function getCacheServiceURL() { - const version = getCacheServiceVersion(); - switch (version) { - case "v1": - return process.env["ACTIONS_CACHE_URL"] || process.env["ACTIONS_RESULTS_URL"] || ""; - case "v2": - return process.env["ACTIONS_RESULTS_URL"] || ""; - default: - throw new Error(`Unsupported cache service version: ${version}`); + function generateAccountSASQueryParametersInternal(accountSASSignatureValues, sharedKeyCredential) { + const version = accountSASSignatureValues.version ? accountSASSignatureValues.version : constants_js_1.SERVICE_VERSION; + if (accountSASSignatureValues.permissions && accountSASSignatureValues.permissions.setImmutabilityPolicy && version < "2020-08-04") { + throw RangeError("'version' must be >= '2020-08-04' when provided 'i' permission."); } - } - } -}); - -// node_modules/@actions/cache/package.json -var require_package2 = __commonJS({ - "node_modules/@actions/cache/package.json"(exports2, module2) { - module2.exports = { - name: "@actions/cache", - version: "5.0.5", - preview: true, - description: "Actions cache lib", - keywords: [ - "github", - "actions", - "cache" - ], - homepage: "https://github.com/actions/toolkit/tree/main/packages/cache", - license: "MIT", - main: "lib/cache.js", - types: "lib/cache.d.ts", - directories: { - lib: "lib", - test: "__tests__" - }, - files: [ - "lib", - "!.DS_Store" - ], - publishConfig: { - access: "public" - }, - repository: { - type: "git", - url: "git+https://github.com/actions/toolkit.git", - directory: "packages/cache" - }, - scripts: { - "audit-moderate": "npm install && npm audit --json --audit-level=moderate > audit.json", - test: 'echo "Error: run tests from root" && exit 1', - tsc: "tsc" - }, - bugs: { - url: "https://github.com/actions/toolkit/issues" - }, - dependencies: { - "@actions/core": "^2.0.0", - "@actions/exec": "^2.0.0", - "@actions/glob": "^0.5.1", - "@protobuf-ts/runtime-rpc": "^2.11.1", - "@actions/http-client": "^3.0.2", - "@actions/io": "^2.0.0", - "@azure/abort-controller": "^1.1.0", - "@azure/core-rest-pipeline": "^1.22.0", - "@azure/storage-blob": "^12.29.1", - semver: "^6.3.1" - }, - devDependencies: { - "@types/node": "^24.1.0", - "@types/semver": "^6.0.0", - "@protobuf-ts/plugin": "^2.9.4", - typescript: "^5.2.2" - }, - overrides: { - "uri-js": "npm:uri-js-replace@^1.0.1", - "node-fetch": "^3.3.2" + if (accountSASSignatureValues.permissions && accountSASSignatureValues.permissions.deleteVersion && version < "2019-10-10") { + throw RangeError("'version' must be >= '2019-10-10' when provided 'x' permission."); + } + if (accountSASSignatureValues.permissions && accountSASSignatureValues.permissions.permanentDelete && version < "2019-10-10") { + throw RangeError("'version' must be >= 
'2019-10-10' when provided 'y' permission."); + } + if (accountSASSignatureValues.permissions && accountSASSignatureValues.permissions.tag && version < "2019-12-12") { + throw RangeError("'version' must be >= '2019-12-12' when provided 't' permission."); + } + if (accountSASSignatureValues.permissions && accountSASSignatureValues.permissions.filter && version < "2019-12-12") { + throw RangeError("'version' must be >= '2019-12-12' when provided 'f' permission."); + } + if (accountSASSignatureValues.encryptionScope && version < "2020-12-06") { + throw RangeError("'version' must be >= '2020-12-06' when provided 'encryptionScope' in SAS."); + } + const parsedPermissions = AccountSASPermissions_js_1.AccountSASPermissions.parse(accountSASSignatureValues.permissions.toString()); + const parsedServices = AccountSASServices_js_1.AccountSASServices.parse(accountSASSignatureValues.services).toString(); + const parsedResourceTypes = AccountSASResourceTypes_js_1.AccountSASResourceTypes.parse(accountSASSignatureValues.resourceTypes).toString(); + let stringToSign; + if (version >= "2020-12-06") { + stringToSign = [ + sharedKeyCredential.accountName, + parsedPermissions, + parsedServices, + parsedResourceTypes, + accountSASSignatureValues.startsOn ? (0, utils_common_js_1.truncatedISO8061Date)(accountSASSignatureValues.startsOn, false) : "", + (0, utils_common_js_1.truncatedISO8061Date)(accountSASSignatureValues.expiresOn, false), + accountSASSignatureValues.ipRange ? (0, SasIPRange_js_1.ipRangeToString)(accountSASSignatureValues.ipRange) : "", + accountSASSignatureValues.protocol ? accountSASSignatureValues.protocol : "", + version, + accountSASSignatureValues.encryptionScope ? accountSASSignatureValues.encryptionScope : "", + "" + // Account SAS requires an additional newline character + ].join("\n"); + } else { + stringToSign = [ + sharedKeyCredential.accountName, + parsedPermissions, + parsedServices, + parsedResourceTypes, + accountSASSignatureValues.startsOn ? (0, utils_common_js_1.truncatedISO8061Date)(accountSASSignatureValues.startsOn, false) : "", + (0, utils_common_js_1.truncatedISO8061Date)(accountSASSignatureValues.expiresOn, false), + accountSASSignatureValues.ipRange ? (0, SasIPRange_js_1.ipRangeToString)(accountSASSignatureValues.ipRange) : "", + accountSASSignatureValues.protocol ? 
accountSASSignatureValues.protocol : "", + version, + "" + // Account SAS requires an additional newline character + ].join("\n"); } - }; - } -}); - -// node_modules/@actions/cache/lib/internal/shared/user-agent.js -var require_user_agent = __commonJS({ - "node_modules/@actions/cache/lib/internal/shared/user-agent.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.getUserAgentString = getUserAgentString; - var packageJson = require_package2(); - function getUserAgentString() { - return `@actions/cache-${packageJson.version}`; + const signature = sharedKeyCredential.computeHMACSHA256(stringToSign); + return { + sasQueryParameters: new SASQueryParameters_js_1.SASQueryParameters(version, signature, parsedPermissions.toString(), parsedServices, parsedResourceTypes, accountSASSignatureValues.protocol, accountSASSignatureValues.startsOn, accountSASSignatureValues.expiresOn, accountSASSignatureValues.ipRange, void 0, void 0, void 0, void 0, void 0, void 0, void 0, void 0, void 0, void 0, accountSASSignatureValues.encryptionScope), + stringToSign + }; } } }); -// node_modules/@actions/cache/lib/internal/cacheHttpClient.js -var require_cacheHttpClient = __commonJS({ - "node_modules/@actions/cache/lib/internal/cacheHttpClient.js"(exports2) { +// node_modules/@azure/storage-blob/dist/commonjs/BlobServiceClient.js +var require_BlobServiceClient = __commonJS({ + "node_modules/@azure/storage-blob/dist/commonjs/BlobServiceClient.js"(exports2) { "use strict"; - var __createBinding2 = exports2 && exports2.__createBinding || (Object.create ? (function(o, m, k, k2) { - if (k2 === void 0) k2 = k; - var desc = Object.getOwnPropertyDescriptor(m, k); - if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { - desc = { enumerable: true, get: function() { - return m[k]; - } }; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.BlobServiceClient = void 0; + var core_auth_1 = require_commonjs7(); + var core_rest_pipeline_1 = require_commonjs6(); + var core_util_1 = require_commonjs4(); + var Pipeline_js_1 = require_Pipeline(); + var ContainerClient_js_1 = require_ContainerClient(); + var utils_common_js_1 = require_utils_common(); + var StorageSharedKeyCredential_js_1 = require_StorageSharedKeyCredential(); + var AnonymousCredential_js_1 = require_AnonymousCredential(); + var utils_common_js_2 = require_utils_common(); + var tracing_js_1 = require_tracing(); + var BlobBatchClient_js_1 = require_BlobBatchClient(); + var StorageClient_js_1 = require_StorageClient(); + var AccountSASPermissions_js_1 = require_AccountSASPermissions(); + var AccountSASSignatureValues_js_1 = require_AccountSASSignatureValues(); + var AccountSASServices_js_1 = require_AccountSASServices(); + var BlobServiceClient = class _BlobServiceClient extends StorageClient_js_1.StorageClient { + /** + * serviceContext provided by protocol layer. + */ + serviceContext; + /** + * + * Creates an instance of BlobServiceClient from connection string. + * + * @param connectionString - Account connection string or a SAS connection string of an Azure storage account. + * [ Note - Account connection string can only be used in NODE.JS runtime. 
] + * Account connection string example - + * `DefaultEndpointsProtocol=https;AccountName=myaccount;AccountKey=accountKey;EndpointSuffix=core.windows.net` + * SAS connection string example - + * `BlobEndpoint=https://myaccount.blob.core.windows.net/;QueueEndpoint=https://myaccount.queue.core.windows.net/;FileEndpoint=https://myaccount.file.core.windows.net/;TableEndpoint=https://myaccount.table.core.windows.net/;SharedAccessSignature=sasString` + * @param options - Optional. Options to configure the HTTP pipeline. + */ + static fromConnectionString(connectionString, options) { + options = options || {}; + const extractedCreds = (0, utils_common_js_1.extractConnectionStringParts)(connectionString); + if (extractedCreds.kind === "AccountConnString") { + if (core_util_1.isNodeLike) { + const sharedKeyCredential = new StorageSharedKeyCredential_js_1.StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); + if (!options.proxyOptions) { + options.proxyOptions = (0, core_rest_pipeline_1.getDefaultProxySettings)(extractedCreds.proxyUri); + } + const pipeline = (0, Pipeline_js_1.newPipeline)(sharedKeyCredential, options); + return new _BlobServiceClient(extractedCreds.url, pipeline); + } else { + throw new Error("Account connection string is only supported in Node.js environment"); + } + } else if (extractedCreds.kind === "SASConnString") { + const pipeline = (0, Pipeline_js_1.newPipeline)(new AnonymousCredential_js_1.AnonymousCredential(), options); + return new _BlobServiceClient(extractedCreds.url + "?" + extractedCreds.accountSas, pipeline); + } else { + throw new Error("Connection string must be either an Account connection string or a SAS connection string"); + } } - Object.defineProperty(o, k2, desc); - }) : (function(o, m, k, k2) { - if (k2 === void 0) k2 = k; - o[k2] = m[k]; - })); - var __setModuleDefault2 = exports2 && exports2.__setModuleDefault || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); - }) : function(o, v) { - o["default"] = v; - }); - var __importStar2 = exports2 && exports2.__importStar || /* @__PURE__ */ (function() { - var ownKeys2 = function(o) { - ownKeys2 = Object.getOwnPropertyNames || function(o2) { - var ar = []; - for (var k in o2) if (Object.prototype.hasOwnProperty.call(o2, k)) ar[ar.length] = k; - return ar; - }; - return ownKeys2(o); - }; - return function(mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) { - for (var k = ownKeys2(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding2(result, mod, k[i]); + constructor(url2, credentialOrPipeline, options) { + let pipeline; + if ((0, Pipeline_js_1.isPipelineLike)(credentialOrPipeline)) { + pipeline = credentialOrPipeline; + } else if (core_util_1.isNodeLike && credentialOrPipeline instanceof StorageSharedKeyCredential_js_1.StorageSharedKeyCredential || credentialOrPipeline instanceof AnonymousCredential_js_1.AnonymousCredential || (0, core_auth_1.isTokenCredential)(credentialOrPipeline)) { + pipeline = (0, Pipeline_js_1.newPipeline)(credentialOrPipeline, options); + } else { + pipeline = (0, Pipeline_js_1.newPipeline)(new AnonymousCredential_js_1.AnonymousCredential(), options); } - __setModuleDefault2(result, mod); - return result; - }; - })(); - var __awaiter2 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { - function adopt(value) { - return value instanceof P ? 
value : new P(function(resolve6) { - resolve6(value); + super(url2, pipeline); + this.serviceContext = this.storageClientContext.service; + } + /** + * Creates a {@link ContainerClient} object + * + * @param containerName - A container name + * @returns A new ContainerClient object for the given container name. + * + * Example usage: + * + * ```ts snippet:BlobServiceClientGetContainerClient + * import { BlobServiceClient } from "@azure/storage-blob"; + * import { DefaultAzureCredential } from "@azure/identity"; + * + * const account = ""; + * const blobServiceClient = new BlobServiceClient( + * `https://${account}.blob.core.windows.net`, + * new DefaultAzureCredential(), + * ); + * + * const containerClient = blobServiceClient.getContainerClient(""); + * ``` + */ + getContainerClient(containerName) { + return new ContainerClient_js_1.ContainerClient((0, utils_common_js_1.appendToURLPath)(this.url, encodeURIComponent(containerName)), this.pipeline); + } + /** + * Create a Blob container. @see https://learn.microsoft.com/rest/api/storageservices/create-container + * + * @param containerName - Name of the container to create. + * @param options - Options to configure Container Create operation. + * @returns Container creation response and the corresponding container client. + */ + async createContainer(containerName, options = {}) { + return tracing_js_1.tracingClient.withSpan("BlobServiceClient-createContainer", options, async (updatedOptions) => { + const containerClient = this.getContainerClient(containerName); + const containerCreateResponse = await containerClient.create(updatedOptions); + return { + containerClient, + containerCreateResponse + }; }); } - return new (P || (P = Promise))(function(resolve6, reject) { - function fulfilled(value) { - try { - step(generator.next(value)); - } catch (e) { - reject(e); - } - } - function rejected(value) { - try { - step(generator["throw"](value)); - } catch (e) { - reject(e); - } + /** + * Deletes a Blob container. + * + * @param containerName - Name of the container to delete. + * @param options - Options to configure Container Delete operation. + * @returns Container deletion response. + */ + async deleteContainer(containerName, options = {}) { + return tracing_js_1.tracingClient.withSpan("BlobServiceClient-deleteContainer", options, async (updatedOptions) => { + const containerClient = this.getContainerClient(containerName); + return containerClient.delete(updatedOptions); + }); + } + /** + * Restore a previously deleted Blob container. + * This API is only functional if Container Soft Delete is enabled for the storage account associated with the container. + * + * @param deletedContainerName - Name of the previously deleted container. + * @param deletedContainerVersion - Version of the previously deleted container, used to uniquely identify the deleted container. + * @param options - Options to configure Container Restore operation. + * @returns Container deletion response. 
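A minimal sketch of the container lifecycle helpers documented above (`createContainer` returning both the new client and the raw response, and `deleteContainer` taking just a name); the account and container names are placeholders:

```ts
import { BlobServiceClient } from "@azure/storage-blob";
import { DefaultAzureCredential } from "@azure/identity";

const serviceClient = new BlobServiceClient(
  "https://myaccount.blob.core.windows.net",
  new DefaultAzureCredential(),
);

// Create a container and immediately get a client for it.
const { containerClient, containerCreateResponse } =
  await serviceClient.createContainer("example-container");
console.log(`Created container, requestId: ${containerCreateResponse.requestId}`);

// ... work with containerClient ...

// Delete it by name when finished.
await serviceClient.deleteContainer("example-container");
```

If container soft delete is enabled on the account, `undeleteContainer(name, version)` can later restore the deleted container, as the surrounding comment describes.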
+ */ + async undeleteContainer(deletedContainerName, deletedContainerVersion, options = {}) { + return tracing_js_1.tracingClient.withSpan("BlobServiceClient-undeleteContainer", options, async (updatedOptions) => { + const containerClient = this.getContainerClient(options.destinationContainerName || deletedContainerName); + const containerContext = containerClient["storageClientContext"].container; + const containerUndeleteResponse = (0, utils_common_js_2.assertResponse)(await containerContext.restore({ + deletedContainerName, + deletedContainerVersion, + tracingOptions: updatedOptions.tracingOptions + })); + return { containerClient, containerUndeleteResponse }; + }); + } + /** + * Gets the properties of a storage account’s Blob service, including properties + * for Storage Analytics and CORS (Cross-Origin Resource Sharing) rules. + * @see https://learn.microsoft.com/rest/api/storageservices/get-blob-service-properties + * + * @param options - Options to the Service Get Properties operation. + * @returns Response data for the Service Get Properties operation. + */ + async getProperties(options = {}) { + return tracing_js_1.tracingClient.withSpan("BlobServiceClient-getProperties", options, async (updatedOptions) => { + return (0, utils_common_js_2.assertResponse)(await this.serviceContext.getProperties({ + abortSignal: options.abortSignal, + tracingOptions: updatedOptions.tracingOptions + })); + }); + } + /** + * Sets properties for a storage account’s Blob service endpoint, including properties + * for Storage Analytics, CORS (Cross-Origin Resource Sharing) rules and soft delete settings. + * @see https://learn.microsoft.com/rest/api/storageservices/set-blob-service-properties + * + * @param properties - + * @param options - Options to the Service Set Properties operation. + * @returns Response data for the Service Set Properties operation. + */ + async setProperties(properties, options = {}) { + return tracing_js_1.tracingClient.withSpan("BlobServiceClient-setProperties", options, async (updatedOptions) => { + return (0, utils_common_js_2.assertResponse)(await this.serviceContext.setProperties(properties, { + abortSignal: options.abortSignal, + tracingOptions: updatedOptions.tracingOptions + })); + }); + } + /** + * Retrieves statistics related to replication for the Blob service. It is only + * available on the secondary location endpoint when read-access geo-redundant + * replication is enabled for the storage account. + * @see https://learn.microsoft.com/rest/api/storageservices/get-blob-service-stats + * + * @param options - Options to the Service Get Statistics operation. + * @returns Response data for the Service Get Statistics operation. + */ + async getStatistics(options = {}) { + return tracing_js_1.tracingClient.withSpan("BlobServiceClient-getStatistics", options, async (updatedOptions) => { + return (0, utils_common_js_2.assertResponse)(await this.serviceContext.getStatistics({ + abortSignal: options.abortSignal, + tracingOptions: updatedOptions.tracingOptions + })); + }); + } + /** + * The Get Account Information operation returns the sku name and account kind + * for the specified account. + * The Get Account Information operation is available on service versions beginning + * with version 2018-03-28. + * @see https://learn.microsoft.com/rest/api/storageservices/get-account-information + * + * @param options - Options to the Service Get Account Info operation. + * @returns Response data for the Service Get Account Info operation. 
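The `getAccountInfo` call documented here is a single request that reports the account's SKU and kind (service version 2018-03-28 or later). A sketch with a placeholder account:

```ts
import { BlobServiceClient } from "@azure/storage-blob";
import { DefaultAzureCredential } from "@azure/identity";

const serviceClient = new BlobServiceClient(
  "https://myaccount.blob.core.windows.net",
  new DefaultAzureCredential(),
);

const accountInfo = await serviceClient.getAccountInfo();
// e.g. "Standard_LRS" and "StorageV2"
console.log(accountInfo.skuName, accountInfo.accountKind);
```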
+ */ + async getAccountInfo(options = {}) { + return tracing_js_1.tracingClient.withSpan("BlobServiceClient-getAccountInfo", options, async (updatedOptions) => { + return (0, utils_common_js_2.assertResponse)(await this.serviceContext.getAccountInfo({ + abortSignal: options.abortSignal, + tracingOptions: updatedOptions.tracingOptions + })); + }); + } + /** + * Returns a list of the containers under the specified account. + * @see https://learn.microsoft.com/rest/api/storageservices/list-containers2 + * + * @param marker - A string value that identifies the portion of + * the list of containers to be returned with the next listing operation. The + * operation returns the continuationToken value within the response body if the + * listing operation did not return all containers remaining to be listed + * with the current page. The continuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to the Service List Container Segment operation. + * @returns Response data for the Service List Container Segment operation. + */ + async listContainersSegment(marker, options = {}) { + return tracing_js_1.tracingClient.withSpan("BlobServiceClient-listContainersSegment", options, async (updatedOptions) => { + return (0, utils_common_js_2.assertResponse)(await this.serviceContext.listContainersSegment({ + abortSignal: options.abortSignal, + marker, + ...options, + include: typeof options.include === "string" ? [options.include] : options.include, + tracingOptions: updatedOptions.tracingOptions + })); + }); + } + /** + * The Filter Blobs operation enables callers to list blobs across all containers whose tags + * match a given search expression. Filter blobs searches across all containers within a + * storage account but can be scoped within the expression to a single container. + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param marker - A string value that identifies the portion of + * the list of blobs to be returned with the next listing operation. The + * operation returns the continuationToken value within the response body if the + * listing operation did not return all blobs remaining to be listed + * with the current page. The continuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to find blobs by tags. 
+ */ + async findBlobsByTagsSegment(tagFilterSqlExpression, marker, options = {}) { + return tracing_js_1.tracingClient.withSpan("BlobServiceClient-findBlobsByTagsSegment", options, async (updatedOptions) => { + const response = (0, utils_common_js_2.assertResponse)(await this.serviceContext.filterBlobs({ + abortSignal: options.abortSignal, + where: tagFilterSqlExpression, + marker, + maxPageSize: options.maxPageSize, + tracingOptions: updatedOptions.tracingOptions + })); + const wrappedResponse = { + ...response, + _response: response._response, + // _response is made non-enumerable + blobs: response.blobs.map((blob) => { + let tagValue = ""; + if (blob.tags?.blobTagSet.length === 1) { + tagValue = blob.tags.blobTagSet[0].value; + } + return { ...blob, tags: (0, utils_common_js_1.toTags)(blob.tags), tagValue }; + }) + }; + return wrappedResponse; + }); + } + /** + * Returns an AsyncIterableIterator for ServiceFindBlobsByTagsSegmentResponse. + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param marker - A string value that identifies the portion of + * the list of blobs to be returned with the next listing operation. The + * operation returns the continuationToken value within the response body if the + * listing operation did not return all blobs remaining to be listed + * with the current page. The continuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to find blobs by tags. + */ + async *findBlobsByTagsSegments(tagFilterSqlExpression, marker, options = {}) { + let response; + if (!!marker || marker === void 0) { + do { + response = await this.findBlobsByTagsSegment(tagFilterSqlExpression, marker, options); + response.blobs = response.blobs || []; + marker = response.continuationToken; + yield response; + } while (marker); } - function step(result) { - result.done ? resolve6(result.value) : adopt(result.value).then(fulfilled, rejected); + } + /** + * Returns an AsyncIterableIterator for blobs. + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param options - Options to findBlobsByTagsItems. 
+ */ + async *findBlobsByTagsItems(tagFilterSqlExpression, options = {}) { + let marker; + for await (const segment of this.findBlobsByTagsSegments(tagFilterSqlExpression, marker, options)) { + yield* segment.blobs; } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); - }; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.getCacheEntry = getCacheEntry; - exports2.downloadCache = downloadCache; - exports2.reserveCache = reserveCache; - exports2.saveCache = saveCache3; - var core14 = __importStar2(require_core()); - var http_client_1 = require_lib(); - var auth_1 = require_auth(); - var fs13 = __importStar2(require("fs")); - var url_1 = require("url"); - var utils = __importStar2(require_cacheUtils()); - var uploadUtils_1 = require_uploadUtils(); - var downloadUtils_1 = require_downloadUtils(); - var options_1 = require_options(); - var requestUtils_1 = require_requestUtils(); - var config_1 = require_config(); - var user_agent_1 = require_user_agent(); - function getCacheApiUrl(resource) { - const baseUrl = (0, config_1.getCacheServiceURL)(); - if (!baseUrl) { - throw new Error("Cache Service Url not found, unable to restore cache."); } - const url2 = `${baseUrl}_apis/artifactcache/${resource}`; - core14.debug(`Resource Url: ${url2}`); - return url2; - } - function createAcceptHeader(type2, apiVersion) { - return `${type2};api-version=${apiVersion}`; - } - function getRequestOptions() { - const requestOptions = { - headers: { - Accept: createAcceptHeader("application/json", "6.0-preview.1") + /** + * Returns an async iterable iterator to find all blobs with specified tag + * under the specified account. + * + * .byPage() returns an async iterable iterator to list the blobs in pages. + * + * @see https://learn.microsoft.com/rest/api/storageservices/get-blob-service-properties + * + * ```ts snippet:BlobServiceClientFindBlobsByTags + * import { BlobServiceClient } from "@azure/storage-blob"; + * import { DefaultAzureCredential } from "@azure/identity"; + * + * const account = ""; + * const blobServiceClient = new BlobServiceClient( + * `https://${account}.blob.core.windows.net`, + * new DefaultAzureCredential(), + * ); + * + * // Use for await to iterate the blobs + * let i = 1; + * for await (const blob of blobServiceClient.findBlobsByTags("tagkey='tagvalue'")) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * + * // Use iter.next() to iterate the blobs + * i = 1; + * const iter = blobServiceClient.findBlobsByTags("tagkey='tagvalue'"); + * let { value, done } = await iter.next(); + * while (!done) { + * console.log(`Blob ${i++}: ${value.name}`); + * ({ value, done } = await iter.next()); + * } + * + * // Use byPage() to iterate the blobs + * i = 1; + * for await (const page of blobServiceClient + * .findBlobsByTags("tagkey='tagvalue'") + * .byPage({ maxPageSize: 20 })) { + * for (const blob of page.blobs) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * } + * + * // Use paging with a marker + * i = 1; + * let iterator = blobServiceClient.findBlobsByTags("tagkey='tagvalue'").byPage({ maxPageSize: 2 }); + * let response = (await iterator.next()).value; + * // Prints 2 blob names + * if (response.blobs) { + * for (const blob of response.blobs) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * } + * // Gets next marker + * let marker = response.continuationToken; + * // Passing next marker as continuationToken + * iterator = blobServiceClient + * .findBlobsByTags("tagkey='tagvalue'") + * .byPage({ continuationToken: 
marker, maxPageSize: 10 }); + * response = (await iterator.next()).value; + * + * // Prints blob names + * if (response.blobs) { + * for (const blob of response.blobs) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * } + * ``` + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param options - Options to find blobs by tags. + */ + findBlobsByTags(tagFilterSqlExpression, options = {}) { + const listSegmentOptions = { + ...options + }; + const iter = this.findBlobsByTagsItems(tagFilterSqlExpression, listSegmentOptions); + return { + /** + * The next method, part of the iteration protocol + */ + next() { + return iter.next(); + }, + /** + * The connection to the async iterator, part of the iteration protocol + */ + [Symbol.asyncIterator]() { + return this; + }, + /** + * Return an AsyncIterableIterator that works a page at a time + */ + byPage: (settings = {}) => { + return this.findBlobsByTagsSegments(tagFilterSqlExpression, settings.continuationToken, { + maxPageSize: settings.maxPageSize, + ...listSegmentOptions + }); + } + }; + } + /** + * Returns an AsyncIterableIterator for ServiceListContainersSegmentResponses + * + * @param marker - A string value that identifies the portion of + * the list of containers to be returned with the next listing operation. The + * operation returns the continuationToken value within the response body if the + * listing operation did not return all containers remaining to be listed + * with the current page. The continuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to list containers operation. + */ + async *listSegments(marker, options = {}) { + let listContainersSegmentResponse; + if (!!marker || marker === void 0) { + do { + listContainersSegmentResponse = await this.listContainersSegment(marker, options); + listContainersSegmentResponse.containerItems = listContainersSegmentResponse.containerItems || []; + marker = listContainersSegmentResponse.continuationToken; + yield await listContainersSegmentResponse; + } while (marker); } - }; - return requestOptions; - } - function createHttpClient() { - const token = process.env["ACTIONS_RUNTIME_TOKEN"] || ""; - const bearerCredentialHandler = new auth_1.BearerCredentialHandler(token); - return new http_client_1.HttpClient((0, user_agent_1.getUserAgentString)(), [bearerCredentialHandler], getRequestOptions()); - } - function getCacheEntry(keys, paths, options) { - return __awaiter2(this, void 0, void 0, function* () { - const httpClient = createHttpClient(); - const version = utils.getCacheVersion(paths, options === null || options === void 0 ? void 0 : options.compressionMethod, options === null || options === void 0 ? 
void 0 : options.enableCrossOsArchive); - const resource = `cache?keys=${encodeURIComponent(keys.join(","))}&version=${version}`; - const response = yield (0, requestUtils_1.retryTypedResponse)("getCacheEntry", () => __awaiter2(this, void 0, void 0, function* () { - return httpClient.getJson(getCacheApiUrl(resource)); - })); - if (response.statusCode === 204) { - if (core14.isDebug()) { - yield printCachesListForDiagnostics(keys[0], httpClient, version); - } - return null; + } + /** + * Returns an AsyncIterableIterator for Container Items + * + * @param options - Options to list containers operation. + */ + async *listItems(options = {}) { + let marker; + for await (const segment of this.listSegments(marker, options)) { + yield* segment.containerItems; } - if (!(0, requestUtils_1.isSuccessStatusCode)(response.statusCode)) { - throw new Error(`Cache service responded with ${response.statusCode}`); + } + /** + * Returns an async iterable iterator to list all the containers + * under the specified account. + * + * .byPage() returns an async iterable iterator to list the containers in pages. + * + * ```ts snippet:BlobServiceClientListContainers + * import { BlobServiceClient } from "@azure/storage-blob"; + * import { DefaultAzureCredential } from "@azure/identity"; + * + * const account = ""; + * const blobServiceClient = new BlobServiceClient( + * `https://${account}.blob.core.windows.net`, + * new DefaultAzureCredential(), + * ); + * + * // Use for await to iterate the containers + * let i = 1; + * for await (const container of blobServiceClient.listContainers()) { + * console.log(`Container ${i++}: ${container.name}`); + * } + * + * // Use iter.next() to iterate the containers + * i = 1; + * const iter = blobServiceClient.listContainers(); + * let { value, done } = await iter.next(); + * while (!done) { + * console.log(`Container ${i++}: ${value.name}`); + * ({ value, done } = await iter.next()); + * } + * + * // Use byPage() to iterate the containers + * i = 1; + * for await (const page of blobServiceClient.listContainers().byPage({ maxPageSize: 20 })) { + * for (const container of page.containerItems) { + * console.log(`Container ${i++}: ${container.name}`); + * } + * } + * + * // Use paging with a marker + * i = 1; + * let iterator = blobServiceClient.listContainers().byPage({ maxPageSize: 2 }); + * let response = (await iterator.next()).value; + * + * // Prints 2 container names + * if (response.containerItems) { + * for (const container of response.containerItems) { + * console.log(`Container ${i++}: ${container.name}`); + * } + * } + * + * // Gets next marker + * let marker = response.continuationToken; + * // Passing next marker as continuationToken + * iterator = blobServiceClient + * .listContainers() + * .byPage({ continuationToken: marker, maxPageSize: 10 }); + * response = (await iterator.next()).value; + * + * // Prints 10 container names + * if (response.containerItems) { + * for (const container of response.containerItems) { + * console.log(`Container ${i++}: ${container.name}`); + * } + * } + * ``` + * + * @param options - Options to list containers. + * @returns An asyncIterableIterator that supports paging. + */ + listContainers(options = {}) { + if (options.prefix === "") { + options.prefix = void 0; } - const cacheResult = response.result; - const cacheDownloadUrl = cacheResult === null || cacheResult === void 0 ? 
void 0 : cacheResult.archiveLocation; - if (!cacheDownloadUrl) { - throw new Error("Cache not found."); + const include = []; + if (options.includeDeleted) { + include.push("deleted"); } - core14.setSecret(cacheDownloadUrl); - core14.debug(`Cache Result:`); - core14.debug(JSON.stringify(cacheResult)); - return cacheResult; - }); - } - function printCachesListForDiagnostics(key, httpClient, version) { - return __awaiter2(this, void 0, void 0, function* () { - const resource = `caches?key=${encodeURIComponent(key)}`; - const response = yield (0, requestUtils_1.retryTypedResponse)("listCache", () => __awaiter2(this, void 0, void 0, function* () { - return httpClient.getJson(getCacheApiUrl(resource)); - })); - if (response.statusCode === 200) { - const cacheListResult = response.result; - const totalCount = cacheListResult === null || cacheListResult === void 0 ? void 0 : cacheListResult.totalCount; - if (totalCount && totalCount > 0) { - core14.debug(`No matching cache found for cache key '${key}', version '${version} and scope ${process.env["GITHUB_REF"]}. There exist one or more cache(s) with similar key but they have different version or scope. See more info on cache matching here: https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#matching-a-cache-key -Other caches with similar key:`); - for (const cacheEntry of (cacheListResult === null || cacheListResult === void 0 ? void 0 : cacheListResult.artifactCaches) || []) { - core14.debug(`Cache Key: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheKey}, Cache Version: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheVersion}, Cache Scope: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.scope}, Cache Created: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.creationTime}`); - } - } + if (options.includeMetadata) { + include.push("metadata"); } - }); - } - function downloadCache(archiveLocation, archivePath, options) { - return __awaiter2(this, void 0, void 0, function* () { - const archiveUrl = new url_1.URL(archiveLocation); - const downloadOptions = (0, options_1.getDownloadOptions)(options); - if (archiveUrl.hostname.endsWith(".blob.core.windows.net")) { - if (downloadOptions.useAzureSdk) { - yield (0, downloadUtils_1.downloadCacheStorageSDK)(archiveLocation, archivePath, downloadOptions); - } else if (downloadOptions.concurrentBlobDownloads) { - yield (0, downloadUtils_1.downloadCacheHttpClientConcurrent)(archiveLocation, archivePath, downloadOptions); - } else { - yield (0, downloadUtils_1.downloadCacheHttpClient)(archiveLocation, archivePath); - } - } else { - yield (0, downloadUtils_1.downloadCacheHttpClient)(archiveLocation, archivePath); + if (options.includeSystem) { + include.push("system"); } - }); - } - function reserveCache(key, paths, options) { - return __awaiter2(this, void 0, void 0, function* () { - const httpClient = createHttpClient(); - const version = utils.getCacheVersion(paths, options === null || options === void 0 ? void 0 : options.compressionMethod, options === null || options === void 0 ? void 0 : options.enableCrossOsArchive); - const reserveCacheRequest = { - key, - version, - cacheSize: options === null || options === void 0 ? 
void 0 : options.cacheSize - }; - const response = yield (0, requestUtils_1.retryTypedResponse)("reserveCache", () => __awaiter2(this, void 0, void 0, function* () { - return httpClient.postJson(getCacheApiUrl("caches"), reserveCacheRequest); - })); - return response; - }); - } - function getContentRange(start, end) { - return `bytes ${start}-${end}/*`; - } - function uploadChunk(httpClient, resourceUrl, openStream, start, end) { - return __awaiter2(this, void 0, void 0, function* () { - core14.debug(`Uploading chunk of size ${end - start + 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`); - const additionalHeaders = { - "Content-Type": "application/octet-stream", - "Content-Range": getContentRange(start, end) + const listSegmentOptions = { + ...options, + ...include.length > 0 ? { include } : {} }; - const uploadChunkResponse = yield (0, requestUtils_1.retryHttpClientResponse)(`uploadChunk (start: ${start}, end: ${end})`, () => __awaiter2(this, void 0, void 0, function* () { - return httpClient.sendStream("PATCH", resourceUrl, openStream(), additionalHeaders); - })); - if (!(0, requestUtils_1.isSuccessStatusCode)(uploadChunkResponse.message.statusCode)) { - throw new Error(`Cache service responded with ${uploadChunkResponse.message.statusCode} during upload chunk.`); - } - }); - } - function uploadFile(httpClient, cacheId, archivePath, options) { - return __awaiter2(this, void 0, void 0, function* () { - const fileSize = utils.getArchiveFileSizeInBytes(archivePath); - const resourceUrl = getCacheApiUrl(`caches/${cacheId.toString()}`); - const fd = fs13.openSync(archivePath, "r"); - const uploadOptions = (0, options_1.getUploadOptions)(options); - const concurrency = utils.assertDefined("uploadConcurrency", uploadOptions.uploadConcurrency); - const maxChunkSize = utils.assertDefined("uploadChunkSize", uploadOptions.uploadChunkSize); - const parallelUploads = [...new Array(concurrency).keys()]; - core14.debug("Awaiting all uploads"); - let offset = 0; - try { - yield Promise.all(parallelUploads.map(() => __awaiter2(this, void 0, void 0, function* () { - while (offset < fileSize) { - const chunkSize = Math.min(fileSize - offset, maxChunkSize); - const start = offset; - const end = offset + chunkSize - 1; - offset += maxChunkSize; - yield uploadChunk(httpClient, resourceUrl, () => fs13.createReadStream(archivePath, { - fd, - start, - end, - autoClose: false - }).on("error", (error3) => { - throw new Error(`Cache upload failed because file read failed with ${error3.message}`); - }), start, end); - } - }))); - } finally { - fs13.closeSync(fd); - } - return; - }); - } - function commitCache(httpClient, cacheId, filesize) { - return __awaiter2(this, void 0, void 0, function* () { - const commitCacheRequest = { size: filesize }; - return yield (0, requestUtils_1.retryTypedResponse)("commitCache", () => __awaiter2(this, void 0, void 0, function* () { - return httpClient.postJson(getCacheApiUrl(`caches/${cacheId.toString()}`), commitCacheRequest); - })); - }); - } - function saveCache3(cacheId, archivePath, signedUploadURL, options) { - return __awaiter2(this, void 0, void 0, function* () { - const uploadOptions = (0, options_1.getUploadOptions)(options); - if (uploadOptions.useAzureSdk) { - if (!signedUploadURL) { - throw new Error("Azure Storage SDK can only be used when a signed URL is provided."); - } - yield (0, uploadUtils_1.uploadCacheArchiveSDK)(signedUploadURL, archivePath, options); - } else { - const httpClient = createHttpClient(); - core14.debug("Upload 
cache"); - yield uploadFile(httpClient, cacheId, archivePath, options); - core14.debug("Commiting cache"); - const cacheSize = utils.getArchiveFileSizeInBytes(archivePath); - core14.info(`Cache Size: ~${Math.round(cacheSize / (1024 * 1024))} MB (${cacheSize} B)`); - const commitCacheResponse = yield commitCache(httpClient, cacheId, cacheSize); - if (!(0, requestUtils_1.isSuccessStatusCode)(commitCacheResponse.statusCode)) { - throw new Error(`Cache service responded with ${commitCacheResponse.statusCode} during commit cache.`); + const iter = this.listItems(listSegmentOptions); + return { + /** + * The next method, part of the iteration protocol + */ + next() { + return iter.next(); + }, + /** + * The connection to the async iterator, part of the iteration protocol + */ + [Symbol.asyncIterator]() { + return this; + }, + /** + * Return an AsyncIterableIterator that works a page at a time + */ + byPage: (settings = {}) => { + return this.listSegments(settings.continuationToken, { + maxPageSize: settings.maxPageSize, + ...listSegmentOptions + }); } - core14.info("Cache saved successfully"); - } - }); - } - } -}); - -// node_modules/@protobuf-ts/runtime/build/commonjs/json-typings.js -var require_json_typings = __commonJS({ - "node_modules/@protobuf-ts/runtime/build/commonjs/json-typings.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.isJsonObject = exports2.typeofJsonValue = void 0; - function typeofJsonValue(value) { - let t = typeof value; - if (t == "object") { - if (Array.isArray(value)) - return "array"; - if (value === null) - return "null"; + }; } - return t; - } - exports2.typeofJsonValue = typeofJsonValue; - function isJsonObject(value) { - return value !== null && typeof value == "object" && !Array.isArray(value); - } - exports2.isJsonObject = isJsonObject; - } -}); - -// node_modules/@protobuf-ts/runtime/build/commonjs/base64.js -var require_base642 = __commonJS({ - "node_modules/@protobuf-ts/runtime/build/commonjs/base64.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.base64encode = exports2.base64decode = void 0; - var encTable = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/".split(""); - var decTable = []; - for (let i = 0; i < encTable.length; i++) - decTable[encTable[i].charCodeAt(0)] = i; - decTable["-".charCodeAt(0)] = encTable.indexOf("+"); - decTable["_".charCodeAt(0)] = encTable.indexOf("/"); - function base64decode(base64Str) { - let es = base64Str.length * 3 / 4; - if (base64Str[base64Str.length - 2] == "=") - es -= 2; - else if (base64Str[base64Str.length - 1] == "=") - es -= 1; - let bytes = new Uint8Array(es), bytePos = 0, groupPos = 0, b, p = 0; - for (let i = 0; i < base64Str.length; i++) { - b = decTable[base64Str.charCodeAt(i)]; - if (b === void 0) { - switch (base64Str[i]) { - case "=": - groupPos = 0; - // reset state when padding found - case "\n": - case "\r": - case " ": - case " ": - continue; - // skip white-space, and padding - default: - throw Error(`invalid base64 string.`); - } - } - switch (groupPos) { - case 0: - p = b; - groupPos = 1; - break; - case 1: - bytes[bytePos++] = p << 2 | (b & 48) >> 4; - p = b; - groupPos = 2; - break; - case 2: - bytes[bytePos++] = (p & 15) << 4 | (b & 60) >> 2; - p = b; - groupPos = 3; - break; - case 3: - bytes[bytePos++] = (p & 3) << 6 | b; - groupPos = 0; - break; - } + /** + * ONLY AVAILABLE WHEN USING BEARER TOKEN AUTHENTICATION (TokenCredential). 
+ * + * Retrieves a user delegation key for the Blob service. This is only a valid operation when using + * bearer token authentication. + * + * @see https://learn.microsoft.com/rest/api/storageservices/get-user-delegation-key + * + * @param startsOn - The start time for the user delegation SAS. Must be within 7 days of the current time + * @param expiresOn - The end time for the user delegation SAS. Must be within 7 days of the current time + */ + async getUserDelegationKey(startsOn, expiresOn, options = {}) { + return tracing_js_1.tracingClient.withSpan("BlobServiceClient-getUserDelegationKey", options, async (updatedOptions) => { + const response = (0, utils_common_js_2.assertResponse)(await this.serviceContext.getUserDelegationKey({ + startsOn: (0, utils_common_js_2.truncatedISO8061Date)(startsOn, false), + expiresOn: (0, utils_common_js_2.truncatedISO8061Date)(expiresOn, false) + }, { + abortSignal: options.abortSignal, + tracingOptions: updatedOptions.tracingOptions + })); + const userDelegationKey = { + signedObjectId: response.signedObjectId, + signedTenantId: response.signedTenantId, + signedStartsOn: new Date(response.signedStartsOn), + signedExpiresOn: new Date(response.signedExpiresOn), + signedService: response.signedService, + signedVersion: response.signedVersion, + value: response.value + }; + const res = { + _response: response._response, + requestId: response.requestId, + clientRequestId: response.clientRequestId, + version: response.version, + date: response.date, + errorCode: response.errorCode, + ...userDelegationKey + }; + return res; + }); } - if (groupPos == 1) - throw Error(`invalid base64 string.`); - return bytes.subarray(0, bytePos); - } - exports2.base64decode = base64decode; - function base64encode(bytes) { - let base64 = "", groupPos = 0, b, p = 0; - for (let i = 0; i < bytes.length; i++) { - b = bytes[i]; - switch (groupPos) { - case 0: - base64 += encTable[b >> 2]; - p = (b & 3) << 4; - groupPos = 1; - break; - case 1: - base64 += encTable[p | b >> 4]; - p = (b & 15) << 2; - groupPos = 2; - break; - case 2: - base64 += encTable[p | b >> 6]; - base64 += encTable[b & 63]; - groupPos = 0; - break; + /** + * Creates a BlobBatchClient object to conduct batch operations. + * + * @see https://learn.microsoft.com/rest/api/storageservices/blob-batch + * + * @returns A new BlobBatchClient object for this service. + */ + getBlobBatchClient() { + return new BlobBatchClient_js_1.BlobBatchClient(this.url, this.pipeline); + } + /** + * Only available for BlobServiceClient constructed with a shared key credential. + * + * Generates a Blob account Shared Access Signature (SAS) URI based on the client properties + * and parameters passed in. The SAS is signed by the shared key credential of the client. + * + * @see https://learn.microsoft.com/rest/api/storageservices/create-account-sas + * + * @param expiresOn - Optional. The time at which the shared access signature becomes invalid. Default to an hour later if not provided. + * @param permissions - Specifies the list of permissions to be associated with the SAS. + * @param resourceTypes - Specifies the resource types associated with the shared access signature. + * @param options - Optional parameters. + * @returns An account SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. 
+ */ + generateAccountSasUrl(expiresOn, permissions = AccountSASPermissions_js_1.AccountSASPermissions.parse("r"), resourceTypes = "sco", options = {}) { + if (!(this.credential instanceof StorageSharedKeyCredential_js_1.StorageSharedKeyCredential)) { + throw RangeError("Can only generate the account SAS when the client is initialized with a shared key credential"); + } + if (expiresOn === void 0) { + const now = /* @__PURE__ */ new Date(); + expiresOn = new Date(now.getTime() + 3600 * 1e3); } + const sas = (0, AccountSASSignatureValues_js_1.generateAccountSASQueryParameters)({ + permissions, + expiresOn, + resourceTypes, + services: AccountSASServices_js_1.AccountSASServices.parse("b").toString(), + ...options + }, this.credential).toString(); + return (0, utils_common_js_1.appendToURLQuery)(this.url, sas); } - if (groupPos) { - base64 += encTable[p]; - base64 += "="; - if (groupPos == 1) - base64 += "="; + /** + * Only available for BlobServiceClient constructed with a shared key credential. + * + * Generates string to sign for a Blob account Shared Access Signature (SAS) URI based on + * the client properties and parameters passed in. The SAS is signed by the shared key credential of the client. + * + * @see https://learn.microsoft.com/rest/api/storageservices/create-account-sas + * + * @param expiresOn - Optional. The time at which the shared access signature becomes invalid. Default to an hour later if not provided. + * @param permissions - Specifies the list of permissions to be associated with the SAS. + * @param resourceTypes - Specifies the resource types associated with the shared access signature. + * @param options - Optional parameters. + * @returns An account SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. 
+ */ + generateSasStringToSign(expiresOn, permissions = AccountSASPermissions_js_1.AccountSASPermissions.parse("r"), resourceTypes = "sco", options = {}) { + if (!(this.credential instanceof StorageSharedKeyCredential_js_1.StorageSharedKeyCredential)) { + throw RangeError("Can only generate the account SAS when the client is initialized with a shared key credential"); + } + if (expiresOn === void 0) { + const now = /* @__PURE__ */ new Date(); + expiresOn = new Date(now.getTime() + 3600 * 1e3); + } + return (0, AccountSASSignatureValues_js_1.generateAccountSASQueryParametersInternal)({ + permissions, + expiresOn, + resourceTypes, + services: AccountSASServices_js_1.AccountSASServices.parse("b").toString(), + ...options + }, this.credential).stringToSign; } - return base64; - } - exports2.base64encode = base64encode; + }; + exports2.BlobServiceClient = BlobServiceClient; } }); -// node_modules/@protobuf-ts/runtime/build/commonjs/protobufjs-utf8.js -var require_protobufjs_utf8 = __commonJS({ - "node_modules/@protobuf-ts/runtime/build/commonjs/protobufjs-utf8.js"(exports2) { +// node_modules/@azure/storage-blob/dist/commonjs/BatchResponse.js +var require_BatchResponse = __commonJS({ + "node_modules/@azure/storage-blob/dist/commonjs/BatchResponse.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.utf8read = void 0; - var fromCharCodes = (chunk) => String.fromCharCode.apply(String, chunk); - function utf8read(bytes) { - if (bytes.length < 1) - return ""; - let pos = 0, parts = [], chunk = [], i = 0, t; - let len = bytes.length; - while (pos < len) { - t = bytes[pos++]; - if (t < 128) - chunk[i++] = t; - else if (t > 191 && t < 224) - chunk[i++] = (t & 31) << 6 | bytes[pos++] & 63; - else if (t > 239 && t < 365) { - t = ((t & 7) << 18 | (bytes[pos++] & 63) << 12 | (bytes[pos++] & 63) << 6 | bytes[pos++] & 63) - 65536; - chunk[i++] = 55296 + (t >> 10); - chunk[i++] = 56320 + (t & 1023); - } else - chunk[i++] = (t & 15) << 12 | (bytes[pos++] & 63) << 6 | bytes[pos++] & 63; - if (i > 8191) { - parts.push(fromCharCodes(chunk)); - i = 0; - } - } - if (parts.length) { - if (i) - parts.push(fromCharCodes(chunk.slice(0, i))); - return parts.join(""); - } - return fromCharCodes(chunk.slice(0, i)); - } - exports2.utf8read = utf8read; } }); -// node_modules/@protobuf-ts/runtime/build/commonjs/binary-format-contract.js -var require_binary_format_contract = __commonJS({ - "node_modules/@protobuf-ts/runtime/build/commonjs/binary-format-contract.js"(exports2) { +// node_modules/@azure/storage-blob/dist/commonjs/generatedModels.js +var require_generatedModels = __commonJS({ + "node_modules/@azure/storage-blob/dist/commonjs/generatedModels.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.WireType = exports2.mergeBinaryOptions = exports2.UnknownFieldHandler = void 0; - var UnknownFieldHandler; - (function(UnknownFieldHandler2) { - UnknownFieldHandler2.symbol = /* @__PURE__ */ Symbol.for("protobuf-ts/unknown"); - UnknownFieldHandler2.onRead = (typeName, message, fieldNo, wireType, data) => { - let container = is(message) ? 
message[UnknownFieldHandler2.symbol] : message[UnknownFieldHandler2.symbol] = []; - container.push({ no: fieldNo, wireType, data }); - }; - UnknownFieldHandler2.onWrite = (typeName, message, writer) => { - for (let { no, wireType, data } of UnknownFieldHandler2.list(message)) - writer.tag(no, wireType).raw(data); - }; - UnknownFieldHandler2.list = (message, fieldNo) => { - if (is(message)) { - let all = message[UnknownFieldHandler2.symbol]; - return fieldNo ? all.filter((uf) => uf.no == fieldNo) : all; - } - return []; - }; - UnknownFieldHandler2.last = (message, fieldNo) => UnknownFieldHandler2.list(message, fieldNo).slice(-1)[0]; - const is = (message) => message && Array.isArray(message[UnknownFieldHandler2.symbol]); - })(UnknownFieldHandler = exports2.UnknownFieldHandler || (exports2.UnknownFieldHandler = {})); - function mergeBinaryOptions(a, b) { - return Object.assign(Object.assign({}, a), b); - } - exports2.mergeBinaryOptions = mergeBinaryOptions; - var WireType; - (function(WireType2) { - WireType2[WireType2["Varint"] = 0] = "Varint"; - WireType2[WireType2["Bit64"] = 1] = "Bit64"; - WireType2[WireType2["LengthDelimited"] = 2] = "LengthDelimited"; - WireType2[WireType2["StartGroup"] = 3] = "StartGroup"; - WireType2[WireType2["EndGroup"] = 4] = "EndGroup"; - WireType2[WireType2["Bit32"] = 5] = "Bit32"; - })(WireType = exports2.WireType || (exports2.WireType = {})); + exports2.KnownEncryptionAlgorithmType = void 0; + var KnownEncryptionAlgorithmType; + (function(KnownEncryptionAlgorithmType2) { + KnownEncryptionAlgorithmType2["AES256"] = "AES256"; + })(KnownEncryptionAlgorithmType || (exports2.KnownEncryptionAlgorithmType = KnownEncryptionAlgorithmType = {})); } }); -// node_modules/@protobuf-ts/runtime/build/commonjs/goog-varint.js -var require_goog_varint = __commonJS({ - "node_modules/@protobuf-ts/runtime/build/commonjs/goog-varint.js"(exports2) { +// node_modules/@azure/storage-blob/dist/commonjs/index.js +var require_commonjs15 = __commonJS({ + "node_modules/@azure/storage-blob/dist/commonjs/index.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.varint32read = exports2.varint32write = exports2.int64toString = exports2.int64fromString = exports2.varint64write = exports2.varint64read = void 0; - function varint64read() { - let lowBits = 0; - let highBits = 0; - for (let shift = 0; shift < 28; shift += 7) { - let b = this.buf[this.pos++]; - lowBits |= (b & 127) << shift; - if ((b & 128) == 0) { - this.assertBounds(); - return [lowBits, highBits]; - } - } - let middleByte = this.buf[this.pos++]; - lowBits |= (middleByte & 15) << 28; - highBits = (middleByte & 112) >> 4; - if ((middleByte & 128) == 0) { - this.assertBounds(); - return [lowBits, highBits]; - } - for (let shift = 3; shift <= 31; shift += 7) { - let b = this.buf[this.pos++]; - highBits |= (b & 127) << shift; - if ((b & 128) == 0) { - this.assertBounds(); - return [lowBits, highBits]; - } - } - throw new Error("invalid varint"); - } - exports2.varint64read = varint64read; - function varint64write(lo, hi, bytes) { - for (let i = 0; i < 28; i = i + 7) { - const shift = lo >>> i; - const hasNext = !(shift >>> 7 == 0 && hi == 0); - const byte = (hasNext ? shift | 128 : shift) & 255; - bytes.push(byte); - if (!hasNext) { - return; - } - } - const splitBits = lo >>> 28 & 15 | (hi & 7) << 4; - const hasMoreBits = !(hi >> 3 == 0); - bytes.push((hasMoreBits ? 
splitBits | 128 : splitBits) & 255); - if (!hasMoreBits) { - return; - } - for (let i = 3; i < 31; i = i + 7) { - const shift = hi >>> i; - const hasNext = !(shift >>> 7 == 0); - const byte = (hasNext ? shift | 128 : shift) & 255; - bytes.push(byte); - if (!hasNext) { - return; - } - } - bytes.push(hi >>> 31 & 1); - } - exports2.varint64write = varint64write; - var TWO_PWR_32_DBL2 = (1 << 16) * (1 << 16); - function int64fromString(dec) { - let minus = dec[0] == "-"; - if (minus) - dec = dec.slice(1); - const base = 1e6; - let lowBits = 0; - let highBits = 0; - function add1e6digit(begin, end) { - const digit1e6 = Number(dec.slice(begin, end)); - highBits *= base; - lowBits = lowBits * base + digit1e6; - if (lowBits >= TWO_PWR_32_DBL2) { - highBits = highBits + (lowBits / TWO_PWR_32_DBL2 | 0); - lowBits = lowBits % TWO_PWR_32_DBL2; - } - } - add1e6digit(-24, -18); - add1e6digit(-18, -12); - add1e6digit(-12, -6); - add1e6digit(-6); - return [minus, lowBits, highBits]; - } - exports2.int64fromString = int64fromString; - function int64toString(bitsLow, bitsHigh) { - if (bitsHigh >>> 0 <= 2097151) { - return "" + (TWO_PWR_32_DBL2 * bitsHigh + (bitsLow >>> 0)); - } - let low = bitsLow & 16777215; - let mid = (bitsLow >>> 24 | bitsHigh << 8) >>> 0 & 16777215; - let high = bitsHigh >> 16 & 65535; - let digitA = low + mid * 6777216 + high * 6710656; - let digitB = mid + high * 8147497; - let digitC = high * 2; - let base = 1e7; - if (digitA >= base) { - digitB += Math.floor(digitA / base); - digitA %= base; - } - if (digitB >= base) { - digitC += Math.floor(digitB / base); - digitB %= base; - } - function decimalFrom1e7(digit1e7, needLeadingZeros) { - let partial = digit1e7 ? String(digit1e7) : ""; - if (needLeadingZeros) { - return "0000000".slice(partial.length) + partial; - } - return partial; - } - return decimalFrom1e7( - digitC, - /*needLeadingZeros=*/ - 0 - ) + decimalFrom1e7( - digitB, - /*needLeadingZeros=*/ - digitC - ) + // If the final 1e7 digit didn't need leading zeros, we would have - // returned via the trivial code path at the top. 
- decimalFrom1e7( - digitA, - /*needLeadingZeros=*/ - 1 - ); - } - exports2.int64toString = int64toString; - function varint32write(value, bytes) { - if (value >= 0) { - while (value > 127) { - bytes.push(value & 127 | 128); - value = value >>> 7; - } - bytes.push(value); - } else { - for (let i = 0; i < 9; i++) { - bytes.push(value & 127 | 128); - value = value >> 7; - } - bytes.push(1); - } - } - exports2.varint32write = varint32write; - function varint32read() { - let b = this.buf[this.pos++]; - let result = b & 127; - if ((b & 128) == 0) { - this.assertBounds(); - return result; - } - b = this.buf[this.pos++]; - result |= (b & 127) << 7; - if ((b & 128) == 0) { - this.assertBounds(); - return result; - } - b = this.buf[this.pos++]; - result |= (b & 127) << 14; - if ((b & 128) == 0) { - this.assertBounds(); - return result; - } - b = this.buf[this.pos++]; - result |= (b & 127) << 21; - if ((b & 128) == 0) { - this.assertBounds(); - return result; - } - b = this.buf[this.pos++]; - result |= (b & 15) << 28; - for (let readBytes = 5; (b & 128) !== 0 && readBytes < 10; readBytes++) - b = this.buf[this.pos++]; - if ((b & 128) != 0) - throw new Error("invalid varint"); - this.assertBounds(); - return result >>> 0; - } - exports2.varint32read = varint32read; + exports2.logger = exports2.RestError = exports2.BaseRequestPolicy = exports2.StorageOAuthScopes = exports2.newPipeline = exports2.isPipelineLike = exports2.Pipeline = exports2.getBlobServiceAccountAudience = exports2.StorageBlobAudience = exports2.PremiumPageBlobTier = exports2.BlockBlobTier = exports2.generateBlobSASQueryParameters = exports2.generateAccountSASQueryParameters = void 0; + var tslib_1 = (init_tslib_es6(), __toCommonJS(tslib_es6_exports)); + var core_rest_pipeline_1 = require_commonjs6(); + Object.defineProperty(exports2, "RestError", { enumerable: true, get: function() { + return core_rest_pipeline_1.RestError; + } }); + tslib_1.__exportStar(require_BlobServiceClient(), exports2); + tslib_1.__exportStar(require_Clients(), exports2); + tslib_1.__exportStar(require_ContainerClient(), exports2); + tslib_1.__exportStar(require_BlobLeaseClient(), exports2); + tslib_1.__exportStar(require_AccountSASPermissions(), exports2); + tslib_1.__exportStar(require_AccountSASResourceTypes(), exports2); + tslib_1.__exportStar(require_AccountSASServices(), exports2); + var AccountSASSignatureValues_js_1 = require_AccountSASSignatureValues(); + Object.defineProperty(exports2, "generateAccountSASQueryParameters", { enumerable: true, get: function() { + return AccountSASSignatureValues_js_1.generateAccountSASQueryParameters; + } }); + tslib_1.__exportStar(require_BlobBatch(), exports2); + tslib_1.__exportStar(require_BlobBatchClient(), exports2); + tslib_1.__exportStar(require_BatchResponse(), exports2); + tslib_1.__exportStar(require_BlobSASPermissions(), exports2); + var BlobSASSignatureValues_js_1 = require_BlobSASSignatureValues(); + Object.defineProperty(exports2, "generateBlobSASQueryParameters", { enumerable: true, get: function() { + return BlobSASSignatureValues_js_1.generateBlobSASQueryParameters; + } }); + tslib_1.__exportStar(require_StorageBrowserPolicyFactory2(), exports2); + tslib_1.__exportStar(require_ContainerSASPermissions(), exports2); + tslib_1.__exportStar(require_AnonymousCredential(), exports2); + tslib_1.__exportStar(require_Credential(), exports2); + tslib_1.__exportStar(require_StorageSharedKeyCredential(), exports2); + var models_js_1 = require_models2(); + Object.defineProperty(exports2, "BlockBlobTier", { 
enumerable: true, get: function() { + return models_js_1.BlockBlobTier; + } }); + Object.defineProperty(exports2, "PremiumPageBlobTier", { enumerable: true, get: function() { + return models_js_1.PremiumPageBlobTier; + } }); + Object.defineProperty(exports2, "StorageBlobAudience", { enumerable: true, get: function() { + return models_js_1.StorageBlobAudience; + } }); + Object.defineProperty(exports2, "getBlobServiceAccountAudience", { enumerable: true, get: function() { + return models_js_1.getBlobServiceAccountAudience; + } }); + var Pipeline_js_1 = require_Pipeline(); + Object.defineProperty(exports2, "Pipeline", { enumerable: true, get: function() { + return Pipeline_js_1.Pipeline; + } }); + Object.defineProperty(exports2, "isPipelineLike", { enumerable: true, get: function() { + return Pipeline_js_1.isPipelineLike; + } }); + Object.defineProperty(exports2, "newPipeline", { enumerable: true, get: function() { + return Pipeline_js_1.newPipeline; + } }); + Object.defineProperty(exports2, "StorageOAuthScopes", { enumerable: true, get: function() { + return Pipeline_js_1.StorageOAuthScopes; + } }); + tslib_1.__exportStar(require_StorageRetryPolicyFactory(), exports2); + var RequestPolicy_js_1 = require_RequestPolicy(); + Object.defineProperty(exports2, "BaseRequestPolicy", { enumerable: true, get: function() { + return RequestPolicy_js_1.BaseRequestPolicy; + } }); + tslib_1.__exportStar(require_AnonymousCredentialPolicy(), exports2); + tslib_1.__exportStar(require_CredentialPolicy(), exports2); + tslib_1.__exportStar(require_StorageRetryPolicyFactory(), exports2); + tslib_1.__exportStar(require_StorageSharedKeyCredentialPolicy(), exports2); + tslib_1.__exportStar(require_SASQueryParameters(), exports2); + tslib_1.__exportStar(require_generatedModels(), exports2); + var log_js_1 = require_log5(); + Object.defineProperty(exports2, "logger", { enumerable: true, get: function() { + return log_js_1.logger; + } }); } }); -// node_modules/@protobuf-ts/runtime/build/commonjs/pb-long.js -var require_pb_long = __commonJS({ - "node_modules/@protobuf-ts/runtime/build/commonjs/pb-long.js"(exports2) { +// node_modules/@actions/cache/lib/internal/shared/errors.js +var require_errors3 = __commonJS({ + "node_modules/@actions/cache/lib/internal/shared/errors.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.PbLong = exports2.PbULong = exports2.detectBi = void 0; - var goog_varint_1 = require_goog_varint(); - var BI; - function detectBi() { - const dv = new DataView(new ArrayBuffer(8)); - const ok = globalThis.BigInt !== void 0 && typeof dv.getBigInt64 === "function" && typeof dv.getBigUint64 === "function" && typeof dv.setBigInt64 === "function" && typeof dv.setBigUint64 === "function"; - BI = ok ? { - MIN: BigInt("-9223372036854775808"), - MAX: BigInt("9223372036854775807"), - UMIN: BigInt("0"), - UMAX: BigInt("18446744073709551615"), - C: BigInt, - V: dv - } : void 0; - } - exports2.detectBi = detectBi; - detectBi(); - function assertBi(bi) { - if (!bi) - throw new Error("BigInt unavailable, see https://github.com/timostamm/protobuf-ts/blob/v1.0.8/MANUAL.md#bigint-support"); - } - var RE_DECIMAL_STR = /^-?[0-9]+$/; - var TWO_PWR_32_DBL2 = 4294967296; - var HALF_2_PWR_32 = 2147483648; - var SharedPbLong = class { - /** - * Create a new instance with the given bits. - */ - constructor(lo, hi) { - this.lo = lo | 0; - this.hi = hi | 0; - } - /** - * Is this instance equal to 0? 
- */ - isZero() { - return this.lo == 0 && this.hi == 0; - } - /** - * Convert to a native number. - */ - toNumber() { - let result = this.hi * TWO_PWR_32_DBL2 + (this.lo >>> 0); - if (!Number.isSafeInteger(result)) - throw new Error("cannot convert to safe number"); - return result; + exports2.RateLimitError = exports2.UsageError = exports2.NetworkError = exports2.GHESNotSupportedError = exports2.CacheNotFoundError = exports2.InvalidResponseError = exports2.FilesNotFoundError = void 0; + var FilesNotFoundError = class extends Error { + constructor(files = []) { + let message = "No files were found to upload"; + if (files.length > 0) { + message += `: ${files.join(", ")}`; + } + super(message); + this.files = files; + this.name = "FilesNotFoundError"; } }; - var PbULong = class _PbULong extends SharedPbLong { - /** - * Create instance from a `string`, `number` or `bigint`. - */ - static from(value) { - if (BI) - switch (typeof value) { - case "string": - if (value == "0") - return this.ZERO; - if (value == "") - throw new Error("string is no integer"); - value = BI.C(value); - case "number": - if (value === 0) - return this.ZERO; - value = BI.C(value); - case "bigint": - if (!value) - return this.ZERO; - if (value < BI.UMIN) - throw new Error("signed value for ulong"); - if (value > BI.UMAX) - throw new Error("ulong too large"); - BI.V.setBigUint64(0, value, true); - return new _PbULong(BI.V.getInt32(0, true), BI.V.getInt32(4, true)); - } - else - switch (typeof value) { - case "string": - if (value == "0") - return this.ZERO; - value = value.trim(); - if (!RE_DECIMAL_STR.test(value)) - throw new Error("string is no integer"); - let [minus, lo, hi] = goog_varint_1.int64fromString(value); - if (minus) - throw new Error("signed value for ulong"); - return new _PbULong(lo, hi); - case "number": - if (value == 0) - return this.ZERO; - if (!Number.isSafeInteger(value)) - throw new Error("number is no integer"); - if (value < 0) - throw new Error("signed value for ulong"); - return new _PbULong(value, value / TWO_PWR_32_DBL2); - } - throw new Error("unknown value " + typeof value); - } - /** - * Convert to decimal string. - */ - toString() { - return BI ? this.toBigInt().toString() : goog_varint_1.int64toString(this.lo, this.hi); + exports2.FilesNotFoundError = FilesNotFoundError; + var InvalidResponseError = class extends Error { + constructor(message) { + super(message); + this.name = "InvalidResponseError"; } - /** - * Convert to native bigint. - */ - toBigInt() { - assertBi(BI); - BI.V.setInt32(0, this.lo, true); - BI.V.setInt32(4, this.hi, true); - return BI.V.getBigUint64(0, true); + }; + exports2.InvalidResponseError = InvalidResponseError; + var CacheNotFoundError = class extends Error { + constructor(message = "Cache not found") { + super(message); + this.name = "CacheNotFoundError"; } }; - exports2.PbULong = PbULong; - PbULong.ZERO = new PbULong(0, 0); - var PbLong = class _PbLong extends SharedPbLong { - /** - * Create instance from a `string`, `number` or `bigint`. 
- */ - static from(value) { - if (BI) - switch (typeof value) { - case "string": - if (value == "0") - return this.ZERO; - if (value == "") - throw new Error("string is no integer"); - value = BI.C(value); - case "number": - if (value === 0) - return this.ZERO; - value = BI.C(value); - case "bigint": - if (!value) - return this.ZERO; - if (value < BI.MIN) - throw new Error("signed long too small"); - if (value > BI.MAX) - throw new Error("signed long too large"); - BI.V.setBigInt64(0, value, true); - return new _PbLong(BI.V.getInt32(0, true), BI.V.getInt32(4, true)); - } - else - switch (typeof value) { - case "string": - if (value == "0") - return this.ZERO; - value = value.trim(); - if (!RE_DECIMAL_STR.test(value)) - throw new Error("string is no integer"); - let [minus, lo, hi] = goog_varint_1.int64fromString(value); - if (minus) { - if (hi > HALF_2_PWR_32 || hi == HALF_2_PWR_32 && lo != 0) - throw new Error("signed long too small"); - } else if (hi >= HALF_2_PWR_32) - throw new Error("signed long too large"); - let pbl = new _PbLong(lo, hi); - return minus ? pbl.negate() : pbl; - case "number": - if (value == 0) - return this.ZERO; - if (!Number.isSafeInteger(value)) - throw new Error("number is no integer"); - return value > 0 ? new _PbLong(value, value / TWO_PWR_32_DBL2) : new _PbLong(-value, -value / TWO_PWR_32_DBL2).negate(); - } - throw new Error("unknown value " + typeof value); - } - /** - * Do we have a minus sign? - */ - isNegative() { - return (this.hi & HALF_2_PWR_32) !== 0; + exports2.CacheNotFoundError = CacheNotFoundError; + var GHESNotSupportedError = class extends Error { + constructor(message = "@actions/cache v4.1.4+, actions/cache/save@v4+ and actions/cache/restore@v4+ are not currently supported on GHES.") { + super(message); + this.name = "GHESNotSupportedError"; } - /** - * Negate two's complement. - * Invert all the bits and add one to the result. - */ - negate() { - let hi = ~this.hi, lo = this.lo; - if (lo) - lo = ~lo + 1; - else - hi += 1; - return new _PbLong(lo, hi); + }; + exports2.GHESNotSupportedError = GHESNotSupportedError; + var NetworkError = class extends Error { + constructor(code) { + const message = `Unable to make request: ${code} +If you are using self-hosted runners, please make sure your runner has access to all GitHub endpoints: https://docs.github.com/en/actions/hosting-your-own-runners/managing-self-hosted-runners/about-self-hosted-runners#communication-between-self-hosted-runners-and-github`; + super(message); + this.code = code; + this.name = "NetworkError"; } - /** - * Convert to decimal string. - */ - toString() { - if (BI) - return this.toBigInt().toString(); - if (this.isNegative()) { - let n = this.negate(); - return "-" + goog_varint_1.int64toString(n.lo, n.hi); - } - return goog_varint_1.int64toString(this.lo, this.hi); + }; + exports2.NetworkError = NetworkError; + NetworkError.isNetworkErrorCode = (code) => { + if (!code) + return false; + return [ + "ECONNRESET", + "ENOTFOUND", + "ETIMEDOUT", + "ECONNREFUSED", + "EHOSTUNREACH" + ].includes(code); + }; + var UsageError = class extends Error { + constructor() { + const message = `Cache storage quota has been hit. Unable to upload any new cache entries. Usage is recalculated every 6-12 hours. +More info on storage limits: https://docs.github.com/en/billing/managing-billing-for-github-actions/about-billing-for-github-actions#calculating-minute-and-storage-spending`; + super(message); + this.name = "UsageError"; } - /** - * Convert to native bigint. 
- */ - toBigInt() { - assertBi(BI); - BI.V.setInt32(0, this.lo, true); - BI.V.setInt32(4, this.hi, true); - return BI.V.getBigInt64(0, true); + }; + exports2.UsageError = UsageError; + UsageError.isUsageErrorMessage = (msg) => { + if (!msg) + return false; + return msg.includes("insufficient usage"); + }; + var RateLimitError = class extends Error { + constructor(message) { + super(message); + this.name = "RateLimitError"; } }; - exports2.PbLong = PbLong; - PbLong.ZERO = new PbLong(0, 0); + exports2.RateLimitError = RateLimitError; } }); -// node_modules/@protobuf-ts/runtime/build/commonjs/binary-reader.js -var require_binary_reader = __commonJS({ - "node_modules/@protobuf-ts/runtime/build/commonjs/binary-reader.js"(exports2) { +// node_modules/@actions/cache/lib/internal/uploadUtils.js +var require_uploadUtils = __commonJS({ + "node_modules/@actions/cache/lib/internal/uploadUtils.js"(exports2) { "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.BinaryReader = exports2.binaryReadOptions = void 0; - var binary_format_contract_1 = require_binary_format_contract(); - var pb_long_1 = require_pb_long(); - var goog_varint_1 = require_goog_varint(); - var defaultsRead = { - readUnknownField: true, - readerFactory: (bytes) => new BinaryReader(bytes) - }; - function binaryReadOptions(options) { - return options ? Object.assign(Object.assign({}, defaultsRead), options) : defaultsRead; - } - exports2.binaryReadOptions = binaryReadOptions; - var BinaryReader = class { - constructor(buf, textDecoder) { - this.varint64 = goog_varint_1.varint64read; - this.uint32 = goog_varint_1.varint32read; - this.buf = buf; - this.len = buf.length; - this.pos = 0; - this.view = new DataView(buf.buffer, buf.byteOffset, buf.byteLength); - this.textDecoder = textDecoder !== null && textDecoder !== void 0 ? textDecoder : new TextDecoder("utf-8", { - fatal: true, - ignoreBOM: true - }); - } - /** - * Reads a tag - field number and wire type. - */ - tag() { - let tag = this.uint32(), fieldNo = tag >>> 3, wireType = tag & 7; - if (fieldNo <= 0 || wireType < 0 || wireType > 5) - throw new Error("illegal tag: field no " + fieldNo + " wire type " + wireType); - return [fieldNo, wireType]; + var __createBinding2 = exports2 && exports2.__createBinding || (Object.create ? (function(o, m, k, k2) { + if (k2 === void 0) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { + return m[k]; + } }; } - /** - * Skip one element on the wire and return the skipped data. - * Supports WireType.StartGroup since v2.0.0-alpha.23. 
- */ - skip(wireType) { - let start = this.pos; - switch (wireType) { - case binary_format_contract_1.WireType.Varint: - while (this.buf[this.pos++] & 128) { - } - break; - case binary_format_contract_1.WireType.Bit64: - this.pos += 4; - case binary_format_contract_1.WireType.Bit32: - this.pos += 4; - break; - case binary_format_contract_1.WireType.LengthDelimited: - let len = this.uint32(); - this.pos += len; - break; - case binary_format_contract_1.WireType.StartGroup: - let t; - while ((t = this.tag()[1]) !== binary_format_contract_1.WireType.EndGroup) { - this.skip(t); - } - break; - default: - throw new Error("cant skip wire type " + wireType); + Object.defineProperty(o, k2, desc); + }) : (function(o, m, k, k2) { + if (k2 === void 0) k2 = k; + o[k2] = m[k]; + })); + var __setModuleDefault2 = exports2 && exports2.__setModuleDefault || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); + }) : function(o, v) { + o["default"] = v; + }); + var __importStar2 = exports2 && exports2.__importStar || /* @__PURE__ */ (function() { + var ownKeys2 = function(o) { + ownKeys2 = Object.getOwnPropertyNames || function(o2) { + var ar = []; + for (var k in o2) if (Object.prototype.hasOwnProperty.call(o2, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys2(o); + }; + return function(mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) { + for (var k = ownKeys2(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding2(result, mod, k[i]); } - this.assertBounds(); - return this.buf.subarray(start, this.pos); - } - /** - * Throws error if position in byte array is out of range. - */ - assertBounds() { - if (this.pos > this.len) - throw new RangeError("premature EOF"); - } - /** - * Read a `int32` field, a signed 32 bit varint. - */ - int32() { - return this.uint32() | 0; - } - /** - * Read a `sint32` field, a signed, zigzag-encoded 32-bit varint. - */ - sint32() { - let zze = this.uint32(); - return zze >>> 1 ^ -(zze & 1); + __setModuleDefault2(result, mod); + return result; + }; + })(); + var __awaiter2 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { + function adopt(value) { + return value instanceof P ? value : new P(function(resolve6) { + resolve6(value); + }); } - /** - * Read a `int64` field, a signed 64-bit varint. - */ - int64() { - return new pb_long_1.PbLong(...this.varint64()); + return new (P || (P = Promise))(function(resolve6, reject) { + function fulfilled(value) { + try { + step(generator.next(value)); + } catch (e) { + reject(e); + } + } + function rejected(value) { + try { + step(generator["throw"](value)); + } catch (e) { + reject(e); + } + } + function step(result) { + result.done ? resolve6(result.value) : adopt(result.value).then(fulfilled, rejected); + } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); + }; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.UploadProgress = void 0; + exports2.uploadCacheArchiveSDK = uploadCacheArchiveSDK; + var core14 = __importStar2(require_core()); + var storage_blob_1 = require_commonjs15(); + var errors_1 = require_errors3(); + var UploadProgress = class { + constructor(contentLength) { + this.contentLength = contentLength; + this.sentBytes = 0; + this.displayedComplete = false; + this.startTime = Date.now(); } /** - * Read a `uint64` field, an unsigned 64-bit varint. 
+ * Sets the number of bytes sent + * + * @param sentBytes the number of bytes sent */ - uint64() { - return new pb_long_1.PbULong(...this.varint64()); + setSentBytes(sentBytes) { + this.sentBytes = sentBytes; } /** - * Read a `sint64` field, a signed, zig-zag-encoded 64-bit varint. + * Returns the total number of bytes transferred. */ - sint64() { - let [lo, hi] = this.varint64(); - let s = -(lo & 1); - lo = (lo >>> 1 | (hi & 1) << 31) ^ s; - hi = hi >>> 1 ^ s; - return new pb_long_1.PbLong(lo, hi); + getTransferredBytes() { + return this.sentBytes; } /** - * Read a `bool` field, a variant. + * Returns true if the upload is complete. */ - bool() { - let [lo, hi] = this.varint64(); - return lo !== 0 || hi !== 0; + isDone() { + return this.getTransferredBytes() === this.contentLength; } /** - * Read a `fixed32` field, an unsigned, fixed-length 32-bit integer. + * Prints the current upload stats. Once the upload completes, this will print one + * last line and then stop. */ - fixed32() { - return this.view.getUint32((this.pos += 4) - 4, true); + display() { + if (this.displayedComplete) { + return; + } + const transferredBytes = this.sentBytes; + const percentage = (100 * (transferredBytes / this.contentLength)).toFixed(1); + const elapsedTime = Date.now() - this.startTime; + const uploadSpeed = (transferredBytes / (1024 * 1024) / (elapsedTime / 1e3)).toFixed(1); + core14.info(`Sent ${transferredBytes} of ${this.contentLength} (${percentage}%), ${uploadSpeed} MBs/sec`); + if (this.isDone()) { + this.displayedComplete = true; + } } /** - * Read a `sfixed32` field, a signed, fixed-length 32-bit integer. + * Returns a function used to handle TransferProgressEvents. */ - sfixed32() { - return this.view.getInt32((this.pos += 4) - 4, true); + onProgress() { + return (progress) => { + this.setSentBytes(progress.loadedBytes); + }; } /** - * Read a `fixed64` field, an unsigned, fixed-length 64 bit integer. + * Starts the timer that displays the stats. + * + * @param delayInMs the delay between each write */ - fixed64() { - return new pb_long_1.PbULong(this.sfixed32(), this.sfixed32()); + startDisplayTimer(delayInMs = 1e3) { + const displayCallback = () => { + this.display(); + if (!this.isDone()) { + this.timeoutHandle = setTimeout(displayCallback, delayInMs); + } + }; + this.timeoutHandle = setTimeout(displayCallback, delayInMs); } /** - * Read a `fixed64` field, a signed, fixed-length 64-bit integer. + * Stops the timer that displays the stats. As this typically indicates the upload + * is complete, this will display one last line, unless the last line has already + * been written. */ - sfixed64() { - return new pb_long_1.PbLong(this.sfixed32(), this.sfixed32()); + stopDisplayTimer() { + if (this.timeoutHandle) { + clearTimeout(this.timeoutHandle); + this.timeoutHandle = void 0; + } + this.display(); } - /** - * Read a `float` field, 32-bit floating point number. - */ - float() { - return this.view.getFloat32((this.pos += 4) - 4, true); + }; + exports2.UploadProgress = UploadProgress; + function uploadCacheArchiveSDK(signedUploadURL, archivePath, options) { + return __awaiter2(this, void 0, void 0, function* () { + var _a; + const blobClient = new storage_blob_1.BlobClient(signedUploadURL); + const blockBlobClient = blobClient.getBlockBlobClient(); + const uploadProgress = new UploadProgress((_a = options === null || options === void 0 ? void 0 : options.archiveSizeBytes) !== null && _a !== void 0 ? _a : 0); + const uploadOptions = { + blockSize: options === null || options === void 0 ? 
void 0 : options.uploadChunkSize, + concurrency: options === null || options === void 0 ? void 0 : options.uploadConcurrency, + // maximum number of parallel transfer workers + maxSingleShotSize: 128 * 1024 * 1024, + // 128 MiB initial transfer size + onProgress: uploadProgress.onProgress() + }; + try { + uploadProgress.startDisplayTimer(); + core14.debug(`BlobClient: ${blobClient.name}:${blobClient.accountName}:${blobClient.containerName}`); + const response = yield blockBlobClient.uploadFile(archivePath, uploadOptions); + if (response._response.status >= 400) { + throw new errors_1.InvalidResponseError(`uploadCacheArchiveSDK: upload failed with status code ${response._response.status}`); + } + return response; + } catch (error3) { + core14.warning(`uploadCacheArchiveSDK: internal error uploading cache archive: ${error3.message}`); + throw error3; + } finally { + uploadProgress.stopDisplayTimer(); + } + }); + } + } +}); + +// node_modules/@actions/cache/lib/internal/requestUtils.js +var require_requestUtils = __commonJS({ + "node_modules/@actions/cache/lib/internal/requestUtils.js"(exports2) { + "use strict"; + var __createBinding2 = exports2 && exports2.__createBinding || (Object.create ? (function(o, m, k, k2) { + if (k2 === void 0) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { + return m[k]; + } }; } - /** - * Read a `double` field, a 64-bit floating point number. - */ - double() { - return this.view.getFloat64((this.pos += 8) - 8, true); + Object.defineProperty(o, k2, desc); + }) : (function(o, m, k, k2) { + if (k2 === void 0) k2 = k; + o[k2] = m[k]; + })); + var __setModuleDefault2 = exports2 && exports2.__setModuleDefault || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); + }) : function(o, v) { + o["default"] = v; + }); + var __importStar2 = exports2 && exports2.__importStar || /* @__PURE__ */ (function() { + var ownKeys2 = function(o) { + ownKeys2 = Object.getOwnPropertyNames || function(o2) { + var ar = []; + for (var k in o2) if (Object.prototype.hasOwnProperty.call(o2, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys2(o); + }; + return function(mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) { + for (var k = ownKeys2(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding2(result, mod, k[i]); + } + __setModuleDefault2(result, mod); + return result; + }; + })(); + var __awaiter2 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { + function adopt(value) { + return value instanceof P ? value : new P(function(resolve6) { + resolve6(value); + }); } - /** - * Read a `bytes` field, length-delimited arbitrary data. - */ - bytes() { - let len = this.uint32(); - let start = this.pos; - this.pos += len; - this.assertBounds(); - return this.buf.subarray(start, start + len); + return new (P || (P = Promise))(function(resolve6, reject) { + function fulfilled(value) { + try { + step(generator.next(value)); + } catch (e) { + reject(e); + } + } + function rejected(value) { + try { + step(generator["throw"](value)); + } catch (e) { + reject(e); + } + } + function step(result) { + result.done ? 
resolve6(result.value) : adopt(result.value).then(fulfilled, rejected); + } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); + }; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.isSuccessStatusCode = isSuccessStatusCode; + exports2.isServerErrorStatusCode = isServerErrorStatusCode; + exports2.isRetryableStatusCode = isRetryableStatusCode; + exports2.retry = retry2; + exports2.retryTypedResponse = retryTypedResponse; + exports2.retryHttpClientResponse = retryHttpClientResponse; + var core14 = __importStar2(require_core()); + var http_client_1 = require_lib(); + var constants_1 = require_constants12(); + function isSuccessStatusCode(statusCode) { + if (!statusCode) { + return false; } - /** - * Read a `string` field, length-delimited data converted to UTF-8 text. - */ - string() { - return this.textDecoder.decode(this.bytes()); + return statusCode >= 200 && statusCode < 300; + } + function isServerErrorStatusCode(statusCode) { + if (!statusCode) { + return true; } - }; - exports2.BinaryReader = BinaryReader; - } -}); - -// node_modules/@protobuf-ts/runtime/build/commonjs/assert.js -var require_assert = __commonJS({ - "node_modules/@protobuf-ts/runtime/build/commonjs/assert.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.assertFloat32 = exports2.assertUInt32 = exports2.assertInt32 = exports2.assertNever = exports2.assert = void 0; - function assert(condition, msg) { - if (!condition) { - throw new Error(msg); + return statusCode >= 500; + } + function isRetryableStatusCode(statusCode) { + if (!statusCode) { + return false; } + const retryableStatusCodes = [ + http_client_1.HttpCodes.BadGateway, + http_client_1.HttpCodes.ServiceUnavailable, + http_client_1.HttpCodes.GatewayTimeout + ]; + return retryableStatusCodes.includes(statusCode); } - exports2.assert = assert; - function assertNever2(value, msg) { - throw new Error(msg !== null && msg !== void 0 ? 
msg : "Unexpected object: " + value); + function sleep(milliseconds) { + return __awaiter2(this, void 0, void 0, function* () { + return new Promise((resolve6) => setTimeout(resolve6, milliseconds)); + }); } - exports2.assertNever = assertNever2; - var FLOAT32_MAX = 34028234663852886e22; - var FLOAT32_MIN = -34028234663852886e22; - var UINT32_MAX = 4294967295; - var INT32_MAX = 2147483647; - var INT32_MIN = -2147483648; - function assertInt32(arg) { - if (typeof arg !== "number") - throw new Error("invalid int 32: " + typeof arg); - if (!Number.isInteger(arg) || arg > INT32_MAX || arg < INT32_MIN) - throw new Error("invalid int 32: " + arg); + function retry2(name_1, method_1, getStatusCode_1) { + return __awaiter2(this, arguments, void 0, function* (name, method, getStatusCode, maxAttempts = constants_1.DefaultRetryAttempts, delay2 = constants_1.DefaultRetryDelay, onError = void 0) { + let errorMessage = ""; + let attempt = 1; + while (attempt <= maxAttempts) { + let response = void 0; + let statusCode = void 0; + let isRetryable = false; + try { + response = yield method(); + } catch (error3) { + if (onError) { + response = onError(error3); + } + isRetryable = true; + errorMessage = error3.message; + } + if (response) { + statusCode = getStatusCode(response); + if (!isServerErrorStatusCode(statusCode)) { + return response; + } + } + if (statusCode) { + isRetryable = isRetryableStatusCode(statusCode); + errorMessage = `Cache service responded with ${statusCode}`; + } + core14.debug(`${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`); + if (!isRetryable) { + core14.debug(`${name} - Error is not retryable`); + break; + } + yield sleep(delay2); + attempt++; + } + throw Error(`${name} failed: ${errorMessage}`); + }); } - exports2.assertInt32 = assertInt32; - function assertUInt32(arg) { - if (typeof arg !== "number") - throw new Error("invalid uint 32: " + typeof arg); - if (!Number.isInteger(arg) || arg > UINT32_MAX || arg < 0) - throw new Error("invalid uint 32: " + arg); + function retryTypedResponse(name_1, method_1) { + return __awaiter2(this, arguments, void 0, function* (name, method, maxAttempts = constants_1.DefaultRetryAttempts, delay2 = constants_1.DefaultRetryDelay) { + return yield retry2( + name, + method, + (response) => response.statusCode, + maxAttempts, + delay2, + // If the error object contains the statusCode property, extract it and return + // an TypedResponse so it can be processed by the retry logic. 
+ (error3) => { + if (error3 instanceof http_client_1.HttpClientError) { + return { + statusCode: error3.statusCode, + result: null, + headers: {}, + error: error3 + }; + } else { + return void 0; + } + } + ); + }); } - exports2.assertUInt32 = assertUInt32; - function assertFloat32(arg) { - if (typeof arg !== "number") - throw new Error("invalid float 32: " + typeof arg); - if (!Number.isFinite(arg)) - return; - if (arg > FLOAT32_MAX || arg < FLOAT32_MIN) - throw new Error("invalid float 32: " + arg); + function retryHttpClientResponse(name_1, method_1) { + return __awaiter2(this, arguments, void 0, function* (name, method, maxAttempts = constants_1.DefaultRetryAttempts, delay2 = constants_1.DefaultRetryDelay) { + return yield retry2(name, method, (response) => response.message.statusCode, maxAttempts, delay2); + }); } - exports2.assertFloat32 = assertFloat32; } }); -// node_modules/@protobuf-ts/runtime/build/commonjs/binary-writer.js -var require_binary_writer = __commonJS({ - "node_modules/@protobuf-ts/runtime/build/commonjs/binary-writer.js"(exports2) { +// node_modules/@azure/abort-controller/dist/index.js +var require_dist4 = __commonJS({ + "node_modules/@azure/abort-controller/dist/index.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.BinaryWriter = exports2.binaryWriteOptions = void 0; - var pb_long_1 = require_pb_long(); - var goog_varint_1 = require_goog_varint(); - var assert_1 = require_assert(); - var defaultsWrite = { - writeUnknownFields: true, - writerFactory: () => new BinaryWriter() - }; - function binaryWriteOptions(options) { - return options ? Object.assign(Object.assign({}, defaultsWrite), options) : defaultsWrite; - } - exports2.binaryWriteOptions = binaryWriteOptions; - var BinaryWriter = class { - constructor(textEncoder) { - this.stack = []; - this.textEncoder = textEncoder !== null && textEncoder !== void 0 ? textEncoder : new TextEncoder(); - this.chunks = []; - this.buf = []; + var listenersMap = /* @__PURE__ */ new WeakMap(); + var abortedMap = /* @__PURE__ */ new WeakMap(); + var AbortSignal2 = class _AbortSignal { + constructor() { + this.onabort = null; + listenersMap.set(this, []); + abortedMap.set(this, false); } /** - * Return all bytes written and reset this writer. + * Status of whether aborted or not. + * + * @readonly */ - finish() { - this.chunks.push(new Uint8Array(this.buf)); - let len = 0; - for (let i = 0; i < this.chunks.length; i++) - len += this.chunks[i].length; - let bytes = new Uint8Array(len); - let offset = 0; - for (let i = 0; i < this.chunks.length; i++) { - bytes.set(this.chunks[i], offset); - offset += this.chunks[i].length; + get aborted() { + if (!abortedMap.has(this)) { + throw new TypeError("Expected `this` to be an instance of AbortSignal."); } - this.chunks = []; - return bytes; + return abortedMap.get(this); } /** - * Start a new fork for length-delimited data like a message - * or a packed repeated field. + * Creates a new AbortSignal instance that will never be aborted. * - * Must be joined later with `join()`. - */ - fork() { - this.stack.push({ chunks: this.chunks, buf: this.buf }); - this.chunks = []; - this.buf = []; - return this; - } - /** - * Join the last fork. Write its length and bytes, then - * return to the previous state. 
+ * @readonly */ - join() { - let chunk = this.finish(); - let prev = this.stack.pop(); - if (!prev) - throw new Error("invalid state, fork stack empty"); - this.chunks = prev.chunks; - this.buf = prev.buf; - this.uint32(chunk.byteLength); - return this.raw(chunk); + static get none() { + return new _AbortSignal(); } /** - * Writes a tag (field number and wire type). - * - * Equivalent to `uint32( (fieldNo << 3 | type) >>> 0 )`. + * Added new "abort" event listener, only support "abort" event. * - * Generated code should compute the tag ahead of time and call `uint32()`. + * @param _type - Only support "abort" event + * @param listener - The listener to be added */ - tag(fieldNo, type2) { - return this.uint32((fieldNo << 3 | type2) >>> 0); + addEventListener(_type, listener) { + if (!listenersMap.has(this)) { + throw new TypeError("Expected `this` to be an instance of AbortSignal."); + } + const listeners = listenersMap.get(this); + listeners.push(listener); } /** - * Write a chunk of raw bytes. + * Remove "abort" event listener, only support "abort" event. + * + * @param _type - Only support "abort" event + * @param listener - The listener to be removed */ - raw(chunk) { - if (this.buf.length) { - this.chunks.push(new Uint8Array(this.buf)); - this.buf = []; + removeEventListener(_type, listener) { + if (!listenersMap.has(this)) { + throw new TypeError("Expected `this` to be an instance of AbortSignal."); + } + const listeners = listenersMap.get(this); + const index = listeners.indexOf(listener); + if (index > -1) { + listeners.splice(index, 1); } - this.chunks.push(chunk); - return this; } /** - * Write a `uint32` value, an unsigned 32 bit varint. + * Dispatches a synthetic event to the AbortSignal. */ - uint32(value) { - assert_1.assertUInt32(value); - while (value > 127) { - this.buf.push(value & 127 | 128); - value = value >>> 7; + dispatchEvent(_event) { + throw new Error("This is a stub dispatchEvent implementation that should not be used. It only exists for type-checking purposes."); + } + }; + function abortSignal(signal) { + if (signal.aborted) { + return; + } + if (signal.onabort) { + signal.onabort.call(signal); + } + const listeners = listenersMap.get(signal); + if (listeners) { + listeners.slice().forEach((listener) => { + listener.call(signal, { type: "abort" }); + }); + } + abortedMap.set(signal, true); + } + var AbortError = class extends Error { + constructor(message) { + super(message); + this.name = "AbortError"; + } + }; + var AbortController2 = class { + // eslint-disable-next-line @typescript-eslint/explicit-module-boundary-types + constructor(parentSignals) { + this._signal = new AbortSignal2(); + if (!parentSignals) { + return; + } + if (!Array.isArray(parentSignals)) { + parentSignals = arguments; + } + for (const parentSignal of parentSignals) { + if (parentSignal.aborted) { + this.abort(); + } else { + parentSignal.addEventListener("abort", () => { + this.abort(); + }); + } } - this.buf.push(value); - return this; } /** - * Write a `int32` value, a signed 32 bit varint. + * The AbortSignal associated with this controller that will signal aborted + * when the abort method is called on this controller. + * + * @readonly */ - int32(value) { - assert_1.assertInt32(value); - goog_varint_1.varint32write(value, this.buf); - return this; + get signal() { + return this._signal; } /** - * Write a `bool` value, a variant. + * Signal that any operations passed this controller's associated abort signal + * to cancel any remaining work and throw an `AbortError`. 
*/ - bool(value) { - this.buf.push(value ? 1 : 0); - return this; + abort() { + abortSignal(this._signal); } /** - * Write a `bytes` value, length-delimited arbitrary data. + * Creates a new AbortSignal instance that will abort after the provided ms. + * @param ms - Elapsed time in milliseconds to trigger an abort. */ - bytes(value) { - this.uint32(value.byteLength); - return this.raw(value); + static timeout(ms) { + const signal = new AbortSignal2(); + const timer = setTimeout(abortSignal, ms, signal); + if (typeof timer.unref === "function") { + timer.unref(); + } + return signal; } - /** - * Write a `string` value, length-delimited data converted to UTF-8 text. - */ - string(value) { - let chunk = this.textEncoder.encode(value); - this.uint32(chunk.byteLength); - return this.raw(chunk); + }; + exports2.AbortController = AbortController2; + exports2.AbortError = AbortError; + exports2.AbortSignal = AbortSignal2; + } +}); + +// node_modules/@actions/cache/lib/internal/downloadUtils.js +var require_downloadUtils = __commonJS({ + "node_modules/@actions/cache/lib/internal/downloadUtils.js"(exports2) { + "use strict"; + var __createBinding2 = exports2 && exports2.__createBinding || (Object.create ? (function(o, m, k, k2) { + if (k2 === void 0) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { + return m[k]; + } }; } - /** - * Write a `float` value, 32-bit floating point number. - */ - float(value) { - assert_1.assertFloat32(value); - let chunk = new Uint8Array(4); - new DataView(chunk.buffer).setFloat32(0, value, true); - return this.raw(chunk); + Object.defineProperty(o, k2, desc); + }) : (function(o, m, k, k2) { + if (k2 === void 0) k2 = k; + o[k2] = m[k]; + })); + var __setModuleDefault2 = exports2 && exports2.__setModuleDefault || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); + }) : function(o, v) { + o["default"] = v; + }); + var __importStar2 = exports2 && exports2.__importStar || /* @__PURE__ */ (function() { + var ownKeys2 = function(o) { + ownKeys2 = Object.getOwnPropertyNames || function(o2) { + var ar = []; + for (var k in o2) if (Object.prototype.hasOwnProperty.call(o2, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys2(o); + }; + return function(mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) { + for (var k = ownKeys2(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding2(result, mod, k[i]); + } + __setModuleDefault2(result, mod); + return result; + }; + })(); + var __awaiter2 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { + function adopt(value) { + return value instanceof P ? value : new P(function(resolve6) { + resolve6(value); + }); } - /** - * Write a `double` value, a 64-bit floating point number. - */ - double(value) { - let chunk = new Uint8Array(8); - new DataView(chunk.buffer).setFloat64(0, value, true); - return this.raw(chunk); + return new (P || (P = Promise))(function(resolve6, reject) { + function fulfilled(value) { + try { + step(generator.next(value)); + } catch (e) { + reject(e); + } + } + function rejected(value) { + try { + step(generator["throw"](value)); + } catch (e) { + reject(e); + } + } + function step(result) { + result.done ? 
resolve6(result.value) : adopt(result.value).then(fulfilled, rejected); + } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); + }; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.DownloadProgress = void 0; + exports2.downloadCacheHttpClient = downloadCacheHttpClient; + exports2.downloadCacheHttpClientConcurrent = downloadCacheHttpClientConcurrent; + exports2.downloadCacheStorageSDK = downloadCacheStorageSDK; + var core14 = __importStar2(require_core()); + var http_client_1 = require_lib(); + var storage_blob_1 = require_commonjs15(); + var buffer = __importStar2(require("buffer")); + var fs13 = __importStar2(require("fs")); + var stream2 = __importStar2(require("stream")); + var util = __importStar2(require("util")); + var utils = __importStar2(require_cacheUtils()); + var constants_1 = require_constants12(); + var requestUtils_1 = require_requestUtils(); + var abort_controller_1 = require_dist4(); + function pipeResponseToStream(response, output) { + return __awaiter2(this, void 0, void 0, function* () { + const pipeline = util.promisify(stream2.pipeline); + yield pipeline(response.message, output); + }); + } + var DownloadProgress = class { + constructor(contentLength) { + this.contentLength = contentLength; + this.segmentIndex = 0; + this.segmentSize = 0; + this.segmentOffset = 0; + this.receivedBytes = 0; + this.displayedComplete = false; + this.startTime = Date.now(); } /** - * Write a `fixed32` value, an unsigned, fixed-length 32-bit integer. + * Progress to the next segment. Only call this method when the previous segment + * is complete. + * + * @param segmentSize the length of the next segment */ - fixed32(value) { - assert_1.assertUInt32(value); - let chunk = new Uint8Array(4); - new DataView(chunk.buffer).setUint32(0, value, true); - return this.raw(chunk); + nextSegment(segmentSize) { + this.segmentOffset = this.segmentOffset + this.segmentSize; + this.segmentIndex = this.segmentIndex + 1; + this.segmentSize = segmentSize; + this.receivedBytes = 0; + core14.debug(`Downloading segment at offset ${this.segmentOffset} with length ${this.segmentSize}...`); } /** - * Write a `sfixed32` value, a signed, fixed-length 32-bit integer. + * Sets the number of bytes received for the current segment. + * + * @param receivedBytes the number of bytes received */ - sfixed32(value) { - assert_1.assertInt32(value); - let chunk = new Uint8Array(4); - new DataView(chunk.buffer).setInt32(0, value, true); - return this.raw(chunk); + setReceivedBytes(receivedBytes) { + this.receivedBytes = receivedBytes; } /** - * Write a `sint32` value, a signed, zigzag-encoded 32-bit varint. + * Returns the total number of bytes transferred. */ - sint32(value) { - assert_1.assertInt32(value); - value = (value << 1 ^ value >> 31) >>> 0; - goog_varint_1.varint32write(value, this.buf); - return this; + getTransferredBytes() { + return this.segmentOffset + this.receivedBytes; } /** - * Write a `fixed64` value, a signed, fixed-length 64-bit integer. + * Returns true if the download is complete. */ - sfixed64(value) { - let chunk = new Uint8Array(8); - let view = new DataView(chunk.buffer); - let long = pb_long_1.PbLong.from(value); - view.setInt32(0, long.lo, true); - view.setInt32(4, long.hi, true); - return this.raw(chunk); + isDone() { + return this.getTransferredBytes() === this.contentLength; } - /** - * Write a `fixed64` value, an unsigned, fixed-length 64 bit integer. 
- */ - fixed64(value) { - let chunk = new Uint8Array(8); - let view = new DataView(chunk.buffer); - let long = pb_long_1.PbULong.from(value); - view.setInt32(0, long.lo, true); - view.setInt32(4, long.hi, true); - return this.raw(chunk); + /** + * Prints the current download stats. Once the download completes, this will print one + * last line and then stop. + */ + display() { + if (this.displayedComplete) { + return; + } + const transferredBytes = this.segmentOffset + this.receivedBytes; + const percentage = (100 * (transferredBytes / this.contentLength)).toFixed(1); + const elapsedTime = Date.now() - this.startTime; + const downloadSpeed = (transferredBytes / (1024 * 1024) / (elapsedTime / 1e3)).toFixed(1); + core14.info(`Received ${transferredBytes} of ${this.contentLength} (${percentage}%), ${downloadSpeed} MBs/sec`); + if (this.isDone()) { + this.displayedComplete = true; + } } /** - * Write a `int64` value, a signed 64-bit varint. + * Returns a function used to handle TransferProgressEvents. */ - int64(value) { - let long = pb_long_1.PbLong.from(value); - goog_varint_1.varint64write(long.lo, long.hi, this.buf); - return this; + onProgress() { + return (progress) => { + this.setReceivedBytes(progress.loadedBytes); + }; } /** - * Write a `sint64` value, a signed, zig-zag-encoded 64-bit varint. + * Starts the timer that displays the stats. + * + * @param delayInMs the delay between each write */ - sint64(value) { - let long = pb_long_1.PbLong.from(value), sign = long.hi >> 31, lo = long.lo << 1 ^ sign, hi = (long.hi << 1 | long.lo >>> 31) ^ sign; - goog_varint_1.varint64write(lo, hi, this.buf); - return this; + startDisplayTimer(delayInMs = 1e3) { + const displayCallback = () => { + this.display(); + if (!this.isDone()) { + this.timeoutHandle = setTimeout(displayCallback, delayInMs); + } + }; + this.timeoutHandle = setTimeout(displayCallback, delayInMs); } /** - * Write a `uint64` value, an unsigned 64-bit varint. + * Stops the timer that displays the stats. As this typically indicates the download + * is complete, this will display one last line, unless the last line has already + * been written. */ - uint64(value) { - let long = pb_long_1.PbULong.from(value); - goog_varint_1.varint64write(long.lo, long.hi, this.buf); - return this; + stopDisplayTimer() { + if (this.timeoutHandle) { + clearTimeout(this.timeoutHandle); + this.timeoutHandle = void 0; + } + this.display(); } }; - exports2.BinaryWriter = BinaryWriter; - } -}); - -// node_modules/@protobuf-ts/runtime/build/commonjs/json-format-contract.js -var require_json_format_contract = __commonJS({ - "node_modules/@protobuf-ts/runtime/build/commonjs/json-format-contract.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.mergeJsonOptions = exports2.jsonWriteOptions = exports2.jsonReadOptions = void 0; - var defaultsWrite = { - emitDefaultValues: false, - enumAsInteger: false, - useProtoFieldName: false, - prettySpaces: 0 - }; - var defaultsRead = { - ignoreUnknownFields: false - }; - function jsonReadOptions(options) { - return options ? 
Object.assign(Object.assign({}, defaultsRead), options) : defaultsRead; + exports2.DownloadProgress = DownloadProgress; + function downloadCacheHttpClient(archiveLocation, archivePath) { + return __awaiter2(this, void 0, void 0, function* () { + const writeStream = fs13.createWriteStream(archivePath); + const httpClient = new http_client_1.HttpClient("actions/cache"); + const downloadResponse = yield (0, requestUtils_1.retryHttpClientResponse)("downloadCache", () => __awaiter2(this, void 0, void 0, function* () { + return httpClient.get(archiveLocation); + })); + downloadResponse.message.socket.setTimeout(constants_1.SocketTimeout, () => { + downloadResponse.message.destroy(); + core14.debug(`Aborting download, socket timed out after ${constants_1.SocketTimeout} ms`); + }); + yield pipeResponseToStream(downloadResponse, writeStream); + const contentLengthHeader = downloadResponse.message.headers["content-length"]; + if (contentLengthHeader) { + const expectedLength = parseInt(contentLengthHeader); + const actualLength = utils.getArchiveFileSizeInBytes(archivePath); + if (actualLength !== expectedLength) { + throw new Error(`Incomplete download. Expected file size: ${expectedLength}, actual file size: ${actualLength}`); + } + } else { + core14.debug("Unable to validate download, no Content-Length header"); + } + }); } - exports2.jsonReadOptions = jsonReadOptions; - function jsonWriteOptions(options) { - return options ? Object.assign(Object.assign({}, defaultsWrite), options) : defaultsWrite; + function downloadCacheHttpClientConcurrent(archiveLocation, archivePath, options) { + return __awaiter2(this, void 0, void 0, function* () { + var _a; + const archiveDescriptor = yield fs13.promises.open(archivePath, "w"); + const httpClient = new http_client_1.HttpClient("actions/cache", void 0, { + socketTimeout: options.timeoutInMs, + keepAlive: true + }); + try { + const res = yield (0, requestUtils_1.retryHttpClientResponse)("downloadCacheMetadata", () => __awaiter2(this, void 0, void 0, function* () { + return yield httpClient.request("HEAD", archiveLocation, null, {}); + })); + const lengthHeader = res.message.headers["content-length"]; + if (lengthHeader === void 0 || lengthHeader === null) { + throw new Error("Content-Length not found on blob response"); + } + const length = parseInt(lengthHeader); + if (Number.isNaN(length)) { + throw new Error(`Could not interpret Content-Length: ${length}`); + } + const downloads = []; + const blockSize = 4 * 1024 * 1024; + for (let offset = 0; offset < length; offset += blockSize) { + const count = Math.min(blockSize, length - offset); + downloads.push({ + offset, + promiseGetter: () => __awaiter2(this, void 0, void 0, function* () { + return yield downloadSegmentRetry(httpClient, archiveLocation, offset, count); + }) + }); + } + downloads.reverse(); + let actives = 0; + let bytesDownloaded = 0; + const progress = new DownloadProgress(length); + progress.startDisplayTimer(); + const progressFn = progress.onProgress(); + const activeDownloads = []; + let nextDownload; + const waitAndWrite = () => __awaiter2(this, void 0, void 0, function* () { + const segment = yield Promise.race(Object.values(activeDownloads)); + yield archiveDescriptor.write(segment.buffer, 0, segment.count, segment.offset); + actives--; + delete activeDownloads[segment.offset]; + bytesDownloaded += segment.count; + progressFn({ loadedBytes: bytesDownloaded }); + }); + while (nextDownload = downloads.pop()) { + activeDownloads[nextDownload.offset] = nextDownload.promiseGetter(); + 
actives++; + if (actives >= ((_a = options.downloadConcurrency) !== null && _a !== void 0 ? _a : 10)) { + yield waitAndWrite(); + } + } + while (actives > 0) { + yield waitAndWrite(); + } + } finally { + httpClient.dispose(); + yield archiveDescriptor.close(); + } + }); } - exports2.jsonWriteOptions = jsonWriteOptions; - function mergeJsonOptions(a, b) { - var _a, _b; - let c = Object.assign(Object.assign({}, a), b); - c.typeRegistry = [...(_a = a === null || a === void 0 ? void 0 : a.typeRegistry) !== null && _a !== void 0 ? _a : [], ...(_b = b === null || b === void 0 ? void 0 : b.typeRegistry) !== null && _b !== void 0 ? _b : []]; - return c; + function downloadSegmentRetry(httpClient, archiveLocation, offset, count) { + return __awaiter2(this, void 0, void 0, function* () { + const retries = 5; + let failures = 0; + while (true) { + try { + const timeout = 3e4; + const result = yield promiseWithTimeout(timeout, downloadSegment(httpClient, archiveLocation, offset, count)); + if (typeof result === "string") { + throw new Error("downloadSegmentRetry failed due to timeout"); + } + return result; + } catch (err) { + if (failures >= retries) { + throw err; + } + failures++; + } + } + }); } - exports2.mergeJsonOptions = mergeJsonOptions; - } -}); - -// node_modules/@protobuf-ts/runtime/build/commonjs/message-type-contract.js -var require_message_type_contract = __commonJS({ - "node_modules/@protobuf-ts/runtime/build/commonjs/message-type-contract.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.MESSAGE_TYPE = void 0; - exports2.MESSAGE_TYPE = /* @__PURE__ */ Symbol.for("protobuf-ts/message-type"); + function downloadSegment(httpClient, archiveLocation, offset, count) { + return __awaiter2(this, void 0, void 0, function* () { + const partRes = yield (0, requestUtils_1.retryHttpClientResponse)("downloadCachePart", () => __awaiter2(this, void 0, void 0, function* () { + return yield httpClient.get(archiveLocation, { + Range: `bytes=${offset}-${offset + count - 1}` + }); + })); + if (!partRes.readBodyBuffer) { + throw new Error("Expected HttpClientResponse to implement readBodyBuffer"); + } + return { + offset, + count, + buffer: yield partRes.readBodyBuffer() + }; + }); + } + function downloadCacheStorageSDK(archiveLocation, archivePath, options) { + return __awaiter2(this, void 0, void 0, function* () { + var _a; + const client = new storage_blob_1.BlockBlobClient(archiveLocation, void 0, { + retryOptions: { + // Override the timeout used when downloading each 4 MB chunk + // The default is 2 min / MB, which is way too slow + tryTimeoutInMs: options.timeoutInMs + } + }); + const properties = yield client.getProperties(); + const contentLength = (_a = properties.contentLength) !== null && _a !== void 0 ? 
_a : -1; + if (contentLength < 0) { + core14.debug("Unable to determine content length, downloading file with http-client..."); + yield downloadCacheHttpClient(archiveLocation, archivePath); + } else { + const maxSegmentSize = Math.min(134217728, buffer.constants.MAX_LENGTH); + const downloadProgress = new DownloadProgress(contentLength); + const fd = fs13.openSync(archivePath, "w"); + try { + downloadProgress.startDisplayTimer(); + const controller = new abort_controller_1.AbortController(); + const abortSignal = controller.signal; + while (!downloadProgress.isDone()) { + const segmentStart = downloadProgress.segmentOffset + downloadProgress.segmentSize; + const segmentSize = Math.min(maxSegmentSize, contentLength - segmentStart); + downloadProgress.nextSegment(segmentSize); + const result = yield promiseWithTimeout(options.segmentTimeoutInMs || 36e5, client.downloadToBuffer(segmentStart, segmentSize, { + abortSignal, + concurrency: options.downloadConcurrency, + onProgress: downloadProgress.onProgress() + })); + if (result === "timeout") { + controller.abort(); + throw new Error("Aborting cache download as the download time exceeded the timeout."); + } else if (Buffer.isBuffer(result)) { + fs13.writeFileSync(fd, result); + } + } + } finally { + downloadProgress.stopDisplayTimer(); + fs13.closeSync(fd); + } + } + }); + } + var promiseWithTimeout = (timeoutMs, promise) => __awaiter2(void 0, void 0, void 0, function* () { + let timeoutHandle; + const timeoutPromise = new Promise((resolve6) => { + timeoutHandle = setTimeout(() => resolve6("timeout"), timeoutMs); + }); + return Promise.race([promise, timeoutPromise]).then((result) => { + clearTimeout(timeoutHandle); + return result; + }); + }); } }); -// node_modules/@protobuf-ts/runtime/build/commonjs/lower-camel-case.js -var require_lower_camel_case = __commonJS({ - "node_modules/@protobuf-ts/runtime/build/commonjs/lower-camel-case.js"(exports2) { +// node_modules/@actions/cache/lib/options.js +var require_options = __commonJS({ + "node_modules/@actions/cache/lib/options.js"(exports2) { "use strict"; + var __createBinding2 = exports2 && exports2.__createBinding || (Object.create ? (function(o, m, k, k2) { + if (k2 === void 0) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { + return m[k]; + } }; + } + Object.defineProperty(o, k2, desc); + }) : (function(o, m, k, k2) { + if (k2 === void 0) k2 = k; + o[k2] = m[k]; + })); + var __setModuleDefault2 = exports2 && exports2.__setModuleDefault || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); + }) : function(o, v) { + o["default"] = v; + }); + var __importStar2 = exports2 && exports2.__importStar || /* @__PURE__ */ (function() { + var ownKeys2 = function(o) { + ownKeys2 = Object.getOwnPropertyNames || function(o2) { + var ar = []; + for (var k in o2) if (Object.prototype.hasOwnProperty.call(o2, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys2(o); + }; + return function(mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) { + for (var k = ownKeys2(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding2(result, mod, k[i]); + } + __setModuleDefault2(result, mod); + return result; + }; + })(); Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.lowerCamelCase = void 0; - function lowerCamelCase(snakeCase) { - let capNext = false; - const sb = []; - for (let i = 0; i < snakeCase.length; i++) { - let next = snakeCase.charAt(i); - if (next == "_") { - capNext = true; - } else if (/\d/.test(next)) { - sb.push(next); - capNext = true; - } else if (capNext) { - sb.push(next.toUpperCase()); - capNext = false; - } else if (i == 0) { - sb.push(next.toLowerCase()); - } else { - sb.push(next); + exports2.getUploadOptions = getUploadOptions; + exports2.getDownloadOptions = getDownloadOptions; + var core14 = __importStar2(require_core()); + function getUploadOptions(copy) { + const result = { + useAzureSdk: false, + uploadConcurrency: 4, + uploadChunkSize: 32 * 1024 * 1024 + }; + if (copy) { + if (typeof copy.useAzureSdk === "boolean") { + result.useAzureSdk = copy.useAzureSdk; + } + if (typeof copy.uploadConcurrency === "number") { + result.uploadConcurrency = copy.uploadConcurrency; + } + if (typeof copy.uploadChunkSize === "number") { + result.uploadChunkSize = copy.uploadChunkSize; } } - return sb.join(""); + result.uploadConcurrency = !isNaN(Number(process.env["CACHE_UPLOAD_CONCURRENCY"])) ? Math.min(32, Number(process.env["CACHE_UPLOAD_CONCURRENCY"])) : result.uploadConcurrency; + result.uploadChunkSize = !isNaN(Number(process.env["CACHE_UPLOAD_CHUNK_SIZE"])) ? 
Math.min(128 * 1024 * 1024, Number(process.env["CACHE_UPLOAD_CHUNK_SIZE"]) * 1024 * 1024) : result.uploadChunkSize; + core14.debug(`Use Azure SDK: ${result.useAzureSdk}`); + core14.debug(`Upload concurrency: ${result.uploadConcurrency}`); + core14.debug(`Upload chunk size: ${result.uploadChunkSize}`); + return result; + } + function getDownloadOptions(copy) { + const result = { + useAzureSdk: false, + concurrentBlobDownloads: true, + downloadConcurrency: 8, + timeoutInMs: 3e4, + segmentTimeoutInMs: 6e5, + lookupOnly: false + }; + if (copy) { + if (typeof copy.useAzureSdk === "boolean") { + result.useAzureSdk = copy.useAzureSdk; + } + if (typeof copy.concurrentBlobDownloads === "boolean") { + result.concurrentBlobDownloads = copy.concurrentBlobDownloads; + } + if (typeof copy.downloadConcurrency === "number") { + result.downloadConcurrency = copy.downloadConcurrency; + } + if (typeof copy.timeoutInMs === "number") { + result.timeoutInMs = copy.timeoutInMs; + } + if (typeof copy.segmentTimeoutInMs === "number") { + result.segmentTimeoutInMs = copy.segmentTimeoutInMs; + } + if (typeof copy.lookupOnly === "boolean") { + result.lookupOnly = copy.lookupOnly; + } + } + const segmentDownloadTimeoutMins = process.env["SEGMENT_DOWNLOAD_TIMEOUT_MINS"]; + if (segmentDownloadTimeoutMins && !isNaN(Number(segmentDownloadTimeoutMins)) && isFinite(Number(segmentDownloadTimeoutMins))) { + result.segmentTimeoutInMs = Number(segmentDownloadTimeoutMins) * 60 * 1e3; + } + core14.debug(`Use Azure SDK: ${result.useAzureSdk}`); + core14.debug(`Download concurrency: ${result.downloadConcurrency}`); + core14.debug(`Request timeout (ms): ${result.timeoutInMs}`); + core14.debug(`Cache segment download timeout mins env var: ${process.env["SEGMENT_DOWNLOAD_TIMEOUT_MINS"]}`); + core14.debug(`Segment download timeout (ms): ${result.segmentTimeoutInMs}`); + core14.debug(`Lookup only: ${result.lookupOnly}`); + return result; } - exports2.lowerCamelCase = lowerCamelCase; } }); -// node_modules/@protobuf-ts/runtime/build/commonjs/reflection-info.js -var require_reflection_info = __commonJS({ - "node_modules/@protobuf-ts/runtime/build/commonjs/reflection-info.js"(exports2) { +// node_modules/@actions/cache/lib/internal/config.js +var require_config = __commonJS({ + "node_modules/@actions/cache/lib/internal/config.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.readMessageOption = exports2.readFieldOption = exports2.readFieldOptions = exports2.normalizeFieldInfo = exports2.RepeatType = exports2.LongType = exports2.ScalarType = void 0; - var lower_camel_case_1 = require_lower_camel_case(); - var ScalarType; - (function(ScalarType2) { - ScalarType2[ScalarType2["DOUBLE"] = 1] = "DOUBLE"; - ScalarType2[ScalarType2["FLOAT"] = 2] = "FLOAT"; - ScalarType2[ScalarType2["INT64"] = 3] = "INT64"; - ScalarType2[ScalarType2["UINT64"] = 4] = "UINT64"; - ScalarType2[ScalarType2["INT32"] = 5] = "INT32"; - ScalarType2[ScalarType2["FIXED64"] = 6] = "FIXED64"; - ScalarType2[ScalarType2["FIXED32"] = 7] = "FIXED32"; - ScalarType2[ScalarType2["BOOL"] = 8] = "BOOL"; - ScalarType2[ScalarType2["STRING"] = 9] = "STRING"; - ScalarType2[ScalarType2["BYTES"] = 12] = "BYTES"; - ScalarType2[ScalarType2["UINT32"] = 13] = "UINT32"; - ScalarType2[ScalarType2["SFIXED32"] = 15] = "SFIXED32"; - ScalarType2[ScalarType2["SFIXED64"] = 16] = "SFIXED64"; - ScalarType2[ScalarType2["SINT32"] = 17] = "SINT32"; - ScalarType2[ScalarType2["SINT64"] = 18] = "SINT64"; - })(ScalarType = exports2.ScalarType || 
(exports2.ScalarType = {})); - var LongType; - (function(LongType2) { - LongType2[LongType2["BIGINT"] = 0] = "BIGINT"; - LongType2[LongType2["STRING"] = 1] = "STRING"; - LongType2[LongType2["NUMBER"] = 2] = "NUMBER"; - })(LongType = exports2.LongType || (exports2.LongType = {})); - var RepeatType; - (function(RepeatType2) { - RepeatType2[RepeatType2["NO"] = 0] = "NO"; - RepeatType2[RepeatType2["PACKED"] = 1] = "PACKED"; - RepeatType2[RepeatType2["UNPACKED"] = 2] = "UNPACKED"; - })(RepeatType = exports2.RepeatType || (exports2.RepeatType = {})); - function normalizeFieldInfo(field) { - var _a, _b, _c, _d; - field.localName = (_a = field.localName) !== null && _a !== void 0 ? _a : lower_camel_case_1.lowerCamelCase(field.name); - field.jsonName = (_b = field.jsonName) !== null && _b !== void 0 ? _b : lower_camel_case_1.lowerCamelCase(field.name); - field.repeat = (_c = field.repeat) !== null && _c !== void 0 ? _c : RepeatType.NO; - field.opt = (_d = field.opt) !== null && _d !== void 0 ? _d : field.repeat ? false : field.oneof ? false : field.kind == "message"; - return field; + exports2.isGhes = isGhes; + exports2.getCacheServiceVersion = getCacheServiceVersion; + exports2.getCacheServiceURL = getCacheServiceURL; + function isGhes() { + const ghUrl = new URL(process.env["GITHUB_SERVER_URL"] || "https://github.com"); + const hostname = ghUrl.hostname.trimEnd().toUpperCase(); + const isGitHubHost = hostname === "GITHUB.COM"; + const isGheHost = hostname.endsWith(".GHE.COM"); + const isLocalHost = hostname.endsWith(".LOCALHOST"); + return !isGitHubHost && !isGheHost && !isLocalHost; } - exports2.normalizeFieldInfo = normalizeFieldInfo; - function readFieldOptions(messageType, fieldName, extensionName, extensionType) { - var _a; - const options = (_a = messageType.fields.find((m, i) => m.localName == fieldName || i == fieldName)) === null || _a === void 0 ? void 0 : _a.options; - return options && options[extensionName] ? extensionType.fromJson(options[extensionName]) : void 0; + function getCacheServiceVersion() { + if (isGhes()) + return "v1"; + return process.env["ACTIONS_CACHE_SERVICE_V2"] ? "v2" : "v1"; } - exports2.readFieldOptions = readFieldOptions; - function readFieldOption(messageType, fieldName, extensionName, extensionType) { - var _a; - const options = (_a = messageType.fields.find((m, i) => m.localName == fieldName || i == fieldName)) === null || _a === void 0 ? void 0 : _a.options; - if (!options) { - return void 0; - } - const optionVal = options[extensionName]; - if (optionVal === void 0) { - return optionVal; + function getCacheServiceURL() { + const version = getCacheServiceVersion(); + switch (version) { + case "v1": + return process.env["ACTIONS_CACHE_URL"] || process.env["ACTIONS_RESULTS_URL"] || ""; + case "v2": + return process.env["ACTIONS_RESULTS_URL"] || ""; + default: + throw new Error(`Unsupported cache service version: ${version}`); } - return extensionType ? 
extensionType.fromJson(optionVal) : optionVal; } - exports2.readFieldOption = readFieldOption; - function readMessageOption(messageType, extensionName, extensionType) { - const options = messageType.options; - const optionVal = options[extensionName]; - if (optionVal === void 0) { - return optionVal; + } +}); + +// node_modules/@actions/cache/package.json +var require_package2 = __commonJS({ + "node_modules/@actions/cache/package.json"(exports2, module2) { + module2.exports = { + name: "@actions/cache", + version: "5.0.5", + preview: true, + description: "Actions cache lib", + keywords: [ + "github", + "actions", + "cache" + ], + homepage: "https://github.com/actions/toolkit/tree/main/packages/cache", + license: "MIT", + main: "lib/cache.js", + types: "lib/cache.d.ts", + directories: { + lib: "lib", + test: "__tests__" + }, + files: [ + "lib", + "!.DS_Store" + ], + publishConfig: { + access: "public" + }, + repository: { + type: "git", + url: "git+https://github.com/actions/toolkit.git", + directory: "packages/cache" + }, + scripts: { + "audit-moderate": "npm install && npm audit --json --audit-level=moderate > audit.json", + test: 'echo "Error: run tests from root" && exit 1', + tsc: "tsc" + }, + bugs: { + url: "https://github.com/actions/toolkit/issues" + }, + dependencies: { + "@actions/core": "^2.0.0", + "@actions/exec": "^2.0.0", + "@actions/glob": "^0.5.1", + "@protobuf-ts/runtime-rpc": "^2.11.1", + "@actions/http-client": "^3.0.2", + "@actions/io": "^2.0.0", + "@azure/abort-controller": "^1.1.0", + "@azure/core-rest-pipeline": "^1.22.0", + "@azure/storage-blob": "^12.29.1", + semver: "^6.3.1" + }, + devDependencies: { + "@types/node": "^24.1.0", + "@types/semver": "^6.0.0", + "@protobuf-ts/plugin": "^2.9.4", + typescript: "^5.2.2" + }, + overrides: { + "uri-js": "npm:uri-js-replace@^1.0.1", + "node-fetch": "^3.3.2" } - return extensionType ? 
extensionType.fromJson(optionVal) : optionVal; - } - exports2.readMessageOption = readMessageOption; + }; } }); -// node_modules/@protobuf-ts/runtime/build/commonjs/oneof.js -var require_oneof = __commonJS({ - "node_modules/@protobuf-ts/runtime/build/commonjs/oneof.js"(exports2) { +// node_modules/@actions/cache/lib/internal/shared/user-agent.js +var require_user_agent = __commonJS({ + "node_modules/@actions/cache/lib/internal/shared/user-agent.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.getSelectedOneofValue = exports2.clearOneofValue = exports2.setUnknownOneofValue = exports2.setOneofValue = exports2.getOneofValue = exports2.isOneofGroup = void 0; - function isOneofGroup(any) { - if (typeof any != "object" || any === null || !any.hasOwnProperty("oneofKind")) { - return false; - } - switch (typeof any.oneofKind) { - case "string": - if (any[any.oneofKind] === void 0) - return false; - return Object.keys(any).length == 2; - case "undefined": - return Object.keys(any).length == 1; - default: - return false; - } - } - exports2.isOneofGroup = isOneofGroup; - function getOneofValue(oneof, kind) { - return oneof[kind]; - } - exports2.getOneofValue = getOneofValue; - function setOneofValue(oneof, kind, value) { - if (oneof.oneofKind !== void 0) { - delete oneof[oneof.oneofKind]; - } - oneof.oneofKind = kind; - if (value !== void 0) { - oneof[kind] = value; - } - } - exports2.setOneofValue = setOneofValue; - function setUnknownOneofValue(oneof, kind, value) { - if (oneof.oneofKind !== void 0) { - delete oneof[oneof.oneofKind]; - } - oneof.oneofKind = kind; - if (value !== void 0 && kind !== void 0) { - oneof[kind] = value; - } - } - exports2.setUnknownOneofValue = setUnknownOneofValue; - function clearOneofValue(oneof) { - if (oneof.oneofKind !== void 0) { - delete oneof[oneof.oneofKind]; - } - oneof.oneofKind = void 0; - } - exports2.clearOneofValue = clearOneofValue; - function getSelectedOneofValue(oneof) { - if (oneof.oneofKind === void 0) { - return void 0; - } - return oneof[oneof.oneofKind]; + exports2.getUserAgentString = getUserAgentString; + var packageJson = require_package2(); + function getUserAgentString() { + return `@actions/cache-${packageJson.version}`; } - exports2.getSelectedOneofValue = getSelectedOneofValue; } }); -// node_modules/@protobuf-ts/runtime/build/commonjs/reflection-type-check.js -var require_reflection_type_check = __commonJS({ - "node_modules/@protobuf-ts/runtime/build/commonjs/reflection-type-check.js"(exports2) { +// node_modules/@actions/cache/lib/internal/cacheHttpClient.js +var require_cacheHttpClient = __commonJS({ + "node_modules/@actions/cache/lib/internal/cacheHttpClient.js"(exports2) { "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.ReflectionTypeCheck = void 0; - var reflection_info_1 = require_reflection_info(); - var oneof_1 = require_oneof(); - var ReflectionTypeCheck = class { - constructor(info6) { - var _a; - this.fields = (_a = info6.fields) !== null && _a !== void 0 ? _a : []; + var __createBinding2 = exports2 && exports2.__createBinding || (Object.create ? (function(o, m, k, k2) { + if (k2 === void 0) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { + return m[k]; + } }; } - prepare() { - if (this.data) - return; - const req = [], known = [], oneofs = []; - for (let field of this.fields) { - if (field.oneof) { - if (!oneofs.includes(field.oneof)) { - oneofs.push(field.oneof); - req.push(field.oneof); - known.push(field.oneof); - } - } else { - known.push(field.localName); - switch (field.kind) { - case "scalar": - case "enum": - if (!field.opt || field.repeat) - req.push(field.localName); - break; - case "message": - if (field.repeat) - req.push(field.localName); - break; - case "map": - req.push(field.localName); - break; - } + Object.defineProperty(o, k2, desc); + }) : (function(o, m, k, k2) { + if (k2 === void 0) k2 = k; + o[k2] = m[k]; + })); + var __setModuleDefault2 = exports2 && exports2.__setModuleDefault || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); + }) : function(o, v) { + o["default"] = v; + }); + var __importStar2 = exports2 && exports2.__importStar || /* @__PURE__ */ (function() { + var ownKeys2 = function(o) { + ownKeys2 = Object.getOwnPropertyNames || function(o2) { + var ar = []; + for (var k in o2) if (Object.prototype.hasOwnProperty.call(o2, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys2(o); + }; + return function(mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) { + for (var k = ownKeys2(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding2(result, mod, k[i]); + } + __setModuleDefault2(result, mod); + return result; + }; + })(); + var __awaiter2 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { + function adopt(value) { + return value instanceof P ? value : new P(function(resolve6) { + resolve6(value); + }); + } + return new (P || (P = Promise))(function(resolve6, reject) { + function fulfilled(value) { + try { + step(generator.next(value)); + } catch (e) { + reject(e); } } - this.data = { req, known, oneofs: Object.values(oneofs) }; + function rejected(value) { + try { + step(generator["throw"](value)); + } catch (e) { + reject(e); + } + } + function step(result) { + result.done ? resolve6(result.value) : adopt(result.value).then(fulfilled, rejected); + } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); + }; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.getCacheEntry = getCacheEntry; + exports2.downloadCache = downloadCache; + exports2.reserveCache = reserveCache; + exports2.saveCache = saveCache3; + var core14 = __importStar2(require_core()); + var http_client_1 = require_lib(); + var auth_1 = require_auth(); + var fs13 = __importStar2(require("fs")); + var url_1 = require("url"); + var utils = __importStar2(require_cacheUtils()); + var uploadUtils_1 = require_uploadUtils(); + var downloadUtils_1 = require_downloadUtils(); + var options_1 = require_options(); + var requestUtils_1 = require_requestUtils(); + var config_1 = require_config(); + var user_agent_1 = require_user_agent(); + function getCacheApiUrl(resource) { + const baseUrl = (0, config_1.getCacheServiceURL)(); + if (!baseUrl) { + throw new Error("Cache Service Url not found, unable to restore cache."); } - /** - * Is the argument a valid message as specified by the - * reflection information? - * - * Checks all field types recursively. The `depth` - * specifies how deep into the structure the check will be. 
- * - * With a depth of 0, only the presence of fields - * is checked. - * - * With a depth of 1 or more, the field types are checked. - * - * With a depth of 2 or more, the members of map, repeated - * and message fields are checked. - * - * Message fields will be checked recursively with depth - 1. - * - * The number of map entries / repeated values being checked - * is < depth. - */ - is(message, depth, allowExcessProperties = false) { - if (depth < 0) - return true; - if (message === null || message === void 0 || typeof message != "object") - return false; - this.prepare(); - let keys = Object.keys(message), data = this.data; - if (keys.length < data.req.length || data.req.some((n) => !keys.includes(n))) - return false; - if (!allowExcessProperties) { - if (keys.some((k) => !data.known.includes(k))) - return false; + const url2 = `${baseUrl}_apis/artifactcache/${resource}`; + core14.debug(`Resource Url: ${url2}`); + return url2; + } + function createAcceptHeader(type2, apiVersion) { + return `${type2};api-version=${apiVersion}`; + } + function getRequestOptions() { + const requestOptions = { + headers: { + Accept: createAcceptHeader("application/json", "6.0-preview.1") } - if (depth < 1) { - return true; + }; + return requestOptions; + } + function createHttpClient() { + const token = process.env["ACTIONS_RUNTIME_TOKEN"] || ""; + const bearerCredentialHandler = new auth_1.BearerCredentialHandler(token); + return new http_client_1.HttpClient((0, user_agent_1.getUserAgentString)(), [bearerCredentialHandler], getRequestOptions()); + } + function getCacheEntry(keys, paths, options) { + return __awaiter2(this, void 0, void 0, function* () { + const httpClient = createHttpClient(); + const version = utils.getCacheVersion(paths, options === null || options === void 0 ? void 0 : options.compressionMethod, options === null || options === void 0 ? void 0 : options.enableCrossOsArchive); + const resource = `cache?keys=${encodeURIComponent(keys.join(","))}&version=${version}`; + const response = yield (0, requestUtils_1.retryTypedResponse)("getCacheEntry", () => __awaiter2(this, void 0, void 0, function* () { + return httpClient.getJson(getCacheApiUrl(resource)); + })); + if (response.statusCode === 204) { + if (core14.isDebug()) { + yield printCachesListForDiagnostics(keys[0], httpClient, version); + } + return null; } - for (const name of data.oneofs) { - const group = message[name]; - if (!oneof_1.isOneofGroup(group)) - return false; - if (group.oneofKind === void 0) - continue; - const field = this.fields.find((f) => f.localName === group.oneofKind); - if (!field) - return false; - if (!this.field(group[group.oneofKind], field, allowExcessProperties, depth)) - return false; + if (!(0, requestUtils_1.isSuccessStatusCode)(response.statusCode)) { + throw new Error(`Cache service responded with ${response.statusCode}`); } - for (const field of this.fields) { - if (field.oneof !== void 0) - continue; - if (!this.field(message[field.localName], field, allowExcessProperties, depth)) - return false; + const cacheResult = response.result; + const cacheDownloadUrl = cacheResult === null || cacheResult === void 0 ? 
void 0 : cacheResult.archiveLocation; + if (!cacheDownloadUrl) { + throw new Error("Cache not found."); } - return true; - } - field(arg, field, allowExcessProperties, depth) { - let repeated = field.repeat; - switch (field.kind) { - case "scalar": - if (arg === void 0) - return field.opt; - if (repeated) - return this.scalars(arg, field.T, depth, field.L); - return this.scalar(arg, field.T, field.L); - case "enum": - if (arg === void 0) - return field.opt; - if (repeated) - return this.scalars(arg, reflection_info_1.ScalarType.INT32, depth); - return this.scalar(arg, reflection_info_1.ScalarType.INT32); - case "message": - if (arg === void 0) - return true; - if (repeated) - return this.messages(arg, field.T(), allowExcessProperties, depth); - return this.message(arg, field.T(), allowExcessProperties, depth); - case "map": - if (typeof arg != "object" || arg === null) - return false; - if (depth < 2) - return true; - if (!this.mapKeys(arg, field.K, depth)) - return false; - switch (field.V.kind) { - case "scalar": - return this.scalars(Object.values(arg), field.V.T, depth, field.V.L); - case "enum": - return this.scalars(Object.values(arg), reflection_info_1.ScalarType.INT32, depth); - case "message": - return this.messages(Object.values(arg), field.V.T(), allowExcessProperties, depth); + core14.setSecret(cacheDownloadUrl); + core14.debug(`Cache Result:`); + core14.debug(JSON.stringify(cacheResult)); + return cacheResult; + }); + } + function printCachesListForDiagnostics(key, httpClient, version) { + return __awaiter2(this, void 0, void 0, function* () { + const resource = `caches?key=${encodeURIComponent(key)}`; + const response = yield (0, requestUtils_1.retryTypedResponse)("listCache", () => __awaiter2(this, void 0, void 0, function* () { + return httpClient.getJson(getCacheApiUrl(resource)); + })); + if (response.statusCode === 200) { + const cacheListResult = response.result; + const totalCount = cacheListResult === null || cacheListResult === void 0 ? void 0 : cacheListResult.totalCount; + if (totalCount && totalCount > 0) { + core14.debug(`No matching cache found for cache key '${key}', version '${version} and scope ${process.env["GITHUB_REF"]}. There exist one or more cache(s) with similar key but they have different version or scope. See more info on cache matching here: https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#matching-a-cache-key +Other caches with similar key:`); + for (const cacheEntry of (cacheListResult === null || cacheListResult === void 0 ? void 0 : cacheListResult.artifactCaches) || []) { + core14.debug(`Cache Key: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheKey}, Cache Version: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheVersion}, Cache Scope: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.scope}, Cache Created: ${cacheEntry === null || cacheEntry === void 0 ? 
void 0 : cacheEntry.creationTime}`); } - break; - } - return true; - } - message(arg, type2, allowExcessProperties, depth) { - if (allowExcessProperties) { - return type2.isAssignable(arg, depth); + } } - return type2.is(arg, depth); - } - messages(arg, type2, allowExcessProperties, depth) { - if (!Array.isArray(arg)) - return false; - if (depth < 2) - return true; - if (allowExcessProperties) { - for (let i = 0; i < arg.length && i < depth; i++) - if (!type2.isAssignable(arg[i], depth - 1)) - return false; + }); + } + function downloadCache(archiveLocation, archivePath, options) { + return __awaiter2(this, void 0, void 0, function* () { + const archiveUrl = new url_1.URL(archiveLocation); + const downloadOptions = (0, options_1.getDownloadOptions)(options); + if (archiveUrl.hostname.endsWith(".blob.core.windows.net")) { + if (downloadOptions.useAzureSdk) { + yield (0, downloadUtils_1.downloadCacheStorageSDK)(archiveLocation, archivePath, downloadOptions); + } else if (downloadOptions.concurrentBlobDownloads) { + yield (0, downloadUtils_1.downloadCacheHttpClientConcurrent)(archiveLocation, archivePath, downloadOptions); + } else { + yield (0, downloadUtils_1.downloadCacheHttpClient)(archiveLocation, archivePath); + } } else { - for (let i = 0; i < arg.length && i < depth; i++) - if (!type2.is(arg[i], depth - 1)) - return false; + yield (0, downloadUtils_1.downloadCacheHttpClient)(archiveLocation, archivePath); } - return true; - } - scalar(arg, type2, longType) { - let argType = typeof arg; - switch (type2) { - case reflection_info_1.ScalarType.UINT64: - case reflection_info_1.ScalarType.FIXED64: - case reflection_info_1.ScalarType.INT64: - case reflection_info_1.ScalarType.SFIXED64: - case reflection_info_1.ScalarType.SINT64: - switch (longType) { - case reflection_info_1.LongType.BIGINT: - return argType == "bigint"; - case reflection_info_1.LongType.NUMBER: - return argType == "number" && !isNaN(arg); - default: - return argType == "string"; - } - case reflection_info_1.ScalarType.BOOL: - return argType == "boolean"; - case reflection_info_1.ScalarType.STRING: - return argType == "string"; - case reflection_info_1.ScalarType.BYTES: - return arg instanceof Uint8Array; - case reflection_info_1.ScalarType.DOUBLE: - case reflection_info_1.ScalarType.FLOAT: - return argType == "number" && !isNaN(arg); - default: - return argType == "number" && Number.isInteger(arg); + }); + } + function reserveCache(key, paths, options) { + return __awaiter2(this, void 0, void 0, function* () { + const httpClient = createHttpClient(); + const version = utils.getCacheVersion(paths, options === null || options === void 0 ? void 0 : options.compressionMethod, options === null || options === void 0 ? void 0 : options.enableCrossOsArchive); + const reserveCacheRequest = { + key, + version, + cacheSize: options === null || options === void 0 ? 
void 0 : options.cacheSize + }; + const response = yield (0, requestUtils_1.retryTypedResponse)("reserveCache", () => __awaiter2(this, void 0, void 0, function* () { + return httpClient.postJson(getCacheApiUrl("caches"), reserveCacheRequest); + })); + return response; + }); + } + function getContentRange(start, end) { + return `bytes ${start}-${end}/*`; + } + function uploadChunk(httpClient, resourceUrl, openStream, start, end) { + return __awaiter2(this, void 0, void 0, function* () { + core14.debug(`Uploading chunk of size ${end - start + 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`); + const additionalHeaders = { + "Content-Type": "application/octet-stream", + "Content-Range": getContentRange(start, end) + }; + const uploadChunkResponse = yield (0, requestUtils_1.retryHttpClientResponse)(`uploadChunk (start: ${start}, end: ${end})`, () => __awaiter2(this, void 0, void 0, function* () { + return httpClient.sendStream("PATCH", resourceUrl, openStream(), additionalHeaders); + })); + if (!(0, requestUtils_1.isSuccessStatusCode)(uploadChunkResponse.message.statusCode)) { + throw new Error(`Cache service responded with ${uploadChunkResponse.message.statusCode} during upload chunk.`); } - } - scalars(arg, type2, depth, longType) { - if (!Array.isArray(arg)) - return false; - if (depth < 2) - return true; - if (Array.isArray(arg)) { - for (let i = 0; i < arg.length && i < depth; i++) - if (!this.scalar(arg[i], type2, longType)) - return false; + }); + } + function uploadFile(httpClient, cacheId, archivePath, options) { + return __awaiter2(this, void 0, void 0, function* () { + const fileSize = utils.getArchiveFileSizeInBytes(archivePath); + const resourceUrl = getCacheApiUrl(`caches/${cacheId.toString()}`); + const fd = fs13.openSync(archivePath, "r"); + const uploadOptions = (0, options_1.getUploadOptions)(options); + const concurrency = utils.assertDefined("uploadConcurrency", uploadOptions.uploadConcurrency); + const maxChunkSize = utils.assertDefined("uploadChunkSize", uploadOptions.uploadChunkSize); + const parallelUploads = [...new Array(concurrency).keys()]; + core14.debug("Awaiting all uploads"); + let offset = 0; + try { + yield Promise.all(parallelUploads.map(() => __awaiter2(this, void 0, void 0, function* () { + while (offset < fileSize) { + const chunkSize = Math.min(fileSize - offset, maxChunkSize); + const start = offset; + const end = offset + chunkSize - 1; + offset += maxChunkSize; + yield uploadChunk(httpClient, resourceUrl, () => fs13.createReadStream(archivePath, { + fd, + start, + end, + autoClose: false + }).on("error", (error3) => { + throw new Error(`Cache upload failed because file read failed with ${error3.message}`); + }), start, end); + } + }))); + } finally { + fs13.closeSync(fd); } - return true; - } - mapKeys(map2, type2, depth) { - let keys = Object.keys(map2); - switch (type2) { - case reflection_info_1.ScalarType.INT32: - case reflection_info_1.ScalarType.FIXED32: - case reflection_info_1.ScalarType.SFIXED32: - case reflection_info_1.ScalarType.SINT32: - case reflection_info_1.ScalarType.UINT32: - return this.scalars(keys.slice(0, depth).map((k) => parseInt(k)), type2, depth); - case reflection_info_1.ScalarType.BOOL: - return this.scalars(keys.slice(0, depth).map((k) => k == "true" ? true : k == "false" ? 
false : k), type2, depth); - default: - return this.scalars(keys, type2, depth, reflection_info_1.LongType.STRING); + return; + }); + } + function commitCache(httpClient, cacheId, filesize) { + return __awaiter2(this, void 0, void 0, function* () { + const commitCacheRequest = { size: filesize }; + return yield (0, requestUtils_1.retryTypedResponse)("commitCache", () => __awaiter2(this, void 0, void 0, function* () { + return httpClient.postJson(getCacheApiUrl(`caches/${cacheId.toString()}`), commitCacheRequest); + })); + }); + } + function saveCache3(cacheId, archivePath, signedUploadURL, options) { + return __awaiter2(this, void 0, void 0, function* () { + const uploadOptions = (0, options_1.getUploadOptions)(options); + if (uploadOptions.useAzureSdk) { + if (!signedUploadURL) { + throw new Error("Azure Storage SDK can only be used when a signed URL is provided."); + } + yield (0, uploadUtils_1.uploadCacheArchiveSDK)(signedUploadURL, archivePath, options); + } else { + const httpClient = createHttpClient(); + core14.debug("Upload cache"); + yield uploadFile(httpClient, cacheId, archivePath, options); + core14.debug("Commiting cache"); + const cacheSize = utils.getArchiveFileSizeInBytes(archivePath); + core14.info(`Cache Size: ~${Math.round(cacheSize / (1024 * 1024))} MB (${cacheSize} B)`); + const commitCacheResponse = yield commitCache(httpClient, cacheId, cacheSize); + if (!(0, requestUtils_1.isSuccessStatusCode)(commitCacheResponse.statusCode)) { + throw new Error(`Cache service responded with ${commitCacheResponse.statusCode} during commit cache.`); + } + core14.info("Cache saved successfully"); } - } - }; - exports2.ReflectionTypeCheck = ReflectionTypeCheck; + }); + } } }); -// node_modules/@protobuf-ts/runtime/build/commonjs/reflection-long-convert.js -var require_reflection_long_convert = __commonJS({ - "node_modules/@protobuf-ts/runtime/build/commonjs/reflection-long-convert.js"(exports2) { +// node_modules/@protobuf-ts/runtime/build/commonjs/json-typings.js +var require_json_typings = __commonJS({ + "node_modules/@protobuf-ts/runtime/build/commonjs/json-typings.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.reflectionLongConvert = void 0; - var reflection_info_1 = require_reflection_info(); - function reflectionLongConvert(long, type2) { - switch (type2) { - case reflection_info_1.LongType.BIGINT: - return long.toBigInt(); - case reflection_info_1.LongType.NUMBER: - return long.toNumber(); - default: - return long.toString(); + exports2.isJsonObject = exports2.typeofJsonValue = void 0; + function typeofJsonValue(value) { + let t = typeof value; + if (t == "object") { + if (Array.isArray(value)) + return "array"; + if (value === null) + return "null"; } + return t; } - exports2.reflectionLongConvert = reflectionLongConvert; + exports2.typeofJsonValue = typeofJsonValue; + function isJsonObject(value) { + return value !== null && typeof value == "object" && !Array.isArray(value); + } + exports2.isJsonObject = isJsonObject; } }); -// node_modules/@protobuf-ts/runtime/build/commonjs/reflection-json-reader.js -var require_reflection_json_reader = __commonJS({ - "node_modules/@protobuf-ts/runtime/build/commonjs/reflection-json-reader.js"(exports2) { +// node_modules/@protobuf-ts/runtime/build/commonjs/base64.js +var require_base642 = __commonJS({ + "node_modules/@protobuf-ts/runtime/build/commonjs/base64.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.ReflectionJsonReader 
= void 0; - var json_typings_1 = require_json_typings(); - var base64_1 = require_base642(); - var reflection_info_1 = require_reflection_info(); - var pb_long_1 = require_pb_long(); - var assert_1 = require_assert(); - var reflection_long_convert_1 = require_reflection_long_convert(); - var ReflectionJsonReader = class { - constructor(info6) { - this.info = info6; - } - prepare() { - var _a; - if (this.fMap === void 0) { - this.fMap = {}; - const fieldsInput = (_a = this.info.fields) !== null && _a !== void 0 ? _a : []; - for (const field of fieldsInput) { - this.fMap[field.name] = field; - this.fMap[field.jsonName] = field; - this.fMap[field.localName] = field; + exports2.base64encode = exports2.base64decode = void 0; + var encTable = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/".split(""); + var decTable = []; + for (let i = 0; i < encTable.length; i++) + decTable[encTable[i].charCodeAt(0)] = i; + decTable["-".charCodeAt(0)] = encTable.indexOf("+"); + decTable["_".charCodeAt(0)] = encTable.indexOf("/"); + function base64decode(base64Str) { + let es = base64Str.length * 3 / 4; + if (base64Str[base64Str.length - 2] == "=") + es -= 2; + else if (base64Str[base64Str.length - 1] == "=") + es -= 1; + let bytes = new Uint8Array(es), bytePos = 0, groupPos = 0, b, p = 0; + for (let i = 0; i < base64Str.length; i++) { + b = decTable[base64Str.charCodeAt(i)]; + if (b === void 0) { + switch (base64Str[i]) { + case "=": + groupPos = 0; + // reset state when padding found + case "\n": + case "\r": + case " ": + case " ": + continue; + // skip white-space, and padding + default: + throw Error(`invalid base64 string.`); } } - } - // Cannot parse JSON for #. - assert(condition, fieldName, jsonValue) { - if (!condition) { - let what = json_typings_1.typeofJsonValue(jsonValue); - if (what == "number" || what == "boolean") - what = jsonValue.toString(); - throw new Error(`Cannot parse JSON ${what} for ${this.info.typeName}#${fieldName}`); + switch (groupPos) { + case 0: + p = b; + groupPos = 1; + break; + case 1: + bytes[bytePos++] = p << 2 | (b & 48) >> 4; + p = b; + groupPos = 2; + break; + case 2: + bytes[bytePos++] = (p & 15) << 4 | (b & 60) >> 2; + p = b; + groupPos = 3; + break; + case 3: + bytes[bytePos++] = (p & 3) << 6 | b; + groupPos = 0; + break; } } - /** - * Reads a message from canonical JSON format into the target message. - * - * Repeated fields are appended. Map entries are added, overwriting - * existing keys. - * - * If a message field is already present, it will be merged with the - * new data. - */ - read(input, message, options) { - this.prepare(); - const oneofsHandled = []; - for (const [jsonKey, jsonValue] of Object.entries(input)) { - const field = this.fMap[jsonKey]; - if (!field) { - if (!options.ignoreUnknownFields) - throw new Error(`Found unknown field while reading ${this.info.typeName} from JSON format. 
JSON key: ${jsonKey}`); - continue; - } - const localName = field.localName; - let target; - if (field.oneof) { - if (jsonValue === null && (field.kind !== "enum" || field.T()[0] !== "google.protobuf.NullValue")) { - continue; - } - if (oneofsHandled.includes(field.oneof)) - throw new Error(`Multiple members of the oneof group "${field.oneof}" of ${this.info.typeName} are present in JSON.`); - oneofsHandled.push(field.oneof); - target = message[field.oneof] = { - oneofKind: localName - }; - } else { - target = message; - } - if (field.kind == "map") { - if (jsonValue === null) { - continue; - } - this.assert(json_typings_1.isJsonObject(jsonValue), field.name, jsonValue); - const fieldObj = target[localName]; - for (const [jsonObjKey, jsonObjValue] of Object.entries(jsonValue)) { - this.assert(jsonObjValue !== null, field.name + " map value", null); - let val; - switch (field.V.kind) { - case "message": - val = field.V.T().internalJsonRead(jsonObjValue, options); - break; - case "enum": - val = this.enum(field.V.T(), jsonObjValue, field.name, options.ignoreUnknownFields); - if (val === false) - continue; - break; - case "scalar": - val = this.scalar(jsonObjValue, field.V.T, field.V.L, field.name); - break; - } - this.assert(val !== void 0, field.name + " map value", jsonObjValue); - let key = jsonObjKey; - if (field.K == reflection_info_1.ScalarType.BOOL) - key = key == "true" ? true : key == "false" ? false : key; - key = this.scalar(key, field.K, reflection_info_1.LongType.STRING, field.name).toString(); - fieldObj[key] = val; - } - } else if (field.repeat) { - if (jsonValue === null) - continue; - this.assert(Array.isArray(jsonValue), field.name, jsonValue); - const fieldArr = target[localName]; - for (const jsonItem of jsonValue) { - this.assert(jsonItem !== null, field.name, null); - let val; - switch (field.kind) { - case "message": - val = field.T().internalJsonRead(jsonItem, options); - break; - case "enum": - val = this.enum(field.T(), jsonItem, field.name, options.ignoreUnknownFields); - if (val === false) - continue; - break; - case "scalar": - val = this.scalar(jsonItem, field.T, field.L, field.name); - break; - } - this.assert(val !== void 0, field.name, jsonValue); - fieldArr.push(val); - } - } else { - switch (field.kind) { - case "message": - if (jsonValue === null && field.T().typeName != "google.protobuf.Value") { - this.assert(field.oneof === void 0, field.name + " (oneof member)", null); - continue; - } - target[localName] = field.T().internalJsonRead(jsonValue, options, target[localName]); - break; - case "enum": - if (jsonValue === null) - continue; - let val = this.enum(field.T(), jsonValue, field.name, options.ignoreUnknownFields); - if (val === false) - continue; - target[localName] = val; - break; - case "scalar": - if (jsonValue === null) - continue; - target[localName] = this.scalar(jsonValue, field.T, field.L, field.name); - break; - } - } + if (groupPos == 1) + throw Error(`invalid base64 string.`); + return bytes.subarray(0, bytePos); + } + exports2.base64decode = base64decode; + function base64encode(bytes) { + let base64 = "", groupPos = 0, b, p = 0; + for (let i = 0; i < bytes.length; i++) { + b = bytes[i]; + switch (groupPos) { + case 0: + base64 += encTable[b >> 2]; + p = (b & 3) << 4; + groupPos = 1; + break; + case 1: + base64 += encTable[p | b >> 4]; + p = (b & 15) << 2; + groupPos = 2; + break; + case 2: + base64 += encTable[p | b >> 6]; + base64 += encTable[b & 63]; + groupPos = 0; + break; } } - /** - * Returns `false` for unrecognized string 
representations. - * - * google.protobuf.NullValue accepts only JSON `null` (or the old `"NULL_VALUE"`). - */ - enum(type2, json2, fieldName, ignoreUnknownFields) { - if (type2[0] == "google.protobuf.NullValue") - assert_1.assert(json2 === null || json2 === "NULL_VALUE", `Unable to parse field ${this.info.typeName}#${fieldName}, enum ${type2[0]} only accepts null.`); - if (json2 === null) - return 0; - switch (typeof json2) { - case "number": - assert_1.assert(Number.isInteger(json2), `Unable to parse field ${this.info.typeName}#${fieldName}, enum can only be integral number, got ${json2}.`); - return json2; - case "string": - let localEnumName = json2; - if (type2[2] && json2.substring(0, type2[2].length) === type2[2]) - localEnumName = json2.substring(type2[2].length); - let enumNumber = type2[1][localEnumName]; - if (typeof enumNumber === "undefined" && ignoreUnknownFields) { - return false; - } - assert_1.assert(typeof enumNumber == "number", `Unable to parse field ${this.info.typeName}#${fieldName}, enum ${type2[0]} has no value for "${json2}".`); - return enumNumber; + if (groupPos) { + base64 += encTable[p]; + base64 += "="; + if (groupPos == 1) + base64 += "="; + } + return base64; + } + exports2.base64encode = base64encode; + } +}); + +// node_modules/@protobuf-ts/runtime/build/commonjs/protobufjs-utf8.js +var require_protobufjs_utf8 = __commonJS({ + "node_modules/@protobuf-ts/runtime/build/commonjs/protobufjs-utf8.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.utf8read = void 0; + var fromCharCodes = (chunk) => String.fromCharCode.apply(String, chunk); + function utf8read(bytes) { + if (bytes.length < 1) + return ""; + let pos = 0, parts = [], chunk = [], i = 0, t; + let len = bytes.length; + while (pos < len) { + t = bytes[pos++]; + if (t < 128) + chunk[i++] = t; + else if (t > 191 && t < 224) + chunk[i++] = (t & 31) << 6 | bytes[pos++] & 63; + else if (t > 239 && t < 365) { + t = ((t & 7) << 18 | (bytes[pos++] & 63) << 12 | (bytes[pos++] & 63) << 6 | bytes[pos++] & 63) - 65536; + chunk[i++] = 55296 + (t >> 10); + chunk[i++] = 56320 + (t & 1023); + } else + chunk[i++] = (t & 15) << 12 | (bytes[pos++] & 63) << 6 | bytes[pos++] & 63; + if (i > 8191) { + parts.push(fromCharCodes(chunk)); + i = 0; } - assert_1.assert(false, `Unable to parse field ${this.info.typeName}#${fieldName}, cannot parse enum value from ${typeof json2}".`); } - scalar(json2, type2, longType, fieldName) { - let e; - try { - switch (type2) { - // float, double: JSON value will be a number or one of the special string values "NaN", "Infinity", and "-Infinity". - // Either numbers or strings are accepted. Exponent notation is also accepted. 
- case reflection_info_1.ScalarType.DOUBLE: - case reflection_info_1.ScalarType.FLOAT: - if (json2 === null) - return 0; - if (json2 === "NaN") - return Number.NaN; - if (json2 === "Infinity") - return Number.POSITIVE_INFINITY; - if (json2 === "-Infinity") - return Number.NEGATIVE_INFINITY; - if (json2 === "") { - e = "empty string"; - break; - } - if (typeof json2 == "string" && json2.trim().length !== json2.length) { - e = "extra whitespace"; - break; - } - if (typeof json2 != "string" && typeof json2 != "number") { - break; - } - let float2 = Number(json2); - if (Number.isNaN(float2)) { - e = "not a number"; - break; - } - if (!Number.isFinite(float2)) { - e = "too large or small"; - break; - } - if (type2 == reflection_info_1.ScalarType.FLOAT) - assert_1.assertFloat32(float2); - return float2; - // int32, fixed32, uint32: JSON value will be a decimal number. Either numbers or strings are accepted. - case reflection_info_1.ScalarType.INT32: - case reflection_info_1.ScalarType.FIXED32: - case reflection_info_1.ScalarType.SFIXED32: - case reflection_info_1.ScalarType.SINT32: - case reflection_info_1.ScalarType.UINT32: - if (json2 === null) - return 0; - let int32; - if (typeof json2 == "number") - int32 = json2; - else if (json2 === "") - e = "empty string"; - else if (typeof json2 == "string") { - if (json2.trim().length !== json2.length) - e = "extra whitespace"; - else - int32 = Number(json2); - } - if (int32 === void 0) - break; - if (type2 == reflection_info_1.ScalarType.UINT32) - assert_1.assertUInt32(int32); - else - assert_1.assertInt32(int32); - return int32; - // int64, fixed64, uint64: JSON value will be a decimal string. Either numbers or strings are accepted. - case reflection_info_1.ScalarType.INT64: - case reflection_info_1.ScalarType.SFIXED64: - case reflection_info_1.ScalarType.SINT64: - if (json2 === null) - return reflection_long_convert_1.reflectionLongConvert(pb_long_1.PbLong.ZERO, longType); - if (typeof json2 != "number" && typeof json2 != "string") - break; - return reflection_long_convert_1.reflectionLongConvert(pb_long_1.PbLong.from(json2), longType); - case reflection_info_1.ScalarType.FIXED64: - case reflection_info_1.ScalarType.UINT64: - if (json2 === null) - return reflection_long_convert_1.reflectionLongConvert(pb_long_1.PbULong.ZERO, longType); - if (typeof json2 != "number" && typeof json2 != "string") - break; - return reflection_long_convert_1.reflectionLongConvert(pb_long_1.PbULong.from(json2), longType); - // bool: - case reflection_info_1.ScalarType.BOOL: - if (json2 === null) - return false; - if (typeof json2 !== "boolean") - break; - return json2; - // string: - case reflection_info_1.ScalarType.STRING: - if (json2 === null) - return ""; - if (typeof json2 !== "string") { - e = "extra whitespace"; - break; - } - try { - encodeURIComponent(json2); - } catch (e2) { - e2 = "invalid UTF8"; - break; - } - return json2; - // bytes: JSON value will be the data encoded as a string using standard base64 encoding with paddings. - // Either standard or URL-safe base64 encoding with/without paddings are accepted. 
- case reflection_info_1.ScalarType.BYTES: - if (json2 === null || json2 === "") - return new Uint8Array(0); - if (typeof json2 !== "string") - break; - return base64_1.base64decode(json2); - } - } catch (error3) { - e = error3.message; + if (parts.length) { + if (i) + parts.push(fromCharCodes(chunk.slice(0, i))); + return parts.join(""); + } + return fromCharCodes(chunk.slice(0, i)); + } + exports2.utf8read = utf8read; + } +}); + +// node_modules/@protobuf-ts/runtime/build/commonjs/binary-format-contract.js +var require_binary_format_contract = __commonJS({ + "node_modules/@protobuf-ts/runtime/build/commonjs/binary-format-contract.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.WireType = exports2.mergeBinaryOptions = exports2.UnknownFieldHandler = void 0; + var UnknownFieldHandler; + (function(UnknownFieldHandler2) { + UnknownFieldHandler2.symbol = /* @__PURE__ */ Symbol.for("protobuf-ts/unknown"); + UnknownFieldHandler2.onRead = (typeName, message, fieldNo, wireType, data) => { + let container = is(message) ? message[UnknownFieldHandler2.symbol] : message[UnknownFieldHandler2.symbol] = []; + container.push({ no: fieldNo, wireType, data }); + }; + UnknownFieldHandler2.onWrite = (typeName, message, writer) => { + for (let { no, wireType, data } of UnknownFieldHandler2.list(message)) + writer.tag(no, wireType).raw(data); + }; + UnknownFieldHandler2.list = (message, fieldNo) => { + if (is(message)) { + let all = message[UnknownFieldHandler2.symbol]; + return fieldNo ? all.filter((uf) => uf.no == fieldNo) : all; } - this.assert(false, fieldName + (e ? " - " + e : ""), json2); - } - }; - exports2.ReflectionJsonReader = ReflectionJsonReader; + return []; + }; + UnknownFieldHandler2.last = (message, fieldNo) => UnknownFieldHandler2.list(message, fieldNo).slice(-1)[0]; + const is = (message) => message && Array.isArray(message[UnknownFieldHandler2.symbol]); + })(UnknownFieldHandler = exports2.UnknownFieldHandler || (exports2.UnknownFieldHandler = {})); + function mergeBinaryOptions(a, b) { + return Object.assign(Object.assign({}, a), b); + } + exports2.mergeBinaryOptions = mergeBinaryOptions; + var WireType; + (function(WireType2) { + WireType2[WireType2["Varint"] = 0] = "Varint"; + WireType2[WireType2["Bit64"] = 1] = "Bit64"; + WireType2[WireType2["LengthDelimited"] = 2] = "LengthDelimited"; + WireType2[WireType2["StartGroup"] = 3] = "StartGroup"; + WireType2[WireType2["EndGroup"] = 4] = "EndGroup"; + WireType2[WireType2["Bit32"] = 5] = "Bit32"; + })(WireType = exports2.WireType || (exports2.WireType = {})); } }); -// node_modules/@protobuf-ts/runtime/build/commonjs/reflection-json-writer.js -var require_reflection_json_writer = __commonJS({ - "node_modules/@protobuf-ts/runtime/build/commonjs/reflection-json-writer.js"(exports2) { +// node_modules/@protobuf-ts/runtime/build/commonjs/goog-varint.js +var require_goog_varint = __commonJS({ + "node_modules/@protobuf-ts/runtime/build/commonjs/goog-varint.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.ReflectionJsonWriter = void 0; - var base64_1 = require_base642(); - var pb_long_1 = require_pb_long(); - var reflection_info_1 = require_reflection_info(); - var assert_1 = require_assert(); - var ReflectionJsonWriter = class { - constructor(info6) { - var _a; - this.fields = (_a = info6.fields) !== null && _a !== void 0 ? 
_a : []; + exports2.varint32read = exports2.varint32write = exports2.int64toString = exports2.int64fromString = exports2.varint64write = exports2.varint64read = void 0; + function varint64read() { + let lowBits = 0; + let highBits = 0; + for (let shift = 0; shift < 28; shift += 7) { + let b = this.buf[this.pos++]; + lowBits |= (b & 127) << shift; + if ((b & 128) == 0) { + this.assertBounds(); + return [lowBits, highBits]; + } } - /** - * Converts the message to a JSON object, based on the field descriptors. - */ - write(message, options) { - const json2 = {}, source = message; - for (const field of this.fields) { - if (!field.oneof) { - let jsonValue2 = this.field(field, source[field.localName], options); - if (jsonValue2 !== void 0) - json2[options.useProtoFieldName ? field.name : field.jsonName] = jsonValue2; - continue; - } - const group = source[field.oneof]; - if (group.oneofKind !== field.localName) - continue; - const opt = field.kind == "scalar" || field.kind == "enum" ? Object.assign(Object.assign({}, options), { emitDefaultValues: true }) : options; - let jsonValue = this.field(field, group[field.localName], opt); - assert_1.assert(jsonValue !== void 0); - json2[options.useProtoFieldName ? field.name : field.jsonName] = jsonValue; + let middleByte = this.buf[this.pos++]; + lowBits |= (middleByte & 15) << 28; + highBits = (middleByte & 112) >> 4; + if ((middleByte & 128) == 0) { + this.assertBounds(); + return [lowBits, highBits]; + } + for (let shift = 3; shift <= 31; shift += 7) { + let b = this.buf[this.pos++]; + highBits |= (b & 127) << shift; + if ((b & 128) == 0) { + this.assertBounds(); + return [lowBits, highBits]; } - return json2; } - field(field, value, options) { - let jsonValue = void 0; - if (field.kind == "map") { - assert_1.assert(typeof value == "object" && value !== null); - const jsonObj = {}; - switch (field.V.kind) { - case "scalar": - for (const [entryKey, entryValue] of Object.entries(value)) { - const val = this.scalar(field.V.T, entryValue, field.name, false, true); - assert_1.assert(val !== void 0); - jsonObj[entryKey.toString()] = val; - } - break; - case "message": - const messageType = field.V.T(); - for (const [entryKey, entryValue] of Object.entries(value)) { - const val = this.message(messageType, entryValue, field.name, options); - assert_1.assert(val !== void 0); - jsonObj[entryKey.toString()] = val; - } - break; - case "enum": - const enumInfo = field.V.T(); - for (const [entryKey, entryValue] of Object.entries(value)) { - assert_1.assert(entryValue === void 0 || typeof entryValue == "number"); - const val = this.enum(enumInfo, entryValue, field.name, false, true, options.enumAsInteger); - assert_1.assert(val !== void 0); - jsonObj[entryKey.toString()] = val; - } - break; - } - if (options.emitDefaultValues || Object.keys(jsonObj).length > 0) - jsonValue = jsonObj; - } else if (field.repeat) { - assert_1.assert(Array.isArray(value)); - const jsonArr = []; - switch (field.kind) { - case "scalar": - for (let i = 0; i < value.length; i++) { - const val = this.scalar(field.T, value[i], field.name, field.opt, true); - assert_1.assert(val !== void 0); - jsonArr.push(val); - } - break; - case "enum": - const enumInfo = field.T(); - for (let i = 0; i < value.length; i++) { - assert_1.assert(value[i] === void 0 || typeof value[i] == "number"); - const val = this.enum(enumInfo, value[i], field.name, field.opt, true, options.enumAsInteger); - assert_1.assert(val !== void 0); - jsonArr.push(val); - } - break; - case "message": - const messageType = 
field.T(); - for (let i = 0; i < value.length; i++) { - const val = this.message(messageType, value[i], field.name, options); - assert_1.assert(val !== void 0); - jsonArr.push(val); - } - break; - } - if (options.emitDefaultValues || jsonArr.length > 0 || options.emitDefaultValues) - jsonValue = jsonArr; - } else { - switch (field.kind) { - case "scalar": - jsonValue = this.scalar(field.T, value, field.name, field.opt, options.emitDefaultValues); - break; - case "enum": - jsonValue = this.enum(field.T(), value, field.name, field.opt, options.emitDefaultValues, options.enumAsInteger); - break; - case "message": - jsonValue = this.message(field.T(), value, field.name, options); - break; - } + throw new Error("invalid varint"); + } + exports2.varint64read = varint64read; + function varint64write(lo, hi, bytes) { + for (let i = 0; i < 28; i = i + 7) { + const shift = lo >>> i; + const hasNext = !(shift >>> 7 == 0 && hi == 0); + const byte = (hasNext ? shift | 128 : shift) & 255; + bytes.push(byte); + if (!hasNext) { + return; } - return jsonValue; } - /** - * Returns `null` as the default for google.protobuf.NullValue. - */ - enum(type2, value, fieldName, optional, emitDefaultValues, enumAsInteger) { - if (type2[0] == "google.protobuf.NullValue") - return !emitDefaultValues && !optional ? void 0 : null; - if (value === void 0) { - assert_1.assert(optional); - return void 0; + const splitBits = lo >>> 28 & 15 | (hi & 7) << 4; + const hasMoreBits = !(hi >> 3 == 0); + bytes.push((hasMoreBits ? splitBits | 128 : splitBits) & 255); + if (!hasMoreBits) { + return; + } + for (let i = 3; i < 31; i = i + 7) { + const shift = hi >>> i; + const hasNext = !(shift >>> 7 == 0); + const byte = (hasNext ? shift | 128 : shift) & 255; + bytes.push(byte); + if (!hasNext) { + return; } - if (value === 0 && !emitDefaultValues && !optional) - return void 0; - assert_1.assert(typeof value == "number"); - assert_1.assert(Number.isInteger(value)); - if (enumAsInteger || !type2[1].hasOwnProperty(value)) - return value; - if (type2[2]) - return type2[2] + type2[1][value]; - return type2[1][value]; } - message(type2, value, fieldName, options) { - if (value === void 0) - return options.emitDefaultValues ? 
null : void 0; - return type2.internalJsonWrite(value, options); + bytes.push(hi >>> 31 & 1); + } + exports2.varint64write = varint64write; + var TWO_PWR_32_DBL2 = (1 << 16) * (1 << 16); + function int64fromString(dec) { + let minus = dec[0] == "-"; + if (minus) + dec = dec.slice(1); + const base = 1e6; + let lowBits = 0; + let highBits = 0; + function add1e6digit(begin, end) { + const digit1e6 = Number(dec.slice(begin, end)); + highBits *= base; + lowBits = lowBits * base + digit1e6; + if (lowBits >= TWO_PWR_32_DBL2) { + highBits = highBits + (lowBits / TWO_PWR_32_DBL2 | 0); + lowBits = lowBits % TWO_PWR_32_DBL2; + } } - scalar(type2, value, fieldName, optional, emitDefaultValues) { - if (value === void 0) { - assert_1.assert(optional); - return void 0; + add1e6digit(-24, -18); + add1e6digit(-18, -12); + add1e6digit(-12, -6); + add1e6digit(-6); + return [minus, lowBits, highBits]; + } + exports2.int64fromString = int64fromString; + function int64toString(bitsLow, bitsHigh) { + if (bitsHigh >>> 0 <= 2097151) { + return "" + (TWO_PWR_32_DBL2 * bitsHigh + (bitsLow >>> 0)); + } + let low = bitsLow & 16777215; + let mid = (bitsLow >>> 24 | bitsHigh << 8) >>> 0 & 16777215; + let high = bitsHigh >> 16 & 65535; + let digitA = low + mid * 6777216 + high * 6710656; + let digitB = mid + high * 8147497; + let digitC = high * 2; + let base = 1e7; + if (digitA >= base) { + digitB += Math.floor(digitA / base); + digitA %= base; + } + if (digitB >= base) { + digitC += Math.floor(digitB / base); + digitB %= base; + } + function decimalFrom1e7(digit1e7, needLeadingZeros) { + let partial = digit1e7 ? String(digit1e7) : ""; + if (needLeadingZeros) { + return "0000000".slice(partial.length) + partial; } - const ed = emitDefaultValues || optional; - switch (type2) { - // int32, fixed32, uint32: JSON value will be a decimal number. Either numbers or strings are accepted. - case reflection_info_1.ScalarType.INT32: - case reflection_info_1.ScalarType.SFIXED32: - case reflection_info_1.ScalarType.SINT32: - if (value === 0) - return ed ? 0 : void 0; - assert_1.assertInt32(value); - return value; - case reflection_info_1.ScalarType.FIXED32: - case reflection_info_1.ScalarType.UINT32: - if (value === 0) - return ed ? 0 : void 0; - assert_1.assertUInt32(value); - return value; - // float, double: JSON value will be a number or one of the special string values "NaN", "Infinity", and "-Infinity". - // Either numbers or strings are accepted. Exponent notation is also accepted. - case reflection_info_1.ScalarType.FLOAT: - assert_1.assertFloat32(value); - case reflection_info_1.ScalarType.DOUBLE: - if (value === 0) - return ed ? 0 : void 0; - assert_1.assert(typeof value == "number"); - if (Number.isNaN(value)) - return "NaN"; - if (value === Number.POSITIVE_INFINITY) - return "Infinity"; - if (value === Number.NEGATIVE_INFINITY) - return "-Infinity"; - return value; - // string: - case reflection_info_1.ScalarType.STRING: - if (value === "") - return ed ? "" : void 0; - assert_1.assert(typeof value == "string"); - return value; - // bool: - case reflection_info_1.ScalarType.BOOL: - if (value === false) - return ed ? false : void 0; - assert_1.assert(typeof value == "boolean"); - return value; - // JSON value will be a decimal string. Either numbers or strings are accepted. 
- case reflection_info_1.ScalarType.UINT64: - case reflection_info_1.ScalarType.FIXED64: - assert_1.assert(typeof value == "number" || typeof value == "string" || typeof value == "bigint"); - let ulong = pb_long_1.PbULong.from(value); - if (ulong.isZero() && !ed) - return void 0; - return ulong.toString(); - // JSON value will be a decimal string. Either numbers or strings are accepted. - case reflection_info_1.ScalarType.INT64: - case reflection_info_1.ScalarType.SFIXED64: - case reflection_info_1.ScalarType.SINT64: - assert_1.assert(typeof value == "number" || typeof value == "string" || typeof value == "bigint"); - let long = pb_long_1.PbLong.from(value); - if (long.isZero() && !ed) - return void 0; - return long.toString(); - // bytes: JSON value will be the data encoded as a string using standard base64 encoding with paddings. - // Either standard or URL-safe base64 encoding with/without paddings are accepted. - case reflection_info_1.ScalarType.BYTES: - assert_1.assert(value instanceof Uint8Array); - if (!value.byteLength) - return ed ? "" : void 0; - return base64_1.base64encode(value); + return partial; + } + return decimalFrom1e7( + digitC, + /*needLeadingZeros=*/ + 0 + ) + decimalFrom1e7( + digitB, + /*needLeadingZeros=*/ + digitC + ) + // If the final 1e7 digit didn't need leading zeros, we would have + // returned via the trivial code path at the top. + decimalFrom1e7( + digitA, + /*needLeadingZeros=*/ + 1 + ); + } + exports2.int64toString = int64toString; + function varint32write(value, bytes) { + if (value >= 0) { + while (value > 127) { + bytes.push(value & 127 | 128); + value = value >>> 7; + } + bytes.push(value); + } else { + for (let i = 0; i < 9; i++) { + bytes.push(value & 127 | 128); + value = value >> 7; } + bytes.push(1); } - }; - exports2.ReflectionJsonWriter = ReflectionJsonWriter; + } + exports2.varint32write = varint32write; + function varint32read() { + let b = this.buf[this.pos++]; + let result = b & 127; + if ((b & 128) == 0) { + this.assertBounds(); + return result; + } + b = this.buf[this.pos++]; + result |= (b & 127) << 7; + if ((b & 128) == 0) { + this.assertBounds(); + return result; + } + b = this.buf[this.pos++]; + result |= (b & 127) << 14; + if ((b & 128) == 0) { + this.assertBounds(); + return result; + } + b = this.buf[this.pos++]; + result |= (b & 127) << 21; + if ((b & 128) == 0) { + this.assertBounds(); + return result; + } + b = this.buf[this.pos++]; + result |= (b & 15) << 28; + for (let readBytes = 5; (b & 128) !== 0 && readBytes < 10; readBytes++) + b = this.buf[this.pos++]; + if ((b & 128) != 0) + throw new Error("invalid varint"); + this.assertBounds(); + return result >>> 0; + } + exports2.varint32read = varint32read; } }); -// node_modules/@protobuf-ts/runtime/build/commonjs/reflection-scalar-default.js -var require_reflection_scalar_default = __commonJS({ - "node_modules/@protobuf-ts/runtime/build/commonjs/reflection-scalar-default.js"(exports2) { +// node_modules/@protobuf-ts/runtime/build/commonjs/pb-long.js +var require_pb_long = __commonJS({ + "node_modules/@protobuf-ts/runtime/build/commonjs/pb-long.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.reflectionScalarDefault = void 0; - var reflection_info_1 = require_reflection_info(); - var reflection_long_convert_1 = require_reflection_long_convert(); - var pb_long_1 = require_pb_long(); - function reflectionScalarDefault(type2, longType = reflection_info_1.LongType.STRING) { - switch (type2) { - case 
reflection_info_1.ScalarType.BOOL: - return false; - case reflection_info_1.ScalarType.UINT64: - case reflection_info_1.ScalarType.FIXED64: - return reflection_long_convert_1.reflectionLongConvert(pb_long_1.PbULong.ZERO, longType); - case reflection_info_1.ScalarType.INT64: - case reflection_info_1.ScalarType.SFIXED64: - case reflection_info_1.ScalarType.SINT64: - return reflection_long_convert_1.reflectionLongConvert(pb_long_1.PbLong.ZERO, longType); - case reflection_info_1.ScalarType.DOUBLE: - case reflection_info_1.ScalarType.FLOAT: - return 0; - case reflection_info_1.ScalarType.BYTES: - return new Uint8Array(0); - case reflection_info_1.ScalarType.STRING: - return ""; - default: - return 0; + exports2.PbLong = exports2.PbULong = exports2.detectBi = void 0; + var goog_varint_1 = require_goog_varint(); + var BI; + function detectBi() { + const dv = new DataView(new ArrayBuffer(8)); + const ok = globalThis.BigInt !== void 0 && typeof dv.getBigInt64 === "function" && typeof dv.getBigUint64 === "function" && typeof dv.setBigInt64 === "function" && typeof dv.setBigUint64 === "function"; + BI = ok ? { + MIN: BigInt("-9223372036854775808"), + MAX: BigInt("9223372036854775807"), + UMIN: BigInt("0"), + UMAX: BigInt("18446744073709551615"), + C: BigInt, + V: dv + } : void 0; + } + exports2.detectBi = detectBi; + detectBi(); + function assertBi(bi) { + if (!bi) + throw new Error("BigInt unavailable, see https://github.com/timostamm/protobuf-ts/blob/v1.0.8/MANUAL.md#bigint-support"); + } + var RE_DECIMAL_STR = /^-?[0-9]+$/; + var TWO_PWR_32_DBL2 = 4294967296; + var HALF_2_PWR_32 = 2147483648; + var SharedPbLong = class { + /** + * Create a new instance with the given bits. + */ + constructor(lo, hi) { + this.lo = lo | 0; + this.hi = hi | 0; + } + /** + * Is this instance equal to 0? + */ + isZero() { + return this.lo == 0 && this.hi == 0; + } + /** + * Convert to a native number. + */ + toNumber() { + let result = this.hi * TWO_PWR_32_DBL2 + (this.lo >>> 0); + if (!Number.isSafeInteger(result)) + throw new Error("cannot convert to safe number"); + return result; + } + }; + var PbULong = class _PbULong extends SharedPbLong { + /** + * Create instance from a `string`, `number` or `bigint`. 
+ */ + static from(value) { + if (BI) + switch (typeof value) { + case "string": + if (value == "0") + return this.ZERO; + if (value == "") + throw new Error("string is no integer"); + value = BI.C(value); + case "number": + if (value === 0) + return this.ZERO; + value = BI.C(value); + case "bigint": + if (!value) + return this.ZERO; + if (value < BI.UMIN) + throw new Error("signed value for ulong"); + if (value > BI.UMAX) + throw new Error("ulong too large"); + BI.V.setBigUint64(0, value, true); + return new _PbULong(BI.V.getInt32(0, true), BI.V.getInt32(4, true)); + } + else + switch (typeof value) { + case "string": + if (value == "0") + return this.ZERO; + value = value.trim(); + if (!RE_DECIMAL_STR.test(value)) + throw new Error("string is no integer"); + let [minus, lo, hi] = goog_varint_1.int64fromString(value); + if (minus) + throw new Error("signed value for ulong"); + return new _PbULong(lo, hi); + case "number": + if (value == 0) + return this.ZERO; + if (!Number.isSafeInteger(value)) + throw new Error("number is no integer"); + if (value < 0) + throw new Error("signed value for ulong"); + return new _PbULong(value, value / TWO_PWR_32_DBL2); + } + throw new Error("unknown value " + typeof value); } - } - exports2.reflectionScalarDefault = reflectionScalarDefault; - } -}); - -// node_modules/@protobuf-ts/runtime/build/commonjs/reflection-binary-reader.js -var require_reflection_binary_reader = __commonJS({ - "node_modules/@protobuf-ts/runtime/build/commonjs/reflection-binary-reader.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.ReflectionBinaryReader = void 0; - var binary_format_contract_1 = require_binary_format_contract(); - var reflection_info_1 = require_reflection_info(); - var reflection_long_convert_1 = require_reflection_long_convert(); - var reflection_scalar_default_1 = require_reflection_scalar_default(); - var ReflectionBinaryReader = class { - constructor(info6) { - this.info = info6; + /** + * Convert to decimal string. + */ + toString() { + return BI ? this.toBigInt().toString() : goog_varint_1.int64toString(this.lo, this.hi); } - prepare() { - var _a; - if (!this.fieldNoToField) { - const fieldsInput = (_a = this.info.fields) !== null && _a !== void 0 ? _a : []; - this.fieldNoToField = new Map(fieldsInput.map((field) => [field.no, field])); - } + /** + * Convert to native bigint. + */ + toBigInt() { + assertBi(BI); + BI.V.setInt32(0, this.lo, true); + BI.V.setInt32(4, this.hi, true); + return BI.V.getBigUint64(0, true); } + }; + exports2.PbULong = PbULong; + PbULong.ZERO = new PbULong(0, 0); + var PbLong = class _PbLong extends SharedPbLong { /** - * Reads a message from binary format into the target message. - * - * Repeated fields are appended. Map entries are added, overwriting - * existing keys. - * - * If a message field is already present, it will be merged with the - * new data. + * Create instance from a `string`, `number` or `bigint`. */ - read(reader, message, options, length) { - this.prepare(); - const end = length === void 0 ? reader.len : reader.pos + length; - while (reader.pos < end) { - const [fieldNo, wireType] = reader.tag(), field = this.fieldNoToField.get(fieldNo); - if (!field) { - let u = options.readUnknownField; - if (u == "throw") - throw new Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.info.typeName}`); - let d = reader.skip(wireType); - if (u !== false) - (u === true ? 
binary_format_contract_1.UnknownFieldHandler.onRead : u)(this.info.typeName, message, fieldNo, wireType, d); - continue; - } - let target = message, repeated = field.repeat, localName = field.localName; - if (field.oneof) { - target = target[field.oneof]; - if (target.oneofKind !== localName) - target = message[field.oneof] = { - oneofKind: localName - }; + static from(value) { + if (BI) + switch (typeof value) { + case "string": + if (value == "0") + return this.ZERO; + if (value == "") + throw new Error("string is no integer"); + value = BI.C(value); + case "number": + if (value === 0) + return this.ZERO; + value = BI.C(value); + case "bigint": + if (!value) + return this.ZERO; + if (value < BI.MIN) + throw new Error("signed long too small"); + if (value > BI.MAX) + throw new Error("signed long too large"); + BI.V.setBigInt64(0, value, true); + return new _PbLong(BI.V.getInt32(0, true), BI.V.getInt32(4, true)); } - switch (field.kind) { - case "scalar": - case "enum": - let T = field.kind == "enum" ? reflection_info_1.ScalarType.INT32 : field.T; - let L = field.kind == "scalar" ? field.L : void 0; - if (repeated) { - let arr = target[localName]; - if (wireType == binary_format_contract_1.WireType.LengthDelimited && T != reflection_info_1.ScalarType.STRING && T != reflection_info_1.ScalarType.BYTES) { - let e = reader.uint32() + reader.pos; - while (reader.pos < e) - arr.push(this.scalar(reader, T, L)); - } else - arr.push(this.scalar(reader, T, L)); - } else - target[localName] = this.scalar(reader, T, L); - break; - case "message": - if (repeated) { - let arr = target[localName]; - let msg = field.T().internalBinaryRead(reader, reader.uint32(), options); - arr.push(msg); - } else - target[localName] = field.T().internalBinaryRead(reader, reader.uint32(), options, target[localName]); - break; - case "map": - let [mapKey, mapVal] = this.mapEntry(field, reader, options); - target[localName][mapKey] = mapVal; - break; + else + switch (typeof value) { + case "string": + if (value == "0") + return this.ZERO; + value = value.trim(); + if (!RE_DECIMAL_STR.test(value)) + throw new Error("string is no integer"); + let [minus, lo, hi] = goog_varint_1.int64fromString(value); + if (minus) { + if (hi > HALF_2_PWR_32 || hi == HALF_2_PWR_32 && lo != 0) + throw new Error("signed long too small"); + } else if (hi >= HALF_2_PWR_32) + throw new Error("signed long too large"); + let pbl = new _PbLong(lo, hi); + return minus ? pbl.negate() : pbl; + case "number": + if (value == 0) + return this.ZERO; + if (!Number.isSafeInteger(value)) + throw new Error("number is no integer"); + return value > 0 ? new _PbLong(value, value / TWO_PWR_32_DBL2) : new _PbLong(-value, -value / TWO_PWR_32_DBL2).negate(); } - } + throw new Error("unknown value " + typeof value); } /** - * Read a map field, expecting key field = 1, value field = 2 + * Do we have a minus sign? 
*/ - mapEntry(field, reader, options) { - let length = reader.uint32(); - let end = reader.pos + length; - let key = void 0; - let val = void 0; - while (reader.pos < end) { - let [fieldNo, wireType] = reader.tag(); - switch (fieldNo) { - case 1: - if (field.K == reflection_info_1.ScalarType.BOOL) - key = reader.bool().toString(); - else - key = this.scalar(reader, field.K, reflection_info_1.LongType.STRING); - break; - case 2: - switch (field.V.kind) { - case "scalar": - val = this.scalar(reader, field.V.T, field.V.L); - break; - case "enum": - val = reader.int32(); - break; - case "message": - val = field.V.T().internalBinaryRead(reader, reader.uint32(), options); - break; - } - break; - default: - throw new Error(`Unknown field ${fieldNo} (wire type ${wireType}) in map entry for ${this.info.typeName}#${field.name}`); - } - } - if (key === void 0) { - let keyRaw = reflection_scalar_default_1.reflectionScalarDefault(field.K); - key = field.K == reflection_info_1.ScalarType.BOOL ? keyRaw.toString() : keyRaw; - } - if (val === void 0) - switch (field.V.kind) { - case "scalar": - val = reflection_scalar_default_1.reflectionScalarDefault(field.V.T, field.V.L); - break; - case "enum": - val = 0; - break; - case "message": - val = field.V.T().create(); - break; - } - return [key, val]; + isNegative() { + return (this.hi & HALF_2_PWR_32) !== 0; } - scalar(reader, type2, longType) { - switch (type2) { - case reflection_info_1.ScalarType.INT32: - return reader.int32(); - case reflection_info_1.ScalarType.STRING: - return reader.string(); - case reflection_info_1.ScalarType.BOOL: - return reader.bool(); - case reflection_info_1.ScalarType.DOUBLE: - return reader.double(); - case reflection_info_1.ScalarType.FLOAT: - return reader.float(); - case reflection_info_1.ScalarType.INT64: - return reflection_long_convert_1.reflectionLongConvert(reader.int64(), longType); - case reflection_info_1.ScalarType.UINT64: - return reflection_long_convert_1.reflectionLongConvert(reader.uint64(), longType); - case reflection_info_1.ScalarType.FIXED64: - return reflection_long_convert_1.reflectionLongConvert(reader.fixed64(), longType); - case reflection_info_1.ScalarType.FIXED32: - return reader.fixed32(); - case reflection_info_1.ScalarType.BYTES: - return reader.bytes(); - case reflection_info_1.ScalarType.UINT32: - return reader.uint32(); - case reflection_info_1.ScalarType.SFIXED32: - return reader.sfixed32(); - case reflection_info_1.ScalarType.SFIXED64: - return reflection_long_convert_1.reflectionLongConvert(reader.sfixed64(), longType); - case reflection_info_1.ScalarType.SINT32: - return reader.sint32(); - case reflection_info_1.ScalarType.SINT64: - return reflection_long_convert_1.reflectionLongConvert(reader.sint64(), longType); + /** + * Negate two's complement. + * Invert all the bits and add one to the result. + */ + negate() { + let hi = ~this.hi, lo = this.lo; + if (lo) + lo = ~lo + 1; + else + hi += 1; + return new _PbLong(lo, hi); + } + /** + * Convert to decimal string. + */ + toString() { + if (BI) + return this.toBigInt().toString(); + if (this.isNegative()) { + let n = this.negate(); + return "-" + goog_varint_1.int64toString(n.lo, n.hi); } + return goog_varint_1.int64toString(this.lo, this.hi); + } + /** + * Convert to native bigint. 
+ */ + toBigInt() { + assertBi(BI); + BI.V.setInt32(0, this.lo, true); + BI.V.setInt32(4, this.hi, true); + return BI.V.getBigInt64(0, true); } }; - exports2.ReflectionBinaryReader = ReflectionBinaryReader; + exports2.PbLong = PbLong; + PbLong.ZERO = new PbLong(0, 0); } }); -// node_modules/@protobuf-ts/runtime/build/commonjs/reflection-binary-writer.js -var require_reflection_binary_writer = __commonJS({ - "node_modules/@protobuf-ts/runtime/build/commonjs/reflection-binary-writer.js"(exports2) { +// node_modules/@protobuf-ts/runtime/build/commonjs/binary-reader.js +var require_binary_reader = __commonJS({ + "node_modules/@protobuf-ts/runtime/build/commonjs/binary-reader.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.ReflectionBinaryWriter = void 0; + exports2.BinaryReader = exports2.binaryReadOptions = void 0; var binary_format_contract_1 = require_binary_format_contract(); - var reflection_info_1 = require_reflection_info(); - var assert_1 = require_assert(); var pb_long_1 = require_pb_long(); - var ReflectionBinaryWriter = class { - constructor(info6) { - this.info = info6; - } - prepare() { - if (!this.fields) { - const fieldsInput = this.info.fields ? this.info.fields.concat() : []; - this.fields = fieldsInput.sort((a, b) => a.no - b.no); - } + var goog_varint_1 = require_goog_varint(); + var defaultsRead = { + readUnknownField: true, + readerFactory: (bytes) => new BinaryReader(bytes) + }; + function binaryReadOptions(options) { + return options ? Object.assign(Object.assign({}, defaultsRead), options) : defaultsRead; + } + exports2.binaryReadOptions = binaryReadOptions; + var BinaryReader = class { + constructor(buf, textDecoder) { + this.varint64 = goog_varint_1.varint64read; + this.uint32 = goog_varint_1.varint32read; + this.buf = buf; + this.len = buf.length; + this.pos = 0; + this.view = new DataView(buf.buffer, buf.byteOffset, buf.byteLength); + this.textDecoder = textDecoder !== null && textDecoder !== void 0 ? textDecoder : new TextDecoder("utf-8", { + fatal: true, + ignoreBOM: true + }); } /** - * Writes the message to binary format. + * Reads a tag - field number and wire type. */ - write(message, writer, options) { - this.prepare(); - for (const field of this.fields) { - let value, emitDefault, repeated = field.repeat, localName = field.localName; - if (field.oneof) { - const group = message[field.oneof]; - if (group.oneofKind !== localName) - continue; - value = group[localName]; - emitDefault = true; - } else { - value = message[localName]; - emitDefault = false; - } - switch (field.kind) { - case "scalar": - case "enum": - let T = field.kind == "enum" ? 
reflection_info_1.ScalarType.INT32 : field.T; - if (repeated) { - assert_1.assert(Array.isArray(value)); - if (repeated == reflection_info_1.RepeatType.PACKED) - this.packed(writer, T, field.no, value); - else - for (const item of value) - this.scalar(writer, T, field.no, item, true); - } else if (value === void 0) - assert_1.assert(field.opt); - else - this.scalar(writer, T, field.no, value, emitDefault || field.opt); - break; - case "message": - if (repeated) { - assert_1.assert(Array.isArray(value)); - for (const item of value) - this.message(writer, options, field.T(), field.no, item); - } else { - this.message(writer, options, field.T(), field.no, value); - } - break; - case "map": - assert_1.assert(typeof value == "object" && value !== null); - for (const [key, val] of Object.entries(value)) - this.mapEntry(writer, options, field, key, val); - break; - } - } - let u = options.writeUnknownFields; - if (u !== false) - (u === true ? binary_format_contract_1.UnknownFieldHandler.onWrite : u)(this.info.typeName, message, writer); + tag() { + let tag = this.uint32(), fieldNo = tag >>> 3, wireType = tag & 7; + if (fieldNo <= 0 || wireType < 0 || wireType > 5) + throw new Error("illegal tag: field no " + fieldNo + " wire type " + wireType); + return [fieldNo, wireType]; } - mapEntry(writer, options, field, key, value) { - writer.tag(field.no, binary_format_contract_1.WireType.LengthDelimited); - writer.fork(); - let keyValue = key; - switch (field.K) { - case reflection_info_1.ScalarType.INT32: - case reflection_info_1.ScalarType.FIXED32: - case reflection_info_1.ScalarType.UINT32: - case reflection_info_1.ScalarType.SFIXED32: - case reflection_info_1.ScalarType.SINT32: - keyValue = Number.parseInt(key); - break; - case reflection_info_1.ScalarType.BOOL: - assert_1.assert(key == "true" || key == "false"); - keyValue = key == "true"; + /** + * Skip one element on the wire and return the skipped data. + * Supports WireType.StartGroup since v2.0.0-alpha.23. + */ + skip(wireType) { + let start = this.pos; + switch (wireType) { + case binary_format_contract_1.WireType.Varint: + while (this.buf[this.pos++] & 128) { + } break; - } - this.scalar(writer, field.K, 1, keyValue, true); - switch (field.V.kind) { - case "scalar": - this.scalar(writer, field.V.T, 2, value, true); + case binary_format_contract_1.WireType.Bit64: + this.pos += 4; + case binary_format_contract_1.WireType.Bit32: + this.pos += 4; break; - case "enum": - this.scalar(writer, reflection_info_1.ScalarType.INT32, 2, value, true); + case binary_format_contract_1.WireType.LengthDelimited: + let len = this.uint32(); + this.pos += len; break; - case "message": - this.message(writer, options, field.V.T(), 2, value); + case binary_format_contract_1.WireType.StartGroup: + let t; + while ((t = this.tag()[1]) !== binary_format_contract_1.WireType.EndGroup) { + this.skip(t); + } break; + default: + throw new Error("cant skip wire type " + wireType); } - writer.join(); + this.assertBounds(); + return this.buf.subarray(start, this.pos); } - message(writer, options, handler2, fieldNo, value) { - if (value === void 0) - return; - handler2.internalBinaryWrite(value, writer.tag(fieldNo, binary_format_contract_1.WireType.LengthDelimited).fork(), options); - writer.join(); + /** + * Throws error if position in byte array is out of range. + */ + assertBounds() { + if (this.pos > this.len) + throw new RangeError("premature EOF"); } /** - * Write a single scalar value. + * Read a `int32` field, a signed 32 bit varint. 
+ */ + int32() { + return this.uint32() | 0; + } + /** + * Read a `sint32` field, a signed, zigzag-encoded 32-bit varint. + */ + sint32() { + let zze = this.uint32(); + return zze >>> 1 ^ -(zze & 1); + } + /** + * Read a `int64` field, a signed 64-bit varint. + */ + int64() { + return new pb_long_1.PbLong(...this.varint64()); + } + /** + * Read a `uint64` field, an unsigned 64-bit varint. + */ + uint64() { + return new pb_long_1.PbULong(...this.varint64()); + } + /** + * Read a `sint64` field, a signed, zig-zag-encoded 64-bit varint. + */ + sint64() { + let [lo, hi] = this.varint64(); + let s = -(lo & 1); + lo = (lo >>> 1 | (hi & 1) << 31) ^ s; + hi = hi >>> 1 ^ s; + return new pb_long_1.PbLong(lo, hi); + } + /** + * Read a `bool` field, a variant. + */ + bool() { + let [lo, hi] = this.varint64(); + return lo !== 0 || hi !== 0; + } + /** + * Read a `fixed32` field, an unsigned, fixed-length 32-bit integer. + */ + fixed32() { + return this.view.getUint32((this.pos += 4) - 4, true); + } + /** + * Read a `sfixed32` field, a signed, fixed-length 32-bit integer. + */ + sfixed32() { + return this.view.getInt32((this.pos += 4) - 4, true); + } + /** + * Read a `fixed64` field, an unsigned, fixed-length 64 bit integer. + */ + fixed64() { + return new pb_long_1.PbULong(this.sfixed32(), this.sfixed32()); + } + /** + * Read a `fixed64` field, a signed, fixed-length 64-bit integer. + */ + sfixed64() { + return new pb_long_1.PbLong(this.sfixed32(), this.sfixed32()); + } + /** + * Read a `float` field, 32-bit floating point number. + */ + float() { + return this.view.getFloat32((this.pos += 4) - 4, true); + } + /** + * Read a `double` field, a 64-bit floating point number. */ - scalar(writer, type2, fieldNo, value, emitDefault) { - let [wireType, method, isDefault] = this.scalarInfo(type2, value); - if (!isDefault || emitDefault) { - writer.tag(fieldNo, wireType); - writer[method](value); - } + double() { + return this.view.getFloat64((this.pos += 8) - 8, true); } /** - * Write an array of scalar values in packed format. + * Read a `bytes` field, length-delimited arbitrary data. */ - packed(writer, type2, fieldNo, value) { - if (!value.length) - return; - assert_1.assert(type2 !== reflection_info_1.ScalarType.BYTES && type2 !== reflection_info_1.ScalarType.STRING); - writer.tag(fieldNo, binary_format_contract_1.WireType.LengthDelimited); - writer.fork(); - let [, method] = this.scalarInfo(type2); - for (let i = 0; i < value.length; i++) - writer[method](value[i]); - writer.join(); + bytes() { + let len = this.uint32(); + let start = this.pos; + this.pos += len; + this.assertBounds(); + return this.buf.subarray(start, start + len); } /** - * Get information for writing a scalar value. - * - * Returns tuple: - * [0]: appropriate WireType - * [1]: name of the appropriate method of IBinaryWriter - * [2]: whether the given value is a default value - * - * If argument `value` is omitted, [2] is always false. + * Read a `string` field, length-delimited data converted to UTF-8 text. 
*/ - scalarInfo(type2, value) { - let t = binary_format_contract_1.WireType.Varint; - let m; - let i = value === void 0; - let d = value === 0; - switch (type2) { - case reflection_info_1.ScalarType.INT32: - m = "int32"; - break; - case reflection_info_1.ScalarType.STRING: - d = i || !value.length; - t = binary_format_contract_1.WireType.LengthDelimited; - m = "string"; - break; - case reflection_info_1.ScalarType.BOOL: - d = value === false; - m = "bool"; - break; - case reflection_info_1.ScalarType.UINT32: - m = "uint32"; - break; - case reflection_info_1.ScalarType.DOUBLE: - t = binary_format_contract_1.WireType.Bit64; - m = "double"; - break; - case reflection_info_1.ScalarType.FLOAT: - t = binary_format_contract_1.WireType.Bit32; - m = "float"; - break; - case reflection_info_1.ScalarType.INT64: - d = i || pb_long_1.PbLong.from(value).isZero(); - m = "int64"; - break; - case reflection_info_1.ScalarType.UINT64: - d = i || pb_long_1.PbULong.from(value).isZero(); - m = "uint64"; - break; - case reflection_info_1.ScalarType.FIXED64: - d = i || pb_long_1.PbULong.from(value).isZero(); - t = binary_format_contract_1.WireType.Bit64; - m = "fixed64"; - break; - case reflection_info_1.ScalarType.BYTES: - d = i || !value.byteLength; - t = binary_format_contract_1.WireType.LengthDelimited; - m = "bytes"; - break; - case reflection_info_1.ScalarType.FIXED32: - t = binary_format_contract_1.WireType.Bit32; - m = "fixed32"; - break; - case reflection_info_1.ScalarType.SFIXED32: - t = binary_format_contract_1.WireType.Bit32; - m = "sfixed32"; - break; - case reflection_info_1.ScalarType.SFIXED64: - d = i || pb_long_1.PbLong.from(value).isZero(); - t = binary_format_contract_1.WireType.Bit64; - m = "sfixed64"; - break; - case reflection_info_1.ScalarType.SINT32: - m = "sint32"; - break; - case reflection_info_1.ScalarType.SINT64: - d = i || pb_long_1.PbLong.from(value).isZero(); - m = "sint64"; - break; - } - return [t, m, i || d]; + string() { + return this.textDecoder.decode(this.bytes()); } }; - exports2.ReflectionBinaryWriter = ReflectionBinaryWriter; - } -}); - -// node_modules/@protobuf-ts/runtime/build/commonjs/reflection-create.js -var require_reflection_create = __commonJS({ - "node_modules/@protobuf-ts/runtime/build/commonjs/reflection-create.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.reflectionCreate = void 0; - var reflection_scalar_default_1 = require_reflection_scalar_default(); - var message_type_contract_1 = require_message_type_contract(); - function reflectionCreate(type2) { - const msg = type2.messagePrototype ? 
Object.create(type2.messagePrototype) : Object.defineProperty({}, message_type_contract_1.MESSAGE_TYPE, { value: type2 }); - for (let field of type2.fields) { - let name = field.localName; - if (field.opt) - continue; - if (field.oneof) - msg[field.oneof] = { oneofKind: void 0 }; - else if (field.repeat) - msg[name] = []; - else - switch (field.kind) { - case "scalar": - msg[name] = reflection_scalar_default_1.reflectionScalarDefault(field.T, field.L); - break; - case "enum": - msg[name] = 0; - break; - case "map": - msg[name] = {}; - break; - } - } - return msg; - } - exports2.reflectionCreate = reflectionCreate; + exports2.BinaryReader = BinaryReader; } }); -// node_modules/@protobuf-ts/runtime/build/commonjs/reflection-merge-partial.js -var require_reflection_merge_partial = __commonJS({ - "node_modules/@protobuf-ts/runtime/build/commonjs/reflection-merge-partial.js"(exports2) { +// node_modules/@protobuf-ts/runtime/build/commonjs/assert.js +var require_assert = __commonJS({ + "node_modules/@protobuf-ts/runtime/build/commonjs/assert.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.reflectionMergePartial = void 0; - function reflectionMergePartial(info6, target, source) { - let fieldValue, input = source, output; - for (let field of info6.fields) { - let name = field.localName; - if (field.oneof) { - const group = input[field.oneof]; - if ((group === null || group === void 0 ? void 0 : group.oneofKind) == void 0) { - continue; - } - fieldValue = group[name]; - output = target[field.oneof]; - output.oneofKind = group.oneofKind; - if (fieldValue == void 0) { - delete output[name]; - continue; - } - } else { - fieldValue = input[name]; - output = target; - if (fieldValue == void 0) { - continue; - } - } - if (field.repeat) - output[name].length = fieldValue.length; - switch (field.kind) { - case "scalar": - case "enum": - if (field.repeat) - for (let i = 0; i < fieldValue.length; i++) - output[name][i] = fieldValue[i]; - else - output[name] = fieldValue; - break; - case "message": - let T = field.T(); - if (field.repeat) - for (let i = 0; i < fieldValue.length; i++) - output[name][i] = T.create(fieldValue[i]); - else if (output[name] === void 0) - output[name] = T.create(fieldValue); - else - T.mergePartial(output[name], fieldValue); - break; - case "map": - switch (field.V.kind) { - case "scalar": - case "enum": - Object.assign(output[name], fieldValue); - break; - case "message": - let T2 = field.V.T(); - for (let k of Object.keys(fieldValue)) - output[name][k] = T2.create(fieldValue[k]); - break; - } - break; - } + exports2.assertFloat32 = exports2.assertUInt32 = exports2.assertInt32 = exports2.assertNever = exports2.assert = void 0; + function assert(condition, msg) { + if (!condition) { + throw new Error(msg); } } - exports2.reflectionMergePartial = reflectionMergePartial; - } -}); - -// node_modules/@protobuf-ts/runtime/build/commonjs/reflection-equals.js -var require_reflection_equals = __commonJS({ - "node_modules/@protobuf-ts/runtime/build/commonjs/reflection-equals.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.reflectionEquals = void 0; - var reflection_info_1 = require_reflection_info(); - function reflectionEquals(info6, a, b) { - if (a === b) - return true; - if (!a || !b) - return false; - for (let field of info6.fields) { - let localName = field.localName; - let val_a = field.oneof ? a[field.oneof][localName] : a[localName]; - let val_b = field.oneof ? 
b[field.oneof][localName] : b[localName]; - switch (field.kind) { - case "enum": - case "scalar": - let t = field.kind == "enum" ? reflection_info_1.ScalarType.INT32 : field.T; - if (!(field.repeat ? repeatedPrimitiveEq(t, val_a, val_b) : primitiveEq(t, val_a, val_b))) - return false; - break; - case "map": - if (!(field.V.kind == "message" ? repeatedMsgEq(field.V.T(), objectValues(val_a), objectValues(val_b)) : repeatedPrimitiveEq(field.V.kind == "enum" ? reflection_info_1.ScalarType.INT32 : field.V.T, objectValues(val_a), objectValues(val_b)))) - return false; - break; - case "message": - let T = field.T(); - if (!(field.repeat ? repeatedMsgEq(T, val_a, val_b) : T.equals(val_a, val_b))) - return false; - break; - } - } - return true; + exports2.assert = assert; + function assertNever2(value, msg) { + throw new Error(msg !== null && msg !== void 0 ? msg : "Unexpected object: " + value); } - exports2.reflectionEquals = reflectionEquals; - var objectValues = Object.values; - function primitiveEq(type2, a, b) { - if (a === b) - return true; - if (type2 !== reflection_info_1.ScalarType.BYTES) - return false; - let ba = a; - let bb = b; - if (ba.length !== bb.length) - return false; - for (let i = 0; i < ba.length; i++) - if (ba[i] != bb[i]) - return false; - return true; + exports2.assertNever = assertNever2; + var FLOAT32_MAX = 34028234663852886e22; + var FLOAT32_MIN = -34028234663852886e22; + var UINT32_MAX = 4294967295; + var INT32_MAX = 2147483647; + var INT32_MIN = -2147483648; + function assertInt32(arg) { + if (typeof arg !== "number") + throw new Error("invalid int 32: " + typeof arg); + if (!Number.isInteger(arg) || arg > INT32_MAX || arg < INT32_MIN) + throw new Error("invalid int 32: " + arg); } - function repeatedPrimitiveEq(type2, a, b) { - if (a.length !== b.length) - return false; - for (let i = 0; i < a.length; i++) - if (!primitiveEq(type2, a[i], b[i])) - return false; - return true; + exports2.assertInt32 = assertInt32; + function assertUInt32(arg) { + if (typeof arg !== "number") + throw new Error("invalid uint 32: " + typeof arg); + if (!Number.isInteger(arg) || arg > UINT32_MAX || arg < 0) + throw new Error("invalid uint 32: " + arg); } - function repeatedMsgEq(type2, a, b) { - if (a.length !== b.length) - return false; - for (let i = 0; i < a.length; i++) - if (!type2.equals(a[i], b[i])) - return false; - return true; + exports2.assertUInt32 = assertUInt32; + function assertFloat32(arg) { + if (typeof arg !== "number") + throw new Error("invalid float 32: " + typeof arg); + if (!Number.isFinite(arg)) + return; + if (arg > FLOAT32_MAX || arg < FLOAT32_MIN) + throw new Error("invalid float 32: " + arg); } + exports2.assertFloat32 = assertFloat32; } }); -// node_modules/@protobuf-ts/runtime/build/commonjs/message-type.js -var require_message_type = __commonJS({ - "node_modules/@protobuf-ts/runtime/build/commonjs/message-type.js"(exports2) { +// node_modules/@protobuf-ts/runtime/build/commonjs/binary-writer.js +var require_binary_writer = __commonJS({ + "node_modules/@protobuf-ts/runtime/build/commonjs/binary-writer.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.MessageType = void 0; - var message_type_contract_1 = require_message_type_contract(); - var reflection_info_1 = require_reflection_info(); - var reflection_type_check_1 = require_reflection_type_check(); - var reflection_json_reader_1 = require_reflection_json_reader(); - var reflection_json_writer_1 = require_reflection_json_writer(); - var 
reflection_binary_reader_1 = require_reflection_binary_reader(); - var reflection_binary_writer_1 = require_reflection_binary_writer(); - var reflection_create_1 = require_reflection_create(); - var reflection_merge_partial_1 = require_reflection_merge_partial(); - var json_typings_1 = require_json_typings(); - var json_format_contract_1 = require_json_format_contract(); - var reflection_equals_1 = require_reflection_equals(); - var binary_writer_1 = require_binary_writer(); - var binary_reader_1 = require_binary_reader(); - var baseDescriptors = Object.getOwnPropertyDescriptors(Object.getPrototypeOf({})); - var messageTypeDescriptor = baseDescriptors[message_type_contract_1.MESSAGE_TYPE] = {}; - var MessageType = class { - constructor(name, fields, options) { - this.defaultCheckDepth = 16; - this.typeName = name; - this.fields = fields.map(reflection_info_1.normalizeFieldInfo); - this.options = options !== null && options !== void 0 ? options : {}; - messageTypeDescriptor.value = this; - this.messagePrototype = Object.create(null, baseDescriptors); - this.refTypeCheck = new reflection_type_check_1.ReflectionTypeCheck(this); - this.refJsonReader = new reflection_json_reader_1.ReflectionJsonReader(this); - this.refJsonWriter = new reflection_json_writer_1.ReflectionJsonWriter(this); - this.refBinReader = new reflection_binary_reader_1.ReflectionBinaryReader(this); - this.refBinWriter = new reflection_binary_writer_1.ReflectionBinaryWriter(this); + exports2.BinaryWriter = exports2.binaryWriteOptions = void 0; + var pb_long_1 = require_pb_long(); + var goog_varint_1 = require_goog_varint(); + var assert_1 = require_assert(); + var defaultsWrite = { + writeUnknownFields: true, + writerFactory: () => new BinaryWriter() + }; + function binaryWriteOptions(options) { + return options ? Object.assign(Object.assign({}, defaultsWrite), options) : defaultsWrite; + } + exports2.binaryWriteOptions = binaryWriteOptions; + var BinaryWriter = class { + constructor(textEncoder) { + this.stack = []; + this.textEncoder = textEncoder !== null && textEncoder !== void 0 ? textEncoder : new TextEncoder(); + this.chunks = []; + this.buf = []; } - create(value) { - let message = reflection_create_1.reflectionCreate(this); - if (value !== void 0) { - reflection_merge_partial_1.reflectionMergePartial(this, message, value); + /** + * Return all bytes written and reset this writer. + */ + finish() { + this.chunks.push(new Uint8Array(this.buf)); + let len = 0; + for (let i = 0; i < this.chunks.length; i++) + len += this.chunks[i].length; + let bytes = new Uint8Array(len); + let offset = 0; + for (let i = 0; i < this.chunks.length; i++) { + bytes.set(this.chunks[i], offset); + offset += this.chunks[i].length; } - return message; + this.chunks = []; + return bytes; } /** - * Clone the message. + * Start a new fork for length-delimited data like a message + * or a packed repeated field. * - * Unknown fields are discarded. + * Must be joined later with `join()`. */ - clone(message) { - let copy = this.create(); - reflection_merge_partial_1.reflectionMergePartial(this, copy, message); - return copy; + fork() { + this.stack.push({ chunks: this.chunks, buf: this.buf }); + this.chunks = []; + this.buf = []; + return this; } /** - * Determines whether two message of the same type have the same field values. - * Checks for deep equality, traversing repeated fields, oneof groups, maps - * and messages recursively. - * Will also return true if both messages are `undefined`. + * Join the last fork. 
Write its length and bytes, then + * return to the previous state. */ - equals(a, b) { - return reflection_equals_1.reflectionEquals(this, a, b); + join() { + let chunk = this.finish(); + let prev = this.stack.pop(); + if (!prev) + throw new Error("invalid state, fork stack empty"); + this.chunks = prev.chunks; + this.buf = prev.buf; + this.uint32(chunk.byteLength); + return this.raw(chunk); } /** - * Is the given value assignable to our message type - * and contains no [excess properties](https://www.typescriptlang.org/docs/handbook/interfaces.html#excess-property-checks)? + * Writes a tag (field number and wire type). + * + * Equivalent to `uint32( (fieldNo << 3 | type) >>> 0 )`. + * + * Generated code should compute the tag ahead of time and call `uint32()`. */ - is(arg, depth = this.defaultCheckDepth) { - return this.refTypeCheck.is(arg, depth, false); + tag(fieldNo, type2) { + return this.uint32((fieldNo << 3 | type2) >>> 0); } /** - * Is the given value assignable to our message type, - * regardless of [excess properties](https://www.typescriptlang.org/docs/handbook/interfaces.html#excess-property-checks)? + * Write a chunk of raw bytes. */ - isAssignable(arg, depth = this.defaultCheckDepth) { - return this.refTypeCheck.is(arg, depth, true); + raw(chunk) { + if (this.buf.length) { + this.chunks.push(new Uint8Array(this.buf)); + this.buf = []; + } + this.chunks.push(chunk); + return this; } /** - * Copy partial data into the target message. + * Write a `uint32` value, an unsigned 32 bit varint. */ - mergePartial(target, source) { - reflection_merge_partial_1.reflectionMergePartial(this, target, source); + uint32(value) { + assert_1.assertUInt32(value); + while (value > 127) { + this.buf.push(value & 127 | 128); + value = value >>> 7; + } + this.buf.push(value); + return this; } /** - * Create a new message from binary format. + * Write a `int32` value, a signed 32 bit varint. */ - fromBinary(data, options) { - let opt = binary_reader_1.binaryReadOptions(options); - return this.internalBinaryRead(opt.readerFactory(data), data.byteLength, opt); + int32(value) { + assert_1.assertInt32(value); + goog_varint_1.varint32write(value, this.buf); + return this; } /** - * Read a new message from a JSON value. + * Write a `bool` value, a variant. */ - fromJson(json2, options) { - return this.internalJsonRead(json2, json_format_contract_1.jsonReadOptions(options)); + bool(value) { + this.buf.push(value ? 1 : 0); + return this; } /** - * Read a new message from a JSON string. - * This is equivalent to `T.fromJson(JSON.parse(json))`. + * Write a `bytes` value, length-delimited arbitrary data. */ - fromJsonString(json2, options) { - let value = JSON.parse(json2); - return this.fromJson(value, options); + bytes(value) { + this.uint32(value.byteLength); + return this.raw(value); } /** - * Write the message to canonical JSON value. + * Write a `string` value, length-delimited data converted to UTF-8 text. + */ + string(value) { + let chunk = this.textEncoder.encode(value); + this.uint32(chunk.byteLength); + return this.raw(chunk); + } + /** + * Write a `float` value, 32-bit floating point number. + */ + float(value) { + assert_1.assertFloat32(value); + let chunk = new Uint8Array(4); + new DataView(chunk.buffer).setFloat32(0, value, true); + return this.raw(chunk); + } + /** + * Write a `double` value, a 64-bit floating point number. 
+ */ + double(value) { + let chunk = new Uint8Array(8); + new DataView(chunk.buffer).setFloat64(0, value, true); + return this.raw(chunk); + } + /** + * Write a `fixed32` value, an unsigned, fixed-length 32-bit integer. + */ + fixed32(value) { + assert_1.assertUInt32(value); + let chunk = new Uint8Array(4); + new DataView(chunk.buffer).setUint32(0, value, true); + return this.raw(chunk); + } + /** + * Write a `sfixed32` value, a signed, fixed-length 32-bit integer. */ - toJson(message, options) { - return this.internalJsonWrite(message, json_format_contract_1.jsonWriteOptions(options)); + sfixed32(value) { + assert_1.assertInt32(value); + let chunk = new Uint8Array(4); + new DataView(chunk.buffer).setInt32(0, value, true); + return this.raw(chunk); } /** - * Convert the message to canonical JSON string. - * This is equivalent to `JSON.stringify(T.toJson(t))` + * Write a `sint32` value, a signed, zigzag-encoded 32-bit varint. */ - toJsonString(message, options) { - var _a; - let value = this.toJson(message, options); - return JSON.stringify(value, null, (_a = options === null || options === void 0 ? void 0 : options.prettySpaces) !== null && _a !== void 0 ? _a : 0); + sint32(value) { + assert_1.assertInt32(value); + value = (value << 1 ^ value >> 31) >>> 0; + goog_varint_1.varint32write(value, this.buf); + return this; } /** - * Write the message to binary format. + * Write a `fixed64` value, a signed, fixed-length 64-bit integer. */ - toBinary(message, options) { - let opt = binary_writer_1.binaryWriteOptions(options); - return this.internalBinaryWrite(message, opt.writerFactory(), opt).finish(); + sfixed64(value) { + let chunk = new Uint8Array(8); + let view = new DataView(chunk.buffer); + let long = pb_long_1.PbLong.from(value); + view.setInt32(0, long.lo, true); + view.setInt32(4, long.hi, true); + return this.raw(chunk); } /** - * This is an internal method. If you just want to read a message from - * JSON, use `fromJson()` or `fromJsonString()`. - * - * Reads JSON value and merges the fields into the target - * according to protobuf rules. If the target is omitted, - * a new instance is created first. + * Write a `fixed64` value, an unsigned, fixed-length 64 bit integer. */ - internalJsonRead(json2, options, target) { - if (json2 !== null && typeof json2 == "object" && !Array.isArray(json2)) { - let message = target !== null && target !== void 0 ? target : this.create(); - this.refJsonReader.read(json2, message, options); - return message; - } - throw new Error(`Unable to parse message ${this.typeName} from JSON ${json_typings_1.typeofJsonValue(json2)}.`); + fixed64(value) { + let chunk = new Uint8Array(8); + let view = new DataView(chunk.buffer); + let long = pb_long_1.PbULong.from(value); + view.setInt32(0, long.lo, true); + view.setInt32(4, long.hi, true); + return this.raw(chunk); } /** - * This is an internal method. If you just want to write a message - * to JSON, use `toJson()` or `toJsonString(). - * - * Writes JSON value and returns it. + * Write a `int64` value, a signed 64-bit varint. */ - internalJsonWrite(message, options) { - return this.refJsonWriter.write(message, options); + int64(value) { + let long = pb_long_1.PbLong.from(value); + goog_varint_1.varint64write(long.lo, long.hi, this.buf); + return this; } /** - * This is an internal method. If you just want to write a message - * in binary format, use `toBinary()`. - * - * Serializes the message in binary format and appends it to the given - * writer. Returns passed writer. 
+ * Write a `sint64` value, a signed, zig-zag-encoded 64-bit varint. */ - internalBinaryWrite(message, writer, options) { - this.refBinWriter.write(message, writer, options); - return writer; + sint64(value) { + let long = pb_long_1.PbLong.from(value), sign = long.hi >> 31, lo = long.lo << 1 ^ sign, hi = (long.hi << 1 | long.lo >>> 31) ^ sign; + goog_varint_1.varint64write(lo, hi, this.buf); + return this; } /** - * This is an internal method. If you just want to read a message from - * binary data, use `fromBinary()`. - * - * Reads data from binary format and merges the fields into - * the target according to protobuf rules. If the target is - * omitted, a new instance is created first. + * Write a `uint64` value, an unsigned 64-bit varint. */ - internalBinaryRead(reader, length, options, target) { - let message = target !== null && target !== void 0 ? target : this.create(); - this.refBinReader.read(reader, message, options, length); - return message; + uint64(value) { + let long = pb_long_1.PbULong.from(value); + goog_varint_1.varint64write(long.lo, long.hi, this.buf); + return this; } }; - exports2.MessageType = MessageType; + exports2.BinaryWriter = BinaryWriter; } }); -// node_modules/@protobuf-ts/runtime/build/commonjs/reflection-contains-message-type.js -var require_reflection_contains_message_type = __commonJS({ - "node_modules/@protobuf-ts/runtime/build/commonjs/reflection-contains-message-type.js"(exports2) { +// node_modules/@protobuf-ts/runtime/build/commonjs/json-format-contract.js +var require_json_format_contract = __commonJS({ + "node_modules/@protobuf-ts/runtime/build/commonjs/json-format-contract.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.containsMessageType = void 0; - var message_type_contract_1 = require_message_type_contract(); - function containsMessageType(msg) { - return msg[message_type_contract_1.MESSAGE_TYPE] != null; + exports2.mergeJsonOptions = exports2.jsonWriteOptions = exports2.jsonReadOptions = void 0; + var defaultsWrite = { + emitDefaultValues: false, + enumAsInteger: false, + useProtoFieldName: false, + prettySpaces: 0 + }; + var defaultsRead = { + ignoreUnknownFields: false + }; + function jsonReadOptions(options) { + return options ? Object.assign(Object.assign({}, defaultsRead), options) : defaultsRead; } - exports2.containsMessageType = containsMessageType; + exports2.jsonReadOptions = jsonReadOptions; + function jsonWriteOptions(options) { + return options ? Object.assign(Object.assign({}, defaultsWrite), options) : defaultsWrite; + } + exports2.jsonWriteOptions = jsonWriteOptions; + function mergeJsonOptions(a, b) { + var _a, _b; + let c = Object.assign(Object.assign({}, a), b); + c.typeRegistry = [...(_a = a === null || a === void 0 ? void 0 : a.typeRegistry) !== null && _a !== void 0 ? _a : [], ...(_b = b === null || b === void 0 ? void 0 : b.typeRegistry) !== null && _b !== void 0 ? 
_b : []]; + return c; + } + exports2.mergeJsonOptions = mergeJsonOptions; } }); -// node_modules/@protobuf-ts/runtime/build/commonjs/enum-object.js -var require_enum_object = __commonJS({ - "node_modules/@protobuf-ts/runtime/build/commonjs/enum-object.js"(exports2) { +// node_modules/@protobuf-ts/runtime/build/commonjs/message-type-contract.js +var require_message_type_contract = __commonJS({ + "node_modules/@protobuf-ts/runtime/build/commonjs/message-type-contract.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.listEnumNumbers = exports2.listEnumNames = exports2.listEnumValues = exports2.isEnumObject = void 0; - function isEnumObject(arg) { - if (typeof arg != "object" || arg === null) { - return false; - } - if (!arg.hasOwnProperty(0)) { - return false; - } - for (let k of Object.keys(arg)) { - let num = parseInt(k); - if (!Number.isNaN(num)) { - let nam = arg[num]; - if (nam === void 0) - return false; - if (arg[nam] !== num) - return false; - } else { - let num2 = arg[k]; - if (num2 === void 0) - return false; - if (typeof num2 !== "number") - return false; - if (arg[num2] === void 0) - return false; - } - } - return true; - } - exports2.isEnumObject = isEnumObject; - function listEnumValues(enumObject) { - if (!isEnumObject(enumObject)) - throw new Error("not a typescript enum object"); - let values = []; - for (let [name, number] of Object.entries(enumObject)) - if (typeof number == "number") - values.push({ name, number }); - return values; - } - exports2.listEnumValues = listEnumValues; - function listEnumNames(enumObject) { - return listEnumValues(enumObject).map((val) => val.name); - } - exports2.listEnumNames = listEnumNames; - function listEnumNumbers(enumObject) { - return listEnumValues(enumObject).map((val) => val.number).filter((num, index, arr) => arr.indexOf(num) == index); - } - exports2.listEnumNumbers = listEnumNumbers; + exports2.MESSAGE_TYPE = void 0; + exports2.MESSAGE_TYPE = /* @__PURE__ */ Symbol.for("protobuf-ts/message-type"); } }); -// node_modules/@protobuf-ts/runtime/build/commonjs/index.js -var require_commonjs16 = __commonJS({ - "node_modules/@protobuf-ts/runtime/build/commonjs/index.js"(exports2) { +// node_modules/@protobuf-ts/runtime/build/commonjs/lower-camel-case.js +var require_lower_camel_case = __commonJS({ + "node_modules/@protobuf-ts/runtime/build/commonjs/lower-camel-case.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - var json_typings_1 = require_json_typings(); - Object.defineProperty(exports2, "typeofJsonValue", { enumerable: true, get: function() { - return json_typings_1.typeofJsonValue; - } }); - Object.defineProperty(exports2, "isJsonObject", { enumerable: true, get: function() { - return json_typings_1.isJsonObject; - } }); - var base64_1 = require_base642(); - Object.defineProperty(exports2, "base64decode", { enumerable: true, get: function() { - return base64_1.base64decode; - } }); - Object.defineProperty(exports2, "base64encode", { enumerable: true, get: function() { - return base64_1.base64encode; - } }); - var protobufjs_utf8_1 = require_protobufjs_utf8(); - Object.defineProperty(exports2, "utf8read", { enumerable: true, get: function() { - return protobufjs_utf8_1.utf8read; - } }); - var binary_format_contract_1 = require_binary_format_contract(); - Object.defineProperty(exports2, "WireType", { enumerable: true, get: function() { - return binary_format_contract_1.WireType; - } }); - Object.defineProperty(exports2, 
"mergeBinaryOptions", { enumerable: true, get: function() { - return binary_format_contract_1.mergeBinaryOptions; - } }); - Object.defineProperty(exports2, "UnknownFieldHandler", { enumerable: true, get: function() { - return binary_format_contract_1.UnknownFieldHandler; - } }); - var binary_reader_1 = require_binary_reader(); - Object.defineProperty(exports2, "BinaryReader", { enumerable: true, get: function() { - return binary_reader_1.BinaryReader; - } }); - Object.defineProperty(exports2, "binaryReadOptions", { enumerable: true, get: function() { - return binary_reader_1.binaryReadOptions; - } }); - var binary_writer_1 = require_binary_writer(); - Object.defineProperty(exports2, "BinaryWriter", { enumerable: true, get: function() { - return binary_writer_1.BinaryWriter; - } }); - Object.defineProperty(exports2, "binaryWriteOptions", { enumerable: true, get: function() { - return binary_writer_1.binaryWriteOptions; - } }); - var pb_long_1 = require_pb_long(); - Object.defineProperty(exports2, "PbLong", { enumerable: true, get: function() { - return pb_long_1.PbLong; - } }); - Object.defineProperty(exports2, "PbULong", { enumerable: true, get: function() { - return pb_long_1.PbULong; - } }); - var json_format_contract_1 = require_json_format_contract(); - Object.defineProperty(exports2, "jsonReadOptions", { enumerable: true, get: function() { - return json_format_contract_1.jsonReadOptions; - } }); - Object.defineProperty(exports2, "jsonWriteOptions", { enumerable: true, get: function() { - return json_format_contract_1.jsonWriteOptions; - } }); - Object.defineProperty(exports2, "mergeJsonOptions", { enumerable: true, get: function() { - return json_format_contract_1.mergeJsonOptions; - } }); - var message_type_contract_1 = require_message_type_contract(); - Object.defineProperty(exports2, "MESSAGE_TYPE", { enumerable: true, get: function() { - return message_type_contract_1.MESSAGE_TYPE; - } }); - var message_type_1 = require_message_type(); - Object.defineProperty(exports2, "MessageType", { enumerable: true, get: function() { - return message_type_1.MessageType; - } }); - var reflection_info_1 = require_reflection_info(); - Object.defineProperty(exports2, "ScalarType", { enumerable: true, get: function() { - return reflection_info_1.ScalarType; - } }); - Object.defineProperty(exports2, "LongType", { enumerable: true, get: function() { - return reflection_info_1.LongType; - } }); - Object.defineProperty(exports2, "RepeatType", { enumerable: true, get: function() { - return reflection_info_1.RepeatType; - } }); - Object.defineProperty(exports2, "normalizeFieldInfo", { enumerable: true, get: function() { - return reflection_info_1.normalizeFieldInfo; - } }); - Object.defineProperty(exports2, "readFieldOptions", { enumerable: true, get: function() { - return reflection_info_1.readFieldOptions; - } }); - Object.defineProperty(exports2, "readFieldOption", { enumerable: true, get: function() { - return reflection_info_1.readFieldOption; - } }); - Object.defineProperty(exports2, "readMessageOption", { enumerable: true, get: function() { - return reflection_info_1.readMessageOption; - } }); - var reflection_type_check_1 = require_reflection_type_check(); - Object.defineProperty(exports2, "ReflectionTypeCheck", { enumerable: true, get: function() { - return reflection_type_check_1.ReflectionTypeCheck; - } }); - var reflection_create_1 = require_reflection_create(); - Object.defineProperty(exports2, "reflectionCreate", { enumerable: true, get: function() { - return 
reflection_create_1.reflectionCreate; - } }); - var reflection_scalar_default_1 = require_reflection_scalar_default(); - Object.defineProperty(exports2, "reflectionScalarDefault", { enumerable: true, get: function() { - return reflection_scalar_default_1.reflectionScalarDefault; - } }); - var reflection_merge_partial_1 = require_reflection_merge_partial(); - Object.defineProperty(exports2, "reflectionMergePartial", { enumerable: true, get: function() { - return reflection_merge_partial_1.reflectionMergePartial; - } }); - var reflection_equals_1 = require_reflection_equals(); - Object.defineProperty(exports2, "reflectionEquals", { enumerable: true, get: function() { - return reflection_equals_1.reflectionEquals; - } }); - var reflection_binary_reader_1 = require_reflection_binary_reader(); - Object.defineProperty(exports2, "ReflectionBinaryReader", { enumerable: true, get: function() { - return reflection_binary_reader_1.ReflectionBinaryReader; - } }); - var reflection_binary_writer_1 = require_reflection_binary_writer(); - Object.defineProperty(exports2, "ReflectionBinaryWriter", { enumerable: true, get: function() { - return reflection_binary_writer_1.ReflectionBinaryWriter; - } }); - var reflection_json_reader_1 = require_reflection_json_reader(); - Object.defineProperty(exports2, "ReflectionJsonReader", { enumerable: true, get: function() { - return reflection_json_reader_1.ReflectionJsonReader; - } }); - var reflection_json_writer_1 = require_reflection_json_writer(); - Object.defineProperty(exports2, "ReflectionJsonWriter", { enumerable: true, get: function() { - return reflection_json_writer_1.ReflectionJsonWriter; - } }); - var reflection_contains_message_type_1 = require_reflection_contains_message_type(); - Object.defineProperty(exports2, "containsMessageType", { enumerable: true, get: function() { - return reflection_contains_message_type_1.containsMessageType; - } }); - var oneof_1 = require_oneof(); - Object.defineProperty(exports2, "isOneofGroup", { enumerable: true, get: function() { - return oneof_1.isOneofGroup; - } }); - Object.defineProperty(exports2, "setOneofValue", { enumerable: true, get: function() { - return oneof_1.setOneofValue; - } }); - Object.defineProperty(exports2, "getOneofValue", { enumerable: true, get: function() { - return oneof_1.getOneofValue; - } }); - Object.defineProperty(exports2, "clearOneofValue", { enumerable: true, get: function() { - return oneof_1.clearOneofValue; - } }); - Object.defineProperty(exports2, "getSelectedOneofValue", { enumerable: true, get: function() { - return oneof_1.getSelectedOneofValue; - } }); - var enum_object_1 = require_enum_object(); - Object.defineProperty(exports2, "listEnumValues", { enumerable: true, get: function() { - return enum_object_1.listEnumValues; - } }); - Object.defineProperty(exports2, "listEnumNames", { enumerable: true, get: function() { - return enum_object_1.listEnumNames; - } }); - Object.defineProperty(exports2, "listEnumNumbers", { enumerable: true, get: function() { - return enum_object_1.listEnumNumbers; - } }); - Object.defineProperty(exports2, "isEnumObject", { enumerable: true, get: function() { - return enum_object_1.isEnumObject; - } }); - var lower_camel_case_1 = require_lower_camel_case(); - Object.defineProperty(exports2, "lowerCamelCase", { enumerable: true, get: function() { - return lower_camel_case_1.lowerCamelCase; - } }); - var assert_1 = require_assert(); - Object.defineProperty(exports2, "assert", { enumerable: true, get: function() { - return assert_1.assert; - } }); - 
Object.defineProperty(exports2, "assertNever", { enumerable: true, get: function() { - return assert_1.assertNever; - } }); - Object.defineProperty(exports2, "assertInt32", { enumerable: true, get: function() { - return assert_1.assertInt32; - } }); - Object.defineProperty(exports2, "assertUInt32", { enumerable: true, get: function() { - return assert_1.assertUInt32; - } }); - Object.defineProperty(exports2, "assertFloat32", { enumerable: true, get: function() { - return assert_1.assertFloat32; - } }); + exports2.lowerCamelCase = void 0; + function lowerCamelCase(snakeCase) { + let capNext = false; + const sb = []; + for (let i = 0; i < snakeCase.length; i++) { + let next = snakeCase.charAt(i); + if (next == "_") { + capNext = true; + } else if (/\d/.test(next)) { + sb.push(next); + capNext = true; + } else if (capNext) { + sb.push(next.toUpperCase()); + capNext = false; + } else if (i == 0) { + sb.push(next.toLowerCase()); + } else { + sb.push(next); + } + } + return sb.join(""); + } + exports2.lowerCamelCase = lowerCamelCase; } }); -// node_modules/@protobuf-ts/runtime-rpc/build/commonjs/reflection-info.js -var require_reflection_info2 = __commonJS({ - "node_modules/@protobuf-ts/runtime-rpc/build/commonjs/reflection-info.js"(exports2) { +// node_modules/@protobuf-ts/runtime/build/commonjs/reflection-info.js +var require_reflection_info = __commonJS({ + "node_modules/@protobuf-ts/runtime/build/commonjs/reflection-info.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.readServiceOption = exports2.readMethodOption = exports2.readMethodOptions = exports2.normalizeMethodInfo = void 0; - var runtime_1 = require_commonjs16(); - function normalizeMethodInfo(method, service) { - var _a, _b, _c; - let m = method; - m.service = service; - m.localName = (_a = m.localName) !== null && _a !== void 0 ? _a : runtime_1.lowerCamelCase(m.name); - m.serverStreaming = !!m.serverStreaming; - m.clientStreaming = !!m.clientStreaming; - m.options = (_b = m.options) !== null && _b !== void 0 ? _b : {}; - m.idempotency = (_c = m.idempotency) !== null && _c !== void 0 ? 
_c : void 0; - return m; + exports2.readMessageOption = exports2.readFieldOption = exports2.readFieldOptions = exports2.normalizeFieldInfo = exports2.RepeatType = exports2.LongType = exports2.ScalarType = void 0; + var lower_camel_case_1 = require_lower_camel_case(); + var ScalarType; + (function(ScalarType2) { + ScalarType2[ScalarType2["DOUBLE"] = 1] = "DOUBLE"; + ScalarType2[ScalarType2["FLOAT"] = 2] = "FLOAT"; + ScalarType2[ScalarType2["INT64"] = 3] = "INT64"; + ScalarType2[ScalarType2["UINT64"] = 4] = "UINT64"; + ScalarType2[ScalarType2["INT32"] = 5] = "INT32"; + ScalarType2[ScalarType2["FIXED64"] = 6] = "FIXED64"; + ScalarType2[ScalarType2["FIXED32"] = 7] = "FIXED32"; + ScalarType2[ScalarType2["BOOL"] = 8] = "BOOL"; + ScalarType2[ScalarType2["STRING"] = 9] = "STRING"; + ScalarType2[ScalarType2["BYTES"] = 12] = "BYTES"; + ScalarType2[ScalarType2["UINT32"] = 13] = "UINT32"; + ScalarType2[ScalarType2["SFIXED32"] = 15] = "SFIXED32"; + ScalarType2[ScalarType2["SFIXED64"] = 16] = "SFIXED64"; + ScalarType2[ScalarType2["SINT32"] = 17] = "SINT32"; + ScalarType2[ScalarType2["SINT64"] = 18] = "SINT64"; + })(ScalarType = exports2.ScalarType || (exports2.ScalarType = {})); + var LongType; + (function(LongType2) { + LongType2[LongType2["BIGINT"] = 0] = "BIGINT"; + LongType2[LongType2["STRING"] = 1] = "STRING"; + LongType2[LongType2["NUMBER"] = 2] = "NUMBER"; + })(LongType = exports2.LongType || (exports2.LongType = {})); + var RepeatType; + (function(RepeatType2) { + RepeatType2[RepeatType2["NO"] = 0] = "NO"; + RepeatType2[RepeatType2["PACKED"] = 1] = "PACKED"; + RepeatType2[RepeatType2["UNPACKED"] = 2] = "UNPACKED"; + })(RepeatType = exports2.RepeatType || (exports2.RepeatType = {})); + function normalizeFieldInfo(field) { + var _a, _b, _c, _d; + field.localName = (_a = field.localName) !== null && _a !== void 0 ? _a : lower_camel_case_1.lowerCamelCase(field.name); + field.jsonName = (_b = field.jsonName) !== null && _b !== void 0 ? _b : lower_camel_case_1.lowerCamelCase(field.name); + field.repeat = (_c = field.repeat) !== null && _c !== void 0 ? _c : RepeatType.NO; + field.opt = (_d = field.opt) !== null && _d !== void 0 ? _d : field.repeat ? false : field.oneof ? false : field.kind == "message"; + return field; } - exports2.normalizeMethodInfo = normalizeMethodInfo; - function readMethodOptions(service, methodName, extensionName, extensionType) { + exports2.normalizeFieldInfo = normalizeFieldInfo; + function readFieldOptions(messageType, fieldName, extensionName, extensionType) { var _a; - const options = (_a = service.methods.find((m, i) => m.localName === methodName || i === methodName)) === null || _a === void 0 ? void 0 : _a.options; + const options = (_a = messageType.fields.find((m, i) => m.localName == fieldName || i == fieldName)) === null || _a === void 0 ? void 0 : _a.options; return options && options[extensionName] ? extensionType.fromJson(options[extensionName]) : void 0; } - exports2.readMethodOptions = readMethodOptions; - function readMethodOption(service, methodName, extensionName, extensionType) { + exports2.readFieldOptions = readFieldOptions; + function readFieldOption(messageType, fieldName, extensionName, extensionType) { var _a; - const options = (_a = service.methods.find((m, i) => m.localName === methodName || i === methodName)) === null || _a === void 0 ? void 0 : _a.options; + const options = (_a = messageType.fields.find((m, i) => m.localName == fieldName || i == fieldName)) === null || _a === void 0 ? 
void 0 : _a.options; if (!options) { return void 0; } - const optionVal = options[extensionName]; - if (optionVal === void 0) { - return optionVal; + const optionVal = options[extensionName]; + if (optionVal === void 0) { + return optionVal; + } + return extensionType ? extensionType.fromJson(optionVal) : optionVal; + } + exports2.readFieldOption = readFieldOption; + function readMessageOption(messageType, extensionName, extensionType) { + const options = messageType.options; + const optionVal = options[extensionName]; + if (optionVal === void 0) { + return optionVal; + } + return extensionType ? extensionType.fromJson(optionVal) : optionVal; + } + exports2.readMessageOption = readMessageOption; + } +}); + +// node_modules/@protobuf-ts/runtime/build/commonjs/oneof.js +var require_oneof = __commonJS({ + "node_modules/@protobuf-ts/runtime/build/commonjs/oneof.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.getSelectedOneofValue = exports2.clearOneofValue = exports2.setUnknownOneofValue = exports2.setOneofValue = exports2.getOneofValue = exports2.isOneofGroup = void 0; + function isOneofGroup(any) { + if (typeof any != "object" || any === null || !any.hasOwnProperty("oneofKind")) { + return false; + } + switch (typeof any.oneofKind) { + case "string": + if (any[any.oneofKind] === void 0) + return false; + return Object.keys(any).length == 2; + case "undefined": + return Object.keys(any).length == 1; + default: + return false; + } + } + exports2.isOneofGroup = isOneofGroup; + function getOneofValue(oneof, kind) { + return oneof[kind]; + } + exports2.getOneofValue = getOneofValue; + function setOneofValue(oneof, kind, value) { + if (oneof.oneofKind !== void 0) { + delete oneof[oneof.oneofKind]; + } + oneof.oneofKind = kind; + if (value !== void 0) { + oneof[kind] = value; + } + } + exports2.setOneofValue = setOneofValue; + function setUnknownOneofValue(oneof, kind, value) { + if (oneof.oneofKind !== void 0) { + delete oneof[oneof.oneofKind]; + } + oneof.oneofKind = kind; + if (value !== void 0 && kind !== void 0) { + oneof[kind] = value; } - return extensionType ? extensionType.fromJson(optionVal) : optionVal; } - exports2.readMethodOption = readMethodOption; - function readServiceOption(service, extensionName, extensionType) { - const options = service.options; - if (!options) { - return void 0; + exports2.setUnknownOneofValue = setUnknownOneofValue; + function clearOneofValue(oneof) { + if (oneof.oneofKind !== void 0) { + delete oneof[oneof.oneofKind]; } - const optionVal = options[extensionName]; - if (optionVal === void 0) { - return optionVal; + oneof.oneofKind = void 0; + } + exports2.clearOneofValue = clearOneofValue; + function getSelectedOneofValue(oneof) { + if (oneof.oneofKind === void 0) { + return void 0; } - return extensionType ? 
extensionType.fromJson(optionVal) : optionVal; + return oneof[oneof.oneofKind]; } - exports2.readServiceOption = readServiceOption; + exports2.getSelectedOneofValue = getSelectedOneofValue; } }); -// node_modules/@protobuf-ts/runtime-rpc/build/commonjs/service-type.js -var require_service_type = __commonJS({ - "node_modules/@protobuf-ts/runtime-rpc/build/commonjs/service-type.js"(exports2) { +// node_modules/@protobuf-ts/runtime/build/commonjs/reflection-type-check.js +var require_reflection_type_check = __commonJS({ + "node_modules/@protobuf-ts/runtime/build/commonjs/reflection-type-check.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.ServiceType = void 0; - var reflection_info_1 = require_reflection_info2(); - var ServiceType = class { - constructor(typeName, methods, options) { - this.typeName = typeName; - this.methods = methods.map((i) => reflection_info_1.normalizeMethodInfo(i, this)); - this.options = options !== null && options !== void 0 ? options : {}; + exports2.ReflectionTypeCheck = void 0; + var reflection_info_1 = require_reflection_info(); + var oneof_1 = require_oneof(); + var ReflectionTypeCheck = class { + constructor(info6) { + var _a; + this.fields = (_a = info6.fields) !== null && _a !== void 0 ? _a : []; } - }; - exports2.ServiceType = ServiceType; - } -}); - -// node_modules/@protobuf-ts/runtime-rpc/build/commonjs/rpc-error.js -var require_rpc_error = __commonJS({ - "node_modules/@protobuf-ts/runtime-rpc/build/commonjs/rpc-error.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.RpcError = void 0; - var RpcError = class extends Error { - constructor(message, code = "UNKNOWN", meta) { - super(message); - this.name = "RpcError"; - Object.setPrototypeOf(this, new.target.prototype); - this.code = code; - this.meta = meta !== null && meta !== void 0 ? meta : {}; + prepare() { + if (this.data) + return; + const req = [], known = [], oneofs = []; + for (let field of this.fields) { + if (field.oneof) { + if (!oneofs.includes(field.oneof)) { + oneofs.push(field.oneof); + req.push(field.oneof); + known.push(field.oneof); + } + } else { + known.push(field.localName); + switch (field.kind) { + case "scalar": + case "enum": + if (!field.opt || field.repeat) + req.push(field.localName); + break; + case "message": + if (field.repeat) + req.push(field.localName); + break; + case "map": + req.push(field.localName); + break; + } + } + } + this.data = { req, known, oneofs: Object.values(oneofs) }; } - toString() { - const l = [this.name + ": " + this.message]; - if (this.code) { - l.push(""); - l.push("Code: " + this.code); + /** + * Is the argument a valid message as specified by the + * reflection information? + * + * Checks all field types recursively. The `depth` + * specifies how deep into the structure the check will be. + * + * With a depth of 0, only the presence of fields + * is checked. + * + * With a depth of 1 or more, the field types are checked. + * + * With a depth of 2 or more, the members of map, repeated + * and message fields are checked. + * + * Message fields will be checked recursively with depth - 1. + * + * The number of map entries / repeated values being checked + * is < depth. 
+ */ + is(message, depth, allowExcessProperties = false) { + if (depth < 0) + return true; + if (message === null || message === void 0 || typeof message != "object") + return false; + this.prepare(); + let keys = Object.keys(message), data = this.data; + if (keys.length < data.req.length || data.req.some((n) => !keys.includes(n))) + return false; + if (!allowExcessProperties) { + if (keys.some((k) => !data.known.includes(k))) + return false; } - if (this.serviceName && this.methodName) { - l.push("Method: " + this.serviceName + "/" + this.methodName); + if (depth < 1) { + return true; } - let m = Object.entries(this.meta); - if (m.length) { - l.push(""); - l.push("Meta:"); - for (let [k, v] of m) { - l.push(` ${k}: ${v}`); - } + for (const name of data.oneofs) { + const group = message[name]; + if (!oneof_1.isOneofGroup(group)) + return false; + if (group.oneofKind === void 0) + continue; + const field = this.fields.find((f) => f.localName === group.oneofKind); + if (!field) + return false; + if (!this.field(group[group.oneofKind], field, allowExcessProperties, depth)) + return false; + } + for (const field of this.fields) { + if (field.oneof !== void 0) + continue; + if (!this.field(message[field.localName], field, allowExcessProperties, depth)) + return false; + } + return true; + } + field(arg, field, allowExcessProperties, depth) { + let repeated = field.repeat; + switch (field.kind) { + case "scalar": + if (arg === void 0) + return field.opt; + if (repeated) + return this.scalars(arg, field.T, depth, field.L); + return this.scalar(arg, field.T, field.L); + case "enum": + if (arg === void 0) + return field.opt; + if (repeated) + return this.scalars(arg, reflection_info_1.ScalarType.INT32, depth); + return this.scalar(arg, reflection_info_1.ScalarType.INT32); + case "message": + if (arg === void 0) + return true; + if (repeated) + return this.messages(arg, field.T(), allowExcessProperties, depth); + return this.message(arg, field.T(), allowExcessProperties, depth); + case "map": + if (typeof arg != "object" || arg === null) + return false; + if (depth < 2) + return true; + if (!this.mapKeys(arg, field.K, depth)) + return false; + switch (field.V.kind) { + case "scalar": + return this.scalars(Object.values(arg), field.V.T, depth, field.V.L); + case "enum": + return this.scalars(Object.values(arg), reflection_info_1.ScalarType.INT32, depth); + case "message": + return this.messages(Object.values(arg), field.V.T(), allowExcessProperties, depth); + } + break; + } + return true; + } + message(arg, type2, allowExcessProperties, depth) { + if (allowExcessProperties) { + return type2.isAssignable(arg, depth); + } + return type2.is(arg, depth); + } + messages(arg, type2, allowExcessProperties, depth) { + if (!Array.isArray(arg)) + return false; + if (depth < 2) + return true; + if (allowExcessProperties) { + for (let i = 0; i < arg.length && i < depth; i++) + if (!type2.isAssignable(arg[i], depth - 1)) + return false; + } else { + for (let i = 0; i < arg.length && i < depth; i++) + if (!type2.is(arg[i], depth - 1)) + return false; + } + return true; + } + scalar(arg, type2, longType) { + let argType = typeof arg; + switch (type2) { + case reflection_info_1.ScalarType.UINT64: + case reflection_info_1.ScalarType.FIXED64: + case reflection_info_1.ScalarType.INT64: + case reflection_info_1.ScalarType.SFIXED64: + case reflection_info_1.ScalarType.SINT64: + switch (longType) { + case reflection_info_1.LongType.BIGINT: + return argType == "bigint"; + case reflection_info_1.LongType.NUMBER: + return 
argType == "number" && !isNaN(arg); + default: + return argType == "string"; + } + case reflection_info_1.ScalarType.BOOL: + return argType == "boolean"; + case reflection_info_1.ScalarType.STRING: + return argType == "string"; + case reflection_info_1.ScalarType.BYTES: + return arg instanceof Uint8Array; + case reflection_info_1.ScalarType.DOUBLE: + case reflection_info_1.ScalarType.FLOAT: + return argType == "number" && !isNaN(arg); + default: + return argType == "number" && Number.isInteger(arg); + } + } + scalars(arg, type2, depth, longType) { + if (!Array.isArray(arg)) + return false; + if (depth < 2) + return true; + if (Array.isArray(arg)) { + for (let i = 0; i < arg.length && i < depth; i++) + if (!this.scalar(arg[i], type2, longType)) + return false; + } + return true; + } + mapKeys(map2, type2, depth) { + let keys = Object.keys(map2); + switch (type2) { + case reflection_info_1.ScalarType.INT32: + case reflection_info_1.ScalarType.FIXED32: + case reflection_info_1.ScalarType.SFIXED32: + case reflection_info_1.ScalarType.SINT32: + case reflection_info_1.ScalarType.UINT32: + return this.scalars(keys.slice(0, depth).map((k) => parseInt(k)), type2, depth); + case reflection_info_1.ScalarType.BOOL: + return this.scalars(keys.slice(0, depth).map((k) => k == "true" ? true : k == "false" ? false : k), type2, depth); + default: + return this.scalars(keys, type2, depth, reflection_info_1.LongType.STRING); } - return l.join("\n"); } }; - exports2.RpcError = RpcError; + exports2.ReflectionTypeCheck = ReflectionTypeCheck; } }); -// node_modules/@protobuf-ts/runtime-rpc/build/commonjs/rpc-options.js -var require_rpc_options = __commonJS({ - "node_modules/@protobuf-ts/runtime-rpc/build/commonjs/rpc-options.js"(exports2) { +// node_modules/@protobuf-ts/runtime/build/commonjs/reflection-long-convert.js +var require_reflection_long_convert = __commonJS({ + "node_modules/@protobuf-ts/runtime/build/commonjs/reflection-long-convert.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.mergeRpcOptions = void 0; - var runtime_1 = require_commonjs16(); - function mergeRpcOptions(defaults, options) { - if (!options) - return defaults; - let o = {}; - copy(defaults, o); - copy(options, o); - for (let key of Object.keys(options)) { - let val = options[key]; - switch (key) { - case "jsonOptions": - o.jsonOptions = runtime_1.mergeJsonOptions(defaults.jsonOptions, o.jsonOptions); - break; - case "binaryOptions": - o.binaryOptions = runtime_1.mergeBinaryOptions(defaults.binaryOptions, o.binaryOptions); - break; - case "meta": - o.meta = {}; - copy(defaults.meta, o.meta); - copy(options.meta, o.meta); - break; - case "interceptors": - o.interceptors = defaults.interceptors ? 
defaults.interceptors.concat(val) : val.concat(); - break; - } - } - return o; - } - exports2.mergeRpcOptions = mergeRpcOptions; - function copy(a, into) { - if (!a) - return; - let c = into; - for (let [k, v] of Object.entries(a)) { - if (v instanceof Date) - c[k] = new Date(v.getTime()); - else if (Array.isArray(v)) - c[k] = v.concat(); - else - c[k] = v; + exports2.reflectionLongConvert = void 0; + var reflection_info_1 = require_reflection_info(); + function reflectionLongConvert(long, type2) { + switch (type2) { + case reflection_info_1.LongType.BIGINT: + return long.toBigInt(); + case reflection_info_1.LongType.NUMBER: + return long.toNumber(); + default: + return long.toString(); } } + exports2.reflectionLongConvert = reflectionLongConvert; } }); -// node_modules/@protobuf-ts/runtime-rpc/build/commonjs/deferred.js -var require_deferred = __commonJS({ - "node_modules/@protobuf-ts/runtime-rpc/build/commonjs/deferred.js"(exports2) { +// node_modules/@protobuf-ts/runtime/build/commonjs/reflection-json-reader.js +var require_reflection_json_reader = __commonJS({ + "node_modules/@protobuf-ts/runtime/build/commonjs/reflection-json-reader.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.Deferred = exports2.DeferredState = void 0; - var DeferredState; - (function(DeferredState2) { - DeferredState2[DeferredState2["PENDING"] = 0] = "PENDING"; - DeferredState2[DeferredState2["REJECTED"] = 1] = "REJECTED"; - DeferredState2[DeferredState2["RESOLVED"] = 2] = "RESOLVED"; - })(DeferredState = exports2.DeferredState || (exports2.DeferredState = {})); - var Deferred = class { + exports2.ReflectionJsonReader = void 0; + var json_typings_1 = require_json_typings(); + var base64_1 = require_base642(); + var reflection_info_1 = require_reflection_info(); + var pb_long_1 = require_pb_long(); + var assert_1 = require_assert(); + var reflection_long_convert_1 = require_reflection_long_convert(); + var ReflectionJsonReader = class { + constructor(info6) { + this.info = info6; + } + prepare() { + var _a; + if (this.fMap === void 0) { + this.fMap = {}; + const fieldsInput = (_a = this.info.fields) !== null && _a !== void 0 ? _a : []; + for (const field of fieldsInput) { + this.fMap[field.name] = field; + this.fMap[field.jsonName] = field; + this.fMap[field.localName] = field; + } + } + } + // Cannot parse JSON for #. + assert(condition, fieldName, jsonValue) { + if (!condition) { + let what = json_typings_1.typeofJsonValue(jsonValue); + if (what == "number" || what == "boolean") + what = jsonValue.toString(); + throw new Error(`Cannot parse JSON ${what} for ${this.info.typeName}#${fieldName}`); + } + } /** - * @param preventUnhandledRejectionWarning - prevents the warning - * "Unhandled Promise rejection" by adding a noop rejection handler. - * Working with calls returned from the runtime-rpc package in an - * async function usually means awaiting one call property after - * the other. This means that the "status" is not being awaited when - * an earlier await for the "headers" is rejected. This causes the - * "unhandled promise reject" warning. A more correct behaviour for - * calls might be to become aware whether at least one of the - * promises is handled and swallow the rejection warning for the - * others. + * Reads a message from canonical JSON format into the target message. + * + * Repeated fields are appended. Map entries are added, overwriting + * existing keys. 
+ * + * If a message field is already present, it will be merged with the + * new data. */ - constructor(preventUnhandledRejectionWarning = true) { - this._state = DeferredState.PENDING; - this._promise = new Promise((resolve6, reject) => { - this._resolve = resolve6; - this._reject = reject; - }); - if (preventUnhandledRejectionWarning) { - this._promise.catch((_) => { - }); + read(input, message, options) { + this.prepare(); + const oneofsHandled = []; + for (const [jsonKey, jsonValue] of Object.entries(input)) { + const field = this.fMap[jsonKey]; + if (!field) { + if (!options.ignoreUnknownFields) + throw new Error(`Found unknown field while reading ${this.info.typeName} from JSON format. JSON key: ${jsonKey}`); + continue; + } + const localName = field.localName; + let target; + if (field.oneof) { + if (jsonValue === null && (field.kind !== "enum" || field.T()[0] !== "google.protobuf.NullValue")) { + continue; + } + if (oneofsHandled.includes(field.oneof)) + throw new Error(`Multiple members of the oneof group "${field.oneof}" of ${this.info.typeName} are present in JSON.`); + oneofsHandled.push(field.oneof); + target = message[field.oneof] = { + oneofKind: localName + }; + } else { + target = message; + } + if (field.kind == "map") { + if (jsonValue === null) { + continue; + } + this.assert(json_typings_1.isJsonObject(jsonValue), field.name, jsonValue); + const fieldObj = target[localName]; + for (const [jsonObjKey, jsonObjValue] of Object.entries(jsonValue)) { + this.assert(jsonObjValue !== null, field.name + " map value", null); + let val; + switch (field.V.kind) { + case "message": + val = field.V.T().internalJsonRead(jsonObjValue, options); + break; + case "enum": + val = this.enum(field.V.T(), jsonObjValue, field.name, options.ignoreUnknownFields); + if (val === false) + continue; + break; + case "scalar": + val = this.scalar(jsonObjValue, field.V.T, field.V.L, field.name); + break; + } + this.assert(val !== void 0, field.name + " map value", jsonObjValue); + let key = jsonObjKey; + if (field.K == reflection_info_1.ScalarType.BOOL) + key = key == "true" ? true : key == "false" ? 
false : key; + key = this.scalar(key, field.K, reflection_info_1.LongType.STRING, field.name).toString(); + fieldObj[key] = val; + } + } else if (field.repeat) { + if (jsonValue === null) + continue; + this.assert(Array.isArray(jsonValue), field.name, jsonValue); + const fieldArr = target[localName]; + for (const jsonItem of jsonValue) { + this.assert(jsonItem !== null, field.name, null); + let val; + switch (field.kind) { + case "message": + val = field.T().internalJsonRead(jsonItem, options); + break; + case "enum": + val = this.enum(field.T(), jsonItem, field.name, options.ignoreUnknownFields); + if (val === false) + continue; + break; + case "scalar": + val = this.scalar(jsonItem, field.T, field.L, field.name); + break; + } + this.assert(val !== void 0, field.name, jsonValue); + fieldArr.push(val); + } + } else { + switch (field.kind) { + case "message": + if (jsonValue === null && field.T().typeName != "google.protobuf.Value") { + this.assert(field.oneof === void 0, field.name + " (oneof member)", null); + continue; + } + target[localName] = field.T().internalJsonRead(jsonValue, options, target[localName]); + break; + case "enum": + if (jsonValue === null) + continue; + let val = this.enum(field.T(), jsonValue, field.name, options.ignoreUnknownFields); + if (val === false) + continue; + target[localName] = val; + break; + case "scalar": + if (jsonValue === null) + continue; + target[localName] = this.scalar(jsonValue, field.T, field.L, field.name); + break; + } + } } } /** - * Get the current state of the promise. - */ - get state() { - return this._state; - } - /** - * Get the deferred promise. - */ - get promise() { - return this._promise; - } - /** - * Resolve the promise. Throws if the promise is already resolved or rejected. - */ - resolve(value) { - if (this.state !== DeferredState.PENDING) - throw new Error(`cannot resolve ${DeferredState[this.state].toLowerCase()}`); - this._resolve(value); - this._state = DeferredState.RESOLVED; - } - /** - * Reject the promise. Throws if the promise is already resolved or rejected. - */ - reject(reason) { - if (this.state !== DeferredState.PENDING) - throw new Error(`cannot reject ${DeferredState[this.state].toLowerCase()}`); - this._reject(reason); - this._state = DeferredState.REJECTED; - } - /** - * Resolve the promise. Ignore if not pending. + * Returns `false` for unrecognized string representations. + * + * google.protobuf.NullValue accepts only JSON `null` (or the old `"NULL_VALUE"`). 
*/ - resolvePending(val) { - if (this._state === DeferredState.PENDING) - this.resolve(val); + enum(type2, json2, fieldName, ignoreUnknownFields) { + if (type2[0] == "google.protobuf.NullValue") + assert_1.assert(json2 === null || json2 === "NULL_VALUE", `Unable to parse field ${this.info.typeName}#${fieldName}, enum ${type2[0]} only accepts null.`); + if (json2 === null) + return 0; + switch (typeof json2) { + case "number": + assert_1.assert(Number.isInteger(json2), `Unable to parse field ${this.info.typeName}#${fieldName}, enum can only be integral number, got ${json2}.`); + return json2; + case "string": + let localEnumName = json2; + if (type2[2] && json2.substring(0, type2[2].length) === type2[2]) + localEnumName = json2.substring(type2[2].length); + let enumNumber = type2[1][localEnumName]; + if (typeof enumNumber === "undefined" && ignoreUnknownFields) { + return false; + } + assert_1.assert(typeof enumNumber == "number", `Unable to parse field ${this.info.typeName}#${fieldName}, enum ${type2[0]} has no value for "${json2}".`); + return enumNumber; + } + assert_1.assert(false, `Unable to parse field ${this.info.typeName}#${fieldName}, cannot parse enum value from ${typeof json2}".`); } - /** - * Reject the promise. Ignore if not pending. - */ - rejectPending(reason) { - if (this._state === DeferredState.PENDING) - this.reject(reason); + scalar(json2, type2, longType, fieldName) { + let e; + try { + switch (type2) { + // float, double: JSON value will be a number or one of the special string values "NaN", "Infinity", and "-Infinity". + // Either numbers or strings are accepted. Exponent notation is also accepted. + case reflection_info_1.ScalarType.DOUBLE: + case reflection_info_1.ScalarType.FLOAT: + if (json2 === null) + return 0; + if (json2 === "NaN") + return Number.NaN; + if (json2 === "Infinity") + return Number.POSITIVE_INFINITY; + if (json2 === "-Infinity") + return Number.NEGATIVE_INFINITY; + if (json2 === "") { + e = "empty string"; + break; + } + if (typeof json2 == "string" && json2.trim().length !== json2.length) { + e = "extra whitespace"; + break; + } + if (typeof json2 != "string" && typeof json2 != "number") { + break; + } + let float2 = Number(json2); + if (Number.isNaN(float2)) { + e = "not a number"; + break; + } + if (!Number.isFinite(float2)) { + e = "too large or small"; + break; + } + if (type2 == reflection_info_1.ScalarType.FLOAT) + assert_1.assertFloat32(float2); + return float2; + // int32, fixed32, uint32: JSON value will be a decimal number. Either numbers or strings are accepted. + case reflection_info_1.ScalarType.INT32: + case reflection_info_1.ScalarType.FIXED32: + case reflection_info_1.ScalarType.SFIXED32: + case reflection_info_1.ScalarType.SINT32: + case reflection_info_1.ScalarType.UINT32: + if (json2 === null) + return 0; + let int32; + if (typeof json2 == "number") + int32 = json2; + else if (json2 === "") + e = "empty string"; + else if (typeof json2 == "string") { + if (json2.trim().length !== json2.length) + e = "extra whitespace"; + else + int32 = Number(json2); + } + if (int32 === void 0) + break; + if (type2 == reflection_info_1.ScalarType.UINT32) + assert_1.assertUInt32(int32); + else + assert_1.assertInt32(int32); + return int32; + // int64, fixed64, uint64: JSON value will be a decimal string. Either numbers or strings are accepted. 
+ case reflection_info_1.ScalarType.INT64: + case reflection_info_1.ScalarType.SFIXED64: + case reflection_info_1.ScalarType.SINT64: + if (json2 === null) + return reflection_long_convert_1.reflectionLongConvert(pb_long_1.PbLong.ZERO, longType); + if (typeof json2 != "number" && typeof json2 != "string") + break; + return reflection_long_convert_1.reflectionLongConvert(pb_long_1.PbLong.from(json2), longType); + case reflection_info_1.ScalarType.FIXED64: + case reflection_info_1.ScalarType.UINT64: + if (json2 === null) + return reflection_long_convert_1.reflectionLongConvert(pb_long_1.PbULong.ZERO, longType); + if (typeof json2 != "number" && typeof json2 != "string") + break; + return reflection_long_convert_1.reflectionLongConvert(pb_long_1.PbULong.from(json2), longType); + // bool: + case reflection_info_1.ScalarType.BOOL: + if (json2 === null) + return false; + if (typeof json2 !== "boolean") + break; + return json2; + // string: + case reflection_info_1.ScalarType.STRING: + if (json2 === null) + return ""; + if (typeof json2 !== "string") { + e = "extra whitespace"; + break; + } + try { + encodeURIComponent(json2); + } catch (e2) { + e2 = "invalid UTF8"; + break; + } + return json2; + // bytes: JSON value will be the data encoded as a string using standard base64 encoding with paddings. + // Either standard or URL-safe base64 encoding with/without paddings are accepted. + case reflection_info_1.ScalarType.BYTES: + if (json2 === null || json2 === "") + return new Uint8Array(0); + if (typeof json2 !== "string") + break; + return base64_1.base64decode(json2); + } + } catch (error3) { + e = error3.message; + } + this.assert(false, fieldName + (e ? " - " + e : ""), json2); } }; - exports2.Deferred = Deferred; + exports2.ReflectionJsonReader = ReflectionJsonReader; } }); -// node_modules/@protobuf-ts/runtime-rpc/build/commonjs/rpc-output-stream.js -var require_rpc_output_stream = __commonJS({ - "node_modules/@protobuf-ts/runtime-rpc/build/commonjs/rpc-output-stream.js"(exports2) { +// node_modules/@protobuf-ts/runtime/build/commonjs/reflection-json-writer.js +var require_reflection_json_writer = __commonJS({ + "node_modules/@protobuf-ts/runtime/build/commonjs/reflection-json-writer.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.RpcOutputStreamController = void 0; - var deferred_1 = require_deferred(); - var runtime_1 = require_commonjs16(); - var RpcOutputStreamController = class { - constructor() { - this._lis = { - nxt: [], - msg: [], - err: [], - cmp: [] - }; - this._closed = false; - this._itState = { q: [] }; - } - // --- RpcOutputStream callback API - onNext(callback) { - return this.addLis(callback, this._lis.nxt); - } - onMessage(callback) { - return this.addLis(callback, this._lis.msg); - } - onError(callback) { - return this.addLis(callback, this._lis.err); - } - onComplete(callback) { - return this.addLis(callback, this._lis.cmp); - } - addLis(callback, list) { - list.push(callback); - return () => { - let i = list.indexOf(callback); - if (i >= 0) - list.splice(i, 1); - }; - } - // remove all listeners - clearLis() { - for (let l of Object.values(this._lis)) - l.splice(0, l.length); - } - // --- Controller API - /** - * Is this stream already closed by a completion or error? - */ - get closed() { - return this._closed !== false; - } - /** - * Emit message, close with error, or close successfully, but only one - * at a time. - * Can be used to wrap a stream by using the other stream's `onNext`. 
- */ - notifyNext(message, error3, complete) { - runtime_1.assert((message ? 1 : 0) + (error3 ? 1 : 0) + (complete ? 1 : 0) <= 1, "only one emission at a time"); - if (message) - this.notifyMessage(message); - if (error3) - this.notifyError(error3); - if (complete) - this.notifyComplete(); + exports2.ReflectionJsonWriter = void 0; + var base64_1 = require_base642(); + var pb_long_1 = require_pb_long(); + var reflection_info_1 = require_reflection_info(); + var assert_1 = require_assert(); + var ReflectionJsonWriter = class { + constructor(info6) { + var _a; + this.fields = (_a = info6.fields) !== null && _a !== void 0 ? _a : []; } /** - * Emits a new message. Throws if stream is closed. - * - * Triggers onNext and onMessage callbacks. + * Converts the message to a JSON object, based on the field descriptors. */ - notifyMessage(message) { - runtime_1.assert(!this.closed, "stream is closed"); - this.pushIt({ value: message, done: false }); - this._lis.msg.forEach((l) => l(message)); - this._lis.nxt.forEach((l) => l(message, void 0, false)); + write(message, options) { + const json2 = {}, source = message; + for (const field of this.fields) { + if (!field.oneof) { + let jsonValue2 = this.field(field, source[field.localName], options); + if (jsonValue2 !== void 0) + json2[options.useProtoFieldName ? field.name : field.jsonName] = jsonValue2; + continue; + } + const group = source[field.oneof]; + if (group.oneofKind !== field.localName) + continue; + const opt = field.kind == "scalar" || field.kind == "enum" ? Object.assign(Object.assign({}, options), { emitDefaultValues: true }) : options; + let jsonValue = this.field(field, group[field.localName], opt); + assert_1.assert(jsonValue !== void 0); + json2[options.useProtoFieldName ? field.name : field.jsonName] = jsonValue; + } + return json2; } - /** - * Closes the stream with an error. Throws if stream is closed. - * - * Triggers onNext and onError callbacks. 
- */ - notifyError(error3) { - runtime_1.assert(!this.closed, "stream is closed"); - this._closed = error3; - this.pushIt(error3); - this._lis.err.forEach((l) => l(error3)); - this._lis.nxt.forEach((l) => l(void 0, error3, false)); - this.clearLis(); + field(field, value, options) { + let jsonValue = void 0; + if (field.kind == "map") { + assert_1.assert(typeof value == "object" && value !== null); + const jsonObj = {}; + switch (field.V.kind) { + case "scalar": + for (const [entryKey, entryValue] of Object.entries(value)) { + const val = this.scalar(field.V.T, entryValue, field.name, false, true); + assert_1.assert(val !== void 0); + jsonObj[entryKey.toString()] = val; + } + break; + case "message": + const messageType = field.V.T(); + for (const [entryKey, entryValue] of Object.entries(value)) { + const val = this.message(messageType, entryValue, field.name, options); + assert_1.assert(val !== void 0); + jsonObj[entryKey.toString()] = val; + } + break; + case "enum": + const enumInfo = field.V.T(); + for (const [entryKey, entryValue] of Object.entries(value)) { + assert_1.assert(entryValue === void 0 || typeof entryValue == "number"); + const val = this.enum(enumInfo, entryValue, field.name, false, true, options.enumAsInteger); + assert_1.assert(val !== void 0); + jsonObj[entryKey.toString()] = val; + } + break; + } + if (options.emitDefaultValues || Object.keys(jsonObj).length > 0) + jsonValue = jsonObj; + } else if (field.repeat) { + assert_1.assert(Array.isArray(value)); + const jsonArr = []; + switch (field.kind) { + case "scalar": + for (let i = 0; i < value.length; i++) { + const val = this.scalar(field.T, value[i], field.name, field.opt, true); + assert_1.assert(val !== void 0); + jsonArr.push(val); + } + break; + case "enum": + const enumInfo = field.T(); + for (let i = 0; i < value.length; i++) { + assert_1.assert(value[i] === void 0 || typeof value[i] == "number"); + const val = this.enum(enumInfo, value[i], field.name, field.opt, true, options.enumAsInteger); + assert_1.assert(val !== void 0); + jsonArr.push(val); + } + break; + case "message": + const messageType = field.T(); + for (let i = 0; i < value.length; i++) { + const val = this.message(messageType, value[i], field.name, options); + assert_1.assert(val !== void 0); + jsonArr.push(val); + } + break; + } + if (options.emitDefaultValues || jsonArr.length > 0 || options.emitDefaultValues) + jsonValue = jsonArr; + } else { + switch (field.kind) { + case "scalar": + jsonValue = this.scalar(field.T, value, field.name, field.opt, options.emitDefaultValues); + break; + case "enum": + jsonValue = this.enum(field.T(), value, field.name, field.opt, options.emitDefaultValues, options.enumAsInteger); + break; + case "message": + jsonValue = this.message(field.T(), value, field.name, options); + break; + } + } + return jsonValue; } /** - * Closes the stream successfully. Throws if stream is closed. - * - * Triggers onNext and onComplete callbacks. + * Returns `null` as the default for google.protobuf.NullValue. */ - notifyComplete() { - runtime_1.assert(!this.closed, "stream is closed"); - this._closed = true; - this.pushIt({ value: null, done: true }); - this._lis.cmp.forEach((l) => l()); - this._lis.nxt.forEach((l) => l(void 0, void 0, true)); - this.clearLis(); + enum(type2, value, fieldName, optional, emitDefaultValues, enumAsInteger) { + if (type2[0] == "google.protobuf.NullValue") + return !emitDefaultValues && !optional ? 
void 0 : null; + if (value === void 0) { + assert_1.assert(optional); + return void 0; + } + if (value === 0 && !emitDefaultValues && !optional) + return void 0; + assert_1.assert(typeof value == "number"); + assert_1.assert(Number.isInteger(value)); + if (enumAsInteger || !type2[1].hasOwnProperty(value)) + return value; + if (type2[2]) + return type2[2] + type2[1][value]; + return type2[1][value]; } - /** - * Creates an async iterator (that can be used with `for await {...}`) - * to consume the stream. - * - * Some things to note: - * - If an error occurs, the `for await` will throw it. - * - If an error occurred before the `for await` was started, `for await` - * will re-throw it. - * - If the stream is already complete, the `for await` will be empty. - * - If your `for await` consumes slower than the stream produces, - * for example because you are relaying messages in a slow operation, - * messages are queued. - */ - [Symbol.asyncIterator]() { - if (this._closed === true) - this.pushIt({ value: null, done: true }); - else if (this._closed !== false) - this.pushIt(this._closed); - return { - next: () => { - let state = this._itState; - runtime_1.assert(state, "bad state"); - runtime_1.assert(!state.p, "iterator contract broken"); - let first = state.q.shift(); - if (first) - return "value" in first ? Promise.resolve(first) : Promise.reject(first); - state.p = new deferred_1.Deferred(); - return state.p.promise; - } - }; + message(type2, value, fieldName, options) { + if (value === void 0) + return options.emitDefaultValues ? null : void 0; + return type2.internalJsonWrite(value, options); } - // "push" a new iterator result. - // this either resolves a pending promise, or enqueues the result. - pushIt(result) { - let state = this._itState; - if (state.p) { - const p = state.p; - runtime_1.assert(p.state == deferred_1.DeferredState.PENDING, "iterator contract broken"); - "value" in result ? p.resolve(result) : p.reject(result); - delete state.p; - } else { - state.q.push(result); + scalar(type2, value, fieldName, optional, emitDefaultValues) { + if (value === void 0) { + assert_1.assert(optional); + return void 0; + } + const ed = emitDefaultValues || optional; + switch (type2) { + // int32, fixed32, uint32: JSON value will be a decimal number. Either numbers or strings are accepted. + case reflection_info_1.ScalarType.INT32: + case reflection_info_1.ScalarType.SFIXED32: + case reflection_info_1.ScalarType.SINT32: + if (value === 0) + return ed ? 0 : void 0; + assert_1.assertInt32(value); + return value; + case reflection_info_1.ScalarType.FIXED32: + case reflection_info_1.ScalarType.UINT32: + if (value === 0) + return ed ? 0 : void 0; + assert_1.assertUInt32(value); + return value; + // float, double: JSON value will be a number or one of the special string values "NaN", "Infinity", and "-Infinity". + // Either numbers or strings are accepted. Exponent notation is also accepted. + case reflection_info_1.ScalarType.FLOAT: + assert_1.assertFloat32(value); + case reflection_info_1.ScalarType.DOUBLE: + if (value === 0) + return ed ? 0 : void 0; + assert_1.assert(typeof value == "number"); + if (Number.isNaN(value)) + return "NaN"; + if (value === Number.POSITIVE_INFINITY) + return "Infinity"; + if (value === Number.NEGATIVE_INFINITY) + return "-Infinity"; + return value; + // string: + case reflection_info_1.ScalarType.STRING: + if (value === "") + return ed ? 
"" : void 0; + assert_1.assert(typeof value == "string"); + return value; + // bool: + case reflection_info_1.ScalarType.BOOL: + if (value === false) + return ed ? false : void 0; + assert_1.assert(typeof value == "boolean"); + return value; + // JSON value will be a decimal string. Either numbers or strings are accepted. + case reflection_info_1.ScalarType.UINT64: + case reflection_info_1.ScalarType.FIXED64: + assert_1.assert(typeof value == "number" || typeof value == "string" || typeof value == "bigint"); + let ulong = pb_long_1.PbULong.from(value); + if (ulong.isZero() && !ed) + return void 0; + return ulong.toString(); + // JSON value will be a decimal string. Either numbers or strings are accepted. + case reflection_info_1.ScalarType.INT64: + case reflection_info_1.ScalarType.SFIXED64: + case reflection_info_1.ScalarType.SINT64: + assert_1.assert(typeof value == "number" || typeof value == "string" || typeof value == "bigint"); + let long = pb_long_1.PbLong.from(value); + if (long.isZero() && !ed) + return void 0; + return long.toString(); + // bytes: JSON value will be the data encoded as a string using standard base64 encoding with paddings. + // Either standard or URL-safe base64 encoding with/without paddings are accepted. + case reflection_info_1.ScalarType.BYTES: + assert_1.assert(value instanceof Uint8Array); + if (!value.byteLength) + return ed ? "" : void 0; + return base64_1.base64encode(value); } } }; - exports2.RpcOutputStreamController = RpcOutputStreamController; + exports2.ReflectionJsonWriter = ReflectionJsonWriter; } }); -// node_modules/@protobuf-ts/runtime-rpc/build/commonjs/unary-call.js -var require_unary_call = __commonJS({ - "node_modules/@protobuf-ts/runtime-rpc/build/commonjs/unary-call.js"(exports2) { +// node_modules/@protobuf-ts/runtime/build/commonjs/reflection-scalar-default.js +var require_reflection_scalar_default = __commonJS({ + "node_modules/@protobuf-ts/runtime/build/commonjs/reflection-scalar-default.js"(exports2) { "use strict"; - var __awaiter2 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { - function adopt(value) { - return value instanceof P ? value : new P(function(resolve6) { - resolve6(value); - }); - } - return new (P || (P = Promise))(function(resolve6, reject) { - function fulfilled(value) { - try { - step(generator.next(value)); - } catch (e) { - reject(e); - } - } - function rejected(value) { - try { - step(generator["throw"](value)); - } catch (e) { - reject(e); - } - } - function step(result) { - result.done ? resolve6(result.value) : adopt(result.value).then(fulfilled, rejected); - } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); - }; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.UnaryCall = void 0; - var UnaryCall = class { - constructor(method, requestHeaders, request2, headers, response, status, trailers) { - this.method = method; - this.requestHeaders = requestHeaders; - this.request = request2; - this.headers = headers; - this.response = response; - this.status = status; - this.trailers = trailers; - } - /** - * If you are only interested in the final outcome of this call, - * you can await it to receive a `FinishedUnaryCall`. - */ - then(onfulfilled, onrejected) { - return this.promiseFinished().then((value) => onfulfilled ? Promise.resolve(onfulfilled(value)) : value, (reason) => onrejected ? 
Promise.resolve(onrejected(reason)) : Promise.reject(reason)); - } - promiseFinished() { - return __awaiter2(this, void 0, void 0, function* () { - let [headers, response, status, trailers] = yield Promise.all([this.headers, this.response, this.status, this.trailers]); - return { - method: this.method, - requestHeaders: this.requestHeaders, - request: this.request, - headers, - response, - status, - trailers - }; - }); + exports2.reflectionScalarDefault = void 0; + var reflection_info_1 = require_reflection_info(); + var reflection_long_convert_1 = require_reflection_long_convert(); + var pb_long_1 = require_pb_long(); + function reflectionScalarDefault(type2, longType = reflection_info_1.LongType.STRING) { + switch (type2) { + case reflection_info_1.ScalarType.BOOL: + return false; + case reflection_info_1.ScalarType.UINT64: + case reflection_info_1.ScalarType.FIXED64: + return reflection_long_convert_1.reflectionLongConvert(pb_long_1.PbULong.ZERO, longType); + case reflection_info_1.ScalarType.INT64: + case reflection_info_1.ScalarType.SFIXED64: + case reflection_info_1.ScalarType.SINT64: + return reflection_long_convert_1.reflectionLongConvert(pb_long_1.PbLong.ZERO, longType); + case reflection_info_1.ScalarType.DOUBLE: + case reflection_info_1.ScalarType.FLOAT: + return 0; + case reflection_info_1.ScalarType.BYTES: + return new Uint8Array(0); + case reflection_info_1.ScalarType.STRING: + return ""; + default: + return 0; } - }; - exports2.UnaryCall = UnaryCall; + } + exports2.reflectionScalarDefault = reflectionScalarDefault; } }); -// node_modules/@protobuf-ts/runtime-rpc/build/commonjs/server-streaming-call.js -var require_server_streaming_call = __commonJS({ - "node_modules/@protobuf-ts/runtime-rpc/build/commonjs/server-streaming-call.js"(exports2) { +// node_modules/@protobuf-ts/runtime/build/commonjs/reflection-binary-reader.js +var require_reflection_binary_reader = __commonJS({ + "node_modules/@protobuf-ts/runtime/build/commonjs/reflection-binary-reader.js"(exports2) { "use strict"; - var __awaiter2 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { - function adopt(value) { - return value instanceof P ? value : new P(function(resolve6) { - resolve6(value); - }); + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.ReflectionBinaryReader = void 0; + var binary_format_contract_1 = require_binary_format_contract(); + var reflection_info_1 = require_reflection_info(); + var reflection_long_convert_1 = require_reflection_long_convert(); + var reflection_scalar_default_1 = require_reflection_scalar_default(); + var ReflectionBinaryReader = class { + constructor(info6) { + this.info = info6; } - return new (P || (P = Promise))(function(resolve6, reject) { - function fulfilled(value) { - try { - step(generator.next(value)); - } catch (e) { - reject(e); - } - } - function rejected(value) { - try { - step(generator["throw"](value)); - } catch (e) { - reject(e); - } - } - function step(result) { - result.done ? resolve6(result.value) : adopt(result.value).then(fulfilled, rejected); + prepare() { + var _a; + if (!this.fieldNoToField) { + const fieldsInput = (_a = this.info.fields) !== null && _a !== void 0 ? 
_a : []; + this.fieldNoToField = new Map(fieldsInput.map((field) => [field.no, field])); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); - }; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.ServerStreamingCall = void 0; - var ServerStreamingCall = class { - constructor(method, requestHeaders, request2, headers, response, status, trailers) { - this.method = method; - this.requestHeaders = requestHeaders; - this.request = request2; - this.headers = headers; - this.responses = response; - this.status = status; - this.trailers = trailers; } /** - * Instead of awaiting the response status and trailers, you can - * just as well await this call itself to receive the server outcome. - * You should first setup some listeners to the `request` to - * see the actual messages the server replied with. + * Reads a message from binary format into the target message. + * + * Repeated fields are appended. Map entries are added, overwriting + * existing keys. + * + * If a message field is already present, it will be merged with the + * new data. */ - then(onfulfilled, onrejected) { - return this.promiseFinished().then((value) => onfulfilled ? Promise.resolve(onfulfilled(value)) : value, (reason) => onrejected ? Promise.resolve(onrejected(reason)) : Promise.reject(reason)); - } - promiseFinished() { - return __awaiter2(this, void 0, void 0, function* () { - let [headers, status, trailers] = yield Promise.all([this.headers, this.status, this.trailers]); - return { - method: this.method, - requestHeaders: this.requestHeaders, - request: this.request, - headers, - status, - trailers - }; - }); - } - }; - exports2.ServerStreamingCall = ServerStreamingCall; - } -}); - -// node_modules/@protobuf-ts/runtime-rpc/build/commonjs/client-streaming-call.js -var require_client_streaming_call = __commonJS({ - "node_modules/@protobuf-ts/runtime-rpc/build/commonjs/client-streaming-call.js"(exports2) { - "use strict"; - var __awaiter2 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { - function adopt(value) { - return value instanceof P ? value : new P(function(resolve6) { - resolve6(value); - }); - } - return new (P || (P = Promise))(function(resolve6, reject) { - function fulfilled(value) { - try { - step(generator.next(value)); - } catch (e) { - reject(e); + read(reader, message, options, length) { + this.prepare(); + const end = length === void 0 ? reader.len : reader.pos + length; + while (reader.pos < end) { + const [fieldNo, wireType] = reader.tag(), field = this.fieldNoToField.get(fieldNo); + if (!field) { + let u = options.readUnknownField; + if (u == "throw") + throw new Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.info.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? binary_format_contract_1.UnknownFieldHandler.onRead : u)(this.info.typeName, message, fieldNo, wireType, d); + continue; } - } - function rejected(value) { - try { - step(generator["throw"](value)); - } catch (e) { - reject(e); + let target = message, repeated = field.repeat, localName = field.localName; + if (field.oneof) { + target = target[field.oneof]; + if (target.oneofKind !== localName) + target = message[field.oneof] = { + oneofKind: localName + }; + } + switch (field.kind) { + case "scalar": + case "enum": + let T = field.kind == "enum" ? reflection_info_1.ScalarType.INT32 : field.T; + let L = field.kind == "scalar" ? 
field.L : void 0; + if (repeated) { + let arr = target[localName]; + if (wireType == binary_format_contract_1.WireType.LengthDelimited && T != reflection_info_1.ScalarType.STRING && T != reflection_info_1.ScalarType.BYTES) { + let e = reader.uint32() + reader.pos; + while (reader.pos < e) + arr.push(this.scalar(reader, T, L)); + } else + arr.push(this.scalar(reader, T, L)); + } else + target[localName] = this.scalar(reader, T, L); + break; + case "message": + if (repeated) { + let arr = target[localName]; + let msg = field.T().internalBinaryRead(reader, reader.uint32(), options); + arr.push(msg); + } else + target[localName] = field.T().internalBinaryRead(reader, reader.uint32(), options, target[localName]); + break; + case "map": + let [mapKey, mapVal] = this.mapEntry(field, reader, options); + target[localName][mapKey] = mapVal; + break; } } - function step(result) { - result.done ? resolve6(result.value) : adopt(result.value).then(fulfilled, rejected); - } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); - }; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.ClientStreamingCall = void 0; - var ClientStreamingCall = class { - constructor(method, requestHeaders, request2, headers, response, status, trailers) { - this.method = method; - this.requestHeaders = requestHeaders; - this.requests = request2; - this.headers = headers; - this.response = response; - this.status = status; - this.trailers = trailers; } /** - * Instead of awaiting the response status and trailers, you can - * just as well await this call itself to receive the server outcome. - * Note that it may still be valid to send more request messages. + * Read a map field, expecting key field = 1, value field = 2 */ - then(onfulfilled, onrejected) { - return this.promiseFinished().then((value) => onfulfilled ? Promise.resolve(onfulfilled(value)) : value, (reason) => onrejected ? Promise.resolve(onrejected(reason)) : Promise.reject(reason)); + mapEntry(field, reader, options) { + let length = reader.uint32(); + let end = reader.pos + length; + let key = void 0; + let val = void 0; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case 1: + if (field.K == reflection_info_1.ScalarType.BOOL) + key = reader.bool().toString(); + else + key = this.scalar(reader, field.K, reflection_info_1.LongType.STRING); + break; + case 2: + switch (field.V.kind) { + case "scalar": + val = this.scalar(reader, field.V.T, field.V.L); + break; + case "enum": + val = reader.int32(); + break; + case "message": + val = field.V.T().internalBinaryRead(reader, reader.uint32(), options); + break; + } + break; + default: + throw new Error(`Unknown field ${fieldNo} (wire type ${wireType}) in map entry for ${this.info.typeName}#${field.name}`); + } + } + if (key === void 0) { + let keyRaw = reflection_scalar_default_1.reflectionScalarDefault(field.K); + key = field.K == reflection_info_1.ScalarType.BOOL ? 
keyRaw.toString() : keyRaw; + } + if (val === void 0) + switch (field.V.kind) { + case "scalar": + val = reflection_scalar_default_1.reflectionScalarDefault(field.V.T, field.V.L); + break; + case "enum": + val = 0; + break; + case "message": + val = field.V.T().create(); + break; + } + return [key, val]; } - promiseFinished() { - return __awaiter2(this, void 0, void 0, function* () { - let [headers, response, status, trailers] = yield Promise.all([this.headers, this.response, this.status, this.trailers]); - return { - method: this.method, - requestHeaders: this.requestHeaders, - headers, - response, - status, - trailers - }; - }); + scalar(reader, type2, longType) { + switch (type2) { + case reflection_info_1.ScalarType.INT32: + return reader.int32(); + case reflection_info_1.ScalarType.STRING: + return reader.string(); + case reflection_info_1.ScalarType.BOOL: + return reader.bool(); + case reflection_info_1.ScalarType.DOUBLE: + return reader.double(); + case reflection_info_1.ScalarType.FLOAT: + return reader.float(); + case reflection_info_1.ScalarType.INT64: + return reflection_long_convert_1.reflectionLongConvert(reader.int64(), longType); + case reflection_info_1.ScalarType.UINT64: + return reflection_long_convert_1.reflectionLongConvert(reader.uint64(), longType); + case reflection_info_1.ScalarType.FIXED64: + return reflection_long_convert_1.reflectionLongConvert(reader.fixed64(), longType); + case reflection_info_1.ScalarType.FIXED32: + return reader.fixed32(); + case reflection_info_1.ScalarType.BYTES: + return reader.bytes(); + case reflection_info_1.ScalarType.UINT32: + return reader.uint32(); + case reflection_info_1.ScalarType.SFIXED32: + return reader.sfixed32(); + case reflection_info_1.ScalarType.SFIXED64: + return reflection_long_convert_1.reflectionLongConvert(reader.sfixed64(), longType); + case reflection_info_1.ScalarType.SINT32: + return reader.sint32(); + case reflection_info_1.ScalarType.SINT64: + return reflection_long_convert_1.reflectionLongConvert(reader.sint64(), longType); + } } }; - exports2.ClientStreamingCall = ClientStreamingCall; + exports2.ReflectionBinaryReader = ReflectionBinaryReader; } }); -// node_modules/@protobuf-ts/runtime-rpc/build/commonjs/duplex-streaming-call.js -var require_duplex_streaming_call = __commonJS({ - "node_modules/@protobuf-ts/runtime-rpc/build/commonjs/duplex-streaming-call.js"(exports2) { +// node_modules/@protobuf-ts/runtime/build/commonjs/reflection-binary-writer.js +var require_reflection_binary_writer = __commonJS({ + "node_modules/@protobuf-ts/runtime/build/commonjs/reflection-binary-writer.js"(exports2) { "use strict"; - var __awaiter2 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { - function adopt(value) { - return value instanceof P ? 
value : new P(function(resolve6) { - resolve6(value); - }); + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.ReflectionBinaryWriter = void 0; + var binary_format_contract_1 = require_binary_format_contract(); + var reflection_info_1 = require_reflection_info(); + var assert_1 = require_assert(); + var pb_long_1 = require_pb_long(); + var ReflectionBinaryWriter = class { + constructor(info6) { + this.info = info6; } - return new (P || (P = Promise))(function(resolve6, reject) { - function fulfilled(value) { - try { - step(generator.next(value)); - } catch (e) { - reject(e); - } - } - function rejected(value) { - try { - step(generator["throw"](value)); - } catch (e) { - reject(e); - } - } - function step(result) { - result.done ? resolve6(result.value) : adopt(result.value).then(fulfilled, rejected); + prepare() { + if (!this.fields) { + const fieldsInput = this.info.fields ? this.info.fields.concat() : []; + this.fields = fieldsInput.sort((a, b) => a.no - b.no); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); - }; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.DuplexStreamingCall = void 0; - var DuplexStreamingCall = class { - constructor(method, requestHeaders, request2, headers, response, status, trailers) { - this.method = method; - this.requestHeaders = requestHeaders; - this.requests = request2; - this.headers = headers; - this.responses = response; - this.status = status; - this.trailers = trailers; } /** - * Instead of awaiting the response status and trailers, you can - * just as well await this call itself to receive the server outcome. - * Note that it may still be valid to send more request messages. + * Writes the message to binary format. */ - then(onfulfilled, onrejected) { - return this.promiseFinished().then((value) => onfulfilled ? Promise.resolve(onfulfilled(value)) : value, (reason) => onrejected ? Promise.resolve(onrejected(reason)) : Promise.reject(reason)); - } - promiseFinished() { - return __awaiter2(this, void 0, void 0, function* () { - let [headers, status, trailers] = yield Promise.all([this.headers, this.status, this.trailers]); - return { - method: this.method, - requestHeaders: this.requestHeaders, - headers, - status, - trailers - }; - }); - } - }; - exports2.DuplexStreamingCall = DuplexStreamingCall; - } -}); - -// node_modules/@protobuf-ts/runtime-rpc/build/commonjs/test-transport.js -var require_test_transport = __commonJS({ - "node_modules/@protobuf-ts/runtime-rpc/build/commonjs/test-transport.js"(exports2) { - "use strict"; - var __awaiter2 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { - function adopt(value) { - return value instanceof P ? value : new P(function(resolve6) { - resolve6(value); - }); - } - return new (P || (P = Promise))(function(resolve6, reject) { - function fulfilled(value) { - try { - step(generator.next(value)); - } catch (e) { - reject(e); + write(message, writer, options) { + this.prepare(); + for (const field of this.fields) { + let value, emitDefault, repeated = field.repeat, localName = field.localName; + if (field.oneof) { + const group = message[field.oneof]; + if (group.oneofKind !== localName) + continue; + value = group[localName]; + emitDefault = true; + } else { + value = message[localName]; + emitDefault = false; + } + switch (field.kind) { + case "scalar": + case "enum": + let T = field.kind == "enum" ? 
reflection_info_1.ScalarType.INT32 : field.T; + if (repeated) { + assert_1.assert(Array.isArray(value)); + if (repeated == reflection_info_1.RepeatType.PACKED) + this.packed(writer, T, field.no, value); + else + for (const item of value) + this.scalar(writer, T, field.no, item, true); + } else if (value === void 0) + assert_1.assert(field.opt); + else + this.scalar(writer, T, field.no, value, emitDefault || field.opt); + break; + case "message": + if (repeated) { + assert_1.assert(Array.isArray(value)); + for (const item of value) + this.message(writer, options, field.T(), field.no, item); + } else { + this.message(writer, options, field.T(), field.no, value); + } + break; + case "map": + assert_1.assert(typeof value == "object" && value !== null); + for (const [key, val] of Object.entries(value)) + this.mapEntry(writer, options, field, key, val); + break; } } - function rejected(value) { - try { - step(generator["throw"](value)); - } catch (e) { - reject(e); - } + let u = options.writeUnknownFields; + if (u !== false) + (u === true ? binary_format_contract_1.UnknownFieldHandler.onWrite : u)(this.info.typeName, message, writer); + } + mapEntry(writer, options, field, key, value) { + writer.tag(field.no, binary_format_contract_1.WireType.LengthDelimited); + writer.fork(); + let keyValue = key; + switch (field.K) { + case reflection_info_1.ScalarType.INT32: + case reflection_info_1.ScalarType.FIXED32: + case reflection_info_1.ScalarType.UINT32: + case reflection_info_1.ScalarType.SFIXED32: + case reflection_info_1.ScalarType.SINT32: + keyValue = Number.parseInt(key); + break; + case reflection_info_1.ScalarType.BOOL: + assert_1.assert(key == "true" || key == "false"); + keyValue = key == "true"; + break; } - function step(result) { - result.done ? resolve6(result.value) : adopt(result.value).then(fulfilled, rejected); + this.scalar(writer, field.K, 1, keyValue, true); + switch (field.V.kind) { + case "scalar": + this.scalar(writer, field.V.T, 2, value, true); + break; + case "enum": + this.scalar(writer, reflection_info_1.ScalarType.INT32, 2, value, true); + break; + case "message": + this.message(writer, options, field.V.T(), 2, value); + break; } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); - }; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.TestTransport = void 0; - var rpc_error_1 = require_rpc_error(); - var runtime_1 = require_commonjs16(); - var rpc_output_stream_1 = require_rpc_output_stream(); - var rpc_options_1 = require_rpc_options(); - var unary_call_1 = require_unary_call(); - var server_streaming_call_1 = require_server_streaming_call(); - var client_streaming_call_1 = require_client_streaming_call(); - var duplex_streaming_call_1 = require_duplex_streaming_call(); - var TestTransport = class _TestTransport { - /** - * Initialize with mock data. Omitted fields have default value. - */ - constructor(data) { - this.suppressUncaughtRejections = true; - this.headerDelay = 10; - this.responseDelay = 50; - this.betweenResponseDelay = 10; - this.afterResponseDelay = 10; - this.data = data !== null && data !== void 0 ? data : {}; + writer.join(); + } + message(writer, options, handler2, fieldNo, value) { + if (value === void 0) + return; + handler2.internalBinaryWrite(value, writer.tag(fieldNo, binary_format_contract_1.WireType.LengthDelimited).fork(), options); + writer.join(); } /** - * Sent message(s) during the last operation. + * Write a single scalar value. 
*/ - get sentMessages() { - if (this.lastInput instanceof TestInputStream) { - return this.lastInput.sent; - } else if (typeof this.lastInput == "object") { - return [this.lastInput.single]; + scalar(writer, type2, fieldNo, value, emitDefault) { + let [wireType, method, isDefault] = this.scalarInfo(type2, value); + if (!isDefault || emitDefault) { + writer.tag(fieldNo, wireType); + writer[method](value); } - return []; } /** - * Sending message(s) completed? + * Write an array of scalar values in packed format. */ - get sendComplete() { - if (this.lastInput instanceof TestInputStream) { - return this.lastInput.completed; - } else if (typeof this.lastInput == "object") { - return true; - } - return false; - } - // Creates a promise for response headers from the mock data. - promiseHeaders() { - var _a; - const headers = (_a = this.data.headers) !== null && _a !== void 0 ? _a : _TestTransport.defaultHeaders; - return headers instanceof rpc_error_1.RpcError ? Promise.reject(headers) : Promise.resolve(headers); - } - // Creates a promise for a single, valid, message from the mock data. - promiseSingleResponse(method) { - if (this.data.response instanceof rpc_error_1.RpcError) { - return Promise.reject(this.data.response); - } - let r; - if (Array.isArray(this.data.response)) { - runtime_1.assert(this.data.response.length > 0); - r = this.data.response[0]; - } else if (this.data.response !== void 0) { - r = this.data.response; - } else { - r = method.O.create(); - } - runtime_1.assert(method.O.is(r)); - return Promise.resolve(r); + packed(writer, type2, fieldNo, value) { + if (!value.length) + return; + assert_1.assert(type2 !== reflection_info_1.ScalarType.BYTES && type2 !== reflection_info_1.ScalarType.STRING); + writer.tag(fieldNo, binary_format_contract_1.WireType.LengthDelimited); + writer.fork(); + let [, method] = this.scalarInfo(type2); + for (let i = 0; i < value.length; i++) + writer[method](value[i]); + writer.join(); } /** - * Pushes response messages from the mock data to the output stream. - * If an error response, status or trailers are mocked, the stream is - * closed with the respective error. - * Otherwise, stream is completed successfully. + * Get information for writing a scalar value. * - * The returned promise resolves when the stream is closed. It should - * not reject. If it does, code is broken. + * Returns tuple: + * [0]: appropriate WireType + * [1]: name of the appropriate method of IBinaryWriter + * [2]: whether the given value is a default value + * + * If argument `value` is omitted, [2] is always false. 
*/ - streamResponses(method, stream2, abort) { - return __awaiter2(this, void 0, void 0, function* () { - const messages = []; - if (this.data.response === void 0) { - messages.push(method.O.create()); - } else if (Array.isArray(this.data.response)) { - for (let msg of this.data.response) { - runtime_1.assert(method.O.is(msg)); - messages.push(msg); - } - } else if (!(this.data.response instanceof rpc_error_1.RpcError)) { - runtime_1.assert(method.O.is(this.data.response)); - messages.push(this.data.response); - } - try { - yield delay2(this.responseDelay, abort)(void 0); - } catch (error3) { - stream2.notifyError(error3); - return; - } - if (this.data.response instanceof rpc_error_1.RpcError) { - stream2.notifyError(this.data.response); - return; - } - for (let msg of messages) { - stream2.notifyMessage(msg); - try { - yield delay2(this.betweenResponseDelay, abort)(void 0); - } catch (error3) { - stream2.notifyError(error3); - return; - } - } - if (this.data.status instanceof rpc_error_1.RpcError) { - stream2.notifyError(this.data.status); - return; - } - if (this.data.trailers instanceof rpc_error_1.RpcError) { - stream2.notifyError(this.data.trailers); - return; - } - stream2.notifyComplete(); - }); - } - // Creates a promise for response status from the mock data. - promiseStatus() { - var _a; - const status = (_a = this.data.status) !== null && _a !== void 0 ? _a : _TestTransport.defaultStatus; - return status instanceof rpc_error_1.RpcError ? Promise.reject(status) : Promise.resolve(status); - } - // Creates a promise for response trailers from the mock data. - promiseTrailers() { - var _a; - const trailers = (_a = this.data.trailers) !== null && _a !== void 0 ? _a : _TestTransport.defaultTrailers; - return trailers instanceof rpc_error_1.RpcError ? 
Promise.reject(trailers) : Promise.resolve(trailers); - } - maybeSuppressUncaught(...promise) { - if (this.suppressUncaughtRejections) { - for (let p of promise) { - p.catch(() => { - }); - } + scalarInfo(type2, value) { + let t = binary_format_contract_1.WireType.Varint; + let m; + let i = value === void 0; + let d = value === 0; + switch (type2) { + case reflection_info_1.ScalarType.INT32: + m = "int32"; + break; + case reflection_info_1.ScalarType.STRING: + d = i || !value.length; + t = binary_format_contract_1.WireType.LengthDelimited; + m = "string"; + break; + case reflection_info_1.ScalarType.BOOL: + d = value === false; + m = "bool"; + break; + case reflection_info_1.ScalarType.UINT32: + m = "uint32"; + break; + case reflection_info_1.ScalarType.DOUBLE: + t = binary_format_contract_1.WireType.Bit64; + m = "double"; + break; + case reflection_info_1.ScalarType.FLOAT: + t = binary_format_contract_1.WireType.Bit32; + m = "float"; + break; + case reflection_info_1.ScalarType.INT64: + d = i || pb_long_1.PbLong.from(value).isZero(); + m = "int64"; + break; + case reflection_info_1.ScalarType.UINT64: + d = i || pb_long_1.PbULong.from(value).isZero(); + m = "uint64"; + break; + case reflection_info_1.ScalarType.FIXED64: + d = i || pb_long_1.PbULong.from(value).isZero(); + t = binary_format_contract_1.WireType.Bit64; + m = "fixed64"; + break; + case reflection_info_1.ScalarType.BYTES: + d = i || !value.byteLength; + t = binary_format_contract_1.WireType.LengthDelimited; + m = "bytes"; + break; + case reflection_info_1.ScalarType.FIXED32: + t = binary_format_contract_1.WireType.Bit32; + m = "fixed32"; + break; + case reflection_info_1.ScalarType.SFIXED32: + t = binary_format_contract_1.WireType.Bit32; + m = "sfixed32"; + break; + case reflection_info_1.ScalarType.SFIXED64: + d = i || pb_long_1.PbLong.from(value).isZero(); + t = binary_format_contract_1.WireType.Bit64; + m = "sfixed64"; + break; + case reflection_info_1.ScalarType.SINT32: + m = "sint32"; + break; + case reflection_info_1.ScalarType.SINT64: + d = i || pb_long_1.PbLong.from(value).isZero(); + m = "sint64"; + break; } + return [t, m, i || d]; } - mergeOptions(options) { - return rpc_options_1.mergeRpcOptions({}, options); - } - unary(method, input, options) { - var _a; - const requestHeaders = (_a = options.meta) !== null && _a !== void 0 ? _a : {}, headersPromise = this.promiseHeaders().then(delay2(this.headerDelay, options.abort)), responsePromise = headersPromise.catch((_) => { - }).then(delay2(this.responseDelay, options.abort)).then((_) => this.promiseSingleResponse(method)), statusPromise = responsePromise.catch((_) => { - }).then(delay2(this.afterResponseDelay, options.abort)).then((_) => this.promiseStatus()), trailersPromise = responsePromise.catch((_) => { - }).then(delay2(this.afterResponseDelay, options.abort)).then((_) => this.promiseTrailers()); - this.maybeSuppressUncaught(statusPromise, trailersPromise); - this.lastInput = { single: input }; - return new unary_call_1.UnaryCall(method, requestHeaders, input, headersPromise, responsePromise, statusPromise, trailersPromise); - } - serverStreaming(method, input, options) { - var _a; - const requestHeaders = (_a = options.meta) !== null && _a !== void 0 ? 
_a : {}, headersPromise = this.promiseHeaders().then(delay2(this.headerDelay, options.abort)), outputStream = new rpc_output_stream_1.RpcOutputStreamController(), responseStreamClosedPromise = headersPromise.then(delay2(this.responseDelay, options.abort)).catch(() => { - }).then(() => this.streamResponses(method, outputStream, options.abort)).then(delay2(this.afterResponseDelay, options.abort)), statusPromise = responseStreamClosedPromise.then(() => this.promiseStatus()), trailersPromise = responseStreamClosedPromise.then(() => this.promiseTrailers()); - this.maybeSuppressUncaught(statusPromise, trailersPromise); - this.lastInput = { single: input }; - return new server_streaming_call_1.ServerStreamingCall(method, requestHeaders, input, headersPromise, outputStream, statusPromise, trailersPromise); - } - clientStreaming(method, options) { - var _a; - const requestHeaders = (_a = options.meta) !== null && _a !== void 0 ? _a : {}, headersPromise = this.promiseHeaders().then(delay2(this.headerDelay, options.abort)), responsePromise = headersPromise.catch((_) => { - }).then(delay2(this.responseDelay, options.abort)).then((_) => this.promiseSingleResponse(method)), statusPromise = responsePromise.catch((_) => { - }).then(delay2(this.afterResponseDelay, options.abort)).then((_) => this.promiseStatus()), trailersPromise = responsePromise.catch((_) => { - }).then(delay2(this.afterResponseDelay, options.abort)).then((_) => this.promiseTrailers()); - this.maybeSuppressUncaught(statusPromise, trailersPromise); - this.lastInput = new TestInputStream(this.data, options.abort); - return new client_streaming_call_1.ClientStreamingCall(method, requestHeaders, this.lastInput, headersPromise, responsePromise, statusPromise, trailersPromise); - } - duplex(method, options) { - var _a; - const requestHeaders = (_a = options.meta) !== null && _a !== void 0 ? _a : {}, headersPromise = this.promiseHeaders().then(delay2(this.headerDelay, options.abort)), outputStream = new rpc_output_stream_1.RpcOutputStreamController(), responseStreamClosedPromise = headersPromise.then(delay2(this.responseDelay, options.abort)).catch(() => { - }).then(() => this.streamResponses(method, outputStream, options.abort)).then(delay2(this.afterResponseDelay, options.abort)), statusPromise = responseStreamClosedPromise.then(() => this.promiseStatus()), trailersPromise = responseStreamClosedPromise.then(() => this.promiseTrailers()); - this.maybeSuppressUncaught(statusPromise, trailersPromise); - this.lastInput = new TestInputStream(this.data, options.abort); - return new duplex_streaming_call_1.DuplexStreamingCall(method, requestHeaders, this.lastInput, headersPromise, outputStream, statusPromise, trailersPromise); - } - }; - exports2.TestTransport = TestTransport; - TestTransport.defaultHeaders = { - responseHeader: "test" - }; - TestTransport.defaultStatus = { - code: "OK", - detail: "all good" - }; - TestTransport.defaultTrailers = { - responseTrailer: "test" }; - function delay2(ms, abort) { - return (v) => new Promise((resolve6, reject) => { - if (abort === null || abort === void 0 ? 
void 0 : abort.aborted) { - reject(new rpc_error_1.RpcError("user cancel", "CANCELLED")); - } else { - const id = setTimeout(() => resolve6(v), ms); - if (abort) { - abort.addEventListener("abort", (ev) => { - clearTimeout(id); - reject(new rpc_error_1.RpcError("user cancel", "CANCELLED")); - }); + exports2.ReflectionBinaryWriter = ReflectionBinaryWriter; + } +}); + +// node_modules/@protobuf-ts/runtime/build/commonjs/reflection-create.js +var require_reflection_create = __commonJS({ + "node_modules/@protobuf-ts/runtime/build/commonjs/reflection-create.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.reflectionCreate = void 0; + var reflection_scalar_default_1 = require_reflection_scalar_default(); + var message_type_contract_1 = require_message_type_contract(); + function reflectionCreate(type2) { + const msg = type2.messagePrototype ? Object.create(type2.messagePrototype) : Object.defineProperty({}, message_type_contract_1.MESSAGE_TYPE, { value: type2 }); + for (let field of type2.fields) { + let name = field.localName; + if (field.opt) + continue; + if (field.oneof) + msg[field.oneof] = { oneofKind: void 0 }; + else if (field.repeat) + msg[name] = []; + else + switch (field.kind) { + case "scalar": + msg[name] = reflection_scalar_default_1.reflectionScalarDefault(field.T, field.L); + break; + case "enum": + msg[name] = 0; + break; + case "map": + msg[name] = {}; + break; } - } - }); - } - var TestInputStream = class { - constructor(data, abort) { - this._completed = false; - this._sent = []; - this.data = data; - this.abort = abort; - } - get sent() { - return this._sent; - } - get completed() { - return this._completed; - } - send(message) { - if (this.data.inputMessage instanceof rpc_error_1.RpcError) { - return Promise.reject(this.data.inputMessage); - } - const delayMs = this.data.inputMessage === void 0 ? 10 : this.data.inputMessage; - return Promise.resolve(void 0).then(() => { - this._sent.push(message); - }).then(delay2(delayMs, this.abort)); - } - complete() { - if (this.data.inputComplete instanceof rpc_error_1.RpcError) { - return Promise.reject(this.data.inputComplete); - } - const delayMs = this.data.inputComplete === void 0 ? 10 : this.data.inputComplete; - return Promise.resolve(void 0).then(() => { - this._completed = true; - }).then(delay2(delayMs, this.abort)); } - }; + return msg; + } + exports2.reflectionCreate = reflectionCreate; } }); -// node_modules/@protobuf-ts/runtime-rpc/build/commonjs/rpc-interceptor.js -var require_rpc_interceptor = __commonJS({ - "node_modules/@protobuf-ts/runtime-rpc/build/commonjs/rpc-interceptor.js"(exports2) { +// node_modules/@protobuf-ts/runtime/build/commonjs/reflection-merge-partial.js +var require_reflection_merge_partial = __commonJS({ + "node_modules/@protobuf-ts/runtime/build/commonjs/reflection-merge-partial.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.stackDuplexStreamingInterceptors = exports2.stackClientStreamingInterceptors = exports2.stackServerStreamingInterceptors = exports2.stackUnaryInterceptors = exports2.stackIntercept = void 0; - var runtime_1 = require_commonjs16(); - function stackIntercept(kind, transport, method, options, input) { - var _a, _b, _c, _d; - if (kind == "unary") { - let tail = (mtd, inp, opt) => transport.unary(mtd, inp, opt); - for (const curr of ((_a = options.interceptors) !== null && _a !== void 0 ? 
_a : []).filter((i) => i.interceptUnary).reverse()) { - const next = tail; - tail = (mtd, inp, opt) => curr.interceptUnary(next, mtd, inp, opt); - } - return tail(method, input, options); - } - if (kind == "serverStreaming") { - let tail = (mtd, inp, opt) => transport.serverStreaming(mtd, inp, opt); - for (const curr of ((_b = options.interceptors) !== null && _b !== void 0 ? _b : []).filter((i) => i.interceptServerStreaming).reverse()) { - const next = tail; - tail = (mtd, inp, opt) => curr.interceptServerStreaming(next, mtd, inp, opt); + exports2.reflectionMergePartial = void 0; + function reflectionMergePartial(info6, target, source) { + let fieldValue, input = source, output; + for (let field of info6.fields) { + let name = field.localName; + if (field.oneof) { + const group = input[field.oneof]; + if ((group === null || group === void 0 ? void 0 : group.oneofKind) == void 0) { + continue; + } + fieldValue = group[name]; + output = target[field.oneof]; + output.oneofKind = group.oneofKind; + if (fieldValue == void 0) { + delete output[name]; + continue; + } + } else { + fieldValue = input[name]; + output = target; + if (fieldValue == void 0) { + continue; + } } - return tail(method, input, options); - } - if (kind == "clientStreaming") { - let tail = (mtd, opt) => transport.clientStreaming(mtd, opt); - for (const curr of ((_c = options.interceptors) !== null && _c !== void 0 ? _c : []).filter((i) => i.interceptClientStreaming).reverse()) { - const next = tail; - tail = (mtd, opt) => curr.interceptClientStreaming(next, mtd, opt); + if (field.repeat) + output[name].length = fieldValue.length; + switch (field.kind) { + case "scalar": + case "enum": + if (field.repeat) + for (let i = 0; i < fieldValue.length; i++) + output[name][i] = fieldValue[i]; + else + output[name] = fieldValue; + break; + case "message": + let T = field.T(); + if (field.repeat) + for (let i = 0; i < fieldValue.length; i++) + output[name][i] = T.create(fieldValue[i]); + else if (output[name] === void 0) + output[name] = T.create(fieldValue); + else + T.mergePartial(output[name], fieldValue); + break; + case "map": + switch (field.V.kind) { + case "scalar": + case "enum": + Object.assign(output[name], fieldValue); + break; + case "message": + let T2 = field.V.T(); + for (let k of Object.keys(fieldValue)) + output[name][k] = T2.create(fieldValue[k]); + break; + } + break; } - return tail(method, options); } - if (kind == "duplex") { - let tail = (mtd, opt) => transport.duplex(mtd, opt); - for (const curr of ((_d = options.interceptors) !== null && _d !== void 0 ? _d : []).filter((i) => i.interceptDuplex).reverse()) { - const next = tail; - tail = (mtd, opt) => curr.interceptDuplex(next, mtd, opt); + } + exports2.reflectionMergePartial = reflectionMergePartial; + } +}); + +// node_modules/@protobuf-ts/runtime/build/commonjs/reflection-equals.js +var require_reflection_equals = __commonJS({ + "node_modules/@protobuf-ts/runtime/build/commonjs/reflection-equals.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.reflectionEquals = void 0; + var reflection_info_1 = require_reflection_info(); + function reflectionEquals(info6, a, b) { + if (a === b) + return true; + if (!a || !b) + return false; + for (let field of info6.fields) { + let localName = field.localName; + let val_a = field.oneof ? a[field.oneof][localName] : a[localName]; + let val_b = field.oneof ? 
b[field.oneof][localName] : b[localName]; + switch (field.kind) { + case "enum": + case "scalar": + let t = field.kind == "enum" ? reflection_info_1.ScalarType.INT32 : field.T; + if (!(field.repeat ? repeatedPrimitiveEq(t, val_a, val_b) : primitiveEq(t, val_a, val_b))) + return false; + break; + case "map": + if (!(field.V.kind == "message" ? repeatedMsgEq(field.V.T(), objectValues(val_a), objectValues(val_b)) : repeatedPrimitiveEq(field.V.kind == "enum" ? reflection_info_1.ScalarType.INT32 : field.V.T, objectValues(val_a), objectValues(val_b)))) + return false; + break; + case "message": + let T = field.T(); + if (!(field.repeat ? repeatedMsgEq(T, val_a, val_b) : T.equals(val_a, val_b))) + return false; + break; } - return tail(method, options); } - runtime_1.assertNever(kind); - } - exports2.stackIntercept = stackIntercept; - function stackUnaryInterceptors(transport, method, input, options) { - return stackIntercept("unary", transport, method, options, input); + return true; } - exports2.stackUnaryInterceptors = stackUnaryInterceptors; - function stackServerStreamingInterceptors(transport, method, input, options) { - return stackIntercept("serverStreaming", transport, method, options, input); + exports2.reflectionEquals = reflectionEquals; + var objectValues = Object.values; + function primitiveEq(type2, a, b) { + if (a === b) + return true; + if (type2 !== reflection_info_1.ScalarType.BYTES) + return false; + let ba = a; + let bb = b; + if (ba.length !== bb.length) + return false; + for (let i = 0; i < ba.length; i++) + if (ba[i] != bb[i]) + return false; + return true; } - exports2.stackServerStreamingInterceptors = stackServerStreamingInterceptors; - function stackClientStreamingInterceptors(transport, method, options) { - return stackIntercept("clientStreaming", transport, method, options); + function repeatedPrimitiveEq(type2, a, b) { + if (a.length !== b.length) + return false; + for (let i = 0; i < a.length; i++) + if (!primitiveEq(type2, a[i], b[i])) + return false; + return true; } - exports2.stackClientStreamingInterceptors = stackClientStreamingInterceptors; - function stackDuplexStreamingInterceptors(transport, method, options) { - return stackIntercept("duplex", transport, method, options); + function repeatedMsgEq(type2, a, b) { + if (a.length !== b.length) + return false; + for (let i = 0; i < a.length; i++) + if (!type2.equals(a[i], b[i])) + return false; + return true; } - exports2.stackDuplexStreamingInterceptors = stackDuplexStreamingInterceptors; } }); -// node_modules/@protobuf-ts/runtime-rpc/build/commonjs/server-call-context.js -var require_server_call_context = __commonJS({ - "node_modules/@protobuf-ts/runtime-rpc/build/commonjs/server-call-context.js"(exports2) { +// node_modules/@protobuf-ts/runtime/build/commonjs/message-type.js +var require_message_type = __commonJS({ + "node_modules/@protobuf-ts/runtime/build/commonjs/message-type.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.ServerCallContextController = void 0; - var ServerCallContextController = class { - constructor(method, headers, deadline, sendResponseHeadersFn, defaultStatus = { code: "OK", detail: "" }) { - this._cancelled = false; - this._listeners = []; - this.method = method; - this.headers = headers; - this.deadline = deadline; - this.trailers = {}; - this._sendRH = sendResponseHeadersFn; - this.status = defaultStatus; + exports2.MessageType = void 0; + var message_type_contract_1 = require_message_type_contract(); + var 
reflection_info_1 = require_reflection_info(); + var reflection_type_check_1 = require_reflection_type_check(); + var reflection_json_reader_1 = require_reflection_json_reader(); + var reflection_json_writer_1 = require_reflection_json_writer(); + var reflection_binary_reader_1 = require_reflection_binary_reader(); + var reflection_binary_writer_1 = require_reflection_binary_writer(); + var reflection_create_1 = require_reflection_create(); + var reflection_merge_partial_1 = require_reflection_merge_partial(); + var json_typings_1 = require_json_typings(); + var json_format_contract_1 = require_json_format_contract(); + var reflection_equals_1 = require_reflection_equals(); + var binary_writer_1 = require_binary_writer(); + var binary_reader_1 = require_binary_reader(); + var baseDescriptors = Object.getOwnPropertyDescriptors(Object.getPrototypeOf({})); + var messageTypeDescriptor = baseDescriptors[message_type_contract_1.MESSAGE_TYPE] = {}; + var MessageType = class { + constructor(name, fields, options) { + this.defaultCheckDepth = 16; + this.typeName = name; + this.fields = fields.map(reflection_info_1.normalizeFieldInfo); + this.options = options !== null && options !== void 0 ? options : {}; + messageTypeDescriptor.value = this; + this.messagePrototype = Object.create(null, baseDescriptors); + this.refTypeCheck = new reflection_type_check_1.ReflectionTypeCheck(this); + this.refJsonReader = new reflection_json_reader_1.ReflectionJsonReader(this); + this.refJsonWriter = new reflection_json_writer_1.ReflectionJsonWriter(this); + this.refBinReader = new reflection_binary_reader_1.ReflectionBinaryReader(this); + this.refBinWriter = new reflection_binary_writer_1.ReflectionBinaryWriter(this); + } + create(value) { + let message = reflection_create_1.reflectionCreate(this); + if (value !== void 0) { + reflection_merge_partial_1.reflectionMergePartial(this, message, value); + } + return message; } /** - * Set the call cancelled. + * Clone the message. * - * Invokes all callbacks registered with onCancel() and - * sets `cancelled = true`. + * Unknown fields are discarded. */ - notifyCancelled() { - if (!this._cancelled) { - this._cancelled = true; - for (let l of this._listeners) { - l(); - } - } + clone(message) { + let copy = this.create(); + reflection_merge_partial_1.reflectionMergePartial(this, copy, message); + return copy; } /** - * Send response headers. + * Determines whether two message of the same type have the same field values. + * Checks for deep equality, traversing repeated fields, oneof groups, maps + * and messages recursively. + * Will also return true if both messages are `undefined`. */ - sendResponseHeaders(data) { - this._sendRH(data); + equals(a, b) { + return reflection_equals_1.reflectionEquals(this, a, b); } /** - * Is the call cancelled? + * Is the given value assignable to our message type + * and contains no [excess properties](https://www.typescriptlang.org/docs/handbook/interfaces.html#excess-property-checks)? + */ + is(arg, depth = this.defaultCheckDepth) { + return this.refTypeCheck.is(arg, depth, false); + } + /** + * Is the given value assignable to our message type, + * regardless of [excess properties](https://www.typescriptlang.org/docs/handbook/interfaces.html#excess-property-checks)? + */ + isAssignable(arg, depth = this.defaultCheckDepth) { + return this.refTypeCheck.is(arg, depth, true); + } + /** + * Copy partial data into the target message. 
+ */ + mergePartial(target, source) { + reflection_merge_partial_1.reflectionMergePartial(this, target, source); + } + /** + * Create a new message from binary format. + */ + fromBinary(data, options) { + let opt = binary_reader_1.binaryReadOptions(options); + return this.internalBinaryRead(opt.readerFactory(data), data.byteLength, opt); + } + /** + * Read a new message from a JSON value. + */ + fromJson(json2, options) { + return this.internalJsonRead(json2, json_format_contract_1.jsonReadOptions(options)); + } + /** + * Read a new message from a JSON string. + * This is equivalent to `T.fromJson(JSON.parse(json))`. + */ + fromJsonString(json2, options) { + let value = JSON.parse(json2); + return this.fromJson(value, options); + } + /** + * Write the message to canonical JSON value. + */ + toJson(message, options) { + return this.internalJsonWrite(message, json_format_contract_1.jsonWriteOptions(options)); + } + /** + * Convert the message to canonical JSON string. + * This is equivalent to `JSON.stringify(T.toJson(t))` + */ + toJsonString(message, options) { + var _a; + let value = this.toJson(message, options); + return JSON.stringify(value, null, (_a = options === null || options === void 0 ? void 0 : options.prettySpaces) !== null && _a !== void 0 ? _a : 0); + } + /** + * Write the message to binary format. + */ + toBinary(message, options) { + let opt = binary_writer_1.binaryWriteOptions(options); + return this.internalBinaryWrite(message, opt.writerFactory(), opt).finish(); + } + /** + * This is an internal method. If you just want to read a message from + * JSON, use `fromJson()` or `fromJsonString()`. * - * When the client closes the connection before the server - * is done, the call is cancelled. + * Reads JSON value and merges the fields into the target + * according to protobuf rules. If the target is omitted, + * a new instance is created first. + */ + internalJsonRead(json2, options, target) { + if (json2 !== null && typeof json2 == "object" && !Array.isArray(json2)) { + let message = target !== null && target !== void 0 ? target : this.create(); + this.refJsonReader.read(json2, message, options); + return message; + } + throw new Error(`Unable to parse message ${this.typeName} from JSON ${json_typings_1.typeofJsonValue(json2)}.`); + } + /** + * This is an internal method. If you just want to write a message + * to JSON, use `toJson()` or `toJsonString(). * - * If you want to cancel a request on the server, throw a - * RpcError with the CANCELLED status code. + * Writes JSON value and returns it. */ - get cancelled() { - return this._cancelled; + internalJsonWrite(message, options) { + return this.refJsonWriter.write(message, options); } /** - * Add a callback for cancellation. + * This is an internal method. If you just want to write a message + * in binary format, use `toBinary()`. + * + * Serializes the message in binary format and appends it to the given + * writer. Returns passed writer. */ - onCancel(callback) { - const l = this._listeners; - l.push(callback); - return () => { - let i = l.indexOf(callback); - if (i >= 0) - l.splice(i, 1); - }; + internalBinaryWrite(message, writer, options) { + this.refBinWriter.write(message, writer, options); + return writer; + } + /** + * This is an internal method. If you just want to read a message from + * binary data, use `fromBinary()`. + * + * Reads data from binary format and merges the fields into + * the target according to protobuf rules. If the target is + * omitted, a new instance is created first. 
+ */ + internalBinaryRead(reader, length, options, target) { + let message = target !== null && target !== void 0 ? target : this.create(); + this.refBinReader.read(reader, message, options, length); + return message; } }; - exports2.ServerCallContextController = ServerCallContextController; + exports2.MessageType = MessageType; } }); -// node_modules/@protobuf-ts/runtime-rpc/build/commonjs/index.js -var require_commonjs17 = __commonJS({ - "node_modules/@protobuf-ts/runtime-rpc/build/commonjs/index.js"(exports2) { +// node_modules/@protobuf-ts/runtime/build/commonjs/reflection-contains-message-type.js +var require_reflection_contains_message_type = __commonJS({ + "node_modules/@protobuf-ts/runtime/build/commonjs/reflection-contains-message-type.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - var service_type_1 = require_service_type(); - Object.defineProperty(exports2, "ServiceType", { enumerable: true, get: function() { - return service_type_1.ServiceType; + exports2.containsMessageType = void 0; + var message_type_contract_1 = require_message_type_contract(); + function containsMessageType(msg) { + return msg[message_type_contract_1.MESSAGE_TYPE] != null; + } + exports2.containsMessageType = containsMessageType; + } +}); + +// node_modules/@protobuf-ts/runtime/build/commonjs/enum-object.js +var require_enum_object = __commonJS({ + "node_modules/@protobuf-ts/runtime/build/commonjs/enum-object.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.listEnumNumbers = exports2.listEnumNames = exports2.listEnumValues = exports2.isEnumObject = void 0; + function isEnumObject(arg) { + if (typeof arg != "object" || arg === null) { + return false; + } + if (!arg.hasOwnProperty(0)) { + return false; + } + for (let k of Object.keys(arg)) { + let num = parseInt(k); + if (!Number.isNaN(num)) { + let nam = arg[num]; + if (nam === void 0) + return false; + if (arg[nam] !== num) + return false; + } else { + let num2 = arg[k]; + if (num2 === void 0) + return false; + if (typeof num2 !== "number") + return false; + if (arg[num2] === void 0) + return false; + } + } + return true; + } + exports2.isEnumObject = isEnumObject; + function listEnumValues(enumObject) { + if (!isEnumObject(enumObject)) + throw new Error("not a typescript enum object"); + let values = []; + for (let [name, number] of Object.entries(enumObject)) + if (typeof number == "number") + values.push({ name, number }); + return values; + } + exports2.listEnumValues = listEnumValues; + function listEnumNames(enumObject) { + return listEnumValues(enumObject).map((val) => val.name); + } + exports2.listEnumNames = listEnumNames; + function listEnumNumbers(enumObject) { + return listEnumValues(enumObject).map((val) => val.number).filter((num, index, arr) => arr.indexOf(num) == index); + } + exports2.listEnumNumbers = listEnumNumbers; + } +}); + +// node_modules/@protobuf-ts/runtime/build/commonjs/index.js +var require_commonjs16 = __commonJS({ + "node_modules/@protobuf-ts/runtime/build/commonjs/index.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + var json_typings_1 = require_json_typings(); + Object.defineProperty(exports2, "typeofJsonValue", { enumerable: true, get: function() { + return json_typings_1.typeofJsonValue; } }); - var reflection_info_1 = require_reflection_info2(); - Object.defineProperty(exports2, "readMethodOptions", { enumerable: true, get: function() { - return 
reflection_info_1.readMethodOptions; + Object.defineProperty(exports2, "isJsonObject", { enumerable: true, get: function() { + return json_typings_1.isJsonObject; } }); - Object.defineProperty(exports2, "readMethodOption", { enumerable: true, get: function() { - return reflection_info_1.readMethodOption; + var base64_1 = require_base642(); + Object.defineProperty(exports2, "base64decode", { enumerable: true, get: function() { + return base64_1.base64decode; } }); - Object.defineProperty(exports2, "readServiceOption", { enumerable: true, get: function() { - return reflection_info_1.readServiceOption; + Object.defineProperty(exports2, "base64encode", { enumerable: true, get: function() { + return base64_1.base64encode; } }); - var rpc_error_1 = require_rpc_error(); - Object.defineProperty(exports2, "RpcError", { enumerable: true, get: function() { - return rpc_error_1.RpcError; + var protobufjs_utf8_1 = require_protobufjs_utf8(); + Object.defineProperty(exports2, "utf8read", { enumerable: true, get: function() { + return protobufjs_utf8_1.utf8read; } }); - var rpc_options_1 = require_rpc_options(); - Object.defineProperty(exports2, "mergeRpcOptions", { enumerable: true, get: function() { - return rpc_options_1.mergeRpcOptions; + var binary_format_contract_1 = require_binary_format_contract(); + Object.defineProperty(exports2, "WireType", { enumerable: true, get: function() { + return binary_format_contract_1.WireType; } }); - var rpc_output_stream_1 = require_rpc_output_stream(); - Object.defineProperty(exports2, "RpcOutputStreamController", { enumerable: true, get: function() { - return rpc_output_stream_1.RpcOutputStreamController; + Object.defineProperty(exports2, "mergeBinaryOptions", { enumerable: true, get: function() { + return binary_format_contract_1.mergeBinaryOptions; } }); - var test_transport_1 = require_test_transport(); - Object.defineProperty(exports2, "TestTransport", { enumerable: true, get: function() { - return test_transport_1.TestTransport; + Object.defineProperty(exports2, "UnknownFieldHandler", { enumerable: true, get: function() { + return binary_format_contract_1.UnknownFieldHandler; } }); - var deferred_1 = require_deferred(); - Object.defineProperty(exports2, "Deferred", { enumerable: true, get: function() { - return deferred_1.Deferred; + var binary_reader_1 = require_binary_reader(); + Object.defineProperty(exports2, "BinaryReader", { enumerable: true, get: function() { + return binary_reader_1.BinaryReader; + } }); + Object.defineProperty(exports2, "binaryReadOptions", { enumerable: true, get: function() { + return binary_reader_1.binaryReadOptions; + } }); + var binary_writer_1 = require_binary_writer(); + Object.defineProperty(exports2, "BinaryWriter", { enumerable: true, get: function() { + return binary_writer_1.BinaryWriter; + } }); + Object.defineProperty(exports2, "binaryWriteOptions", { enumerable: true, get: function() { + return binary_writer_1.binaryWriteOptions; + } }); + var pb_long_1 = require_pb_long(); + Object.defineProperty(exports2, "PbLong", { enumerable: true, get: function() { + return pb_long_1.PbLong; + } }); + Object.defineProperty(exports2, "PbULong", { enumerable: true, get: function() { + return pb_long_1.PbULong; + } }); + var json_format_contract_1 = require_json_format_contract(); + Object.defineProperty(exports2, "jsonReadOptions", { enumerable: true, get: function() { + return json_format_contract_1.jsonReadOptions; + } }); + Object.defineProperty(exports2, "jsonWriteOptions", { enumerable: true, get: function() { + return 
json_format_contract_1.jsonWriteOptions; + } }); + Object.defineProperty(exports2, "mergeJsonOptions", { enumerable: true, get: function() { + return json_format_contract_1.mergeJsonOptions; + } }); + var message_type_contract_1 = require_message_type_contract(); + Object.defineProperty(exports2, "MESSAGE_TYPE", { enumerable: true, get: function() { + return message_type_contract_1.MESSAGE_TYPE; + } }); + var message_type_1 = require_message_type(); + Object.defineProperty(exports2, "MessageType", { enumerable: true, get: function() { + return message_type_1.MessageType; + } }); + var reflection_info_1 = require_reflection_info(); + Object.defineProperty(exports2, "ScalarType", { enumerable: true, get: function() { + return reflection_info_1.ScalarType; + } }); + Object.defineProperty(exports2, "LongType", { enumerable: true, get: function() { + return reflection_info_1.LongType; + } }); + Object.defineProperty(exports2, "RepeatType", { enumerable: true, get: function() { + return reflection_info_1.RepeatType; + } }); + Object.defineProperty(exports2, "normalizeFieldInfo", { enumerable: true, get: function() { + return reflection_info_1.normalizeFieldInfo; + } }); + Object.defineProperty(exports2, "readFieldOptions", { enumerable: true, get: function() { + return reflection_info_1.readFieldOptions; + } }); + Object.defineProperty(exports2, "readFieldOption", { enumerable: true, get: function() { + return reflection_info_1.readFieldOption; + } }); + Object.defineProperty(exports2, "readMessageOption", { enumerable: true, get: function() { + return reflection_info_1.readMessageOption; + } }); + var reflection_type_check_1 = require_reflection_type_check(); + Object.defineProperty(exports2, "ReflectionTypeCheck", { enumerable: true, get: function() { + return reflection_type_check_1.ReflectionTypeCheck; + } }); + var reflection_create_1 = require_reflection_create(); + Object.defineProperty(exports2, "reflectionCreate", { enumerable: true, get: function() { + return reflection_create_1.reflectionCreate; + } }); + var reflection_scalar_default_1 = require_reflection_scalar_default(); + Object.defineProperty(exports2, "reflectionScalarDefault", { enumerable: true, get: function() { + return reflection_scalar_default_1.reflectionScalarDefault; + } }); + var reflection_merge_partial_1 = require_reflection_merge_partial(); + Object.defineProperty(exports2, "reflectionMergePartial", { enumerable: true, get: function() { + return reflection_merge_partial_1.reflectionMergePartial; + } }); + var reflection_equals_1 = require_reflection_equals(); + Object.defineProperty(exports2, "reflectionEquals", { enumerable: true, get: function() { + return reflection_equals_1.reflectionEquals; + } }); + var reflection_binary_reader_1 = require_reflection_binary_reader(); + Object.defineProperty(exports2, "ReflectionBinaryReader", { enumerable: true, get: function() { + return reflection_binary_reader_1.ReflectionBinaryReader; + } }); + var reflection_binary_writer_1 = require_reflection_binary_writer(); + Object.defineProperty(exports2, "ReflectionBinaryWriter", { enumerable: true, get: function() { + return reflection_binary_writer_1.ReflectionBinaryWriter; + } }); + var reflection_json_reader_1 = require_reflection_json_reader(); + Object.defineProperty(exports2, "ReflectionJsonReader", { enumerable: true, get: function() { + return reflection_json_reader_1.ReflectionJsonReader; + } }); + var reflection_json_writer_1 = require_reflection_json_writer(); + Object.defineProperty(exports2, "ReflectionJsonWriter", { 
enumerable: true, get: function() { + return reflection_json_writer_1.ReflectionJsonWriter; + } }); + var reflection_contains_message_type_1 = require_reflection_contains_message_type(); + Object.defineProperty(exports2, "containsMessageType", { enumerable: true, get: function() { + return reflection_contains_message_type_1.containsMessageType; + } }); + var oneof_1 = require_oneof(); + Object.defineProperty(exports2, "isOneofGroup", { enumerable: true, get: function() { + return oneof_1.isOneofGroup; + } }); + Object.defineProperty(exports2, "setOneofValue", { enumerable: true, get: function() { + return oneof_1.setOneofValue; + } }); + Object.defineProperty(exports2, "getOneofValue", { enumerable: true, get: function() { + return oneof_1.getOneofValue; + } }); + Object.defineProperty(exports2, "clearOneofValue", { enumerable: true, get: function() { + return oneof_1.clearOneofValue; } }); - Object.defineProperty(exports2, "DeferredState", { enumerable: true, get: function() { - return deferred_1.DeferredState; + Object.defineProperty(exports2, "getSelectedOneofValue", { enumerable: true, get: function() { + return oneof_1.getSelectedOneofValue; } }); - var duplex_streaming_call_1 = require_duplex_streaming_call(); - Object.defineProperty(exports2, "DuplexStreamingCall", { enumerable: true, get: function() { - return duplex_streaming_call_1.DuplexStreamingCall; + var enum_object_1 = require_enum_object(); + Object.defineProperty(exports2, "listEnumValues", { enumerable: true, get: function() { + return enum_object_1.listEnumValues; } }); - var client_streaming_call_1 = require_client_streaming_call(); - Object.defineProperty(exports2, "ClientStreamingCall", { enumerable: true, get: function() { - return client_streaming_call_1.ClientStreamingCall; + Object.defineProperty(exports2, "listEnumNames", { enumerable: true, get: function() { + return enum_object_1.listEnumNames; } }); - var server_streaming_call_1 = require_server_streaming_call(); - Object.defineProperty(exports2, "ServerStreamingCall", { enumerable: true, get: function() { - return server_streaming_call_1.ServerStreamingCall; + Object.defineProperty(exports2, "listEnumNumbers", { enumerable: true, get: function() { + return enum_object_1.listEnumNumbers; } }); - var unary_call_1 = require_unary_call(); - Object.defineProperty(exports2, "UnaryCall", { enumerable: true, get: function() { - return unary_call_1.UnaryCall; + Object.defineProperty(exports2, "isEnumObject", { enumerable: true, get: function() { + return enum_object_1.isEnumObject; } }); - var rpc_interceptor_1 = require_rpc_interceptor(); - Object.defineProperty(exports2, "stackIntercept", { enumerable: true, get: function() { - return rpc_interceptor_1.stackIntercept; + var lower_camel_case_1 = require_lower_camel_case(); + Object.defineProperty(exports2, "lowerCamelCase", { enumerable: true, get: function() { + return lower_camel_case_1.lowerCamelCase; } }); - Object.defineProperty(exports2, "stackDuplexStreamingInterceptors", { enumerable: true, get: function() { - return rpc_interceptor_1.stackDuplexStreamingInterceptors; + var assert_1 = require_assert(); + Object.defineProperty(exports2, "assert", { enumerable: true, get: function() { + return assert_1.assert; } }); - Object.defineProperty(exports2, "stackClientStreamingInterceptors", { enumerable: true, get: function() { - return rpc_interceptor_1.stackClientStreamingInterceptors; + Object.defineProperty(exports2, "assertNever", { enumerable: true, get: function() { + return assert_1.assertNever; } }); - 
Object.defineProperty(exports2, "stackServerStreamingInterceptors", { enumerable: true, get: function() { - return rpc_interceptor_1.stackServerStreamingInterceptors; + Object.defineProperty(exports2, "assertInt32", { enumerable: true, get: function() { + return assert_1.assertInt32; } }); - Object.defineProperty(exports2, "stackUnaryInterceptors", { enumerable: true, get: function() { - return rpc_interceptor_1.stackUnaryInterceptors; + Object.defineProperty(exports2, "assertUInt32", { enumerable: true, get: function() { + return assert_1.assertUInt32; } }); - var server_call_context_1 = require_server_call_context(); - Object.defineProperty(exports2, "ServerCallContextController", { enumerable: true, get: function() { - return server_call_context_1.ServerCallContextController; + Object.defineProperty(exports2, "assertFloat32", { enumerable: true, get: function() { + return assert_1.assertFloat32; } }); } }); -// node_modules/@actions/cache/lib/generated/results/entities/v1/cachescope.js -var require_cachescope = __commonJS({ - "node_modules/@actions/cache/lib/generated/results/entities/v1/cachescope.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.CacheScope = void 0; - var runtime_1 = require_commonjs16(); - var runtime_2 = require_commonjs16(); - var runtime_3 = require_commonjs16(); - var runtime_4 = require_commonjs16(); - var runtime_5 = require_commonjs16(); - var CacheScope$Type = class extends runtime_5.MessageType { - constructor() { - super("github.actions.results.entities.v1.CacheScope", [ - { - no: 1, - name: "scope", - kind: "scalar", - T: 9 - /*ScalarType.STRING*/ - }, - { - no: 2, - name: "permission", - kind: "scalar", - T: 3 - /*ScalarType.INT64*/ - } - ]); - } - create(value) { - const message = { scope: "", permission: "0" }; - globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this }); - if (value !== void 0) - (0, runtime_3.reflectionMergePartial)(this, message, value); - return message; - } - internalBinaryRead(reader, length, options, target) { - let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length; - while (reader.pos < end) { - let [fieldNo, wireType] = reader.tag(); - switch (fieldNo) { - case /* string scope */ - 1: - message.scope = reader.string(); - break; - case /* int64 permission */ - 2: - message.permission = reader.int64().toString(); - break; - default: - let u = options.readUnknownField; - if (u === "throw") - throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); - let d = reader.skip(wireType); - if (u !== false) - (u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); - } - } - return message; - } - internalBinaryWrite(message, writer, options) { - if (message.scope !== "") - writer.tag(1, runtime_1.WireType.LengthDelimited).string(message.scope); - if (message.permission !== "0") - writer.tag(2, runtime_1.WireType.Varint).int64(message.permission); - let u = options.writeUnknownFields; - if (u !== false) - (u == true ? 
runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); - return writer; - } - }; - exports2.CacheScope = new CacheScope$Type(); - } -}); - -// node_modules/@actions/cache/lib/generated/results/entities/v1/cachemetadata.js -var require_cachemetadata = __commonJS({ - "node_modules/@actions/cache/lib/generated/results/entities/v1/cachemetadata.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.CacheMetadata = void 0; - var runtime_1 = require_commonjs16(); - var runtime_2 = require_commonjs16(); - var runtime_3 = require_commonjs16(); - var runtime_4 = require_commonjs16(); - var runtime_5 = require_commonjs16(); - var cachescope_1 = require_cachescope(); - var CacheMetadata$Type = class extends runtime_5.MessageType { - constructor() { - super("github.actions.results.entities.v1.CacheMetadata", [ - { - no: 1, - name: "repository_id", - kind: "scalar", - T: 3 - /*ScalarType.INT64*/ - }, - { no: 2, name: "scope", kind: "message", repeat: 1, T: () => cachescope_1.CacheScope } - ]); - } - create(value) { - const message = { repositoryId: "0", scope: [] }; - globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this }); - if (value !== void 0) - (0, runtime_3.reflectionMergePartial)(this, message, value); - return message; - } - internalBinaryRead(reader, length, options, target) { - let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length; - while (reader.pos < end) { - let [fieldNo, wireType] = reader.tag(); - switch (fieldNo) { - case /* int64 repository_id */ - 1: - message.repositoryId = reader.int64().toString(); - break; - case /* repeated github.actions.results.entities.v1.CacheScope scope */ - 2: - message.scope.push(cachescope_1.CacheScope.internalBinaryRead(reader, reader.uint32(), options)); - break; - default: - let u = options.readUnknownField; - if (u === "throw") - throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); - let d = reader.skip(wireType); - if (u !== false) - (u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); - } - } - return message; - } - internalBinaryWrite(message, writer, options) { - if (message.repositoryId !== "0") - writer.tag(1, runtime_1.WireType.Varint).int64(message.repositoryId); - for (let i = 0; i < message.scope.length; i++) - cachescope_1.CacheScope.internalBinaryWrite(message.scope[i], writer.tag(2, runtime_1.WireType.LengthDelimited).fork(), options).join(); - let u = options.writeUnknownFields; - if (u !== false) - (u == true ? 
runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); - return writer; - } - }; - exports2.CacheMetadata = new CacheMetadata$Type(); - } -}); - -// node_modules/@actions/cache/lib/generated/results/api/v1/cache.js -var require_cache4 = __commonJS({ - "node_modules/@actions/cache/lib/generated/results/api/v1/cache.js"(exports2) { +// node_modules/@protobuf-ts/runtime-rpc/build/commonjs/reflection-info.js +var require_reflection_info2 = __commonJS({ + "node_modules/@protobuf-ts/runtime-rpc/build/commonjs/reflection-info.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.CacheService = exports2.GetCacheEntryDownloadURLResponse = exports2.GetCacheEntryDownloadURLRequest = exports2.FinalizeCacheEntryUploadResponse = exports2.FinalizeCacheEntryUploadRequest = exports2.CreateCacheEntryResponse = exports2.CreateCacheEntryRequest = void 0; - var runtime_rpc_1 = require_commonjs17(); + exports2.readServiceOption = exports2.readMethodOption = exports2.readMethodOptions = exports2.normalizeMethodInfo = void 0; var runtime_1 = require_commonjs16(); - var runtime_2 = require_commonjs16(); - var runtime_3 = require_commonjs16(); - var runtime_4 = require_commonjs16(); - var runtime_5 = require_commonjs16(); - var cachemetadata_1 = require_cachemetadata(); - var CreateCacheEntryRequest$Type = class extends runtime_5.MessageType { - constructor() { - super("github.actions.results.api.v1.CreateCacheEntryRequest", [ - { no: 1, name: "metadata", kind: "message", T: () => cachemetadata_1.CacheMetadata }, - { - no: 2, - name: "key", - kind: "scalar", - T: 9 - /*ScalarType.STRING*/ - }, - { - no: 3, - name: "version", - kind: "scalar", - T: 9 - /*ScalarType.STRING*/ - } - ]); - } - create(value) { - const message = { key: "", version: "" }; - globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this }); - if (value !== void 0) - (0, runtime_3.reflectionMergePartial)(this, message, value); - return message; - } - internalBinaryRead(reader, length, options, target) { - let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length; - while (reader.pos < end) { - let [fieldNo, wireType] = reader.tag(); - switch (fieldNo) { - case /* github.actions.results.entities.v1.CacheMetadata metadata */ - 1: - message.metadata = cachemetadata_1.CacheMetadata.internalBinaryRead(reader, reader.uint32(), options, message.metadata); - break; - case /* string key */ - 2: - message.key = reader.string(); - break; - case /* string version */ - 3: - message.version = reader.string(); - break; - default: - let u = options.readUnknownField; - if (u === "throw") - throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); - let d = reader.skip(wireType); - if (u !== false) - (u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); - } - } - return message; - } - internalBinaryWrite(message, writer, options) { - if (message.metadata) - cachemetadata_1.CacheMetadata.internalBinaryWrite(message.metadata, writer.tag(1, runtime_1.WireType.LengthDelimited).fork(), options).join(); - if (message.key !== "") - writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.key); - if (message.version !== "") - writer.tag(3, runtime_1.WireType.LengthDelimited).string(message.version); - let u = options.writeUnknownFields; - if (u !== false) - (u == true ? 
runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); - return writer; - } - }; - exports2.CreateCacheEntryRequest = new CreateCacheEntryRequest$Type(); - var CreateCacheEntryResponse$Type = class extends runtime_5.MessageType { - constructor() { - super("github.actions.results.api.v1.CreateCacheEntryResponse", [ - { - no: 1, - name: "ok", - kind: "scalar", - T: 8 - /*ScalarType.BOOL*/ - }, - { - no: 2, - name: "signed_upload_url", - kind: "scalar", - T: 9 - /*ScalarType.STRING*/ - }, - { - no: 3, - name: "message", - kind: "scalar", - T: 9 - /*ScalarType.STRING*/ - } - ]); - } - create(value) { - const message = { ok: false, signedUploadUrl: "", message: "" }; - globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this }); - if (value !== void 0) - (0, runtime_3.reflectionMergePartial)(this, message, value); - return message; - } - internalBinaryRead(reader, length, options, target) { - let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length; - while (reader.pos < end) { - let [fieldNo, wireType] = reader.tag(); - switch (fieldNo) { - case /* bool ok */ - 1: - message.ok = reader.bool(); - break; - case /* string signed_upload_url */ - 2: - message.signedUploadUrl = reader.string(); - break; - case /* string message */ - 3: - message.message = reader.string(); - break; - default: - let u = options.readUnknownField; - if (u === "throw") - throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); - let d = reader.skip(wireType); - if (u !== false) - (u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); - } - } - return message; - } - internalBinaryWrite(message, writer, options) { - if (message.ok !== false) - writer.tag(1, runtime_1.WireType.Varint).bool(message.ok); - if (message.signedUploadUrl !== "") - writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.signedUploadUrl); - if (message.message !== "") - writer.tag(3, runtime_1.WireType.LengthDelimited).string(message.message); - let u = options.writeUnknownFields; - if (u !== false) - (u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); - return writer; - } - }; - exports2.CreateCacheEntryResponse = new CreateCacheEntryResponse$Type(); - var FinalizeCacheEntryUploadRequest$Type = class extends runtime_5.MessageType { - constructor() { - super("github.actions.results.api.v1.FinalizeCacheEntryUploadRequest", [ - { no: 1, name: "metadata", kind: "message", T: () => cachemetadata_1.CacheMetadata }, - { - no: 2, - name: "key", - kind: "scalar", - T: 9 - /*ScalarType.STRING*/ - }, - { - no: 3, - name: "size_bytes", - kind: "scalar", - T: 3 - /*ScalarType.INT64*/ - }, - { - no: 4, - name: "version", - kind: "scalar", - T: 9 - /*ScalarType.STRING*/ - } - ]); - } - create(value) { - const message = { key: "", sizeBytes: "0", version: "" }; - globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this }); - if (value !== void 0) - (0, runtime_3.reflectionMergePartial)(this, message, value); - return message; - } - internalBinaryRead(reader, length, options, target) { - let message = target !== null && target !== void 0 ? 
target : this.create(), end = reader.pos + length; - while (reader.pos < end) { - let [fieldNo, wireType] = reader.tag(); - switch (fieldNo) { - case /* github.actions.results.entities.v1.CacheMetadata metadata */ - 1: - message.metadata = cachemetadata_1.CacheMetadata.internalBinaryRead(reader, reader.uint32(), options, message.metadata); - break; - case /* string key */ - 2: - message.key = reader.string(); - break; - case /* int64 size_bytes */ - 3: - message.sizeBytes = reader.int64().toString(); - break; - case /* string version */ - 4: - message.version = reader.string(); - break; - default: - let u = options.readUnknownField; - if (u === "throw") - throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); - let d = reader.skip(wireType); - if (u !== false) - (u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); - } - } - return message; - } - internalBinaryWrite(message, writer, options) { - if (message.metadata) - cachemetadata_1.CacheMetadata.internalBinaryWrite(message.metadata, writer.tag(1, runtime_1.WireType.LengthDelimited).fork(), options).join(); - if (message.key !== "") - writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.key); - if (message.sizeBytes !== "0") - writer.tag(3, runtime_1.WireType.Varint).int64(message.sizeBytes); - if (message.version !== "") - writer.tag(4, runtime_1.WireType.LengthDelimited).string(message.version); - let u = options.writeUnknownFields; - if (u !== false) - (u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); - return writer; - } - }; - exports2.FinalizeCacheEntryUploadRequest = new FinalizeCacheEntryUploadRequest$Type(); - var FinalizeCacheEntryUploadResponse$Type = class extends runtime_5.MessageType { - constructor() { - super("github.actions.results.api.v1.FinalizeCacheEntryUploadResponse", [ - { - no: 1, - name: "ok", - kind: "scalar", - T: 8 - /*ScalarType.BOOL*/ - }, - { - no: 2, - name: "entry_id", - kind: "scalar", - T: 3 - /*ScalarType.INT64*/ - }, - { - no: 3, - name: "message", - kind: "scalar", - T: 9 - /*ScalarType.STRING*/ - } - ]); - } - create(value) { - const message = { ok: false, entryId: "0", message: "" }; - globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this }); - if (value !== void 0) - (0, runtime_3.reflectionMergePartial)(this, message, value); - return message; - } - internalBinaryRead(reader, length, options, target) { - let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length; - while (reader.pos < end) { - let [fieldNo, wireType] = reader.tag(); - switch (fieldNo) { - case /* bool ok */ - 1: - message.ok = reader.bool(); - break; - case /* int64 entry_id */ - 2: - message.entryId = reader.int64().toString(); - break; - case /* string message */ - 3: - message.message = reader.string(); - break; - default: - let u = options.readUnknownField; - if (u === "throw") - throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); - let d = reader.skip(wireType); - if (u !== false) - (u === true ? 
runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); - } - } - return message; - } - internalBinaryWrite(message, writer, options) { - if (message.ok !== false) - writer.tag(1, runtime_1.WireType.Varint).bool(message.ok); - if (message.entryId !== "0") - writer.tag(2, runtime_1.WireType.Varint).int64(message.entryId); - if (message.message !== "") - writer.tag(3, runtime_1.WireType.LengthDelimited).string(message.message); - let u = options.writeUnknownFields; - if (u !== false) - (u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); - return writer; + function normalizeMethodInfo(method, service) { + var _a, _b, _c; + let m = method; + m.service = service; + m.localName = (_a = m.localName) !== null && _a !== void 0 ? _a : runtime_1.lowerCamelCase(m.name); + m.serverStreaming = !!m.serverStreaming; + m.clientStreaming = !!m.clientStreaming; + m.options = (_b = m.options) !== null && _b !== void 0 ? _b : {}; + m.idempotency = (_c = m.idempotency) !== null && _c !== void 0 ? _c : void 0; + return m; + } + exports2.normalizeMethodInfo = normalizeMethodInfo; + function readMethodOptions(service, methodName, extensionName, extensionType) { + var _a; + const options = (_a = service.methods.find((m, i) => m.localName === methodName || i === methodName)) === null || _a === void 0 ? void 0 : _a.options; + return options && options[extensionName] ? extensionType.fromJson(options[extensionName]) : void 0; + } + exports2.readMethodOptions = readMethodOptions; + function readMethodOption(service, methodName, extensionName, extensionType) { + var _a; + const options = (_a = service.methods.find((m, i) => m.localName === methodName || i === methodName)) === null || _a === void 0 ? void 0 : _a.options; + if (!options) { + return void 0; } - }; - exports2.FinalizeCacheEntryUploadResponse = new FinalizeCacheEntryUploadResponse$Type(); - var GetCacheEntryDownloadURLRequest$Type = class extends runtime_5.MessageType { - constructor() { - super("github.actions.results.api.v1.GetCacheEntryDownloadURLRequest", [ - { no: 1, name: "metadata", kind: "message", T: () => cachemetadata_1.CacheMetadata }, - { - no: 2, - name: "key", - kind: "scalar", - T: 9 - /*ScalarType.STRING*/ - }, - { - no: 3, - name: "restore_keys", - kind: "scalar", - repeat: 2, - T: 9 - /*ScalarType.STRING*/ - }, - { - no: 4, - name: "version", - kind: "scalar", - T: 9 - /*ScalarType.STRING*/ - } - ]); + const optionVal = options[extensionName]; + if (optionVal === void 0) { + return optionVal; } - create(value) { - const message = { key: "", restoreKeys: [], version: "" }; - globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this }); - if (value !== void 0) - (0, runtime_3.reflectionMergePartial)(this, message, value); - return message; + return extensionType ? extensionType.fromJson(optionVal) : optionVal; + } + exports2.readMethodOption = readMethodOption; + function readServiceOption(service, extensionName, extensionType) { + const options = service.options; + if (!options) { + return void 0; } - internalBinaryRead(reader, length, options, target) { - let message = target !== null && target !== void 0 ? 
target : this.create(), end = reader.pos + length; - while (reader.pos < end) { - let [fieldNo, wireType] = reader.tag(); - switch (fieldNo) { - case /* github.actions.results.entities.v1.CacheMetadata metadata */ - 1: - message.metadata = cachemetadata_1.CacheMetadata.internalBinaryRead(reader, reader.uint32(), options, message.metadata); - break; - case /* string key */ - 2: - message.key = reader.string(); - break; - case /* repeated string restore_keys */ - 3: - message.restoreKeys.push(reader.string()); - break; - case /* string version */ - 4: - message.version = reader.string(); - break; - default: - let u = options.readUnknownField; - if (u === "throw") - throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); - let d = reader.skip(wireType); - if (u !== false) - (u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); - } - } - return message; + const optionVal = options[extensionName]; + if (optionVal === void 0) { + return optionVal; } - internalBinaryWrite(message, writer, options) { - if (message.metadata) - cachemetadata_1.CacheMetadata.internalBinaryWrite(message.metadata, writer.tag(1, runtime_1.WireType.LengthDelimited).fork(), options).join(); - if (message.key !== "") - writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.key); - for (let i = 0; i < message.restoreKeys.length; i++) - writer.tag(3, runtime_1.WireType.LengthDelimited).string(message.restoreKeys[i]); - if (message.version !== "") - writer.tag(4, runtime_1.WireType.LengthDelimited).string(message.version); - let u = options.writeUnknownFields; - if (u !== false) - (u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); - return writer; + return extensionType ? extensionType.fromJson(optionVal) : optionVal; + } + exports2.readServiceOption = readServiceOption; + } +}); + +// node_modules/@protobuf-ts/runtime-rpc/build/commonjs/service-type.js +var require_service_type = __commonJS({ + "node_modules/@protobuf-ts/runtime-rpc/build/commonjs/service-type.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.ServiceType = void 0; + var reflection_info_1 = require_reflection_info2(); + var ServiceType = class { + constructor(typeName, methods, options) { + this.typeName = typeName; + this.methods = methods.map((i) => reflection_info_1.normalizeMethodInfo(i, this)); + this.options = options !== null && options !== void 0 ? 
options : {}; } }; - exports2.GetCacheEntryDownloadURLRequest = new GetCacheEntryDownloadURLRequest$Type(); - var GetCacheEntryDownloadURLResponse$Type = class extends runtime_5.MessageType { - constructor() { - super("github.actions.results.api.v1.GetCacheEntryDownloadURLResponse", [ - { - no: 1, - name: "ok", - kind: "scalar", - T: 8 - /*ScalarType.BOOL*/ - }, - { - no: 2, - name: "signed_download_url", - kind: "scalar", - T: 9 - /*ScalarType.STRING*/ - }, - { - no: 3, - name: "matched_key", - kind: "scalar", - T: 9 - /*ScalarType.STRING*/ - } - ]); - } - create(value) { - const message = { ok: false, signedDownloadUrl: "", matchedKey: "" }; - globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this }); - if (value !== void 0) - (0, runtime_3.reflectionMergePartial)(this, message, value); - return message; + exports2.ServiceType = ServiceType; + } +}); + +// node_modules/@protobuf-ts/runtime-rpc/build/commonjs/rpc-error.js +var require_rpc_error = __commonJS({ + "node_modules/@protobuf-ts/runtime-rpc/build/commonjs/rpc-error.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.RpcError = void 0; + var RpcError = class extends Error { + constructor(message, code = "UNKNOWN", meta) { + super(message); + this.name = "RpcError"; + Object.setPrototypeOf(this, new.target.prototype); + this.code = code; + this.meta = meta !== null && meta !== void 0 ? meta : {}; } - internalBinaryRead(reader, length, options, target) { - let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length; - while (reader.pos < end) { - let [fieldNo, wireType] = reader.tag(); - switch (fieldNo) { - case /* bool ok */ - 1: - message.ok = reader.bool(); - break; - case /* string signed_download_url */ - 2: - message.signedDownloadUrl = reader.string(); - break; - case /* string matched_key */ - 3: - message.matchedKey = reader.string(); - break; - default: - let u = options.readUnknownField; - if (u === "throw") - throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); - let d = reader.skip(wireType); - if (u !== false) - (u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + toString() { + const l = [this.name + ": " + this.message]; + if (this.code) { + l.push(""); + l.push("Code: " + this.code); + } + if (this.serviceName && this.methodName) { + l.push("Method: " + this.serviceName + "/" + this.methodName); + } + let m = Object.entries(this.meta); + if (m.length) { + l.push(""); + l.push("Meta:"); + for (let [k, v] of m) { + l.push(` ${k}: ${v}`); } } - return message; - } - internalBinaryWrite(message, writer, options) { - if (message.ok !== false) - writer.tag(1, runtime_1.WireType.Varint).bool(message.ok); - if (message.signedDownloadUrl !== "") - writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.signedDownloadUrl); - if (message.matchedKey !== "") - writer.tag(3, runtime_1.WireType.LengthDelimited).string(message.matchedKey); - let u = options.writeUnknownFields; - if (u !== false) - (u == true ? 
runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); - return writer; + return l.join("\n"); } }; - exports2.GetCacheEntryDownloadURLResponse = new GetCacheEntryDownloadURLResponse$Type(); - exports2.CacheService = new runtime_rpc_1.ServiceType("github.actions.results.api.v1.CacheService", [ - { name: "CreateCacheEntry", options: {}, I: exports2.CreateCacheEntryRequest, O: exports2.CreateCacheEntryResponse }, - { name: "FinalizeCacheEntryUpload", options: {}, I: exports2.FinalizeCacheEntryUploadRequest, O: exports2.FinalizeCacheEntryUploadResponse }, - { name: "GetCacheEntryDownloadURL", options: {}, I: exports2.GetCacheEntryDownloadURLRequest, O: exports2.GetCacheEntryDownloadURLResponse } - ]); + exports2.RpcError = RpcError; } }); -// node_modules/@actions/cache/lib/generated/results/api/v1/cache.twirp-client.js -var require_cache_twirp_client = __commonJS({ - "node_modules/@actions/cache/lib/generated/results/api/v1/cache.twirp-client.js"(exports2) { +// node_modules/@protobuf-ts/runtime-rpc/build/commonjs/rpc-options.js +var require_rpc_options = __commonJS({ + "node_modules/@protobuf-ts/runtime-rpc/build/commonjs/rpc-options.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.CacheServiceClientProtobuf = exports2.CacheServiceClientJSON = void 0; - var cache_1 = require_cache4(); - var CacheServiceClientJSON = class { - constructor(rpc) { - this.rpc = rpc; - this.CreateCacheEntry.bind(this); - this.FinalizeCacheEntryUpload.bind(this); - this.GetCacheEntryDownloadURL.bind(this); + exports2.mergeRpcOptions = void 0; + var runtime_1 = require_commonjs16(); + function mergeRpcOptions(defaults, options) { + if (!options) + return defaults; + let o = {}; + copy(defaults, o); + copy(options, o); + for (let key of Object.keys(options)) { + let val = options[key]; + switch (key) { + case "jsonOptions": + o.jsonOptions = runtime_1.mergeJsonOptions(defaults.jsonOptions, o.jsonOptions); + break; + case "binaryOptions": + o.binaryOptions = runtime_1.mergeBinaryOptions(defaults.binaryOptions, o.binaryOptions); + break; + case "meta": + o.meta = {}; + copy(defaults.meta, o.meta); + copy(options.meta, o.meta); + break; + case "interceptors": + o.interceptors = defaults.interceptors ? 
defaults.interceptors.concat(val) : val.concat(); + break; + } } - CreateCacheEntry(request2) { - const data = cache_1.CreateCacheEntryRequest.toJson(request2, { - useProtoFieldName: true, - emitDefaultValues: false - }); - const promise = this.rpc.request("github.actions.results.api.v1.CacheService", "CreateCacheEntry", "application/json", data); - return promise.then((data2) => cache_1.CreateCacheEntryResponse.fromJson(data2, { - ignoreUnknownFields: true - })); + return o; + } + exports2.mergeRpcOptions = mergeRpcOptions; + function copy(a, into) { + if (!a) + return; + let c = into; + for (let [k, v] of Object.entries(a)) { + if (v instanceof Date) + c[k] = new Date(v.getTime()); + else if (Array.isArray(v)) + c[k] = v.concat(); + else + c[k] = v; } - FinalizeCacheEntryUpload(request2) { - const data = cache_1.FinalizeCacheEntryUploadRequest.toJson(request2, { - useProtoFieldName: true, - emitDefaultValues: false + } + } +}); + +// node_modules/@protobuf-ts/runtime-rpc/build/commonjs/deferred.js +var require_deferred = __commonJS({ + "node_modules/@protobuf-ts/runtime-rpc/build/commonjs/deferred.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.Deferred = exports2.DeferredState = void 0; + var DeferredState; + (function(DeferredState2) { + DeferredState2[DeferredState2["PENDING"] = 0] = "PENDING"; + DeferredState2[DeferredState2["REJECTED"] = 1] = "REJECTED"; + DeferredState2[DeferredState2["RESOLVED"] = 2] = "RESOLVED"; + })(DeferredState = exports2.DeferredState || (exports2.DeferredState = {})); + var Deferred = class { + /** + * @param preventUnhandledRejectionWarning - prevents the warning + * "Unhandled Promise rejection" by adding a noop rejection handler. + * Working with calls returned from the runtime-rpc package in an + * async function usually means awaiting one call property after + * the other. This means that the "status" is not being awaited when + * an earlier await for the "headers" is rejected. This causes the + * "unhandled promise reject" warning. A more correct behaviour for + * calls might be to become aware whether at least one of the + * promises is handled and swallow the rejection warning for the + * others. + */ + constructor(preventUnhandledRejectionWarning = true) { + this._state = DeferredState.PENDING; + this._promise = new Promise((resolve6, reject) => { + this._resolve = resolve6; + this._reject = reject; }); - const promise = this.rpc.request("github.actions.results.api.v1.CacheService", "FinalizeCacheEntryUpload", "application/json", data); - return promise.then((data2) => cache_1.FinalizeCacheEntryUploadResponse.fromJson(data2, { - ignoreUnknownFields: true - })); + if (preventUnhandledRejectionWarning) { + this._promise.catch((_) => { + }); + } } - GetCacheEntryDownloadURL(request2) { - const data = cache_1.GetCacheEntryDownloadURLRequest.toJson(request2, { - useProtoFieldName: true, - emitDefaultValues: false - }); - const promise = this.rpc.request("github.actions.results.api.v1.CacheService", "GetCacheEntryDownloadURL", "application/json", data); - return promise.then((data2) => cache_1.GetCacheEntryDownloadURLResponse.fromJson(data2, { - ignoreUnknownFields: true - })); + /** + * Get the current state of the promise. 
+ */ + get state() { + return this._state; } - }; - exports2.CacheServiceClientJSON = CacheServiceClientJSON; - var CacheServiceClientProtobuf = class { - constructor(rpc) { - this.rpc = rpc; - this.CreateCacheEntry.bind(this); - this.FinalizeCacheEntryUpload.bind(this); - this.GetCacheEntryDownloadURL.bind(this); + /** + * Get the deferred promise. + */ + get promise() { + return this._promise; } - CreateCacheEntry(request2) { - const data = cache_1.CreateCacheEntryRequest.toBinary(request2); - const promise = this.rpc.request("github.actions.results.api.v1.CacheService", "CreateCacheEntry", "application/protobuf", data); - return promise.then((data2) => cache_1.CreateCacheEntryResponse.fromBinary(data2)); + /** + * Resolve the promise. Throws if the promise is already resolved or rejected. + */ + resolve(value) { + if (this.state !== DeferredState.PENDING) + throw new Error(`cannot resolve ${DeferredState[this.state].toLowerCase()}`); + this._resolve(value); + this._state = DeferredState.RESOLVED; } - FinalizeCacheEntryUpload(request2) { - const data = cache_1.FinalizeCacheEntryUploadRequest.toBinary(request2); - const promise = this.rpc.request("github.actions.results.api.v1.CacheService", "FinalizeCacheEntryUpload", "application/protobuf", data); - return promise.then((data2) => cache_1.FinalizeCacheEntryUploadResponse.fromBinary(data2)); + /** + * Reject the promise. Throws if the promise is already resolved or rejected. + */ + reject(reason) { + if (this.state !== DeferredState.PENDING) + throw new Error(`cannot reject ${DeferredState[this.state].toLowerCase()}`); + this._reject(reason); + this._state = DeferredState.REJECTED; } - GetCacheEntryDownloadURL(request2) { - const data = cache_1.GetCacheEntryDownloadURLRequest.toBinary(request2); - const promise = this.rpc.request("github.actions.results.api.v1.CacheService", "GetCacheEntryDownloadURL", "application/protobuf", data); - return promise.then((data2) => cache_1.GetCacheEntryDownloadURLResponse.fromBinary(data2)); + /** + * Resolve the promise. Ignore if not pending. + */ + resolvePending(val) { + if (this._state === DeferredState.PENDING) + this.resolve(val); + } + /** + * Reject the promise. Ignore if not pending. + */ + rejectPending(reason) { + if (this._state === DeferredState.PENDING) + this.reject(reason); } }; - exports2.CacheServiceClientProtobuf = CacheServiceClientProtobuf; + exports2.Deferred = Deferred; } }); -// node_modules/@actions/cache/lib/internal/shared/util.js -var require_util18 = __commonJS({ - "node_modules/@actions/cache/lib/internal/shared/util.js"(exports2) { +// node_modules/@protobuf-ts/runtime-rpc/build/commonjs/rpc-output-stream.js +var require_rpc_output_stream = __commonJS({ + "node_modules/@protobuf-ts/runtime-rpc/build/commonjs/rpc-output-stream.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.maskSigUrl = maskSigUrl; - exports2.maskSecretUrls = maskSecretUrls; - var core_1 = require_core(); - function maskSigUrl(url2) { - if (!url2) - return; - try { - const parsedUrl = new URL(url2); - const signature = parsedUrl.searchParams.get("sig"); - if (signature) { - (0, core_1.setSecret)(signature); - (0, core_1.setSecret)(encodeURIComponent(signature)); - } - } catch (error3) { - (0, core_1.debug)(`Failed to parse URL: ${url2} ${error3 instanceof Error ? 
error3.message : String(error3)}`); + exports2.RpcOutputStreamController = void 0; + var deferred_1 = require_deferred(); + var runtime_1 = require_commonjs16(); + var RpcOutputStreamController = class { + constructor() { + this._lis = { + nxt: [], + msg: [], + err: [], + cmp: [] + }; + this._closed = false; + this._itState = { q: [] }; + } + // --- RpcOutputStream callback API + onNext(callback) { + return this.addLis(callback, this._lis.nxt); + } + onMessage(callback) { + return this.addLis(callback, this._lis.msg); + } + onError(callback) { + return this.addLis(callback, this._lis.err); + } + onComplete(callback) { + return this.addLis(callback, this._lis.cmp); + } + addLis(callback, list) { + list.push(callback); + return () => { + let i = list.indexOf(callback); + if (i >= 0) + list.splice(i, 1); + }; + } + // remove all listeners + clearLis() { + for (let l of Object.values(this._lis)) + l.splice(0, l.length); + } + // --- Controller API + /** + * Is this stream already closed by a completion or error? + */ + get closed() { + return this._closed !== false; + } + /** + * Emit message, close with error, or close successfully, but only one + * at a time. + * Can be used to wrap a stream by using the other stream's `onNext`. + */ + notifyNext(message, error3, complete) { + runtime_1.assert((message ? 1 : 0) + (error3 ? 1 : 0) + (complete ? 1 : 0) <= 1, "only one emission at a time"); + if (message) + this.notifyMessage(message); + if (error3) + this.notifyError(error3); + if (complete) + this.notifyComplete(); + } + /** + * Emits a new message. Throws if stream is closed. + * + * Triggers onNext and onMessage callbacks. + */ + notifyMessage(message) { + runtime_1.assert(!this.closed, "stream is closed"); + this.pushIt({ value: message, done: false }); + this._lis.msg.forEach((l) => l(message)); + this._lis.nxt.forEach((l) => l(message, void 0, false)); } - } - function maskSecretUrls(body) { - if (typeof body !== "object" || body === null) { - (0, core_1.debug)("body is not an object or is null"); - return; + /** + * Closes the stream with an error. Throws if stream is closed. + * + * Triggers onNext and onError callbacks. + */ + notifyError(error3) { + runtime_1.assert(!this.closed, "stream is closed"); + this._closed = error3; + this.pushIt(error3); + this._lis.err.forEach((l) => l(error3)); + this._lis.nxt.forEach((l) => l(void 0, error3, false)); + this.clearLis(); } - if ("signed_upload_url" in body && typeof body.signed_upload_url === "string") { - maskSigUrl(body.signed_upload_url); + /** + * Closes the stream successfully. Throws if stream is closed. + * + * Triggers onNext and onComplete callbacks. + */ + notifyComplete() { + runtime_1.assert(!this.closed, "stream is closed"); + this._closed = true; + this.pushIt({ value: null, done: true }); + this._lis.cmp.forEach((l) => l()); + this._lis.nxt.forEach((l) => l(void 0, void 0, true)); + this.clearLis(); } - if ("signed_download_url" in body && typeof body.signed_download_url === "string") { - maskSigUrl(body.signed_download_url); + /** + * Creates an async iterator (that can be used with `for await {...}`) + * to consume the stream. + * + * Some things to note: + * - If an error occurs, the `for await` will throw it. + * - If an error occurred before the `for await` was started, `for await` + * will re-throw it. + * - If the stream is already complete, the `for await` will be empty. 
+ * - If your `for await` consumes slower than the stream produces, + * for example because you are relaying messages in a slow operation, + * messages are queued. + */ + [Symbol.asyncIterator]() { + if (this._closed === true) + this.pushIt({ value: null, done: true }); + else if (this._closed !== false) + this.pushIt(this._closed); + return { + next: () => { + let state = this._itState; + runtime_1.assert(state, "bad state"); + runtime_1.assert(!state.p, "iterator contract broken"); + let first = state.q.shift(); + if (first) + return "value" in first ? Promise.resolve(first) : Promise.reject(first); + state.p = new deferred_1.Deferred(); + return state.p.promise; + } + }; } - } + // "push" a new iterator result. + // this either resolves a pending promise, or enqueues the result. + pushIt(result) { + let state = this._itState; + if (state.p) { + const p = state.p; + runtime_1.assert(p.state == deferred_1.DeferredState.PENDING, "iterator contract broken"); + "value" in result ? p.resolve(result) : p.reject(result); + delete state.p; + } else { + state.q.push(result); + } + } + }; + exports2.RpcOutputStreamController = RpcOutputStreamController; } }); -// node_modules/@actions/cache/lib/internal/shared/cacheTwirpClient.js -var require_cacheTwirpClient = __commonJS({ - "node_modules/@actions/cache/lib/internal/shared/cacheTwirpClient.js"(exports2) { +// node_modules/@protobuf-ts/runtime-rpc/build/commonjs/unary-call.js +var require_unary_call = __commonJS({ + "node_modules/@protobuf-ts/runtime-rpc/build/commonjs/unary-call.js"(exports2) { "use strict"; var __awaiter2 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { function adopt(value) { @@ -96695,203 +96497,47 @@ var require_cacheTwirpClient = __commonJS({ }); }; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.internalCacheTwirpClient = internalCacheTwirpClient; - var core_1 = require_core(); - var user_agent_1 = require_user_agent(); - var errors_1 = require_errors3(); - var config_1 = require_config(); - var cacheUtils_1 = require_cacheUtils(); - var auth_1 = require_auth(); - var http_client_1 = require_lib(); - var cache_twirp_client_1 = require_cache_twirp_client(); - var util_1 = require_util18(); - var CacheServiceClient = class { - constructor(userAgent2, maxAttempts, baseRetryIntervalMilliseconds, retryMultiplier) { - this.maxAttempts = 5; - this.baseRetryIntervalMilliseconds = 3e3; - this.retryMultiplier = 1.5; - const token = (0, cacheUtils_1.getRuntimeToken)(); - this.baseUrl = (0, config_1.getCacheServiceURL)(); - if (maxAttempts) { - this.maxAttempts = maxAttempts; - } - if (baseRetryIntervalMilliseconds) { - this.baseRetryIntervalMilliseconds = baseRetryIntervalMilliseconds; - } - if (retryMultiplier) { - this.retryMultiplier = retryMultiplier; - } - this.httpClient = new http_client_1.HttpClient(userAgent2, [ - new auth_1.BearerCredentialHandler(token) - ]); - } - // This function satisfies the Rpc interface. It is compatible with the JSON - // JSON generated client. 
- request(service, method, contentType, data) { - return __awaiter2(this, void 0, void 0, function* () { - const url2 = new URL(`/twirp/${service}/${method}`, this.baseUrl).href; - (0, core_1.debug)(`[Request] ${method} ${url2}`); - const headers = { - "Content-Type": contentType - }; - try { - const { body } = yield this.retryableRequest(() => __awaiter2(this, void 0, void 0, function* () { - return this.httpClient.post(url2, JSON.stringify(data), headers); - })); - return body; - } catch (error3) { - throw new Error(`Failed to ${method}: ${error3.message}`); - } - }); - } - retryableRequest(operation) { - return __awaiter2(this, void 0, void 0, function* () { - let attempt = 0; - let errorMessage = ""; - let rawBody = ""; - while (attempt < this.maxAttempts) { - let isRetryable = false; - try { - const response = yield operation(); - const statusCode = response.message.statusCode; - rawBody = yield response.readBody(); - (0, core_1.debug)(`[Response] - ${response.message.statusCode}`); - (0, core_1.debug)(`Headers: ${JSON.stringify(response.message.headers, null, 2)}`); - const body = JSON.parse(rawBody); - (0, util_1.maskSecretUrls)(body); - (0, core_1.debug)(`Body: ${JSON.stringify(body, null, 2)}`); - if (this.isSuccessStatusCode(statusCode)) { - return { response, body }; - } - isRetryable = this.isRetryableHttpStatusCode(statusCode); - errorMessage = `Failed request: (${statusCode}) ${response.message.statusMessage}`; - if (body.msg) { - if (errors_1.UsageError.isUsageErrorMessage(body.msg)) { - throw new errors_1.UsageError(); - } - errorMessage = `${errorMessage}: ${body.msg}`; - } - if (statusCode === http_client_1.HttpCodes.TooManyRequests) { - const retryAfterHeader = response.message.headers["retry-after"]; - if (retryAfterHeader) { - const parsedSeconds = parseInt(retryAfterHeader, 10); - if (!isNaN(parsedSeconds) && parsedSeconds > 0) { - (0, core_1.warning)(`You've hit a rate limit, your rate limit will reset in ${parsedSeconds} seconds`); - } - } - throw new errors_1.RateLimitError(`Rate limited: ${errorMessage}`); - } - } catch (error3) { - if (error3 instanceof SyntaxError) { - (0, core_1.debug)(`Raw Body: ${rawBody}`); - } - if (error3 instanceof errors_1.UsageError) { - throw error3; - } - if (error3 instanceof errors_1.RateLimitError) { - throw error3; - } - if (errors_1.NetworkError.isNetworkErrorCode(error3 === null || error3 === void 0 ? void 0 : error3.code)) { - throw new errors_1.NetworkError(error3 === null || error3 === void 0 ? void 0 : error3.code); - } - isRetryable = true; - errorMessage = error3.message; - } - if (!isRetryable) { - throw new Error(`Received non-retryable error: ${errorMessage}`); - } - if (attempt + 1 === this.maxAttempts) { - throw new Error(`Failed to make request after ${this.maxAttempts} attempts: ${errorMessage}`); - } - const retryTimeMilliseconds = this.getExponentialRetryTimeMilliseconds(attempt); - (0, core_1.info)(`Attempt ${attempt + 1} of ${this.maxAttempts} failed with error: ${errorMessage}. 
Retrying request in ${retryTimeMilliseconds} ms...`); - yield this.sleep(retryTimeMilliseconds); - attempt++; - } - throw new Error(`Request failed`); - }); - } - isSuccessStatusCode(statusCode) { - if (!statusCode) - return false; - return statusCode >= 200 && statusCode < 300; + exports2.UnaryCall = void 0; + var UnaryCall = class { + constructor(method, requestHeaders, request2, headers, response, status, trailers) { + this.method = method; + this.requestHeaders = requestHeaders; + this.request = request2; + this.headers = headers; + this.response = response; + this.status = status; + this.trailers = trailers; } - isRetryableHttpStatusCode(statusCode) { - if (!statusCode) - return false; - const retryableStatusCodes = [ - http_client_1.HttpCodes.BadGateway, - http_client_1.HttpCodes.GatewayTimeout, - http_client_1.HttpCodes.InternalServerError, - http_client_1.HttpCodes.ServiceUnavailable - ]; - return retryableStatusCodes.includes(statusCode); + /** + * If you are only interested in the final outcome of this call, + * you can await it to receive a `FinishedUnaryCall`. + */ + then(onfulfilled, onrejected) { + return this.promiseFinished().then((value) => onfulfilled ? Promise.resolve(onfulfilled(value)) : value, (reason) => onrejected ? Promise.resolve(onrejected(reason)) : Promise.reject(reason)); } - sleep(milliseconds) { + promiseFinished() { return __awaiter2(this, void 0, void 0, function* () { - return new Promise((resolve6) => setTimeout(resolve6, milliseconds)); + let [headers, response, status, trailers] = yield Promise.all([this.headers, this.response, this.status, this.trailers]); + return { + method: this.method, + requestHeaders: this.requestHeaders, + request: this.request, + headers, + response, + status, + trailers + }; }); } - getExponentialRetryTimeMilliseconds(attempt) { - if (attempt < 0) { - throw new Error("attempt should be a positive integer"); - } - if (attempt === 0) { - return this.baseRetryIntervalMilliseconds; - } - const minTime = this.baseRetryIntervalMilliseconds * Math.pow(this.retryMultiplier, attempt); - const maxTime = minTime * this.retryMultiplier; - return Math.trunc(Math.random() * (maxTime - minTime) + minTime); - } }; - function internalCacheTwirpClient(options) { - const client = new CacheServiceClient((0, user_agent_1.getUserAgentString)(), options === null || options === void 0 ? void 0 : options.maxAttempts, options === null || options === void 0 ? void 0 : options.retryIntervalMs, options === null || options === void 0 ? void 0 : options.retryMultiplier); - return new cache_twirp_client_1.CacheServiceClientJSON(client); - } + exports2.UnaryCall = UnaryCall; } }); -// node_modules/@actions/cache/lib/internal/tar.js -var require_tar = __commonJS({ - "node_modules/@actions/cache/lib/internal/tar.js"(exports2) { +// node_modules/@protobuf-ts/runtime-rpc/build/commonjs/server-streaming-call.js +var require_server_streaming_call = __commonJS({ + "node_modules/@protobuf-ts/runtime-rpc/build/commonjs/server-streaming-call.js"(exports2) { "use strict"; - var __createBinding2 = exports2 && exports2.__createBinding || (Object.create ? (function(o, m, k, k2) { - if (k2 === void 0) k2 = k; - var desc = Object.getOwnPropertyDescriptor(m, k); - if (!desc || ("get" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) { - desc = { enumerable: true, get: function() { - return m[k]; - } }; - } - Object.defineProperty(o, k2, desc); - }) : (function(o, m, k, k2) { - if (k2 === void 0) k2 = k; - o[k2] = m[k]; - })); - var __setModuleDefault2 = exports2 && exports2.__setModuleDefault || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); - }) : function(o, v) { - o["default"] = v; - }); - var __importStar2 = exports2 && exports2.__importStar || /* @__PURE__ */ (function() { - var ownKeys2 = function(o) { - ownKeys2 = Object.getOwnPropertyNames || function(o2) { - var ar = []; - for (var k in o2) if (Object.prototype.hasOwnProperty.call(o2, k)) ar[ar.length] = k; - return ar; - }; - return ownKeys2(o); - }; - return function(mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) { - for (var k = ownKeys2(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding2(result, mod, k[i]); - } - __setModuleDefault2(result, mod); - return result; - }; - })(); var __awaiter2 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? value : new P(function(resolve6) { @@ -96920,230 +96566,117 @@ var require_tar = __commonJS({ }); }; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.listTar = listTar; - exports2.extractTar = extractTar2; - exports2.createTar = createTar; - var exec_1 = require_exec(); - var io6 = __importStar2(require_io()); - var fs_1 = require("fs"); - var path13 = __importStar2(require("path")); - var utils = __importStar2(require_cacheUtils()); - var constants_1 = require_constants12(); - var IS_WINDOWS = process.platform === "win32"; - function getTarPath() { - return __awaiter2(this, void 0, void 0, function* () { - switch (process.platform) { - case "win32": { - const gnuTar = yield utils.getGnuTarPathOnWindows(); - const systemTar = constants_1.SystemTarPathOnWindows; - if (gnuTar) { - return { path: gnuTar, type: constants_1.ArchiveToolType.GNU }; - } else if ((0, fs_1.existsSync)(systemTar)) { - return { path: systemTar, type: constants_1.ArchiveToolType.BSD }; - } - break; - } - case "darwin": { - const gnuTar = yield io6.which("gtar", false); - if (gnuTar) { - return { path: gnuTar, type: constants_1.ArchiveToolType.GNU }; - } else { - return { - path: yield io6.which("tar", true), - type: constants_1.ArchiveToolType.BSD - }; - } - } - default: - break; - } - return { - path: yield io6.which("tar", true), - type: constants_1.ArchiveToolType.GNU - }; - }); - } - function getTarArgs(tarPath_1, compressionMethod_1, type_1) { - return __awaiter2(this, arguments, void 0, function* (tarPath, compressionMethod, type2, archivePath = "") { - const args = [`"${tarPath.path}"`]; - const cacheFileName = utils.getCacheFileName(compressionMethod); - const tarFile = "cache.tar"; - const workingDirectory = getWorkingDirectory(); - const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD && compressionMethod !== constants_1.CompressionMethod.Gzip && IS_WINDOWS; - switch (type2) { - case "create": - args.push("--posix", "-cf", BSD_TAR_ZSTD ? tarFile : cacheFileName.replace(new RegExp(`\\${path13.sep}`, "g"), "/"), "--exclude", BSD_TAR_ZSTD ? 
tarFile : cacheFileName.replace(new RegExp(`\\${path13.sep}`, "g"), "/"), "-P", "-C", workingDirectory.replace(new RegExp(`\\${path13.sep}`, "g"), "/"), "--files-from", constants_1.ManifestFilename); - break; - case "extract": - args.push("-xf", BSD_TAR_ZSTD ? tarFile : archivePath.replace(new RegExp(`\\${path13.sep}`, "g"), "/"), "-P", "-C", workingDirectory.replace(new RegExp(`\\${path13.sep}`, "g"), "/")); - break; - case "list": - args.push("-tf", BSD_TAR_ZSTD ? tarFile : archivePath.replace(new RegExp(`\\${path13.sep}`, "g"), "/"), "-P"); - break; - } - if (tarPath.type === constants_1.ArchiveToolType.GNU) { - switch (process.platform) { - case "win32": - args.push("--force-local"); - break; - case "darwin": - args.push("--delay-directory-restore"); - break; + exports2.ServerStreamingCall = void 0; + var ServerStreamingCall = class { + constructor(method, requestHeaders, request2, headers, response, status, trailers) { + this.method = method; + this.requestHeaders = requestHeaders; + this.request = request2; + this.headers = headers; + this.responses = response; + this.status = status; + this.trailers = trailers; + } + /** + * Instead of awaiting the response status and trailers, you can + * just as well await this call itself to receive the server outcome. + * You should first setup some listeners to the `request` to + * see the actual messages the server replied with. + */ + then(onfulfilled, onrejected) { + return this.promiseFinished().then((value) => onfulfilled ? Promise.resolve(onfulfilled(value)) : value, (reason) => onrejected ? Promise.resolve(onrejected(reason)) : Promise.reject(reason)); + } + promiseFinished() { + return __awaiter2(this, void 0, void 0, function* () { + let [headers, status, trailers] = yield Promise.all([this.headers, this.status, this.trailers]); + return { + method: this.method, + requestHeaders: this.requestHeaders, + request: this.request, + headers, + status, + trailers + }; + }); + } + }; + exports2.ServerStreamingCall = ServerStreamingCall; + } +}); + +// node_modules/@protobuf-ts/runtime-rpc/build/commonjs/client-streaming-call.js +var require_client_streaming_call = __commonJS({ + "node_modules/@protobuf-ts/runtime-rpc/build/commonjs/client-streaming-call.js"(exports2) { + "use strict"; + var __awaiter2 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { + function adopt(value) { + return value instanceof P ? value : new P(function(resolve6) { + resolve6(value); + }); + } + return new (P || (P = Promise))(function(resolve6, reject) { + function fulfilled(value) { + try { + step(generator.next(value)); + } catch (e) { + reject(e); } } - return args; - }); - } - function getCommands(compressionMethod_1, type_1) { - return __awaiter2(this, arguments, void 0, function* (compressionMethod, type2, archivePath = "") { - let args; - const tarPath = yield getTarPath(); - const tarArgs = yield getTarArgs(tarPath, compressionMethod, type2, archivePath); - const compressionArgs = type2 !== "create" ? 
yield getDecompressionProgram(tarPath, compressionMethod, archivePath) : yield getCompressionProgram(tarPath, compressionMethod); - const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD && compressionMethod !== constants_1.CompressionMethod.Gzip && IS_WINDOWS; - if (BSD_TAR_ZSTD && type2 !== "create") { - args = [[...compressionArgs].join(" "), [...tarArgs].join(" ")]; - } else { - args = [[...tarArgs].join(" "), [...compressionArgs].join(" ")]; - } - if (BSD_TAR_ZSTD) { - return args; - } - return [args.join(" ")]; - }); - } - function getWorkingDirectory() { - var _a; - return (_a = process.env["GITHUB_WORKSPACE"]) !== null && _a !== void 0 ? _a : process.cwd(); - } - function getDecompressionProgram(tarPath, compressionMethod, archivePath) { - return __awaiter2(this, void 0, void 0, function* () { - const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD && compressionMethod !== constants_1.CompressionMethod.Gzip && IS_WINDOWS; - switch (compressionMethod) { - case constants_1.CompressionMethod.Zstd: - return BSD_TAR_ZSTD ? [ - "zstd -d --long=30 --force -o", - constants_1.TarFilename, - archivePath.replace(new RegExp(`\\${path13.sep}`, "g"), "/") - ] : [ - "--use-compress-program", - IS_WINDOWS ? '"zstd -d --long=30"' : "unzstd --long=30" - ]; - case constants_1.CompressionMethod.ZstdWithoutLong: - return BSD_TAR_ZSTD ? [ - "zstd -d --force -o", - constants_1.TarFilename, - archivePath.replace(new RegExp(`\\${path13.sep}`, "g"), "/") - ] : ["--use-compress-program", IS_WINDOWS ? '"zstd -d"' : "unzstd"]; - default: - return ["-z"]; - } - }); - } - function getCompressionProgram(tarPath, compressionMethod) { - return __awaiter2(this, void 0, void 0, function* () { - const cacheFileName = utils.getCacheFileName(compressionMethod); - const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD && compressionMethod !== constants_1.CompressionMethod.Gzip && IS_WINDOWS; - switch (compressionMethod) { - case constants_1.CompressionMethod.Zstd: - return BSD_TAR_ZSTD ? [ - "zstd -T0 --long=30 --force -o", - cacheFileName.replace(new RegExp(`\\${path13.sep}`, "g"), "/"), - constants_1.TarFilename - ] : [ - "--use-compress-program", - IS_WINDOWS ? '"zstd -T0 --long=30"' : "zstdmt --long=30" - ]; - case constants_1.CompressionMethod.ZstdWithoutLong: - return BSD_TAR_ZSTD ? [ - "zstd -T0 --force -o", - cacheFileName.replace(new RegExp(`\\${path13.sep}`, "g"), "/"), - constants_1.TarFilename - ] : ["--use-compress-program", IS_WINDOWS ? '"zstd -T0"' : "zstdmt"]; - default: - return ["-z"]; - } - }); - } - function execCommands(commands, cwd) { - return __awaiter2(this, void 0, void 0, function* () { - for (const command of commands) { - try { - yield (0, exec_1.exec)(command, void 0, { - cwd, - env: Object.assign(Object.assign({}, process.env), { MSYS: "winsymlinks:nativestrict" }) - }); - } catch (error3) { - throw new Error(`${command.split(" ")[0]} failed with error: ${error3 === null || error3 === void 0 ? void 0 : error3.message}`); + function rejected(value) { + try { + step(generator["throw"](value)); + } catch (e) { + reject(e); } } + function step(result) { + result.done ? 
resolve6(result.value) : adopt(result.value).then(fulfilled, rejected); + } + step((generator = generator.apply(thisArg, _arguments || [])).next()); }); - } - function listTar(archivePath, compressionMethod) { - return __awaiter2(this, void 0, void 0, function* () { - const commands = yield getCommands(compressionMethod, "list", archivePath); - yield execCommands(commands); - }); - } - function extractTar2(archivePath, compressionMethod) { - return __awaiter2(this, void 0, void 0, function* () { - const workingDirectory = getWorkingDirectory(); - yield io6.mkdirP(workingDirectory); - const commands = yield getCommands(compressionMethod, "extract", archivePath); - yield execCommands(commands); - }); - } - function createTar(archiveFolder, sourceDirectories, compressionMethod) { - return __awaiter2(this, void 0, void 0, function* () { - (0, fs_1.writeFileSync)(path13.join(archiveFolder, constants_1.ManifestFilename), sourceDirectories.join("\n")); - const commands = yield getCommands(compressionMethod, "create"); - yield execCommands(commands, archiveFolder); - }); - } + }; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.ClientStreamingCall = void 0; + var ClientStreamingCall = class { + constructor(method, requestHeaders, request2, headers, response, status, trailers) { + this.method = method; + this.requestHeaders = requestHeaders; + this.requests = request2; + this.headers = headers; + this.response = response; + this.status = status; + this.trailers = trailers; + } + /** + * Instead of awaiting the response status and trailers, you can + * just as well await this call itself to receive the server outcome. + * Note that it may still be valid to send more request messages. + */ + then(onfulfilled, onrejected) { + return this.promiseFinished().then((value) => onfulfilled ? Promise.resolve(onfulfilled(value)) : value, (reason) => onrejected ? Promise.resolve(onrejected(reason)) : Promise.reject(reason)); + } + promiseFinished() { + return __awaiter2(this, void 0, void 0, function* () { + let [headers, response, status, trailers] = yield Promise.all([this.headers, this.response, this.status, this.trailers]); + return { + method: this.method, + requestHeaders: this.requestHeaders, + headers, + response, + status, + trailers + }; + }); + } + }; + exports2.ClientStreamingCall = ClientStreamingCall; } }); -// node_modules/@actions/cache/lib/cache.js -var require_cache5 = __commonJS({ - "node_modules/@actions/cache/lib/cache.js"(exports2) { +// node_modules/@protobuf-ts/runtime-rpc/build/commonjs/duplex-streaming-call.js +var require_duplex_streaming_call = __commonJS({ + "node_modules/@protobuf-ts/runtime-rpc/build/commonjs/duplex-streaming-call.js"(exports2) { "use strict"; - var __createBinding2 = exports2 && exports2.__createBinding || (Object.create ? (function(o, m, k, k2) { - if (k2 === void 0) k2 = k; - var desc = Object.getOwnPropertyDescriptor(m, k); - if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { - desc = { enumerable: true, get: function() { - return m[k]; - } }; - } - Object.defineProperty(o, k2, desc); - }) : (function(o, m, k, k2) { - if (k2 === void 0) k2 = k; - o[k2] = m[k]; - })); - var __setModuleDefault2 = exports2 && exports2.__setModuleDefault || (Object.create ? 
(function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); - }) : function(o, v) { - o["default"] = v; - }); - var __importStar2 = exports2 && exports2.__importStar || /* @__PURE__ */ (function() { - var ownKeys2 = function(o) { - ownKeys2 = Object.getOwnPropertyNames || function(o2) { - var ar = []; - for (var k in o2) if (Object.prototype.hasOwnProperty.call(o2, k)) ar[ar.length] = k; - return ar; - }; - return ownKeys2(o); - }; - return function(mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) { - for (var k = ownKeys2(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding2(result, mod, k[i]); - } - __setModuleDefault2(result, mod); - return result; - }; - })(); var __awaiter2 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? value : new P(function(resolve6) { @@ -97172,1669 +96705,2136 @@ var require_cache5 = __commonJS({ }); }; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.FinalizeCacheError = exports2.ReserveCacheError = exports2.ValidationError = void 0; - exports2.isFeatureAvailable = isFeatureAvailable; - exports2.restoreCache = restoreCache3; - exports2.saveCache = saveCache3; - var core14 = __importStar2(require_core()); - var path13 = __importStar2(require("path")); - var utils = __importStar2(require_cacheUtils()); - var cacheHttpClient = __importStar2(require_cacheHttpClient()); - var cacheTwirpClient = __importStar2(require_cacheTwirpClient()); - var config_1 = require_config(); - var tar_1 = require_tar(); - var http_client_1 = require_lib(); - var ValidationError = class _ValidationError extends Error { - constructor(message) { - super(message); - this.name = "ValidationError"; - Object.setPrototypeOf(this, _ValidationError.prototype); + exports2.DuplexStreamingCall = void 0; + var DuplexStreamingCall = class { + constructor(method, requestHeaders, request2, headers, response, status, trailers) { + this.method = method; + this.requestHeaders = requestHeaders; + this.requests = request2; + this.headers = headers; + this.responses = response; + this.status = status; + this.trailers = trailers; } - }; - exports2.ValidationError = ValidationError; - var ReserveCacheError = class _ReserveCacheError extends Error { - constructor(message) { - super(message); - this.name = "ReserveCacheError"; - Object.setPrototypeOf(this, _ReserveCacheError.prototype); + /** + * Instead of awaiting the response status and trailers, you can + * just as well await this call itself to receive the server outcome. + * Note that it may still be valid to send more request messages. + */ + then(onfulfilled, onrejected) { + return this.promiseFinished().then((value) => onfulfilled ? Promise.resolve(onfulfilled(value)) : value, (reason) => onrejected ? 
Promise.resolve(onrejected(reason)) : Promise.reject(reason)); + } + promiseFinished() { + return __awaiter2(this, void 0, void 0, function* () { + let [headers, status, trailers] = yield Promise.all([this.headers, this.status, this.trailers]); + return { + method: this.method, + requestHeaders: this.requestHeaders, + headers, + status, + trailers + }; + }); } }; - exports2.ReserveCacheError = ReserveCacheError; - var FinalizeCacheError = class _FinalizeCacheError extends Error { - constructor(message) { - super(message); - this.name = "FinalizeCacheError"; - Object.setPrototypeOf(this, _FinalizeCacheError.prototype); + exports2.DuplexStreamingCall = DuplexStreamingCall; + } +}); + +// node_modules/@protobuf-ts/runtime-rpc/build/commonjs/test-transport.js +var require_test_transport = __commonJS({ + "node_modules/@protobuf-ts/runtime-rpc/build/commonjs/test-transport.js"(exports2) { + "use strict"; + var __awaiter2 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { + function adopt(value) { + return value instanceof P ? value : new P(function(resolve6) { + resolve6(value); + }); } + return new (P || (P = Promise))(function(resolve6, reject) { + function fulfilled(value) { + try { + step(generator.next(value)); + } catch (e) { + reject(e); + } + } + function rejected(value) { + try { + step(generator["throw"](value)); + } catch (e) { + reject(e); + } + } + function step(result) { + result.done ? resolve6(result.value) : adopt(result.value).then(fulfilled, rejected); + } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); }; - exports2.FinalizeCacheError = FinalizeCacheError; - function checkPaths(paths) { - if (!paths || paths.length === 0) { - throw new ValidationError(`Path Validation Error: At least one directory or file path is required`); + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.TestTransport = void 0; + var rpc_error_1 = require_rpc_error(); + var runtime_1 = require_commonjs16(); + var rpc_output_stream_1 = require_rpc_output_stream(); + var rpc_options_1 = require_rpc_options(); + var unary_call_1 = require_unary_call(); + var server_streaming_call_1 = require_server_streaming_call(); + var client_streaming_call_1 = require_client_streaming_call(); + var duplex_streaming_call_1 = require_duplex_streaming_call(); + var TestTransport = class _TestTransport { + /** + * Initialize with mock data. Omitted fields have default value. + */ + constructor(data) { + this.suppressUncaughtRejections = true; + this.headerDelay = 10; + this.responseDelay = 50; + this.betweenResponseDelay = 10; + this.afterResponseDelay = 10; + this.data = data !== null && data !== void 0 ? data : {}; } - } - function checkKey(key) { - if (key.length > 512) { - throw new ValidationError(`Key Validation Error: ${key} cannot be larger than 512 characters.`); + /** + * Sent message(s) during the last operation. + */ + get sentMessages() { + if (this.lastInput instanceof TestInputStream) { + return this.lastInput.sent; + } else if (typeof this.lastInput == "object") { + return [this.lastInput.single]; + } + return []; } - const regex = /^[^,]*$/; - if (!regex.test(key)) { - throw new ValidationError(`Key Validation Error: ${key} cannot contain commas.`); + /** + * Sending message(s) completed? 
+ */ + get sendComplete() { + if (this.lastInput instanceof TestInputStream) { + return this.lastInput.completed; + } else if (typeof this.lastInput == "object") { + return true; + } + return false; } - } - function isFeatureAvailable() { - const cacheServiceVersion = (0, config_1.getCacheServiceVersion)(); - switch (cacheServiceVersion) { - case "v2": - return !!process.env["ACTIONS_RESULTS_URL"]; - case "v1": - default: - return !!process.env["ACTIONS_CACHE_URL"]; + // Creates a promise for response headers from the mock data. + promiseHeaders() { + var _a; + const headers = (_a = this.data.headers) !== null && _a !== void 0 ? _a : _TestTransport.defaultHeaders; + return headers instanceof rpc_error_1.RpcError ? Promise.reject(headers) : Promise.resolve(headers); } - } - function restoreCache3(paths_1, primaryKey_1, restoreKeys_1, options_1) { - return __awaiter2(this, arguments, void 0, function* (paths, primaryKey, restoreKeys, options, enableCrossOsArchive = false) { - const cacheServiceVersion = (0, config_1.getCacheServiceVersion)(); - core14.debug(`Cache service version: ${cacheServiceVersion}`); - checkPaths(paths); - switch (cacheServiceVersion) { - case "v2": - return yield restoreCacheV2(paths, primaryKey, restoreKeys, options, enableCrossOsArchive); - case "v1": - default: - return yield restoreCacheV1(paths, primaryKey, restoreKeys, options, enableCrossOsArchive); - } - }); - } - function restoreCacheV1(paths_1, primaryKey_1, restoreKeys_1, options_1) { - return __awaiter2(this, arguments, void 0, function* (paths, primaryKey, restoreKeys, options, enableCrossOsArchive = false) { - restoreKeys = restoreKeys || []; - const keys = [primaryKey, ...restoreKeys]; - core14.debug("Resolved Keys:"); - core14.debug(JSON.stringify(keys)); - if (keys.length > 10) { - throw new ValidationError(`Key Validation Error: Keys are limited to a maximum of 10.`); + // Creates a promise for a single, valid, message from the mock data. + promiseSingleResponse(method) { + if (this.data.response instanceof rpc_error_1.RpcError) { + return Promise.reject(this.data.response); } - for (const key of keys) { - checkKey(key); + let r; + if (Array.isArray(this.data.response)) { + runtime_1.assert(this.data.response.length > 0); + r = this.data.response[0]; + } else if (this.data.response !== void 0) { + r = this.data.response; + } else { + r = method.O.create(); } - const compressionMethod = yield utils.getCompressionMethod(); - let archivePath = ""; - try { - const cacheEntry = yield cacheHttpClient.getCacheEntry(keys, paths, { - compressionMethod, - enableCrossOsArchive - }); - if (!(cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.archiveLocation)) { - return void 0; - } - if (options === null || options === void 0 ? 
void 0 : options.lookupOnly) { - core14.info("Lookup only - skipping download"); - return cacheEntry.cacheKey; - } - archivePath = path13.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); - core14.debug(`Archive Path: ${archivePath}`); - yield cacheHttpClient.downloadCache(cacheEntry.archiveLocation, archivePath, options); - if (core14.isDebug()) { - yield (0, tar_1.listTar)(archivePath, compressionMethod); - } - const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core14.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); - yield (0, tar_1.extractTar)(archivePath, compressionMethod); - core14.info("Cache restored successfully"); - return cacheEntry.cacheKey; - } catch (error3) { - const typedError = error3; - if (typedError.name === ValidationError.name) { - throw error3; - } else { - if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { - core14.error(`Failed to restore: ${error3.message}`); - } else { - core14.warning(`Failed to restore: ${error3.message}`); + runtime_1.assert(method.O.is(r)); + return Promise.resolve(r); + } + /** + * Pushes response messages from the mock data to the output stream. + * If an error response, status or trailers are mocked, the stream is + * closed with the respective error. + * Otherwise, stream is completed successfully. + * + * The returned promise resolves when the stream is closed. It should + * not reject. If it does, code is broken. + */ + streamResponses(method, stream2, abort) { + return __awaiter2(this, void 0, void 0, function* () { + const messages = []; + if (this.data.response === void 0) { + messages.push(method.O.create()); + } else if (Array.isArray(this.data.response)) { + for (let msg of this.data.response) { + runtime_1.assert(method.O.is(msg)); + messages.push(msg); } + } else if (!(this.data.response instanceof rpc_error_1.RpcError)) { + runtime_1.assert(method.O.is(this.data.response)); + messages.push(this.data.response); } - } finally { try { - yield utils.unlinkFile(archivePath); + yield delay2(this.responseDelay, abort)(void 0); } catch (error3) { - core14.debug(`Failed to delete archive: ${error3}`); + stream2.notifyError(error3); + return; } - } - return void 0; - }); - } - function restoreCacheV2(paths_1, primaryKey_1, restoreKeys_1, options_1) { - return __awaiter2(this, arguments, void 0, function* (paths, primaryKey, restoreKeys, options, enableCrossOsArchive = false) { - options = Object.assign(Object.assign({}, options), { useAzureSdk: true }); - restoreKeys = restoreKeys || []; - const keys = [primaryKey, ...restoreKeys]; - core14.debug("Resolved Keys:"); - core14.debug(JSON.stringify(keys)); - if (keys.length > 10) { - throw new ValidationError(`Key Validation Error: Keys are limited to a maximum of 10.`); - } - for (const key of keys) { - checkKey(key); - } - let archivePath = ""; - try { - const twirpClient = cacheTwirpClient.internalCacheTwirpClient(); - const compressionMethod = yield utils.getCompressionMethod(); - const request2 = { - key: primaryKey, - restoreKeys, - version: utils.getCacheVersion(paths, compressionMethod, enableCrossOsArchive) - }; - const response = yield twirpClient.GetCacheEntryDownloadURL(request2); - if (!response.ok) { - core14.debug(`Cache not found for version ${request2.version} of keys: ${keys.join(", ")}`); - return void 0; + if (this.data.response instanceof rpc_error_1.RpcError) { + 
stream2.notifyError(this.data.response); + return; } - const isRestoreKeyMatch = request2.key !== response.matchedKey; - if (isRestoreKeyMatch) { - core14.info(`Cache hit for restore-key: ${response.matchedKey}`); - } else { - core14.info(`Cache hit for: ${response.matchedKey}`); + for (let msg of messages) { + stream2.notifyMessage(msg); + try { + yield delay2(this.betweenResponseDelay, abort)(void 0); + } catch (error3) { + stream2.notifyError(error3); + return; + } } - if (options === null || options === void 0 ? void 0 : options.lookupOnly) { - core14.info("Lookup only - skipping download"); - return response.matchedKey; + if (this.data.status instanceof rpc_error_1.RpcError) { + stream2.notifyError(this.data.status); + return; } - archivePath = path13.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); - core14.debug(`Archive path: ${archivePath}`); - core14.debug(`Starting download of archive to: ${archivePath}`); - yield cacheHttpClient.downloadCache(response.signedDownloadUrl, archivePath, options); - const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core14.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); - if (core14.isDebug()) { - yield (0, tar_1.listTar)(archivePath, compressionMethod); + if (this.data.trailers instanceof rpc_error_1.RpcError) { + stream2.notifyError(this.data.trailers); + return; } - yield (0, tar_1.extractTar)(archivePath, compressionMethod); - core14.info("Cache restored successfully"); - return response.matchedKey; - } catch (error3) { - const typedError = error3; - if (typedError.name === ValidationError.name) { - throw error3; - } else { - if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { - core14.error(`Failed to restore: ${error3.message}`); - } else { - core14.warning(`Failed to restore: ${error3.message}`); - } + stream2.notifyComplete(); + }); + } + // Creates a promise for response status from the mock data. + promiseStatus() { + var _a; + const status = (_a = this.data.status) !== null && _a !== void 0 ? _a : _TestTransport.defaultStatus; + return status instanceof rpc_error_1.RpcError ? Promise.reject(status) : Promise.resolve(status); + } + // Creates a promise for response trailers from the mock data. + promiseTrailers() { + var _a; + const trailers = (_a = this.data.trailers) !== null && _a !== void 0 ? _a : _TestTransport.defaultTrailers; + return trailers instanceof rpc_error_1.RpcError ? Promise.reject(trailers) : Promise.resolve(trailers); + } + maybeSuppressUncaught(...promise) { + if (this.suppressUncaughtRejections) { + for (let p of promise) { + p.catch(() => { + }); } - } finally { - try { - if (archivePath) { - yield utils.unlinkFile(archivePath); - } - } catch (error3) { - core14.debug(`Failed to delete archive: ${error3}`); + } + } + mergeOptions(options) { + return rpc_options_1.mergeRpcOptions({}, options); + } + unary(method, input, options) { + var _a; + const requestHeaders = (_a = options.meta) !== null && _a !== void 0 ? 
_a : {}, headersPromise = this.promiseHeaders().then(delay2(this.headerDelay, options.abort)), responsePromise = headersPromise.catch((_) => { + }).then(delay2(this.responseDelay, options.abort)).then((_) => this.promiseSingleResponse(method)), statusPromise = responsePromise.catch((_) => { + }).then(delay2(this.afterResponseDelay, options.abort)).then((_) => this.promiseStatus()), trailersPromise = responsePromise.catch((_) => { + }).then(delay2(this.afterResponseDelay, options.abort)).then((_) => this.promiseTrailers()); + this.maybeSuppressUncaught(statusPromise, trailersPromise); + this.lastInput = { single: input }; + return new unary_call_1.UnaryCall(method, requestHeaders, input, headersPromise, responsePromise, statusPromise, trailersPromise); + } + serverStreaming(method, input, options) { + var _a; + const requestHeaders = (_a = options.meta) !== null && _a !== void 0 ? _a : {}, headersPromise = this.promiseHeaders().then(delay2(this.headerDelay, options.abort)), outputStream = new rpc_output_stream_1.RpcOutputStreamController(), responseStreamClosedPromise = headersPromise.then(delay2(this.responseDelay, options.abort)).catch(() => { + }).then(() => this.streamResponses(method, outputStream, options.abort)).then(delay2(this.afterResponseDelay, options.abort)), statusPromise = responseStreamClosedPromise.then(() => this.promiseStatus()), trailersPromise = responseStreamClosedPromise.then(() => this.promiseTrailers()); + this.maybeSuppressUncaught(statusPromise, trailersPromise); + this.lastInput = { single: input }; + return new server_streaming_call_1.ServerStreamingCall(method, requestHeaders, input, headersPromise, outputStream, statusPromise, trailersPromise); + } + clientStreaming(method, options) { + var _a; + const requestHeaders = (_a = options.meta) !== null && _a !== void 0 ? _a : {}, headersPromise = this.promiseHeaders().then(delay2(this.headerDelay, options.abort)), responsePromise = headersPromise.catch((_) => { + }).then(delay2(this.responseDelay, options.abort)).then((_) => this.promiseSingleResponse(method)), statusPromise = responsePromise.catch((_) => { + }).then(delay2(this.afterResponseDelay, options.abort)).then((_) => this.promiseStatus()), trailersPromise = responsePromise.catch((_) => { + }).then(delay2(this.afterResponseDelay, options.abort)).then((_) => this.promiseTrailers()); + this.maybeSuppressUncaught(statusPromise, trailersPromise); + this.lastInput = new TestInputStream(this.data, options.abort); + return new client_streaming_call_1.ClientStreamingCall(method, requestHeaders, this.lastInput, headersPromise, responsePromise, statusPromise, trailersPromise); + } + duplex(method, options) { + var _a; + const requestHeaders = (_a = options.meta) !== null && _a !== void 0 ? 
_a : {}, headersPromise = this.promiseHeaders().then(delay2(this.headerDelay, options.abort)), outputStream = new rpc_output_stream_1.RpcOutputStreamController(), responseStreamClosedPromise = headersPromise.then(delay2(this.responseDelay, options.abort)).catch(() => { + }).then(() => this.streamResponses(method, outputStream, options.abort)).then(delay2(this.afterResponseDelay, options.abort)), statusPromise = responseStreamClosedPromise.then(() => this.promiseStatus()), trailersPromise = responseStreamClosedPromise.then(() => this.promiseTrailers()); + this.maybeSuppressUncaught(statusPromise, trailersPromise); + this.lastInput = new TestInputStream(this.data, options.abort); + return new duplex_streaming_call_1.DuplexStreamingCall(method, requestHeaders, this.lastInput, headersPromise, outputStream, statusPromise, trailersPromise); + } + }; + exports2.TestTransport = TestTransport; + TestTransport.defaultHeaders = { + responseHeader: "test" + }; + TestTransport.defaultStatus = { + code: "OK", + detail: "all good" + }; + TestTransport.defaultTrailers = { + responseTrailer: "test" + }; + function delay2(ms, abort) { + return (v) => new Promise((resolve6, reject) => { + if (abort === null || abort === void 0 ? void 0 : abort.aborted) { + reject(new rpc_error_1.RpcError("user cancel", "CANCELLED")); + } else { + const id = setTimeout(() => resolve6(v), ms); + if (abort) { + abort.addEventListener("abort", (ev) => { + clearTimeout(id); + reject(new rpc_error_1.RpcError("user cancel", "CANCELLED")); + }); } } - return void 0; }); } - function saveCache3(paths_1, key_1, options_1) { - return __awaiter2(this, arguments, void 0, function* (paths, key, options, enableCrossOsArchive = false) { - const cacheServiceVersion = (0, config_1.getCacheServiceVersion)(); - core14.debug(`Cache service version: ${cacheServiceVersion}`); - checkPaths(paths); - checkKey(key); - switch (cacheServiceVersion) { - case "v2": - return yield saveCacheV2(paths, key, options, enableCrossOsArchive); - case "v1": - default: - return yield saveCacheV1(paths, key, options, enableCrossOsArchive); + var TestInputStream = class { + constructor(data, abort) { + this._completed = false; + this._sent = []; + this.data = data; + this.abort = abort; + } + get sent() { + return this._sent; + } + get completed() { + return this._completed; + } + send(message) { + if (this.data.inputMessage instanceof rpc_error_1.RpcError) { + return Promise.reject(this.data.inputMessage); } - }); - } - function saveCacheV1(paths_1, key_1, options_1) { - return __awaiter2(this, arguments, void 0, function* (paths, key, options, enableCrossOsArchive = false) { - var _a, _b, _c, _d, _e; - const compressionMethod = yield utils.getCompressionMethod(); - let cacheId = -1; - const cachePaths = yield utils.resolvePaths(paths); - core14.debug("Cache Paths:"); - core14.debug(`${JSON.stringify(cachePaths)}`); - if (cachePaths.length === 0) { - throw new Error(`Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`); + const delayMs = this.data.inputMessage === void 0 ? 
10 : this.data.inputMessage; + return Promise.resolve(void 0).then(() => { + this._sent.push(message); + }).then(delay2(delayMs, this.abort)); + } + complete() { + if (this.data.inputComplete instanceof rpc_error_1.RpcError) { + return Promise.reject(this.data.inputComplete); } - const archiveFolder = yield utils.createTempDirectory(); - const archivePath = path13.join(archiveFolder, utils.getCacheFileName(compressionMethod)); - core14.debug(`Archive Path: ${archivePath}`); - try { - yield (0, tar_1.createTar)(archiveFolder, cachePaths, compressionMethod); - if (core14.isDebug()) { - yield (0, tar_1.listTar)(archivePath, compressionMethod); - } - const fileSizeLimit = 10 * 1024 * 1024 * 1024; - const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core14.debug(`File Size: ${archiveFileSize}`); - if (archiveFileSize > fileSizeLimit && !(0, config_1.isGhes)()) { - throw new Error(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 10GB limit, not saving cache.`); - } - core14.debug("Reserving Cache"); - const reserveCacheResponse = yield cacheHttpClient.reserveCache(key, paths, { - compressionMethod, - enableCrossOsArchive, - cacheSize: archiveFileSize - }); - if ((_a = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.result) === null || _a === void 0 ? void 0 : _a.cacheId) { - cacheId = (_b = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.result) === null || _b === void 0 ? void 0 : _b.cacheId; - } else if ((reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.statusCode) === 400) { - throw new Error((_d = (_c = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.error) === null || _c === void 0 ? void 0 : _c.message) !== null && _d !== void 0 ? _d : `Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the data cap limit, not saving cache.`); - } else { - throw new ReserveCacheError(`Unable to reserve cache with key ${key}, another job may be creating this cache. More details: ${(_e = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.error) === null || _e === void 0 ? void 0 : _e.message}`); - } - core14.debug(`Saving Cache (ID: ${cacheId})`); - yield cacheHttpClient.saveCache(cacheId, archivePath, "", options); - } catch (error3) { - const typedError = error3; - if (typedError.name === ValidationError.name) { - throw error3; - } else if (typedError.name === ReserveCacheError.name) { - core14.info(`Failed to save: ${typedError.message}`); - } else { - if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { - core14.error(`Failed to save: ${typedError.message}`); - } else { - core14.warning(`Failed to save: ${typedError.message}`); - } - } - } finally { - try { - yield utils.unlinkFile(archivePath); - } catch (error3) { - core14.debug(`Failed to delete archive: ${error3}`); - } + const delayMs = this.data.inputComplete === void 0 ? 
10 : this.data.inputComplete; + return Promise.resolve(void 0).then(() => { + this._completed = true; + }).then(delay2(delayMs, this.abort)); + } + }; + } +}); + +// node_modules/@protobuf-ts/runtime-rpc/build/commonjs/rpc-interceptor.js +var require_rpc_interceptor = __commonJS({ + "node_modules/@protobuf-ts/runtime-rpc/build/commonjs/rpc-interceptor.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.stackDuplexStreamingInterceptors = exports2.stackClientStreamingInterceptors = exports2.stackServerStreamingInterceptors = exports2.stackUnaryInterceptors = exports2.stackIntercept = void 0; + var runtime_1 = require_commonjs16(); + function stackIntercept(kind, transport, method, options, input) { + var _a, _b, _c, _d; + if (kind == "unary") { + let tail = (mtd, inp, opt) => transport.unary(mtd, inp, opt); + for (const curr of ((_a = options.interceptors) !== null && _a !== void 0 ? _a : []).filter((i) => i.interceptUnary).reverse()) { + const next = tail; + tail = (mtd, inp, opt) => curr.interceptUnary(next, mtd, inp, opt); } - return cacheId; - }); - } - function saveCacheV2(paths_1, key_1, options_1) { - return __awaiter2(this, arguments, void 0, function* (paths, key, options, enableCrossOsArchive = false) { - options = Object.assign(Object.assign({}, options), { uploadChunkSize: 64 * 1024 * 1024, uploadConcurrency: 8, useAzureSdk: true }); - const compressionMethod = yield utils.getCompressionMethod(); - const twirpClient = cacheTwirpClient.internalCacheTwirpClient(); - let cacheId = -1; - const cachePaths = yield utils.resolvePaths(paths); - core14.debug("Cache Paths:"); - core14.debug(`${JSON.stringify(cachePaths)}`); - if (cachePaths.length === 0) { - throw new Error(`Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`); + return tail(method, input, options); + } + if (kind == "serverStreaming") { + let tail = (mtd, inp, opt) => transport.serverStreaming(mtd, inp, opt); + for (const curr of ((_b = options.interceptors) !== null && _b !== void 0 ? 
_b : []).filter((i) => i.interceptServerStreaming).reverse()) { + const next = tail; + tail = (mtd, inp, opt) => curr.interceptServerStreaming(next, mtd, inp, opt); } - const archiveFolder = yield utils.createTempDirectory(); - const archivePath = path13.join(archiveFolder, utils.getCacheFileName(compressionMethod)); - core14.debug(`Archive Path: ${archivePath}`); - try { - yield (0, tar_1.createTar)(archiveFolder, cachePaths, compressionMethod); - if (core14.isDebug()) { - yield (0, tar_1.listTar)(archivePath, compressionMethod); - } - const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core14.debug(`File Size: ${archiveFileSize}`); - options.archiveSizeBytes = archiveFileSize; - core14.debug("Reserving Cache"); - const version = utils.getCacheVersion(paths, compressionMethod, enableCrossOsArchive); - const request2 = { - key, - version - }; - let signedUploadUrl; - try { - const response = yield twirpClient.CreateCacheEntry(request2); - if (!response.ok) { - if (response.message) { - core14.warning(`Cache reservation failed: ${response.message}`); - } - throw new Error(response.message || "Response was not ok"); - } - signedUploadUrl = response.signedUploadUrl; - } catch (error3) { - core14.debug(`Failed to reserve cache: ${error3}`); - throw new ReserveCacheError(`Unable to reserve cache with key ${key}, another job may be creating this cache.`); + return tail(method, input, options); + } + if (kind == "clientStreaming") { + let tail = (mtd, opt) => transport.clientStreaming(mtd, opt); + for (const curr of ((_c = options.interceptors) !== null && _c !== void 0 ? _c : []).filter((i) => i.interceptClientStreaming).reverse()) { + const next = tail; + tail = (mtd, opt) => curr.interceptClientStreaming(next, mtd, opt); + } + return tail(method, options); + } + if (kind == "duplex") { + let tail = (mtd, opt) => transport.duplex(mtd, opt); + for (const curr of ((_d = options.interceptors) !== null && _d !== void 0 ? 
_d : []).filter((i) => i.interceptDuplex).reverse()) { + const next = tail; + tail = (mtd, opt) => curr.interceptDuplex(next, mtd, opt); + } + return tail(method, options); + } + runtime_1.assertNever(kind); + } + exports2.stackIntercept = stackIntercept; + function stackUnaryInterceptors(transport, method, input, options) { + return stackIntercept("unary", transport, method, options, input); + } + exports2.stackUnaryInterceptors = stackUnaryInterceptors; + function stackServerStreamingInterceptors(transport, method, input, options) { + return stackIntercept("serverStreaming", transport, method, options, input); + } + exports2.stackServerStreamingInterceptors = stackServerStreamingInterceptors; + function stackClientStreamingInterceptors(transport, method, options) { + return stackIntercept("clientStreaming", transport, method, options); + } + exports2.stackClientStreamingInterceptors = stackClientStreamingInterceptors; + function stackDuplexStreamingInterceptors(transport, method, options) { + return stackIntercept("duplex", transport, method, options); + } + exports2.stackDuplexStreamingInterceptors = stackDuplexStreamingInterceptors; + } +}); + +// node_modules/@protobuf-ts/runtime-rpc/build/commonjs/server-call-context.js +var require_server_call_context = __commonJS({ + "node_modules/@protobuf-ts/runtime-rpc/build/commonjs/server-call-context.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.ServerCallContextController = void 0; + var ServerCallContextController = class { + constructor(method, headers, deadline, sendResponseHeadersFn, defaultStatus = { code: "OK", detail: "" }) { + this._cancelled = false; + this._listeners = []; + this.method = method; + this.headers = headers; + this.deadline = deadline; + this.trailers = {}; + this._sendRH = sendResponseHeadersFn; + this.status = defaultStatus; + } + /** + * Set the call cancelled. + * + * Invokes all callbacks registered with onCancel() and + * sets `cancelled = true`. + */ + notifyCancelled() { + if (!this._cancelled) { + this._cancelled = true; + for (let l of this._listeners) { + l(); } - core14.debug(`Attempting to upload cache located at: ${archivePath}`); - yield cacheHttpClient.saveCache(cacheId, archivePath, signedUploadUrl, options); - const finalizeRequest = { - key, - version, - sizeBytes: `${archiveFileSize}` - }; - const finalizeResponse = yield twirpClient.FinalizeCacheEntryUpload(finalizeRequest); - core14.debug(`FinalizeCacheEntryUploadResponse: ${finalizeResponse.ok}`); - if (!finalizeResponse.ok) { - if (finalizeResponse.message) { - throw new FinalizeCacheError(finalizeResponse.message); - } - throw new Error(`Unable to finalize cache with key ${key}, another job may be finalizing this cache.`); + } + } + /** + * Send response headers. + */ + sendResponseHeaders(data) { + this._sendRH(data); + } + /** + * Is the call cancelled? + * + * When the client closes the connection before the server + * is done, the call is cancelled. + * + * If you want to cancel a request on the server, throw a + * RpcError with the CANCELLED status code. + */ + get cancelled() { + return this._cancelled; + } + /** + * Add a callback for cancellation. 
+ */ + onCancel(callback) { + const l = this._listeners; + l.push(callback); + return () => { + let i = l.indexOf(callback); + if (i >= 0) + l.splice(i, 1); + }; + } + }; + exports2.ServerCallContextController = ServerCallContextController; + } +}); + +// node_modules/@protobuf-ts/runtime-rpc/build/commonjs/index.js +var require_commonjs17 = __commonJS({ + "node_modules/@protobuf-ts/runtime-rpc/build/commonjs/index.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + var service_type_1 = require_service_type(); + Object.defineProperty(exports2, "ServiceType", { enumerable: true, get: function() { + return service_type_1.ServiceType; + } }); + var reflection_info_1 = require_reflection_info2(); + Object.defineProperty(exports2, "readMethodOptions", { enumerable: true, get: function() { + return reflection_info_1.readMethodOptions; + } }); + Object.defineProperty(exports2, "readMethodOption", { enumerable: true, get: function() { + return reflection_info_1.readMethodOption; + } }); + Object.defineProperty(exports2, "readServiceOption", { enumerable: true, get: function() { + return reflection_info_1.readServiceOption; + } }); + var rpc_error_1 = require_rpc_error(); + Object.defineProperty(exports2, "RpcError", { enumerable: true, get: function() { + return rpc_error_1.RpcError; + } }); + var rpc_options_1 = require_rpc_options(); + Object.defineProperty(exports2, "mergeRpcOptions", { enumerable: true, get: function() { + return rpc_options_1.mergeRpcOptions; + } }); + var rpc_output_stream_1 = require_rpc_output_stream(); + Object.defineProperty(exports2, "RpcOutputStreamController", { enumerable: true, get: function() { + return rpc_output_stream_1.RpcOutputStreamController; + } }); + var test_transport_1 = require_test_transport(); + Object.defineProperty(exports2, "TestTransport", { enumerable: true, get: function() { + return test_transport_1.TestTransport; + } }); + var deferred_1 = require_deferred(); + Object.defineProperty(exports2, "Deferred", { enumerable: true, get: function() { + return deferred_1.Deferred; + } }); + Object.defineProperty(exports2, "DeferredState", { enumerable: true, get: function() { + return deferred_1.DeferredState; + } }); + var duplex_streaming_call_1 = require_duplex_streaming_call(); + Object.defineProperty(exports2, "DuplexStreamingCall", { enumerable: true, get: function() { + return duplex_streaming_call_1.DuplexStreamingCall; + } }); + var client_streaming_call_1 = require_client_streaming_call(); + Object.defineProperty(exports2, "ClientStreamingCall", { enumerable: true, get: function() { + return client_streaming_call_1.ClientStreamingCall; + } }); + var server_streaming_call_1 = require_server_streaming_call(); + Object.defineProperty(exports2, "ServerStreamingCall", { enumerable: true, get: function() { + return server_streaming_call_1.ServerStreamingCall; + } }); + var unary_call_1 = require_unary_call(); + Object.defineProperty(exports2, "UnaryCall", { enumerable: true, get: function() { + return unary_call_1.UnaryCall; + } }); + var rpc_interceptor_1 = require_rpc_interceptor(); + Object.defineProperty(exports2, "stackIntercept", { enumerable: true, get: function() { + return rpc_interceptor_1.stackIntercept; + } }); + Object.defineProperty(exports2, "stackDuplexStreamingInterceptors", { enumerable: true, get: function() { + return rpc_interceptor_1.stackDuplexStreamingInterceptors; + } }); + Object.defineProperty(exports2, "stackClientStreamingInterceptors", { enumerable: true, get: function() { 
+ return rpc_interceptor_1.stackClientStreamingInterceptors; + } }); + Object.defineProperty(exports2, "stackServerStreamingInterceptors", { enumerable: true, get: function() { + return rpc_interceptor_1.stackServerStreamingInterceptors; + } }); + Object.defineProperty(exports2, "stackUnaryInterceptors", { enumerable: true, get: function() { + return rpc_interceptor_1.stackUnaryInterceptors; + } }); + var server_call_context_1 = require_server_call_context(); + Object.defineProperty(exports2, "ServerCallContextController", { enumerable: true, get: function() { + return server_call_context_1.ServerCallContextController; + } }); + } +}); + +// node_modules/@actions/cache/lib/generated/results/entities/v1/cachescope.js +var require_cachescope = __commonJS({ + "node_modules/@actions/cache/lib/generated/results/entities/v1/cachescope.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.CacheScope = void 0; + var runtime_1 = require_commonjs16(); + var runtime_2 = require_commonjs16(); + var runtime_3 = require_commonjs16(); + var runtime_4 = require_commonjs16(); + var runtime_5 = require_commonjs16(); + var CacheScope$Type = class extends runtime_5.MessageType { + constructor() { + super("github.actions.results.entities.v1.CacheScope", [ + { + no: 1, + name: "scope", + kind: "scalar", + T: 9 + /*ScalarType.STRING*/ + }, + { + no: 2, + name: "permission", + kind: "scalar", + T: 3 + /*ScalarType.INT64*/ } - cacheId = parseInt(finalizeResponse.entryId); - } catch (error3) { - const typedError = error3; - if (typedError.name === ValidationError.name) { - throw error3; - } else if (typedError.name === ReserveCacheError.name) { - core14.info(`Failed to save: ${typedError.message}`); - } else if (typedError.name === FinalizeCacheError.name) { - core14.warning(typedError.message); - } else { - if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { - core14.error(`Failed to save: ${typedError.message}`); - } else { - core14.warning(`Failed to save: ${typedError.message}`); - } + ]); + } + create(value) { + const message = { scope: "", permission: "0" }; + globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== void 0) + (0, runtime_3.reflectionMergePartial)(this, message, value); + return message; + } + internalBinaryRead(reader, length, options, target) { + let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* string scope */ + 1: + message.scope = reader.string(); + break; + case /* int64 permission */ + 2: + message.permission = reader.int64().toString(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? 
runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); } - } finally { - try { - yield utils.unlinkFile(archivePath); - } catch (error3) { - core14.debug(`Failed to delete archive: ${error3}`); + } + return message; + } + internalBinaryWrite(message, writer, options) { + if (message.scope !== "") + writer.tag(1, runtime_1.WireType.LengthDelimited).string(message.scope); + if (message.permission !== "0") + writer.tag(2, runtime_1.WireType.Varint).int64(message.permission); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } + }; + exports2.CacheScope = new CacheScope$Type(); + } +}); + +// node_modules/@actions/cache/lib/generated/results/entities/v1/cachemetadata.js +var require_cachemetadata = __commonJS({ + "node_modules/@actions/cache/lib/generated/results/entities/v1/cachemetadata.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.CacheMetadata = void 0; + var runtime_1 = require_commonjs16(); + var runtime_2 = require_commonjs16(); + var runtime_3 = require_commonjs16(); + var runtime_4 = require_commonjs16(); + var runtime_5 = require_commonjs16(); + var cachescope_1 = require_cachescope(); + var CacheMetadata$Type = class extends runtime_5.MessageType { + constructor() { + super("github.actions.results.entities.v1.CacheMetadata", [ + { + no: 1, + name: "repository_id", + kind: "scalar", + T: 3 + /*ScalarType.INT64*/ + }, + { no: 2, name: "scope", kind: "message", repeat: 1, T: () => cachescope_1.CacheScope } + ]); + } + create(value) { + const message = { repositoryId: "0", scope: [] }; + globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== void 0) + (0, runtime_3.reflectionMergePartial)(this, message, value); + return message; + } + internalBinaryRead(reader, length, options, target) { + let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* int64 repository_id */ + 1: + message.repositoryId = reader.int64().toString(); + break; + case /* repeated github.actions.results.entities.v1.CacheScope scope */ + 2: + message.scope.push(cachescope_1.CacheScope.internalBinaryRead(reader, reader.uint32(), options)); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); } } - return cacheId; - }); - } + return message; + } + internalBinaryWrite(message, writer, options) { + if (message.repositoryId !== "0") + writer.tag(1, runtime_1.WireType.Varint).int64(message.repositoryId); + for (let i = 0; i < message.scope.length; i++) + cachescope_1.CacheScope.internalBinaryWrite(message.scope[i], writer.tag(2, runtime_1.WireType.LengthDelimited).fork(), options).join(); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } + }; + exports2.CacheMetadata = new CacheMetadata$Type(); } }); -// node_modules/jsonschema/lib/helpers.js -var require_helpers3 = __commonJS({ - "node_modules/jsonschema/lib/helpers.js"(exports2, module2) { +// node_modules/@actions/cache/lib/generated/results/api/v1/cache.js +var require_cache4 = __commonJS({ + "node_modules/@actions/cache/lib/generated/results/api/v1/cache.js"(exports2) { "use strict"; - var uri = require("url"); - var ValidationError = exports2.ValidationError = function ValidationError2(message, instance, schema2, path13, name, argument) { - if (Array.isArray(path13)) { - this.path = path13; - this.property = path13.reduce(function(sum, item) { - return sum + makeSuffix(item); - }, "instance"); - } else if (path13 !== void 0) { - this.property = path13; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.CacheService = exports2.GetCacheEntryDownloadURLResponse = exports2.GetCacheEntryDownloadURLRequest = exports2.FinalizeCacheEntryUploadResponse = exports2.FinalizeCacheEntryUploadRequest = exports2.CreateCacheEntryResponse = exports2.CreateCacheEntryRequest = void 0; + var runtime_rpc_1 = require_commonjs17(); + var runtime_1 = require_commonjs16(); + var runtime_2 = require_commonjs16(); + var runtime_3 = require_commonjs16(); + var runtime_4 = require_commonjs16(); + var runtime_5 = require_commonjs16(); + var cachemetadata_1 = require_cachemetadata(); + var CreateCacheEntryRequest$Type = class extends runtime_5.MessageType { + constructor() { + super("github.actions.results.api.v1.CreateCacheEntryRequest", [ + { no: 1, name: "metadata", kind: "message", T: () => cachemetadata_1.CacheMetadata }, + { + no: 2, + name: "key", + kind: "scalar", + T: 9 + /*ScalarType.STRING*/ + }, + { + no: 3, + name: "version", + kind: "scalar", + T: 9 + /*ScalarType.STRING*/ + } + ]); } - if (message) { - this.message = message; + create(value) { + const message = { key: "", version: "" }; + globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== void 0) + (0, runtime_3.reflectionMergePartial)(this, message, value); + return message; } - if (schema2) { - var id = schema2.$id || schema2.id; - this.schema = id || schema2; + internalBinaryRead(reader, length, options, target) { + let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* github.actions.results.entities.v1.CacheMetadata metadata */ + 1: + message.metadata = cachemetadata_1.CacheMetadata.internalBinaryRead(reader, reader.uint32(), options, message.metadata); + break; + case /* string key */ + 2: + message.key = reader.string(); + break; + case /* string version */ + 3: + message.version = reader.string(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? 
runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; } - if (instance !== void 0) { - this.instance = instance; + internalBinaryWrite(message, writer, options) { + if (message.metadata) + cachemetadata_1.CacheMetadata.internalBinaryWrite(message.metadata, writer.tag(1, runtime_1.WireType.LengthDelimited).fork(), options).join(); + if (message.key !== "") + writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.key); + if (message.version !== "") + writer.tag(3, runtime_1.WireType.LengthDelimited).string(message.version); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; } - this.name = name; - this.argument = argument; - this.stack = this.toString(); - }; - ValidationError.prototype.toString = function toString3() { - return this.property + " " + this.message; - }; - var ValidatorResult = exports2.ValidatorResult = function ValidatorResult2(instance, schema2, options, ctx) { - this.instance = instance; - this.schema = schema2; - this.options = options; - this.path = ctx.path; - this.propertyPath = ctx.propertyPath; - this.errors = []; - this.throwError = options && options.throwError; - this.throwFirst = options && options.throwFirst; - this.throwAll = options && options.throwAll; - this.disableFormat = options && options.disableFormat === true; }; - ValidatorResult.prototype.addError = function addError(detail) { - var err; - if (typeof detail == "string") { - err = new ValidationError(detail, this.instance, this.schema, this.path); - } else { - if (!detail) throw new Error("Missing error detail"); - if (!detail.message) throw new Error("Missing error message"); - if (!detail.name) throw new Error("Missing validator type"); - err = new ValidationError(detail.message, this.instance, this.schema, this.path, detail.name, detail.argument); + exports2.CreateCacheEntryRequest = new CreateCacheEntryRequest$Type(); + var CreateCacheEntryResponse$Type = class extends runtime_5.MessageType { + constructor() { + super("github.actions.results.api.v1.CreateCacheEntryResponse", [ + { + no: 1, + name: "ok", + kind: "scalar", + T: 8 + /*ScalarType.BOOL*/ + }, + { + no: 2, + name: "signed_upload_url", + kind: "scalar", + T: 9 + /*ScalarType.STRING*/ + }, + { + no: 3, + name: "message", + kind: "scalar", + T: 9 + /*ScalarType.STRING*/ + } + ]); } - this.errors.push(err); - if (this.throwFirst) { - throw new ValidatorResultError(this); - } else if (this.throwError) { - throw err; + create(value) { + const message = { ok: false, signedUploadUrl: "", message: "" }; + globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== void 0) + (0, runtime_3.reflectionMergePartial)(this, message, value); + return message; } - return err; - }; - ValidatorResult.prototype.importErrors = function importErrors(res) { - if (typeof res == "string" || res && res.validatorType) { - this.addError(res); - } else if (res && res.errors) { - this.errors = this.errors.concat(res.errors); + internalBinaryRead(reader, length, options, target) { + let message = target !== null && target !== void 0 ? 
target : this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* bool ok */ + 1: + message.ok = reader.bool(); + break; + case /* string signed_upload_url */ + 2: + message.signedUploadUrl = reader.string(); + break; + case /* string message */ + 3: + message.message = reader.string(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; } - }; - function stringizer(v, i) { - return i + ": " + v.toString() + "\n"; - } - ValidatorResult.prototype.toString = function toString3(res) { - return this.errors.map(stringizer).join(""); - }; - Object.defineProperty(ValidatorResult.prototype, "valid", { get: function() { - return !this.errors.length; - } }); - module2.exports.ValidatorResultError = ValidatorResultError; - function ValidatorResultError(result) { - if (Error.captureStackTrace) { - Error.captureStackTrace(this, ValidatorResultError); + internalBinaryWrite(message, writer, options) { + if (message.ok !== false) + writer.tag(1, runtime_1.WireType.Varint).bool(message.ok); + if (message.signedUploadUrl !== "") + writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.signedUploadUrl); + if (message.message !== "") + writer.tag(3, runtime_1.WireType.LengthDelimited).string(message.message); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; } - this.instance = result.instance; - this.schema = result.schema; - this.options = result.options; - this.errors = result.errors; - } - ValidatorResultError.prototype = new Error(); - ValidatorResultError.prototype.constructor = ValidatorResultError; - ValidatorResultError.prototype.name = "Validation Error"; - var SchemaError = exports2.SchemaError = function SchemaError2(msg, schema2) { - this.message = msg; - this.schema = schema2; - Error.call(this, msg); - Error.captureStackTrace(this, SchemaError2); }; - SchemaError.prototype = Object.create( - Error.prototype, - { - constructor: { value: SchemaError, enumerable: false }, - name: { value: "SchemaError", enumerable: false } - } - ); - var SchemaContext = exports2.SchemaContext = function SchemaContext2(schema2, options, path13, base, schemas) { - this.schema = schema2; - this.options = options; - if (Array.isArray(path13)) { - this.path = path13; - this.propertyPath = path13.reduce(function(sum, item) { - return sum + makeSuffix(item); - }, "instance"); - } else { - this.propertyPath = path13; + exports2.CreateCacheEntryResponse = new CreateCacheEntryResponse$Type(); + var FinalizeCacheEntryUploadRequest$Type = class extends runtime_5.MessageType { + constructor() { + super("github.actions.results.api.v1.FinalizeCacheEntryUploadRequest", [ + { no: 1, name: "metadata", kind: "message", T: () => cachemetadata_1.CacheMetadata }, + { + no: 2, + name: "key", + kind: "scalar", + T: 9 + /*ScalarType.STRING*/ + }, + { + no: 3, + name: "size_bytes", + kind: "scalar", + T: 3 + /*ScalarType.INT64*/ + }, + { + no: 4, + name: "version", + kind: "scalar", + T: 9 + /*ScalarType.STRING*/ + } + ]); } - this.base = base; - this.schemas = schemas; - }; - SchemaContext.prototype.resolve = function 
resolve6(target) { - return uri.resolve(this.base, target); - }; - SchemaContext.prototype.makeChild = function makeChild(schema2, propertyName) { - var path13 = propertyName === void 0 ? this.path : this.path.concat([propertyName]); - var id = schema2.$id || schema2.id; - var base = uri.resolve(this.base, id || ""); - var ctx = new SchemaContext(schema2, this.options, path13, base, Object.create(this.schemas)); - if (id && !ctx.schemas[base]) { - ctx.schemas[base] = schema2; + create(value) { + const message = { key: "", sizeBytes: "0", version: "" }; + globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== void 0) + (0, runtime_3.reflectionMergePartial)(this, message, value); + return message; } - return ctx; - }; - var FORMAT_REGEXPS = exports2.FORMAT_REGEXPS = { - // 7.3.1. Dates, Times, and Duration - "date-time": /^\d{4}-(?:0[0-9]{1}|1[0-2]{1})-(3[01]|0[1-9]|[12][0-9])[tT ](2[0-4]|[01][0-9]):([0-5][0-9]):(60|[0-5][0-9])(\.\d+)?([zZ]|[+-]([0-5][0-9]):(60|[0-5][0-9]))$/, - "date": /^\d{4}-(?:0[0-9]{1}|1[0-2]{1})-(3[01]|0[1-9]|[12][0-9])$/, - "time": /^(2[0-4]|[01][0-9]):([0-5][0-9]):(60|[0-5][0-9])$/, - "duration": /P(T\d+(H(\d+M(\d+S)?)?|M(\d+S)?|S)|\d+(D|M(\d+D)?|Y(\d+M(\d+D)?)?)(T\d+(H(\d+M(\d+S)?)?|M(\d+S)?|S))?|\d+W)/i, - // 7.3.2. Email Addresses - // TODO: fix the email production - "email": /^(?:[\w\!\#\$\%\&\'\*\+\-\/\=\?\^\`\{\|\}\~]+\.)*[\w\!\#\$\%\&\'\*\+\-\/\=\?\^\`\{\|\}\~]+@(?:(?:(?:[a-zA-Z0-9](?:[a-zA-Z0-9\-](?!\.)){0,61}[a-zA-Z0-9]?\.)+[a-zA-Z0-9](?:[a-zA-Z0-9\-](?!$)){0,61}[a-zA-Z0-9]?)|(?:\[(?:(?:[01]?\d{1,2}|2[0-4]\d|25[0-5])\.){3}(?:[01]?\d{1,2}|2[0-4]\d|25[0-5])\]))$/, - "idn-email": /^("(?:[!#-\[\]-\u{10FFFF}]|\\[\t -\u{10FFFF}])*"|[!#-'*+\-/-9=?A-Z\^-\u{10FFFF}](?:\.?[!#-'*+\-/-9=?A-Z\^-\u{10FFFF}])*)@([!#-'*+\-/-9=?A-Z\^-\u{10FFFF}](?:\.?[!#-'*+\-/-9=?A-Z\^-\u{10FFFF}])*|\[[!-Z\^-\u{10FFFF}]*\])$/u, - // 7.3.3. Hostnames - // 7.3.4. IP Addresses - "ip-address": /^(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$/, - // FIXME whitespace is invalid - "ipv6": /^\s*((([0-9A-Fa-f]{1,4}:){7}([0-9A-Fa-f]{1,4}|:))|(([0-9A-Fa-f]{1,4}:){6}(:[0-9A-Fa-f]{1,4}|((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3})|:))|(([0-9A-Fa-f]{1,4}:){5}(((:[0-9A-Fa-f]{1,4}){1,2})|:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3})|:))|(([0-9A-Fa-f]{1,4}:){4}(((:[0-9A-Fa-f]{1,4}){1,3})|((:[0-9A-Fa-f]{1,4})?:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){3}(((:[0-9A-Fa-f]{1,4}){1,4})|((:[0-9A-Fa-f]{1,4}){0,2}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){2}(((:[0-9A-Fa-f]{1,4}){1,5})|((:[0-9A-Fa-f]{1,4}){0,3}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){1}(((:[0-9A-Fa-f]{1,4}){1,6})|((:[0-9A-Fa-f]{1,4}){0,4}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(:(((:[0-9A-Fa-f]{1,4}){1,7})|((:[0-9A-Fa-f]{1,4}){0,5}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:)))(%.+)?\s*$/, - // 7.3.5. 
Resource Identifiers - // TODO: A more accurate regular expression for "uri" goes: - // [A-Za-z][+\-.0-9A-Za-z]*:((/(/((%[0-9A-Fa-f]{2}|[!$&-.0-9;=A-Z_a-z~])+|(\[(([Vv][0-9A-Fa-f]+\.[!$&-.0-;=A-Z_a-z~]+)?|[.0-:A-Fa-f]+)\])?)(:\d*)?)?)?#(%[0-9A-Fa-f]{2}|[!$&-;=?-Z_a-z~])*|(/(/((%[0-9A-Fa-f]{2}|[!$&-.0-9;=A-Z_a-z~])+|(\[(([Vv][0-9A-Fa-f]+\.[!$&-.0-;=A-Z_a-z~]+)?|[.0-:A-Fa-f]+)\])?)(:\d*)?[/?]|[!$&-.0-;=?-Z_a-z~])|/?%[0-9A-Fa-f]{2}|[!$&-.0-;=?-Z_a-z~])(%[0-9A-Fa-f]{2}|[!$&-;=?-Z_a-z~])*(#(%[0-9A-Fa-f]{2}|[!$&-;=?-Z_a-z~])*)?|/(/((%[0-9A-Fa-f]{2}|[!$&-.0-9;=A-Z_a-z~])+(:\d*)?|(\[(([Vv][0-9A-Fa-f]+\.[!$&-.0-;=A-Z_a-z~]+)?|[.0-:A-Fa-f]+)\])?:\d*|\[(([Vv][0-9A-Fa-f]+\.[!$&-.0-;=A-Z_a-z~]+)?|[.0-:A-Fa-f]+)\])?)?)? - "uri": /^[a-zA-Z][a-zA-Z0-9+.-]*:[^\s]*$/, - "uri-reference": /^(((([A-Za-z][+\-.0-9A-Za-z]*(:%[0-9A-Fa-f]{2}|:[!$&-.0-;=?-Z_a-z~]|[/?])|\?)(%[0-9A-Fa-f]{2}|[!$&-;=?-Z_a-z~])*|([A-Za-z][+\-.0-9A-Za-z]*:?)?)|([A-Za-z][+\-.0-9A-Za-z]*:)?\/((%[0-9A-Fa-f]{2}|\/((%[0-9A-Fa-f]{2}|[!$&-.0-9;=A-Z_a-z~])+|(\[(([Vv][0-9A-Fa-f]+\.[!$&-.0-;=A-Z_a-z~]+)?|[.0-:A-Fa-f]+)\])?)(:\d*)?[/?]|[!$&-.0-;=?-Z_a-z~])(%[0-9A-Fa-f]{2}|[!$&-;=?-Z_a-z~])*|(\/((%[0-9A-Fa-f]{2}|[!$&-.0-9;=A-Z_a-z~])+|(\[(([Vv][0-9A-Fa-f]+\.[!$&-.0-;=A-Z_a-z~]+)?|[.0-:A-Fa-f]+)\])?)(:\d*)?)?))#(%[0-9A-Fa-f]{2}|[!$&-;=?-Z_a-z~])*|(([A-Za-z][+\-.0-9A-Za-z]*)?%[0-9A-Fa-f]{2}|[!$&-.0-9;=@_~]|[A-Za-z][+\-.0-9A-Za-z]*[!$&-*,;=@_~])(%[0-9A-Fa-f]{2}|[!$&-.0-9;=@-Z_a-z~])*((([/?](%[0-9A-Fa-f]{2}|[!$&-;=?-Z_a-z~])*)?#|[/?])(%[0-9A-Fa-f]{2}|[!$&-;=?-Z_a-z~])*)?|([A-Za-z][+\-.0-9A-Za-z]*(:%[0-9A-Fa-f]{2}|:[!$&-.0-;=?-Z_a-z~]|[/?])|\?)(%[0-9A-Fa-f]{2}|[!$&-;=?-Z_a-z~])*|([A-Za-z][+\-.0-9A-Za-z]*:)?\/((%[0-9A-Fa-f]{2}|\/((%[0-9A-Fa-f]{2}|[!$&-.0-9;=A-Z_a-z~])+|(\[(([Vv][0-9A-Fa-f]+\.[!$&-.0-;=A-Z_a-z~]+)?|[.0-:A-Fa-f]+)\])?)(:\d*)?[/?]|[!$&-.0-;=?-Z_a-z~])(%[0-9A-Fa-f]{2}|[!$&-;=?-Z_a-z~])*|\/((%[0-9A-Fa-f]{2}|[!$&-.0-9;=A-Z_a-z~])+(:\d*)?|(\[(([Vv][0-9A-Fa-f]+\.[!$&-.0-;=A-Z_a-z~]+)?|[.0-:A-Fa-f]+)\])?:\d*|\[(([Vv][0-9A-Fa-f]+\.[!$&-.0-;=A-Z_a-z~]+)?|[.0-:A-Fa-f]+)\])?)?|[A-Za-z][+\-.0-9A-Za-z]*:?)?$/, - "iri": /^[a-zA-Z][a-zA-Z0-9+.-]*:[^\s]*$/, - "iri-reference": 
/^(((([A-Za-z][+\-.0-9A-Za-z]*(:%[0-9A-Fa-f]{2}|:[!$&-.0-;=?-Z_a-z~-\u{10FFFF}]|[/?])|\?)(%[0-9A-Fa-f]{2}|[!$&-;=?-Z_a-z~-\u{10FFFF}])*|([A-Za-z][+\-.0-9A-Za-z]*:?)?)|([A-Za-z][+\-.0-9A-Za-z]*:)?\/((%[0-9A-Fa-f]{2}|\/((%[0-9A-Fa-f]{2}|[!$&-.0-9;=A-Z_a-z~-\u{10FFFF}])+|(\[(([Vv][0-9A-Fa-f]+\.[!$&-.0-;=A-Z_a-z~-\u{10FFFF}]+)?|[.0-:A-Fa-f]+)\])?)(:\d*)?[/?]|[!$&-.0-;=?-Z_a-z~-\u{10FFFF}])(%[0-9A-Fa-f]{2}|[!$&-;=?-Z_a-z~-\u{10FFFF}])*|(\/((%[0-9A-Fa-f]{2}|[!$&-.0-9;=A-Z_a-z~-\u{10FFFF}])+|(\[(([Vv][0-9A-Fa-f]+\.[!$&-.0-;=A-Z_a-z~-\u{10FFFF}]+)?|[.0-:A-Fa-f]+)\])?)(:\d*)?)?))#(%[0-9A-Fa-f]{2}|[!$&-;=?-Z_a-z~-\u{10FFFF}])*|(([A-Za-z][+\-.0-9A-Za-z]*)?%[0-9A-Fa-f]{2}|[!$&-.0-9;=@_~-\u{10FFFF}]|[A-Za-z][+\-.0-9A-Za-z]*[!$&-*,;=@_~-\u{10FFFF}])(%[0-9A-Fa-f]{2}|[!$&-.0-9;=@-Z_a-z~-\u{10FFFF}])*((([/?](%[0-9A-Fa-f]{2}|[!$&-;=?-Z_a-z~-\u{10FFFF}])*)?#|[/?])(%[0-9A-Fa-f]{2}|[!$&-;=?-Z_a-z~-\u{10FFFF}])*)?|([A-Za-z][+\-.0-9A-Za-z]*(:%[0-9A-Fa-f]{2}|:[!$&-.0-;=?-Z_a-z~-\u{10FFFF}]|[/?])|\?)(%[0-9A-Fa-f]{2}|[!$&-;=?-Z_a-z~-\u{10FFFF}])*|([A-Za-z][+\-.0-9A-Za-z]*:)?\/((%[0-9A-Fa-f]{2}|\/((%[0-9A-Fa-f]{2}|[!$&-.0-9;=A-Z_a-z~-\u{10FFFF}])+|(\[(([Vv][0-9A-Fa-f]+\.[!$&-.0-;=A-Z_a-z~-\u{10FFFF}]+)?|[.0-:A-Fa-f]+)\])?)(:\d*)?[/?]|[!$&-.0-;=?-Z_a-z~-\u{10FFFF}])(%[0-9A-Fa-f]{2}|[!$&-;=?-Z_a-z~-\u{10FFFF}])*|\/((%[0-9A-Fa-f]{2}|[!$&-.0-9;=A-Z_a-z~-\u{10FFFF}])+(:\d*)?|(\[(([Vv][0-9A-Fa-f]+\.[!$&-.0-;=A-Z_a-z~-\u{10FFFF}]+)?|[.0-:A-Fa-f]+)\])?:\d*|\[(([Vv][0-9A-Fa-f]+\.[!$&-.0-;=A-Z_a-z~-\u{10FFFF}]+)?|[.0-:A-Fa-f]+)\])?)?|[A-Za-z][+\-.0-9A-Za-z]*:?)?$/u, - "uuid": /^[0-9A-F]{8}-[0-9A-F]{4}-[0-9A-F]{4}-[0-9A-F]{4}-[0-9A-F]{12}$/i, - // 7.3.6. uri-template - "uri-template": /(%[0-9a-f]{2}|[!#$&(-;=?@\[\]_a-z~]|\{[!#&+,./;=?@|]?(%[0-9a-f]{2}|[0-9_a-z])(\.?(%[0-9a-f]{2}|[0-9_a-z]))*(:[1-9]\d{0,3}|\*)?(,(%[0-9a-f]{2}|[0-9_a-z])(\.?(%[0-9a-f]{2}|[0-9_a-z]))*(:[1-9]\d{0,3}|\*)?)*\})*/iu, - // 7.3.7. JSON Pointers - "json-pointer": /^(\/([\x00-\x2e0-@\[-}\x7f]|~[01])*)*$/iu, - "relative-json-pointer": /^\d+(#|(\/([\x00-\x2e0-@\[-}\x7f]|~[01])*)*)$/iu, - // hostname regex from: http://stackoverflow.com/a/1420225/5628 - "hostname": /^(?=.{1,255}$)[0-9A-Za-z](?:(?:[0-9A-Za-z]|-){0,61}[0-9A-Za-z])?(?:\.[0-9A-Za-z](?:(?:[0-9A-Za-z]|-){0,61}[0-9A-Za-z])?)*\.?$/, - "host-name": /^(?=.{1,255}$)[0-9A-Za-z](?:(?:[0-9A-Za-z]|-){0,61}[0-9A-Za-z])?(?:\.[0-9A-Za-z](?:(?:[0-9A-Za-z]|-){0,61}[0-9A-Za-z])?)*\.?$/, - "utc-millisec": function(input) { - return typeof input === "string" && parseFloat(input) === parseInt(input, 10) && !isNaN(input); - }, - // 7.3.8. 
regex - "regex": function(input) { - var result = true; - try { - new RegExp(input); - } catch (e) { - result = false; - } - return result; - }, - // Other definitions - // "style" was removed from JSON Schema in draft-4 and is deprecated - "style": /[\r\n\t ]*[^\r\n\t ][^:]*:[\r\n\t ]*[^\r\n\t ;]*[\r\n\t ]*;?/, - // "color" was removed from JSON Schema in draft-4 and is deprecated - "color": /^(#?([0-9A-Fa-f]{3}){1,2}\b|aqua|black|blue|fuchsia|gray|green|lime|maroon|navy|olive|orange|purple|red|silver|teal|white|yellow|(rgb\(\s*\b([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\b\s*,\s*\b([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\b\s*,\s*\b([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\b\s*\))|(rgb\(\s*(\d?\d%|100%)+\s*,\s*(\d?\d%|100%)+\s*,\s*(\d?\d%|100%)+\s*\)))$/, - "phone": /^\+(?:[0-9] ?){6,14}[0-9]$/, - "alpha": /^[a-zA-Z]+$/, - "alphanumeric": /^[a-zA-Z0-9]+$/ - }; - FORMAT_REGEXPS.regexp = FORMAT_REGEXPS.regex; - FORMAT_REGEXPS.pattern = FORMAT_REGEXPS.regex; - FORMAT_REGEXPS.ipv4 = FORMAT_REGEXPS["ip-address"]; - exports2.isFormat = function isFormat(input, format, validator) { - if (typeof input === "string" && FORMAT_REGEXPS[format] !== void 0) { - if (FORMAT_REGEXPS[format] instanceof RegExp) { - return FORMAT_REGEXPS[format].test(input); - } - if (typeof FORMAT_REGEXPS[format] === "function") { - return FORMAT_REGEXPS[format](input); + internalBinaryRead(reader, length, options, target) { + let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* github.actions.results.entities.v1.CacheMetadata metadata */ + 1: + message.metadata = cachemetadata_1.CacheMetadata.internalBinaryRead(reader, reader.uint32(), options, message.metadata); + break; + case /* string key */ + 2: + message.key = reader.string(); + break; + case /* int64 size_bytes */ + 3: + message.sizeBytes = reader.int64().toString(); + break; + case /* string version */ + 4: + message.version = reader.string(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } } - } else if (validator && validator.customFormats && typeof validator.customFormats[format] === "function") { - return validator.customFormats[format](input); - } - return true; - }; - var makeSuffix = exports2.makeSuffix = function makeSuffix2(key) { - key = key.toString(); - if (!key.match(/[.\s\[\]]/) && !key.match(/^[\d]/)) { - return "." + key; + return message; } - if (key.match(/^\d+$/)) { - return "[" + key + "]"; + internalBinaryWrite(message, writer, options) { + if (message.metadata) + cachemetadata_1.CacheMetadata.internalBinaryWrite(message.metadata, writer.tag(1, runtime_1.WireType.LengthDelimited).fork(), options).join(); + if (message.key !== "") + writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.key); + if (message.sizeBytes !== "0") + writer.tag(3, runtime_1.WireType.Varint).int64(message.sizeBytes); + if (message.version !== "") + writer.tag(4, runtime_1.WireType.LengthDelimited).string(message.version); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; } - return "[" + JSON.stringify(key) + "]"; }; - exports2.deepCompareStrict = function deepCompareStrict(a, b) { - if (typeof a !== typeof b) { - return false; + exports2.FinalizeCacheEntryUploadRequest = new FinalizeCacheEntryUploadRequest$Type(); + var FinalizeCacheEntryUploadResponse$Type = class extends runtime_5.MessageType { + constructor() { + super("github.actions.results.api.v1.FinalizeCacheEntryUploadResponse", [ + { + no: 1, + name: "ok", + kind: "scalar", + T: 8 + /*ScalarType.BOOL*/ + }, + { + no: 2, + name: "entry_id", + kind: "scalar", + T: 3 + /*ScalarType.INT64*/ + }, + { + no: 3, + name: "message", + kind: "scalar", + T: 9 + /*ScalarType.STRING*/ + } + ]); } - if (Array.isArray(a)) { - if (!Array.isArray(b)) { - return false; - } - if (a.length !== b.length) { - return false; + create(value) { + const message = { ok: false, entryId: "0", message: "" }; + globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== void 0) + (0, runtime_3.reflectionMergePartial)(this, message, value); + return message; + } + internalBinaryRead(reader, length, options, target) { + let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* bool ok */ + 1: + message.ok = reader.bool(); + break; + case /* int64 entry_id */ + 2: + message.entryId = reader.int64().toString(); + break; + case /* string message */ + 3: + message.message = reader.string(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } } - return a.every(function(v, i) { - return deepCompareStrict(a[i], b[i]); - }); + return message; } - if (typeof a === "object") { - if (!a || !b) { - return a === b; - } - var aKeys = Object.keys(a); - var bKeys = Object.keys(b); - if (aKeys.length !== bKeys.length) { - return false; - } - return aKeys.every(function(v) { - return deepCompareStrict(a[v], b[v]); - }); + internalBinaryWrite(message, writer, options) { + if (message.ok !== false) + writer.tag(1, runtime_1.WireType.Varint).bool(message.ok); + if (message.entryId !== "0") + writer.tag(2, runtime_1.WireType.Varint).int64(message.entryId); + if (message.message !== "") + writer.tag(3, runtime_1.WireType.LengthDelimited).string(message.message); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; } - return a === b; }; - function deepMerger(target, dst, e, i) { - if (typeof e === "object") { - dst[i] = deepMerge(target[i], e); - } else { - if (target.indexOf(e) === -1) { - dst.push(e); - } + exports2.FinalizeCacheEntryUploadResponse = new FinalizeCacheEntryUploadResponse$Type(); + var GetCacheEntryDownloadURLRequest$Type = class extends runtime_5.MessageType { + constructor() { + super("github.actions.results.api.v1.GetCacheEntryDownloadURLRequest", [ + { no: 1, name: "metadata", kind: "message", T: () => cachemetadata_1.CacheMetadata }, + { + no: 2, + name: "key", + kind: "scalar", + T: 9 + /*ScalarType.STRING*/ + }, + { + no: 3, + name: "restore_keys", + kind: "scalar", + repeat: 2, + T: 9 + /*ScalarType.STRING*/ + }, + { + no: 4, + name: "version", + kind: "scalar", + T: 9 + /*ScalarType.STRING*/ + } + ]); } - } - function copyist(src, dst, key) { - dst[key] = src[key]; - } - function copyistWithDeepMerge(target, src, dst, key) { - if (typeof src[key] !== "object" || !src[key]) { - dst[key] = src[key]; - } else { - if (!target[key]) { - dst[key] = src[key]; - } else { - dst[key] = deepMerge(target[key], src[key]); - } + create(value) { + const message = { key: "", restoreKeys: [], version: "" }; + globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== void 0) + (0, runtime_3.reflectionMergePartial)(this, message, value); + return message; } - } - function deepMerge(target, src) { - var array = Array.isArray(src); - var dst = array && [] || {}; - if (array) { - target = target || []; - dst = dst.concat(target); - src.forEach(deepMerger.bind(null, target, dst)); - } else { - if (target && typeof target === "object") { - Object.keys(target).forEach(copyist.bind(null, target, dst)); + internalBinaryRead(reader, length, options, target) { + let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* github.actions.results.entities.v1.CacheMetadata metadata */ + 1: + message.metadata = cachemetadata_1.CacheMetadata.internalBinaryRead(reader, reader.uint32(), options, message.metadata); + break; + case /* string key */ + 2: + message.key = reader.string(); + break; + case /* repeated string restore_keys */ + 3: + message.restoreKeys.push(reader.string()); + break; + case /* string version */ + 4: + message.version = reader.string(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? 
runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } } - Object.keys(src).forEach(copyistWithDeepMerge.bind(null, target, src, dst)); + return message; } - return dst; - } - module2.exports.deepMerge = deepMerge; - exports2.objectGetPath = function objectGetPath(o, s) { - var parts = s.split("/").slice(1); - var k; - while (typeof (k = parts.shift()) == "string") { - var n = decodeURIComponent(k.replace(/~0/, "~").replace(/~1/g, "/")); - if (!(n in o)) return; - o = o[n]; + internalBinaryWrite(message, writer, options) { + if (message.metadata) + cachemetadata_1.CacheMetadata.internalBinaryWrite(message.metadata, writer.tag(1, runtime_1.WireType.LengthDelimited).fork(), options).join(); + if (message.key !== "") + writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.key); + for (let i = 0; i < message.restoreKeys.length; i++) + writer.tag(3, runtime_1.WireType.LengthDelimited).string(message.restoreKeys[i]); + if (message.version !== "") + writer.tag(4, runtime_1.WireType.LengthDelimited).string(message.version); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; } - return o; - }; - function pathEncoder(v) { - return "/" + encodeURIComponent(v).replace(/~/g, "%7E"); - } - exports2.encodePath = function encodePointer(a) { - return a.map(pathEncoder).join(""); }; - exports2.getDecimalPlaces = function getDecimalPlaces(number) { - var decimalPlaces = 0; - if (isNaN(number)) return decimalPlaces; - if (typeof number !== "number") { - number = Number(number); + exports2.GetCacheEntryDownloadURLRequest = new GetCacheEntryDownloadURLRequest$Type(); + var GetCacheEntryDownloadURLResponse$Type = class extends runtime_5.MessageType { + constructor() { + super("github.actions.results.api.v1.GetCacheEntryDownloadURLResponse", [ + { + no: 1, + name: "ok", + kind: "scalar", + T: 8 + /*ScalarType.BOOL*/ + }, + { + no: 2, + name: "signed_download_url", + kind: "scalar", + T: 9 + /*ScalarType.STRING*/ + }, + { + no: 3, + name: "matched_key", + kind: "scalar", + T: 9 + /*ScalarType.STRING*/ + } + ]); } - var parts = number.toString().split("e"); - if (parts.length === 2) { - if (parts[1][0] !== "-") { - return decimalPlaces; - } else { - decimalPlaces = Number(parts[1].slice(1)); + create(value) { + const message = { ok: false, signedDownloadUrl: "", matchedKey: "" }; + globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== void 0) + (0, runtime_3.reflectionMergePartial)(this, message, value); + return message; + } + internalBinaryRead(reader, length, options, target) { + let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* bool ok */ + 1: + message.ok = reader.bool(); + break; + case /* string signed_download_url */ + 2: + message.signedDownloadUrl = reader.string(); + break; + case /* string matched_key */ + 3: + message.matchedKey = reader.string(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? 
runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } } + return message; } - var decimalParts = parts[0].split("."); - if (decimalParts.length === 2) { - decimalPlaces += decimalParts[1].length; + internalBinaryWrite(message, writer, options) { + if (message.ok !== false) + writer.tag(1, runtime_1.WireType.Varint).bool(message.ok); + if (message.signedDownloadUrl !== "") + writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.signedDownloadUrl); + if (message.matchedKey !== "") + writer.tag(3, runtime_1.WireType.LengthDelimited).string(message.matchedKey); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; } - return decimalPlaces; - }; - exports2.isSchema = function isSchema(val) { - return typeof val === "object" && val || typeof val === "boolean"; }; + exports2.GetCacheEntryDownloadURLResponse = new GetCacheEntryDownloadURLResponse$Type(); + exports2.CacheService = new runtime_rpc_1.ServiceType("github.actions.results.api.v1.CacheService", [ + { name: "CreateCacheEntry", options: {}, I: exports2.CreateCacheEntryRequest, O: exports2.CreateCacheEntryResponse }, + { name: "FinalizeCacheEntryUpload", options: {}, I: exports2.FinalizeCacheEntryUploadRequest, O: exports2.FinalizeCacheEntryUploadResponse }, + { name: "GetCacheEntryDownloadURL", options: {}, I: exports2.GetCacheEntryDownloadURLRequest, O: exports2.GetCacheEntryDownloadURLResponse } + ]); } }); -// node_modules/jsonschema/lib/attribute.js -var require_attribute = __commonJS({ - "node_modules/jsonschema/lib/attribute.js"(exports2, module2) { +// node_modules/@actions/cache/lib/generated/results/api/v1/cache.twirp-client.js +var require_cache_twirp_client = __commonJS({ + "node_modules/@actions/cache/lib/generated/results/api/v1/cache.twirp-client.js"(exports2) { "use strict"; - var helpers = require_helpers3(); - var ValidatorResult = helpers.ValidatorResult; - var SchemaError = helpers.SchemaError; - var attribute = {}; - attribute.ignoreProperties = { - // informative properties - "id": true, - "default": true, - "description": true, - "title": true, - // arguments to other properties - "additionalItems": true, - "then": true, - "else": true, - // special-handled properties - "$schema": true, - "$ref": true, - "extends": true - }; - var validators = attribute.validators = {}; - validators.type = function validateType(instance, schema2, options, ctx) { - if (instance === void 0) { - return null; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.CacheServiceClientProtobuf = exports2.CacheServiceClientJSON = void 0; + var cache_1 = require_cache4(); + var CacheServiceClientJSON = class { + constructor(rpc) { + this.rpc = rpc; + this.CreateCacheEntry.bind(this); + this.FinalizeCacheEntryUpload.bind(this); + this.GetCacheEntryDownloadURL.bind(this); } - var result = new ValidatorResult(instance, schema2, options, ctx); - var types = Array.isArray(schema2.type) ? schema2.type : [schema2.type]; - if (!types.some(this.testType.bind(this, instance, schema2, options, ctx))) { - var list = types.map(function(v) { - if (!v) return; - var id = v.$id || v.id; - return id ? 
"<" + id + ">" : v + ""; + CreateCacheEntry(request2) { + const data = cache_1.CreateCacheEntryRequest.toJson(request2, { + useProtoFieldName: true, + emitDefaultValues: false }); - result.addError({ - name: "type", - argument: list, - message: "is not of a type(s) " + list + const promise = this.rpc.request("github.actions.results.api.v1.CacheService", "CreateCacheEntry", "application/json", data); + return promise.then((data2) => cache_1.CreateCacheEntryResponse.fromJson(data2, { + ignoreUnknownFields: true + })); + } + FinalizeCacheEntryUpload(request2) { + const data = cache_1.FinalizeCacheEntryUploadRequest.toJson(request2, { + useProtoFieldName: true, + emitDefaultValues: false }); + const promise = this.rpc.request("github.actions.results.api.v1.CacheService", "FinalizeCacheEntryUpload", "application/json", data); + return promise.then((data2) => cache_1.FinalizeCacheEntryUploadResponse.fromJson(data2, { + ignoreUnknownFields: true + })); + } + GetCacheEntryDownloadURL(request2) { + const data = cache_1.GetCacheEntryDownloadURLRequest.toJson(request2, { + useProtoFieldName: true, + emitDefaultValues: false + }); + const promise = this.rpc.request("github.actions.results.api.v1.CacheService", "GetCacheEntryDownloadURL", "application/json", data); + return promise.then((data2) => cache_1.GetCacheEntryDownloadURLResponse.fromJson(data2, { + ignoreUnknownFields: true + })); } - return result; }; - function testSchemaNoThrow(instance, options, ctx, callback, schema2) { - var throwError2 = options.throwError; - var throwAll = options.throwAll; - options.throwError = false; - options.throwAll = false; - var res = this.validateSchema(instance, schema2, options, ctx); - options.throwError = throwError2; - options.throwAll = throwAll; - if (!res.valid && callback instanceof Function) { - callback(res); + exports2.CacheServiceClientJSON = CacheServiceClientJSON; + var CacheServiceClientProtobuf = class { + constructor(rpc) { + this.rpc = rpc; + this.CreateCacheEntry.bind(this); + this.FinalizeCacheEntryUpload.bind(this); + this.GetCacheEntryDownloadURL.bind(this); } - return res.valid; - } - validators.anyOf = function validateAnyOf(instance, schema2, options, ctx) { - if (instance === void 0) { - return null; + CreateCacheEntry(request2) { + const data = cache_1.CreateCacheEntryRequest.toBinary(request2); + const promise = this.rpc.request("github.actions.results.api.v1.CacheService", "CreateCacheEntry", "application/protobuf", data); + return promise.then((data2) => cache_1.CreateCacheEntryResponse.fromBinary(data2)); } - var result = new ValidatorResult(instance, schema2, options, ctx); - var inner = new ValidatorResult(instance, schema2, options, ctx); - if (!Array.isArray(schema2.anyOf)) { - throw new SchemaError("anyOf must be an array"); + FinalizeCacheEntryUpload(request2) { + const data = cache_1.FinalizeCacheEntryUploadRequest.toBinary(request2); + const promise = this.rpc.request("github.actions.results.api.v1.CacheService", "FinalizeCacheEntryUpload", "application/protobuf", data); + return promise.then((data2) => cache_1.FinalizeCacheEntryUploadResponse.fromBinary(data2)); } - if (!schema2.anyOf.some( - testSchemaNoThrow.bind( - this, - instance, - options, - ctx, - function(res) { - inner.importErrors(res); - } - ) - )) { - var list = schema2.anyOf.map(function(v, i) { - var id = v.$id || v.id; - if (id) return "<" + id + ">"; - return v.title && JSON.stringify(v.title) || v["$ref"] && "<" + v["$ref"] + ">" || "[subschema " + i + "]"; - }); - if (options.nestedErrors) { - 
result.importErrors(inner); - } - result.addError({ - name: "anyOf", - argument: list, - message: "is not any of " + list.join(",") - }); + GetCacheEntryDownloadURL(request2) { + const data = cache_1.GetCacheEntryDownloadURLRequest.toBinary(request2); + const promise = this.rpc.request("github.actions.results.api.v1.CacheService", "GetCacheEntryDownloadURL", "application/protobuf", data); + return promise.then((data2) => cache_1.GetCacheEntryDownloadURLResponse.fromBinary(data2)); } - return result; }; - validators.allOf = function validateAllOf(instance, schema2, options, ctx) { - if (instance === void 0) { - return null; + exports2.CacheServiceClientProtobuf = CacheServiceClientProtobuf; + } +}); + +// node_modules/@actions/cache/lib/internal/shared/util.js +var require_util18 = __commonJS({ + "node_modules/@actions/cache/lib/internal/shared/util.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.maskSigUrl = maskSigUrl; + exports2.maskSecretUrls = maskSecretUrls; + var core_1 = require_core(); + function maskSigUrl(url2) { + if (!url2) + return; + try { + const parsedUrl = new URL(url2); + const signature = parsedUrl.searchParams.get("sig"); + if (signature) { + (0, core_1.setSecret)(signature); + (0, core_1.setSecret)(encodeURIComponent(signature)); + } + } catch (error3) { + (0, core_1.debug)(`Failed to parse URL: ${url2} ${error3 instanceof Error ? error3.message : String(error3)}`); } - if (!Array.isArray(schema2.allOf)) { - throw new SchemaError("allOf must be an array"); + } + function maskSecretUrls(body) { + if (typeof body !== "object" || body === null) { + (0, core_1.debug)("body is not an object or is null"); + return; } - var result = new ValidatorResult(instance, schema2, options, ctx); - var self2 = this; - schema2.allOf.forEach(function(v, i) { - var valid3 = self2.validateSchema(instance, v, options, ctx); - if (!valid3.valid) { - var id = v.$id || v.id; - var msg = id || v.title && JSON.stringify(v.title) || v["$ref"] && "<" + v["$ref"] + ">" || "[subschema " + i + "]"; - result.addError({ - name: "allOf", - argument: { id: msg, length: valid3.errors.length, valid: valid3 }, - message: "does not match allOf schema " + msg + " with " + valid3.errors.length + " error[s]:" - }); - result.importErrors(valid3); + if ("signed_upload_url" in body && typeof body.signed_upload_url === "string") { + maskSigUrl(body.signed_upload_url); + } + if ("signed_download_url" in body && typeof body.signed_download_url === "string") { + maskSigUrl(body.signed_download_url); + } + } + } +}); + +// node_modules/@actions/cache/lib/internal/shared/cacheTwirpClient.js +var require_cacheTwirpClient = __commonJS({ + "node_modules/@actions/cache/lib/internal/shared/cacheTwirpClient.js"(exports2) { + "use strict"; + var __awaiter2 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { + function adopt(value) { + return value instanceof P ? value : new P(function(resolve6) { + resolve6(value); + }); + } + return new (P || (P = Promise))(function(resolve6, reject) { + function fulfilled(value) { + try { + step(generator.next(value)); + } catch (e) { + reject(e); + } + } + function rejected(value) { + try { + step(generator["throw"](value)); + } catch (e) { + reject(e); + } + } + function step(result) { + result.done ? 
resolve6(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); }); - return result; }; - validators.oneOf = function validateOneOf(instance, schema2, options, ctx) { - if (instance === void 0) { - return null; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.internalCacheTwirpClient = internalCacheTwirpClient; + var core_1 = require_core(); + var user_agent_1 = require_user_agent(); + var errors_1 = require_errors3(); + var config_1 = require_config(); + var cacheUtils_1 = require_cacheUtils(); + var auth_1 = require_auth(); + var http_client_1 = require_lib(); + var cache_twirp_client_1 = require_cache_twirp_client(); + var util_1 = require_util18(); + var CacheServiceClient = class { + constructor(userAgent2, maxAttempts, baseRetryIntervalMilliseconds, retryMultiplier) { + this.maxAttempts = 5; + this.baseRetryIntervalMilliseconds = 3e3; + this.retryMultiplier = 1.5; + const token = (0, cacheUtils_1.getRuntimeToken)(); + this.baseUrl = (0, config_1.getCacheServiceURL)(); + if (maxAttempts) { + this.maxAttempts = maxAttempts; + } + if (baseRetryIntervalMilliseconds) { + this.baseRetryIntervalMilliseconds = baseRetryIntervalMilliseconds; + } + if (retryMultiplier) { + this.retryMultiplier = retryMultiplier; + } + this.httpClient = new http_client_1.HttpClient(userAgent2, [ + new auth_1.BearerCredentialHandler(token) + ]); } - if (!Array.isArray(schema2.oneOf)) { - throw new SchemaError("oneOf must be an array"); + // This function satisfies the Rpc interface. It is compatible with the JSON + // JSON generated client. + request(service, method, contentType, data) { + return __awaiter2(this, void 0, void 0, function* () { + const url2 = new URL(`/twirp/${service}/${method}`, this.baseUrl).href; + (0, core_1.debug)(`[Request] ${method} ${url2}`); + const headers = { + "Content-Type": contentType + }; + try { + const { body } = yield this.retryableRequest(() => __awaiter2(this, void 0, void 0, function* () { + return this.httpClient.post(url2, JSON.stringify(data), headers); + })); + return body; + } catch (error3) { + throw new Error(`Failed to ${method}: ${error3.message}`); + } + }); } - var result = new ValidatorResult(instance, schema2, options, ctx); - var inner = new ValidatorResult(instance, schema2, options, ctx); - var count = schema2.oneOf.filter( - testSchemaNoThrow.bind( - this, - instance, - options, - ctx, - function(res) { - inner.importErrors(res); + retryableRequest(operation) { + return __awaiter2(this, void 0, void 0, function* () { + let attempt = 0; + let errorMessage = ""; + let rawBody = ""; + while (attempt < this.maxAttempts) { + let isRetryable = false; + try { + const response = yield operation(); + const statusCode = response.message.statusCode; + rawBody = yield response.readBody(); + (0, core_1.debug)(`[Response] - ${response.message.statusCode}`); + (0, core_1.debug)(`Headers: ${JSON.stringify(response.message.headers, null, 2)}`); + const body = JSON.parse(rawBody); + (0, util_1.maskSecretUrls)(body); + (0, core_1.debug)(`Body: ${JSON.stringify(body, null, 2)}`); + if (this.isSuccessStatusCode(statusCode)) { + return { response, body }; + } + isRetryable = this.isRetryableHttpStatusCode(statusCode); + errorMessage = `Failed request: (${statusCode}) ${response.message.statusMessage}`; + if (body.msg) { + if (errors_1.UsageError.isUsageErrorMessage(body.msg)) { + throw new errors_1.UsageError(); + } + errorMessage = `${errorMessage}: ${body.msg}`; + } + 
if (statusCode === http_client_1.HttpCodes.TooManyRequests) { + const retryAfterHeader = response.message.headers["retry-after"]; + if (retryAfterHeader) { + const parsedSeconds = parseInt(retryAfterHeader, 10); + if (!isNaN(parsedSeconds) && parsedSeconds > 0) { + (0, core_1.warning)(`You've hit a rate limit, your rate limit will reset in ${parsedSeconds} seconds`); + } + } + throw new errors_1.RateLimitError(`Rate limited: ${errorMessage}`); + } + } catch (error3) { + if (error3 instanceof SyntaxError) { + (0, core_1.debug)(`Raw Body: ${rawBody}`); + } + if (error3 instanceof errors_1.UsageError) { + throw error3; + } + if (error3 instanceof errors_1.RateLimitError) { + throw error3; + } + if (errors_1.NetworkError.isNetworkErrorCode(error3 === null || error3 === void 0 ? void 0 : error3.code)) { + throw new errors_1.NetworkError(error3 === null || error3 === void 0 ? void 0 : error3.code); + } + isRetryable = true; + errorMessage = error3.message; + } + if (!isRetryable) { + throw new Error(`Received non-retryable error: ${errorMessage}`); + } + if (attempt + 1 === this.maxAttempts) { + throw new Error(`Failed to make request after ${this.maxAttempts} attempts: ${errorMessage}`); + } + const retryTimeMilliseconds = this.getExponentialRetryTimeMilliseconds(attempt); + (0, core_1.info)(`Attempt ${attempt + 1} of ${this.maxAttempts} failed with error: ${errorMessage}. Retrying request in ${retryTimeMilliseconds} ms...`); + yield this.sleep(retryTimeMilliseconds); + attempt++; } - ) - ).length; - var list = schema2.oneOf.map(function(v, i) { - var id = v.$id || v.id; - return id || v.title && JSON.stringify(v.title) || v["$ref"] && "<" + v["$ref"] + ">" || "[subschema " + i + "]"; - }); - if (count !== 1) { - if (options.nestedErrors) { - result.importErrors(inner); - } - result.addError({ - name: "oneOf", - argument: list, - message: "is not exactly one from " + list.join(",") + throw new Error(`Request failed`); }); } - return result; - }; - validators.if = function validateIf(instance, schema2, options, ctx) { - if (instance === void 0) return null; - if (!helpers.isSchema(schema2.if)) throw new Error('Expected "if" keyword to be a schema'); - var ifValid = testSchemaNoThrow.call(this, instance, options, ctx, null, schema2.if); - var result = new ValidatorResult(instance, schema2, options, ctx); - var res; - if (ifValid) { - if (schema2.then === void 0) return; - if (!helpers.isSchema(schema2.then)) throw new Error('Expected "then" keyword to be a schema'); - res = this.validateSchema(instance, schema2.then, options, ctx.makeChild(schema2.then)); - result.importErrors(res); - } else { - if (schema2.else === void 0) return; - if (!helpers.isSchema(schema2.else)) throw new Error('Expected "else" keyword to be a schema'); - res = this.validateSchema(instance, schema2.else, options, ctx.makeChild(schema2.else)); - result.importErrors(res); + isSuccessStatusCode(statusCode) { + if (!statusCode) + return false; + return statusCode >= 200 && statusCode < 300; } - return result; - }; - function getEnumerableProperty(object, key) { - if (Object.hasOwnProperty.call(object, key)) return object[key]; - if (!(key in object)) return; - while (object = Object.getPrototypeOf(object)) { - if (Object.propertyIsEnumerable.call(object, key)) return object[key]; + isRetryableHttpStatusCode(statusCode) { + if (!statusCode) + return false; + const retryableStatusCodes = [ + http_client_1.HttpCodes.BadGateway, + http_client_1.HttpCodes.GatewayTimeout, + http_client_1.HttpCodes.InternalServerError, + 
http_client_1.HttpCodes.ServiceUnavailable + ]; + return retryableStatusCodes.includes(statusCode); } - } - validators.propertyNames = function validatePropertyNames(instance, schema2, options, ctx) { - if (!this.types.object(instance)) return; - var result = new ValidatorResult(instance, schema2, options, ctx); - var subschema = schema2.propertyNames !== void 0 ? schema2.propertyNames : {}; - if (!helpers.isSchema(subschema)) throw new SchemaError('Expected "propertyNames" to be a schema (object or boolean)'); - for (var property in instance) { - if (getEnumerableProperty(instance, property) !== void 0) { - var res = this.validateSchema(property, subschema, options, ctx.makeChild(subschema)); - result.importErrors(res); - } + sleep(milliseconds) { + return __awaiter2(this, void 0, void 0, function* () { + return new Promise((resolve6) => setTimeout(resolve6, milliseconds)); + }); } - return result; - }; - validators.properties = function validateProperties(instance, schema2, options, ctx) { - if (!this.types.object(instance)) return; - var result = new ValidatorResult(instance, schema2, options, ctx); - var properties = schema2.properties || {}; - for (var property in properties) { - var subschema = properties[property]; - if (subschema === void 0) { - continue; - } else if (subschema === null) { - throw new SchemaError('Unexpected null, expected schema in "properties"'); + getExponentialRetryTimeMilliseconds(attempt) { + if (attempt < 0) { + throw new Error("attempt should be a positive integer"); } - if (typeof options.preValidateProperty == "function") { - options.preValidateProperty(instance, property, subschema, options, ctx); + if (attempt === 0) { + return this.baseRetryIntervalMilliseconds; } - var prop = getEnumerableProperty(instance, property); - var res = this.validateSchema(prop, subschema, options, ctx.makeChild(subschema, property)); - if (res.instance !== result.instance[property]) result.instance[property] = res.instance; - result.importErrors(res); + const minTime = this.baseRetryIntervalMilliseconds * Math.pow(this.retryMultiplier, attempt); + const maxTime = minTime * this.retryMultiplier; + return Math.trunc(Math.random() * (maxTime - minTime) + minTime); } - return result; }; - function testAdditionalProperty(instance, schema2, options, ctx, property, result) { - if (!this.types.object(instance)) return; - if (schema2.properties && schema2.properties[property] !== void 0) { - return; + function internalCacheTwirpClient(options) { + const client = new CacheServiceClient((0, user_agent_1.getUserAgentString)(), options === null || options === void 0 ? void 0 : options.maxAttempts, options === null || options === void 0 ? void 0 : options.retryIntervalMs, options === null || options === void 0 ? void 0 : options.retryMultiplier); + return new cache_twirp_client_1.CacheServiceClientJSON(client); + } + } +}); + +// node_modules/@actions/cache/lib/internal/tar.js +var require_tar = __commonJS({ + "node_modules/@actions/cache/lib/internal/tar.js"(exports2) { + "use strict"; + var __createBinding2 = exports2 && exports2.__createBinding || (Object.create ? (function(o, m, k, k2) { + if (k2 === void 0) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { + return m[k]; + } }; } - if (schema2.additionalProperties === false) { - result.addError({ - name: "additionalProperties", - argument: property, - message: "is not allowed to have the additional property " + JSON.stringify(property) - }); - } else { - var additionalProperties = schema2.additionalProperties || {}; - if (typeof options.preValidateProperty == "function") { - options.preValidateProperty(instance, property, additionalProperties, options, ctx); + Object.defineProperty(o, k2, desc); + }) : (function(o, m, k, k2) { + if (k2 === void 0) k2 = k; + o[k2] = m[k]; + })); + var __setModuleDefault2 = exports2 && exports2.__setModuleDefault || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); + }) : function(o, v) { + o["default"] = v; + }); + var __importStar2 = exports2 && exports2.__importStar || /* @__PURE__ */ (function() { + var ownKeys2 = function(o) { + ownKeys2 = Object.getOwnPropertyNames || function(o2) { + var ar = []; + for (var k in o2) if (Object.prototype.hasOwnProperty.call(o2, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys2(o); + }; + return function(mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) { + for (var k = ownKeys2(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding2(result, mod, k[i]); } - var res = this.validateSchema(instance[property], additionalProperties, options, ctx.makeChild(additionalProperties, property)); - if (res.instance !== result.instance[property]) result.instance[property] = res.instance; - result.importErrors(res); + __setModuleDefault2(result, mod); + return result; + }; + })(); + var __awaiter2 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { + function adopt(value) { + return value instanceof P ? value : new P(function(resolve6) { + resolve6(value); + }); } - } - validators.patternProperties = function validatePatternProperties(instance, schema2, options, ctx) { - if (!this.types.object(instance)) return; - var result = new ValidatorResult(instance, schema2, options, ctx); - var patternProperties = schema2.patternProperties || {}; - for (var property in instance) { - var test = true; - for (var pattern in patternProperties) { - var subschema = patternProperties[pattern]; - if (subschema === void 0) { - continue; - } else if (subschema === null) { - throw new SchemaError('Unexpected null, expected schema in "patternProperties"'); + return new (P || (P = Promise))(function(resolve6, reject) { + function fulfilled(value) { + try { + step(generator.next(value)); + } catch (e) { + reject(e); } + } + function rejected(value) { try { - var regexp = new RegExp(pattern, "u"); - } catch (_e) { - regexp = new RegExp(pattern); + step(generator["throw"](value)); + } catch (e) { + reject(e); } - if (!regexp.test(property)) { - continue; + } + function step(result) { + result.done ? 
resolve6(result.value) : adopt(result.value).then(fulfilled, rejected); + } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); + }; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.listTar = listTar; + exports2.extractTar = extractTar2; + exports2.createTar = createTar; + var exec_1 = require_exec(); + var io6 = __importStar2(require_io()); + var fs_1 = require("fs"); + var path13 = __importStar2(require("path")); + var utils = __importStar2(require_cacheUtils()); + var constants_1 = require_constants12(); + var IS_WINDOWS = process.platform === "win32"; + function getTarPath() { + return __awaiter2(this, void 0, void 0, function* () { + switch (process.platform) { + case "win32": { + const gnuTar = yield utils.getGnuTarPathOnWindows(); + const systemTar = constants_1.SystemTarPathOnWindows; + if (gnuTar) { + return { path: gnuTar, type: constants_1.ArchiveToolType.GNU }; + } else if ((0, fs_1.existsSync)(systemTar)) { + return { path: systemTar, type: constants_1.ArchiveToolType.BSD }; + } + break; } - test = false; - if (typeof options.preValidateProperty == "function") { - options.preValidateProperty(instance, property, subschema, options, ctx); + case "darwin": { + const gnuTar = yield io6.which("gtar", false); + if (gnuTar) { + return { path: gnuTar, type: constants_1.ArchiveToolType.GNU }; + } else { + return { + path: yield io6.which("tar", true), + type: constants_1.ArchiveToolType.BSD + }; + } } - var res = this.validateSchema(instance[property], subschema, options, ctx.makeChild(subschema, property)); - if (res.instance !== result.instance[property]) result.instance[property] = res.instance; - result.importErrors(res); + default: + break; } - if (test) { - testAdditionalProperty.call(this, instance, schema2, options, ctx, property, result); + return { + path: yield io6.which("tar", true), + type: constants_1.ArchiveToolType.GNU + }; + }); + } + function getTarArgs(tarPath_1, compressionMethod_1, type_1) { + return __awaiter2(this, arguments, void 0, function* (tarPath, compressionMethod, type2, archivePath = "") { + const args = [`"${tarPath.path}"`]; + const cacheFileName = utils.getCacheFileName(compressionMethod); + const tarFile = "cache.tar"; + const workingDirectory = getWorkingDirectory(); + const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD && compressionMethod !== constants_1.CompressionMethod.Gzip && IS_WINDOWS; + switch (type2) { + case "create": + args.push("--posix", "-cf", BSD_TAR_ZSTD ? tarFile : cacheFileName.replace(new RegExp(`\\${path13.sep}`, "g"), "/"), "--exclude", BSD_TAR_ZSTD ? tarFile : cacheFileName.replace(new RegExp(`\\${path13.sep}`, "g"), "/"), "-P", "-C", workingDirectory.replace(new RegExp(`\\${path13.sep}`, "g"), "/"), "--files-from", constants_1.ManifestFilename); + break; + case "extract": + args.push("-xf", BSD_TAR_ZSTD ? tarFile : archivePath.replace(new RegExp(`\\${path13.sep}`, "g"), "/"), "-P", "-C", workingDirectory.replace(new RegExp(`\\${path13.sep}`, "g"), "/")); + break; + case "list": + args.push("-tf", BSD_TAR_ZSTD ? 
tarFile : archivePath.replace(new RegExp(`\\${path13.sep}`, "g"), "/"), "-P"); + break; } - } - return result; - }; - validators.additionalProperties = function validateAdditionalProperties(instance, schema2, options, ctx) { - if (!this.types.object(instance)) return; - if (schema2.patternProperties) { - return null; - } - var result = new ValidatorResult(instance, schema2, options, ctx); - for (var property in instance) { - testAdditionalProperty.call(this, instance, schema2, options, ctx, property, result); - } - return result; - }; - validators.minProperties = function validateMinProperties(instance, schema2, options, ctx) { - if (!this.types.object(instance)) return; - var result = new ValidatorResult(instance, schema2, options, ctx); - var keys = Object.keys(instance); - if (!(keys.length >= schema2.minProperties)) { - result.addError({ - name: "minProperties", - argument: schema2.minProperties, - message: "does not meet minimum property length of " + schema2.minProperties - }); - } - return result; - }; - validators.maxProperties = function validateMaxProperties(instance, schema2, options, ctx) { - if (!this.types.object(instance)) return; - var result = new ValidatorResult(instance, schema2, options, ctx); - var keys = Object.keys(instance); - if (!(keys.length <= schema2.maxProperties)) { - result.addError({ - name: "maxProperties", - argument: schema2.maxProperties, - message: "does not meet maximum property length of " + schema2.maxProperties - }); - } - return result; - }; - validators.items = function validateItems(instance, schema2, options, ctx) { - var self2 = this; - if (!this.types.array(instance)) return; - if (schema2.items === void 0) return; - var result = new ValidatorResult(instance, schema2, options, ctx); - instance.every(function(value, i) { - if (Array.isArray(schema2.items)) { - var items = schema2.items[i] === void 0 ? schema2.additionalItems : schema2.items[i]; + if (tarPath.type === constants_1.ArchiveToolType.GNU) { + switch (process.platform) { + case "win32": + args.push("--force-local"); + break; + case "darwin": + args.push("--delay-directory-restore"); + break; + } + } + return args; + }); + } + function getCommands(compressionMethod_1, type_1) { + return __awaiter2(this, arguments, void 0, function* (compressionMethod, type2, archivePath = "") { + let args; + const tarPath = yield getTarPath(); + const tarArgs = yield getTarArgs(tarPath, compressionMethod, type2, archivePath); + const compressionArgs = type2 !== "create" ? yield getDecompressionProgram(tarPath, compressionMethod, archivePath) : yield getCompressionProgram(tarPath, compressionMethod); + const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD && compressionMethod !== constants_1.CompressionMethod.Gzip && IS_WINDOWS; + if (BSD_TAR_ZSTD && type2 !== "create") { + args = [[...compressionArgs].join(" "), [...tarArgs].join(" ")]; } else { - var items = schema2.items; + args = [[...tarArgs].join(" "), [...compressionArgs].join(" ")]; } - if (items === void 0) { - return true; + if (BSD_TAR_ZSTD) { + return args; } - if (items === false) { - result.addError({ - name: "items", - message: "additionalItems not permitted" - }); - return false; + return [args.join(" ")]; + }); + } + function getWorkingDirectory() { + var _a; + return (_a = process.env["GITHUB_WORKSPACE"]) !== null && _a !== void 0 ? 
_a : process.cwd(); + } + function getDecompressionProgram(tarPath, compressionMethod, archivePath) { + return __awaiter2(this, void 0, void 0, function* () { + const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD && compressionMethod !== constants_1.CompressionMethod.Gzip && IS_WINDOWS; + switch (compressionMethod) { + case constants_1.CompressionMethod.Zstd: + return BSD_TAR_ZSTD ? [ + "zstd -d --long=30 --force -o", + constants_1.TarFilename, + archivePath.replace(new RegExp(`\\${path13.sep}`, "g"), "/") + ] : [ + "--use-compress-program", + IS_WINDOWS ? '"zstd -d --long=30"' : "unzstd --long=30" + ]; + case constants_1.CompressionMethod.ZstdWithoutLong: + return BSD_TAR_ZSTD ? [ + "zstd -d --force -o", + constants_1.TarFilename, + archivePath.replace(new RegExp(`\\${path13.sep}`, "g"), "/") + ] : ["--use-compress-program", IS_WINDOWS ? '"zstd -d"' : "unzstd"]; + default: + return ["-z"]; + } + }); + } + function getCompressionProgram(tarPath, compressionMethod) { + return __awaiter2(this, void 0, void 0, function* () { + const cacheFileName = utils.getCacheFileName(compressionMethod); + const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD && compressionMethod !== constants_1.CompressionMethod.Gzip && IS_WINDOWS; + switch (compressionMethod) { + case constants_1.CompressionMethod.Zstd: + return BSD_TAR_ZSTD ? [ + "zstd -T0 --long=30 --force -o", + cacheFileName.replace(new RegExp(`\\${path13.sep}`, "g"), "/"), + constants_1.TarFilename + ] : [ + "--use-compress-program", + IS_WINDOWS ? '"zstd -T0 --long=30"' : "zstdmt --long=30" + ]; + case constants_1.CompressionMethod.ZstdWithoutLong: + return BSD_TAR_ZSTD ? [ + "zstd -T0 --force -o", + cacheFileName.replace(new RegExp(`\\${path13.sep}`, "g"), "/"), + constants_1.TarFilename + ] : ["--use-compress-program", IS_WINDOWS ? '"zstd -T0"' : "zstdmt"]; + default: + return ["-z"]; + } + }); + } + function execCommands(commands, cwd) { + return __awaiter2(this, void 0, void 0, function* () { + for (const command of commands) { + try { + yield (0, exec_1.exec)(command, void 0, { + cwd, + env: Object.assign(Object.assign({}, process.env), { MSYS: "winsymlinks:nativestrict" }) + }); + } catch (error3) { + throw new Error(`${command.split(" ")[0]} failed with error: ${error3 === null || error3 === void 0 ? 
void 0 : error3.message}`); + } } - var res = self2.validateSchema(value, items, options, ctx.makeChild(items, i)); - if (res.instance !== result.instance[i]) result.instance[i] = res.instance; - result.importErrors(res); - return true; }); - return result; - }; - validators.contains = function validateContains(instance, schema2, options, ctx) { - var self2 = this; - if (!this.types.array(instance)) return; - if (schema2.contains === void 0) return; - if (!helpers.isSchema(schema2.contains)) throw new Error('Expected "contains" keyword to be a schema'); - var result = new ValidatorResult(instance, schema2, options, ctx); - var count = instance.some(function(value, i) { - var res = self2.validateSchema(value, schema2.contains, options, ctx.makeChild(schema2.contains, i)); - return res.errors.length === 0; + } + function listTar(archivePath, compressionMethod) { + return __awaiter2(this, void 0, void 0, function* () { + const commands = yield getCommands(compressionMethod, "list", archivePath); + yield execCommands(commands); }); - if (count === false) { - result.addError({ - name: "contains", - argument: schema2.contains, - message: "must contain an item matching given schema" - }); + } + function extractTar2(archivePath, compressionMethod) { + return __awaiter2(this, void 0, void 0, function* () { + const workingDirectory = getWorkingDirectory(); + yield io6.mkdirP(workingDirectory); + const commands = yield getCommands(compressionMethod, "extract", archivePath); + yield execCommands(commands); + }); + } + function createTar(archiveFolder, sourceDirectories, compressionMethod) { + return __awaiter2(this, void 0, void 0, function* () { + (0, fs_1.writeFileSync)(path13.join(archiveFolder, constants_1.ManifestFilename), sourceDirectories.join("\n")); + const commands = yield getCommands(compressionMethod, "create"); + yield execCommands(commands, archiveFolder); + }); + } + } +}); + +// node_modules/@actions/cache/lib/cache.js +var require_cache5 = __commonJS({ + "node_modules/@actions/cache/lib/cache.js"(exports2) { + "use strict"; + var __createBinding2 = exports2 && exports2.__createBinding || (Object.create ? (function(o, m, k, k2) { + if (k2 === void 0) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { + return m[k]; + } }; } - return result; - }; - validators.minimum = function validateMinimum(instance, schema2, options, ctx) { - if (!this.types.number(instance)) return; - var result = new ValidatorResult(instance, schema2, options, ctx); - if (schema2.exclusiveMinimum && schema2.exclusiveMinimum === true) { - if (!(instance > schema2.minimum)) { - result.addError({ - name: "minimum", - argument: schema2.minimum, - message: "must be greater than " + schema2.minimum - }); - } - } else { - if (!(instance >= schema2.minimum)) { - result.addError({ - name: "minimum", - argument: schema2.minimum, - message: "must be greater than or equal to " + schema2.minimum - }); + Object.defineProperty(o, k2, desc); + }) : (function(o, m, k, k2) { + if (k2 === void 0) k2 = k; + o[k2] = m[k]; + })); + var __setModuleDefault2 = exports2 && exports2.__setModuleDefault || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); + }) : function(o, v) { + o["default"] = v; + }); + var __importStar2 = exports2 && exports2.__importStar || /* @__PURE__ */ (function() { + var ownKeys2 = function(o) { + ownKeys2 = Object.getOwnPropertyNames || function(o2) { + var ar = []; + for (var k in o2) if (Object.prototype.hasOwnProperty.call(o2, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys2(o); + }; + return function(mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) { + for (var k = ownKeys2(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding2(result, mod, k[i]); } + __setModuleDefault2(result, mod); + return result; + }; + })(); + var __awaiter2 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { + function adopt(value) { + return value instanceof P ? value : new P(function(resolve6) { + resolve6(value); + }); } - return result; - }; - validators.maximum = function validateMaximum(instance, schema2, options, ctx) { - if (!this.types.number(instance)) return; - var result = new ValidatorResult(instance, schema2, options, ctx); - if (schema2.exclusiveMaximum && schema2.exclusiveMaximum === true) { - if (!(instance < schema2.maximum)) { - result.addError({ - name: "maximum", - argument: schema2.maximum, - message: "must be less than " + schema2.maximum - }); + return new (P || (P = Promise))(function(resolve6, reject) { + function fulfilled(value) { + try { + step(generator.next(value)); + } catch (e) { + reject(e); + } } - } else { - if (!(instance <= schema2.maximum)) { - result.addError({ - name: "maximum", - argument: schema2.maximum, - message: "must be less than or equal to " + schema2.maximum - }); + function rejected(value) { + try { + step(generator["throw"](value)); + } catch (e) { + reject(e); + } } - } - return result; - }; - validators.exclusiveMinimum = function validateExclusiveMinimum(instance, schema2, options, ctx) { - if (typeof schema2.exclusiveMinimum === "boolean") return; - if (!this.types.number(instance)) return; - var result = new ValidatorResult(instance, schema2, options, ctx); - var valid3 = instance > schema2.exclusiveMinimum; - if (!valid3) { - result.addError({ - name: "exclusiveMinimum", - argument: schema2.exclusiveMinimum, - message: "must be strictly greater than " + schema2.exclusiveMinimum - }); - } - return result; - }; - validators.exclusiveMaximum = function validateExclusiveMaximum(instance, schema2, options, ctx) { - if (typeof schema2.exclusiveMaximum === "boolean") return; - if (!this.types.number(instance)) return; - var result = new ValidatorResult(instance, schema2, options, ctx); - var valid3 = instance < schema2.exclusiveMaximum; - if (!valid3) { - result.addError({ - name: "exclusiveMaximum", - argument: schema2.exclusiveMaximum, - message: "must be strictly less than " + schema2.exclusiveMaximum - }); - } - return result; - }; - var validateMultipleOfOrDivisbleBy = function validateMultipleOfOrDivisbleBy2(instance, schema2, options, ctx, validationType, errorMessage) { - if (!this.types.number(instance)) return; - var validationArgument = schema2[validationType]; - if (validationArgument == 0) { - throw new SchemaError(validationType + " cannot be zero"); - } - var result = new ValidatorResult(instance, schema2, options, ctx); - var instanceDecimals = helpers.getDecimalPlaces(instance); - var divisorDecimals = helpers.getDecimalPlaces(validationArgument); - var maxDecimals = 
Math.max(instanceDecimals, divisorDecimals); - var multiplier = Math.pow(10, maxDecimals); - if (Math.round(instance * multiplier) % Math.round(validationArgument * multiplier) !== 0) { - result.addError({ - name: validationType, - argument: validationArgument, - message: errorMessage + JSON.stringify(validationArgument) - }); - } - return result; - }; - validators.multipleOf = function validateMultipleOf(instance, schema2, options, ctx) { - return validateMultipleOfOrDivisbleBy.call(this, instance, schema2, options, ctx, "multipleOf", "is not a multiple of (divisible by) "); - }; - validators.divisibleBy = function validateDivisibleBy(instance, schema2, options, ctx) { - return validateMultipleOfOrDivisbleBy.call(this, instance, schema2, options, ctx, "divisibleBy", "is not divisible by (multiple of) "); + function step(result) { + result.done ? resolve6(result.value) : adopt(result.value).then(fulfilled, rejected); + } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); }; - validators.required = function validateRequired(instance, schema2, options, ctx) { - var result = new ValidatorResult(instance, schema2, options, ctx); - if (instance === void 0 && schema2.required === true) { - result.addError({ - name: "required", - message: "is required" - }); - } else if (this.types.object(instance) && Array.isArray(schema2.required)) { - schema2.required.forEach(function(n) { - if (getEnumerableProperty(instance, n) === void 0) { - result.addError({ - name: "required", - argument: n, - message: "requires property " + JSON.stringify(n) - }); - } - }); + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.FinalizeCacheError = exports2.ReserveCacheError = exports2.ValidationError = void 0; + exports2.isFeatureAvailable = isFeatureAvailable; + exports2.restoreCache = restoreCache3; + exports2.saveCache = saveCache3; + var core14 = __importStar2(require_core()); + var path13 = __importStar2(require("path")); + var utils = __importStar2(require_cacheUtils()); + var cacheHttpClient = __importStar2(require_cacheHttpClient()); + var cacheTwirpClient = __importStar2(require_cacheTwirpClient()); + var config_1 = require_config(); + var tar_1 = require_tar(); + var http_client_1 = require_lib(); + var ValidationError = class _ValidationError extends Error { + constructor(message) { + super(message); + this.name = "ValidationError"; + Object.setPrototypeOf(this, _ValidationError.prototype); } - return result; }; - validators.pattern = function validatePattern(instance, schema2, options, ctx) { - if (!this.types.string(instance)) return; - var result = new ValidatorResult(instance, schema2, options, ctx); - var pattern = schema2.pattern; - try { - var regexp = new RegExp(pattern, "u"); - } catch (_e) { - regexp = new RegExp(pattern); - } - if (!instance.match(regexp)) { - result.addError({ - name: "pattern", - argument: schema2.pattern, - message: "does not match pattern " + JSON.stringify(schema2.pattern.toString()) - }); + exports2.ValidationError = ValidationError; + var ReserveCacheError = class _ReserveCacheError extends Error { + constructor(message) { + super(message); + this.name = "ReserveCacheError"; + Object.setPrototypeOf(this, _ReserveCacheError.prototype); } - return result; }; - validators.format = function validateFormat(instance, schema2, options, ctx) { - if (instance === void 0) return; - var result = new ValidatorResult(instance, schema2, options, ctx); - if (!result.disableFormat && !helpers.isFormat(instance, schema2.format, this)) { - 
result.addError({ - name: "format", - argument: schema2.format, - message: "does not conform to the " + JSON.stringify(schema2.format) + " format" - }); + exports2.ReserveCacheError = ReserveCacheError; + var FinalizeCacheError = class _FinalizeCacheError extends Error { + constructor(message) { + super(message); + this.name = "FinalizeCacheError"; + Object.setPrototypeOf(this, _FinalizeCacheError.prototype); } - return result; }; - validators.minLength = function validateMinLength(instance, schema2, options, ctx) { - if (!this.types.string(instance)) return; - var result = new ValidatorResult(instance, schema2, options, ctx); - var hsp = instance.match(/[\uDC00-\uDFFF]/g); - var length = instance.length - (hsp ? hsp.length : 0); - if (!(length >= schema2.minLength)) { - result.addError({ - name: "minLength", - argument: schema2.minLength, - message: "does not meet minimum length of " + schema2.minLength - }); + exports2.FinalizeCacheError = FinalizeCacheError; + function checkPaths(paths) { + if (!paths || paths.length === 0) { + throw new ValidationError(`Path Validation Error: At least one directory or file path is required`); } - return result; - }; - validators.maxLength = function validateMaxLength(instance, schema2, options, ctx) { - if (!this.types.string(instance)) return; - var result = new ValidatorResult(instance, schema2, options, ctx); - var hsp = instance.match(/[\uDC00-\uDFFF]/g); - var length = instance.length - (hsp ? hsp.length : 0); - if (!(length <= schema2.maxLength)) { - result.addError({ - name: "maxLength", - argument: schema2.maxLength, - message: "does not meet maximum length of " + schema2.maxLength - }); + } + function checkKey(key) { + if (key.length > 512) { + throw new ValidationError(`Key Validation Error: ${key} cannot be larger than 512 characters.`); } - return result; - }; - validators.minItems = function validateMinItems(instance, schema2, options, ctx) { - if (!this.types.array(instance)) return; - var result = new ValidatorResult(instance, schema2, options, ctx); - if (!(instance.length >= schema2.minItems)) { - result.addError({ - name: "minItems", - argument: schema2.minItems, - message: "does not meet minimum length of " + schema2.minItems - }); + const regex = /^[^,]*$/; + if (!regex.test(key)) { + throw new ValidationError(`Key Validation Error: ${key} cannot contain commas.`); } - return result; - }; - validators.maxItems = function validateMaxItems(instance, schema2, options, ctx) { - if (!this.types.array(instance)) return; - var result = new ValidatorResult(instance, schema2, options, ctx); - if (!(instance.length <= schema2.maxItems)) { - result.addError({ - name: "maxItems", - argument: schema2.maxItems, - message: "does not meet maximum length of " + schema2.maxItems - }); + } + function isFeatureAvailable() { + const cacheServiceVersion = (0, config_1.getCacheServiceVersion)(); + switch (cacheServiceVersion) { + case "v2": + return !!process.env["ACTIONS_RESULTS_URL"]; + case "v1": + default: + return !!process.env["ACTIONS_CACHE_URL"]; } - return result; - }; - function testArrays(v, i, a) { - var j, len = a.length; - for (j = i + 1, len; j < len; j++) { - if (helpers.deepCompareStrict(v, a[j])) { - return false; + } + function restoreCache3(paths_1, primaryKey_1, restoreKeys_1, options_1) { + return __awaiter2(this, arguments, void 0, function* (paths, primaryKey, restoreKeys, options, enableCrossOsArchive = false) { + const cacheServiceVersion = (0, config_1.getCacheServiceVersion)(); + core14.debug(`Cache service version: 
${cacheServiceVersion}`); + checkPaths(paths); + switch (cacheServiceVersion) { + case "v2": + return yield restoreCacheV2(paths, primaryKey, restoreKeys, options, enableCrossOsArchive); + case "v1": + default: + return yield restoreCacheV1(paths, primaryKey, restoreKeys, options, enableCrossOsArchive); } - } - return true; + }); } - validators.uniqueItems = function validateUniqueItems(instance, schema2, options, ctx) { - if (schema2.uniqueItems !== true) return; - if (!this.types.array(instance)) return; - var result = new ValidatorResult(instance, schema2, options, ctx); - if (!instance.every(testArrays)) { - result.addError({ - name: "uniqueItems", - message: "contains duplicate item" - }); - } - return result; - }; - validators.dependencies = function validateDependencies(instance, schema2, options, ctx) { - if (!this.types.object(instance)) return; - var result = new ValidatorResult(instance, schema2, options, ctx); - for (var property in schema2.dependencies) { - if (instance[property] === void 0) { - continue; + function restoreCacheV1(paths_1, primaryKey_1, restoreKeys_1, options_1) { + return __awaiter2(this, arguments, void 0, function* (paths, primaryKey, restoreKeys, options, enableCrossOsArchive = false) { + restoreKeys = restoreKeys || []; + const keys = [primaryKey, ...restoreKeys]; + core14.debug("Resolved Keys:"); + core14.debug(JSON.stringify(keys)); + if (keys.length > 10) { + throw new ValidationError(`Key Validation Error: Keys are limited to a maximum of 10.`); } - var dep = schema2.dependencies[property]; - var childContext = ctx.makeChild(dep, property); - if (typeof dep == "string") { - dep = [dep]; + for (const key of keys) { + checkKey(key); } - if (Array.isArray(dep)) { - dep.forEach(function(prop) { - if (instance[prop] === void 0) { - result.addError({ - // FIXME there's two different "dependencies" errors here with slightly different outputs - // Can we make these the same? Or should we create different error types? - name: "dependencies", - argument: childContext.propertyPath, - message: "property " + prop + " not found, required by " + childContext.propertyPath - }); - } + const compressionMethod = yield utils.getCompressionMethod(); + let archivePath = ""; + try { + const cacheEntry = yield cacheHttpClient.getCacheEntry(keys, paths, { + compressionMethod, + enableCrossOsArchive }); - } else { - var res = this.validateSchema(instance, dep, options, childContext); - if (result.instance !== res.instance) result.instance = res.instance; - if (res && res.errors.length) { - result.addError({ - name: "dependencies", - argument: childContext.propertyPath, - message: "does not meet dependency required by " + childContext.propertyPath - }); - result.importErrors(res); + if (!(cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.archiveLocation)) { + return void 0; + } + if (options === null || options === void 0 ? 
void 0 : options.lookupOnly) { + core14.info("Lookup only - skipping download"); + return cacheEntry.cacheKey; + } + archivePath = path13.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); + core14.debug(`Archive Path: ${archivePath}`); + yield cacheHttpClient.downloadCache(cacheEntry.archiveLocation, archivePath, options); + if (core14.isDebug()) { + yield (0, tar_1.listTar)(archivePath, compressionMethod); + } + const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); + core14.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); + yield (0, tar_1.extractTar)(archivePath, compressionMethod); + core14.info("Cache restored successfully"); + return cacheEntry.cacheKey; + } catch (error3) { + const typedError = error3; + if (typedError.name === ValidationError.name) { + throw error3; + } else { + if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { + core14.error(`Failed to restore: ${error3.message}`); + } else { + core14.warning(`Failed to restore: ${error3.message}`); + } + } + } finally { + try { + yield utils.unlinkFile(archivePath); + } catch (error3) { + core14.debug(`Failed to delete archive: ${error3}`); } } - } - return result; - }; - validators["enum"] = function validateEnum(instance, schema2, options, ctx) { - if (instance === void 0) { - return null; - } - if (!Array.isArray(schema2["enum"])) { - throw new SchemaError("enum expects an array", schema2); - } - var result = new ValidatorResult(instance, schema2, options, ctx); - if (!schema2["enum"].some(helpers.deepCompareStrict.bind(null, instance))) { - result.addError({ - name: "enum", - argument: schema2["enum"], - message: "is not one of enum values: " + schema2["enum"].map(String).join(",") - }); - } - return result; - }; - validators["const"] = function validateEnum(instance, schema2, options, ctx) { - if (instance === void 0) { - return null; - } - var result = new ValidatorResult(instance, schema2, options, ctx); - if (!helpers.deepCompareStrict(schema2["const"], instance)) { - result.addError({ - name: "const", - argument: schema2["const"], - message: "does not exactly match expected constant: " + schema2["const"] - }); - } - return result; - }; - validators.not = validators.disallow = function validateNot(instance, schema2, options, ctx) { - var self2 = this; - if (instance === void 0) return null; - var result = new ValidatorResult(instance, schema2, options, ctx); - var notTypes = schema2.not || schema2.disallow; - if (!notTypes) return null; - if (!Array.isArray(notTypes)) notTypes = [notTypes]; - notTypes.forEach(function(type2) { - if (self2.testType(instance, schema2, options, ctx, type2)) { - var id = type2 && (type2.$id || type2.id); - var schemaId = id || type2; - result.addError({ - name: "not", - argument: schemaId, - message: "is of prohibited type " + schemaId - }); - } + return void 0; }); - return result; - }; - module2.exports = attribute; - } -}); - -// node_modules/jsonschema/lib/scan.js -var require_scan = __commonJS({ - "node_modules/jsonschema/lib/scan.js"(exports2, module2) { - "use strict"; - var urilib = require("url"); - var helpers = require_helpers3(); - module2.exports.SchemaScanResult = SchemaScanResult; - function SchemaScanResult(found, ref) { - this.id = found; - this.ref = ref; } - module2.exports.scan = function scan(base, schema2) { - function scanSchema(baseuri, schema3) { - if (!schema3 || typeof schema3 != 
"object") return; - if (schema3.$ref) { - var resolvedUri = urilib.resolve(baseuri, schema3.$ref); - ref[resolvedUri] = ref[resolvedUri] ? ref[resolvedUri] + 1 : 0; - return; + function restoreCacheV2(paths_1, primaryKey_1, restoreKeys_1, options_1) { + return __awaiter2(this, arguments, void 0, function* (paths, primaryKey, restoreKeys, options, enableCrossOsArchive = false) { + options = Object.assign(Object.assign({}, options), { useAzureSdk: true }); + restoreKeys = restoreKeys || []; + const keys = [primaryKey, ...restoreKeys]; + core14.debug("Resolved Keys:"); + core14.debug(JSON.stringify(keys)); + if (keys.length > 10) { + throw new ValidationError(`Key Validation Error: Keys are limited to a maximum of 10.`); } - var id = schema3.$id || schema3.id; - var ourBase = id ? urilib.resolve(baseuri, id) : baseuri; - if (ourBase) { - if (ourBase.indexOf("#") < 0) ourBase += "#"; - if (found[ourBase]) { - if (!helpers.deepCompareStrict(found[ourBase], schema3)) { - throw new Error("Schema <" + ourBase + "> already exists with different definition"); + for (const key of keys) { + checkKey(key); + } + let archivePath = ""; + try { + const twirpClient = cacheTwirpClient.internalCacheTwirpClient(); + const compressionMethod = yield utils.getCompressionMethod(); + const request2 = { + key: primaryKey, + restoreKeys, + version: utils.getCacheVersion(paths, compressionMethod, enableCrossOsArchive) + }; + const response = yield twirpClient.GetCacheEntryDownloadURL(request2); + if (!response.ok) { + core14.debug(`Cache not found for version ${request2.version} of keys: ${keys.join(", ")}`); + return void 0; + } + const isRestoreKeyMatch = request2.key !== response.matchedKey; + if (isRestoreKeyMatch) { + core14.info(`Cache hit for restore-key: ${response.matchedKey}`); + } else { + core14.info(`Cache hit for: ${response.matchedKey}`); + } + if (options === null || options === void 0 ? void 0 : options.lookupOnly) { + core14.info("Lookup only - skipping download"); + return response.matchedKey; + } + archivePath = path13.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); + core14.debug(`Archive path: ${archivePath}`); + core14.debug(`Starting download of archive to: ${archivePath}`); + yield cacheHttpClient.downloadCache(response.signedDownloadUrl, archivePath, options); + const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); + core14.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); + if (core14.isDebug()) { + yield (0, tar_1.listTar)(archivePath, compressionMethod); + } + yield (0, tar_1.extractTar)(archivePath, compressionMethod); + core14.info("Cache restored successfully"); + return response.matchedKey; + } catch (error3) { + const typedError = error3; + if (typedError.name === ValidationError.name) { + throw error3; + } else { + if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { + core14.error(`Failed to restore: ${error3.message}`); + } else { + core14.warning(`Failed to restore: ${error3.message}`); } - return found[ourBase]; } - found[ourBase] = schema3; - if (ourBase[ourBase.length - 1] == "#") { - found[ourBase.substring(0, ourBase.length - 1)] = schema3; + } finally { + try { + if (archivePath) { + yield utils.unlinkFile(archivePath); + } + } catch (error3) { + core14.debug(`Failed to delete archive: ${error3}`); } } - scanArray(ourBase + "/items", Array.isArray(schema3.items) ? 
schema3.items : [schema3.items]); - scanArray(ourBase + "/extends", Array.isArray(schema3.extends) ? schema3.extends : [schema3.extends]); - scanSchema(ourBase + "/additionalItems", schema3.additionalItems); - scanObject(ourBase + "/properties", schema3.properties); - scanSchema(ourBase + "/additionalProperties", schema3.additionalProperties); - scanObject(ourBase + "/definitions", schema3.definitions); - scanObject(ourBase + "/patternProperties", schema3.patternProperties); - scanObject(ourBase + "/dependencies", schema3.dependencies); - scanArray(ourBase + "/disallow", schema3.disallow); - scanArray(ourBase + "/allOf", schema3.allOf); - scanArray(ourBase + "/anyOf", schema3.anyOf); - scanArray(ourBase + "/oneOf", schema3.oneOf); - scanSchema(ourBase + "/not", schema3.not); - } - function scanArray(baseuri, schemas) { - if (!Array.isArray(schemas)) return; - for (var i = 0; i < schemas.length; i++) { - scanSchema(baseuri + "/" + i, schemas[i]); - } - } - function scanObject(baseuri, schemas) { - if (!schemas || typeof schemas != "object") return; - for (var p in schemas) { - scanSchema(baseuri + "/" + p, schemas[p]); + return void 0; + }); + } + function saveCache3(paths_1, key_1, options_1) { + return __awaiter2(this, arguments, void 0, function* (paths, key, options, enableCrossOsArchive = false) { + const cacheServiceVersion = (0, config_1.getCacheServiceVersion)(); + core14.debug(`Cache service version: ${cacheServiceVersion}`); + checkPaths(paths); + checkKey(key); + switch (cacheServiceVersion) { + case "v2": + return yield saveCacheV2(paths, key, options, enableCrossOsArchive); + case "v1": + default: + return yield saveCacheV1(paths, key, options, enableCrossOsArchive); } - } - var found = {}; - var ref = {}; - scanSchema(base, schema2); - return new SchemaScanResult(found, ref); - }; - } -}); - -// node_modules/jsonschema/lib/validator.js -var require_validator = __commonJS({ - "node_modules/jsonschema/lib/validator.js"(exports2, module2) { - "use strict"; - var urilib = require("url"); - var attribute = require_attribute(); - var helpers = require_helpers3(); - var scanSchema = require_scan().scan; - var ValidatorResult = helpers.ValidatorResult; - var ValidatorResultError = helpers.ValidatorResultError; - var SchemaError = helpers.SchemaError; - var SchemaContext = helpers.SchemaContext; - var anonymousBase = "/"; - var Validator3 = function Validator4() { - this.customFormats = Object.create(Validator4.prototype.customFormats); - this.schemas = {}; - this.unresolvedRefs = []; - this.types = Object.create(types); - this.attributes = Object.create(attribute.validators); - }; - Validator3.prototype.customFormats = {}; - Validator3.prototype.schemas = null; - Validator3.prototype.types = null; - Validator3.prototype.attributes = null; - Validator3.prototype.unresolvedRefs = null; - Validator3.prototype.addSchema = function addSchema(schema2, base) { - var self2 = this; - if (!schema2) { - return null; - } - var scan = scanSchema(base || anonymousBase, schema2); - var ourUri = base || schema2.$id || schema2.id; - for (var uri in scan.id) { - this.schemas[uri] = scan.id[uri]; - } - for (var uri in scan.ref) { - this.unresolvedRefs.push(uri); - } - this.unresolvedRefs = this.unresolvedRefs.filter(function(uri2) { - return typeof self2.schemas[uri2] === "undefined"; }); - return this.schemas[ourUri]; - }; - Validator3.prototype.addSubSchemaArray = function addSubSchemaArray(baseuri, schemas) { - if (!Array.isArray(schemas)) return; - for (var i = 0; i < schemas.length; i++) { - 
this.addSubSchema(baseuri, schemas[i]); - } - }; - Validator3.prototype.addSubSchemaObject = function addSubSchemaArray(baseuri, schemas) { - if (!schemas || typeof schemas != "object") return; - for (var p in schemas) { - this.addSubSchema(baseuri, schemas[p]); - } - }; - Validator3.prototype.setSchemas = function setSchemas(schemas) { - this.schemas = schemas; - }; - Validator3.prototype.getSchema = function getSchema(urn) { - return this.schemas[urn]; - }; - Validator3.prototype.validate = function validate(instance, schema2, options, ctx) { - if (typeof schema2 !== "boolean" && typeof schema2 !== "object" || schema2 === null) { - throw new SchemaError("Expected `schema` to be an object or boolean"); - } - if (!options) { - options = {}; - } - var id = schema2.$id || schema2.id; - var base = urilib.resolve(options.base || anonymousBase, id || ""); - if (!ctx) { - ctx = new SchemaContext(schema2, options, [], base, Object.create(this.schemas)); - if (!ctx.schemas[base]) { - ctx.schemas[base] = schema2; + } + function saveCacheV1(paths_1, key_1, options_1) { + return __awaiter2(this, arguments, void 0, function* (paths, key, options, enableCrossOsArchive = false) { + var _a, _b, _c, _d, _e; + const compressionMethod = yield utils.getCompressionMethod(); + let cacheId = -1; + const cachePaths = yield utils.resolvePaths(paths); + core14.debug("Cache Paths:"); + core14.debug(`${JSON.stringify(cachePaths)}`); + if (cachePaths.length === 0) { + throw new Error(`Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`); } - var found = scanSchema(base, schema2); - for (var n in found.id) { - var sch = found.id[n]; - ctx.schemas[n] = sch; + const archiveFolder = yield utils.createTempDirectory(); + const archivePath = path13.join(archiveFolder, utils.getCacheFileName(compressionMethod)); + core14.debug(`Archive Path: ${archivePath}`); + try { + yield (0, tar_1.createTar)(archiveFolder, cachePaths, compressionMethod); + if (core14.isDebug()) { + yield (0, tar_1.listTar)(archivePath, compressionMethod); + } + const fileSizeLimit = 10 * 1024 * 1024 * 1024; + const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); + core14.debug(`File Size: ${archiveFileSize}`); + if (archiveFileSize > fileSizeLimit && !(0, config_1.isGhes)()) { + throw new Error(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 10GB limit, not saving cache.`); + } + core14.debug("Reserving Cache"); + const reserveCacheResponse = yield cacheHttpClient.reserveCache(key, paths, { + compressionMethod, + enableCrossOsArchive, + cacheSize: archiveFileSize + }); + if ((_a = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.result) === null || _a === void 0 ? void 0 : _a.cacheId) { + cacheId = (_b = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.result) === null || _b === void 0 ? void 0 : _b.cacheId; + } else if ((reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.statusCode) === 400) { + throw new Error((_d = (_c = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.error) === null || _c === void 0 ? void 0 : _c.message) !== null && _d !== void 0 ? 
_d : `Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the data cap limit, not saving cache.`); + } else { + throw new ReserveCacheError(`Unable to reserve cache with key ${key}, another job may be creating this cache. More details: ${(_e = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.error) === null || _e === void 0 ? void 0 : _e.message}`); + } + core14.debug(`Saving Cache (ID: ${cacheId})`); + yield cacheHttpClient.saveCache(cacheId, archivePath, "", options); + } catch (error3) { + const typedError = error3; + if (typedError.name === ValidationError.name) { + throw error3; + } else if (typedError.name === ReserveCacheError.name) { + core14.info(`Failed to save: ${typedError.message}`); + } else { + if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { + core14.error(`Failed to save: ${typedError.message}`); + } else { + core14.warning(`Failed to save: ${typedError.message}`); + } + } + } finally { + try { + yield utils.unlinkFile(archivePath); + } catch (error3) { + core14.debug(`Failed to delete archive: ${error3}`); + } } - } - if (options.required && instance === void 0) { - var result = new ValidatorResult(instance, schema2, options, ctx); - result.addError("is required, but is undefined"); - return result; - } - var result = this.validateSchema(instance, schema2, options, ctx); - if (!result) { - throw new Error("Result undefined"); - } else if (options.throwAll && result.errors.length) { - throw new ValidatorResultError(result); - } - return result; - }; - function shouldResolve(schema2) { - var ref = typeof schema2 === "string" ? schema2 : schema2.$ref; - if (typeof ref == "string") return ref; - return false; + return cacheId; + }); } - Validator3.prototype.validateSchema = function validateSchema(instance, schema2, options, ctx) { - var result = new ValidatorResult(instance, schema2, options, ctx); - if (typeof schema2 === "boolean") { - if (schema2 === true) { - schema2 = {}; - } else if (schema2 === false) { - schema2 = { type: [] }; - } - } else if (!schema2) { - throw new Error("schema is undefined"); - } - if (schema2["extends"]) { - if (Array.isArray(schema2["extends"])) { - var schemaobj = { schema: schema2, ctx }; - schema2["extends"].forEach(this.schemaTraverser.bind(this, schemaobj)); - schema2 = schemaobj.schema; - schemaobj.schema = null; - schemaobj.ctx = null; - schemaobj = null; - } else { - schema2 = helpers.deepMerge(schema2, this.superResolve(schema2["extends"], ctx)); + function saveCacheV2(paths_1, key_1, options_1) { + return __awaiter2(this, arguments, void 0, function* (paths, key, options, enableCrossOsArchive = false) { + options = Object.assign(Object.assign({}, options), { uploadChunkSize: 64 * 1024 * 1024, uploadConcurrency: 8, useAzureSdk: true }); + const compressionMethod = yield utils.getCompressionMethod(); + const twirpClient = cacheTwirpClient.internalCacheTwirpClient(); + let cacheId = -1; + const cachePaths = yield utils.resolvePaths(paths); + core14.debug("Cache Paths:"); + core14.debug(`${JSON.stringify(cachePaths)}`); + if (cachePaths.length === 0) { + throw new Error(`Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`); } - } - var switchSchema = shouldResolve(schema2); - if (switchSchema) { - var resolved = this.resolve(schema2, switchSchema, ctx); - var subctx = new 
SchemaContext(resolved.subschema, options, ctx.path, resolved.switchSchema, ctx.schemas); - return this.validateSchema(instance, resolved.subschema, options, subctx); - } - var skipAttributes = options && options.skipAttributes || []; - for (var key in schema2) { - if (!attribute.ignoreProperties[key] && skipAttributes.indexOf(key) < 0) { - var validatorErr = null; - var validator = this.attributes[key]; - if (validator) { - validatorErr = validator.call(this, instance, schema2, options, ctx); - } else if (options.allowUnknownAttributes === false) { - throw new SchemaError("Unsupported attribute: " + key, schema2); + const archiveFolder = yield utils.createTempDirectory(); + const archivePath = path13.join(archiveFolder, utils.getCacheFileName(compressionMethod)); + core14.debug(`Archive Path: ${archivePath}`); + try { + yield (0, tar_1.createTar)(archiveFolder, cachePaths, compressionMethod); + if (core14.isDebug()) { + yield (0, tar_1.listTar)(archivePath, compressionMethod); } - if (validatorErr) { - result.importErrors(validatorErr); + const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); + core14.debug(`File Size: ${archiveFileSize}`); + options.archiveSizeBytes = archiveFileSize; + core14.debug("Reserving Cache"); + const version = utils.getCacheVersion(paths, compressionMethod, enableCrossOsArchive); + const request2 = { + key, + version + }; + let signedUploadUrl; + try { + const response = yield twirpClient.CreateCacheEntry(request2); + if (!response.ok) { + if (response.message) { + core14.warning(`Cache reservation failed: ${response.message}`); + } + throw new Error(response.message || "Response was not ok"); + } + signedUploadUrl = response.signedUploadUrl; + } catch (error3) { + core14.debug(`Failed to reserve cache: ${error3}`); + throw new ReserveCacheError(`Unable to reserve cache with key ${key}, another job may be creating this cache.`); + } + core14.debug(`Attempting to upload cache located at: ${archivePath}`); + yield cacheHttpClient.saveCache(cacheId, archivePath, signedUploadUrl, options); + const finalizeRequest = { + key, + version, + sizeBytes: `${archiveFileSize}` + }; + const finalizeResponse = yield twirpClient.FinalizeCacheEntryUpload(finalizeRequest); + core14.debug(`FinalizeCacheEntryUploadResponse: ${finalizeResponse.ok}`); + if (!finalizeResponse.ok) { + if (finalizeResponse.message) { + throw new FinalizeCacheError(finalizeResponse.message); + } + throw new Error(`Unable to finalize cache with key ${key}, another job may be finalizing this cache.`); + } + cacheId = parseInt(finalizeResponse.entryId); + } catch (error3) { + const typedError = error3; + if (typedError.name === ValidationError.name) { + throw error3; + } else if (typedError.name === ReserveCacheError.name) { + core14.info(`Failed to save: ${typedError.message}`); + } else if (typedError.name === FinalizeCacheError.name) { + core14.warning(typedError.message); + } else { + if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { + core14.error(`Failed to save: ${typedError.message}`); + } else { + core14.warning(`Failed to save: ${typedError.message}`); + } + } + } finally { + try { + yield utils.unlinkFile(archivePath); + } catch (error3) { + core14.debug(`Failed to delete archive: ${error3}`); } } - } - if (typeof options.rewrite == "function") { - var value = options.rewrite.call(this, instance, schema2, options, ctx); - result.instance = value; - } - return result; - }; - 
Validator3.prototype.schemaTraverser = function schemaTraverser(schemaobj, s) { - schemaobj.schema = helpers.deepMerge(schemaobj.schema, this.superResolve(s, schemaobj.ctx)); - }; - Validator3.prototype.superResolve = function superResolve(schema2, ctx) { - var ref = shouldResolve(schema2); - if (ref) { - return this.resolve(schema2, ref, ctx).subschema; - } - return schema2; - }; - Validator3.prototype.resolve = function resolve6(schema2, switchSchema, ctx) { - switchSchema = ctx.resolve(switchSchema); - if (ctx.schemas[switchSchema]) { - return { subschema: ctx.schemas[switchSchema], switchSchema }; - } - var parsed = urilib.parse(switchSchema); - var fragment = parsed && parsed.hash; - var document2 = fragment && fragment.length && switchSchema.substr(0, switchSchema.length - fragment.length); - if (!document2 || !ctx.schemas[document2]) { - throw new SchemaError("no such schema <" + switchSchema + ">", schema2); - } - var subschema = helpers.objectGetPath(ctx.schemas[document2], fragment.substr(1)); - if (subschema === void 0) { - throw new SchemaError("no such schema " + fragment + " located in <" + document2 + ">", schema2); - } - return { subschema, switchSchema }; - }; - Validator3.prototype.testType = function validateType(instance, schema2, options, ctx, type2) { - if (type2 === void 0) { - return; - } else if (type2 === null) { - throw new SchemaError('Unexpected null in "type" keyword'); - } - if (typeof this.types[type2] == "function") { - return this.types[type2].call(this, instance); - } - if (type2 && typeof type2 == "object") { - var res = this.validateSchema(instance, type2, options, ctx); - return res === void 0 || !(res && res.errors.length); - } - return true; - }; - var types = Validator3.prototype.types = {}; - types.string = function testString(instance) { - return typeof instance == "string"; - }; - types.number = function testNumber(instance) { - return typeof instance == "number" && isFinite(instance); - }; - types.integer = function testInteger(instance) { - return typeof instance == "number" && instance % 1 === 0; - }; - types.boolean = function testBoolean(instance) { - return typeof instance == "boolean"; - }; - types.array = function testArray(instance) { - return Array.isArray(instance); - }; - types["null"] = function testNull(instance) { - return instance === null; - }; - types.date = function testDate(instance) { - return instance instanceof Date; - }; - types.any = function testAny(instance) { - return true; - }; - types.object = function testObject(instance) { - return instance && typeof instance === "object" && !Array.isArray(instance) && !(instance instanceof Date); - }; - module2.exports = Validator3; - } -}); - -// node_modules/jsonschema/lib/index.js -var require_lib2 = __commonJS({ - "node_modules/jsonschema/lib/index.js"(exports2, module2) { - "use strict"; - var Validator3 = module2.exports.Validator = require_validator(); - module2.exports.ValidatorResult = require_helpers3().ValidatorResult; - module2.exports.ValidatorResultError = require_helpers3().ValidatorResultError; - module2.exports.ValidationError = require_helpers3().ValidationError; - module2.exports.SchemaError = require_helpers3().SchemaError; - module2.exports.SchemaScanResult = require_scan().SchemaScanResult; - module2.exports.scan = require_scan().scan; - module2.exports.validate = function(instance, schema2, options) { - var v = new Validator3(); - return v.validate(instance, schema2, options); - }; + return cacheId; + }); + } } }); @@ -105893,8 +105893,8 @@ function 
cacheCodeQlVersion(version) { function getCachedCodeQlVersion() { return cachedCodeQlVersion; } -async function codeQlVersionAtLeast(codeql, requiredVersion) { - return semver.gte((await codeql.getVersion()).version, requiredVersion); +async function codeQlVersionAtLeast(codeql2, requiredVersion) { + return semver.gte((await codeql2.getVersion()).version, requiredVersion); } function getBaseDatabaseOidsFilePath(config) { return path.join(config.dbLocation, BASE_DATABASE_OIDS_FILE_NAME); @@ -106411,973 +106411,359 @@ async function getGitHubVersionFromApi(apiClient, apiDetails) { if (response.headers[GITHUB_ENTERPRISE_VERSION_HEADER] === "ghe.com") { return { type: "GitHub Enterprise Cloud with data residency" /* GHEC_DR */ }; } - const version = response.headers[GITHUB_ENTERPRISE_VERSION_HEADER]; - return { type: "GitHub Enterprise Server" /* GHES */, version }; -} -async function getGitHubVersion() { - if (cachedGitHubVersion === void 0) { - cachedGitHubVersion = await getGitHubVersionFromApi( - getApiClient(), - getApiDetails() - ); - } - return cachedGitHubVersion; -} -async function getWorkflowRelativePath() { - const repo_nwo = getRepositoryNwo(); - const run_id = Number(getRequiredEnvParam("GITHUB_RUN_ID")); - const apiClient = getApiClient(); - const runsResponse = await apiClient.request( - "GET /repos/:owner/:repo/actions/runs/:run_id?exclude_pull_requests=true", - { - owner: repo_nwo.owner, - repo: repo_nwo.repo, - run_id - } - ); - const workflowUrl = runsResponse.data.workflow_url; - const requiredWorkflowRegex = /\/repos\/[^/]+\/[^/]+\/actions\/required_workflows\/[^/]+/; - if (!workflowUrl || requiredWorkflowRegex.test(workflowUrl)) { - return runsResponse.data.path; - } - const workflowResponse = await apiClient.request(`GET ${workflowUrl}`); - return workflowResponse.data.path; -} -async function getAnalysisKey() { - let analysisKey = process.env["CODEQL_ACTION_ANALYSIS_KEY" /* ANALYSIS_KEY */]; - if (analysisKey !== void 0) { - return analysisKey; - } - const workflowPath = await getWorkflowRelativePath(); - const jobName = getRequiredEnvParam("GITHUB_JOB"); - analysisKey = `${workflowPath}:${jobName}`; - core5.exportVariable("CODEQL_ACTION_ANALYSIS_KEY" /* ANALYSIS_KEY */, analysisKey); - return analysisKey; -} -function computeAutomationID(analysis_key, environment) { - let automationID = `${analysis_key}/`; - const matrix = parseMatrixInput(environment); - if (matrix !== void 0) { - for (const entry of Object.entries(matrix).sort()) { - if (typeof entry[1] === "string") { - automationID += `${entry[0]}:${entry[1]}/`; - } else { - automationID += `${entry[0]}:/`; - } - } - } - return automationID; -} -function isEnablementError(msg) { - return [ - /Code Security must be enabled/i, - /Advanced Security must be enabled/i, - /Code Scanning is not enabled/i - ].some((pattern) => pattern.test(msg)); -} -function getFeatureEnablementError(message) { - return `Please verify that the necessary features are enabled: ${message}`; -} -function wrapApiConfigurationError(e) { - const httpError = asHTTPError(e); - if (httpError !== void 0) { - if ([ - /API rate limit exceeded/, - /commit not found/, - /Resource not accessible by integration/, - /ref .* not found in this repository/ - ].some((pattern) => pattern.test(httpError.message))) { - return new ConfigurationError(httpError.message); - } - if (httpError.message.includes("Bad credentials") || httpError.message.includes("Not Found") || httpError.message.includes("Requires authentication")) { - return new ConfigurationError( - 
"Please check that your token is valid and has the required permissions: contents: read, security-events: write" - ); - } - if (httpError.status === 403 && isEnablementError(httpError.message)) { - return new ConfigurationError( - getFeatureEnablementError(httpError.message) - ); - } - if (httpError.status === 429) { - return new ConfigurationError("API rate limit exceeded"); - } - } - return e; -} - -// src/feature-flags.ts -var fs4 = __toESM(require("fs")); -var path4 = __toESM(require("path")); -var semver4 = __toESM(require_semver2()); - -// src/defaults.json -var bundleVersion = "codeql-bundle-v2.24.2"; -var cliVersion = "2.24.2"; - -// src/overlay-database-utils.ts -var fs3 = __toESM(require("fs")); -var path3 = __toESM(require("path")); -var actionsCache = __toESM(require_cache5()); - -// src/caching-utils.ts -var core6 = __toESM(require_core()); - -// src/git-utils.ts -var core7 = __toESM(require_core()); -var toolrunner2 = __toESM(require_toolrunner()); -var io3 = __toESM(require_io()); -var semver2 = __toESM(require_semver2()); -var runGitCommand = async function(workingDirectory, args, customErrorMessage, options) { - let stdout = ""; - let stderr = ""; - core7.debug(`Running git command: git ${args.join(" ")}`); - try { - await new toolrunner2.ToolRunner(await io3.which("git", true), args, { - silent: true, - listeners: { - stdout: (data) => { - stdout += data.toString(); - }, - stderr: (data) => { - stderr += data.toString(); - } - }, - cwd: workingDirectory, - ...options - }).exec(); - return stdout; - } catch (error3) { - let reason = stderr; - if (stderr.includes("not a git repository")) { - reason = "The checkout path provided to the action does not appear to be a git repository."; - } - core7.info(`git call failed. ${customErrorMessage} Error: ${reason}`); - throw error3; - } -}; -var getCommitOid = async function(checkoutPath, ref = "HEAD") { - try { - const stdout = await runGitCommand( - checkoutPath, - ["rev-parse", ref], - "Continuing with commit SHA from user input or environment." - ); - return stdout.trim(); - } catch { - return getOptionalInput("sha") || getRequiredEnvParam("GITHUB_SHA"); - } -}; -var determineBaseBranchHeadCommitOid = async function(checkoutPathOverride) { - if (getWorkflowEventName() !== "pull_request") { - return void 0; - } - const mergeSha = getRequiredEnvParam("GITHUB_SHA"); - const checkoutPath = checkoutPathOverride ?? getOptionalInput("checkout_path"); - try { - let commitOid = ""; - let baseOid = ""; - let headOid = ""; - const stdout = await runGitCommand( - checkoutPath, - ["show", "-s", "--format=raw", mergeSha], - "Will calculate the base branch SHA on the server." 
- ); - for (const data of stdout.split("\n")) { - if (data.startsWith("commit ") && commitOid === "") { - commitOid = data.substring(7); - } else if (data.startsWith("parent ")) { - if (baseOid === "") { - baseOid = data.substring(7); - } else if (headOid === "") { - headOid = data.substring(7); - } - } - } - if (commitOid === mergeSha && headOid.length === 40 && baseOid.length === 40) { - return baseOid; - } - return void 0; - } catch { - return void 0; - } -}; -var decodeGitFilePath = function(filePath) { - if (filePath.startsWith('"') && filePath.endsWith('"')) { - filePath = filePath.substring(1, filePath.length - 1); - return filePath.replace( - /\\([abfnrtv\\"]|[0-7]{1,3})/g, - (_match, seq2) => { - switch (seq2[0]) { - case "a": - return "\x07"; - case "b": - return "\b"; - case "f": - return "\f"; - case "n": - return "\n"; - case "r": - return "\r"; - case "t": - return " "; - case "v": - return "\v"; - case "\\": - return "\\"; - case '"': - return '"'; - default: - return String.fromCharCode(parseInt(seq2, 8)); - } - } - ); - } - return filePath; -}; -var getFileOidsUnderPath = async function(basePath) { - const stdout = await runGitCommand( - basePath, - ["ls-files", "--recurse-submodules", "--format=%(objectname)_%(path)"], - "Cannot list Git OIDs of tracked files." - ); - const fileOidMap = {}; - const regex = /^([0-9a-f]{40})_(.+)$/; - for (const line of stdout.split("\n")) { - if (line) { - const match = line.match(regex); - if (match) { - const oid = match[1]; - const path13 = decodeGitFilePath(match[2]); - fileOidMap[path13] = oid; - } else { - throw new Error(`Unexpected "git ls-files" output: ${line}`); - } - } - } - return fileOidMap; -}; -function getRefFromEnv() { - let refEnv; - try { - refEnv = getRequiredEnvParam("GITHUB_REF"); - } catch (e) { - const maybeRef = process.env["CODE_SCANNING_REF"]; - if (maybeRef === void 0 || maybeRef.length === 0) { - throw e; - } - refEnv = maybeRef; - } - return refEnv; -} -async function getRef() { - const refInput = getOptionalInput("ref"); - const shaInput = getOptionalInput("sha"); - const checkoutPath = getOptionalInput("checkout_path") || getOptionalInput("source-root") || getRequiredEnvParam("GITHUB_WORKSPACE"); - const hasRefInput = !!refInput; - const hasShaInput = !!shaInput; - if ((hasRefInput || hasShaInput) && !(hasRefInput && hasShaInput)) { - throw new ConfigurationError( - "Both 'ref' and 'sha' are required if one of them is provided." - ); - } - const ref = refInput || getRefFromEnv(); - const sha = shaInput || getRequiredEnvParam("GITHUB_SHA"); - if (refInput) { - return refInput; - } - const pull_ref_regex = /refs\/pull\/(\d+)\/merge/; - if (!pull_ref_regex.test(ref)) { - return ref; - } - const head = await getCommitOid(checkoutPath, "HEAD"); - const hasChangedRef = sha !== head && await getCommitOid( - checkoutPath, - ref.replace(/^refs\/pull\//, "refs/remotes/pull/") - ) !== head; - if (hasChangedRef) { - const newRef = ref.replace(pull_ref_regex, "refs/pull/$1/head"); - core7.debug( - `No longer on merge commit, rewriting ref from ${ref} to ${newRef}.` - ); - return newRef; - } else { - return ref; - } -} -function removeRefsHeadsPrefix(ref) { - return ref.startsWith("refs/heads/") ? 
ref.slice("refs/heads/".length) : ref; -} -async function isAnalyzingDefaultBranch() { - if (process.env.CODE_SCANNING_IS_ANALYZING_DEFAULT_BRANCH === "true") { - return true; - } - let currentRef = await getRef(); - currentRef = removeRefsHeadsPrefix(currentRef); - const event = getWorkflowEvent(); - let defaultBranch = event?.repository?.default_branch; - if (getWorkflowEventName() === "schedule") { - defaultBranch = removeRefsHeadsPrefix(getRefFromEnv()); - } - return currentRef === defaultBranch; -} - -// src/logging.ts -var core8 = __toESM(require_core()); -function getActionsLogger() { - return { - debug: core8.debug, - info: core8.info, - warning: core8.warning, - error: core8.error, - isDebug: core8.isDebug, - startGroup: core8.startGroup, - endGroup: core8.endGroup - }; -} -function formatDuration(durationMs) { - if (durationMs < 1e3) { - return `${durationMs}ms`; - } - if (durationMs < 60 * 1e3) { - return `${(durationMs / 1e3).toFixed(1)}s`; - } - const minutes = Math.floor(durationMs / (60 * 1e3)); - const seconds = Math.floor(durationMs % (60 * 1e3) / 1e3); - return `${minutes}m${seconds}s`; -} - -// src/overlay-database-utils.ts -var CODEQL_OVERLAY_MINIMUM_VERSION = "2.23.8"; -var OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_MB = 7500; -var OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_BYTES = OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_MB * 1e6; -async function writeBaseDatabaseOidsFile(config, sourceRoot) { - const gitFileOids = await getFileOidsUnderPath(sourceRoot); - const gitFileOidsJson = JSON.stringify(gitFileOids); - const baseDatabaseOidsFilePath = getBaseDatabaseOidsFilePath(config); - await fs3.promises.writeFile(baseDatabaseOidsFilePath, gitFileOidsJson); + const version = response.headers[GITHUB_ENTERPRISE_VERSION_HEADER]; + return { type: "GitHub Enterprise Server" /* GHES */, version }; } -async function readBaseDatabaseOidsFile(config, logger) { - const baseDatabaseOidsFilePath = getBaseDatabaseOidsFilePath(config); - try { - const contents = await fs3.promises.readFile( - baseDatabaseOidsFilePath, - "utf-8" - ); - return JSON.parse(contents); - } catch (e) { - logger.error( - `Failed to read overlay-base file OIDs from ${baseDatabaseOidsFilePath}: ${e.message || e}` +async function getGitHubVersion() { + if (cachedGitHubVersion === void 0) { + cachedGitHubVersion = await getGitHubVersionFromApi( + getApiClient(), + getApiDetails() ); - throw e; } + return cachedGitHubVersion; } -async function writeOverlayChangesFile(config, sourceRoot, logger) { - const baseFileOids = await readBaseDatabaseOidsFile(config, logger); - const overlayFileOids = await getFileOidsUnderPath(sourceRoot); - const changedFiles = computeChangedFiles(baseFileOids, overlayFileOids); - logger.info( - `Found ${changedFiles.length} changed file(s) under ${sourceRoot}.` - ); - const changedFilesJson = JSON.stringify({ changes: changedFiles }); - const overlayChangesFile = path3.join( - getTemporaryDirectory(), - "overlay-changes.json" - ); - logger.debug( - `Writing overlay changed files to ${overlayChangesFile}: ${changedFilesJson}` +async function getWorkflowRelativePath() { + const repo_nwo = getRepositoryNwo(); + const run_id = Number(getRequiredEnvParam("GITHUB_RUN_ID")); + const apiClient = getApiClient(); + const runsResponse = await apiClient.request( + "GET /repos/:owner/:repo/actions/runs/:run_id?exclude_pull_requests=true", + { + owner: repo_nwo.owner, + repo: repo_nwo.repo, + run_id + } ); - await fs3.promises.writeFile(overlayChangesFile, changedFilesJson); - return overlayChangesFile; + const 
workflowUrl = runsResponse.data.workflow_url; + const requiredWorkflowRegex = /\/repos\/[^/]+\/[^/]+\/actions\/required_workflows\/[^/]+/; + if (!workflowUrl || requiredWorkflowRegex.test(workflowUrl)) { + return runsResponse.data.path; + } + const workflowResponse = await apiClient.request(`GET ${workflowUrl}`); + return workflowResponse.data.path; } -function computeChangedFiles(baseFileOids, overlayFileOids) { - const changes = []; - for (const [file, oid] of Object.entries(overlayFileOids)) { - if (!(file in baseFileOids) || baseFileOids[file] !== oid) { - changes.push(file); - } +async function getAnalysisKey() { + let analysisKey = process.env["CODEQL_ACTION_ANALYSIS_KEY" /* ANALYSIS_KEY */]; + if (analysisKey !== void 0) { + return analysisKey; } - for (const file of Object.keys(baseFileOids)) { - if (!(file in overlayFileOids)) { - changes.push(file); + const workflowPath = await getWorkflowRelativePath(); + const jobName = getRequiredEnvParam("GITHUB_JOB"); + analysisKey = `${workflowPath}:${jobName}`; + core5.exportVariable("CODEQL_ACTION_ANALYSIS_KEY" /* ANALYSIS_KEY */, analysisKey); + return analysisKey; +} +function computeAutomationID(analysis_key, environment) { + let automationID = `${analysis_key}/`; + const matrix = parseMatrixInput(environment); + if (matrix !== void 0) { + for (const entry of Object.entries(matrix).sort()) { + if (typeof entry[1] === "string") { + automationID += `${entry[0]}:${entry[1]}/`; + } else { + automationID += `${entry[0]}:/`; + } } } - return changes; + return automationID; } - -// src/tools-features.ts -var semver3 = __toESM(require_semver2()); -function isSupportedToolsFeature(versionInfo, feature) { - return !!versionInfo.features && versionInfo.features[feature]; +function isEnablementError(msg) { + return [ + /Code Security must be enabled/i, + /Advanced Security must be enabled/i, + /Code Scanning is not enabled/i + ].some((pattern) => pattern.test(msg)); } - -// src/feature-flags.ts -var DEFAULT_VERSION_FEATURE_FLAG_PREFIX = "default_codeql_version_"; -var DEFAULT_VERSION_FEATURE_FLAG_SUFFIX = "_enabled"; -var CODEQL_VERSION_ZSTD_BUNDLE = "2.19.0"; -var featureConfig = { - ["allow_toolcache_input" /* AllowToolcacheInput */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_ALLOW_TOOLCACHE_INPUT", - minimumVersion: void 0 - }, - ["cleanup_trap_caches" /* CleanupTrapCaches */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_CLEANUP_TRAP_CACHES", - minimumVersion: void 0 - }, - ["cpp_dependency_installation_enabled" /* CppDependencyInstallation */]: { - defaultValue: false, - envVar: "CODEQL_EXTRACTOR_CPP_AUTOINSTALL_DEPENDENCIES", - legacyApi: true, - minimumVersion: "2.15.0" - }, - ["csharp_cache_bmn" /* CsharpCacheBuildModeNone */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_CSHARP_CACHE_BMN", - minimumVersion: void 0 - }, - ["csharp_new_cache_key" /* CsharpNewCacheKey */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_CSHARP_NEW_CACHE_KEY", - minimumVersion: void 0 - }, - ["diff_informed_queries" /* DiffInformedQueries */]: { - defaultValue: true, - envVar: "CODEQL_ACTION_DIFF_INFORMED_QUERIES", - minimumVersion: "2.21.0" - }, - ["disable_csharp_buildless" /* DisableCsharpBuildless */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_DISABLE_CSHARP_BUILDLESS", - minimumVersion: void 0 - }, - ["disable_java_buildless_enabled" /* DisableJavaBuildlessEnabled */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_DISABLE_JAVA_BUILDLESS", - legacyApi: true, - minimumVersion: void 0 - }, - ["disable_kotlin_analysis_enabled" 
/* DisableKotlinAnalysisEnabled */]: { - defaultValue: false, - envVar: "CODEQL_DISABLE_KOTLIN_ANALYSIS", - legacyApi: true, - minimumVersion: void 0 - }, - ["export_diagnostics_enabled" /* ExportDiagnosticsEnabled */]: { - defaultValue: true, - envVar: "CODEQL_ACTION_EXPORT_DIAGNOSTICS", - legacyApi: true, - minimumVersion: void 0 - }, - ["force_nightly" /* ForceNightly */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_FORCE_NIGHTLY", - minimumVersion: void 0 - }, - ["ignore_generated_files" /* IgnoreGeneratedFiles */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_IGNORE_GENERATED_FILES", - minimumVersion: void 0 - }, - ["improved_proxy_certificates" /* ImprovedProxyCertificates */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_IMPROVED_PROXY_CERTIFICATES", - minimumVersion: void 0 - }, - ["java_network_debugging" /* JavaNetworkDebugging */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_JAVA_NETWORK_DEBUGGING", - minimumVersion: void 0 - }, - ["overlay_analysis" /* OverlayAnalysis */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS", - minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION - }, - ["overlay_analysis_actions" /* OverlayAnalysisActions */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_ACTIONS", - minimumVersion: void 0 - }, - ["overlay_analysis_code_scanning_actions" /* OverlayAnalysisCodeScanningActions */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_ACTIONS", - minimumVersion: void 0 - }, - ["overlay_analysis_code_scanning_cpp" /* OverlayAnalysisCodeScanningCpp */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_CPP", - minimumVersion: void 0 - }, - ["overlay_analysis_code_scanning_csharp" /* OverlayAnalysisCodeScanningCsharp */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_CSHARP", - minimumVersion: void 0 - }, - ["overlay_analysis_code_scanning_go" /* OverlayAnalysisCodeScanningGo */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_GO", - minimumVersion: void 0 - }, - ["overlay_analysis_code_scanning_java" /* OverlayAnalysisCodeScanningJava */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_JAVA", - minimumVersion: void 0 - }, - ["overlay_analysis_code_scanning_javascript" /* OverlayAnalysisCodeScanningJavascript */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_JAVASCRIPT", - minimumVersion: void 0 - }, - ["overlay_analysis_code_scanning_python" /* OverlayAnalysisCodeScanningPython */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_PYTHON", - minimumVersion: void 0 - }, - ["overlay_analysis_code_scanning_ruby" /* OverlayAnalysisCodeScanningRuby */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_RUBY", - minimumVersion: void 0 - }, - ["overlay_analysis_code_scanning_rust" /* OverlayAnalysisCodeScanningRust */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_RUST", - minimumVersion: void 0 - }, - ["overlay_analysis_code_scanning_swift" /* OverlayAnalysisCodeScanningSwift */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_SWIFT", - minimumVersion: void 0 - }, - ["overlay_analysis_cpp" /* OverlayAnalysisCpp */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CPP", - minimumVersion: void 0 - }, - ["overlay_analysis_csharp" /* OverlayAnalysisCsharp */]: { - 
defaultValue: false, - envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CSHARP", - minimumVersion: void 0 - }, - ["overlay_analysis_go" /* OverlayAnalysisGo */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_GO", - minimumVersion: void 0 - }, - ["overlay_analysis_java" /* OverlayAnalysisJava */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_JAVA", - minimumVersion: void 0 - }, - ["overlay_analysis_javascript" /* OverlayAnalysisJavascript */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_JAVASCRIPT", - minimumVersion: void 0 - }, - ["overlay_analysis_python" /* OverlayAnalysisPython */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_PYTHON", - minimumVersion: void 0 - }, - ["overlay_analysis_ruby" /* OverlayAnalysisRuby */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_RUBY", - minimumVersion: void 0 - }, - ["overlay_analysis_rust" /* OverlayAnalysisRust */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_RUST", - minimumVersion: void 0 - }, - ["overlay_analysis_skip_resource_checks" /* OverlayAnalysisSkipResourceChecks */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_SKIP_RESOURCE_CHECKS", - minimumVersion: void 0 - }, - ["overlay_analysis_swift" /* OverlayAnalysisSwift */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_SWIFT", - minimumVersion: void 0 - }, - ["python_default_is_to_not_extract_stdlib" /* PythonDefaultIsToNotExtractStdlib */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_DISABLE_PYTHON_STANDARD_LIBRARY_EXTRACTION", - minimumVersion: void 0, - toolsFeature: "pythonDefaultIsToNotExtractStdlib" /* PythonDefaultIsToNotExtractStdlib */ - }, - ["qa_telemetry_enabled" /* QaTelemetryEnabled */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_QA_TELEMETRY", - legacyApi: true, - minimumVersion: void 0 - }, - ["skip_file_coverage_on_prs" /* SkipFileCoverageOnPrs */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_SKIP_FILE_COVERAGE_ON_PRS", - // For testing, this is not behind a CLI version check yet. However - // before rolling this out externally, we should set a minimum version here - // since current versions of the CodeQL CLI will log if baseline information - // cannot be found when interpreting results. - minimumVersion: void 0 - }, - ["start_proxy_connection_checks" /* StartProxyConnectionChecks */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_START_PROXY_CONNECTION_CHECKS", - minimumVersion: void 0 - }, - ["upload_overlay_db_to_api" /* UploadOverlayDbToApi */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_UPLOAD_OVERLAY_DB_TO_API", - minimumVersion: void 0, - toolsFeature: "bundleSupportsOverlay" /* BundleSupportsOverlay */ - }, - ["use_repository_properties_v2" /* UseRepositoryProperties */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_USE_REPOSITORY_PROPERTIES", - minimumVersion: void 0 - }, - ["validate_db_config" /* ValidateDbConfig */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_VALIDATE_DB_CONFIG", - minimumVersion: void 0 - } -}; -var FEATURE_FLAGS_FILE_NAME = "cached-feature-flags.json"; -var OfflineFeatures = class { - constructor(logger) { - this.logger = logger; - } - async getDefaultCliVersion(_variant) { - return { - cliVersion, - tagName: bundleVersion - }; - } - /** - * Gets the `FeatureConfig` for `feature`. - */ - getFeatureConfig(feature) { - return featureConfig[feature]; - } - /** - * Determines whether `feature` is enabled without consulting the GitHub API. 
- * - * @param feature The feature to check. - * @param codeql An optional CodeQL object. If provided, and a `minimumVersion` is specified for the - * feature, the version of the CodeQL CLI will be checked against the minimum version. - * If the version is less than the minimum version, the feature will be considered - * disabled. If not provided, and a `minimumVersion` is specified for the feature, then - * this function will throw. - * @returns true if the feature is enabled, false otherwise. - * - * @throws if a `minimumVersion` is specified for the feature, and `codeql` is not provided. - */ - async getValue(feature, codeql) { - const offlineValue = await this.getOfflineValue(feature, codeql); - if (offlineValue !== void 0) { - return offlineValue; - } - return this.getDefaultValue(feature); - } - /** - * Determines whether `feature` is enabled using the CLI and environment variables. - */ - async getOfflineValue(feature, codeql) { - const config = this.getFeatureConfig(feature); - if (!codeql && config.minimumVersion) { - throw new Error( - `Internal error: A minimum version is specified for feature ${feature}, but no instance of CodeQL was provided.` - ); - } - if (!codeql && config.toolsFeature) { - throw new Error( - `Internal error: A required tools feature is specified for feature ${feature}, but no instance of CodeQL was provided.` - ); +function getFeatureEnablementError(message) { + return `Please verify that the necessary features are enabled: ${message}`; +} +function wrapApiConfigurationError(e) { + const httpError = asHTTPError(e); + if (httpError !== void 0) { + if ([ + /API rate limit exceeded/, + /commit not found/, + /Resource not accessible by integration/, + /ref .* not found in this repository/ + ].some((pattern) => pattern.test(httpError.message))) { + return new ConfigurationError(httpError.message); } - const envVar = (process.env[config.envVar] || "").toLocaleLowerCase(); - if (envVar === "false") { - this.logger.debug( - `Feature ${feature} is disabled via the environment variable ${config.envVar}.` + if (httpError.message.includes("Bad credentials") || httpError.message.includes("Not Found") || httpError.message.includes("Requires authentication")) { + return new ConfigurationError( + "Please check that your token is valid and has the required permissions: contents: read, security-events: write" ); - return false; - } - const minimumVersion = config.minimumVersion; - if (codeql && minimumVersion) { - if (!await codeQlVersionAtLeast(codeql, minimumVersion)) { - this.logger.debug( - `Feature ${feature} is disabled because the CodeQL CLI version is older than the minimum version ${minimumVersion}.` - ); - return false; - } else { - this.logger.debug( - `CodeQL CLI version ${(await codeql.getVersion()).version} is newer than the minimum version ${minimumVersion} for feature ${feature}.` - ); - } - } - const toolsFeature = config.toolsFeature; - if (codeql && toolsFeature) { - if (!await codeql.supportsFeature(toolsFeature)) { - this.logger.debug( - `Feature ${feature} is disabled because the CodeQL CLI version does not support the required tools feature ${toolsFeature}.` - ); - return false; - } else { - this.logger.debug( - `CodeQL CLI version ${(await codeql.getVersion()).version} supports the required tools feature ${toolsFeature} for feature ${feature}.` - ); - } } - if (envVar === "true") { - this.logger.debug( - `Feature ${feature} is enabled via the environment variable ${config.envVar}.` + if (httpError.status === 403 && 
isEnablementError(httpError.message)) { + return new ConfigurationError( + getFeatureEnablementError(httpError.message) ); - return true; } - return void 0; - } - /** Gets the default value of `feature`. */ - async getDefaultValue(feature) { - const config = this.getFeatureConfig(feature); - const defaultValue = config.defaultValue; - this.logger.debug( - `Feature ${feature} is ${defaultValue ? "enabled" : "disabled"} due to its default value.` - ); - return defaultValue; - } -}; -var Features = class extends OfflineFeatures { - gitHubFeatureFlags; - constructor(repositoryNwo, tempDir, logger) { - super(logger); - this.gitHubFeatureFlags = new GitHubFeatureFlags( - repositoryNwo, - path4.join(tempDir, FEATURE_FLAGS_FILE_NAME), - logger - ); - } - async getDefaultCliVersion(variant) { - if (supportsFeatureFlags(variant)) { - return await this.gitHubFeatureFlags.getDefaultCliVersionFromFlags(); + if (httpError.status === 429) { + return new ConfigurationError("API rate limit exceeded"); } - return super.getDefaultCliVersion(variant); } - /** - * - * @param feature The feature to check. - * @param codeql An optional CodeQL object. If provided, and a `minimumVersion` is specified for the - * feature, the version of the CodeQL CLI will be checked against the minimum version. - * If the version is less than the minimum version, the feature will be considered - * disabled. If not provided, and a `minimumVersion` is specified for the feature, then - * this function will throw. - * @returns true if the feature is enabled, false otherwise. - * - * @throws if a `minimumVersion` is specified for the feature, and `codeql` is not provided. - */ - async getValue(feature, codeql) { - const offlineValue = await this.getOfflineValue(feature, codeql); - if (offlineValue !== void 0) { - return offlineValue; - } - const apiValue = await this.gitHubFeatureFlags.getValue(feature); - if (apiValue !== void 0) { - this.logger.debug( - `Feature ${feature} is ${apiValue ? "enabled" : "disabled"} via the GitHub API.` + return e; +} + +// src/codeql.ts +var fs10 = __toESM(require("fs")); +var path10 = __toESM(require("path")); +var core10 = __toESM(require_core()); +var toolrunner3 = __toESM(require_toolrunner()); + +// src/cli-errors.ts +var SUPPORTED_PLATFORMS = [ + ["linux", "x64"], + ["win32", "x64"], + ["darwin", "x64"], + ["darwin", "arm64"] +]; +var CliError = class extends Error { + exitCode; + stderr; + constructor({ cmd, args, exitCode, stderr }) { + const prettyCommand = prettyPrintInvocation(cmd, args); + const fatalErrors = extractFatalErrors(stderr); + const autobuildErrors = extractAutobuildErrors(stderr); + let message; + if (fatalErrors) { + message = `Encountered a fatal error while running "${prettyCommand}". Exit code was ${exitCode} and error was: ${ensureEndsInPeriod( + fatalErrors.trim() + )} See the logs for more details.`; + } else if (autobuildErrors) { + message = `We were unable to automatically build your code. Please provide manual build steps. See ${"https://docs.github.com/en/code-security/code-scanning/troubleshooting-code-scanning/automatic-build-failed" /* AUTOMATIC_BUILD_FAILED */} for more information. Encountered the following error: ${autobuildErrors}`; + } else { + const lastLine = ensureEndsInPeriod( + stderr.trim().split("\n").pop()?.trim() || "n/a" ); - return apiValue; + message = `Encountered a fatal error while running "${prettyCommand}". 
Exit code was ${exitCode} and last log line was: ${lastLine} See the logs for more details.`; } - return this.getDefaultValue(feature); + super(message); + this.exitCode = exitCode; + this.stderr = stderr; } }; -var GitHubFeatureFlags = class { - constructor(repositoryNwo, featureFlagsFile, logger) { - this.repositoryNwo = repositoryNwo; - this.featureFlagsFile = featureFlagsFile; - this.logger = logger; - this.hasAccessedRemoteFeatureFlags = false; - } - cachedApiResponse; - // We cache whether the feature flags were accessed or not in order to accurately report whether flags were - // incorrectly configured vs. inaccessible in our telemetry. - hasAccessedRemoteFeatureFlags; - getCliVersionFromFeatureFlag(f) { - if (!f.startsWith(DEFAULT_VERSION_FEATURE_FLAG_PREFIX) || !f.endsWith(DEFAULT_VERSION_FEATURE_FLAG_SUFFIX)) { - return void 0; - } - const version = f.substring( - DEFAULT_VERSION_FEATURE_FLAG_PREFIX.length, - f.length - DEFAULT_VERSION_FEATURE_FLAG_SUFFIX.length - ).replace(/_/g, "."); - if (!semver4.valid(version)) { - this.logger.warning( - `Ignoring feature flag ${f} as it does not specify a valid CodeQL version.` - ); - return void 0; - } - return version; - } - async getDefaultCliVersionFromFlags() { - const response = await this.getAllFeatures(); - const enabledFeatureFlagCliVersions = Object.entries(response).map( - ([f, isEnabled]) => isEnabled ? this.getCliVersionFromFeatureFlag(f) : void 0 - ).filter((f) => f !== void 0); - if (enabledFeatureFlagCliVersions.length === 0) { - this.logger.warning( - `Feature flags do not specify a default CLI version. Falling back to the CLI version shipped with the Action. This is ${cliVersion}.` - ); - const result = { - cliVersion, - tagName: bundleVersion - }; - if (this.hasAccessedRemoteFeatureFlags) { - result.toolsFeatureFlagsValid = false; - } - return result; +function extractFatalErrors(error3) { + const fatalErrorRegex = /.*fatal (internal )?error occurr?ed(. Details)?:/gi; + let fatalErrors = []; + let lastFatalErrorIndex; + let match; + while ((match = fatalErrorRegex.exec(error3)) !== null) { + if (lastFatalErrorIndex !== void 0) { + fatalErrors.push(error3.slice(lastFatalErrorIndex, match.index).trim()); } - const maxCliVersion = enabledFeatureFlagCliVersions.reduce( - (maxVersion, currentVersion) => currentVersion > maxVersion ? currentVersion : maxVersion, - enabledFeatureFlagCliVersions[0] - ); - this.logger.debug( - `Derived default CLI version of ${maxCliVersion} from feature flags.` - ); - return { - cliVersion: maxCliVersion, - tagName: `codeql-bundle-v${maxCliVersion}`, - toolsFeatureFlagsValid: true - }; + lastFatalErrorIndex = match.index; } - async getValue(feature) { - const response = await this.getAllFeatures(); - if (response === void 0) { - this.logger.debug(`No feature flags API response for ${feature}.`); - return void 0; + if (lastFatalErrorIndex !== void 0) { + const lastError = error3.slice(lastFatalErrorIndex).trim(); + if (fatalErrors.length === 0) { + return lastError; } - const features = response[feature]; - if (features === void 0) { - this.logger.debug(`Feature '${feature}' undefined in API response.`); - return void 0; + const isOneLiner = !fatalErrors.some((e) => e.includes("\n")); + if (isOneLiner) { + fatalErrors = fatalErrors.map(ensureEndsInPeriod); } - return !!features; + return [ + ensureEndsInPeriod(lastError), + "Context:", + ...fatalErrors.reverse() + ].join(isOneLiner ? 
" " : "\n"); } - async getAllFeatures() { - if (this.cachedApiResponse !== void 0) { - return this.cachedApiResponse; - } - const fileFlags = await this.readLocalFlags(); - if (fileFlags !== void 0) { - this.cachedApiResponse = fileFlags; - return fileFlags; - } - let remoteFlags = await this.loadApiResponse(); - if (remoteFlags === void 0) { - remoteFlags = {}; - } - this.cachedApiResponse = remoteFlags; - await this.writeLocalFlags(remoteFlags); - return remoteFlags; + return void 0; +} +function extractAutobuildErrors(error3) { + const pattern = /.*\[autobuild\] \[ERROR\] (.*)/gi; + let errorLines = [...error3.matchAll(pattern)].map((match) => match[1]); + if (errorLines.length > 10) { + errorLines = errorLines.slice(0, 10); + errorLines.push("(truncated)"); } - async readLocalFlags() { - try { - if (fs4.existsSync(this.featureFlagsFile)) { - this.logger.debug( - `Loading feature flags from ${this.featureFlagsFile}` - ); - return JSON.parse( - fs4.readFileSync(this.featureFlagsFile, "utf8") - ); - } - } catch (e) { - this.logger.warning( - `Error reading cached feature flags file ${this.featureFlagsFile}: ${e}. Requesting from GitHub instead.` - ); - } - return void 0; + return errorLines.join("\n") || void 0; +} +var cliErrorsConfig = { + ["AutobuildError" /* AutobuildError */]: { + cliErrorMessageCandidates: [ + new RegExp("We were unable to automatically build your code") + ] + }, + ["CouldNotCreateTempDir" /* CouldNotCreateTempDir */]: { + cliErrorMessageCandidates: [new RegExp("Could not create temp directory")] + }, + ["ExternalRepositoryCloneFailed" /* ExternalRepositoryCloneFailed */]: { + cliErrorMessageCandidates: [ + new RegExp("Failed to clone external Git repository") + ] + }, + ["GradleBuildFailed" /* GradleBuildFailed */]: { + cliErrorMessageCandidates: [ + new RegExp("\\[autobuild\\] FAILURE: Build failed with an exception.") + ] + }, + // Version of CodeQL CLI is incompatible with this version of the CodeQL Action + ["IncompatibleWithActionVersion" /* IncompatibleWithActionVersion */]: { + cliErrorMessageCandidates: [ + new RegExp("is not compatible with this CodeQL CLI") + ] + }, + ["InitCalledTwice" /* InitCalledTwice */]: { + cliErrorMessageCandidates: [ + new RegExp( + "Refusing to create databases .* but could not process any of it" + ) + ], + additionalErrorMessageToAppend: `Is the "init" action called twice in the same job?` + }, + ["InvalidConfigFile" /* InvalidConfigFile */]: { + cliErrorMessageCandidates: [ + new RegExp("Config file .* is not valid"), + new RegExp("The supplied config file is empty") + ] + }, + ["InvalidExternalRepoSpecifier" /* InvalidExternalRepoSpecifier */]: { + cliErrorMessageCandidates: [ + new RegExp("Specifier for external repository is invalid") + ] + }, + // Expected source location for database creation does not exist + ["InvalidSourceRoot" /* InvalidSourceRoot */]: { + cliErrorMessageCandidates: [new RegExp("Invalid source root")] + }, + ["MavenBuildFailed" /* MavenBuildFailed */]: { + cliErrorMessageCandidates: [ + new RegExp("\\[autobuild\\] \\[ERROR\\] Failed to execute goal") + ] + }, + ["NoBuildCommandAutodetected" /* NoBuildCommandAutodetected */]: { + cliErrorMessageCandidates: [ + new RegExp("Could not auto-detect a suitable build method") + ] + }, + ["NoBuildMethodAutodetected" /* NoBuildMethodAutodetected */]: { + cliErrorMessageCandidates: [ + new RegExp( + "Could not detect a suitable build command for the source checkout" + ) + ] + }, + // Usually when a manual build script has failed, or if an autodetected language + 
// was unintended to have CodeQL analysis run on it. + ["NoSourceCodeSeen" /* NoSourceCodeSeen */]: { + exitCode: 32, + cliErrorMessageCandidates: [ + new RegExp( + "CodeQL detected code written in .* but could not process any of it" + ), + new RegExp( + "CodeQL did not detect any code written in languages supported by CodeQL" + ) + ] + }, + ["NoSupportedBuildCommandSucceeded" /* NoSupportedBuildCommandSucceeded */]: { + cliErrorMessageCandidates: [ + new RegExp("No supported build command succeeded") + ] + }, + ["NoSupportedBuildSystemDetected" /* NoSupportedBuildSystemDetected */]: { + cliErrorMessageCandidates: [ + new RegExp("No supported build system detected") + ] + }, + ["OutOfMemoryOrDisk" /* OutOfMemoryOrDisk */]: { + cliErrorMessageCandidates: [ + new RegExp("CodeQL is out of memory."), + new RegExp("out of disk"), + new RegExp("No space left on device") + ], + additionalErrorMessageToAppend: "For more information, see https://gh.io/troubleshooting-code-scanning/out-of-disk-or-memory" + }, + ["PackCannotBeFound" /* PackCannotBeFound */]: { + cliErrorMessageCandidates: [ + new RegExp( + "Query pack .* cannot be found\\. Check the spelling of the pack\\." + ), + new RegExp( + "is not a .ql file, .qls file, a directory, or a query pack specification." + ) + ] + }, + ["PackMissingAuth" /* PackMissingAuth */]: { + cliErrorMessageCandidates: [ + new RegExp("GitHub Container registry .* 403 Forbidden"), + new RegExp( + "Do you need to specify a token to authenticate to the registry?" + ) + ] + }, + ["SwiftBuildFailed" /* SwiftBuildFailed */]: { + cliErrorMessageCandidates: [ + new RegExp( + "\\[autobuilder/build\\] \\[build-command-failed\\] `autobuild` failed to run the build command" + ) + ] + }, + ["UnsupportedBuildMode" /* UnsupportedBuildMode */]: { + cliErrorMessageCandidates: [ + new RegExp( + "does not support the .* build mode. 
Please try using one of the following build modes instead" + ) + ] + }, + ["NotFoundInRegistry" /* NotFoundInRegistry */]: { + cliErrorMessageCandidates: [ + new RegExp("'.*' not found in the registry '.*'") + ] } - async writeLocalFlags(flags) { - try { - this.logger.debug(`Writing feature flags to ${this.featureFlagsFile}`); - fs4.writeFileSync(this.featureFlagsFile, JSON.stringify(flags)); - } catch (e) { - this.logger.warning( - `Error writing cached feature flags file ${this.featureFlagsFile}: ${e}.` - ); +}; +function getCliConfigCategoryIfExists(cliError) { + for (const [category, configuration] of Object.entries(cliErrorsConfig)) { + if (cliError.exitCode !== void 0 && configuration.exitCode !== void 0 && cliError.exitCode === configuration.exitCode) { + return category; } - } - async loadApiResponse() { - try { - const featuresToRequest = Object.entries(featureConfig).filter( - ([, config]) => !config.legacyApi - ).map(([f]) => f); - const FEATURES_PER_REQUEST = 25; - const featureChunks = []; - while (featuresToRequest.length > 0) { - featureChunks.push(featuresToRequest.splice(0, FEATURES_PER_REQUEST)); - } - let remoteFlags = {}; - for (const chunk of featureChunks) { - const response = await getApiClient().request( - "GET /repos/:owner/:repo/code-scanning/codeql-action/features", - { - owner: this.repositoryNwo.owner, - repo: this.repositoryNwo.repo, - features: chunk.join(",") - } - ); - const chunkFlags = response.data; - remoteFlags = { ...remoteFlags, ...chunkFlags }; - } - this.logger.debug( - "Loaded the following default values for the feature flags from the CodeQL Action API:" - ); - for (const [feature, value] of Object.entries(remoteFlags).sort( - ([nameA], [nameB]) => nameA.localeCompare(nameB) - )) { - this.logger.debug(` ${feature}: ${value}`); - } - this.hasAccessedRemoteFeatureFlags = true; - return remoteFlags; - } catch (e) { - const httpError = asHTTPError(e); - if (httpError?.status === 403) { - this.logger.warning( - `This run of the CodeQL Action does not have permission to access the CodeQL Action API endpoints. As a result, it will not be opted into any experimental features. This could be because the Action is running on a pull request from a fork. If not, please ensure the workflow has at least the 'security-events: read' permission. Details: ${httpError.message}` - ); - this.hasAccessedRemoteFeatureFlags = false; - return {}; - } else { - throw new Error( - `Encountered an error while trying to determine feature enablement: ${e}` - ); + for (const e of configuration.cliErrorMessageCandidates) { + if (cliError.message.match(e) || cliError.stderr.match(e)) { + return category; } } } -}; -function supportsFeatureFlags(githubVariant) { - return githubVariant === "GitHub.com" /* DOTCOM */ || githubVariant === "GitHub Enterprise Cloud with data residency" /* GHEC_DR */; + return void 0; } -function initFeatures(gitHubVersion, repositoryNwo, tempDir, logger) { - if (!supportsFeatureFlags(gitHubVersion.type)) { - logger.debug( - "Not running against github.com. Using default values for all features." 
- ); - return new OfflineFeatures(logger); - } else { - return new Features(repositoryNwo, tempDir, logger); +function isUnsupportedPlatform() { + return !SUPPORTED_PLATFORMS.some( + ([platform, arch2]) => platform === process.platform && arch2 === process.arch + ); +} +function getUnsupportedPlatformError(cliError) { + return new ConfigurationError( + `The CodeQL CLI does not support the platform/architecture combination of ${process.platform}/${process.arch} (see ${"https://codeql.github.com/docs/codeql-overview/system-requirements/" /* SYSTEM_REQUIREMENTS */}). The underlying error was: ${cliError.message}` + ); +} +function wrapCliConfigurationError(cliError) { + if (isUnsupportedPlatform()) { + return getUnsupportedPlatformError(cliError); + } + const cliConfigErrorCategory = getCliConfigCategoryIfExists(cliError); + if (cliConfigErrorCategory === void 0) { + return cliError; } + let errorMessageBuilder = cliError.message; + const additionalErrorMessageToAppend = cliErrorsConfig[cliConfigErrorCategory].additionalErrorMessageToAppend; + if (additionalErrorMessageToAppend !== void 0) { + errorMessageBuilder = `${errorMessageBuilder} ${additionalErrorMessageToAppend}`; + } + return new ConfigurationError(errorMessageBuilder); } -// src/status-report.ts -var os = __toESM(require("os")); -var core9 = __toESM(require_core()); - // src/config-utils.ts var fs6 = __toESM(require("fs")); var path7 = __toESM(require("path")); +// src/caching-utils.ts +var core6 = __toESM(require_core()); + // src/config/db-config.ts var jsonschema = __toESM(require_lib2()); -var semver5 = __toESM(require_semver2()); +var semver2 = __toESM(require_semver2()); var PACK_IDENTIFIER_PATTERN = (function() { const alphaNumeric = "[a-z0-9]"; const alphaNumericDash = "[a-z0-9-]"; @@ -107388,618 +106774,1016 @@ var PACK_IDENTIFIER_PATTERN = (function() { // src/diagnostics.ts var import_fs = require("fs"); var import_path = __toESM(require("path")); -var unwrittenDiagnostics = []; -var unwrittenDefaultLanguageDiagnostics = []; -function makeDiagnostic(id, name, data = void 0) { - return { - ...data, - timestamp: data?.timestamp ?? (/* @__PURE__ */ new Date()).toISOString(), - source: { ...data?.source, id, name } - }; -} -function addDiagnostic(config, language, diagnostic) { - const logger = getActionsLogger(); - const databasePath = language ? getCodeQLDatabasePath(config, language) : config.dbLocation; - if ((0, import_fs.existsSync)(databasePath)) { - writeDiagnostic(config, language, diagnostic); - } else { - logger.debug( - `Writing a diagnostic for ${language}, but the database at ${databasePath} does not exist yet.` - ); - unwrittenDiagnostics.push({ diagnostic, language }); - } -} -function addNoLanguageDiagnostic(config, diagnostic) { - if (config !== void 0) { - addDiagnostic( - config, - // Arbitrarily choose the first language. We could also choose all languages, but that - // increases the risk of misinterpreting the data. - config.languages[0], - diagnostic - ); - } else { - unwrittenDefaultLanguageDiagnostics.push(diagnostic); - } -} -function writeDiagnostic(config, language, diagnostic) { - const logger = getActionsLogger(); - const databasePath = language ? 
getCodeQLDatabasePath(config, language) : config.dbLocation; - const diagnosticsPath = import_path.default.resolve( - databasePath, - "diagnostic", - "codeql-action" - ); - try { - (0, import_fs.mkdirSync)(diagnosticsPath, { recursive: true }); - const jsonPath = import_path.default.resolve( - diagnosticsPath, - // Remove colons from the timestamp as these are not allowed in Windows filenames. - `codeql-action-${diagnostic.timestamp.replaceAll(":", "")}.json` - ); - (0, import_fs.writeFileSync)(jsonPath, JSON.stringify(diagnostic)); - } catch (err) { - logger.warning(`Unable to write diagnostic message to database: ${err}`); - logger.debug(JSON.stringify(diagnostic)); - } -} - -// src/diff-informed-analysis-utils.ts -var fs5 = __toESM(require("fs")); -var path6 = __toESM(require("path")); -function getDiffRangesJsonFilePath() { - return path6.join(getTemporaryDirectory(), "pr-diff-range.json"); -} -function readDiffRangesJsonFile(logger) { - const jsonFilePath = getDiffRangesJsonFilePath(); - if (!fs5.existsSync(jsonFilePath)) { - logger.debug(`Diff ranges JSON file does not exist at ${jsonFilePath}`); - return void 0; - } - const jsonContents = fs5.readFileSync(jsonFilePath, "utf8"); - logger.debug( - `Read pr-diff-range JSON file from ${jsonFilePath}: -${jsonContents}` - ); - return JSON.parse(jsonContents); -} - -// src/trap-caching.ts -var actionsCache2 = __toESM(require_cache5()); -// src/config-utils.ts -var OVERLAY_MINIMUM_AVAILABLE_DISK_SPACE_MB = 2e4; -var OVERLAY_MINIMUM_AVAILABLE_DISK_SPACE_BYTES = OVERLAY_MINIMUM_AVAILABLE_DISK_SPACE_MB * 1e6; -var OVERLAY_MINIMUM_MEMORY_MB = 5 * 1024; -var OVERLAY_ANALYSIS_FEATURES = { - actions: "overlay_analysis_actions" /* OverlayAnalysisActions */, - cpp: "overlay_analysis_cpp" /* OverlayAnalysisCpp */, - csharp: "overlay_analysis_csharp" /* OverlayAnalysisCsharp */, - go: "overlay_analysis_go" /* OverlayAnalysisGo */, - java: "overlay_analysis_java" /* OverlayAnalysisJava */, - javascript: "overlay_analysis_javascript" /* OverlayAnalysisJavascript */, - python: "overlay_analysis_python" /* OverlayAnalysisPython */, - ruby: "overlay_analysis_ruby" /* OverlayAnalysisRuby */, - rust: "overlay_analysis_rust" /* OverlayAnalysisRust */, - swift: "overlay_analysis_swift" /* OverlayAnalysisSwift */ -}; -var OVERLAY_ANALYSIS_CODE_SCANNING_FEATURES = { - actions: "overlay_analysis_code_scanning_actions" /* OverlayAnalysisCodeScanningActions */, - cpp: "overlay_analysis_code_scanning_cpp" /* OverlayAnalysisCodeScanningCpp */, - csharp: "overlay_analysis_code_scanning_csharp" /* OverlayAnalysisCodeScanningCsharp */, - go: "overlay_analysis_code_scanning_go" /* OverlayAnalysisCodeScanningGo */, - java: "overlay_analysis_code_scanning_java" /* OverlayAnalysisCodeScanningJava */, - javascript: "overlay_analysis_code_scanning_javascript" /* OverlayAnalysisCodeScanningJavascript */, - python: "overlay_analysis_code_scanning_python" /* OverlayAnalysisCodeScanningPython */, - ruby: "overlay_analysis_code_scanning_ruby" /* OverlayAnalysisCodeScanningRuby */, - rust: "overlay_analysis_code_scanning_rust" /* OverlayAnalysisCodeScanningRust */, - swift: "overlay_analysis_code_scanning_swift" /* OverlayAnalysisCodeScanningSwift */ -}; -function getPathToParsedConfigFile(tempDir) { - return path7.join(tempDir, "config"); -} -async function getConfig(tempDir, logger) { - const configFile = getPathToParsedConfigFile(tempDir); - if (!fs6.existsSync(configFile)) { - return void 0; - } - const configString = fs6.readFileSync(configFile, "utf8"); - logger.debug("Loaded 
config:"); - logger.debug(configString); - const config = JSON.parse(configString); - if (config.version === void 0) { - throw new ConfigurationError( - `Loaded configuration file, but it does not contain the expected 'version' field.` - ); - } - if (config.version !== getActionVersion()) { - throw new ConfigurationError( - `Loaded a configuration file for version '${config.version}', but running version '${getActionVersion()}'` - ); - } - return config; -} -function appendExtraQueryExclusions(extraQueryExclusions, cliConfig) { - const augmentedConfig = cloneObject(cliConfig); - if (extraQueryExclusions.length === 0) { - return augmentedConfig; +// src/logging.ts +var core7 = __toESM(require_core()); +function getActionsLogger() { + return { + debug: core7.debug, + info: core7.info, + warning: core7.warning, + error: core7.error, + isDebug: core7.isDebug, + startGroup: core7.startGroup, + endGroup: core7.endGroup + }; +} +function formatDuration(durationMs) { + if (durationMs < 1e3) { + return `${durationMs}ms`; } - augmentedConfig["query-filters"] = [ - // Ordering matters. If the first filter is an inclusion, it implicitly - // excludes all queries that are not included. If it is an exclusion, - // it implicitly includes all queries that are not excluded. So user - // filters (if any) should always be first to preserve intent. - ...augmentedConfig["query-filters"] || [], - ...extraQueryExclusions - ]; - if (augmentedConfig["query-filters"]?.length === 0) { - delete augmentedConfig["query-filters"]; + if (durationMs < 60 * 1e3) { + return `${(durationMs / 1e3).toFixed(1)}s`; } - return augmentedConfig; + const minutes = Math.floor(durationMs / (60 * 1e3)); + const seconds = Math.floor(durationMs % (60 * 1e3) / 1e3); + return `${minutes}m${seconds}s`; } -// src/status-report.ts -function isFirstPartyAnalysis(actionName) { - if (actionName !== "upload-sarif" /* UploadSarif */) { - return true; - } - return process.env["CODEQL_ACTION_INIT_HAS_RUN" /* INIT_ACTION_HAS_RUN */] === "true"; -} -function isThirdPartyAnalysis(actionName) { - return !isFirstPartyAnalysis(actionName); +// src/diagnostics.ts +var unwrittenDiagnostics = []; +var unwrittenDefaultLanguageDiagnostics = []; +function makeDiagnostic(id, name, data = void 0) { + return { + ...data, + timestamp: data?.timestamp ?? (/* @__PURE__ */ new Date()).toISOString(), + source: { ...data?.source, id, name } + }; } -function getActionsStatus(error3, otherFailureCause) { - if (error3 || otherFailureCause) { - return error3 instanceof ConfigurationError ? "user-error" : "failure"; +function addDiagnostic(config, language, diagnostic) { + const logger = getActionsLogger(); + const databasePath = language ? getCodeQLDatabasePath(config, language) : config.dbLocation; + if ((0, import_fs.existsSync)(databasePath)) { + writeDiagnostic(config, language, diagnostic); } else { - return "success"; + logger.debug( + `Writing a diagnostic for ${language}, but the database at ${databasePath} does not exist yet.` + ); + unwrittenDiagnostics.push({ diagnostic, language }); } } -function setJobStatusIfUnsuccessful(actionStatus) { - if (actionStatus === "user-error") { - core9.exportVariable( - "CODEQL_ACTION_JOB_STATUS" /* JOB_STATUS */, - process.env["CODEQL_ACTION_JOB_STATUS" /* JOB_STATUS */] ?? "JOB_STATUS_CONFIGURATION_ERROR" /* ConfigErrorStatus */ +function addNoLanguageDiagnostic(config, diagnostic) { + if (config !== void 0) { + addDiagnostic( + config, + // Arbitrarily choose the first language. 
We could also choose all languages, but that + // increases the risk of misinterpreting the data. + config.languages[0], + diagnostic ); - } else if (actionStatus === "failure" || actionStatus === "aborted") { - core9.exportVariable( - "CODEQL_ACTION_JOB_STATUS" /* JOB_STATUS */, - process.env["CODEQL_ACTION_JOB_STATUS" /* JOB_STATUS */] ?? "JOB_STATUS_FAILURE" /* FailureStatus */ + } else { + unwrittenDefaultLanguageDiagnostics.push(diagnostic); + } +} +function writeDiagnostic(config, language, diagnostic) { + const logger = getActionsLogger(); + const databasePath = language ? getCodeQLDatabasePath(config, language) : config.dbLocation; + const diagnosticsPath = import_path.default.resolve( + databasePath, + "diagnostic", + "codeql-action" + ); + try { + (0, import_fs.mkdirSync)(diagnosticsPath, { recursive: true }); + const jsonPath = import_path.default.resolve( + diagnosticsPath, + // Remove colons from the timestamp as these are not allowed in Windows filenames. + `codeql-action-${diagnostic.timestamp.replaceAll(":", "")}.json` ); + (0, import_fs.writeFileSync)(jsonPath, JSON.stringify(diagnostic)); + } catch (err) { + logger.warning(`Unable to write diagnostic message to database: ${err}`); + logger.debug(JSON.stringify(diagnostic)); } } -async function createStatusReportBase(actionName, status, actionStartedAt, config, diskInfo, logger, cause, exception2) { + +// src/diff-informed-analysis-utils.ts +var fs5 = __toESM(require("fs")); +var path6 = __toESM(require("path")); + +// src/feature-flags.ts +var fs4 = __toESM(require("fs")); +var path5 = __toESM(require("path")); +var semver5 = __toESM(require_semver2()); + +// src/defaults.json +var bundleVersion = "codeql-bundle-v2.24.2"; +var cliVersion = "2.24.2"; + +// src/overlay-database-utils.ts +var fs3 = __toESM(require("fs")); +var path4 = __toESM(require("path")); +var actionsCache = __toESM(require_cache5()); + +// src/git-utils.ts +var core8 = __toESM(require_core()); +var toolrunner2 = __toESM(require_toolrunner()); +var io3 = __toESM(require_io()); +var semver3 = __toESM(require_semver2()); +var runGitCommand = async function(workingDirectory, args, customErrorMessage, options) { + let stdout = ""; + let stderr = ""; + core8.debug(`Running git command: git ${args.join(" ")}`); try { - const commitOid = getOptionalInput("sha") || process.env["GITHUB_SHA"] || ""; - const ref = await getRef(); - const jobRunUUID = process.env["JOB_RUN_UUID" /* JOB_RUN_UUID */] || ""; - const workflowRunID = getWorkflowRunID(); - const workflowRunAttempt = getWorkflowRunAttempt(); - const workflowName = process.env["GITHUB_WORKFLOW"] || ""; - const jobName = process.env["GITHUB_JOB"] || ""; - const analysis_key = await getAnalysisKey(); - let workflowStartedAt = process.env["CODEQL_WORKFLOW_STARTED_AT" /* WORKFLOW_STARTED_AT */]; - if (workflowStartedAt === void 0) { - workflowStartedAt = actionStartedAt.toISOString(); - core9.exportVariable("CODEQL_WORKFLOW_STARTED_AT" /* WORKFLOW_STARTED_AT */, workflowStartedAt); - } - const runnerOs = getRequiredEnvParam("RUNNER_OS"); - const codeQlCliVersion = getCachedCodeQlVersion(); - const actionRef = process.env["GITHUB_ACTION_REF"] || ""; - const testingEnvironment = getTestingEnvironment(); - if (testingEnvironment) { - core9.exportVariable("CODEQL_ACTION_TESTING_ENVIRONMENT" /* TESTING_ENVIRONMENT */, testingEnvironment); - } - const isSteadyStateDefaultSetupRun = process.env["CODE_SCANNING_IS_STEADY_STATE_DEFAULT_SETUP"] === "true"; - const statusReport = { - action_name: actionName, - action_oid: 
"unknown", - // TODO decide if it's possible to fill this in - action_ref: actionRef, - action_started_at: actionStartedAt.toISOString(), - action_version: getActionVersion(), - analysis_kinds: config?.analysisKinds?.join(","), - analysis_key, - build_mode: config?.buildMode, - commit_oid: commitOid, - first_party_analysis: isFirstPartyAnalysis(actionName), - job_name: jobName, - job_run_uuid: jobRunUUID, - ref, - runner_os: runnerOs, - started_at: workflowStartedAt, - status, - steady_state_default_setup: isSteadyStateDefaultSetupRun, - testing_environment: testingEnvironment || "", - workflow_name: workflowName, - workflow_run_attempt: workflowRunAttempt, - workflow_run_id: workflowRunID - }; - try { - statusReport.actions_event_name = getWorkflowEventName(); - } catch (e) { - logger.warning( - `Could not determine the workflow event name: ${getErrorMessage(e)}.` - ); - } - if (config) { - statusReport.languages = config.languages?.join(","); - } - if (diskInfo) { - statusReport.runner_available_disk_space_bytes = diskInfo.numAvailableBytes; - statusReport.runner_total_disk_space_bytes = diskInfo.numTotalBytes; - } - if (cause) { - statusReport.cause = cause; - } - if (exception2) { - statusReport.exception = exception2; - } - if (status === "success" || status === "failure" || status === "aborted" || status === "user-error") { - statusReport.completed_at = (/* @__PURE__ */ new Date()).toISOString(); - } - const matrix = getRequiredInput("matrix"); - if (matrix) { - statusReport.matrix_vars = matrix; - } - if ("RUNNER_ARCH" in process.env) { - statusReport.runner_arch = process.env["RUNNER_ARCH"]; - } - if (!(runnerOs === "Linux" && isSelfHostedRunner())) { - statusReport.runner_os_release = os.release(); - } - if (codeQlCliVersion !== void 0) { - statusReport.codeql_version = codeQlCliVersion.version; - } - const imageVersion = process.env["ImageVersion"]; - if (imageVersion) { - statusReport.runner_image_version = imageVersion; + await new toolrunner2.ToolRunner(await io3.which("git", true), args, { + silent: true, + listeners: { + stdout: (data) => { + stdout += data.toString(); + }, + stderr: (data) => { + stderr += data.toString(); + } + }, + cwd: workingDirectory, + ...options + }).exec(); + return stdout; + } catch (error3) { + let reason = stderr; + if (stderr.includes("not a git repository")) { + reason = "The checkout path provided to the action does not appear to be a git repository."; } - return statusReport; - } catch (e) { - logger.warning( - `Failed to gather information for telemetry: ${getErrorMessage(e)}. Will skip sending status report.` + core8.info(`git call failed. ${customErrorMessage} Error: ${reason}`); + throw error3; + } +}; +var getCommitOid = async function(checkoutPath, ref = "HEAD") { + try { + const stdout = await runGitCommand( + checkoutPath, + ["rev-parse", ref], + "Continuing with commit SHA from user input or environment." ); - if (isInTestMode()) { - throw e; - } - return void 0; + return stdout.trim(); + } catch { + return getOptionalInput("sha") || getRequiredEnvParam("GITHUB_SHA"); } -} -var OUT_OF_DATE_MSG = "CodeQL Action is out-of-date. Please upgrade to the latest version of `codeql-action`."; -var INCOMPATIBLE_MSG = "CodeQL Action version is incompatible with the API endpoint. 
Please update to a compatible version of `codeql-action`."; -async function sendStatusReport(statusReport) { - setJobStatusIfUnsuccessful(statusReport.status); - const statusReportJSON = JSON.stringify(statusReport); - core9.debug(`Sending status report: ${statusReportJSON}`); - if (isInTestMode()) { - core9.debug("In test mode. Status reports are not uploaded."); - return; +}; +var determineBaseBranchHeadCommitOid = async function(checkoutPathOverride) { + if (getWorkflowEventName() !== "pull_request") { + return void 0; } - const nwo = getRepositoryNwo(); - const client = getApiClient(); + const mergeSha = getRequiredEnvParam("GITHUB_SHA"); + const checkoutPath = checkoutPathOverride ?? getOptionalInput("checkout_path"); try { - await client.request( - "PUT /repos/:owner/:repo/code-scanning/analysis/status", - { - owner: nwo.owner, - repo: nwo.repo, - data: statusReportJSON - } + let commitOid = ""; + let baseOid = ""; + let headOid = ""; + const stdout = await runGitCommand( + checkoutPath, + ["show", "-s", "--format=raw", mergeSha], + "Will calculate the base branch SHA on the server." ); - } catch (e) { - const httpError = asHTTPError(e); - if (httpError !== void 0) { - switch (httpError.status) { - case 403: - if (getWorkflowEventName() === "push" && process.env["GITHUB_ACTOR"] === "dependabot[bot]") { - core9.warning( - `Workflows triggered by Dependabot on the "push" event run with read-only access. Uploading CodeQL results requires write access. To use CodeQL with Dependabot, please ensure you are using the "pull_request" event for this workflow and avoid triggering on the "push" event for Dependabot branches. See ${"https://docs.github.com/en/code-security/code-scanning/creating-an-advanced-setup-for-code-scanning/customizing-your-advanced-setup-for-code-scanning#scanning-on-push" /* SCANNING_ON_PUSH */} for more information on how to configure these events.` - ); - } else { - core9.warning( - `This run of the CodeQL Action does not have permission to access the CodeQL Action API endpoints. This could be because the Action is running on a pull request from a fork. If not, please ensure the workflow has at least the 'security-events: read' permission. Details: ${httpError.message}` - ); - } - return; - case 404: - core9.warning(httpError.message); - return; - case 422: - if (getRequiredEnvParam("GITHUB_SERVER_URL") !== GITHUB_DOTCOM_URL) { - core9.debug(INCOMPATIBLE_MSG); - } else { - core9.debug(OUT_OF_DATE_MSG); - } - return; + for (const data of stdout.split("\n")) { + if (data.startsWith("commit ") && commitOid === "") { + commitOid = data.substring(7); + } else if (data.startsWith("parent ")) { + if (baseOid === "") { + baseOid = data.substring(7); + } else if (headOid === "") { + headOid = data.substring(7); + } } } - core9.warning( - `An unexpected error occurred when sending a status report: ${getErrorMessage( - e - )}` - ); + if (commitOid === mergeSha && headOid.length === 40 && baseOid.length === 40) { + return baseOid; + } + return void 0; + } catch { + return void 0; } -} -async function sendUnhandledErrorStatusReport(actionName, actionStartedAt, error3, logger) { - try { - const statusReport = await createStatusReportBase( - actionName, - "failure", - actionStartedAt, - void 0, - void 0, - logger, - `Unhandled CodeQL Action error: ${getErrorMessage(error3)}`, - error3 instanceof Error ? 
error3.stack : void 0 +}; +var decodeGitFilePath = function(filePath) { + if (filePath.startsWith('"') && filePath.endsWith('"')) { + filePath = filePath.substring(1, filePath.length - 1); + return filePath.replace( + /\\([abfnrtv\\"]|[0-7]{1,3})/g, + (_match, seq2) => { + switch (seq2[0]) { + case "a": + return "\x07"; + case "b": + return "\b"; + case "f": + return "\f"; + case "n": + return "\n"; + case "r": + return "\r"; + case "t": + return " "; + case "v": + return "\v"; + case "\\": + return "\\"; + case '"': + return '"'; + default: + return String.fromCharCode(parseInt(seq2, 8)); + } + } ); - if (statusReport !== void 0) { - await sendStatusReport(statusReport); + } + return filePath; +}; +var getFileOidsUnderPath = async function(basePath) { + const stdout = await runGitCommand( + basePath, + ["ls-files", "--recurse-submodules", "--format=%(objectname)_%(path)"], + "Cannot list Git OIDs of tracked files." + ); + const fileOidMap = {}; + const regex = /^([0-9a-f]{40})_(.+)$/; + for (const line of stdout.split("\n")) { + if (line) { + const match = line.match(regex); + if (match) { + const oid = match[1]; + const path13 = decodeGitFilePath(match[2]); + fileOidMap[path13] = oid; + } else { + throw new Error(`Unexpected "git ls-files" output: ${line}`); + } } + } + return fileOidMap; +}; +function getRefFromEnv() { + let refEnv; + try { + refEnv = getRequiredEnvParam("GITHUB_REF"); } catch (e) { - logger.warning( - `Failed to send the unhandled error status report: ${getErrorMessage(e)}.` - ); - if (isInTestMode()) { + const maybeRef = process.env["CODE_SCANNING_REF"]; + if (maybeRef === void 0 || maybeRef.length === 0) { throw e; } + refEnv = maybeRef; } + return refEnv; } - -// src/upload-lib.ts -var fs12 = __toESM(require("fs")); -var path12 = __toESM(require("path")); -var url = __toESM(require("url")); -var import_zlib = __toESM(require("zlib")); -var core12 = __toESM(require_core()); -var jsonschema2 = __toESM(require_lib2()); - -// src/codeql.ts -var fs10 = __toESM(require("fs")); -var path10 = __toESM(require("path")); -var core11 = __toESM(require_core()); -var toolrunner3 = __toESM(require_toolrunner()); - -// src/cli-errors.ts -var SUPPORTED_PLATFORMS = [ - ["linux", "x64"], - ["win32", "x64"], - ["darwin", "x64"], - ["darwin", "arm64"] -]; -var CliError = class extends Error { - exitCode; - stderr; - constructor({ cmd, args, exitCode, stderr }) { - const prettyCommand = prettyPrintInvocation(cmd, args); - const fatalErrors = extractFatalErrors(stderr); - const autobuildErrors = extractAutobuildErrors(stderr); - let message; - if (fatalErrors) { - message = `Encountered a fatal error while running "${prettyCommand}". Exit code was ${exitCode} and error was: ${ensureEndsInPeriod( - fatalErrors.trim() - )} See the logs for more details.`; - } else if (autobuildErrors) { - message = `We were unable to automatically build your code. Please provide manual build steps. See ${"https://docs.github.com/en/code-security/code-scanning/troubleshooting-code-scanning/automatic-build-failed" /* AUTOMATIC_BUILD_FAILED */} for more information. Encountered the following error: ${autobuildErrors}`; - } else { - const lastLine = ensureEndsInPeriod( - stderr.trim().split("\n").pop()?.trim() || "n/a" - ); - message = `Encountered a fatal error while running "${prettyCommand}". 
Exit code was ${exitCode} and last log line was: ${lastLine} See the logs for more details.`; - } - super(message); - this.exitCode = exitCode; - this.stderr = stderr; +async function getRef() { + const refInput = getOptionalInput("ref"); + const shaInput = getOptionalInput("sha"); + const checkoutPath = getOptionalInput("checkout_path") || getOptionalInput("source-root") || getRequiredEnvParam("GITHUB_WORKSPACE"); + const hasRefInput = !!refInput; + const hasShaInput = !!shaInput; + if ((hasRefInput || hasShaInput) && !(hasRefInput && hasShaInput)) { + throw new ConfigurationError( + "Both 'ref' and 'sha' are required if one of them is provided." + ); } -}; -function extractFatalErrors(error3) { - const fatalErrorRegex = /.*fatal (internal )?error occurr?ed(. Details)?:/gi; - let fatalErrors = []; - let lastFatalErrorIndex; - let match; - while ((match = fatalErrorRegex.exec(error3)) !== null) { - if (lastFatalErrorIndex !== void 0) { - fatalErrors.push(error3.slice(lastFatalErrorIndex, match.index).trim()); - } - lastFatalErrorIndex = match.index; + const ref = refInput || getRefFromEnv(); + const sha = shaInput || getRequiredEnvParam("GITHUB_SHA"); + if (refInput) { + return refInput; + } + const pull_ref_regex = /refs\/pull\/(\d+)\/merge/; + if (!pull_ref_regex.test(ref)) { + return ref; + } + const head = await getCommitOid(checkoutPath, "HEAD"); + const hasChangedRef = sha !== head && await getCommitOid( + checkoutPath, + ref.replace(/^refs\/pull\//, "refs/remotes/pull/") + ) !== head; + if (hasChangedRef) { + const newRef = ref.replace(pull_ref_regex, "refs/pull/$1/head"); + core8.debug( + `No longer on merge commit, rewriting ref from ${ref} to ${newRef}.` + ); + return newRef; + } else { + return ref; + } +} +function removeRefsHeadsPrefix(ref) { + return ref.startsWith("refs/heads/") ? 
ref.slice("refs/heads/".length) : ref; +} +async function isAnalyzingDefaultBranch() { + if (process.env.CODE_SCANNING_IS_ANALYZING_DEFAULT_BRANCH === "true") { + return true; + } + let currentRef = await getRef(); + currentRef = removeRefsHeadsPrefix(currentRef); + const event = getWorkflowEvent(); + let defaultBranch = event?.repository?.default_branch; + if (getWorkflowEventName() === "schedule") { + defaultBranch = removeRefsHeadsPrefix(getRefFromEnv()); + } + return currentRef === defaultBranch; +} + +// src/overlay-database-utils.ts +var CODEQL_OVERLAY_MINIMUM_VERSION = "2.23.8"; +var OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_MB = 7500; +var OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_BYTES = OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_MB * 1e6; +async function writeBaseDatabaseOidsFile(config, sourceRoot) { + const gitFileOids = await getFileOidsUnderPath(sourceRoot); + const gitFileOidsJson = JSON.stringify(gitFileOids); + const baseDatabaseOidsFilePath = getBaseDatabaseOidsFilePath(config); + await fs3.promises.writeFile(baseDatabaseOidsFilePath, gitFileOidsJson); +} +async function readBaseDatabaseOidsFile(config, logger) { + const baseDatabaseOidsFilePath = getBaseDatabaseOidsFilePath(config); + try { + const contents = await fs3.promises.readFile( + baseDatabaseOidsFilePath, + "utf-8" + ); + return JSON.parse(contents); + } catch (e) { + logger.error( + `Failed to read overlay-base file OIDs from ${baseDatabaseOidsFilePath}: ${e.message || e}` + ); + throw e; } - if (lastFatalErrorIndex !== void 0) { - const lastError = error3.slice(lastFatalErrorIndex).trim(); - if (fatalErrors.length === 0) { - return lastError; +} +async function writeOverlayChangesFile(config, sourceRoot, logger) { + const baseFileOids = await readBaseDatabaseOidsFile(config, logger); + const overlayFileOids = await getFileOidsUnderPath(sourceRoot); + const changedFiles = computeChangedFiles(baseFileOids, overlayFileOids); + logger.info( + `Found ${changedFiles.length} changed file(s) under ${sourceRoot}.` + ); + const changedFilesJson = JSON.stringify({ changes: changedFiles }); + const overlayChangesFile = path4.join( + getTemporaryDirectory(), + "overlay-changes.json" + ); + logger.debug( + `Writing overlay changed files to ${overlayChangesFile}: ${changedFilesJson}` + ); + await fs3.promises.writeFile(overlayChangesFile, changedFilesJson); + return overlayChangesFile; +} +function computeChangedFiles(baseFileOids, overlayFileOids) { + const changes = []; + for (const [file, oid] of Object.entries(overlayFileOids)) { + if (!(file in baseFileOids) || baseFileOids[file] !== oid) { + changes.push(file); } - const isOneLiner = !fatalErrors.some((e) => e.includes("\n")); - if (isOneLiner) { - fatalErrors = fatalErrors.map(ensureEndsInPeriod); + } + for (const file of Object.keys(baseFileOids)) { + if (!(file in overlayFileOids)) { + changes.push(file); } - return [ - ensureEndsInPeriod(lastError), - "Context:", - ...fatalErrors.reverse() - ].join(isOneLiner ? 
" " : "\n"); } - return void 0; + return changes; } -function extractAutobuildErrors(error3) { - const pattern = /.*\[autobuild\] \[ERROR\] (.*)/gi; - let errorLines = [...error3.matchAll(pattern)].map((match) => match[1]); - if (errorLines.length > 10) { - errorLines = errorLines.slice(0, 10); - errorLines.push("(truncated)"); - } - return errorLines.join("\n") || void 0; + +// src/tools-features.ts +var semver4 = __toESM(require_semver2()); +function isSupportedToolsFeature(versionInfo, feature) { + return !!versionInfo.features && versionInfo.features[feature]; } -var cliErrorsConfig = { - ["AutobuildError" /* AutobuildError */]: { - cliErrorMessageCandidates: [ - new RegExp("We were unable to automatically build your code") - ] + +// src/feature-flags.ts +var DEFAULT_VERSION_FEATURE_FLAG_PREFIX = "default_codeql_version_"; +var DEFAULT_VERSION_FEATURE_FLAG_SUFFIX = "_enabled"; +var CODEQL_VERSION_ZSTD_BUNDLE = "2.19.0"; +var featureConfig = { + ["allow_toolcache_input" /* AllowToolcacheInput */]: { + defaultValue: false, + envVar: "CODEQL_ACTION_ALLOW_TOOLCACHE_INPUT", + minimumVersion: void 0 }, - ["CouldNotCreateTempDir" /* CouldNotCreateTempDir */]: { - cliErrorMessageCandidates: [new RegExp("Could not create temp directory")] + ["cleanup_trap_caches" /* CleanupTrapCaches */]: { + defaultValue: false, + envVar: "CODEQL_ACTION_CLEANUP_TRAP_CACHES", + minimumVersion: void 0 }, - ["ExternalRepositoryCloneFailed" /* ExternalRepositoryCloneFailed */]: { - cliErrorMessageCandidates: [ - new RegExp("Failed to clone external Git repository") - ] + ["cpp_dependency_installation_enabled" /* CppDependencyInstallation */]: { + defaultValue: false, + envVar: "CODEQL_EXTRACTOR_CPP_AUTOINSTALL_DEPENDENCIES", + legacyApi: true, + minimumVersion: "2.15.0" }, - ["GradleBuildFailed" /* GradleBuildFailed */]: { - cliErrorMessageCandidates: [ - new RegExp("\\[autobuild\\] FAILURE: Build failed with an exception.") - ] + ["csharp_cache_bmn" /* CsharpCacheBuildModeNone */]: { + defaultValue: false, + envVar: "CODEQL_ACTION_CSHARP_CACHE_BMN", + minimumVersion: void 0 }, - // Version of CodeQL CLI is incompatible with this version of the CodeQL Action - ["IncompatibleWithActionVersion" /* IncompatibleWithActionVersion */]: { - cliErrorMessageCandidates: [ - new RegExp("is not compatible with this CodeQL CLI") - ] + ["csharp_new_cache_key" /* CsharpNewCacheKey */]: { + defaultValue: false, + envVar: "CODEQL_ACTION_CSHARP_NEW_CACHE_KEY", + minimumVersion: void 0 }, - ["InitCalledTwice" /* InitCalledTwice */]: { - cliErrorMessageCandidates: [ - new RegExp( - "Refusing to create databases .* but could not process any of it" - ) - ], - additionalErrorMessageToAppend: `Is the "init" action called twice in the same job?` + ["diff_informed_queries" /* DiffInformedQueries */]: { + defaultValue: true, + envVar: "CODEQL_ACTION_DIFF_INFORMED_QUERIES", + minimumVersion: "2.21.0" }, - ["InvalidConfigFile" /* InvalidConfigFile */]: { - cliErrorMessageCandidates: [ - new RegExp("Config file .* is not valid"), - new RegExp("The supplied config file is empty") - ] + ["disable_csharp_buildless" /* DisableCsharpBuildless */]: { + defaultValue: false, + envVar: "CODEQL_ACTION_DISABLE_CSHARP_BUILDLESS", + minimumVersion: void 0 }, - ["InvalidExternalRepoSpecifier" /* InvalidExternalRepoSpecifier */]: { - cliErrorMessageCandidates: [ - new RegExp("Specifier for external repository is invalid") - ] + ["disable_java_buildless_enabled" /* DisableJavaBuildlessEnabled */]: { + defaultValue: false, + envVar: 
"CODEQL_ACTION_DISABLE_JAVA_BUILDLESS", + legacyApi: true, + minimumVersion: void 0 }, - // Expected source location for database creation does not exist - ["InvalidSourceRoot" /* InvalidSourceRoot */]: { - cliErrorMessageCandidates: [new RegExp("Invalid source root")] + ["disable_kotlin_analysis_enabled" /* DisableKotlinAnalysisEnabled */]: { + defaultValue: false, + envVar: "CODEQL_DISABLE_KOTLIN_ANALYSIS", + legacyApi: true, + minimumVersion: void 0 }, - ["MavenBuildFailed" /* MavenBuildFailed */]: { - cliErrorMessageCandidates: [ - new RegExp("\\[autobuild\\] \\[ERROR\\] Failed to execute goal") - ] + ["export_diagnostics_enabled" /* ExportDiagnosticsEnabled */]: { + defaultValue: true, + envVar: "CODEQL_ACTION_EXPORT_DIAGNOSTICS", + legacyApi: true, + minimumVersion: void 0 }, - ["NoBuildCommandAutodetected" /* NoBuildCommandAutodetected */]: { - cliErrorMessageCandidates: [ - new RegExp("Could not auto-detect a suitable build method") - ] + ["force_nightly" /* ForceNightly */]: { + defaultValue: false, + envVar: "CODEQL_ACTION_FORCE_NIGHTLY", + minimumVersion: void 0 }, - ["NoBuildMethodAutodetected" /* NoBuildMethodAutodetected */]: { - cliErrorMessageCandidates: [ - new RegExp( - "Could not detect a suitable build command for the source checkout" - ) - ] + ["ignore_generated_files" /* IgnoreGeneratedFiles */]: { + defaultValue: false, + envVar: "CODEQL_ACTION_IGNORE_GENERATED_FILES", + minimumVersion: void 0 }, - // Usually when a manual build script has failed, or if an autodetected language - // was unintended to have CodeQL analysis run on it. - ["NoSourceCodeSeen" /* NoSourceCodeSeen */]: { - exitCode: 32, - cliErrorMessageCandidates: [ - new RegExp( - "CodeQL detected code written in .* but could not process any of it" - ), - new RegExp( - "CodeQL did not detect any code written in languages supported by CodeQL" - ) - ] + ["improved_proxy_certificates" /* ImprovedProxyCertificates */]: { + defaultValue: false, + envVar: "CODEQL_ACTION_IMPROVED_PROXY_CERTIFICATES", + minimumVersion: void 0 }, - ["NoSupportedBuildCommandSucceeded" /* NoSupportedBuildCommandSucceeded */]: { - cliErrorMessageCandidates: [ - new RegExp("No supported build command succeeded") - ] + ["java_network_debugging" /* JavaNetworkDebugging */]: { + defaultValue: false, + envVar: "CODEQL_ACTION_JAVA_NETWORK_DEBUGGING", + minimumVersion: void 0 }, - ["NoSupportedBuildSystemDetected" /* NoSupportedBuildSystemDetected */]: { - cliErrorMessageCandidates: [ - new RegExp("No supported build system detected") - ] + ["overlay_analysis" /* OverlayAnalysis */]: { + defaultValue: false, + envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS", + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION }, - ["OutOfMemoryOrDisk" /* OutOfMemoryOrDisk */]: { - cliErrorMessageCandidates: [ - new RegExp("CodeQL is out of memory."), - new RegExp("out of disk"), - new RegExp("No space left on device") - ], - additionalErrorMessageToAppend: "For more information, see https://gh.io/troubleshooting-code-scanning/out-of-disk-or-memory" + ["overlay_analysis_actions" /* OverlayAnalysisActions */]: { + defaultValue: false, + envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_ACTIONS", + minimumVersion: void 0 }, - ["PackCannotBeFound" /* PackCannotBeFound */]: { - cliErrorMessageCandidates: [ - new RegExp( - "Query pack .* cannot be found\\. Check the spelling of the pack\\." - ), - new RegExp( - "is not a .ql file, .qls file, a directory, or a query pack specification." 
- ) - ] + ["overlay_analysis_code_scanning_actions" /* OverlayAnalysisCodeScanningActions */]: { + defaultValue: false, + envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_ACTIONS", + minimumVersion: void 0 }, - ["PackMissingAuth" /* PackMissingAuth */]: { - cliErrorMessageCandidates: [ - new RegExp("GitHub Container registry .* 403 Forbidden"), - new RegExp( - "Do you need to specify a token to authenticate to the registry?" - ) - ] + ["overlay_analysis_code_scanning_cpp" /* OverlayAnalysisCodeScanningCpp */]: { + defaultValue: false, + envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_CPP", + minimumVersion: void 0 }, - ["SwiftBuildFailed" /* SwiftBuildFailed */]: { - cliErrorMessageCandidates: [ - new RegExp( - "\\[autobuilder/build\\] \\[build-command-failed\\] `autobuild` failed to run the build command" - ) - ] + ["overlay_analysis_code_scanning_csharp" /* OverlayAnalysisCodeScanningCsharp */]: { + defaultValue: false, + envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_CSHARP", + minimumVersion: void 0 }, - ["UnsupportedBuildMode" /* UnsupportedBuildMode */]: { - cliErrorMessageCandidates: [ - new RegExp( - "does not support the .* build mode. Please try using one of the following build modes instead" - ) - ] + ["overlay_analysis_code_scanning_go" /* OverlayAnalysisCodeScanningGo */]: { + defaultValue: false, + envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_GO", + minimumVersion: void 0 }, - ["NotFoundInRegistry" /* NotFoundInRegistry */]: { - cliErrorMessageCandidates: [ - new RegExp("'.*' not found in the registry '.*'") - ] + ["overlay_analysis_code_scanning_java" /* OverlayAnalysisCodeScanningJava */]: { + defaultValue: false, + envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_JAVA", + minimumVersion: void 0 + }, + ["overlay_analysis_code_scanning_javascript" /* OverlayAnalysisCodeScanningJavascript */]: { + defaultValue: false, + envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_JAVASCRIPT", + minimumVersion: void 0 + }, + ["overlay_analysis_code_scanning_python" /* OverlayAnalysisCodeScanningPython */]: { + defaultValue: false, + envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_PYTHON", + minimumVersion: void 0 + }, + ["overlay_analysis_code_scanning_ruby" /* OverlayAnalysisCodeScanningRuby */]: { + defaultValue: false, + envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_RUBY", + minimumVersion: void 0 + }, + ["overlay_analysis_code_scanning_rust" /* OverlayAnalysisCodeScanningRust */]: { + defaultValue: false, + envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_RUST", + minimumVersion: void 0 + }, + ["overlay_analysis_code_scanning_swift" /* OverlayAnalysisCodeScanningSwift */]: { + defaultValue: false, + envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_SWIFT", + minimumVersion: void 0 + }, + ["overlay_analysis_cpp" /* OverlayAnalysisCpp */]: { + defaultValue: false, + envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CPP", + minimumVersion: void 0 + }, + ["overlay_analysis_csharp" /* OverlayAnalysisCsharp */]: { + defaultValue: false, + envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CSHARP", + minimumVersion: void 0 + }, + ["overlay_analysis_go" /* OverlayAnalysisGo */]: { + defaultValue: false, + envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_GO", + minimumVersion: void 0 + }, + ["overlay_analysis_java" /* OverlayAnalysisJava */]: { + defaultValue: false, + envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_JAVA", + minimumVersion: void 0 + }, + ["overlay_analysis_javascript" /* OverlayAnalysisJavascript */]: { + defaultValue: false, + envVar: 
"CODEQL_ACTION_OVERLAY_ANALYSIS_JAVASCRIPT", + minimumVersion: void 0 + }, + ["overlay_analysis_python" /* OverlayAnalysisPython */]: { + defaultValue: false, + envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_PYTHON", + minimumVersion: void 0 + }, + ["overlay_analysis_ruby" /* OverlayAnalysisRuby */]: { + defaultValue: false, + envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_RUBY", + minimumVersion: void 0 + }, + ["overlay_analysis_rust" /* OverlayAnalysisRust */]: { + defaultValue: false, + envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_RUST", + minimumVersion: void 0 + }, + ["overlay_analysis_skip_resource_checks" /* OverlayAnalysisSkipResourceChecks */]: { + defaultValue: false, + envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_SKIP_RESOURCE_CHECKS", + minimumVersion: void 0 + }, + ["overlay_analysis_swift" /* OverlayAnalysisSwift */]: { + defaultValue: false, + envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_SWIFT", + minimumVersion: void 0 + }, + ["python_default_is_to_not_extract_stdlib" /* PythonDefaultIsToNotExtractStdlib */]: { + defaultValue: false, + envVar: "CODEQL_ACTION_DISABLE_PYTHON_STANDARD_LIBRARY_EXTRACTION", + minimumVersion: void 0, + toolsFeature: "pythonDefaultIsToNotExtractStdlib" /* PythonDefaultIsToNotExtractStdlib */ + }, + ["qa_telemetry_enabled" /* QaTelemetryEnabled */]: { + defaultValue: false, + envVar: "CODEQL_ACTION_QA_TELEMETRY", + legacyApi: true, + minimumVersion: void 0 + }, + ["skip_file_coverage_on_prs" /* SkipFileCoverageOnPrs */]: { + defaultValue: false, + envVar: "CODEQL_ACTION_SKIP_FILE_COVERAGE_ON_PRS", + // For testing, this is not behind a CLI version check yet. However + // before rolling this out externally, we should set a minimum version here + // since current versions of the CodeQL CLI will log if baseline information + // cannot be found when interpreting results. + minimumVersion: void 0 + }, + ["start_proxy_connection_checks" /* StartProxyConnectionChecks */]: { + defaultValue: false, + envVar: "CODEQL_ACTION_START_PROXY_CONNECTION_CHECKS", + minimumVersion: void 0 + }, + ["upload_overlay_db_to_api" /* UploadOverlayDbToApi */]: { + defaultValue: false, + envVar: "CODEQL_ACTION_UPLOAD_OVERLAY_DB_TO_API", + minimumVersion: void 0, + toolsFeature: "bundleSupportsOverlay" /* BundleSupportsOverlay */ + }, + ["use_repository_properties_v2" /* UseRepositoryProperties */]: { + defaultValue: false, + envVar: "CODEQL_ACTION_USE_REPOSITORY_PROPERTIES", + minimumVersion: void 0 + }, + ["validate_db_config" /* ValidateDbConfig */]: { + defaultValue: false, + envVar: "CODEQL_ACTION_VALIDATE_DB_CONFIG", + minimumVersion: void 0 + } +}; +var FEATURE_FLAGS_FILE_NAME = "cached-feature-flags.json"; +var OfflineFeatures = class { + constructor(logger) { + this.logger = logger; + } + async getDefaultCliVersion(_variant) { + return { + cliVersion, + tagName: bundleVersion + }; + } + /** + * Gets the `FeatureConfig` for `feature`. + */ + getFeatureConfig(feature) { + return featureConfig[feature]; + } + /** + * Determines whether `feature` is enabled without consulting the GitHub API. + * + * @param feature The feature to check. + * @param codeql An optional CodeQL object. If provided, and a `minimumVersion` is specified for the + * feature, the version of the CodeQL CLI will be checked against the minimum version. + * If the version is less than the minimum version, the feature will be considered + * disabled. If not provided, and a `minimumVersion` is specified for the feature, then + * this function will throw. + * @returns true if the feature is enabled, false otherwise. 
+ * + * @throws if a `minimumVersion` is specified for the feature, and `codeql` is not provided. + */ + async getValue(feature, codeql2) { + const offlineValue = await this.getOfflineValue(feature, codeql2); + if (offlineValue !== void 0) { + return offlineValue; + } + return this.getDefaultValue(feature); + } + /** + * Determines whether `feature` is enabled using the CLI and environment variables. + */ + async getOfflineValue(feature, codeql2) { + const config = this.getFeatureConfig(feature); + if (!codeql2 && config.minimumVersion) { + throw new Error( + `Internal error: A minimum version is specified for feature ${feature}, but no instance of CodeQL was provided.` + ); + } + if (!codeql2 && config.toolsFeature) { + throw new Error( + `Internal error: A required tools feature is specified for feature ${feature}, but no instance of CodeQL was provided.` + ); + } + const envVar = (process.env[config.envVar] || "").toLocaleLowerCase(); + if (envVar === "false") { + this.logger.debug( + `Feature ${feature} is disabled via the environment variable ${config.envVar}.` + ); + return false; + } + const minimumVersion = config.minimumVersion; + if (codeql2 && minimumVersion) { + if (!await codeQlVersionAtLeast(codeql2, minimumVersion)) { + this.logger.debug( + `Feature ${feature} is disabled because the CodeQL CLI version is older than the minimum version ${minimumVersion}.` + ); + return false; + } else { + this.logger.debug( + `CodeQL CLI version ${(await codeql2.getVersion()).version} is newer than the minimum version ${minimumVersion} for feature ${feature}.` + ); + } + } + const toolsFeature = config.toolsFeature; + if (codeql2 && toolsFeature) { + if (!await codeql2.supportsFeature(toolsFeature)) { + this.logger.debug( + `Feature ${feature} is disabled because the CodeQL CLI version does not support the required tools feature ${toolsFeature}.` + ); + return false; + } else { + this.logger.debug( + `CodeQL CLI version ${(await codeql2.getVersion()).version} supports the required tools feature ${toolsFeature} for feature ${feature}.` + ); + } + } + if (envVar === "true") { + this.logger.debug( + `Feature ${feature} is enabled via the environment variable ${config.envVar}.` + ); + return true; + } + return void 0; + } + /** Gets the default value of `feature`. */ + async getDefaultValue(feature) { + const config = this.getFeatureConfig(feature); + const defaultValue = config.defaultValue; + this.logger.debug( + `Feature ${feature} is ${defaultValue ? "enabled" : "disabled"} due to its default value.` + ); + return defaultValue; + } +}; +var Features = class extends OfflineFeatures { + gitHubFeatureFlags; + constructor(repositoryNwo, tempDir, logger) { + super(logger); + this.gitHubFeatureFlags = new GitHubFeatureFlags( + repositoryNwo, + path5.join(tempDir, FEATURE_FLAGS_FILE_NAME), + logger + ); + } + async getDefaultCliVersion(variant) { + if (supportsFeatureFlags(variant)) { + return await this.gitHubFeatureFlags.getDefaultCliVersionFromFlags(); + } + return super.getDefaultCliVersion(variant); + } + /** + * + * @param feature The feature to check. + * @param codeql An optional CodeQL object. If provided, and a `minimumVersion` is specified for the + * feature, the version of the CodeQL CLI will be checked against the minimum version. + * If the version is less than the minimum version, the feature will be considered + * disabled. If not provided, and a `minimumVersion` is specified for the feature, then + * this function will throw. 
+ * @returns true if the feature is enabled, false otherwise. + * + * @throws if a `minimumVersion` is specified for the feature, and `codeql` is not provided. + */ + async getValue(feature, codeql2) { + const offlineValue = await this.getOfflineValue(feature, codeql2); + if (offlineValue !== void 0) { + return offlineValue; + } + const apiValue = await this.gitHubFeatureFlags.getValue(feature); + if (apiValue !== void 0) { + this.logger.debug( + `Feature ${feature} is ${apiValue ? "enabled" : "disabled"} via the GitHub API.` + ); + return apiValue; + } + return this.getDefaultValue(feature); } }; -function getCliConfigCategoryIfExists(cliError) { - for (const [category, configuration] of Object.entries(cliErrorsConfig)) { - if (cliError.exitCode !== void 0 && configuration.exitCode !== void 0 && cliError.exitCode === configuration.exitCode) { - return category; +var GitHubFeatureFlags = class { + constructor(repositoryNwo, featureFlagsFile, logger) { + this.repositoryNwo = repositoryNwo; + this.featureFlagsFile = featureFlagsFile; + this.logger = logger; + this.hasAccessedRemoteFeatureFlags = false; + } + cachedApiResponse; + // We cache whether the feature flags were accessed or not in order to accurately report whether flags were + // incorrectly configured vs. inaccessible in our telemetry. + hasAccessedRemoteFeatureFlags; + getCliVersionFromFeatureFlag(f) { + if (!f.startsWith(DEFAULT_VERSION_FEATURE_FLAG_PREFIX) || !f.endsWith(DEFAULT_VERSION_FEATURE_FLAG_SUFFIX)) { + return void 0; } - for (const e of configuration.cliErrorMessageCandidates) { - if (cliError.message.match(e) || cliError.stderr.match(e)) { - return category; + const version = f.substring( + DEFAULT_VERSION_FEATURE_FLAG_PREFIX.length, + f.length - DEFAULT_VERSION_FEATURE_FLAG_SUFFIX.length + ).replace(/_/g, "."); + if (!semver5.valid(version)) { + this.logger.warning( + `Ignoring feature flag ${f} as it does not specify a valid CodeQL version.` + ); + return void 0; + } + return version; + } + async getDefaultCliVersionFromFlags() { + const response = await this.getAllFeatures(); + const enabledFeatureFlagCliVersions = Object.entries(response).map( + ([f, isEnabled]) => isEnabled ? this.getCliVersionFromFeatureFlag(f) : void 0 + ).filter((f) => f !== void 0); + if (enabledFeatureFlagCliVersions.length === 0) { + this.logger.warning( + `Feature flags do not specify a default CLI version. Falling back to the CLI version shipped with the Action. This is ${cliVersion}.` + ); + const result = { + cliVersion, + tagName: bundleVersion + }; + if (this.hasAccessedRemoteFeatureFlags) { + result.toolsFeatureFlagsValid = false; } + return result; } + const maxCliVersion = enabledFeatureFlagCliVersions.reduce( + (maxVersion, currentVersion) => currentVersion > maxVersion ? 
currentVersion : maxVersion, + enabledFeatureFlagCliVersions[0] + ); + this.logger.debug( + `Derived default CLI version of ${maxCliVersion} from feature flags.` + ); + return { + cliVersion: maxCliVersion, + tagName: `codeql-bundle-v${maxCliVersion}`, + toolsFeatureFlagsValid: true + }; } - return void 0; + async getValue(feature) { + const response = await this.getAllFeatures(); + if (response === void 0) { + this.logger.debug(`No feature flags API response for ${feature}.`); + return void 0; + } + const features = response[feature]; + if (features === void 0) { + this.logger.debug(`Feature '${feature}' undefined in API response.`); + return void 0; + } + return !!features; + } + async getAllFeatures() { + if (this.cachedApiResponse !== void 0) { + return this.cachedApiResponse; + } + const fileFlags = await this.readLocalFlags(); + if (fileFlags !== void 0) { + this.cachedApiResponse = fileFlags; + return fileFlags; + } + let remoteFlags = await this.loadApiResponse(); + if (remoteFlags === void 0) { + remoteFlags = {}; + } + this.cachedApiResponse = remoteFlags; + await this.writeLocalFlags(remoteFlags); + return remoteFlags; + } + async readLocalFlags() { + try { + if (fs4.existsSync(this.featureFlagsFile)) { + this.logger.debug( + `Loading feature flags from ${this.featureFlagsFile}` + ); + return JSON.parse( + fs4.readFileSync(this.featureFlagsFile, "utf8") + ); + } + } catch (e) { + this.logger.warning( + `Error reading cached feature flags file ${this.featureFlagsFile}: ${e}. Requesting from GitHub instead.` + ); + } + return void 0; + } + async writeLocalFlags(flags) { + try { + this.logger.debug(`Writing feature flags to ${this.featureFlagsFile}`); + fs4.writeFileSync(this.featureFlagsFile, JSON.stringify(flags)); + } catch (e) { + this.logger.warning( + `Error writing cached feature flags file ${this.featureFlagsFile}: ${e}.` + ); + } + } + async loadApiResponse() { + try { + const featuresToRequest = Object.entries(featureConfig).filter( + ([, config]) => !config.legacyApi + ).map(([f]) => f); + const FEATURES_PER_REQUEST = 25; + const featureChunks = []; + while (featuresToRequest.length > 0) { + featureChunks.push(featuresToRequest.splice(0, FEATURES_PER_REQUEST)); + } + let remoteFlags = {}; + for (const chunk of featureChunks) { + const response = await getApiClient().request( + "GET /repos/:owner/:repo/code-scanning/codeql-action/features", + { + owner: this.repositoryNwo.owner, + repo: this.repositoryNwo.repo, + features: chunk.join(",") + } + ); + const chunkFlags = response.data; + remoteFlags = { ...remoteFlags, ...chunkFlags }; + } + this.logger.debug( + "Loaded the following default values for the feature flags from the CodeQL Action API:" + ); + for (const [feature, value] of Object.entries(remoteFlags).sort( + ([nameA], [nameB]) => nameA.localeCompare(nameB) + )) { + this.logger.debug(` ${feature}: ${value}`); + } + this.hasAccessedRemoteFeatureFlags = true; + return remoteFlags; + } catch (e) { + const httpError = asHTTPError(e); + if (httpError?.status === 403) { + this.logger.warning( + `This run of the CodeQL Action does not have permission to access the CodeQL Action API endpoints. As a result, it will not be opted into any experimental features. This could be because the Action is running on a pull request from a fork. If not, please ensure the workflow has at least the 'security-events: read' permission. 
Details: ${httpError.message}` + ); + this.hasAccessedRemoteFeatureFlags = false; + return {}; + } else { + throw new Error( + `Encountered an error while trying to determine feature enablement: ${e}` + ); + } + } + } +}; +function supportsFeatureFlags(githubVariant) { + return githubVariant === "GitHub.com" /* DOTCOM */ || githubVariant === "GitHub Enterprise Cloud with data residency" /* GHEC_DR */; } -function isUnsupportedPlatform() { - return !SUPPORTED_PLATFORMS.some( - ([platform, arch2]) => platform === process.platform && arch2 === process.arch - ); +function initFeatures(gitHubVersion, repositoryNwo, tempDir, logger) { + if (!supportsFeatureFlags(gitHubVersion.type)) { + logger.debug( + "Not running against github.com. Using default values for all features." + ); + return new OfflineFeatures(logger); + } else { + return new Features(repositoryNwo, tempDir, logger); + } } -function getUnsupportedPlatformError(cliError) { - return new ConfigurationError( - `The CodeQL CLI does not support the platform/architecture combination of ${process.platform}/${process.arch} (see ${"https://codeql.github.com/docs/codeql-overview/system-requirements/" /* SYSTEM_REQUIREMENTS */}). The underlying error was: ${cliError.message}` + +// src/diff-informed-analysis-utils.ts +function getDiffRangesJsonFilePath() { + return path6.join(getTemporaryDirectory(), "pr-diff-range.json"); +} +function readDiffRangesJsonFile(logger) { + const jsonFilePath = getDiffRangesJsonFilePath(); + if (!fs5.existsSync(jsonFilePath)) { + logger.debug(`Diff ranges JSON file does not exist at ${jsonFilePath}`); + return void 0; + } + const jsonContents = fs5.readFileSync(jsonFilePath, "utf8"); + logger.debug( + `Read pr-diff-range JSON file from ${jsonFilePath}: +${jsonContents}` ); + return JSON.parse(jsonContents); } -function wrapCliConfigurationError(cliError) { - if (isUnsupportedPlatform()) { - return getUnsupportedPlatformError(cliError); + +// src/trap-caching.ts +var actionsCache2 = __toESM(require_cache5()); + +// src/config-utils.ts +var OVERLAY_MINIMUM_AVAILABLE_DISK_SPACE_MB = 2e4; +var OVERLAY_MINIMUM_AVAILABLE_DISK_SPACE_BYTES = OVERLAY_MINIMUM_AVAILABLE_DISK_SPACE_MB * 1e6; +var OVERLAY_MINIMUM_MEMORY_MB = 5 * 1024; +var OVERLAY_ANALYSIS_FEATURES = { + actions: "overlay_analysis_actions" /* OverlayAnalysisActions */, + cpp: "overlay_analysis_cpp" /* OverlayAnalysisCpp */, + csharp: "overlay_analysis_csharp" /* OverlayAnalysisCsharp */, + go: "overlay_analysis_go" /* OverlayAnalysisGo */, + java: "overlay_analysis_java" /* OverlayAnalysisJava */, + javascript: "overlay_analysis_javascript" /* OverlayAnalysisJavascript */, + python: "overlay_analysis_python" /* OverlayAnalysisPython */, + ruby: "overlay_analysis_ruby" /* OverlayAnalysisRuby */, + rust: "overlay_analysis_rust" /* OverlayAnalysisRust */, + swift: "overlay_analysis_swift" /* OverlayAnalysisSwift */ +}; +var OVERLAY_ANALYSIS_CODE_SCANNING_FEATURES = { + actions: "overlay_analysis_code_scanning_actions" /* OverlayAnalysisCodeScanningActions */, + cpp: "overlay_analysis_code_scanning_cpp" /* OverlayAnalysisCodeScanningCpp */, + csharp: "overlay_analysis_code_scanning_csharp" /* OverlayAnalysisCodeScanningCsharp */, + go: "overlay_analysis_code_scanning_go" /* OverlayAnalysisCodeScanningGo */, + java: "overlay_analysis_code_scanning_java" /* OverlayAnalysisCodeScanningJava */, + javascript: "overlay_analysis_code_scanning_javascript" /* OverlayAnalysisCodeScanningJavascript */, + python: "overlay_analysis_code_scanning_python" /* 
OverlayAnalysisCodeScanningPython */, + ruby: "overlay_analysis_code_scanning_ruby" /* OverlayAnalysisCodeScanningRuby */, + rust: "overlay_analysis_code_scanning_rust" /* OverlayAnalysisCodeScanningRust */, + swift: "overlay_analysis_code_scanning_swift" /* OverlayAnalysisCodeScanningSwift */ +}; +function getPathToParsedConfigFile(tempDir) { + return path7.join(tempDir, "config"); +} +async function getConfig(tempDir, logger) { + const configFile = getPathToParsedConfigFile(tempDir); + if (!fs6.existsSync(configFile)) { + return void 0; } - const cliConfigErrorCategory = getCliConfigCategoryIfExists(cliError); - if (cliConfigErrorCategory === void 0) { - return cliError; + const configString = fs6.readFileSync(configFile, "utf8"); + logger.debug("Loaded config:"); + logger.debug(configString); + const config = JSON.parse(configString); + if (config.version === void 0) { + throw new ConfigurationError( + `Loaded configuration file, but it does not contain the expected 'version' field.` + ); } - let errorMessageBuilder = cliError.message; - const additionalErrorMessageToAppend = cliErrorsConfig[cliConfigErrorCategory].additionalErrorMessageToAppend; - if (additionalErrorMessageToAppend !== void 0) { - errorMessageBuilder = `${errorMessageBuilder} ${additionalErrorMessageToAppend}`; + if (config.version !== getActionVersion()) { + throw new ConfigurationError( + `Loaded a configuration file for version '${config.version}', but running version '${getActionVersion()}'` + ); } - return new ConfigurationError(errorMessageBuilder); + return config; +} +function appendExtraQueryExclusions(extraQueryExclusions, cliConfig) { + const augmentedConfig = cloneObject(cliConfig); + if (extraQueryExclusions.length === 0) { + return augmentedConfig; + } + augmentedConfig["query-filters"] = [ + // Ordering matters. If the first filter is an inclusion, it implicitly + // excludes all queries that are not included. If it is an exclusion, + // it implicitly includes all queries that are not excluded. So user + // filters (if any) should always be first to preserve intent. 
+ ...augmentedConfig["query-filters"] || [], + ...extraQueryExclusions + ]; + if (augmentedConfig["query-filters"]?.length === 0) { + delete augmentedConfig["query-filters"]; + } + return augmentedConfig; } // src/setup-codeql.ts @@ -108223,10 +108007,10 @@ function inferCompressionMethod(tarPath) { // src/tools-download.ts var fs8 = __toESM(require("fs")); -var os2 = __toESM(require("os")); +var os = __toESM(require("os")); var path8 = __toESM(require("path")); var import_perf_hooks = require("perf_hooks"); -var core10 = __toESM(require_core()); +var core9 = __toESM(require_core()); var import_http_client = __toESM(require_lib()); var toolcache2 = __toESM(require_tool_cache()); var import_follow_redirects = __toESM(require_follow_redirects()); @@ -108280,10 +108064,10 @@ async function downloadAndExtract(codeqlURL, compressionMethod, dest, authorizat }; } } catch (e) { - core10.warning( + core9.warning( `Failed to download and extract CodeQL bundle using streaming with error: ${getErrorMessage(e)}` ); - core10.warning(`Falling back to downloading the bundle before extracting.`); + core9.warning(`Falling back to downloading the bundle before extracting.`); await cleanUpPath(dest, "CodeQL bundle", logger); } const toolsDownloadStart = import_perf_hooks.performance.now(); @@ -108361,7 +108145,7 @@ function getToolcacheDirectory(version) { getRequiredEnvParam("RUNNER_TOOL_CACHE"), TOOLCACHE_TOOL_NAME, semver7.clean(version) || version, - os2.arch() || "" + os.arch() || "" ); } function writeToolcacheMarkerFile(extractedPath, logger) { @@ -108948,14 +108732,14 @@ function isReservedToolsValue(tools) { } // src/tracer-config.ts -async function shouldEnableIndirectTracing(codeql, config) { +async function shouldEnableIndirectTracing(codeql2, config) { if (config.buildMode === "none" /* None */) { return false; } if (config.buildMode === "autobuild" /* Autobuild */) { return false; } - return asyncSome(config.languages, (l) => codeql.isTracedLanguage(l)); + return asyncSome(config.languages, (l) => codeql2.isTracedLanguage(l)); } // src/codeql.ts @@ -109021,7 +108805,7 @@ async function getCodeQL(cmd) { return cachedCodeQL; } async function getCodeQLForCmd(cmd, checkVersion) { - const codeql = { + const codeql2 = { getPath() { return cmd; }, @@ -109064,7 +108848,7 @@ async function getCodeQLForCmd(cmd, checkVersion) { const extraArgs = config.languages.map( (language) => `--language=${language}` ); - if (await shouldEnableIndirectTracing(codeql, config)) { + if (await shouldEnableIndirectTracing(codeql2, config)) { extraArgs.push("--begin-tracing"); extraArgs.push(...await getTrapCachingExtractorConfigArgs(config)); extraArgs.push(`--trace-process-name=${processName}`); @@ -109458,20 +109242,20 @@ ${output}` await runCli(cmd, args); } }; - if (checkVersion && !await codeQlVersionAtLeast(codeql, CODEQL_MINIMUM_VERSION)) { + if (checkVersion && !await codeQlVersionAtLeast(codeql2, CODEQL_MINIMUM_VERSION)) { throw new ConfigurationError( - `Expected a CodeQL CLI with version at least ${CODEQL_MINIMUM_VERSION} but got version ${(await codeql.getVersion()).version}` + `Expected a CodeQL CLI with version at least ${CODEQL_MINIMUM_VERSION} but got version ${(await codeql2.getVersion()).version}` ); - } else if (checkVersion && process.env["CODEQL_ACTION_SUPPRESS_DEPRECATED_SOON_WARNING" /* SUPPRESS_DEPRECATED_SOON_WARNING */] !== "true" && !await codeQlVersionAtLeast(codeql, CODEQL_NEXT_MINIMUM_VERSION)) { - const result = await codeql.getVersion(); - core11.warning( + } else if (checkVersion && 
process.env["CODEQL_ACTION_SUPPRESS_DEPRECATED_SOON_WARNING" /* SUPPRESS_DEPRECATED_SOON_WARNING */] !== "true" && !await codeQlVersionAtLeast(codeql2, CODEQL_NEXT_MINIMUM_VERSION)) { + const result = await codeql2.getVersion(); + core10.warning( `CodeQL CLI version ${result.version} was discontinued on ${GHES_MOST_RECENT_DEPRECATION_DATE} alongside GitHub Enterprise Server ${GHES_VERSION_MOST_RECENTLY_DEPRECATED} and will not be supported by the next minor release of the CodeQL Action. Please update to CodeQL CLI version ${CODEQL_NEXT_MINIMUM_VERSION} or later. For instance, if you have specified a custom version of the CLI using the 'tools' input to the 'init' Action, you can remove this input to use the default version. Alternatively, if you want to continue using CodeQL CLI version ${result.version}, you can replace 'github/codeql-action/*@v${getActionVersion().split(".")[0]}' by 'github/codeql-action/*@v${getActionVersion()}' in your code scanning workflow to continue using this version of the CodeQL Action.` ); - core11.exportVariable("CODEQL_ACTION_SUPPRESS_DEPRECATED_SOON_WARNING" /* SUPPRESS_DEPRECATED_SOON_WARNING */, "true"); + core10.exportVariable("CODEQL_ACTION_SUPPRESS_DEPRECATED_SOON_WARNING" /* SUPPRESS_DEPRECATED_SOON_WARNING */, "true"); } - return codeql; + return codeql2; } function getExtraOptionsFromEnv(paths, { ignoringOptions } = {}) { const options = getExtraOptionsEnvParam(); @@ -109567,13 +109351,231 @@ function applyAutobuildAzurePipelinesTimeoutFix() { "-Dmaven.wagon.http.pool=false" ].join(" "); } -async function getJobRunUuidSarifOptions(codeql) { +async function getJobRunUuidSarifOptions(codeql2) { const jobRunUuid = process.env["JOB_RUN_UUID" /* JOB_RUN_UUID */]; - return jobRunUuid && await codeql.supportsFeature( + return jobRunUuid && await codeql2.supportsFeature( "databaseInterpretResultsSupportsSarifRunProperty" /* DatabaseInterpretResultsSupportsSarifRunProperty */ ) ? [`--sarif-run-property=jobRunUuid=${jobRunUuid}`] : []; } +// src/status-report.ts +var os2 = __toESM(require("os")); +var core11 = __toESM(require_core()); +function isFirstPartyAnalysis(actionName) { + if (actionName !== "upload-sarif" /* UploadSarif */) { + return true; + } + return process.env["CODEQL_ACTION_INIT_HAS_RUN" /* INIT_ACTION_HAS_RUN */] === "true"; +} +function isThirdPartyAnalysis(actionName) { + return !isFirstPartyAnalysis(actionName); +} +function getActionsStatus(error3, otherFailureCause) { + if (error3 || otherFailureCause) { + return error3 instanceof ConfigurationError ? "user-error" : "failure"; + } else { + return "success"; + } +} +function setJobStatusIfUnsuccessful(actionStatus) { + if (actionStatus === "user-error") { + core11.exportVariable( + "CODEQL_ACTION_JOB_STATUS" /* JOB_STATUS */, + process.env["CODEQL_ACTION_JOB_STATUS" /* JOB_STATUS */] ?? "JOB_STATUS_CONFIGURATION_ERROR" /* ConfigErrorStatus */ + ); + } else if (actionStatus === "failure" || actionStatus === "aborted") { + core11.exportVariable( + "CODEQL_ACTION_JOB_STATUS" /* JOB_STATUS */, + process.env["CODEQL_ACTION_JOB_STATUS" /* JOB_STATUS */] ?? 
"JOB_STATUS_FAILURE" /* FailureStatus */ + ); + } +} +async function createStatusReportBase(actionName, status, actionStartedAt, config, diskInfo, logger, cause, exception2) { + try { + const commitOid = getOptionalInput("sha") || process.env["GITHUB_SHA"] || ""; + const ref = await getRef(); + const jobRunUUID = process.env["JOB_RUN_UUID" /* JOB_RUN_UUID */] || ""; + const workflowRunID = getWorkflowRunID(); + const workflowRunAttempt = getWorkflowRunAttempt(); + const workflowName = process.env["GITHUB_WORKFLOW"] || ""; + const jobName = process.env["GITHUB_JOB"] || ""; + const analysis_key = await getAnalysisKey(); + let workflowStartedAt = process.env["CODEQL_WORKFLOW_STARTED_AT" /* WORKFLOW_STARTED_AT */]; + if (workflowStartedAt === void 0) { + workflowStartedAt = actionStartedAt.toISOString(); + core11.exportVariable("CODEQL_WORKFLOW_STARTED_AT" /* WORKFLOW_STARTED_AT */, workflowStartedAt); + } + const runnerOs = getRequiredEnvParam("RUNNER_OS"); + const codeQlCliVersion = getCachedCodeQlVersion(); + const actionRef = process.env["GITHUB_ACTION_REF"] || ""; + const testingEnvironment = getTestingEnvironment(); + if (testingEnvironment) { + core11.exportVariable("CODEQL_ACTION_TESTING_ENVIRONMENT" /* TESTING_ENVIRONMENT */, testingEnvironment); + } + const isSteadyStateDefaultSetupRun = process.env["CODE_SCANNING_IS_STEADY_STATE_DEFAULT_SETUP"] === "true"; + const statusReport = { + action_name: actionName, + action_oid: "unknown", + // TODO decide if it's possible to fill this in + action_ref: actionRef, + action_started_at: actionStartedAt.toISOString(), + action_version: getActionVersion(), + analysis_kinds: config?.analysisKinds?.join(","), + analysis_key, + build_mode: config?.buildMode, + commit_oid: commitOid, + first_party_analysis: isFirstPartyAnalysis(actionName), + job_name: jobName, + job_run_uuid: jobRunUUID, + ref, + runner_os: runnerOs, + started_at: workflowStartedAt, + status, + steady_state_default_setup: isSteadyStateDefaultSetupRun, + testing_environment: testingEnvironment || "", + workflow_name: workflowName, + workflow_run_attempt: workflowRunAttempt, + workflow_run_id: workflowRunID + }; + try { + statusReport.actions_event_name = getWorkflowEventName(); + } catch (e) { + logger.warning( + `Could not determine the workflow event name: ${getErrorMessage(e)}.` + ); + } + if (config) { + statusReport.languages = config.languages?.join(","); + } + if (diskInfo) { + statusReport.runner_available_disk_space_bytes = diskInfo.numAvailableBytes; + statusReport.runner_total_disk_space_bytes = diskInfo.numTotalBytes; + } + if (cause) { + statusReport.cause = cause; + } + if (exception2) { + statusReport.exception = exception2; + } + if (status === "success" || status === "failure" || status === "aborted" || status === "user-error") { + statusReport.completed_at = (/* @__PURE__ */ new Date()).toISOString(); + } + const matrix = getRequiredInput("matrix"); + if (matrix) { + statusReport.matrix_vars = matrix; + } + if ("RUNNER_ARCH" in process.env) { + statusReport.runner_arch = process.env["RUNNER_ARCH"]; + } + if (!(runnerOs === "Linux" && isSelfHostedRunner())) { + statusReport.runner_os_release = os2.release(); + } + if (codeQlCliVersion !== void 0) { + statusReport.codeql_version = codeQlCliVersion.version; + } + const imageVersion = process.env["ImageVersion"]; + if (imageVersion) { + statusReport.runner_image_version = imageVersion; + } + return statusReport; + } catch (e) { + logger.warning( + `Failed to gather information for telemetry: ${getErrorMessage(e)}. 
Will skip sending status report.` + ); + if (isInTestMode()) { + throw e; + } + return void 0; + } +} +var OUT_OF_DATE_MSG = "CodeQL Action is out-of-date. Please upgrade to the latest version of `codeql-action`."; +var INCOMPATIBLE_MSG = "CodeQL Action version is incompatible with the API endpoint. Please update to a compatible version of `codeql-action`."; +async function sendStatusReport(statusReport) { + setJobStatusIfUnsuccessful(statusReport.status); + const statusReportJSON = JSON.stringify(statusReport); + core11.debug(`Sending status report: ${statusReportJSON}`); + if (isInTestMode()) { + core11.debug("In test mode. Status reports are not uploaded."); + return; + } + const nwo = getRepositoryNwo(); + const client = getApiClient(); + try { + await client.request( + "PUT /repos/:owner/:repo/code-scanning/analysis/status", + { + owner: nwo.owner, + repo: nwo.repo, + data: statusReportJSON + } + ); + } catch (e) { + const httpError = asHTTPError(e); + if (httpError !== void 0) { + switch (httpError.status) { + case 403: + if (getWorkflowEventName() === "push" && process.env["GITHUB_ACTOR"] === "dependabot[bot]") { + core11.warning( + `Workflows triggered by Dependabot on the "push" event run with read-only access. Uploading CodeQL results requires write access. To use CodeQL with Dependabot, please ensure you are using the "pull_request" event for this workflow and avoid triggering on the "push" event for Dependabot branches. See ${"https://docs.github.com/en/code-security/code-scanning/creating-an-advanced-setup-for-code-scanning/customizing-your-advanced-setup-for-code-scanning#scanning-on-push" /* SCANNING_ON_PUSH */} for more information on how to configure these events.` + ); + } else { + core11.warning( + `This run of the CodeQL Action does not have permission to access the CodeQL Action API endpoints. This could be because the Action is running on a pull request from a fork. If not, please ensure the workflow has at least the 'security-events: read' permission. Details: ${httpError.message}` + ); + } + return; + case 404: + core11.warning(httpError.message); + return; + case 422: + if (getRequiredEnvParam("GITHUB_SERVER_URL") !== GITHUB_DOTCOM_URL) { + core11.debug(INCOMPATIBLE_MSG); + } else { + core11.debug(OUT_OF_DATE_MSG); + } + return; + } + } + core11.warning( + `An unexpected error occurred when sending a status report: ${getErrorMessage( + e + )}` + ); + } +} +async function sendUnhandledErrorStatusReport(actionName, actionStartedAt, error3, logger) { + try { + const statusReport = await createStatusReportBase( + actionName, + "failure", + actionStartedAt, + void 0, + void 0, + logger, + `Unhandled CodeQL Action error: ${getErrorMessage(error3)}`, + error3 instanceof Error ? 
error3.stack : void 0 + ); + if (statusReport !== void 0) { + await sendStatusReport(statusReport); + } + } catch (e) { + logger.warning( + `Failed to send the unhandled error status report: ${getErrorMessage(e)}.` + ); + if (isInTestMode()) { + throw e; + } + } +} + +// src/upload-lib.ts +var fs12 = __toESM(require("fs")); +var path12 = __toESM(require("path")); +var url = __toESM(require("url")); +var import_zlib = __toESM(require("zlib")); +var core12 = __toESM(require_core()); +var jsonschema2 = __toESM(require_lib2()); + // src/fingerprints.ts var fs11 = __toESM(require("fs")); var import_path2 = __toESM(require("path")); @@ -110702,7 +110704,7 @@ var io5 = __toESM(require_io()); async function initCodeQL(toolsInput, apiDetails, tempDir, variant, defaultCliVersion, features, logger) { logger.startGroup("Setup CodeQL tools"); const { - codeql, + codeql: codeql2, toolsDownloadStatusReport, toolsSource, toolsVersion, @@ -110717,10 +110719,10 @@ async function initCodeQL(toolsInput, apiDetails, tempDir, variant, defaultCliVe logger, true ); - await codeql.printVersion(); + await codeql2.printVersion(); logger.endGroup(); return { - codeql, + codeql: codeql2, toolsDownloadStatusReport, toolsSource, toolsVersion, @@ -110834,7 +110836,7 @@ async function minimalInitCodeQL(logger, gitHubVersion, features) { ); return initCodeQLResult.codeql; } -async function combineSarifFilesUsingCLI(sarifFiles, gitHubVersion, features, logger) { +async function combineSarifFilesUsingCLI(sarifFiles, gitHubVersion, _features, logger, getCodeQL2, tempDir) { logger.info("Combining SARIF files using the CodeQL CLI"); const sarifObjects = sarifFiles.map((sarifFile) => { return JSON.parse(fs12.readFileSync(sarifFile, "utf8")); @@ -110857,15 +110859,7 @@ async function combineSarifFilesUsingCLI(sarifFiles, gitHubVersion, features, lo } return combineSarifFiles(sarifFiles, logger); } - let codeQL; - let tempDir = getTemporaryDirectory(); - const config = await getConfig(tempDir, logger); - if (config !== void 0) { - codeQL = await getCodeQL(config.codeQLCmd); - tempDir = config.tempDir; - } else { - codeQL = await minimalInitCodeQL(logger, gitHubVersion, features); - } + const codeQL = await getCodeQL2(); const baseTempDir = path12.resolve(tempDir, "combined-sarif"); fs12.mkdirSync(baseTempDir, { recursive: true }); const outputDirectory = fs12.mkdtempSync(path12.resolve(baseTempDir, "output-")); @@ -111099,7 +111093,7 @@ function buildPayload(commitOid, ref, analysisKey, analysisName, zippedSarif, wo } return payloadObj; } -async function postProcessSarifFiles(logger, features, checkoutPath, sarifPaths, category, analysis) { +async function postProcessSarifFiles(logger, features, getCodeQL2, tempPath, checkoutPath, sarifPaths, category, analysis) { logger.info(`Post-processing sarif files: ${JSON.stringify(sarifPaths)}`); const gitHubVersion = await getGitHubVersion(); let sarif; @@ -111113,7 +111107,9 @@ async function postProcessSarifFiles(logger, features, checkoutPath, sarifPaths, sarifPaths, gitHubVersion, features, - logger + logger, + getCodeQL2, + tempPath ); } else { const sarifPath = sarifPaths[0]; @@ -111362,7 +111358,7 @@ function filterAlertsByDiffRange(logger, sarif) { } // src/upload-sarif.ts -async function postProcessAndUploadSarif(logger, features, uploadKind, checkoutPath, sarifPath, category, postProcessedOutputPath) { +async function postProcessAndUploadSarif(logger, tempPath, features, getCodeQL2, uploadKind, checkoutPath, sarifPath, category, postProcessedOutputPath) { const sarifGroups = 
await getGroupedSarifFilePaths( logger, sarifPath @@ -111375,6 +111371,8 @@ async function postProcessAndUploadSarif(logger, features, uploadKind, checkoutP const postProcessingResults = await postProcessSarifFiles( logger, features, + getCodeQL2, + tempPath, checkoutPath, sarifFiles, category, @@ -111416,6 +111414,20 @@ async function sendSuccessStatusReport(startedAt, uploadStats, logger) { await sendStatusReport(statusReport); } } +var codeql; +async function getOrInitCodeQL(logger, gitHubVersion, features, config) { + if (codeql !== void 0) return codeql; + if (config !== void 0) { + codeql = await getCodeQL(config.codeQLCmd); + } else { + codeql = await minimalInitCodeQL( + logger, + gitHubVersion, + features + ); + } + return codeql; +} async function run(startedAt) { const logger = getActionsLogger(); try { @@ -111444,9 +111456,16 @@ async function run(startedAt) { const sarifPath = getRequiredInput("sarif_file"); const checkoutPath = getRequiredInput("checkout_path"); const category = getOptionalInput("category"); + let tempDir = getTemporaryDirectory(); + const config = await getConfig(tempDir, logger); + if (config !== void 0) { + tempDir = config.tempDir; + } const uploadResults = await postProcessAndUploadSarif( logger, + tempDir, features, + () => getOrInitCodeQL(logger, gitHubVersion, features, config), "always", checkoutPath, sarifPath, diff --git a/src/analyze-action.ts b/src/analyze-action.ts index 4093e37869..e8267fc0a0 100644 --- a/src/analyze-action.ts +++ b/src/analyze-action.ts @@ -363,7 +363,9 @@ async function run(startedAt: Date) { uploadResults = await postProcessAndUploadSarif( logger, + config.tempDir, features, + async () => codeql, uploadKind, checkoutPath, outputDir, diff --git a/src/init-action-post-helper.test.ts b/src/init-action-post-helper.test.ts index b0afb9b8b9..da252db197 100644 --- a/src/init-action-post-helper.test.ts +++ b/src/init-action-post-helper.test.ts @@ -424,6 +424,8 @@ async function testFailedSarifUpload( } t.true( uploadFiles.calledOnceWith( + sinon.match.string, + codeqlObject, sinon.match.string, sinon.match.string, category, diff --git a/src/init-action-post-helper.ts b/src/init-action-post-helper.ts index 28637f98c6..f3c9e38e74 100644 --- a/src/init-action-post-helper.ts +++ b/src/init-action-post-helper.ts @@ -104,6 +104,8 @@ async function maybeUploadFailedSarif( logger.info(`Uploading failed SARIF file ${sarifFile}`); const uploadResult = await uploadLib.uploadFiles( + config.tempDir, + codeql, sarifFile, checkoutPath, category, diff --git a/src/upload-lib.ts b/src/upload-lib.ts index 2c58beb2b7..4c4fd24c93 100644 --- a/src/upload-lib.ts +++ b/src/upload-lib.ts @@ -11,8 +11,7 @@ import * as actionsUtil from "./actions-util"; import * as analyses from "./analyses"; import * as api from "./api-client"; import { getGitHubVersion, wrapApiConfigurationError } from "./api-client"; -import { getCodeQL, type CodeQL } from "./codeql"; -import { getConfig } from "./config-utils"; +import { type CodeQL } from "./codeql"; import { readDiffRangesJsonFile } from "./diff-informed-analysis-utils"; import { EnvVar } from "./environment"; import { FeatureEnablement } from "./feature-flags"; @@ -219,6 +218,8 @@ export async function minimalInitCodeQL( return initCodeQLResult.codeql; } +export type CodeQLGetter = () => Promise; + // Takes a list of paths to sarif files and combines them together using the // CLI `github merge-results` command when all SARIF files are produced by // CodeQL. 
Otherwise, it will fall back to combining the files in the action. @@ -226,8 +227,10 @@ export async function minimalInitCodeQL( async function combineSarifFilesUsingCLI( sarifFiles: string[], gitHubVersion: GitHubVersion, - features: FeatureEnablement, + _features: FeatureEnablement, logger: Logger, + getCodeQL: CodeQLGetter, + tempDir: string, ): Promise { logger.info("Combining SARIF files using the CodeQL CLI"); @@ -265,18 +268,10 @@ async function combineSarifFilesUsingCLI( return combineSarifFiles(sarifFiles, logger); } - // Initialize CodeQL, either by using the config file from the 'init' step, - // or by initializing it here. - let codeQL: CodeQL; - let tempDir: string = actionsUtil.getTemporaryDirectory(); - - const config = await getConfig(tempDir, logger); - if (config !== undefined) { - codeQL = await getCodeQL(config.codeQLCmd); - tempDir = config.tempDir; - } else { - codeQL = await minimalInitCodeQL(logger, gitHubVersion, features); - } + // Obtain a `CodeQL` instance. For `analyze`, this is typically the instance that was used for running the queries. + // For `upload-sarif`, this either initialises a new instance or returns a previously initialised one if `getCodeQL` + // is called more than once. + const codeQL: CodeQL = await getCodeQL(); const baseTempDir = path.resolve(tempDir, "combined-sarif"); fs.mkdirSync(baseTempDir, { recursive: true }); @@ -682,6 +677,8 @@ export interface PostProcessingResults { * * @param logger The logger to use. * @param features Information about enabled features. + * @param getCodeQL A function to retrieve a `CodeQL` instance. + * @param tempPath A path to a temporary directory. * @param checkoutPath The path where the repo was checked out at. * @param sarifPaths The paths of the SARIF files to post-process. * @param category The analysis category. @@ -693,6 +690,8 @@ export interface PostProcessingResults { export async function postProcessSarifFiles( logger: Logger, features: FeatureEnablement, + getCodeQL: CodeQLGetter, + tempPath: string, checkoutPath: string, sarifPaths: string[], category: string | undefined, @@ -717,6 +716,8 @@ export async function postProcessSarifFiles( gitHubVersion, features, logger, + getCodeQL, + tempPath, ); } else { const sarifPath = sarifPaths[0]; @@ -777,6 +778,8 @@ export async function writePostProcessedFiles( * to. */ export async function uploadFiles( + tempDir: string, + codeql: CodeQL, inputSarifPath: string, checkoutPath: string, category: string | undefined, @@ -790,6 +793,8 @@ export async function uploadFiles( ); return uploadSpecifiedFiles( + tempDir, + codeql, sarifPaths, checkoutPath, category, @@ -803,6 +808,8 @@ export async function uploadFiles( * Uploads the given array of SARIF files. 
*/
async function uploadSpecifiedFiles(
+ tempDir: string,
+ codeql: CodeQL,
sarifPaths: string[],
checkoutPath: string,
category: string | undefined,
@@ -813,6 +820,8 @@ async function uploadSpecifiedFiles(
const processingResults: PostProcessingResults = await postProcessSarifFiles(
logger,
features,
+ async () => codeql,
+ tempDir,
checkoutPath,
sarifPaths,
category,
diff --git a/src/upload-sarif-action.ts b/src/upload-sarif-action.ts
index cec41b2766..3209a2dde7 100644
--- a/src/upload-sarif-action.ts
+++ b/src/upload-sarif-action.ts
@@ -4,7 +4,9 @@ import * as actionsUtil from "./actions-util";
import { getActionVersion, getTemporaryDirectory } from "./actions-util";
import * as analyses from "./analyses";
import { getGitHubVersion } from "./api-client";
-import { initFeatures } from "./feature-flags";
+import { CodeQL, getCodeQL } from "./codeql";
+import { Config, getConfig } from "./config-utils";
+import { FeatureEnablement, initFeatures } from "./feature-flags";
import { Logger, getActionsLogger } from "./logging";
import { getRepositoryNwo } from "./repository";
import {
@@ -20,6 +22,7 @@ import * as upload_lib from "./upload-lib";
import { postProcessAndUploadSarif } from "./upload-sarif";
import {
ConfigurationError,
+ GitHubVersion,
checkActionVersion,
checkDiskUsage,
getErrorMessage,
@@ -54,6 +57,35 @@ async function sendSuccessStatusReport(
}
}

+/** The cached `CodeQL` instance, if any. */
+let codeql: CodeQL | undefined;
+
+/** Get or initialise a `CodeQL` instance for use by the `upload-sarif` action. */
+async function getOrInitCodeQL(
+ logger: Logger,
+ gitHubVersion: GitHubVersion,
+ features: FeatureEnablement,
+ config: Config | undefined,
+): Promise<CodeQL> {
+ // Return the cached instance, if we have one.
+ if (codeql !== undefined) return codeql;
+
+ // If we have been able to load a `Config` from an earlier CodeQL Action step in the job,
+ // then use the CodeQL executable that we have used previously. Otherwise, initialise the
+ // CLI specifically for `upload-sarif`. Either way, we cache the instance.
+ if (config !== undefined) {
+ codeql = await getCodeQL(config.codeQLCmd);
+ } else {
+ codeql = await upload_lib.minimalInitCodeQL(
+ logger,
+ gitHubVersion,
+ features,
+ );
+ }
+
+ return codeql;
+}
+
async function run(startedAt: Date) {
// To capture errors appropriately, keep as much code within the try-catch as
// possible, and only use safe functions outside.
@@ -94,9 +126,20 @@ async function run(startedAt: Date) {
const checkoutPath = actionsUtil.getRequiredInput("checkout_path");
const category = actionsUtil.getOptionalInput("category");
+ // Determine the temporary directory to use. If we are able to read a `Config` from a previous CodeQL Action
+ // step in the job, then use the temporary directory configured there. Otherwise, use our default.
+ let tempDir: string = actionsUtil.getTemporaryDirectory(); + + const config = await getConfig(tempDir, logger); + if (config !== undefined) { + tempDir = config.tempDir; + } + const uploadResults = await postProcessAndUploadSarif( logger, + tempDir, features, + () => getOrInitCodeQL(logger, gitHubVersion, features, config), "always", checkoutPath, sarifPath, diff --git a/src/upload-sarif.test.ts b/src/upload-sarif.test.ts index d32c0c0312..601dbfd46d 100644 --- a/src/upload-sarif.test.ts +++ b/src/upload-sarif.test.ts @@ -5,6 +5,7 @@ import test, { ExecutionContext } from "ava"; import * as sinon from "sinon"; import { AnalysisKind, getAnalysisConfig } from "./analyses"; +import { getCodeQLForTesting } from "./codeql"; import { getRunnerLogger } from "./logging"; import { createFeatures, setupTests } from "./testing-utils"; import { UploadResult } from "./upload-lib"; @@ -31,6 +32,8 @@ function mockPostProcessSarifFiles() { sinon.match.any, sinon.match.any, sinon.match.any, + sinon.match.any, + sinon.match.any, analysisConfig, ) .resolves({ sarif: { runs: [] }, analysisKey: "", environment: "" }); @@ -73,7 +76,9 @@ const postProcessAndUploadSarifMacro = test.macro({ const actual = await postProcessAndUploadSarif( logger, + tempDir, features, + async () => getCodeQLForTesting(), "always", "", testPath, @@ -90,6 +95,8 @@ const postProcessAndUploadSarifMacro = test.macro({ postProcessSarifFiles.calledWith( logger, features, + sinon.match.func, + tempDir, sinon.match.any, analysisKindResult.expectedFiles?.map(toFullPath) ?? fullSarifPaths, @@ -221,7 +228,9 @@ test("postProcessAndUploadSarif doesn't upload if upload is disabled", async (t) const actual = await postProcessAndUploadSarif( logger, + tempDir, features, + () => getCodeQLForTesting(), "never", "", tempDir, @@ -248,7 +257,9 @@ test("postProcessAndUploadSarif writes post-processed SARIF files if output dire const postProcessedOutPath = path.join(tempDir, "post-processed"); const actual = await postProcessAndUploadSarif( logger, + tempDir, features, + () => getCodeQLForTesting(), "never", "", tempDir, diff --git a/src/upload-sarif.ts b/src/upload-sarif.ts index bc2c886982..372b2d1f47 100644 --- a/src/upload-sarif.ts +++ b/src/upload-sarif.ts @@ -14,7 +14,9 @@ export type UploadSarifResults = Partial< * Finds SARIF files in `sarifPath`, post-processes them, and uploads them to the appropriate services. * * @param logger The logger to use. + * @param tempPath The path to the temporary directory. * @param features Information about enabled features. + * @param getCodeQL A function to retrieve a `CodeQL` instance. * @param uploadKind The kind of upload that is requested. * @param checkoutPath The path where the repository was checked out at. * @param sarifPath The path to the file or directory to upload. @@ -25,7 +27,9 @@ export type UploadSarifResults = Partial< */ export async function postProcessAndUploadSarif( logger: Logger, + tempPath: string, features: FeatureEnablement, + getCodeQL: upload_lib.CodeQLGetter, uploadKind: UploadKind, checkoutPath: string, sarifPath: string, @@ -45,6 +49,8 @@ export async function postProcessAndUploadSarif( const postProcessingResults = await upload_lib.postProcessSarifFiles( logger, features, + getCodeQL, + tempPath, checkoutPath, sarifFiles, category, From b30d90c4969cc299c54db65963f16ea099c1614e Mon Sep 17 00:00:00 2001 From: "Michael B. 
Gale" Date: Tue, 24 Feb 2026 19:53:44 +0000 Subject: [PATCH 3/6] Tidy up imports --- src/upload-sarif-action.ts | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/src/upload-sarif-action.ts b/src/upload-sarif-action.ts index 3209a2dde7..737396dd62 100644 --- a/src/upload-sarif-action.ts +++ b/src/upload-sarif-action.ts @@ -1,7 +1,6 @@ import * as core from "@actions/core"; import * as actionsUtil from "./actions-util"; -import { getActionVersion, getTemporaryDirectory } from "./actions-util"; import * as analyses from "./analyses"; import { getGitHubVersion } from "./api-client"; import { CodeQL, getCodeQL } from "./codeql"; @@ -93,10 +92,10 @@ async function run(startedAt: Date) { const logger = getActionsLogger(); try { - initializeEnvironment(getActionVersion()); + initializeEnvironment(actionsUtil.getActionVersion()); const gitHubVersion = await getGitHubVersion(); - checkActionVersion(getActionVersion(), gitHubVersion); + checkActionVersion(actionsUtil.getActionVersion(), gitHubVersion); // Make inputs accessible in the `post` step. actionsUtil.persistInputs(); @@ -105,7 +104,7 @@ async function run(startedAt: Date) { const features = initFeatures( gitHubVersion, repositoryNwo, - getTemporaryDirectory(), + actionsUtil.getTemporaryDirectory(), logger, ); From 37f3bfc967a946fefc220c3c9e48e57b5599a390 Mon Sep 17 00:00:00 2001 From: "Michael B. Gale" Date: Tue, 24 Feb 2026 19:59:56 +0000 Subject: [PATCH 4/6] Move `getOrInitCodeQL` to `upload-sarif` --- lib/upload-sarif-action.js | 5550 ++++++++++++++++++------------------ src/upload-sarif-action.ts | 37 +- src/upload-sarif.ts | 34 +- 3 files changed, 2812 insertions(+), 2809 deletions(-) diff --git a/lib/upload-sarif-action.js b/lib/upload-sarif-action.js index 3681b9d847..d36868fa24 100644 --- a/lib/upload-sarif-action.js +++ b/lib/upload-sarif-action.js @@ -105887,8 +105887,8 @@ function cacheCodeQlVersion(version) { function getCachedCodeQlVersion() { return cachedCodeQlVersion; } -async function codeQlVersionAtLeast(codeql2, requiredVersion) { - return semver.gte((await codeql2.getVersion()).version, requiredVersion); +async function codeQlVersionAtLeast(codeql, requiredVersion) { + return semver.gte((await codeql.getVersion()).version, requiredVersion); } function getBaseDatabaseOidsFilePath(config) { return path.join(config.dbLocation, BASE_DATABASE_OIDS_FILE_NAME); @@ -106500,254 +106500,6 @@ function wrapApiConfigurationError(e) { return e; } -// src/codeql.ts -var fs10 = __toESM(require("fs")); -var path10 = __toESM(require("path")); -var core10 = __toESM(require_core()); -var toolrunner3 = __toESM(require_toolrunner()); - -// src/cli-errors.ts -var SUPPORTED_PLATFORMS = [ - ["linux", "x64"], - ["win32", "x64"], - ["darwin", "x64"], - ["darwin", "arm64"] -]; -var CliError = class extends Error { - exitCode; - stderr; - constructor({ cmd, args, exitCode, stderr }) { - const prettyCommand = prettyPrintInvocation(cmd, args); - const fatalErrors = extractFatalErrors(stderr); - const autobuildErrors = extractAutobuildErrors(stderr); - let message; - if (fatalErrors) { - message = `Encountered a fatal error while running "${prettyCommand}". Exit code was ${exitCode} and error was: ${ensureEndsInPeriod( - fatalErrors.trim() - )} See the logs for more details.`; - } else if (autobuildErrors) { - message = `We were unable to automatically build your code. Please provide manual build steps. 
See ${"https://docs.github.com/en/code-security/code-scanning/troubleshooting-code-scanning/automatic-build-failed" /* AUTOMATIC_BUILD_FAILED */} for more information. Encountered the following error: ${autobuildErrors}`; - } else { - const lastLine = ensureEndsInPeriod( - stderr.trim().split("\n").pop()?.trim() || "n/a" - ); - message = `Encountered a fatal error while running "${prettyCommand}". Exit code was ${exitCode} and last log line was: ${lastLine} See the logs for more details.`; - } - super(message); - this.exitCode = exitCode; - this.stderr = stderr; - } -}; -function extractFatalErrors(error3) { - const fatalErrorRegex = /.*fatal (internal )?error occurr?ed(. Details)?:/gi; - let fatalErrors = []; - let lastFatalErrorIndex; - let match; - while ((match = fatalErrorRegex.exec(error3)) !== null) { - if (lastFatalErrorIndex !== void 0) { - fatalErrors.push(error3.slice(lastFatalErrorIndex, match.index).trim()); - } - lastFatalErrorIndex = match.index; - } - if (lastFatalErrorIndex !== void 0) { - const lastError = error3.slice(lastFatalErrorIndex).trim(); - if (fatalErrors.length === 0) { - return lastError; - } - const isOneLiner = !fatalErrors.some((e) => e.includes("\n")); - if (isOneLiner) { - fatalErrors = fatalErrors.map(ensureEndsInPeriod); - } - return [ - ensureEndsInPeriod(lastError), - "Context:", - ...fatalErrors.reverse() - ].join(isOneLiner ? " " : "\n"); - } - return void 0; -} -function extractAutobuildErrors(error3) { - const pattern = /.*\[autobuild\] \[ERROR\] (.*)/gi; - let errorLines = [...error3.matchAll(pattern)].map((match) => match[1]); - if (errorLines.length > 10) { - errorLines = errorLines.slice(0, 10); - errorLines.push("(truncated)"); - } - return errorLines.join("\n") || void 0; -} -var cliErrorsConfig = { - ["AutobuildError" /* AutobuildError */]: { - cliErrorMessageCandidates: [ - new RegExp("We were unable to automatically build your code") - ] - }, - ["CouldNotCreateTempDir" /* CouldNotCreateTempDir */]: { - cliErrorMessageCandidates: [new RegExp("Could not create temp directory")] - }, - ["ExternalRepositoryCloneFailed" /* ExternalRepositoryCloneFailed */]: { - cliErrorMessageCandidates: [ - new RegExp("Failed to clone external Git repository") - ] - }, - ["GradleBuildFailed" /* GradleBuildFailed */]: { - cliErrorMessageCandidates: [ - new RegExp("\\[autobuild\\] FAILURE: Build failed with an exception.") - ] - }, - // Version of CodeQL CLI is incompatible with this version of the CodeQL Action - ["IncompatibleWithActionVersion" /* IncompatibleWithActionVersion */]: { - cliErrorMessageCandidates: [ - new RegExp("is not compatible with this CodeQL CLI") - ] - }, - ["InitCalledTwice" /* InitCalledTwice */]: { - cliErrorMessageCandidates: [ - new RegExp( - "Refusing to create databases .* but could not process any of it" - ) - ], - additionalErrorMessageToAppend: `Is the "init" action called twice in the same job?` - }, - ["InvalidConfigFile" /* InvalidConfigFile */]: { - cliErrorMessageCandidates: [ - new RegExp("Config file .* is not valid"), - new RegExp("The supplied config file is empty") - ] - }, - ["InvalidExternalRepoSpecifier" /* InvalidExternalRepoSpecifier */]: { - cliErrorMessageCandidates: [ - new RegExp("Specifier for external repository is invalid") - ] - }, - // Expected source location for database creation does not exist - ["InvalidSourceRoot" /* InvalidSourceRoot */]: { - cliErrorMessageCandidates: [new RegExp("Invalid source root")] - }, - ["MavenBuildFailed" /* MavenBuildFailed */]: { - cliErrorMessageCandidates: [ - new 
RegExp("\\[autobuild\\] \\[ERROR\\] Failed to execute goal") - ] - }, - ["NoBuildCommandAutodetected" /* NoBuildCommandAutodetected */]: { - cliErrorMessageCandidates: [ - new RegExp("Could not auto-detect a suitable build method") - ] - }, - ["NoBuildMethodAutodetected" /* NoBuildMethodAutodetected */]: { - cliErrorMessageCandidates: [ - new RegExp( - "Could not detect a suitable build command for the source checkout" - ) - ] - }, - // Usually when a manual build script has failed, or if an autodetected language - // was unintended to have CodeQL analysis run on it. - ["NoSourceCodeSeen" /* NoSourceCodeSeen */]: { - exitCode: 32, - cliErrorMessageCandidates: [ - new RegExp( - "CodeQL detected code written in .* but could not process any of it" - ), - new RegExp( - "CodeQL did not detect any code written in languages supported by CodeQL" - ) - ] - }, - ["NoSupportedBuildCommandSucceeded" /* NoSupportedBuildCommandSucceeded */]: { - cliErrorMessageCandidates: [ - new RegExp("No supported build command succeeded") - ] - }, - ["NoSupportedBuildSystemDetected" /* NoSupportedBuildSystemDetected */]: { - cliErrorMessageCandidates: [ - new RegExp("No supported build system detected") - ] - }, - ["OutOfMemoryOrDisk" /* OutOfMemoryOrDisk */]: { - cliErrorMessageCandidates: [ - new RegExp("CodeQL is out of memory."), - new RegExp("out of disk"), - new RegExp("No space left on device") - ], - additionalErrorMessageToAppend: "For more information, see https://gh.io/troubleshooting-code-scanning/out-of-disk-or-memory" - }, - ["PackCannotBeFound" /* PackCannotBeFound */]: { - cliErrorMessageCandidates: [ - new RegExp( - "Query pack .* cannot be found\\. Check the spelling of the pack\\." - ), - new RegExp( - "is not a .ql file, .qls file, a directory, or a query pack specification." - ) - ] - }, - ["PackMissingAuth" /* PackMissingAuth */]: { - cliErrorMessageCandidates: [ - new RegExp("GitHub Container registry .* 403 Forbidden"), - new RegExp( - "Do you need to specify a token to authenticate to the registry?" - ) - ] - }, - ["SwiftBuildFailed" /* SwiftBuildFailed */]: { - cliErrorMessageCandidates: [ - new RegExp( - "\\[autobuilder/build\\] \\[build-command-failed\\] `autobuild` failed to run the build command" - ) - ] - }, - ["UnsupportedBuildMode" /* UnsupportedBuildMode */]: { - cliErrorMessageCandidates: [ - new RegExp( - "does not support the .* build mode. Please try using one of the following build modes instead" - ) - ] - }, - ["NotFoundInRegistry" /* NotFoundInRegistry */]: { - cliErrorMessageCandidates: [ - new RegExp("'.*' not found in the registry '.*'") - ] - } -}; -function getCliConfigCategoryIfExists(cliError) { - for (const [category, configuration] of Object.entries(cliErrorsConfig)) { - if (cliError.exitCode !== void 0 && configuration.exitCode !== void 0 && cliError.exitCode === configuration.exitCode) { - return category; - } - for (const e of configuration.cliErrorMessageCandidates) { - if (cliError.message.match(e) || cliError.stderr.match(e)) { - return category; - } - } - } - return void 0; -} -function isUnsupportedPlatform() { - return !SUPPORTED_PLATFORMS.some( - ([platform, arch2]) => platform === process.platform && arch2 === process.arch - ); -} -function getUnsupportedPlatformError(cliError) { - return new ConfigurationError( - `The CodeQL CLI does not support the platform/architecture combination of ${process.platform}/${process.arch} (see ${"https://codeql.github.com/docs/codeql-overview/system-requirements/" /* SYSTEM_REQUIREMENTS */}). 
The underlying error was: ${cliError.message}` - ); -} -function wrapCliConfigurationError(cliError) { - if (isUnsupportedPlatform()) { - return getUnsupportedPlatformError(cliError); - } - const cliConfigErrorCategory = getCliConfigCategoryIfExists(cliError); - if (cliConfigErrorCategory === void 0) { - return cliError; - } - let errorMessageBuilder = cliError.message; - const additionalErrorMessageToAppend = cliErrorsConfig[cliConfigErrorCategory].additionalErrorMessageToAppend; - if (additionalErrorMessageToAppend !== void 0) { - errorMessageBuilder = `${errorMessageBuilder} ${additionalErrorMessageToAppend}`; - } - return new ConfigurationError(errorMessageBuilder); -} - // src/config-utils.ts var fs6 = __toESM(require("fs")); var path7 = __toESM(require("path")); @@ -107404,8 +107156,8 @@ var OfflineFeatures = class { * * @throws if a `minimumVersion` is specified for the feature, and `codeql` is not provided. */ - async getValue(feature, codeql2) { - const offlineValue = await this.getOfflineValue(feature, codeql2); + async getValue(feature, codeql) { + const offlineValue = await this.getOfflineValue(feature, codeql); if (offlineValue !== void 0) { return offlineValue; } @@ -107414,14 +107166,14 @@ var OfflineFeatures = class { /** * Determines whether `feature` is enabled using the CLI and environment variables. */ - async getOfflineValue(feature, codeql2) { + async getOfflineValue(feature, codeql) { const config = this.getFeatureConfig(feature); - if (!codeql2 && config.minimumVersion) { + if (!codeql && config.minimumVersion) { throw new Error( `Internal error: A minimum version is specified for feature ${feature}, but no instance of CodeQL was provided.` ); } - if (!codeql2 && config.toolsFeature) { + if (!codeql && config.toolsFeature) { throw new Error( `Internal error: A required tools feature is specified for feature ${feature}, but no instance of CodeQL was provided.` ); @@ -107434,28 +107186,28 @@ var OfflineFeatures = class { return false; } const minimumVersion = config.minimumVersion; - if (codeql2 && minimumVersion) { - if (!await codeQlVersionAtLeast(codeql2, minimumVersion)) { + if (codeql && minimumVersion) { + if (!await codeQlVersionAtLeast(codeql, minimumVersion)) { this.logger.debug( `Feature ${feature} is disabled because the CodeQL CLI version is older than the minimum version ${minimumVersion}.` ); return false; } else { this.logger.debug( - `CodeQL CLI version ${(await codeql2.getVersion()).version} is newer than the minimum version ${minimumVersion} for feature ${feature}.` + `CodeQL CLI version ${(await codeql.getVersion()).version} is newer than the minimum version ${minimumVersion} for feature ${feature}.` ); } } const toolsFeature = config.toolsFeature; - if (codeql2 && toolsFeature) { - if (!await codeql2.supportsFeature(toolsFeature)) { + if (codeql && toolsFeature) { + if (!await codeql.supportsFeature(toolsFeature)) { this.logger.debug( `Feature ${feature} is disabled because the CodeQL CLI version does not support the required tools feature ${toolsFeature}.` ); return false; } else { this.logger.debug( - `CodeQL CLI version ${(await codeql2.getVersion()).version} supports the required tools feature ${toolsFeature} for feature ${feature}.` + `CodeQL CLI version ${(await codeql.getVersion()).version} supports the required tools feature ${toolsFeature} for feature ${feature}.` ); } } @@ -107505,8 +107257,8 @@ var Features = class extends OfflineFeatures { * * @throws if a `minimumVersion` is specified for the feature, and `codeql` is not provided. 
*/ - async getValue(feature, codeql2) { - const offlineValue = await this.getOfflineValue(feature, codeql2); + async getValue(feature, codeql) { + const offlineValue = await this.getOfflineValue(feature, codeql); if (offlineValue !== void 0) { return offlineValue; } @@ -107795,1789 +107547,213 @@ function appendExtraQueryExclusions(extraQueryExclusions, cliConfig) { return augmentedConfig; } -// src/setup-codeql.ts -var fs9 = __toESM(require("fs")); -var path9 = __toESM(require("path")); -var toolcache3 = __toESM(require_tool_cache()); -var import_fast_deep_equal = __toESM(require_fast_deep_equal()); -var semver8 = __toESM(require_semver2()); - -// node_modules/uuid/dist-node/stringify.js -var byteToHex = []; -for (let i = 0; i < 256; ++i) { - byteToHex.push((i + 256).toString(16).slice(1)); +// src/status-report.ts +var os = __toESM(require("os")); +var core9 = __toESM(require_core()); +function isFirstPartyAnalysis(actionName) { + if (actionName !== "upload-sarif" /* UploadSarif */) { + return true; + } + return process.env["CODEQL_ACTION_INIT_HAS_RUN" /* INIT_ACTION_HAS_RUN */] === "true"; } -function unsafeStringify(arr, offset = 0) { - return (byteToHex[arr[offset + 0]] + byteToHex[arr[offset + 1]] + byteToHex[arr[offset + 2]] + byteToHex[arr[offset + 3]] + "-" + byteToHex[arr[offset + 4]] + byteToHex[arr[offset + 5]] + "-" + byteToHex[arr[offset + 6]] + byteToHex[arr[offset + 7]] + "-" + byteToHex[arr[offset + 8]] + byteToHex[arr[offset + 9]] + "-" + byteToHex[arr[offset + 10]] + byteToHex[arr[offset + 11]] + byteToHex[arr[offset + 12]] + byteToHex[arr[offset + 13]] + byteToHex[arr[offset + 14]] + byteToHex[arr[offset + 15]]).toLowerCase(); +function isThirdPartyAnalysis(actionName) { + return !isFirstPartyAnalysis(actionName); } - -// node_modules/uuid/dist-node/rng.js -var import_node_crypto = require("node:crypto"); -var rnds8Pool = new Uint8Array(256); -var poolPtr = rnds8Pool.length; -function rng() { - if (poolPtr > rnds8Pool.length - 16) { - (0, import_node_crypto.randomFillSync)(rnds8Pool); - poolPtr = 0; +function getActionsStatus(error3, otherFailureCause) { + if (error3 || otherFailureCause) { + return error3 instanceof ConfigurationError ? "user-error" : "failure"; + } else { + return "success"; } - return rnds8Pool.slice(poolPtr, poolPtr += 16); } - -// node_modules/uuid/dist-node/native.js -var import_node_crypto2 = require("node:crypto"); -var native_default = { randomUUID: import_node_crypto2.randomUUID }; - -// node_modules/uuid/dist-node/v4.js -function _v4(options, buf, offset) { - options = options || {}; - const rnds = options.random ?? options.rng?.() ?? rng(); - if (rnds.length < 16) { - throw new Error("Random bytes length must be >= 16"); +function setJobStatusIfUnsuccessful(actionStatus) { + if (actionStatus === "user-error") { + core9.exportVariable( + "CODEQL_ACTION_JOB_STATUS" /* JOB_STATUS */, + process.env["CODEQL_ACTION_JOB_STATUS" /* JOB_STATUS */] ?? "JOB_STATUS_CONFIGURATION_ERROR" /* ConfigErrorStatus */ + ); + } else if (actionStatus === "failure" || actionStatus === "aborted") { + core9.exportVariable( + "CODEQL_ACTION_JOB_STATUS" /* JOB_STATUS */, + process.env["CODEQL_ACTION_JOB_STATUS" /* JOB_STATUS */] ?? 
"JOB_STATUS_FAILURE" /* FailureStatus */ + ); } - rnds[6] = rnds[6] & 15 | 64; - rnds[8] = rnds[8] & 63 | 128; - if (buf) { - offset = offset || 0; - if (offset < 0 || offset + 16 > buf.length) { - throw new RangeError(`UUID byte range ${offset}:${offset + 15} is out of buffer bounds`); +} +async function createStatusReportBase(actionName, status, actionStartedAt, config, diskInfo, logger, cause, exception2) { + try { + const commitOid = getOptionalInput("sha") || process.env["GITHUB_SHA"] || ""; + const ref = await getRef(); + const jobRunUUID = process.env["JOB_RUN_UUID" /* JOB_RUN_UUID */] || ""; + const workflowRunID = getWorkflowRunID(); + const workflowRunAttempt = getWorkflowRunAttempt(); + const workflowName = process.env["GITHUB_WORKFLOW"] || ""; + const jobName = process.env["GITHUB_JOB"] || ""; + const analysis_key = await getAnalysisKey(); + let workflowStartedAt = process.env["CODEQL_WORKFLOW_STARTED_AT" /* WORKFLOW_STARTED_AT */]; + if (workflowStartedAt === void 0) { + workflowStartedAt = actionStartedAt.toISOString(); + core9.exportVariable("CODEQL_WORKFLOW_STARTED_AT" /* WORKFLOW_STARTED_AT */, workflowStartedAt); } - for (let i = 0; i < 16; ++i) { - buf[offset + i] = rnds[i]; + const runnerOs = getRequiredEnvParam("RUNNER_OS"); + const codeQlCliVersion = getCachedCodeQlVersion(); + const actionRef = process.env["GITHUB_ACTION_REF"] || ""; + const testingEnvironment = getTestingEnvironment(); + if (testingEnvironment) { + core9.exportVariable("CODEQL_ACTION_TESTING_ENVIRONMENT" /* TESTING_ENVIRONMENT */, testingEnvironment); } - return buf; - } - return unsafeStringify(rnds); -} -function v4(options, buf, offset) { - if (native_default.randomUUID && !buf && !options) { - return native_default.randomUUID(); - } - return _v4(options, buf, offset); -} -var v4_default = v4; - -// src/tar.ts -var import_child_process = require("child_process"); -var fs7 = __toESM(require("fs")); -var stream = __toESM(require("stream")); -var import_toolrunner = __toESM(require_toolrunner()); -var io4 = __toESM(require_io()); -var toolcache = __toESM(require_tool_cache()); -var semver6 = __toESM(require_semver2()); -var MIN_REQUIRED_BSD_TAR_VERSION = "3.4.3"; -var MIN_REQUIRED_GNU_TAR_VERSION = "1.31"; -async function getTarVersion() { - const tar = await io4.which("tar", true); - let stdout = ""; - const exitCode = await new import_toolrunner.ToolRunner(tar, ["--version"], { - listeners: { - stdout: (data) => { - stdout += data.toString(); - } + const isSteadyStateDefaultSetupRun = process.env["CODE_SCANNING_IS_STEADY_STATE_DEFAULT_SETUP"] === "true"; + const statusReport = { + action_name: actionName, + action_oid: "unknown", + // TODO decide if it's possible to fill this in + action_ref: actionRef, + action_started_at: actionStartedAt.toISOString(), + action_version: getActionVersion(), + analysis_kinds: config?.analysisKinds?.join(","), + analysis_key, + build_mode: config?.buildMode, + commit_oid: commitOid, + first_party_analysis: isFirstPartyAnalysis(actionName), + job_name: jobName, + job_run_uuid: jobRunUUID, + ref, + runner_os: runnerOs, + started_at: workflowStartedAt, + status, + steady_state_default_setup: isSteadyStateDefaultSetupRun, + testing_environment: testingEnvironment || "", + workflow_name: workflowName, + workflow_run_attempt: workflowRunAttempt, + workflow_run_id: workflowRunID + }; + try { + statusReport.actions_event_name = getWorkflowEventName(); + } catch (e) { + logger.warning( + `Could not determine the workflow event name: ${getErrorMessage(e)}.` + ); } - 
}).exec(); - if (exitCode !== 0) { - throw new Error("Failed to call tar --version"); - } - if (stdout.includes("GNU tar")) { - const match = stdout.match(/tar \(GNU tar\) ([0-9.]+)/); - if (!match?.[1]) { - throw new Error("Failed to parse output of tar --version."); + if (config) { + statusReport.languages = config.languages?.join(","); } - return { type: "gnu", version: match[1] }; - } else if (stdout.includes("bsdtar")) { - const match = stdout.match(/bsdtar ([0-9.]+)/); - if (!match?.[1]) { - throw new Error("Failed to parse output of tar --version."); + if (diskInfo) { + statusReport.runner_available_disk_space_bytes = diskInfo.numAvailableBytes; + statusReport.runner_total_disk_space_bytes = diskInfo.numTotalBytes; } - return { type: "bsd", version: match[1] }; - } else { - throw new Error("Unknown tar version"); - } -} -async function isZstdAvailable(logger) { - const foundZstdBinary = await isBinaryAccessible("zstd", logger); - try { - const tarVersion = await getTarVersion(); - const { type: type2, version } = tarVersion; - logger.info(`Found ${type2} tar version ${version}.`); - switch (type2) { - case "gnu": - return { - available: foundZstdBinary && // GNU tar only uses major and minor version numbers - semver6.gte( - semver6.coerce(version), - semver6.coerce(MIN_REQUIRED_GNU_TAR_VERSION) - ), - foundZstdBinary, - version: tarVersion - }; - case "bsd": - return { - available: foundZstdBinary && // Do a loose comparison since these version numbers don't contain - // a patch version number. - semver6.gte(version, MIN_REQUIRED_BSD_TAR_VERSION), - foundZstdBinary, - version: tarVersion - }; - default: - assertNever(type2); + if (cause) { + statusReport.cause = cause; + } + if (exception2) { + statusReport.exception = exception2; + } + if (status === "success" || status === "failure" || status === "aborted" || status === "user-error") { + statusReport.completed_at = (/* @__PURE__ */ new Date()).toISOString(); + } + const matrix = getRequiredInput("matrix"); + if (matrix) { + statusReport.matrix_vars = matrix; + } + if ("RUNNER_ARCH" in process.env) { + statusReport.runner_arch = process.env["RUNNER_ARCH"]; + } + if (!(runnerOs === "Linux" && isSelfHostedRunner())) { + statusReport.runner_os_release = os.release(); + } + if (codeQlCliVersion !== void 0) { + statusReport.codeql_version = codeQlCliVersion.version; } + const imageVersion = process.env["ImageVersion"]; + if (imageVersion) { + statusReport.runner_image_version = imageVersion; + } + return statusReport; } catch (e) { logger.warning( - `Failed to determine tar version, therefore will assume zstd is not available. The underlying error was: ${e}` + `Failed to gather information for telemetry: ${getErrorMessage(e)}. Will skip sending status report.` ); - return { available: false, foundZstdBinary }; - } -} -async function extract(tarPath, dest, compressionMethod, tarVersion, logger) { - fs7.mkdirSync(dest, { recursive: true }); - switch (compressionMethod) { - case "gzip": - return await toolcache.extractTar(tarPath, dest); - case "zstd": { - if (!tarVersion) { - throw new Error( - "Could not determine tar version, which is required to extract a Zstandard archive." - ); - } - await extractTarZst(tarPath, dest, tarVersion, logger); - return dest; + if (isInTestMode()) { + throw e; } + return void 0; } } -async function extractTarZst(tar, dest, tarVersion, logger) { - logger.debug( - `Extracting to ${dest}.${tar instanceof stream.Readable ? 
` Input stream has high water mark ${tar.readableHighWaterMark}.` : ""}` - ); +var OUT_OF_DATE_MSG = "CodeQL Action is out-of-date. Please upgrade to the latest version of `codeql-action`."; +var INCOMPATIBLE_MSG = "CodeQL Action version is incompatible with the API endpoint. Please update to a compatible version of `codeql-action`."; +async function sendStatusReport(statusReport) { + setJobStatusIfUnsuccessful(statusReport.status); + const statusReportJSON = JSON.stringify(statusReport); + core9.debug(`Sending status report: ${statusReportJSON}`); + if (isInTestMode()) { + core9.debug("In test mode. Status reports are not uploaded."); + return; + } + const nwo = getRepositoryNwo(); + const client = getApiClient(); try { - const args = ["-x", "--zstd", "--ignore-zeros"]; - if (tarVersion.type === "gnu") { - args.push("--warning=no-unknown-keyword"); - args.push("--overwrite"); - } - args.push("-f", tar instanceof stream.Readable ? "-" : tar, "-C", dest); - process.stdout.write(`[command]tar ${args.join(" ")} -`); - await new Promise((resolve6, reject) => { - const tarProcess = (0, import_child_process.spawn)("tar", args, { stdio: "pipe" }); - let stdout = ""; - tarProcess.stdout?.on("data", (data) => { - stdout += data.toString(); - process.stdout.write(data); - }); - let stderr = ""; - tarProcess.stderr?.on("data", (data) => { - stderr += data.toString(); - process.stdout.write(data); - }); - tarProcess.on("error", (err) => { - reject(new Error(`Error while extracting tar: ${err}`)); - }); - if (tar instanceof stream.Readable) { - tar.pipe(tarProcess.stdin).on("error", (err) => { - reject( - new Error(`Error while downloading and extracting tar: ${err}`) - ); - }); + await client.request( + "PUT /repos/:owner/:repo/code-scanning/analysis/status", + { + owner: nwo.owner, + repo: nwo.repo, + data: statusReportJSON } - tarProcess.on("exit", (code) => { - if (code !== 0) { - reject( - new CommandInvocationError( - "tar", - args, - code ?? void 0, - stdout, - stderr - ) - ); - } - resolve6(); - }); - }); + ); } catch (e) { - await cleanUpPath(dest, "extraction destination directory", logger); - throw e; - } -} -var KNOWN_EXTENSIONS = { - "tar.gz": "gzip", - "tar.zst": "zstd" -}; -function inferCompressionMethod(tarPath) { - for (const [ext, method] of Object.entries(KNOWN_EXTENSIONS)) { - if (tarPath.endsWith(`.${ext}`)) { - return method; - } - } - return void 0; -} - -// src/tools-download.ts -var fs8 = __toESM(require("fs")); -var os = __toESM(require("os")); -var path8 = __toESM(require("path")); -var import_perf_hooks = require("perf_hooks"); -var core9 = __toESM(require_core()); -var import_http_client = __toESM(require_lib()); -var toolcache2 = __toESM(require_tool_cache()); -var import_follow_redirects = __toESM(require_follow_redirects()); -var semver7 = __toESM(require_semver2()); -var STREAMING_HIGH_WATERMARK_BYTES = 4 * 1024 * 1024; -var TOOLCACHE_TOOL_NAME = "CodeQL"; -function makeDownloadFirstToolsDownloadDurations(downloadDurationMs, extractionDurationMs) { - return { - combinedDurationMs: downloadDurationMs + extractionDurationMs, - downloadDurationMs, - extractionDurationMs, - streamExtraction: false - }; -} -function makeStreamedToolsDownloadDurations(combinedDurationMs) { - return { - combinedDurationMs, - downloadDurationMs: void 0, - extractionDurationMs: void 0, - streamExtraction: true - }; -} -async function downloadAndExtract(codeqlURL, compressionMethod, dest, authorization, headers, tarVersion, logger) { - logger.info( - `Downloading CodeQL tools from ${codeqlURL} . 
This may take a while.` - ); - try { - if (compressionMethod === "zstd" && process.platform === "linux") { - logger.info(`Streaming the extraction of the CodeQL bundle.`); - const toolsInstallStart = import_perf_hooks.performance.now(); - await downloadAndExtractZstdWithStreaming( - codeqlURL, - dest, - authorization, - headers, - tarVersion, - logger - ); - const combinedDurationMs = Math.round( - import_perf_hooks.performance.now() - toolsInstallStart - ); - logger.info( - `Finished downloading and extracting CodeQL bundle to ${dest} (${formatDuration( - combinedDurationMs - )}).` - ); - return { - compressionMethod, - toolsUrl: sanitizeUrlForStatusReport(codeqlURL), - ...makeStreamedToolsDownloadDurations(combinedDurationMs) - }; + const httpError = asHTTPError(e); + if (httpError !== void 0) { + switch (httpError.status) { + case 403: + if (getWorkflowEventName() === "push" && process.env["GITHUB_ACTOR"] === "dependabot[bot]") { + core9.warning( + `Workflows triggered by Dependabot on the "push" event run with read-only access. Uploading CodeQL results requires write access. To use CodeQL with Dependabot, please ensure you are using the "pull_request" event for this workflow and avoid triggering on the "push" event for Dependabot branches. See ${"https://docs.github.com/en/code-security/code-scanning/creating-an-advanced-setup-for-code-scanning/customizing-your-advanced-setup-for-code-scanning#scanning-on-push" /* SCANNING_ON_PUSH */} for more information on how to configure these events.` + ); + } else { + core9.warning( + `This run of the CodeQL Action does not have permission to access the CodeQL Action API endpoints. This could be because the Action is running on a pull request from a fork. If not, please ensure the workflow has at least the 'security-events: read' permission. 
Details: ${httpError.message}` + ); + } + return; + case 404: + core9.warning(httpError.message); + return; + case 422: + if (getRequiredEnvParam("GITHUB_SERVER_URL") !== GITHUB_DOTCOM_URL) { + core9.debug(INCOMPATIBLE_MSG); + } else { + core9.debug(OUT_OF_DATE_MSG); + } + return; + } } - } catch (e) { core9.warning( - `Failed to download and extract CodeQL bundle using streaming with error: ${getErrorMessage(e)}` + `An unexpected error occurred when sending a status report: ${getErrorMessage( + e + )}` ); - core9.warning(`Falling back to downloading the bundle before extracting.`); - await cleanUpPath(dest, "CodeQL bundle", logger); } - const toolsDownloadStart = import_perf_hooks.performance.now(); - const archivedBundlePath = await toolcache2.downloadTool( - codeqlURL, - void 0, - authorization, - headers - ); - const downloadDurationMs = Math.round(import_perf_hooks.performance.now() - toolsDownloadStart); - logger.info( - `Finished downloading CodeQL bundle to ${archivedBundlePath} (${formatDuration( - downloadDurationMs - )}).` - ); - let extractionDurationMs; +} +async function sendUnhandledErrorStatusReport(actionName, actionStartedAt, error3, logger) { try { - logger.info("Extracting CodeQL bundle."); - const extractionStart = import_perf_hooks.performance.now(); - await extract( - archivedBundlePath, - dest, - compressionMethod, - tarVersion, - logger - ); - extractionDurationMs = Math.round(import_perf_hooks.performance.now() - extractionStart); - logger.info( - `Finished extracting CodeQL bundle to ${dest} (${formatDuration( - extractionDurationMs - )}).` + const statusReport = await createStatusReportBase( + actionName, + "failure", + actionStartedAt, + void 0, + void 0, + logger, + `Unhandled CodeQL Action error: ${getErrorMessage(error3)}`, + error3 instanceof Error ? error3.stack : void 0 ); - } finally { - await cleanUpPath(archivedBundlePath, "CodeQL bundle archive", logger); - } - return { - compressionMethod, - toolsUrl: sanitizeUrlForStatusReport(codeqlURL), - ...makeDownloadFirstToolsDownloadDurations( - downloadDurationMs, - extractionDurationMs - ) - }; -} -async function downloadAndExtractZstdWithStreaming(codeqlURL, dest, authorization, headers, tarVersion, logger) { - fs8.mkdirSync(dest, { recursive: true }); - const agent = new import_http_client.HttpClient().getAgent(codeqlURL); - headers = Object.assign( - { "User-Agent": "CodeQL Action" }, - authorization ? { authorization } : {}, - headers - ); - const response = await new Promise( - (resolve6) => import_follow_redirects.https.get( - codeqlURL, - { - headers, - // Increase the high water mark to improve performance. - highWaterMark: STREAMING_HIGH_WATERMARK_BYTES, - // Use the agent to respect proxy settings. - agent - }, - (r) => resolve6(r) - ) - ); - if (response.statusCode !== 200) { - throw new Error( - `Failed to download CodeQL bundle from ${codeqlURL}. 
HTTP status code: ${response.statusCode}.` + if (statusReport !== void 0) { + await sendStatusReport(statusReport); + } + } catch (e) { + logger.warning( + `Failed to send the unhandled error status report: ${getErrorMessage(e)}.` ); - } - await extractTarZst(response, dest, tarVersion, logger); -} -function getToolcacheDirectory(version) { - return path8.join( - getRequiredEnvParam("RUNNER_TOOL_CACHE"), - TOOLCACHE_TOOL_NAME, - semver7.clean(version) || version, - os.arch() || "" - ); -} -function writeToolcacheMarkerFile(extractedPath, logger) { - const markerFilePath = `${extractedPath}.complete`; - fs8.writeFileSync(markerFilePath, ""); - logger.info(`Created toolcache marker file ${markerFilePath}`); -} -function sanitizeUrlForStatusReport(url2) { - return ["github/codeql-action", "dsp-testing/codeql-cli-nightlies"].some( - (repo) => url2.startsWith(`https://github.com/${repo}/releases/download/`) - ) ? url2 : "sanitized-value"; -} - -// src/setup-codeql.ts -var CODEQL_DEFAULT_ACTION_REPOSITORY = "github/codeql-action"; -var CODEQL_NIGHTLIES_REPOSITORY_OWNER = "dsp-testing"; -var CODEQL_NIGHTLIES_REPOSITORY_NAME = "codeql-cli-nightlies"; -var CODEQL_BUNDLE_VERSION_ALIAS = ["linked", "latest"]; -var CODEQL_NIGHTLY_TOOLS_INPUTS = ["nightly", "nightly-latest"]; -var CODEQL_TOOLCACHE_INPUT = "toolcache"; -function getCodeQLBundleExtension(compressionMethod) { - switch (compressionMethod) { - case "gzip": - return ".tar.gz"; - case "zstd": - return ".tar.zst"; - default: - assertNever(compressionMethod); - } -} -function getCodeQLBundleName(compressionMethod) { - const extension = getCodeQLBundleExtension(compressionMethod); - let platform; - if (process.platform === "win32") { - platform = "win64"; - } else if (process.platform === "linux") { - platform = "linux64"; - } else if (process.platform === "darwin") { - platform = "osx64"; - } else { - return `codeql-bundle${extension}`; - } - return `codeql-bundle-${platform}${extension}`; -} -function getCodeQLActionRepository(logger) { - if (isRunningLocalAction()) { - logger.info( - "The CodeQL Action is checked out locally. Using the default CodeQL Action repository." - ); - return CODEQL_DEFAULT_ACTION_REPOSITORY; - } - return getRequiredEnvParam("GITHUB_ACTION_REPOSITORY"); -} -async function getCodeQLBundleDownloadURL(tagName, apiDetails, compressionMethod, logger) { - const codeQLActionRepository = getCodeQLActionRepository(logger); - const potentialDownloadSources = [ - // This GitHub instance, and this Action. - [apiDetails.url, codeQLActionRepository], - // This GitHub instance, and the canonical Action. - [apiDetails.url, CODEQL_DEFAULT_ACTION_REPOSITORY], - // GitHub.com, and the canonical Action. 
- [GITHUB_DOTCOM_URL, CODEQL_DEFAULT_ACTION_REPOSITORY] - ]; - const uniqueDownloadSources = potentialDownloadSources.filter( - (source, index, self2) => { - return !self2.slice(0, index).some((other) => (0, import_fast_deep_equal.default)(source, other)); - } - ); - const codeQLBundleName = getCodeQLBundleName(compressionMethod); - for (const downloadSource of uniqueDownloadSources) { - const [apiURL, repository] = downloadSource; - if (apiURL === GITHUB_DOTCOM_URL && repository === CODEQL_DEFAULT_ACTION_REPOSITORY) { - break; - } - const [repositoryOwner, repositoryName] = repository.split("/"); - try { - const release2 = await getApiClient().rest.repos.getReleaseByTag({ - owner: repositoryOwner, - repo: repositoryName, - tag: tagName - }); - for (const asset of release2.data.assets) { - if (asset.name === codeQLBundleName) { - logger.info( - `Found CodeQL bundle ${codeQLBundleName} in ${repository} on ${apiURL} with URL ${asset.url}.` - ); - return asset.url; - } - } - } catch (e) { - logger.info( - `Looked for CodeQL bundle ${codeQLBundleName} in ${repository} on ${apiURL} but got error ${e}.` - ); - } - } - return `https://github.com/${CODEQL_DEFAULT_ACTION_REPOSITORY}/releases/download/${tagName}/${codeQLBundleName}`; -} -function tryGetBundleVersionFromTagName(tagName, logger) { - const match = tagName.match(/^codeql-bundle-(.*)$/); - if (match === null || match.length < 2) { - logger.debug(`Could not determine bundle version from tag ${tagName}.`); - return void 0; - } - return match[1]; -} -function tryGetTagNameFromUrl(url2, logger) { - const matches = [...url2.matchAll(/\/(codeql-bundle-[^/]*)\//g)]; - if (matches.length === 0) { - logger.debug(`Could not determine tag name for URL ${url2}.`); - return void 0; - } - const match = matches[matches.length - 1]; - if (match?.length !== 2) { - logger.debug( - `Could not determine tag name for URL ${url2}. Matched ${JSON.stringify( - match - )}.` - ); - return void 0; - } - return match[1]; -} -function convertToSemVer(version, logger) { - if (!semver8.valid(version)) { - logger.debug( - `Bundle version ${version} is not in SemVer format. Will treat it as pre-release 0.0.0-${version}.` - ); - version = `0.0.0-${version}`; - } - const s = semver8.clean(version); - if (!s) { - throw new Error(`Bundle version ${version} is not in SemVer format.`); - } - return s; -} -async function findOverridingToolsInCache(humanReadableVersion, logger) { - const candidates = toolcache3.findAllVersions("CodeQL").filter(isGoodVersion).map((version) => ({ - folder: toolcache3.find("CodeQL", version), - version - })).filter(({ folder }) => fs9.existsSync(path9.join(folder, "pinned-version"))); - if (candidates.length === 1) { - const candidate = candidates[0]; - logger.debug( - `CodeQL tools version ${candidate.version} in toolcache overriding version ${humanReadableVersion}.` - ); - return { - codeqlFolder: candidate.folder, - sourceType: "toolcache", - toolsVersion: candidate.version - }; - } else if (candidates.length === 0) { - logger.debug( - "Did not find any candidate pinned versions of the CodeQL tools in the toolcache." - ); - } else { - logger.debug( - "Could not use CodeQL tools from the toolcache since more than one candidate pinned version was found in the toolcache." 
- ); - } - return void 0; -} -async function getCodeQLSource(toolsInput, defaultCliVersion, apiDetails, variant, tarSupportsZstd, features, logger) { - if (toolsInput && !isReservedToolsValue(toolsInput) && !toolsInput.startsWith("http")) { - logger.info(`Using CodeQL CLI from local path ${toolsInput}`); - const compressionMethod2 = inferCompressionMethod(toolsInput); - if (compressionMethod2 === void 0) { - throw new ConfigurationError( - `Could not infer compression method from path ${toolsInput}. Please specify a path ending in '.tar.gz' or '.tar.zst'.` - ); - } - return { - codeqlTarPath: toolsInput, - compressionMethod: compressionMethod2, - sourceType: "local", - toolsVersion: "local" - }; - } - let cliVersion2; - let tagName; - let url2; - const canForceNightlyWithFF = isDynamicWorkflow() || isInTestMode(); - const forceNightlyValueFF = await features.getValue("force_nightly" /* ForceNightly */); - const forceNightly = forceNightlyValueFF && canForceNightlyWithFF; - const nightlyRequestedByToolsInput = toolsInput !== void 0 && CODEQL_NIGHTLY_TOOLS_INPUTS.includes(toolsInput); - if (forceNightly || nightlyRequestedByToolsInput) { - if (forceNightly) { - logger.info( - `Using the latest CodeQL CLI nightly, as forced by the ${"force_nightly" /* ForceNightly */} feature flag.` - ); - addNoLanguageDiagnostic( - void 0, - makeDiagnostic( - "codeql-action/forced-nightly-cli", - "A nightly release of CodeQL was used", - { - markdownMessage: "GitHub configured this analysis to use a nightly release of CodeQL to allow you to preview changes from an upcoming release.\n\nNightly releases do not undergo the same validation as regular releases and may lead to analysis instability.\n\nIf use of a nightly CodeQL release for this analysis is unexpected, please contact GitHub support.", - visibility: { - cliSummaryTable: true, - statusPage: true, - telemetry: true - }, - severity: "note" - } - ) - ); - } else { - logger.info( - `Using the latest CodeQL CLI nightly, as requested by 'tools: ${toolsInput}'.` - ); - } - toolsInput = await getNightlyToolsUrl(logger); - } - const forceShippedTools = toolsInput && CODEQL_BUNDLE_VERSION_ALIAS.includes(toolsInput); - if (forceShippedTools) { - cliVersion2 = cliVersion; - tagName = bundleVersion; - logger.info( - `'tools: ${toolsInput}' was requested, so using CodeQL version ${cliVersion2}, the version shipped with the Action.` - ); - if (toolsInput === "latest") { - logger.warning( - "`tools: latest` has been renamed to `tools: linked`, but the old name is still supported. No action is required." 
- ); - } - } else if (toolsInput !== void 0 && toolsInput === CODEQL_TOOLCACHE_INPUT) { - let latestToolcacheVersion; - const allowToolcacheValueFF = await features.getValue( - "allow_toolcache_input" /* AllowToolcacheInput */ - ); - const allowToolcacheValue = allowToolcacheValueFF && (isDynamicWorkflow() || isInTestMode()); - if (allowToolcacheValue) { - logger.info( - `Attempting to use the latest CodeQL CLI version in the toolcache, as requested by 'tools: ${toolsInput}'.` - ); - latestToolcacheVersion = getLatestToolcacheVersion(logger); - if (latestToolcacheVersion) { - cliVersion2 = latestToolcacheVersion; - } - } - if (latestToolcacheVersion === void 0) { - if (allowToolcacheValue) { - logger.info( - `Found no CodeQL CLI in the toolcache, ignoring 'tools: ${toolsInput}'...` - ); - } else { - if (allowToolcacheValueFF) { - logger.warning( - `Ignoring 'tools: ${toolsInput}' because the workflow was not triggered dynamically.` - ); - } else { - logger.info( - `Ignoring 'tools: ${toolsInput}' because the feature is not enabled.` - ); - } - } - cliVersion2 = defaultCliVersion.cliVersion; - tagName = defaultCliVersion.tagName; - } - } else if (toolsInput !== void 0) { - tagName = tryGetTagNameFromUrl(toolsInput, logger); - url2 = toolsInput; - if (tagName) { - const bundleVersion3 = tryGetBundleVersionFromTagName(tagName, logger); - if (bundleVersion3 && semver8.valid(bundleVersion3)) { - cliVersion2 = convertToSemVer(bundleVersion3, logger); - } - } - } else { - cliVersion2 = defaultCliVersion.cliVersion; - tagName = defaultCliVersion.tagName; - } - const bundleVersion2 = tagName && tryGetBundleVersionFromTagName(tagName, logger); - const humanReadableVersion = cliVersion2 ?? (bundleVersion2 && convertToSemVer(bundleVersion2, logger)) ?? tagName ?? url2 ?? "unknown"; - logger.debug( - `Attempting to obtain CodeQL tools. CLI version: ${cliVersion2 ?? "unknown"}, bundle tag name: ${tagName ?? "unknown"}, URL: ${url2 ?? "unspecified"}.` - ); - let codeqlFolder; - if (cliVersion2) { - codeqlFolder = toolcache3.find("CodeQL", cliVersion2); - if (!codeqlFolder) { - logger.debug( - `Didn't find a version of the CodeQL tools in the toolcache with a version number exactly matching ${cliVersion2}.` - ); - const allVersions = toolcache3.findAllVersions("CodeQL"); - logger.debug( - `Found the following versions of the CodeQL tools in the toolcache: ${JSON.stringify( - allVersions - )}.` - ); - const candidateVersions = allVersions.filter( - (version) => version.startsWith(`${cliVersion2}-`) - ); - if (candidateVersions.length === 1) { - logger.debug( - `Exactly one version of the CodeQL tools starting with ${cliVersion2} found in the toolcache, using that.` - ); - codeqlFolder = toolcache3.find("CodeQL", candidateVersions[0]); - } else if (candidateVersions.length === 0) { - logger.debug( - `Didn't find any versions of the CodeQL tools starting with ${cliVersion2} in the toolcache. 
Trying next fallback method.` - ); - } else { - logger.warning( - `Found ${candidateVersions.length} versions of the CodeQL tools starting with ${cliVersion2} in the toolcache, but at most one was expected.` - ); - logger.debug("Trying next fallback method."); - } - } - } - if (!codeqlFolder && tagName) { - const fallbackVersion = await tryGetFallbackToolcacheVersion( - cliVersion2, - tagName, - logger - ); - if (fallbackVersion) { - codeqlFolder = toolcache3.find("CodeQL", fallbackVersion); - } else { - logger.debug( - `Could not determine a fallback toolcache version number for CodeQL tools version ${humanReadableVersion}.` - ); - } - } - if (codeqlFolder) { - logger.info( - `Found CodeQL tools version ${humanReadableVersion} in the toolcache.` - ); - } else { - logger.info( - `Did not find CodeQL tools version ${humanReadableVersion} in the toolcache.` - ); - } - if (codeqlFolder) { - if (cliVersion2) { - logger.info( - `Using CodeQL CLI version ${cliVersion2} from toolcache at ${codeqlFolder}` - ); - } else { - logger.info(`Using CodeQL CLI from toolcache at ${codeqlFolder}`); - } - return { - codeqlFolder, - sourceType: "toolcache", - toolsVersion: cliVersion2 ?? humanReadableVersion - }; - } - if (variant === "GitHub Enterprise Server" /* GHES */ && !forceShippedTools && !toolsInput) { - const result = await findOverridingToolsInCache( - humanReadableVersion, - logger - ); - if (result !== void 0) { - return result; - } - } - let compressionMethod; - if (!url2) { - compressionMethod = cliVersion2 !== void 0 && await useZstdBundle(cliVersion2, tarSupportsZstd) ? "zstd" : "gzip"; - url2 = await getCodeQLBundleDownloadURL( - tagName, - apiDetails, - compressionMethod, - logger - ); - } else { - const method = inferCompressionMethod(url2); - if (method === void 0) { - throw new ConfigurationError( - `Could not infer compression method from URL ${url2}. Please specify a URL ending in '.tar.gz' or '.tar.zst'.` - ); - } - compressionMethod = method; - } - if (cliVersion2) { - logger.info(`Using CodeQL CLI version ${cliVersion2} sourced from ${url2} .`); - } else { - logger.info(`Using CodeQL CLI sourced from ${url2} .`); - } - return { - bundleVersion: tagName && tryGetBundleVersionFromTagName(tagName, logger), - cliVersion: cliVersion2, - codeqlURL: url2, - compressionMethod, - sourceType: "download", - toolsVersion: cliVersion2 ?? humanReadableVersion - }; -} -async function tryGetFallbackToolcacheVersion(cliVersion2, tagName, logger) { - const bundleVersion2 = tryGetBundleVersionFromTagName(tagName, logger); - if (!bundleVersion2) { - return void 0; - } - const fallbackVersion = convertToSemVer(bundleVersion2, logger); - logger.debug( - `Computed a fallback toolcache version number of ${fallbackVersion} for CodeQL version ${cliVersion2 ?? tagName}.` - ); - return fallbackVersion; -} -var downloadCodeQL = async function(codeqlURL, compressionMethod, maybeBundleVersion, maybeCliVersion, apiDetails, tarVersion, tempDir, logger) { - const parsedCodeQLURL = new URL(codeqlURL); - const searchParams = new URLSearchParams(parsedCodeQLURL.search); - const headers = { - accept: "application/octet-stream" - }; - let authorization = void 0; - if (searchParams.has("token")) { - logger.debug("CodeQL tools URL contains an authorization token."); - } else { - authorization = getAuthorizationHeaderFor( - logger, - apiDetails, - codeqlURL - ); - } - const toolcacheInfo = getToolcacheDestinationInfo( - maybeBundleVersion, - maybeCliVersion, - logger - ); - const extractedBundlePath = toolcacheInfo?.path ?? 
getTempExtractionDir(tempDir); - const statusReport = await downloadAndExtract( - codeqlURL, - compressionMethod, - extractedBundlePath, - authorization, - { "User-Agent": "CodeQL Action", ...headers }, - tarVersion, - logger - ); - if (!toolcacheInfo) { - logger.debug( - `Could not cache CodeQL tools because we could not determine the bundle version from the URL ${codeqlURL}.` - ); - return { - codeqlFolder: extractedBundlePath, - statusReport, - toolsVersion: maybeCliVersion ?? "unknown" - }; - } - writeToolcacheMarkerFile(toolcacheInfo.path, logger); - return { - codeqlFolder: extractedBundlePath, - statusReport, - toolsVersion: maybeCliVersion ?? toolcacheInfo.version - }; -}; -function getToolcacheDestinationInfo(maybeBundleVersion, maybeCliVersion, logger) { - if (maybeBundleVersion) { - const version = getCanonicalToolcacheVersion( - maybeCliVersion, - maybeBundleVersion, - logger - ); - return { - path: getToolcacheDirectory(version), - version - }; - } - return void 0; -} -function getCanonicalToolcacheVersion(cliVersion2, bundleVersion2, logger) { - if (!cliVersion2?.match(/^[0-9]+\.[0-9]+\.[0-9]+$/)) { - return convertToSemVer(bundleVersion2, logger); - } - return cliVersion2; -} -async function setupCodeQLBundle(toolsInput, apiDetails, tempDir, variant, defaultCliVersion, features, logger) { - if (!await isBinaryAccessible("tar", logger)) { - throw new ConfigurationError( - "Could not find tar in PATH, so unable to extract CodeQL bundle." - ); - } - const zstdAvailability = await isZstdAvailable(logger); - const source = await getCodeQLSource( - toolsInput, - defaultCliVersion, - apiDetails, - variant, - zstdAvailability.available, - features, - logger - ); - let codeqlFolder; - let toolsVersion = source.toolsVersion; - let toolsDownloadStatusReport; - let toolsSource; - switch (source.sourceType) { - case "local": { - codeqlFolder = await extract( - source.codeqlTarPath, - getTempExtractionDir(tempDir), - source.compressionMethod, - zstdAvailability.version, - logger - ); - toolsSource = "LOCAL" /* Local */; - break; - } - case "toolcache": - codeqlFolder = source.codeqlFolder; - logger.debug(`CodeQL found in cache ${codeqlFolder}`); - toolsSource = "TOOLCACHE" /* Toolcache */; - break; - case "download": { - const result = await downloadCodeQL( - source.codeqlURL, - source.compressionMethod, - source.bundleVersion, - source.cliVersion, - apiDetails, - zstdAvailability.version, - tempDir, - logger - ); - toolsVersion = result.toolsVersion; - codeqlFolder = result.codeqlFolder; - toolsDownloadStatusReport = result.statusReport; - toolsSource = "DOWNLOAD" /* Download */; - break; - } - default: - assertNever(source); - } - return { - codeqlFolder, - toolsDownloadStatusReport, - toolsSource, - toolsVersion, - zstdAvailability - }; -} -async function useZstdBundle(cliVersion2, tarSupportsZstd) { - return ( - // In testing, gzip performs better than zstd on Windows. - process.platform !== "win32" && tarSupportsZstd && semver8.gte(cliVersion2, CODEQL_VERSION_ZSTD_BUNDLE) - ); -} -function getTempExtractionDir(tempDir) { - return path9.join(tempDir, v4_default()); -} -async function getNightlyToolsUrl(logger) { - const zstdAvailability = await isZstdAvailable(logger); - const compressionMethod = await useZstdBundle( - CODEQL_VERSION_ZSTD_BUNDLE, - zstdAvailability.available - ) ? 
"zstd" : "gzip"; - try { - const release2 = await getApiClient().rest.repos.listReleases({ - owner: CODEQL_NIGHTLIES_REPOSITORY_OWNER, - repo: CODEQL_NIGHTLIES_REPOSITORY_NAME, - per_page: 1, - page: 1, - prerelease: true - }); - const latestRelease = release2.data[0]; - if (!latestRelease) { - throw new Error("Could not find the latest nightly release."); - } - return `https://github.com/${CODEQL_NIGHTLIES_REPOSITORY_OWNER}/${CODEQL_NIGHTLIES_REPOSITORY_NAME}/releases/download/${latestRelease.tag_name}/${getCodeQLBundleName(compressionMethod)}`; - } catch (e) { - throw new Error( - `Failed to retrieve the latest nightly release: ${wrapError(e)}` - ); - } -} -function getLatestToolcacheVersion(logger) { - const allVersions = toolcache3.findAllVersions("CodeQL").sort((a, b) => semver8.compare(b, a)); - logger.debug( - `Found the following versions of the CodeQL tools in the toolcache: ${JSON.stringify( - allVersions - )}.` - ); - if (allVersions.length > 0) { - const latestToolcacheVersion = allVersions[0]; - logger.info( - `CLI version ${latestToolcacheVersion} is the latest version in the toolcache.` - ); - return latestToolcacheVersion; - } - return void 0; -} -function isReservedToolsValue(tools) { - return CODEQL_BUNDLE_VERSION_ALIAS.includes(tools) || CODEQL_NIGHTLY_TOOLS_INPUTS.includes(tools) || tools === CODEQL_TOOLCACHE_INPUT; -} - -// src/tracer-config.ts -async function shouldEnableIndirectTracing(codeql2, config) { - if (config.buildMode === "none" /* None */) { - return false; - } - if (config.buildMode === "autobuild" /* Autobuild */) { - return false; - } - return asyncSome(config.languages, (l) => codeql2.isTracedLanguage(l)); -} - -// src/codeql.ts -var cachedCodeQL = void 0; -var CODEQL_MINIMUM_VERSION = "2.17.6"; -var CODEQL_NEXT_MINIMUM_VERSION = "2.17.6"; -var GHES_VERSION_MOST_RECENTLY_DEPRECATED = "3.13"; -var GHES_MOST_RECENT_DEPRECATION_DATE = "2025-06-19"; -var EXTRACTION_DEBUG_MODE_VERBOSITY = "progress++"; -var CODEQL_VERSION_CACHE_CLEANUP = "2.17.1"; -async function setupCodeQL(toolsInput, apiDetails, tempDir, variant, defaultCliVersion, features, logger, checkVersion) { - try { - const { - codeqlFolder, - toolsDownloadStatusReport, - toolsSource, - toolsVersion, - zstdAvailability - } = await setupCodeQLBundle( - toolsInput, - apiDetails, - tempDir, - variant, - defaultCliVersion, - features, - logger - ); - logger.debug( - `Bundle download status report: ${JSON.stringify( - toolsDownloadStatusReport - )}` - ); - let codeqlCmd = path10.join(codeqlFolder, "codeql", "codeql"); - if (process.platform === "win32") { - codeqlCmd += ".exe"; - } else if (process.platform !== "linux" && process.platform !== "darwin") { - throw new ConfigurationError( - `Unsupported platform: ${process.platform}` - ); - } - cachedCodeQL = await getCodeQLForCmd(codeqlCmd, checkVersion); - return { - codeql: cachedCodeQL, - toolsDownloadStatusReport, - toolsSource, - toolsVersion, - zstdAvailability - }; - } catch (rawError) { - const e = wrapApiConfigurationError(rawError); - const ErrorClass = e instanceof ConfigurationError || e instanceof Error && e.message.includes("ENOSPC") ? ConfigurationError : Error; - throw new ErrorClass( - `Unable to download and extract CodeQL CLI: ${getErrorMessage(e)}${e instanceof Error && e.stack ? 
` - -Details: ${e.stack}` : ""}` - ); - } -} -async function getCodeQL(cmd) { - if (cachedCodeQL === void 0) { - cachedCodeQL = await getCodeQLForCmd(cmd, true); - } - return cachedCodeQL; -} -async function getCodeQLForCmd(cmd, checkVersion) { - const codeql2 = { - getPath() { - return cmd; - }, - async getVersion() { - let result = getCachedCodeQlVersion(); - if (result === void 0) { - const output = await runCli(cmd, ["version", "--format=json"], { - noStreamStdout: true - }); - try { - result = JSON.parse(output); - } catch { - throw Error( - `Invalid JSON output from \`version --format=json\`: ${output}` - ); - } - cacheCodeQlVersion(result); - } - return result; - }, - async printVersion() { - await runCli(cmd, ["version", "--format=json"]); - }, - async supportsFeature(feature) { - return isSupportedToolsFeature(await this.getVersion(), feature); - }, - async isTracedLanguage(language) { - const extractorPath = await this.resolveExtractor(language); - const tracingConfigPath = path10.join( - extractorPath, - "tools", - "tracing-config.lua" - ); - return fs10.existsSync(tracingConfigPath); - }, - async isScannedLanguage(language) { - return !await this.isTracedLanguage(language); - }, - async databaseInitCluster(config, sourceRoot, processName, qlconfigFile, logger) { - const extraArgs = config.languages.map( - (language) => `--language=${language}` - ); - if (await shouldEnableIndirectTracing(codeql2, config)) { - extraArgs.push("--begin-tracing"); - extraArgs.push(...await getTrapCachingExtractorConfigArgs(config)); - extraArgs.push(`--trace-process-name=${processName}`); - } - const codeScanningConfigFile = await writeCodeScanningConfigFile( - config, - logger - ); - const externalRepositoryToken = getOptionalInput( - "external-repository-token" - ); - extraArgs.push(`--codescanning-config=${codeScanningConfigFile}`); - if (externalRepositoryToken) { - extraArgs.push("--external-repository-token-stdin"); - } - if (config.buildMode !== void 0) { - extraArgs.push(`--build-mode=${config.buildMode}`); - } - if (qlconfigFile !== void 0) { - extraArgs.push(`--qlconfig-file=${qlconfigFile}`); - } - const overwriteFlag = isSupportedToolsFeature( - await this.getVersion(), - "forceOverwrite" /* ForceOverwrite */ - ) ? "--force-overwrite" : "--overwrite"; - const overlayDatabaseMode = config.overlayDatabaseMode; - if (overlayDatabaseMode === "overlay" /* Overlay */) { - const overlayChangesFile = await writeOverlayChangesFile( - config, - sourceRoot, - logger - ); - extraArgs.push(`--overlay-changes=${overlayChangesFile}`); - } else if (overlayDatabaseMode === "overlay-base" /* OverlayBase */) { - extraArgs.push("--overlay-base"); - } - const baselineFilesOptions = config.enableFileCoverageInformation ? [ - "--calculate-language-specific-baseline", - "--sublanguage-file-coverage" - ] : ["--no-calculate-baseline"]; - await runCli( - cmd, - [ - "database", - "init", - ...overlayDatabaseMode === "overlay" /* Overlay */ ? [] : [overwriteFlag], - "--db-cluster", - config.dbLocation, - `--source-root=${sourceRoot}`, - ...baselineFilesOptions, - "--extractor-include-aliases", - ...extraArgs, - ...getExtraOptionsFromEnv(["database", "init"], { - // Some user configs specify `--no-calculate-baseline` as an additional - // argument to `codeql database init`. Therefore ignore the baseline file - // options here to avoid specifying the same argument twice and erroring. 
- ignoringOptions: ["--overwrite", ...baselineFilesOptions] - }) - ], - { stdin: externalRepositoryToken } - ); - if (overlayDatabaseMode === "overlay-base" /* OverlayBase */) { - await writeBaseDatabaseOidsFile(config, sourceRoot); - } - }, - async runAutobuild(config, language) { - applyAutobuildAzurePipelinesTimeoutFix(); - const autobuildCmd = path10.join( - await this.resolveExtractor(language), - "tools", - process.platform === "win32" ? "autobuild.cmd" : "autobuild.sh" - ); - if (config.debugMode) { - process.env["CODEQL_VERBOSITY" /* CLI_VERBOSITY */] = process.env["CODEQL_VERBOSITY" /* CLI_VERBOSITY */] || EXTRACTION_DEBUG_MODE_VERBOSITY; - } - await runCli(autobuildCmd); - }, - async extractScannedLanguage(config, language) { - await runCli(cmd, [ - "database", - "trace-command", - "--index-traceless-dbs", - ...await getTrapCachingExtractorConfigArgsForLang(config, language), - ...getExtractionVerbosityArguments(config.debugMode), - ...getExtraOptionsFromEnv(["database", "trace-command"]), - getCodeQLDatabasePath(config, language) - ]); - }, - async extractUsingBuildMode(config, language) { - if (config.buildMode === "autobuild" /* Autobuild */) { - applyAutobuildAzurePipelinesTimeoutFix(); - } - try { - await runCli(cmd, [ - "database", - "trace-command", - "--use-build-mode", - "--working-dir", - process.cwd(), - ...await getTrapCachingExtractorConfigArgsForLang(config, language), - ...getExtractionVerbosityArguments(config.debugMode), - ...getExtraOptionsFromEnv(["database", "trace-command"]), - getCodeQLDatabasePath(config, language) - ]); - } catch (e) { - if (config.buildMode === "autobuild" /* Autobuild */) { - const prefix = `We were unable to automatically build your code. Please change the build mode for this language to manual and specify build steps for your project. See ${"https://docs.github.com/en/code-security/code-scanning/troubleshooting-code-scanning/automatic-build-failed" /* AUTOMATIC_BUILD_FAILED */} for more information.`; - throw new ConfigurationError(`${prefix} ${getErrorMessage(e)}`); - } else { - throw e; - } - } - }, - async finalizeDatabase(databasePath, threadsFlag, memoryFlag, enableDebugLogging) { - const args = [ - "database", - "finalize", - "--finalize-dataset", - threadsFlag, - memoryFlag, - ...getExtractionVerbosityArguments(enableDebugLogging), - ...getExtraOptionsFromEnv(["database", "finalize"]), - databasePath - ]; - await runCli(cmd, args); - }, - async resolveLanguages() { - const codeqlArgs = [ - "resolve", - "languages", - "--format=json", - ...getExtraOptionsFromEnv(["resolve", "languages"]) - ]; - const output = await runCli(cmd, codeqlArgs); - try { - return JSON.parse(output); - } catch (e) { - throw new Error( - `Unexpected output from codeql resolve languages: ${e}` - ); - } - }, - async betterResolveLanguages({ - filterToLanguagesWithQueries - } = { filterToLanguagesWithQueries: false }) { - const codeqlArgs = [ - "resolve", - "languages", - "--format=betterjson", - "--extractor-options-verbosity=4", - "--extractor-include-aliases", - ...filterToLanguagesWithQueries ? 
["--filter-to-languages-with-queries"] : [], - ...getExtraOptionsFromEnv(["resolve", "languages"]) - ]; - const output = await runCli(cmd, codeqlArgs); - try { - return JSON.parse(output); - } catch (e) { - throw new Error( - `Unexpected output from codeql resolve languages with --format=betterjson: ${e}` - ); - } - }, - async resolveBuildEnvironment(workingDir, language) { - const codeqlArgs = [ - "resolve", - "build-environment", - `--language=${language}`, - "--extractor-include-aliases", - ...getExtraOptionsFromEnv(["resolve", "build-environment"]) - ]; - if (workingDir !== void 0) { - codeqlArgs.push("--working-dir", workingDir); - } - const output = await runCli(cmd, codeqlArgs); - try { - return JSON.parse(output); - } catch (e) { - throw new Error( - `Unexpected output from codeql resolve build-environment: ${e} in -${output}` - ); - } - }, - async databaseRunQueries(databasePath, flags, queries = []) { - const codeqlArgs = [ - "database", - "run-queries", - ...flags, - databasePath, - "--min-disk-free=1024", - // Try to leave at least 1GB free - "-v", - ...queries, - ...getExtraOptionsFromEnv(["database", "run-queries"], { - ignoringOptions: ["--expect-discarded-cache"] - }) - ]; - await runCli(cmd, codeqlArgs); - }, - async databaseInterpretResults(databasePath, querySuitePaths, sarifFile, threadsFlag, verbosityFlag, sarifRunPropertyFlag, automationDetailsId, config, features) { - const shouldExportDiagnostics = await features.getValue( - "export_diagnostics_enabled" /* ExportDiagnosticsEnabled */, - this - ); - const codeqlArgs = [ - "database", - "interpret-results", - threadsFlag, - "--format=sarif-latest", - verbosityFlag, - `--output=${sarifFile}`, - "--print-diagnostics-summary", - "--print-metrics-summary", - "--sarif-add-baseline-file-info", - `--sarif-codescanning-config=${getGeneratedCodeScanningConfigPath( - config - )}`, - "--sarif-group-rules-by-pack", - "--sarif-include-query-help=always", - "--sublanguage-file-coverage", - ...await getJobRunUuidSarifOptions(this), - ...getExtraOptionsFromEnv(["database", "interpret-results"]) - ]; - if (sarifRunPropertyFlag !== void 0) { - codeqlArgs.push(sarifRunPropertyFlag); - } - if (automationDetailsId !== void 0) { - codeqlArgs.push("--sarif-category", automationDetailsId); - } - if (shouldExportDiagnostics) { - codeqlArgs.push("--sarif-include-diagnostics"); - } else { - codeqlArgs.push("--no-sarif-include-diagnostics"); - } - codeqlArgs.push(databasePath); - if (querySuitePaths) { - codeqlArgs.push(...querySuitePaths); - } - return await runCli(cmd, codeqlArgs, { - noStreamStdout: true - }); - }, - async databaseCleanupCluster(config, cleanupLevel) { - const cacheCleanupFlag = await codeQlVersionAtLeast( - this, - CODEQL_VERSION_CACHE_CLEANUP - ) ? "--cache-cleanup" : "--mode"; - for (const language of config.languages) { - const databasePath = getCodeQLDatabasePath(config, language); - const codeqlArgs = [ - "database", - "cleanup", - databasePath, - `${cacheCleanupFlag}=${cleanupLevel}`, - ...getExtraOptionsFromEnv(["database", "cleanup"]) - ]; - await runCli(cmd, codeqlArgs); - } - }, - async databaseBundle(databasePath, outputFilePath, databaseName, includeDiagnostics, alsoIncludeRelativePaths) { - const includeDiagnosticsArgs = includeDiagnostics ? 
["--include-diagnostics"] : []; - const args = [ - "database", - "bundle", - databasePath, - `--output=${outputFilePath}`, - `--name=${databaseName}`, - ...includeDiagnosticsArgs, - ...getExtraOptionsFromEnv(["database", "bundle"], { - ignoringOptions: includeDiagnosticsArgs - }) - ]; - if (await this.supportsFeature("bundleSupportsIncludeOption" /* BundleSupportsIncludeOption */)) { - args.push( - ...alsoIncludeRelativePaths.flatMap((relativePath) => [ - "--include", - relativePath - ]) - ); - } - await new toolrunner3.ToolRunner(cmd, args).exec(); - }, - async databaseExportDiagnostics(databasePath, sarifFile, automationDetailsId) { - const args = [ - "database", - "export-diagnostics", - `${databasePath}`, - "--db-cluster", - // Database is always a cluster for CodeQL versions that support diagnostics. - "--format=sarif-latest", - `--output=${sarifFile}`, - "--sarif-include-diagnostics", - // ExportDiagnosticsEnabled is always true if this command is run. - "-vvv", - ...getExtraOptionsFromEnv(["diagnostics", "export"]) - ]; - if (automationDetailsId !== void 0) { - args.push("--sarif-category", automationDetailsId); - } - await new toolrunner3.ToolRunner(cmd, args).exec(); - }, - async diagnosticsExport(sarifFile, automationDetailsId, config) { - const args = [ - "diagnostics", - "export", - "--format=sarif-latest", - `--output=${sarifFile}`, - `--sarif-codescanning-config=${getGeneratedCodeScanningConfigPath( - config - )}`, - ...getExtraOptionsFromEnv(["diagnostics", "export"]) - ]; - if (automationDetailsId !== void 0) { - args.push("--sarif-category", automationDetailsId); - } - await new toolrunner3.ToolRunner(cmd, args).exec(); - }, - async resolveExtractor(language) { - let extractorPath = ""; - await new toolrunner3.ToolRunner( - cmd, - [ - "resolve", - "extractor", - "--format=json", - `--language=${language}`, - "--extractor-include-aliases", - ...getExtraOptionsFromEnv(["resolve", "extractor"]) - ], - { - silent: true, - listeners: { - stdout: (data) => { - extractorPath += data.toString(); - }, - stderr: (data) => { - process.stderr.write(data); - } - } - } - ).exec(); - return JSON.parse(extractorPath); - }, - async resolveQueriesStartingPacks(queries) { - const codeqlArgs = [ - "resolve", - "queries", - "--format=startingpacks", - ...getExtraOptionsFromEnv(["resolve", "queries"]), - ...queries - ]; - const output = await runCli(cmd, codeqlArgs, { noStreamStdout: true }); - try { - return JSON.parse(output); - } catch (e) { - throw new Error( - `Unexpected output from codeql resolve queries --format=startingpacks: ${e}` - ); - } - }, - async resolveDatabase(databasePath) { - const codeqlArgs = [ - "resolve", - "database", - databasePath, - "--format=json", - ...getExtraOptionsFromEnv(["resolve", "database"]) - ]; - const output = await runCli(cmd, codeqlArgs, { noStreamStdout: true }); - try { - return JSON.parse(output); - } catch (e) { - throw new Error( - `Unexpected output from codeql resolve database --format=json: ${e}` - ); - } - }, - async mergeResults(sarifFiles, outputFile, { - mergeRunsFromEqualCategory = false - }) { - const args = [ - "github", - "merge-results", - "--output", - outputFile, - ...getExtraOptionsFromEnv(["github", "merge-results"]) - ]; - for (const sarifFile of sarifFiles) { - args.push("--sarif", sarifFile); - } - if (mergeRunsFromEqualCategory) { - args.push("--sarif-merge-runs-from-equal-category"); - } - await runCli(cmd, args); - } - }; - if (checkVersion && !await codeQlVersionAtLeast(codeql2, CODEQL_MINIMUM_VERSION)) { - throw new 
ConfigurationError( - `Expected a CodeQL CLI with version at least ${CODEQL_MINIMUM_VERSION} but got version ${(await codeql2.getVersion()).version}` - ); - } else if (checkVersion && process.env["CODEQL_ACTION_SUPPRESS_DEPRECATED_SOON_WARNING" /* SUPPRESS_DEPRECATED_SOON_WARNING */] !== "true" && !await codeQlVersionAtLeast(codeql2, CODEQL_NEXT_MINIMUM_VERSION)) { - const result = await codeql2.getVersion(); - core10.warning( - `CodeQL CLI version ${result.version} was discontinued on ${GHES_MOST_RECENT_DEPRECATION_DATE} alongside GitHub Enterprise Server ${GHES_VERSION_MOST_RECENTLY_DEPRECATED} and will not be supported by the next minor release of the CodeQL Action. Please update to CodeQL CLI version ${CODEQL_NEXT_MINIMUM_VERSION} or later. For instance, if you have specified a custom version of the CLI using the 'tools' input to the 'init' Action, you can remove this input to use the default version. - -Alternatively, if you want to continue using CodeQL CLI version ${result.version}, you can replace 'github/codeql-action/*@v${getActionVersion().split(".")[0]}' by 'github/codeql-action/*@v${getActionVersion()}' in your code scanning workflow to continue using this version of the CodeQL Action.` - ); - core10.exportVariable("CODEQL_ACTION_SUPPRESS_DEPRECATED_SOON_WARNING" /* SUPPRESS_DEPRECATED_SOON_WARNING */, "true"); - } - return codeql2; -} -function getExtraOptionsFromEnv(paths, { ignoringOptions } = {}) { - const options = getExtraOptionsEnvParam(); - return getExtraOptions(options, paths, []).filter( - (option) => !ignoringOptions?.includes(option) - ); -} -function asExtraOptions(options, pathInfo) { - if (options === void 0) { - return []; - } - if (!Array.isArray(options)) { - const msg = `The extra options for '${pathInfo.join( - "." - )}' ('${JSON.stringify(options)}') are not in an array.`; - throw new Error(msg); - } - return options.map((o) => { - const t = typeof o; - if (t !== "string" && t !== "number" && t !== "boolean") { - const msg = `The extra option for '${pathInfo.join( - "." - )}' ('${JSON.stringify(o)}') is not a primitive value.`; - throw new Error(msg); - } - return `${o}`; - }); -} -function getExtraOptions(options, paths, pathInfo) { - const all = asExtraOptions(options?.["*"], pathInfo.concat("*")); - const specific = paths.length === 0 ? 
asExtraOptions(options, pathInfo) : getExtraOptions( - options?.[paths[0]], - paths?.slice(1), - pathInfo.concat(paths[0]) - ); - return all.concat(specific); -} -async function runCli(cmd, args = [], opts = {}) { - try { - return await runTool(cmd, args, opts); - } catch (e) { - if (e instanceof CommandInvocationError) { - throw wrapCliConfigurationError(new CliError(e)); - } - throw e; - } -} -async function writeCodeScanningConfigFile(config, logger) { - const codeScanningConfigFile = getGeneratedCodeScanningConfigPath(config); - const augmentedConfig = appendExtraQueryExclusions( - config.extraQueryExclusions, - config.computedConfig - ); - logger.info( - `Writing augmented user configuration file to ${codeScanningConfigFile}` - ); - logger.startGroup("Augmented user configuration file contents"); - logger.info(dump(augmentedConfig)); - logger.endGroup(); - fs10.writeFileSync(codeScanningConfigFile, dump(augmentedConfig)); - return codeScanningConfigFile; -} -var TRAP_CACHE_SIZE_MB = 1024; -async function getTrapCachingExtractorConfigArgs(config) { - const result = []; - for (const language of config.languages) - result.push( - await getTrapCachingExtractorConfigArgsForLang(config, language) - ); - return result.flat(); -} -async function getTrapCachingExtractorConfigArgsForLang(config, language) { - const cacheDir = config.trapCaches[language]; - if (cacheDir === void 0) return []; - const write = await isAnalyzingDefaultBranch(); - return [ - `-O=${language}.trap.cache.dir=${cacheDir}`, - `-O=${language}.trap.cache.bound=${TRAP_CACHE_SIZE_MB}`, - `-O=${language}.trap.cache.write=${write}` - ]; -} -function getGeneratedCodeScanningConfigPath(config) { - return path10.resolve(config.tempDir, "user-config.yaml"); -} -function getExtractionVerbosityArguments(enableDebugLogging) { - return enableDebugLogging ? [`--verbosity=${EXTRACTION_DEBUG_MODE_VERBOSITY}`] : []; -} -function applyAutobuildAzurePipelinesTimeoutFix() { - const javaToolOptions = process.env["JAVA_TOOL_OPTIONS"] || ""; - process.env["JAVA_TOOL_OPTIONS"] = [ - ...javaToolOptions.split(/\s+/), - "-Dhttp.keepAlive=false", - "-Dmaven.wagon.http.pool=false" - ].join(" "); -} -async function getJobRunUuidSarifOptions(codeql2) { - const jobRunUuid = process.env["JOB_RUN_UUID" /* JOB_RUN_UUID */]; - return jobRunUuid && await codeql2.supportsFeature( - "databaseInterpretResultsSupportsSarifRunProperty" /* DatabaseInterpretResultsSupportsSarifRunProperty */ - ) ? [`--sarif-run-property=jobRunUuid=${jobRunUuid}`] : []; -} - -// src/status-report.ts -var os2 = __toESM(require("os")); -var core11 = __toESM(require_core()); -function isFirstPartyAnalysis(actionName) { - if (actionName !== "upload-sarif" /* UploadSarif */) { - return true; - } - return process.env["CODEQL_ACTION_INIT_HAS_RUN" /* INIT_ACTION_HAS_RUN */] === "true"; -} -function isThirdPartyAnalysis(actionName) { - return !isFirstPartyAnalysis(actionName); -} -function getActionsStatus(error3, otherFailureCause) { - if (error3 || otherFailureCause) { - return error3 instanceof ConfigurationError ? "user-error" : "failure"; - } else { - return "success"; - } -} -function setJobStatusIfUnsuccessful(actionStatus) { - if (actionStatus === "user-error") { - core11.exportVariable( - "CODEQL_ACTION_JOB_STATUS" /* JOB_STATUS */, - process.env["CODEQL_ACTION_JOB_STATUS" /* JOB_STATUS */] ?? 
"JOB_STATUS_CONFIGURATION_ERROR" /* ConfigErrorStatus */ - ); - } else if (actionStatus === "failure" || actionStatus === "aborted") { - core11.exportVariable( - "CODEQL_ACTION_JOB_STATUS" /* JOB_STATUS */, - process.env["CODEQL_ACTION_JOB_STATUS" /* JOB_STATUS */] ?? "JOB_STATUS_FAILURE" /* FailureStatus */ - ); - } -} -async function createStatusReportBase(actionName, status, actionStartedAt, config, diskInfo, logger, cause, exception2) { - try { - const commitOid = getOptionalInput("sha") || process.env["GITHUB_SHA"] || ""; - const ref = await getRef(); - const jobRunUUID = process.env["JOB_RUN_UUID" /* JOB_RUN_UUID */] || ""; - const workflowRunID = getWorkflowRunID(); - const workflowRunAttempt = getWorkflowRunAttempt(); - const workflowName = process.env["GITHUB_WORKFLOW"] || ""; - const jobName = process.env["GITHUB_JOB"] || ""; - const analysis_key = await getAnalysisKey(); - let workflowStartedAt = process.env["CODEQL_WORKFLOW_STARTED_AT" /* WORKFLOW_STARTED_AT */]; - if (workflowStartedAt === void 0) { - workflowStartedAt = actionStartedAt.toISOString(); - core11.exportVariable("CODEQL_WORKFLOW_STARTED_AT" /* WORKFLOW_STARTED_AT */, workflowStartedAt); - } - const runnerOs = getRequiredEnvParam("RUNNER_OS"); - const codeQlCliVersion = getCachedCodeQlVersion(); - const actionRef = process.env["GITHUB_ACTION_REF"] || ""; - const testingEnvironment = getTestingEnvironment(); - if (testingEnvironment) { - core11.exportVariable("CODEQL_ACTION_TESTING_ENVIRONMENT" /* TESTING_ENVIRONMENT */, testingEnvironment); - } - const isSteadyStateDefaultSetupRun = process.env["CODE_SCANNING_IS_STEADY_STATE_DEFAULT_SETUP"] === "true"; - const statusReport = { - action_name: actionName, - action_oid: "unknown", - // TODO decide if it's possible to fill this in - action_ref: actionRef, - action_started_at: actionStartedAt.toISOString(), - action_version: getActionVersion(), - analysis_kinds: config?.analysisKinds?.join(","), - analysis_key, - build_mode: config?.buildMode, - commit_oid: commitOid, - first_party_analysis: isFirstPartyAnalysis(actionName), - job_name: jobName, - job_run_uuid: jobRunUUID, - ref, - runner_os: runnerOs, - started_at: workflowStartedAt, - status, - steady_state_default_setup: isSteadyStateDefaultSetupRun, - testing_environment: testingEnvironment || "", - workflow_name: workflowName, - workflow_run_attempt: workflowRunAttempt, - workflow_run_id: workflowRunID - }; - try { - statusReport.actions_event_name = getWorkflowEventName(); - } catch (e) { - logger.warning( - `Could not determine the workflow event name: ${getErrorMessage(e)}.` - ); - } - if (config) { - statusReport.languages = config.languages?.join(","); - } - if (diskInfo) { - statusReport.runner_available_disk_space_bytes = diskInfo.numAvailableBytes; - statusReport.runner_total_disk_space_bytes = diskInfo.numTotalBytes; - } - if (cause) { - statusReport.cause = cause; - } - if (exception2) { - statusReport.exception = exception2; - } - if (status === "success" || status === "failure" || status === "aborted" || status === "user-error") { - statusReport.completed_at = (/* @__PURE__ */ new Date()).toISOString(); - } - const matrix = getRequiredInput("matrix"); - if (matrix) { - statusReport.matrix_vars = matrix; - } - if ("RUNNER_ARCH" in process.env) { - statusReport.runner_arch = process.env["RUNNER_ARCH"]; - } - if (!(runnerOs === "Linux" && isSelfHostedRunner())) { - statusReport.runner_os_release = os2.release(); - } - if (codeQlCliVersion !== void 0) { - statusReport.codeql_version = 
codeQlCliVersion.version; - } - const imageVersion = process.env["ImageVersion"]; - if (imageVersion) { - statusReport.runner_image_version = imageVersion; - } - return statusReport; - } catch (e) { - logger.warning( - `Failed to gather information for telemetry: ${getErrorMessage(e)}. Will skip sending status report.` - ); - if (isInTestMode()) { - throw e; - } - return void 0; - } -} -var OUT_OF_DATE_MSG = "CodeQL Action is out-of-date. Please upgrade to the latest version of `codeql-action`."; -var INCOMPATIBLE_MSG = "CodeQL Action version is incompatible with the API endpoint. Please update to a compatible version of `codeql-action`."; -async function sendStatusReport(statusReport) { - setJobStatusIfUnsuccessful(statusReport.status); - const statusReportJSON = JSON.stringify(statusReport); - core11.debug(`Sending status report: ${statusReportJSON}`); - if (isInTestMode()) { - core11.debug("In test mode. Status reports are not uploaded."); - return; - } - const nwo = getRepositoryNwo(); - const client = getApiClient(); - try { - await client.request( - "PUT /repos/:owner/:repo/code-scanning/analysis/status", - { - owner: nwo.owner, - repo: nwo.repo, - data: statusReportJSON - } - ); - } catch (e) { - const httpError = asHTTPError(e); - if (httpError !== void 0) { - switch (httpError.status) { - case 403: - if (getWorkflowEventName() === "push" && process.env["GITHUB_ACTOR"] === "dependabot[bot]") { - core11.warning( - `Workflows triggered by Dependabot on the "push" event run with read-only access. Uploading CodeQL results requires write access. To use CodeQL with Dependabot, please ensure you are using the "pull_request" event for this workflow and avoid triggering on the "push" event for Dependabot branches. See ${"https://docs.github.com/en/code-security/code-scanning/creating-an-advanced-setup-for-code-scanning/customizing-your-advanced-setup-for-code-scanning#scanning-on-push" /* SCANNING_ON_PUSH */} for more information on how to configure these events.` - ); - } else { - core11.warning( - `This run of the CodeQL Action does not have permission to access the CodeQL Action API endpoints. This could be because the Action is running on a pull request from a fork. If not, please ensure the workflow has at least the 'security-events: read' permission. Details: ${httpError.message}` - ); - } - return; - case 404: - core11.warning(httpError.message); - return; - case 422: - if (getRequiredEnvParam("GITHUB_SERVER_URL") !== GITHUB_DOTCOM_URL) { - core11.debug(INCOMPATIBLE_MSG); - } else { - core11.debug(OUT_OF_DATE_MSG); - } - return; - } - } - core11.warning( - `An unexpected error occurred when sending a status report: ${getErrorMessage( - e - )}` - ); - } -} -async function sendUnhandledErrorStatusReport(actionName, actionStartedAt, error3, logger) { - try { - const statusReport = await createStatusReportBase( - actionName, - "failure", - actionStartedAt, - void 0, - void 0, - logger, - `Unhandled CodeQL Action error: ${getErrorMessage(error3)}`, - error3 instanceof Error ? 
error3.stack : void 0 - ); - if (statusReport !== void 0) { - await sendStatusReport(statusReport); - } - } catch (e) { - logger.warning( - `Failed to send the unhandled error status report: ${getErrorMessage(e)}.` - ); - if (isInTestMode()) { - throw e; - } + if (isInTestMode()) { + throw e; + } } } @@ -109590,7 +107766,7 @@ var core12 = __toESM(require_core()); var jsonschema2 = __toESM(require_lib2()); // src/fingerprints.ts -var fs11 = __toESM(require("fs")); +var fs7 = __toESM(require("fs")); var import_path2 = __toESM(require("path")); // node_modules/long/index.js @@ -109919,805 +108095,2631 @@ try { ).exports; } catch { } -function Long(low, high, unsigned) { - this.low = low | 0; - this.high = high | 0; - this.unsigned = !!unsigned; +function Long(low, high, unsigned) { + this.low = low | 0; + this.high = high | 0; + this.unsigned = !!unsigned; +} +Long.prototype.__isLong__; +Object.defineProperty(Long.prototype, "__isLong__", { value: true }); +function isLong(obj) { + return (obj && obj["__isLong__"]) === true; +} +function ctz32(value) { + var c = Math.clz32(value & -value); + return value ? 31 - c : c; +} +Long.isLong = isLong; +var INT_CACHE = {}; +var UINT_CACHE = {}; +function fromInt(value, unsigned) { + var obj, cachedObj, cache; + if (unsigned) { + value >>>= 0; + if (cache = 0 <= value && value < 256) { + cachedObj = UINT_CACHE[value]; + if (cachedObj) return cachedObj; + } + obj = fromBits(value, 0, true); + if (cache) UINT_CACHE[value] = obj; + return obj; + } else { + value |= 0; + if (cache = -128 <= value && value < 128) { + cachedObj = INT_CACHE[value]; + if (cachedObj) return cachedObj; + } + obj = fromBits(value, value < 0 ? -1 : 0, false); + if (cache) INT_CACHE[value] = obj; + return obj; + } +} +Long.fromInt = fromInt; +function fromNumber(value, unsigned) { + if (isNaN(value)) return unsigned ? UZERO : ZERO; + if (unsigned) { + if (value < 0) return UZERO; + if (value >= TWO_PWR_64_DBL) return MAX_UNSIGNED_VALUE; + } else { + if (value <= -TWO_PWR_63_DBL) return MIN_VALUE; + if (value + 1 >= TWO_PWR_63_DBL) return MAX_VALUE; + } + if (value < 0) return fromNumber(-value, unsigned).neg(); + return fromBits( + value % TWO_PWR_32_DBL | 0, + value / TWO_PWR_32_DBL | 0, + unsigned + ); +} +Long.fromNumber = fromNumber; +function fromBits(lowBits, highBits, unsigned) { + return new Long(lowBits, highBits, unsigned); +} +Long.fromBits = fromBits; +var pow_dbl = Math.pow; +function fromString(str2, unsigned, radix) { + if (str2.length === 0) throw Error("empty string"); + if (typeof unsigned === "number") { + radix = unsigned; + unsigned = false; + } else { + unsigned = !!unsigned; + } + if (str2 === "NaN" || str2 === "Infinity" || str2 === "+Infinity" || str2 === "-Infinity") + return unsigned ? 
UZERO : ZERO; + radix = radix || 10; + if (radix < 2 || 36 < radix) throw RangeError("radix"); + var p; + if ((p = str2.indexOf("-")) > 0) throw Error("interior hyphen"); + else if (p === 0) { + return fromString(str2.substring(1), unsigned, radix).neg(); + } + var radixToPower = fromNumber(pow_dbl(radix, 8)); + var result = ZERO; + for (var i = 0; i < str2.length; i += 8) { + var size = Math.min(8, str2.length - i), value = parseInt(str2.substring(i, i + size), radix); + if (size < 8) { + var power = fromNumber(pow_dbl(radix, size)); + result = result.mul(power).add(fromNumber(value)); + } else { + result = result.mul(radixToPower); + result = result.add(fromNumber(value)); + } + } + result.unsigned = unsigned; + return result; +} +Long.fromString = fromString; +function fromValue(val, unsigned) { + if (typeof val === "number") return fromNumber(val, unsigned); + if (typeof val === "string") return fromString(val, unsigned); + return fromBits( + val.low, + val.high, + typeof unsigned === "boolean" ? unsigned : val.unsigned + ); +} +Long.fromValue = fromValue; +var TWO_PWR_16_DBL = 1 << 16; +var TWO_PWR_24_DBL = 1 << 24; +var TWO_PWR_32_DBL = TWO_PWR_16_DBL * TWO_PWR_16_DBL; +var TWO_PWR_64_DBL = TWO_PWR_32_DBL * TWO_PWR_32_DBL; +var TWO_PWR_63_DBL = TWO_PWR_64_DBL / 2; +var TWO_PWR_24 = fromInt(TWO_PWR_24_DBL); +var ZERO = fromInt(0); +Long.ZERO = ZERO; +var UZERO = fromInt(0, true); +Long.UZERO = UZERO; +var ONE = fromInt(1); +Long.ONE = ONE; +var UONE = fromInt(1, true); +Long.UONE = UONE; +var NEG_ONE = fromInt(-1); +Long.NEG_ONE = NEG_ONE; +var MAX_VALUE = fromBits(4294967295 | 0, 2147483647 | 0, false); +Long.MAX_VALUE = MAX_VALUE; +var MAX_UNSIGNED_VALUE = fromBits(4294967295 | 0, 4294967295 | 0, true); +Long.MAX_UNSIGNED_VALUE = MAX_UNSIGNED_VALUE; +var MIN_VALUE = fromBits(0, 2147483648 | 0, false); +Long.MIN_VALUE = MIN_VALUE; +var LongPrototype = Long.prototype; +LongPrototype.toInt = function toInt() { + return this.unsigned ? this.low >>> 0 : this.low; +}; +LongPrototype.toNumber = function toNumber() { + if (this.unsigned) + return (this.high >>> 0) * TWO_PWR_32_DBL + (this.low >>> 0); + return this.high * TWO_PWR_32_DBL + (this.low >>> 0); +}; +LongPrototype.toString = function toString2(radix) { + radix = radix || 10; + if (radix < 2 || 36 < radix) throw RangeError("radix"); + if (this.isZero()) return "0"; + if (this.isNegative()) { + if (this.eq(MIN_VALUE)) { + var radixLong = fromNumber(radix), div = this.div(radixLong), rem1 = div.mul(radixLong).sub(this); + return div.toString(radix) + rem1.toInt().toString(radix); + } else return "-" + this.neg().toString(radix); + } + var radixToPower = fromNumber(pow_dbl(radix, 6), this.unsigned), rem = this; + var result = ""; + while (true) { + var remDiv = rem.div(radixToPower), intval = rem.sub(remDiv.mul(radixToPower)).toInt() >>> 0, digits = intval.toString(radix); + rem = remDiv; + if (rem.isZero()) return digits + result; + else { + while (digits.length < 6) digits = "0" + digits; + result = "" + digits + result; + } + } +}; +LongPrototype.getHighBits = function getHighBits() { + return this.high; +}; +LongPrototype.getHighBitsUnsigned = function getHighBitsUnsigned() { + return this.high >>> 0; +}; +LongPrototype.getLowBits = function getLowBits() { + return this.low; +}; +LongPrototype.getLowBitsUnsigned = function getLowBitsUnsigned() { + return this.low >>> 0; +}; +LongPrototype.getNumBitsAbs = function getNumBitsAbs() { + if (this.isNegative()) + return this.eq(MIN_VALUE) ? 
64 : this.neg().getNumBitsAbs(); + var val = this.high != 0 ? this.high : this.low; + for (var bit = 31; bit > 0; bit--) if ((val & 1 << bit) != 0) break; + return this.high != 0 ? bit + 33 : bit + 1; +}; +LongPrototype.isSafeInteger = function isSafeInteger() { + var top11Bits = this.high >> 21; + if (!top11Bits) return true; + if (this.unsigned) return false; + return top11Bits === -1 && !(this.low === 0 && this.high === -2097152); +}; +LongPrototype.isZero = function isZero() { + return this.high === 0 && this.low === 0; +}; +LongPrototype.eqz = LongPrototype.isZero; +LongPrototype.isNegative = function isNegative() { + return !this.unsigned && this.high < 0; +}; +LongPrototype.isPositive = function isPositive() { + return this.unsigned || this.high >= 0; +}; +LongPrototype.isOdd = function isOdd() { + return (this.low & 1) === 1; +}; +LongPrototype.isEven = function isEven() { + return (this.low & 1) === 0; +}; +LongPrototype.equals = function equals(other) { + if (!isLong(other)) other = fromValue(other); + if (this.unsigned !== other.unsigned && this.high >>> 31 === 1 && other.high >>> 31 === 1) + return false; + return this.high === other.high && this.low === other.low; +}; +LongPrototype.eq = LongPrototype.equals; +LongPrototype.notEquals = function notEquals(other) { + return !this.eq( + /* validates */ + other + ); +}; +LongPrototype.neq = LongPrototype.notEquals; +LongPrototype.ne = LongPrototype.notEquals; +LongPrototype.lessThan = function lessThan(other) { + return this.comp( + /* validates */ + other + ) < 0; +}; +LongPrototype.lt = LongPrototype.lessThan; +LongPrototype.lessThanOrEqual = function lessThanOrEqual(other) { + return this.comp( + /* validates */ + other + ) <= 0; +}; +LongPrototype.lte = LongPrototype.lessThanOrEqual; +LongPrototype.le = LongPrototype.lessThanOrEqual; +LongPrototype.greaterThan = function greaterThan(other) { + return this.comp( + /* validates */ + other + ) > 0; +}; +LongPrototype.gt = LongPrototype.greaterThan; +LongPrototype.greaterThanOrEqual = function greaterThanOrEqual(other) { + return this.comp( + /* validates */ + other + ) >= 0; +}; +LongPrototype.gte = LongPrototype.greaterThanOrEqual; +LongPrototype.ge = LongPrototype.greaterThanOrEqual; +LongPrototype.compare = function compare(other) { + if (!isLong(other)) other = fromValue(other); + if (this.eq(other)) return 0; + var thisNeg = this.isNegative(), otherNeg = other.isNegative(); + if (thisNeg && !otherNeg) return -1; + if (!thisNeg && otherNeg) return 1; + if (!this.unsigned) return this.sub(other).isNegative() ? -1 : 1; + return other.high >>> 0 > this.high >>> 0 || other.high === this.high && other.low >>> 0 > this.low >>> 0 ? 
-1 : 1; +}; +LongPrototype.comp = LongPrototype.compare; +LongPrototype.negate = function negate() { + if (!this.unsigned && this.eq(MIN_VALUE)) return MIN_VALUE; + return this.not().add(ONE); +}; +LongPrototype.neg = LongPrototype.negate; +LongPrototype.add = function add(addend) { + if (!isLong(addend)) addend = fromValue(addend); + var a48 = this.high >>> 16; + var a32 = this.high & 65535; + var a16 = this.low >>> 16; + var a00 = this.low & 65535; + var b48 = addend.high >>> 16; + var b32 = addend.high & 65535; + var b16 = addend.low >>> 16; + var b00 = addend.low & 65535; + var c48 = 0, c32 = 0, c16 = 0, c00 = 0; + c00 += a00 + b00; + c16 += c00 >>> 16; + c00 &= 65535; + c16 += a16 + b16; + c32 += c16 >>> 16; + c16 &= 65535; + c32 += a32 + b32; + c48 += c32 >>> 16; + c32 &= 65535; + c48 += a48 + b48; + c48 &= 65535; + return fromBits(c16 << 16 | c00, c48 << 16 | c32, this.unsigned); +}; +LongPrototype.subtract = function subtract(subtrahend) { + if (!isLong(subtrahend)) subtrahend = fromValue(subtrahend); + return this.add(subtrahend.neg()); +}; +LongPrototype.sub = LongPrototype.subtract; +LongPrototype.multiply = function multiply(multiplier) { + if (this.isZero()) return this; + if (!isLong(multiplier)) multiplier = fromValue(multiplier); + if (wasm) { + var low = wasm["mul"](this.low, this.high, multiplier.low, multiplier.high); + return fromBits(low, wasm["get_high"](), this.unsigned); + } + if (multiplier.isZero()) return this.unsigned ? UZERO : ZERO; + if (this.eq(MIN_VALUE)) return multiplier.isOdd() ? MIN_VALUE : ZERO; + if (multiplier.eq(MIN_VALUE)) return this.isOdd() ? MIN_VALUE : ZERO; + if (this.isNegative()) { + if (multiplier.isNegative()) return this.neg().mul(multiplier.neg()); + else return this.neg().mul(multiplier).neg(); + } else if (multiplier.isNegative()) return this.mul(multiplier.neg()).neg(); + if (this.lt(TWO_PWR_24) && multiplier.lt(TWO_PWR_24)) + return fromNumber(this.toNumber() * multiplier.toNumber(), this.unsigned); + var a48 = this.high >>> 16; + var a32 = this.high & 65535; + var a16 = this.low >>> 16; + var a00 = this.low & 65535; + var b48 = multiplier.high >>> 16; + var b32 = multiplier.high & 65535; + var b16 = multiplier.low >>> 16; + var b00 = multiplier.low & 65535; + var c48 = 0, c32 = 0, c16 = 0, c00 = 0; + c00 += a00 * b00; + c16 += c00 >>> 16; + c00 &= 65535; + c16 += a16 * b00; + c32 += c16 >>> 16; + c16 &= 65535; + c16 += a00 * b16; + c32 += c16 >>> 16; + c16 &= 65535; + c32 += a32 * b00; + c48 += c32 >>> 16; + c32 &= 65535; + c32 += a16 * b16; + c48 += c32 >>> 16; + c32 &= 65535; + c32 += a00 * b32; + c48 += c32 >>> 16; + c32 &= 65535; + c48 += a48 * b00 + a32 * b16 + a16 * b32 + a00 * b48; + c48 &= 65535; + return fromBits(c16 << 16 | c00, c48 << 16 | c32, this.unsigned); +}; +LongPrototype.mul = LongPrototype.multiply; +LongPrototype.divide = function divide(divisor) { + if (!isLong(divisor)) divisor = fromValue(divisor); + if (divisor.isZero()) throw Error("division by zero"); + if (wasm) { + if (!this.unsigned && this.high === -2147483648 && divisor.low === -1 && divisor.high === -1) { + return this; + } + var low = (this.unsigned ? wasm["div_u"] : wasm["div_s"])( + this.low, + this.high, + divisor.low, + divisor.high + ); + return fromBits(low, wasm["get_high"](), this.unsigned); + } + if (this.isZero()) return this.unsigned ? 
UZERO : ZERO; + var approx, rem, res; + if (!this.unsigned) { + if (this.eq(MIN_VALUE)) { + if (divisor.eq(ONE) || divisor.eq(NEG_ONE)) + return MIN_VALUE; + else if (divisor.eq(MIN_VALUE)) return ONE; + else { + var halfThis = this.shr(1); + approx = halfThis.div(divisor).shl(1); + if (approx.eq(ZERO)) { + return divisor.isNegative() ? ONE : NEG_ONE; + } else { + rem = this.sub(divisor.mul(approx)); + res = approx.add(rem.div(divisor)); + return res; + } + } + } else if (divisor.eq(MIN_VALUE)) return this.unsigned ? UZERO : ZERO; + if (this.isNegative()) { + if (divisor.isNegative()) return this.neg().div(divisor.neg()); + return this.neg().div(divisor).neg(); + } else if (divisor.isNegative()) return this.div(divisor.neg()).neg(); + res = ZERO; + } else { + if (!divisor.unsigned) divisor = divisor.toUnsigned(); + if (divisor.gt(this)) return UZERO; + if (divisor.gt(this.shru(1))) + return UONE; + res = UZERO; + } + rem = this; + while (rem.gte(divisor)) { + approx = Math.max(1, Math.floor(rem.toNumber() / divisor.toNumber())); + var log2 = Math.ceil(Math.log(approx) / Math.LN2), delta = log2 <= 48 ? 1 : pow_dbl(2, log2 - 48), approxRes = fromNumber(approx), approxRem = approxRes.mul(divisor); + while (approxRem.isNegative() || approxRem.gt(rem)) { + approx -= delta; + approxRes = fromNumber(approx, this.unsigned); + approxRem = approxRes.mul(divisor); + } + if (approxRes.isZero()) approxRes = ONE; + res = res.add(approxRes); + rem = rem.sub(approxRem); + } + return res; +}; +LongPrototype.div = LongPrototype.divide; +LongPrototype.modulo = function modulo(divisor) { + if (!isLong(divisor)) divisor = fromValue(divisor); + if (wasm) { + var low = (this.unsigned ? wasm["rem_u"] : wasm["rem_s"])( + this.low, + this.high, + divisor.low, + divisor.high + ); + return fromBits(low, wasm["get_high"](), this.unsigned); + } + return this.sub(this.div(divisor).mul(divisor)); +}; +LongPrototype.mod = LongPrototype.modulo; +LongPrototype.rem = LongPrototype.modulo; +LongPrototype.not = function not() { + return fromBits(~this.low, ~this.high, this.unsigned); +}; +LongPrototype.countLeadingZeros = function countLeadingZeros() { + return this.high ? Math.clz32(this.high) : Math.clz32(this.low) + 32; +}; +LongPrototype.clz = LongPrototype.countLeadingZeros; +LongPrototype.countTrailingZeros = function countTrailingZeros() { + return this.low ? 
ctz32(this.low) : ctz32(this.high) + 32; +}; +LongPrototype.ctz = LongPrototype.countTrailingZeros; +LongPrototype.and = function and(other) { + if (!isLong(other)) other = fromValue(other); + return fromBits(this.low & other.low, this.high & other.high, this.unsigned); +}; +LongPrototype.or = function or(other) { + if (!isLong(other)) other = fromValue(other); + return fromBits(this.low | other.low, this.high | other.high, this.unsigned); +}; +LongPrototype.xor = function xor(other) { + if (!isLong(other)) other = fromValue(other); + return fromBits(this.low ^ other.low, this.high ^ other.high, this.unsigned); +}; +LongPrototype.shiftLeft = function shiftLeft(numBits) { + if (isLong(numBits)) numBits = numBits.toInt(); + if ((numBits &= 63) === 0) return this; + else if (numBits < 32) + return fromBits( + this.low << numBits, + this.high << numBits | this.low >>> 32 - numBits, + this.unsigned + ); + else return fromBits(0, this.low << numBits - 32, this.unsigned); +}; +LongPrototype.shl = LongPrototype.shiftLeft; +LongPrototype.shiftRight = function shiftRight(numBits) { + if (isLong(numBits)) numBits = numBits.toInt(); + if ((numBits &= 63) === 0) return this; + else if (numBits < 32) + return fromBits( + this.low >>> numBits | this.high << 32 - numBits, + this.high >> numBits, + this.unsigned + ); + else + return fromBits( + this.high >> numBits - 32, + this.high >= 0 ? 0 : -1, + this.unsigned + ); +}; +LongPrototype.shr = LongPrototype.shiftRight; +LongPrototype.shiftRightUnsigned = function shiftRightUnsigned(numBits) { + if (isLong(numBits)) numBits = numBits.toInt(); + if ((numBits &= 63) === 0) return this; + if (numBits < 32) + return fromBits( + this.low >>> numBits | this.high << 32 - numBits, + this.high >>> numBits, + this.unsigned + ); + if (numBits === 32) return fromBits(this.high, 0, this.unsigned); + return fromBits(this.high >>> numBits - 32, 0, this.unsigned); +}; +LongPrototype.shru = LongPrototype.shiftRightUnsigned; +LongPrototype.shr_u = LongPrototype.shiftRightUnsigned; +LongPrototype.rotateLeft = function rotateLeft(numBits) { + var b; + if (isLong(numBits)) numBits = numBits.toInt(); + if ((numBits &= 63) === 0) return this; + if (numBits === 32) return fromBits(this.high, this.low, this.unsigned); + if (numBits < 32) { + b = 32 - numBits; + return fromBits( + this.low << numBits | this.high >>> b, + this.high << numBits | this.low >>> b, + this.unsigned + ); + } + numBits -= 32; + b = 32 - numBits; + return fromBits( + this.high << numBits | this.low >>> b, + this.low << numBits | this.high >>> b, + this.unsigned + ); +}; +LongPrototype.rotl = LongPrototype.rotateLeft; +LongPrototype.rotateRight = function rotateRight(numBits) { + var b; + if (isLong(numBits)) numBits = numBits.toInt(); + if ((numBits &= 63) === 0) return this; + if (numBits === 32) return fromBits(this.high, this.low, this.unsigned); + if (numBits < 32) { + b = 32 - numBits; + return fromBits( + this.high << b | this.low >>> numBits, + this.low << b | this.high >>> numBits, + this.unsigned + ); + } + numBits -= 32; + b = 32 - numBits; + return fromBits( + this.low << b | this.high >>> numBits, + this.high << b | this.low >>> numBits, + this.unsigned + ); +}; +LongPrototype.rotr = LongPrototype.rotateRight; +LongPrototype.toSigned = function toSigned() { + if (!this.unsigned) return this; + return fromBits(this.low, this.high, false); +}; +LongPrototype.toUnsigned = function toUnsigned() { + if (this.unsigned) return this; + return fromBits(this.low, this.high, true); +}; +LongPrototype.toBytes 
= function toBytes(le) { + return le ? this.toBytesLE() : this.toBytesBE(); +}; +LongPrototype.toBytesLE = function toBytesLE() { + var hi = this.high, lo = this.low; + return [ + lo & 255, + lo >>> 8 & 255, + lo >>> 16 & 255, + lo >>> 24, + hi & 255, + hi >>> 8 & 255, + hi >>> 16 & 255, + hi >>> 24 + ]; +}; +LongPrototype.toBytesBE = function toBytesBE() { + var hi = this.high, lo = this.low; + return [ + hi >>> 24, + hi >>> 16 & 255, + hi >>> 8 & 255, + hi & 255, + lo >>> 24, + lo >>> 16 & 255, + lo >>> 8 & 255, + lo & 255 + ]; +}; +Long.fromBytes = function fromBytes(bytes, unsigned, le) { + return le ? Long.fromBytesLE(bytes, unsigned) : Long.fromBytesBE(bytes, unsigned); +}; +Long.fromBytesLE = function fromBytesLE(bytes, unsigned) { + return new Long( + bytes[0] | bytes[1] << 8 | bytes[2] << 16 | bytes[3] << 24, + bytes[4] | bytes[5] << 8 | bytes[6] << 16 | bytes[7] << 24, + unsigned + ); +}; +Long.fromBytesBE = function fromBytesBE(bytes, unsigned) { + return new Long( + bytes[4] << 24 | bytes[5] << 16 | bytes[6] << 8 | bytes[7], + bytes[0] << 24 | bytes[1] << 16 | bytes[2] << 8 | bytes[3], + unsigned + ); +}; +if (typeof BigInt === "function") { + Long.fromBigInt = function fromBigInt(value, unsigned) { + var lowBits = Number(BigInt.asIntN(32, value)); + var highBits = Number(BigInt.asIntN(32, value >> BigInt(32))); + return fromBits(lowBits, highBits, unsigned); + }; + Long.fromValue = function fromValueWithBigInt(value, unsigned) { + if (typeof value === "bigint") return Long.fromBigInt(value, unsigned); + return fromValue(value, unsigned); + }; + LongPrototype.toBigInt = function toBigInt() { + var lowBigInt = BigInt(this.low >>> 0); + var highBigInt = BigInt(this.unsigned ? this.high >>> 0 : this.high); + return highBigInt << BigInt(32) | lowBigInt; + }; +} +var long_default = Long; + +// src/fingerprints.ts +var tab = " ".charCodeAt(0); +var space = " ".charCodeAt(0); +var lf = "\n".charCodeAt(0); +var cr = "\r".charCodeAt(0); +var EOF = 65535; +var BLOCK_SIZE = 100; +var MOD = long_default.fromInt(37); +function computeFirstMod() { + let firstMod = long_default.ONE; + for (let i = 0; i < BLOCK_SIZE; i++) { + firstMod = firstMod.multiply(MOD); + } + return firstMod; +} +async function hash(callback, filepath) { + const window2 = Array(BLOCK_SIZE).fill(0); + const lineNumbers = Array(BLOCK_SIZE).fill(-1); + let hashRaw = long_default.ZERO; + const firstMod = computeFirstMod(); + let index = 0; + let lineNumber = 0; + let lineStart = true; + let prevCR = false; + const hashCounts = {}; + const outputHash = function() { + const hashValue = hashRaw.toUnsigned().toString(16); + if (!hashCounts[hashValue]) { + hashCounts[hashValue] = 0; + } + hashCounts[hashValue]++; + callback(lineNumbers[index], `${hashValue}:${hashCounts[hashValue]}`); + lineNumbers[index] = -1; + }; + const updateHash = function(current) { + const begin = window2[index]; + window2[index] = current; + hashRaw = MOD.multiply(hashRaw).add(long_default.fromInt(current)).subtract(firstMod.multiply(long_default.fromInt(begin))); + index = (index + 1) % BLOCK_SIZE; + }; + const processCharacter = function(current) { + if (current === space || current === tab || prevCR && current === lf) { + prevCR = false; + return; + } + if (current === cr) { + current = lf; + prevCR = true; + } else { + prevCR = false; + } + if (lineNumbers[index] !== -1) { + outputHash(); + } + if (lineStart) { + lineStart = false; + lineNumber++; + lineNumbers[index] = lineNumber; + } + if (current === lf) { + lineStart = true; + } + 
updateHash(current); + }; + const readStream = fs7.createReadStream(filepath, "utf8"); + for await (const data of readStream) { + for (let i = 0; i < data.length; ++i) { + processCharacter(data.charCodeAt(i)); + } + } + processCharacter(EOF); + for (let i = 0; i < BLOCK_SIZE; i++) { + if (lineNumbers[index] !== -1) { + outputHash(); + } + updateHash(0); + } +} +function locationUpdateCallback(result, location, logger) { + let locationStartLine = location.physicalLocation?.region?.startLine; + if (locationStartLine === void 0) { + locationStartLine = 1; + } + return function(lineNumber, hashValue) { + if (locationStartLine !== lineNumber) { + return; + } + if (!result.partialFingerprints) { + result.partialFingerprints = {}; + } + const existingFingerprint = result.partialFingerprints.primaryLocationLineHash; + if (!existingFingerprint) { + result.partialFingerprints.primaryLocationLineHash = hashValue; + } else if (existingFingerprint !== hashValue) { + logger.warning( + `Calculated fingerprint of ${hashValue} for file ${location.physicalLocation.artifactLocation.uri} line ${lineNumber}, but found existing inconsistent fingerprint value ${existingFingerprint}` + ); + } + }; +} +function resolveUriToFile(location, artifacts, sourceRoot, logger) { + if (!location.uri && location.index !== void 0) { + if (typeof location.index !== "number" || location.index < 0 || location.index >= artifacts.length || typeof artifacts[location.index].location !== "object") { + logger.debug(`Ignoring location as index "${location.index}" is invalid`); + return void 0; + } + location = artifacts[location.index].location; + } + if (typeof location.uri !== "string") { + logger.debug(`Ignoring location as URI "${location.uri}" is invalid`); + return void 0; + } + let uri; + try { + uri = decodeURIComponent(location.uri); + } catch { + logger.debug(`Ignoring location as URI "${location.uri}" is invalid`); + return void 0; + } + const fileUriPrefix = "file://"; + if (uri.startsWith(fileUriPrefix)) { + uri = uri.substring(fileUriPrefix.length); + } + if (uri.indexOf("://") !== -1) { + logger.debug( + `Ignoring location URI "${uri}" as the scheme is not recognised` + ); + return void 0; + } + const srcRootPrefix = `${sourceRoot}/`; + if (uri.startsWith("/") && !uri.startsWith(srcRootPrefix)) { + logger.debug( + `Ignoring location URI "${uri}" as it is outside of the src root` + ); + return void 0; + } + if (!import_path2.default.isAbsolute(uri)) { + uri = srcRootPrefix + uri; + } + if (!fs7.existsSync(uri)) { + logger.debug(`Unable to compute fingerprint for non-existent file: ${uri}`); + return void 0; + } + if (fs7.statSync(uri).isDirectory()) { + logger.debug(`Unable to compute fingerprint for directory: ${uri}`); + return void 0; + } + return uri; +} +async function addFingerprints(sarif, sourceRoot, logger) { + logger.info( + `Adding fingerprints to SARIF file. 
See ${"https://docs.github.com/en/code-security/reference/code-scanning/sarif-support-for-code-scanning#data-for-preventing-duplicated-alerts" /* TRACK_CODE_SCANNING_ALERTS_ACROSS_RUNS */} for more information.` + ); + const callbacksByFile = {}; + for (const run2 of sarif.runs || []) { + const artifacts = run2.artifacts || []; + for (const result of run2.results || []) { + const primaryLocation = (result.locations || [])[0]; + if (!primaryLocation?.physicalLocation?.artifactLocation) { + logger.debug( + `Unable to compute fingerprint for invalid location: ${JSON.stringify( + primaryLocation + )}` + ); + continue; + } + if (primaryLocation?.physicalLocation?.region?.startLine === void 0) { + continue; + } + const filepath = resolveUriToFile( + primaryLocation.physicalLocation.artifactLocation, + artifacts, + sourceRoot, + logger + ); + if (!filepath) { + continue; + } + if (!callbacksByFile[filepath]) { + callbacksByFile[filepath] = []; + } + callbacksByFile[filepath].push( + locationUpdateCallback(result, primaryLocation, logger) + ); + } + } + for (const [filepath, callbacks] of Object.entries(callbacksByFile)) { + const teeCallback = function(lineNumber, hashValue) { + for (const c of Object.values(callbacks)) { + c(lineNumber, hashValue); + } + }; + await hash(teeCallback, filepath); + } + return sarif; +} + +// src/init.ts +var toolrunner4 = __toESM(require_toolrunner()); +var io5 = __toESM(require_io()); + +// src/codeql.ts +var fs11 = __toESM(require("fs")); +var path11 = __toESM(require("path")); +var core11 = __toESM(require_core()); +var toolrunner3 = __toESM(require_toolrunner()); + +// src/cli-errors.ts +var SUPPORTED_PLATFORMS = [ + ["linux", "x64"], + ["win32", "x64"], + ["darwin", "x64"], + ["darwin", "arm64"] +]; +var CliError = class extends Error { + exitCode; + stderr; + constructor({ cmd, args, exitCode, stderr }) { + const prettyCommand = prettyPrintInvocation(cmd, args); + const fatalErrors = extractFatalErrors(stderr); + const autobuildErrors = extractAutobuildErrors(stderr); + let message; + if (fatalErrors) { + message = `Encountered a fatal error while running "${prettyCommand}". Exit code was ${exitCode} and error was: ${ensureEndsInPeriod( + fatalErrors.trim() + )} See the logs for more details.`; + } else if (autobuildErrors) { + message = `We were unable to automatically build your code. Please provide manual build steps. See ${"https://docs.github.com/en/code-security/code-scanning/troubleshooting-code-scanning/automatic-build-failed" /* AUTOMATIC_BUILD_FAILED */} for more information. Encountered the following error: ${autobuildErrors}`; + } else { + const lastLine = ensureEndsInPeriod( + stderr.trim().split("\n").pop()?.trim() || "n/a" + ); + message = `Encountered a fatal error while running "${prettyCommand}". Exit code was ${exitCode} and last log line was: ${lastLine} See the logs for more details.`; + } + super(message); + this.exitCode = exitCode; + this.stderr = stderr; + } +}; +function extractFatalErrors(error3) { + const fatalErrorRegex = /.*fatal (internal )?error occurr?ed(. 
Details)?:/gi; + let fatalErrors = []; + let lastFatalErrorIndex; + let match; + while ((match = fatalErrorRegex.exec(error3)) !== null) { + if (lastFatalErrorIndex !== void 0) { + fatalErrors.push(error3.slice(lastFatalErrorIndex, match.index).trim()); + } + lastFatalErrorIndex = match.index; + } + if (lastFatalErrorIndex !== void 0) { + const lastError = error3.slice(lastFatalErrorIndex).trim(); + if (fatalErrors.length === 0) { + return lastError; + } + const isOneLiner = !fatalErrors.some((e) => e.includes("\n")); + if (isOneLiner) { + fatalErrors = fatalErrors.map(ensureEndsInPeriod); + } + return [ + ensureEndsInPeriod(lastError), + "Context:", + ...fatalErrors.reverse() + ].join(isOneLiner ? " " : "\n"); + } + return void 0; +} +function extractAutobuildErrors(error3) { + const pattern = /.*\[autobuild\] \[ERROR\] (.*)/gi; + let errorLines = [...error3.matchAll(pattern)].map((match) => match[1]); + if (errorLines.length > 10) { + errorLines = errorLines.slice(0, 10); + errorLines.push("(truncated)"); + } + return errorLines.join("\n") || void 0; +} +var cliErrorsConfig = { + ["AutobuildError" /* AutobuildError */]: { + cliErrorMessageCandidates: [ + new RegExp("We were unable to automatically build your code") + ] + }, + ["CouldNotCreateTempDir" /* CouldNotCreateTempDir */]: { + cliErrorMessageCandidates: [new RegExp("Could not create temp directory")] + }, + ["ExternalRepositoryCloneFailed" /* ExternalRepositoryCloneFailed */]: { + cliErrorMessageCandidates: [ + new RegExp("Failed to clone external Git repository") + ] + }, + ["GradleBuildFailed" /* GradleBuildFailed */]: { + cliErrorMessageCandidates: [ + new RegExp("\\[autobuild\\] FAILURE: Build failed with an exception.") + ] + }, + // Version of CodeQL CLI is incompatible with this version of the CodeQL Action + ["IncompatibleWithActionVersion" /* IncompatibleWithActionVersion */]: { + cliErrorMessageCandidates: [ + new RegExp("is not compatible with this CodeQL CLI") + ] + }, + ["InitCalledTwice" /* InitCalledTwice */]: { + cliErrorMessageCandidates: [ + new RegExp( + "Refusing to create databases .* but could not process any of it" + ) + ], + additionalErrorMessageToAppend: `Is the "init" action called twice in the same job?` + }, + ["InvalidConfigFile" /* InvalidConfigFile */]: { + cliErrorMessageCandidates: [ + new RegExp("Config file .* is not valid"), + new RegExp("The supplied config file is empty") + ] + }, + ["InvalidExternalRepoSpecifier" /* InvalidExternalRepoSpecifier */]: { + cliErrorMessageCandidates: [ + new RegExp("Specifier for external repository is invalid") + ] + }, + // Expected source location for database creation does not exist + ["InvalidSourceRoot" /* InvalidSourceRoot */]: { + cliErrorMessageCandidates: [new RegExp("Invalid source root")] + }, + ["MavenBuildFailed" /* MavenBuildFailed */]: { + cliErrorMessageCandidates: [ + new RegExp("\\[autobuild\\] \\[ERROR\\] Failed to execute goal") + ] + }, + ["NoBuildCommandAutodetected" /* NoBuildCommandAutodetected */]: { + cliErrorMessageCandidates: [ + new RegExp("Could not auto-detect a suitable build method") + ] + }, + ["NoBuildMethodAutodetected" /* NoBuildMethodAutodetected */]: { + cliErrorMessageCandidates: [ + new RegExp( + "Could not detect a suitable build command for the source checkout" + ) + ] + }, + // Usually when a manual build script has failed, or if an autodetected language + // was unintended to have CodeQL analysis run on it. 
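+  // This category is matched on exit code 32 as well as on the message patterns
+  // below; like every entry in cliErrorsConfig, it is looked up by
+  // getCliConfigCategoryIfExists (an exit-code match, or any
+  // cliErrorMessageCandidates regex matching the error message or stderr) and
+  // re-raised as a ConfigurationError by wrapCliConfigurationError, with
+  // additionalErrorMessageToAppend, when present, appended to the original message.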
+ ["NoSourceCodeSeen" /* NoSourceCodeSeen */]: { + exitCode: 32, + cliErrorMessageCandidates: [ + new RegExp( + "CodeQL detected code written in .* but could not process any of it" + ), + new RegExp( + "CodeQL did not detect any code written in languages supported by CodeQL" + ) + ] + }, + ["NoSupportedBuildCommandSucceeded" /* NoSupportedBuildCommandSucceeded */]: { + cliErrorMessageCandidates: [ + new RegExp("No supported build command succeeded") + ] + }, + ["NoSupportedBuildSystemDetected" /* NoSupportedBuildSystemDetected */]: { + cliErrorMessageCandidates: [ + new RegExp("No supported build system detected") + ] + }, + ["OutOfMemoryOrDisk" /* OutOfMemoryOrDisk */]: { + cliErrorMessageCandidates: [ + new RegExp("CodeQL is out of memory."), + new RegExp("out of disk"), + new RegExp("No space left on device") + ], + additionalErrorMessageToAppend: "For more information, see https://gh.io/troubleshooting-code-scanning/out-of-disk-or-memory" + }, + ["PackCannotBeFound" /* PackCannotBeFound */]: { + cliErrorMessageCandidates: [ + new RegExp( + "Query pack .* cannot be found\\. Check the spelling of the pack\\." + ), + new RegExp( + "is not a .ql file, .qls file, a directory, or a query pack specification." + ) + ] + }, + ["PackMissingAuth" /* PackMissingAuth */]: { + cliErrorMessageCandidates: [ + new RegExp("GitHub Container registry .* 403 Forbidden"), + new RegExp( + "Do you need to specify a token to authenticate to the registry?" + ) + ] + }, + ["SwiftBuildFailed" /* SwiftBuildFailed */]: { + cliErrorMessageCandidates: [ + new RegExp( + "\\[autobuilder/build\\] \\[build-command-failed\\] `autobuild` failed to run the build command" + ) + ] + }, + ["UnsupportedBuildMode" /* UnsupportedBuildMode */]: { + cliErrorMessageCandidates: [ + new RegExp( + "does not support the .* build mode. Please try using one of the following build modes instead" + ) + ] + }, + ["NotFoundInRegistry" /* NotFoundInRegistry */]: { + cliErrorMessageCandidates: [ + new RegExp("'.*' not found in the registry '.*'") + ] + } +}; +function getCliConfigCategoryIfExists(cliError) { + for (const [category, configuration] of Object.entries(cliErrorsConfig)) { + if (cliError.exitCode !== void 0 && configuration.exitCode !== void 0 && cliError.exitCode === configuration.exitCode) { + return category; + } + for (const e of configuration.cliErrorMessageCandidates) { + if (cliError.message.match(e) || cliError.stderr.match(e)) { + return category; + } + } + } + return void 0; +} +function isUnsupportedPlatform() { + return !SUPPORTED_PLATFORMS.some( + ([platform, arch2]) => platform === process.platform && arch2 === process.arch + ); +} +function getUnsupportedPlatformError(cliError) { + return new ConfigurationError( + `The CodeQL CLI does not support the platform/architecture combination of ${process.platform}/${process.arch} (see ${"https://codeql.github.com/docs/codeql-overview/system-requirements/" /* SYSTEM_REQUIREMENTS */}). 
The underlying error was: ${cliError.message}` + ); +} +function wrapCliConfigurationError(cliError) { + if (isUnsupportedPlatform()) { + return getUnsupportedPlatformError(cliError); + } + const cliConfigErrorCategory = getCliConfigCategoryIfExists(cliError); + if (cliConfigErrorCategory === void 0) { + return cliError; + } + let errorMessageBuilder = cliError.message; + const additionalErrorMessageToAppend = cliErrorsConfig[cliConfigErrorCategory].additionalErrorMessageToAppend; + if (additionalErrorMessageToAppend !== void 0) { + errorMessageBuilder = `${errorMessageBuilder} ${additionalErrorMessageToAppend}`; + } + return new ConfigurationError(errorMessageBuilder); +} + +// src/setup-codeql.ts +var fs10 = __toESM(require("fs")); +var path10 = __toESM(require("path")); +var toolcache3 = __toESM(require_tool_cache()); +var import_fast_deep_equal = __toESM(require_fast_deep_equal()); +var semver8 = __toESM(require_semver2()); + +// node_modules/uuid/dist-node/stringify.js +var byteToHex = []; +for (let i = 0; i < 256; ++i) { + byteToHex.push((i + 256).toString(16).slice(1)); +} +function unsafeStringify(arr, offset = 0) { + return (byteToHex[arr[offset + 0]] + byteToHex[arr[offset + 1]] + byteToHex[arr[offset + 2]] + byteToHex[arr[offset + 3]] + "-" + byteToHex[arr[offset + 4]] + byteToHex[arr[offset + 5]] + "-" + byteToHex[arr[offset + 6]] + byteToHex[arr[offset + 7]] + "-" + byteToHex[arr[offset + 8]] + byteToHex[arr[offset + 9]] + "-" + byteToHex[arr[offset + 10]] + byteToHex[arr[offset + 11]] + byteToHex[arr[offset + 12]] + byteToHex[arr[offset + 13]] + byteToHex[arr[offset + 14]] + byteToHex[arr[offset + 15]]).toLowerCase(); +} + +// node_modules/uuid/dist-node/rng.js +var import_node_crypto = require("node:crypto"); +var rnds8Pool = new Uint8Array(256); +var poolPtr = rnds8Pool.length; +function rng() { + if (poolPtr > rnds8Pool.length - 16) { + (0, import_node_crypto.randomFillSync)(rnds8Pool); + poolPtr = 0; + } + return rnds8Pool.slice(poolPtr, poolPtr += 16); +} + +// node_modules/uuid/dist-node/native.js +var import_node_crypto2 = require("node:crypto"); +var native_default = { randomUUID: import_node_crypto2.randomUUID }; + +// node_modules/uuid/dist-node/v4.js +function _v4(options, buf, offset) { + options = options || {}; + const rnds = options.random ?? options.rng?.() ?? 
rng(); + if (rnds.length < 16) { + throw new Error("Random bytes length must be >= 16"); + } + rnds[6] = rnds[6] & 15 | 64; + rnds[8] = rnds[8] & 63 | 128; + if (buf) { + offset = offset || 0; + if (offset < 0 || offset + 16 > buf.length) { + throw new RangeError(`UUID byte range ${offset}:${offset + 15} is out of buffer bounds`); + } + for (let i = 0; i < 16; ++i) { + buf[offset + i] = rnds[i]; + } + return buf; + } + return unsafeStringify(rnds); +} +function v4(options, buf, offset) { + if (native_default.randomUUID && !buf && !options) { + return native_default.randomUUID(); + } + return _v4(options, buf, offset); +} +var v4_default = v4; + +// src/tar.ts +var import_child_process = require("child_process"); +var fs8 = __toESM(require("fs")); +var stream = __toESM(require("stream")); +var import_toolrunner = __toESM(require_toolrunner()); +var io4 = __toESM(require_io()); +var toolcache = __toESM(require_tool_cache()); +var semver6 = __toESM(require_semver2()); +var MIN_REQUIRED_BSD_TAR_VERSION = "3.4.3"; +var MIN_REQUIRED_GNU_TAR_VERSION = "1.31"; +async function getTarVersion() { + const tar = await io4.which("tar", true); + let stdout = ""; + const exitCode = await new import_toolrunner.ToolRunner(tar, ["--version"], { + listeners: { + stdout: (data) => { + stdout += data.toString(); + } + } + }).exec(); + if (exitCode !== 0) { + throw new Error("Failed to call tar --version"); + } + if (stdout.includes("GNU tar")) { + const match = stdout.match(/tar \(GNU tar\) ([0-9.]+)/); + if (!match?.[1]) { + throw new Error("Failed to parse output of tar --version."); + } + return { type: "gnu", version: match[1] }; + } else if (stdout.includes("bsdtar")) { + const match = stdout.match(/bsdtar ([0-9.]+)/); + if (!match?.[1]) { + throw new Error("Failed to parse output of tar --version."); + } + return { type: "bsd", version: match[1] }; + } else { + throw new Error("Unknown tar version"); + } +} +async function isZstdAvailable(logger) { + const foundZstdBinary = await isBinaryAccessible("zstd", logger); + try { + const tarVersion = await getTarVersion(); + const { type: type2, version } = tarVersion; + logger.info(`Found ${type2} tar version ${version}.`); + switch (type2) { + case "gnu": + return { + available: foundZstdBinary && // GNU tar only uses major and minor version numbers + semver6.gte( + semver6.coerce(version), + semver6.coerce(MIN_REQUIRED_GNU_TAR_VERSION) + ), + foundZstdBinary, + version: tarVersion + }; + case "bsd": + return { + available: foundZstdBinary && // Do a loose comparison since these version numbers don't contain + // a patch version number. + semver6.gte(version, MIN_REQUIRED_BSD_TAR_VERSION), + foundZstdBinary, + version: tarVersion + }; + default: + assertNever(type2); + } + } catch (e) { + logger.warning( + `Failed to determine tar version, therefore will assume zstd is not available. The underlying error was: ${e}` + ); + return { available: false, foundZstdBinary }; + } +} +async function extract(tarPath, dest, compressionMethod, tarVersion, logger) { + fs8.mkdirSync(dest, { recursive: true }); + switch (compressionMethod) { + case "gzip": + return await toolcache.extractTar(tarPath, dest); + case "zstd": { + if (!tarVersion) { + throw new Error( + "Could not determine tar version, which is required to extract a Zstandard archive." + ); + } + await extractTarZst(tarPath, dest, tarVersion, logger); + return dest; + } + } +} +async function extractTarZst(tar, dest, tarVersion, logger) { + logger.debug( + `Extracting to ${dest}.${tar instanceof stream.Readable ? 
` Input stream has high water mark ${tar.readableHighWaterMark}.` : ""}` + ); + try { + const args = ["-x", "--zstd", "--ignore-zeros"]; + if (tarVersion.type === "gnu") { + args.push("--warning=no-unknown-keyword"); + args.push("--overwrite"); + } + args.push("-f", tar instanceof stream.Readable ? "-" : tar, "-C", dest); + process.stdout.write(`[command]tar ${args.join(" ")} +`); + await new Promise((resolve6, reject) => { + const tarProcess = (0, import_child_process.spawn)("tar", args, { stdio: "pipe" }); + let stdout = ""; + tarProcess.stdout?.on("data", (data) => { + stdout += data.toString(); + process.stdout.write(data); + }); + let stderr = ""; + tarProcess.stderr?.on("data", (data) => { + stderr += data.toString(); + process.stdout.write(data); + }); + tarProcess.on("error", (err) => { + reject(new Error(`Error while extracting tar: ${err}`)); + }); + if (tar instanceof stream.Readable) { + tar.pipe(tarProcess.stdin).on("error", (err) => { + reject( + new Error(`Error while downloading and extracting tar: ${err}`) + ); + }); + } + tarProcess.on("exit", (code) => { + if (code !== 0) { + reject( + new CommandInvocationError( + "tar", + args, + code ?? void 0, + stdout, + stderr + ) + ); + } + resolve6(); + }); + }); + } catch (e) { + await cleanUpPath(dest, "extraction destination directory", logger); + throw e; + } +} +var KNOWN_EXTENSIONS = { + "tar.gz": "gzip", + "tar.zst": "zstd" +}; +function inferCompressionMethod(tarPath) { + for (const [ext, method] of Object.entries(KNOWN_EXTENSIONS)) { + if (tarPath.endsWith(`.${ext}`)) { + return method; + } + } + return void 0; } -Long.prototype.__isLong__; -Object.defineProperty(Long.prototype, "__isLong__", { value: true }); -function isLong(obj) { - return (obj && obj["__isLong__"]) === true; + +// src/tools-download.ts +var fs9 = __toESM(require("fs")); +var os2 = __toESM(require("os")); +var path9 = __toESM(require("path")); +var import_perf_hooks = require("perf_hooks"); +var core10 = __toESM(require_core()); +var import_http_client = __toESM(require_lib()); +var toolcache2 = __toESM(require_tool_cache()); +var import_follow_redirects = __toESM(require_follow_redirects()); +var semver7 = __toESM(require_semver2()); +var STREAMING_HIGH_WATERMARK_BYTES = 4 * 1024 * 1024; +var TOOLCACHE_TOOL_NAME = "CodeQL"; +function makeDownloadFirstToolsDownloadDurations(downloadDurationMs, extractionDurationMs) { + return { + combinedDurationMs: downloadDurationMs + extractionDurationMs, + downloadDurationMs, + extractionDurationMs, + streamExtraction: false + }; } -function ctz32(value) { - var c = Math.clz32(value & -value); - return value ? 31 - c : c; +function makeStreamedToolsDownloadDurations(combinedDurationMs) { + return { + combinedDurationMs, + downloadDurationMs: void 0, + extractionDurationMs: void 0, + streamExtraction: true + }; } -Long.isLong = isLong; -var INT_CACHE = {}; -var UINT_CACHE = {}; -function fromInt(value, unsigned) { - var obj, cachedObj, cache; - if (unsigned) { - value >>>= 0; - if (cache = 0 <= value && value < 256) { - cachedObj = UINT_CACHE[value]; - if (cachedObj) return cachedObj; +async function downloadAndExtract(codeqlURL, compressionMethod, dest, authorization, headers, tarVersion, logger) { + logger.info( + `Downloading CodeQL tools from ${codeqlURL} . 
This may take a while.` + ); + try { + if (compressionMethod === "zstd" && process.platform === "linux") { + logger.info(`Streaming the extraction of the CodeQL bundle.`); + const toolsInstallStart = import_perf_hooks.performance.now(); + await downloadAndExtractZstdWithStreaming( + codeqlURL, + dest, + authorization, + headers, + tarVersion, + logger + ); + const combinedDurationMs = Math.round( + import_perf_hooks.performance.now() - toolsInstallStart + ); + logger.info( + `Finished downloading and extracting CodeQL bundle to ${dest} (${formatDuration( + combinedDurationMs + )}).` + ); + return { + compressionMethod, + toolsUrl: sanitizeUrlForStatusReport(codeqlURL), + ...makeStreamedToolsDownloadDurations(combinedDurationMs) + }; } - obj = fromBits(value, 0, true); - if (cache) UINT_CACHE[value] = obj; - return obj; + } catch (e) { + core10.warning( + `Failed to download and extract CodeQL bundle using streaming with error: ${getErrorMessage(e)}` + ); + core10.warning(`Falling back to downloading the bundle before extracting.`); + await cleanUpPath(dest, "CodeQL bundle", logger); + } + const toolsDownloadStart = import_perf_hooks.performance.now(); + const archivedBundlePath = await toolcache2.downloadTool( + codeqlURL, + void 0, + authorization, + headers + ); + const downloadDurationMs = Math.round(import_perf_hooks.performance.now() - toolsDownloadStart); + logger.info( + `Finished downloading CodeQL bundle to ${archivedBundlePath} (${formatDuration( + downloadDurationMs + )}).` + ); + let extractionDurationMs; + try { + logger.info("Extracting CodeQL bundle."); + const extractionStart = import_perf_hooks.performance.now(); + await extract( + archivedBundlePath, + dest, + compressionMethod, + tarVersion, + logger + ); + extractionDurationMs = Math.round(import_perf_hooks.performance.now() - extractionStart); + logger.info( + `Finished extracting CodeQL bundle to ${dest} (${formatDuration( + extractionDurationMs + )}).` + ); + } finally { + await cleanUpPath(archivedBundlePath, "CodeQL bundle archive", logger); + } + return { + compressionMethod, + toolsUrl: sanitizeUrlForStatusReport(codeqlURL), + ...makeDownloadFirstToolsDownloadDurations( + downloadDurationMs, + extractionDurationMs + ) + }; +} +async function downloadAndExtractZstdWithStreaming(codeqlURL, dest, authorization, headers, tarVersion, logger) { + fs9.mkdirSync(dest, { recursive: true }); + const agent = new import_http_client.HttpClient().getAgent(codeqlURL); + headers = Object.assign( + { "User-Agent": "CodeQL Action" }, + authorization ? { authorization } : {}, + headers + ); + const response = await new Promise( + (resolve6) => import_follow_redirects.https.get( + codeqlURL, + { + headers, + // Increase the high water mark to improve performance. + highWaterMark: STREAMING_HIGH_WATERMARK_BYTES, + // Use the agent to respect proxy settings. + agent + }, + (r) => resolve6(r) + ) + ); + if (response.statusCode !== 200) { + throw new Error( + `Failed to download CodeQL bundle from ${codeqlURL}. 
HTTP status code: ${response.statusCode}.` + ); + } + await extractTarZst(response, dest, tarVersion, logger); +} +function getToolcacheDirectory(version) { + return path9.join( + getRequiredEnvParam("RUNNER_TOOL_CACHE"), + TOOLCACHE_TOOL_NAME, + semver7.clean(version) || version, + os2.arch() || "" + ); +} +function writeToolcacheMarkerFile(extractedPath, logger) { + const markerFilePath = `${extractedPath}.complete`; + fs9.writeFileSync(markerFilePath, ""); + logger.info(`Created toolcache marker file ${markerFilePath}`); +} +function sanitizeUrlForStatusReport(url2) { + return ["github/codeql-action", "dsp-testing/codeql-cli-nightlies"].some( + (repo) => url2.startsWith(`https://github.com/${repo}/releases/download/`) + ) ? url2 : "sanitized-value"; +} + +// src/setup-codeql.ts +var CODEQL_DEFAULT_ACTION_REPOSITORY = "github/codeql-action"; +var CODEQL_NIGHTLIES_REPOSITORY_OWNER = "dsp-testing"; +var CODEQL_NIGHTLIES_REPOSITORY_NAME = "codeql-cli-nightlies"; +var CODEQL_BUNDLE_VERSION_ALIAS = ["linked", "latest"]; +var CODEQL_NIGHTLY_TOOLS_INPUTS = ["nightly", "nightly-latest"]; +var CODEQL_TOOLCACHE_INPUT = "toolcache"; +function getCodeQLBundleExtension(compressionMethod) { + switch (compressionMethod) { + case "gzip": + return ".tar.gz"; + case "zstd": + return ".tar.zst"; + default: + assertNever(compressionMethod); + } +} +function getCodeQLBundleName(compressionMethod) { + const extension = getCodeQLBundleExtension(compressionMethod); + let platform; + if (process.platform === "win32") { + platform = "win64"; + } else if (process.platform === "linux") { + platform = "linux64"; + } else if (process.platform === "darwin") { + platform = "osx64"; } else { - value |= 0; - if (cache = -128 <= value && value < 128) { - cachedObj = INT_CACHE[value]; - if (cachedObj) return cachedObj; + return `codeql-bundle${extension}`; + } + return `codeql-bundle-${platform}${extension}`; +} +function getCodeQLActionRepository(logger) { + if (isRunningLocalAction()) { + logger.info( + "The CodeQL Action is checked out locally. Using the default CodeQL Action repository." + ); + return CODEQL_DEFAULT_ACTION_REPOSITORY; + } + return getRequiredEnvParam("GITHUB_ACTION_REPOSITORY"); +} +async function getCodeQLBundleDownloadURL(tagName, apiDetails, compressionMethod, logger) { + const codeQLActionRepository = getCodeQLActionRepository(logger); + const potentialDownloadSources = [ + // This GitHub instance, and this Action. + [apiDetails.url, codeQLActionRepository], + // This GitHub instance, and the canonical Action. + [apiDetails.url, CODEQL_DEFAULT_ACTION_REPOSITORY], + // GitHub.com, and the canonical Action. 
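+    // This last source is never queried through the API: the lookup loop below
+    // breaks before reaching it, because the hard-coded release download URL
+    // returned at the end of this function already covers GitHub.com + the
+    // canonical Action. Duplicate sources are filtered out before the loop runs.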
+ [GITHUB_DOTCOM_URL, CODEQL_DEFAULT_ACTION_REPOSITORY] + ]; + const uniqueDownloadSources = potentialDownloadSources.filter( + (source, index, self2) => { + return !self2.slice(0, index).some((other) => (0, import_fast_deep_equal.default)(source, other)); + } + ); + const codeQLBundleName = getCodeQLBundleName(compressionMethod); + for (const downloadSource of uniqueDownloadSources) { + const [apiURL, repository] = downloadSource; + if (apiURL === GITHUB_DOTCOM_URL && repository === CODEQL_DEFAULT_ACTION_REPOSITORY) { + break; + } + const [repositoryOwner, repositoryName] = repository.split("/"); + try { + const release2 = await getApiClient().rest.repos.getReleaseByTag({ + owner: repositoryOwner, + repo: repositoryName, + tag: tagName + }); + for (const asset of release2.data.assets) { + if (asset.name === codeQLBundleName) { + logger.info( + `Found CodeQL bundle ${codeQLBundleName} in ${repository} on ${apiURL} with URL ${asset.url}.` + ); + return asset.url; + } + } + } catch (e) { + logger.info( + `Looked for CodeQL bundle ${codeQLBundleName} in ${repository} on ${apiURL} but got error ${e}.` + ); } - obj = fromBits(value, value < 0 ? -1 : 0, false); - if (cache) INT_CACHE[value] = obj; - return obj; } + return `https://github.com/${CODEQL_DEFAULT_ACTION_REPOSITORY}/releases/download/${tagName}/${codeQLBundleName}`; } -Long.fromInt = fromInt; -function fromNumber(value, unsigned) { - if (isNaN(value)) return unsigned ? UZERO : ZERO; - if (unsigned) { - if (value < 0) return UZERO; - if (value >= TWO_PWR_64_DBL) return MAX_UNSIGNED_VALUE; - } else { - if (value <= -TWO_PWR_63_DBL) return MIN_VALUE; - if (value + 1 >= TWO_PWR_63_DBL) return MAX_VALUE; +function tryGetBundleVersionFromTagName(tagName, logger) { + const match = tagName.match(/^codeql-bundle-(.*)$/); + if (match === null || match.length < 2) { + logger.debug(`Could not determine bundle version from tag ${tagName}.`); + return void 0; } - if (value < 0) return fromNumber(-value, unsigned).neg(); - return fromBits( - value % TWO_PWR_32_DBL | 0, - value / TWO_PWR_32_DBL | 0, - unsigned - ); + return match[1]; } -Long.fromNumber = fromNumber; -function fromBits(lowBits, highBits, unsigned) { - return new Long(lowBits, highBits, unsigned); +function tryGetTagNameFromUrl(url2, logger) { + const matches = [...url2.matchAll(/\/(codeql-bundle-[^/]*)\//g)]; + if (matches.length === 0) { + logger.debug(`Could not determine tag name for URL ${url2}.`); + return void 0; + } + const match = matches[matches.length - 1]; + if (match?.length !== 2) { + logger.debug( + `Could not determine tag name for URL ${url2}. Matched ${JSON.stringify( + match + )}.` + ); + return void 0; + } + return match[1]; } -Long.fromBits = fromBits; -var pow_dbl = Math.pow; -function fromString(str2, unsigned, radix) { - if (str2.length === 0) throw Error("empty string"); - if (typeof unsigned === "number") { - radix = unsigned; - unsigned = false; +function convertToSemVer(version, logger) { + if (!semver8.valid(version)) { + logger.debug( + `Bundle version ${version} is not in SemVer format. 
Will treat it as pre-release 0.0.0-${version}.` + ); + version = `0.0.0-${version}`; + } + const s = semver8.clean(version); + if (!s) { + throw new Error(`Bundle version ${version} is not in SemVer format.`); + } + return s; +} +async function findOverridingToolsInCache(humanReadableVersion, logger) { + const candidates = toolcache3.findAllVersions("CodeQL").filter(isGoodVersion).map((version) => ({ + folder: toolcache3.find("CodeQL", version), + version + })).filter(({ folder }) => fs10.existsSync(path10.join(folder, "pinned-version"))); + if (candidates.length === 1) { + const candidate = candidates[0]; + logger.debug( + `CodeQL tools version ${candidate.version} in toolcache overriding version ${humanReadableVersion}.` + ); + return { + codeqlFolder: candidate.folder, + sourceType: "toolcache", + toolsVersion: candidate.version + }; + } else if (candidates.length === 0) { + logger.debug( + "Did not find any candidate pinned versions of the CodeQL tools in the toolcache." + ); } else { - unsigned = !!unsigned; + logger.debug( + "Could not use CodeQL tools from the toolcache since more than one candidate pinned version was found in the toolcache." + ); } - if (str2 === "NaN" || str2 === "Infinity" || str2 === "+Infinity" || str2 === "-Infinity") - return unsigned ? UZERO : ZERO; - radix = radix || 10; - if (radix < 2 || 36 < radix) throw RangeError("radix"); - var p; - if ((p = str2.indexOf("-")) > 0) throw Error("interior hyphen"); - else if (p === 0) { - return fromString(str2.substring(1), unsigned, radix).neg(); + return void 0; +} +async function getCodeQLSource(toolsInput, defaultCliVersion, apiDetails, variant, tarSupportsZstd, features, logger) { + if (toolsInput && !isReservedToolsValue(toolsInput) && !toolsInput.startsWith("http")) { + logger.info(`Using CodeQL CLI from local path ${toolsInput}`); + const compressionMethod2 = inferCompressionMethod(toolsInput); + if (compressionMethod2 === void 0) { + throw new ConfigurationError( + `Could not infer compression method from path ${toolsInput}. 
Please specify a path ending in '.tar.gz' or '.tar.zst'.` + ); + } + return { + codeqlTarPath: toolsInput, + compressionMethod: compressionMethod2, + sourceType: "local", + toolsVersion: "local" + }; } - var radixToPower = fromNumber(pow_dbl(radix, 8)); - var result = ZERO; - for (var i = 0; i < str2.length; i += 8) { - var size = Math.min(8, str2.length - i), value = parseInt(str2.substring(i, i + size), radix); - if (size < 8) { - var power = fromNumber(pow_dbl(radix, size)); - result = result.mul(power).add(fromNumber(value)); + let cliVersion2; + let tagName; + let url2; + const canForceNightlyWithFF = isDynamicWorkflow() || isInTestMode(); + const forceNightlyValueFF = await features.getValue("force_nightly" /* ForceNightly */); + const forceNightly = forceNightlyValueFF && canForceNightlyWithFF; + const nightlyRequestedByToolsInput = toolsInput !== void 0 && CODEQL_NIGHTLY_TOOLS_INPUTS.includes(toolsInput); + if (forceNightly || nightlyRequestedByToolsInput) { + if (forceNightly) { + logger.info( + `Using the latest CodeQL CLI nightly, as forced by the ${"force_nightly" /* ForceNightly */} feature flag.` + ); + addNoLanguageDiagnostic( + void 0, + makeDiagnostic( + "codeql-action/forced-nightly-cli", + "A nightly release of CodeQL was used", + { + markdownMessage: "GitHub configured this analysis to use a nightly release of CodeQL to allow you to preview changes from an upcoming release.\n\nNightly releases do not undergo the same validation as regular releases and may lead to analysis instability.\n\nIf use of a nightly CodeQL release for this analysis is unexpected, please contact GitHub support.", + visibility: { + cliSummaryTable: true, + statusPage: true, + telemetry: true + }, + severity: "note" + } + ) + ); } else { - result = result.mul(radixToPower); - result = result.add(fromNumber(value)); + logger.info( + `Using the latest CodeQL CLI nightly, as requested by 'tools: ${toolsInput}'.` + ); } + toolsInput = await getNightlyToolsUrl(logger); } - result.unsigned = unsigned; - return result; -} -Long.fromString = fromString; -function fromValue(val, unsigned) { - if (typeof val === "number") return fromNumber(val, unsigned); - if (typeof val === "string") return fromString(val, unsigned); - return fromBits( - val.low, - val.high, - typeof unsigned === "boolean" ? unsigned : val.unsigned - ); -} -Long.fromValue = fromValue; -var TWO_PWR_16_DBL = 1 << 16; -var TWO_PWR_24_DBL = 1 << 24; -var TWO_PWR_32_DBL = TWO_PWR_16_DBL * TWO_PWR_16_DBL; -var TWO_PWR_64_DBL = TWO_PWR_32_DBL * TWO_PWR_32_DBL; -var TWO_PWR_63_DBL = TWO_PWR_64_DBL / 2; -var TWO_PWR_24 = fromInt(TWO_PWR_24_DBL); -var ZERO = fromInt(0); -Long.ZERO = ZERO; -var UZERO = fromInt(0, true); -Long.UZERO = UZERO; -var ONE = fromInt(1); -Long.ONE = ONE; -var UONE = fromInt(1, true); -Long.UONE = UONE; -var NEG_ONE = fromInt(-1); -Long.NEG_ONE = NEG_ONE; -var MAX_VALUE = fromBits(4294967295 | 0, 2147483647 | 0, false); -Long.MAX_VALUE = MAX_VALUE; -var MAX_UNSIGNED_VALUE = fromBits(4294967295 | 0, 4294967295 | 0, true); -Long.MAX_UNSIGNED_VALUE = MAX_UNSIGNED_VALUE; -var MIN_VALUE = fromBits(0, 2147483648 | 0, false); -Long.MIN_VALUE = MIN_VALUE; -var LongPrototype = Long.prototype; -LongPrototype.toInt = function toInt() { - return this.unsigned ? 
this.low >>> 0 : this.low; -}; -LongPrototype.toNumber = function toNumber() { - if (this.unsigned) - return (this.high >>> 0) * TWO_PWR_32_DBL + (this.low >>> 0); - return this.high * TWO_PWR_32_DBL + (this.low >>> 0); -}; -LongPrototype.toString = function toString2(radix) { - radix = radix || 10; - if (radix < 2 || 36 < radix) throw RangeError("radix"); - if (this.isZero()) return "0"; - if (this.isNegative()) { - if (this.eq(MIN_VALUE)) { - var radixLong = fromNumber(radix), div = this.div(radixLong), rem1 = div.mul(radixLong).sub(this); - return div.toString(radix) + rem1.toInt().toString(radix); - } else return "-" + this.neg().toString(radix); - } - var radixToPower = fromNumber(pow_dbl(radix, 6), this.unsigned), rem = this; - var result = ""; - while (true) { - var remDiv = rem.div(radixToPower), intval = rem.sub(remDiv.mul(radixToPower)).toInt() >>> 0, digits = intval.toString(radix); - rem = remDiv; - if (rem.isZero()) return digits + result; - else { - while (digits.length < 6) digits = "0" + digits; - result = "" + digits + result; + const forceShippedTools = toolsInput && CODEQL_BUNDLE_VERSION_ALIAS.includes(toolsInput); + if (forceShippedTools) { + cliVersion2 = cliVersion; + tagName = bundleVersion; + logger.info( + `'tools: ${toolsInput}' was requested, so using CodeQL version ${cliVersion2}, the version shipped with the Action.` + ); + if (toolsInput === "latest") { + logger.warning( + "`tools: latest` has been renamed to `tools: linked`, but the old name is still supported. No action is required." + ); + } + } else if (toolsInput !== void 0 && toolsInput === CODEQL_TOOLCACHE_INPUT) { + let latestToolcacheVersion; + const allowToolcacheValueFF = await features.getValue( + "allow_toolcache_input" /* AllowToolcacheInput */ + ); + const allowToolcacheValue = allowToolcacheValueFF && (isDynamicWorkflow() || isInTestMode()); + if (allowToolcacheValue) { + logger.info( + `Attempting to use the latest CodeQL CLI version in the toolcache, as requested by 'tools: ${toolsInput}'.` + ); + latestToolcacheVersion = getLatestToolcacheVersion(logger); + if (latestToolcacheVersion) { + cliVersion2 = latestToolcacheVersion; + } + } + if (latestToolcacheVersion === void 0) { + if (allowToolcacheValue) { + logger.info( + `Found no CodeQL CLI in the toolcache, ignoring 'tools: ${toolsInput}'...` + ); + } else { + if (allowToolcacheValueFF) { + logger.warning( + `Ignoring 'tools: ${toolsInput}' because the workflow was not triggered dynamically.` + ); + } else { + logger.info( + `Ignoring 'tools: ${toolsInput}' because the feature is not enabled.` + ); + } + } + cliVersion2 = defaultCliVersion.cliVersion; + tagName = defaultCliVersion.tagName; + } + } else if (toolsInput !== void 0) { + tagName = tryGetTagNameFromUrl(toolsInput, logger); + url2 = toolsInput; + if (tagName) { + const bundleVersion3 = tryGetBundleVersionFromTagName(tagName, logger); + if (bundleVersion3 && semver8.valid(bundleVersion3)) { + cliVersion2 = convertToSemVer(bundleVersion3, logger); + } } + } else { + cliVersion2 = defaultCliVersion.cliVersion; + tagName = defaultCliVersion.tagName; } -}; -LongPrototype.getHighBits = function getHighBits() { - return this.high; -}; -LongPrototype.getHighBitsUnsigned = function getHighBitsUnsigned() { - return this.high >>> 0; -}; -LongPrototype.getLowBits = function getLowBits() { - return this.low; -}; -LongPrototype.getLowBitsUnsigned = function getLowBitsUnsigned() { - return this.low >>> 0; -}; -LongPrototype.getNumBitsAbs = function getNumBitsAbs() { - if (this.isNegative()) - 
return this.eq(MIN_VALUE) ? 64 : this.neg().getNumBitsAbs(); - var val = this.high != 0 ? this.high : this.low; - for (var bit = 31; bit > 0; bit--) if ((val & 1 << bit) != 0) break; - return this.high != 0 ? bit + 33 : bit + 1; -}; -LongPrototype.isSafeInteger = function isSafeInteger() { - var top11Bits = this.high >> 21; - if (!top11Bits) return true; - if (this.unsigned) return false; - return top11Bits === -1 && !(this.low === 0 && this.high === -2097152); -}; -LongPrototype.isZero = function isZero() { - return this.high === 0 && this.low === 0; -}; -LongPrototype.eqz = LongPrototype.isZero; -LongPrototype.isNegative = function isNegative() { - return !this.unsigned && this.high < 0; -}; -LongPrototype.isPositive = function isPositive() { - return this.unsigned || this.high >= 0; -}; -LongPrototype.isOdd = function isOdd() { - return (this.low & 1) === 1; -}; -LongPrototype.isEven = function isEven() { - return (this.low & 1) === 0; -}; -LongPrototype.equals = function equals(other) { - if (!isLong(other)) other = fromValue(other); - if (this.unsigned !== other.unsigned && this.high >>> 31 === 1 && other.high >>> 31 === 1) - return false; - return this.high === other.high && this.low === other.low; -}; -LongPrototype.eq = LongPrototype.equals; -LongPrototype.notEquals = function notEquals(other) { - return !this.eq( - /* validates */ - other + const bundleVersion2 = tagName && tryGetBundleVersionFromTagName(tagName, logger); + const humanReadableVersion = cliVersion2 ?? (bundleVersion2 && convertToSemVer(bundleVersion2, logger)) ?? tagName ?? url2 ?? "unknown"; + logger.debug( + `Attempting to obtain CodeQL tools. CLI version: ${cliVersion2 ?? "unknown"}, bundle tag name: ${tagName ?? "unknown"}, URL: ${url2 ?? "unspecified"}.` ); -}; -LongPrototype.neq = LongPrototype.notEquals; -LongPrototype.ne = LongPrototype.notEquals; -LongPrototype.lessThan = function lessThan(other) { - return this.comp( - /* validates */ - other - ) < 0; -}; -LongPrototype.lt = LongPrototype.lessThan; -LongPrototype.lessThanOrEqual = function lessThanOrEqual(other) { - return this.comp( - /* validates */ - other - ) <= 0; -}; -LongPrototype.lte = LongPrototype.lessThanOrEqual; -LongPrototype.le = LongPrototype.lessThanOrEqual; -LongPrototype.greaterThan = function greaterThan(other) { - return this.comp( - /* validates */ - other - ) > 0; -}; -LongPrototype.gt = LongPrototype.greaterThan; -LongPrototype.greaterThanOrEqual = function greaterThanOrEqual(other) { - return this.comp( - /* validates */ - other - ) >= 0; -}; -LongPrototype.gte = LongPrototype.greaterThanOrEqual; -LongPrototype.ge = LongPrototype.greaterThanOrEqual; -LongPrototype.compare = function compare2(other) { - if (!isLong(other)) other = fromValue(other); - if (this.eq(other)) return 0; - var thisNeg = this.isNegative(), otherNeg = other.isNegative(); - if (thisNeg && !otherNeg) return -1; - if (!thisNeg && otherNeg) return 1; - if (!this.unsigned) return this.sub(other).isNegative() ? -1 : 1; - return other.high >>> 0 > this.high >>> 0 || other.high === this.high && other.low >>> 0 > this.low >>> 0 ? 
-1 : 1; -}; -LongPrototype.comp = LongPrototype.compare; -LongPrototype.negate = function negate() { - if (!this.unsigned && this.eq(MIN_VALUE)) return MIN_VALUE; - return this.not().add(ONE); -}; -LongPrototype.neg = LongPrototype.negate; -LongPrototype.add = function add(addend) { - if (!isLong(addend)) addend = fromValue(addend); - var a48 = this.high >>> 16; - var a32 = this.high & 65535; - var a16 = this.low >>> 16; - var a00 = this.low & 65535; - var b48 = addend.high >>> 16; - var b32 = addend.high & 65535; - var b16 = addend.low >>> 16; - var b00 = addend.low & 65535; - var c48 = 0, c32 = 0, c16 = 0, c00 = 0; - c00 += a00 + b00; - c16 += c00 >>> 16; - c00 &= 65535; - c16 += a16 + b16; - c32 += c16 >>> 16; - c16 &= 65535; - c32 += a32 + b32; - c48 += c32 >>> 16; - c32 &= 65535; - c48 += a48 + b48; - c48 &= 65535; - return fromBits(c16 << 16 | c00, c48 << 16 | c32, this.unsigned); -}; -LongPrototype.subtract = function subtract(subtrahend) { - if (!isLong(subtrahend)) subtrahend = fromValue(subtrahend); - return this.add(subtrahend.neg()); -}; -LongPrototype.sub = LongPrototype.subtract; -LongPrototype.multiply = function multiply(multiplier) { - if (this.isZero()) return this; - if (!isLong(multiplier)) multiplier = fromValue(multiplier); - if (wasm) { - var low = wasm["mul"](this.low, this.high, multiplier.low, multiplier.high); - return fromBits(low, wasm["get_high"](), this.unsigned); - } - if (multiplier.isZero()) return this.unsigned ? UZERO : ZERO; - if (this.eq(MIN_VALUE)) return multiplier.isOdd() ? MIN_VALUE : ZERO; - if (multiplier.eq(MIN_VALUE)) return this.isOdd() ? MIN_VALUE : ZERO; - if (this.isNegative()) { - if (multiplier.isNegative()) return this.neg().mul(multiplier.neg()); - else return this.neg().mul(multiplier).neg(); - } else if (multiplier.isNegative()) return this.mul(multiplier.neg()).neg(); - if (this.lt(TWO_PWR_24) && multiplier.lt(TWO_PWR_24)) - return fromNumber(this.toNumber() * multiplier.toNumber(), this.unsigned); - var a48 = this.high >>> 16; - var a32 = this.high & 65535; - var a16 = this.low >>> 16; - var a00 = this.low & 65535; - var b48 = multiplier.high >>> 16; - var b32 = multiplier.high & 65535; - var b16 = multiplier.low >>> 16; - var b00 = multiplier.low & 65535; - var c48 = 0, c32 = 0, c16 = 0, c00 = 0; - c00 += a00 * b00; - c16 += c00 >>> 16; - c00 &= 65535; - c16 += a16 * b00; - c32 += c16 >>> 16; - c16 &= 65535; - c16 += a00 * b16; - c32 += c16 >>> 16; - c16 &= 65535; - c32 += a32 * b00; - c48 += c32 >>> 16; - c32 &= 65535; - c32 += a16 * b16; - c48 += c32 >>> 16; - c32 &= 65535; - c32 += a00 * b32; - c48 += c32 >>> 16; - c32 &= 65535; - c48 += a48 * b00 + a32 * b16 + a16 * b32 + a00 * b48; - c48 &= 65535; - return fromBits(c16 << 16 | c00, c48 << 16 | c32, this.unsigned); -}; -LongPrototype.mul = LongPrototype.multiply; -LongPrototype.divide = function divide(divisor) { - if (!isLong(divisor)) divisor = fromValue(divisor); - if (divisor.isZero()) throw Error("division by zero"); - if (wasm) { - if (!this.unsigned && this.high === -2147483648 && divisor.low === -1 && divisor.high === -1) { - return this; + let codeqlFolder; + if (cliVersion2) { + codeqlFolder = toolcache3.find("CodeQL", cliVersion2); + if (!codeqlFolder) { + logger.debug( + `Didn't find a version of the CodeQL tools in the toolcache with a version number exactly matching ${cliVersion2}.` + ); + const allVersions = toolcache3.findAllVersions("CodeQL"); + logger.debug( + `Found the following versions of the CodeQL tools in the toolcache: ${JSON.stringify( + allVersions + 
)}.` + ); + const candidateVersions = allVersions.filter( + (version) => version.startsWith(`${cliVersion2}-`) + ); + if (candidateVersions.length === 1) { + logger.debug( + `Exactly one version of the CodeQL tools starting with ${cliVersion2} found in the toolcache, using that.` + ); + codeqlFolder = toolcache3.find("CodeQL", candidateVersions[0]); + } else if (candidateVersions.length === 0) { + logger.debug( + `Didn't find any versions of the CodeQL tools starting with ${cliVersion2} in the toolcache. Trying next fallback method.` + ); + } else { + logger.warning( + `Found ${candidateVersions.length} versions of the CodeQL tools starting with ${cliVersion2} in the toolcache, but at most one was expected.` + ); + logger.debug("Trying next fallback method."); + } } - var low = (this.unsigned ? wasm["div_u"] : wasm["div_s"])( - this.low, - this.high, - divisor.low, - divisor.high + } + if (!codeqlFolder && tagName) { + const fallbackVersion = await tryGetFallbackToolcacheVersion( + cliVersion2, + tagName, + logger ); - return fromBits(low, wasm["get_high"](), this.unsigned); + if (fallbackVersion) { + codeqlFolder = toolcache3.find("CodeQL", fallbackVersion); + } else { + logger.debug( + `Could not determine a fallback toolcache version number for CodeQL tools version ${humanReadableVersion}.` + ); + } } - if (this.isZero()) return this.unsigned ? UZERO : ZERO; - var approx, rem, res; - if (!this.unsigned) { - if (this.eq(MIN_VALUE)) { - if (divisor.eq(ONE) || divisor.eq(NEG_ONE)) - return MIN_VALUE; - else if (divisor.eq(MIN_VALUE)) return ONE; - else { - var halfThis = this.shr(1); - approx = halfThis.div(divisor).shl(1); - if (approx.eq(ZERO)) { - return divisor.isNegative() ? ONE : NEG_ONE; - } else { - rem = this.sub(divisor.mul(approx)); - res = approx.add(rem.div(divisor)); - return res; - } - } - } else if (divisor.eq(MIN_VALUE)) return this.unsigned ? UZERO : ZERO; - if (this.isNegative()) { - if (divisor.isNegative()) return this.neg().div(divisor.neg()); - return this.neg().div(divisor).neg(); - } else if (divisor.isNegative()) return this.div(divisor.neg()).neg(); - res = ZERO; + if (codeqlFolder) { + logger.info( + `Found CodeQL tools version ${humanReadableVersion} in the toolcache.` + ); } else { - if (!divisor.unsigned) divisor = divisor.toUnsigned(); - if (divisor.gt(this)) return UZERO; - if (divisor.gt(this.shru(1))) - return UONE; - res = UZERO; + logger.info( + `Did not find CodeQL tools version ${humanReadableVersion} in the toolcache.` + ); } - rem = this; - while (rem.gte(divisor)) { - approx = Math.max(1, Math.floor(rem.toNumber() / divisor.toNumber())); - var log2 = Math.ceil(Math.log(approx) / Math.LN2), delta = log2 <= 48 ? 1 : pow_dbl(2, log2 - 48), approxRes = fromNumber(approx), approxRem = approxRes.mul(divisor); - while (approxRem.isNegative() || approxRem.gt(rem)) { - approx -= delta; - approxRes = fromNumber(approx, this.unsigned); - approxRem = approxRes.mul(divisor); + if (codeqlFolder) { + if (cliVersion2) { + logger.info( + `Using CodeQL CLI version ${cliVersion2} from toolcache at ${codeqlFolder}` + ); + } else { + logger.info(`Using CodeQL CLI from toolcache at ${codeqlFolder}`); } - if (approxRes.isZero()) approxRes = ONE; - res = res.add(approxRes); - rem = rem.sub(approxRem); + return { + codeqlFolder, + sourceType: "toolcache", + toolsVersion: cliVersion2 ?? 
humanReadableVersion + }; } - return res; -}; -LongPrototype.div = LongPrototype.divide; -LongPrototype.modulo = function modulo(divisor) { - if (!isLong(divisor)) divisor = fromValue(divisor); - if (wasm) { - var low = (this.unsigned ? wasm["rem_u"] : wasm["rem_s"])( - this.low, - this.high, - divisor.low, - divisor.high + if (variant === "GitHub Enterprise Server" /* GHES */ && !forceShippedTools && !toolsInput) { + const result = await findOverridingToolsInCache( + humanReadableVersion, + logger ); - return fromBits(low, wasm["get_high"](), this.unsigned); + if (result !== void 0) { + return result; + } } - return this.sub(this.div(divisor).mul(divisor)); -}; -LongPrototype.mod = LongPrototype.modulo; -LongPrototype.rem = LongPrototype.modulo; -LongPrototype.not = function not() { - return fromBits(~this.low, ~this.high, this.unsigned); -}; -LongPrototype.countLeadingZeros = function countLeadingZeros() { - return this.high ? Math.clz32(this.high) : Math.clz32(this.low) + 32; -}; -LongPrototype.clz = LongPrototype.countLeadingZeros; -LongPrototype.countTrailingZeros = function countTrailingZeros() { - return this.low ? ctz32(this.low) : ctz32(this.high) + 32; -}; -LongPrototype.ctz = LongPrototype.countTrailingZeros; -LongPrototype.and = function and(other) { - if (!isLong(other)) other = fromValue(other); - return fromBits(this.low & other.low, this.high & other.high, this.unsigned); -}; -LongPrototype.or = function or(other) { - if (!isLong(other)) other = fromValue(other); - return fromBits(this.low | other.low, this.high | other.high, this.unsigned); -}; -LongPrototype.xor = function xor(other) { - if (!isLong(other)) other = fromValue(other); - return fromBits(this.low ^ other.low, this.high ^ other.high, this.unsigned); -}; -LongPrototype.shiftLeft = function shiftLeft(numBits) { - if (isLong(numBits)) numBits = numBits.toInt(); - if ((numBits &= 63) === 0) return this; - else if (numBits < 32) - return fromBits( - this.low << numBits, - this.high << numBits | this.low >>> 32 - numBits, - this.unsigned - ); - else return fromBits(0, this.low << numBits - 32, this.unsigned); -}; -LongPrototype.shl = LongPrototype.shiftLeft; -LongPrototype.shiftRight = function shiftRight(numBits) { - if (isLong(numBits)) numBits = numBits.toInt(); - if ((numBits &= 63) === 0) return this; - else if (numBits < 32) - return fromBits( - this.low >>> numBits | this.high << 32 - numBits, - this.high >> numBits, - this.unsigned + let compressionMethod; + if (!url2) { + compressionMethod = cliVersion2 !== void 0 && await useZstdBundle(cliVersion2, tarSupportsZstd) ? "zstd" : "gzip"; + url2 = await getCodeQLBundleDownloadURL( + tagName, + apiDetails, + compressionMethod, + logger ); - else - return fromBits( - this.high >> numBits - 32, - this.high >= 0 ? 0 : -1, - this.unsigned + } else { + const method = inferCompressionMethod(url2); + if (method === void 0) { + throw new ConfigurationError( + `Could not infer compression method from URL ${url2}. Please specify a URL ending in '.tar.gz' or '.tar.zst'.` + ); + } + compressionMethod = method; + } + if (cliVersion2) { + logger.info(`Using CodeQL CLI version ${cliVersion2} sourced from ${url2} .`); + } else { + logger.info(`Using CodeQL CLI sourced from ${url2} .`); + } + return { + bundleVersion: tagName && tryGetBundleVersionFromTagName(tagName, logger), + cliVersion: cliVersion2, + codeqlURL: url2, + compressionMethod, + sourceType: "download", + toolsVersion: cliVersion2 ?? 
humanReadableVersion + }; +} +async function tryGetFallbackToolcacheVersion(cliVersion2, tagName, logger) { + const bundleVersion2 = tryGetBundleVersionFromTagName(tagName, logger); + if (!bundleVersion2) { + return void 0; + } + const fallbackVersion = convertToSemVer(bundleVersion2, logger); + logger.debug( + `Computed a fallback toolcache version number of ${fallbackVersion} for CodeQL version ${cliVersion2 ?? tagName}.` + ); + return fallbackVersion; +} +var downloadCodeQL = async function(codeqlURL, compressionMethod, maybeBundleVersion, maybeCliVersion, apiDetails, tarVersion, tempDir, logger) { + const parsedCodeQLURL = new URL(codeqlURL); + const searchParams = new URLSearchParams(parsedCodeQLURL.search); + const headers = { + accept: "application/octet-stream" + }; + let authorization = void 0; + if (searchParams.has("token")) { + logger.debug("CodeQL tools URL contains an authorization token."); + } else { + authorization = getAuthorizationHeaderFor( + logger, + apiDetails, + codeqlURL ); -}; -LongPrototype.shr = LongPrototype.shiftRight; -LongPrototype.shiftRightUnsigned = function shiftRightUnsigned(numBits) { - if (isLong(numBits)) numBits = numBits.toInt(); - if ((numBits &= 63) === 0) return this; - if (numBits < 32) - return fromBits( - this.low >>> numBits | this.high << 32 - numBits, - this.high >>> numBits, - this.unsigned + } + const toolcacheInfo = getToolcacheDestinationInfo( + maybeBundleVersion, + maybeCliVersion, + logger + ); + const extractedBundlePath = toolcacheInfo?.path ?? getTempExtractionDir(tempDir); + const statusReport = await downloadAndExtract( + codeqlURL, + compressionMethod, + extractedBundlePath, + authorization, + { "User-Agent": "CodeQL Action", ...headers }, + tarVersion, + logger + ); + if (!toolcacheInfo) { + logger.debug( + `Could not cache CodeQL tools because we could not determine the bundle version from the URL ${codeqlURL}.` ); - if (numBits === 32) return fromBits(this.high, 0, this.unsigned); - return fromBits(this.high >>> numBits - 32, 0, this.unsigned); -}; -LongPrototype.shru = LongPrototype.shiftRightUnsigned; -LongPrototype.shr_u = LongPrototype.shiftRightUnsigned; -LongPrototype.rotateLeft = function rotateLeft(numBits) { - var b; - if (isLong(numBits)) numBits = numBits.toInt(); - if ((numBits &= 63) === 0) return this; - if (numBits === 32) return fromBits(this.high, this.low, this.unsigned); - if (numBits < 32) { - b = 32 - numBits; - return fromBits( - this.low << numBits | this.high >>> b, - this.high << numBits | this.low >>> b, - this.unsigned + return { + codeqlFolder: extractedBundlePath, + statusReport, + toolsVersion: maybeCliVersion ?? "unknown" + }; + } + writeToolcacheMarkerFile(toolcacheInfo.path, logger); + return { + codeqlFolder: extractedBundlePath, + statusReport, + toolsVersion: maybeCliVersion ?? 
toolcacheInfo.version + }; +}; +function getToolcacheDestinationInfo(maybeBundleVersion, maybeCliVersion, logger) { + if (maybeBundleVersion) { + const version = getCanonicalToolcacheVersion( + maybeCliVersion, + maybeBundleVersion, + logger ); + return { + path: getToolcacheDirectory(version), + version + }; } - numBits -= 32; - b = 32 - numBits; - return fromBits( - this.high << numBits | this.low >>> b, - this.low << numBits | this.high >>> b, - this.unsigned - ); -}; -LongPrototype.rotl = LongPrototype.rotateLeft; -LongPrototype.rotateRight = function rotateRight(numBits) { - var b; - if (isLong(numBits)) numBits = numBits.toInt(); - if ((numBits &= 63) === 0) return this; - if (numBits === 32) return fromBits(this.high, this.low, this.unsigned); - if (numBits < 32) { - b = 32 - numBits; - return fromBits( - this.high << b | this.low >>> numBits, - this.low << b | this.high >>> numBits, - this.unsigned + return void 0; +} +function getCanonicalToolcacheVersion(cliVersion2, bundleVersion2, logger) { + if (!cliVersion2?.match(/^[0-9]+\.[0-9]+\.[0-9]+$/)) { + return convertToSemVer(bundleVersion2, logger); + } + return cliVersion2; +} +async function setupCodeQLBundle(toolsInput, apiDetails, tempDir, variant, defaultCliVersion, features, logger) { + if (!await isBinaryAccessible("tar", logger)) { + throw new ConfigurationError( + "Could not find tar in PATH, so unable to extract CodeQL bundle." ); } - numBits -= 32; - b = 32 - numBits; - return fromBits( - this.low << b | this.high >>> numBits, - this.high << b | this.low >>> numBits, - this.unsigned + const zstdAvailability = await isZstdAvailable(logger); + const source = await getCodeQLSource( + toolsInput, + defaultCliVersion, + apiDetails, + variant, + zstdAvailability.available, + features, + logger ); -}; -LongPrototype.rotr = LongPrototype.rotateRight; -LongPrototype.toSigned = function toSigned() { - if (!this.unsigned) return this; - return fromBits(this.low, this.high, false); -}; -LongPrototype.toUnsigned = function toUnsigned() { - if (this.unsigned) return this; - return fromBits(this.low, this.high, true); -}; -LongPrototype.toBytes = function toBytes(le) { - return le ? this.toBytesLE() : this.toBytesBE(); -}; -LongPrototype.toBytesLE = function toBytesLE() { - var hi = this.high, lo = this.low; - return [ - lo & 255, - lo >>> 8 & 255, - lo >>> 16 & 255, - lo >>> 24, - hi & 255, - hi >>> 8 & 255, - hi >>> 16 & 255, - hi >>> 24 - ]; -}; -LongPrototype.toBytesBE = function toBytesBE() { - var hi = this.high, lo = this.low; - return [ - hi >>> 24, - hi >>> 16 & 255, - hi >>> 8 & 255, - hi & 255, - lo >>> 24, - lo >>> 16 & 255, - lo >>> 8 & 255, - lo & 255 - ]; -}; -Long.fromBytes = function fromBytes(bytes, unsigned, le) { - return le ? 
Long.fromBytesLE(bytes, unsigned) : Long.fromBytesBE(bytes, unsigned); -}; -Long.fromBytesLE = function fromBytesLE(bytes, unsigned) { - return new Long( - bytes[0] | bytes[1] << 8 | bytes[2] << 16 | bytes[3] << 24, - bytes[4] | bytes[5] << 8 | bytes[6] << 16 | bytes[7] << 24, - unsigned + let codeqlFolder; + let toolsVersion = source.toolsVersion; + let toolsDownloadStatusReport; + let toolsSource; + switch (source.sourceType) { + case "local": { + codeqlFolder = await extract( + source.codeqlTarPath, + getTempExtractionDir(tempDir), + source.compressionMethod, + zstdAvailability.version, + logger + ); + toolsSource = "LOCAL" /* Local */; + break; + } + case "toolcache": + codeqlFolder = source.codeqlFolder; + logger.debug(`CodeQL found in cache ${codeqlFolder}`); + toolsSource = "TOOLCACHE" /* Toolcache */; + break; + case "download": { + const result = await downloadCodeQL( + source.codeqlURL, + source.compressionMethod, + source.bundleVersion, + source.cliVersion, + apiDetails, + zstdAvailability.version, + tempDir, + logger + ); + toolsVersion = result.toolsVersion; + codeqlFolder = result.codeqlFolder; + toolsDownloadStatusReport = result.statusReport; + toolsSource = "DOWNLOAD" /* Download */; + break; + } + default: + assertNever(source); + } + return { + codeqlFolder, + toolsDownloadStatusReport, + toolsSource, + toolsVersion, + zstdAvailability + }; +} +async function useZstdBundle(cliVersion2, tarSupportsZstd) { + return ( + // In testing, gzip performs better than zstd on Windows. + process.platform !== "win32" && tarSupportsZstd && semver8.gte(cliVersion2, CODEQL_VERSION_ZSTD_BUNDLE) ); -}; -Long.fromBytesBE = function fromBytesBE(bytes, unsigned) { - return new Long( - bytes[4] << 24 | bytes[5] << 16 | bytes[6] << 8 | bytes[7], - bytes[0] << 24 | bytes[1] << 16 | bytes[2] << 8 | bytes[3], - unsigned +} +function getTempExtractionDir(tempDir) { + return path10.join(tempDir, v4_default()); +} +async function getNightlyToolsUrl(logger) { + const zstdAvailability = await isZstdAvailable(logger); + const compressionMethod = await useZstdBundle( + CODEQL_VERSION_ZSTD_BUNDLE, + zstdAvailability.available + ) ? 
"zstd" : "gzip"; + try { + const release2 = await getApiClient().rest.repos.listReleases({ + owner: CODEQL_NIGHTLIES_REPOSITORY_OWNER, + repo: CODEQL_NIGHTLIES_REPOSITORY_NAME, + per_page: 1, + page: 1, + prerelease: true + }); + const latestRelease = release2.data[0]; + if (!latestRelease) { + throw new Error("Could not find the latest nightly release."); + } + return `https://github.com/${CODEQL_NIGHTLIES_REPOSITORY_OWNER}/${CODEQL_NIGHTLIES_REPOSITORY_NAME}/releases/download/${latestRelease.tag_name}/${getCodeQLBundleName(compressionMethod)}`; + } catch (e) { + throw new Error( + `Failed to retrieve the latest nightly release: ${wrapError(e)}` + ); + } +} +function getLatestToolcacheVersion(logger) { + const allVersions = toolcache3.findAllVersions("CodeQL").sort((a, b) => semver8.compare(b, a)); + logger.debug( + `Found the following versions of the CodeQL tools in the toolcache: ${JSON.stringify( + allVersions + )}.` ); -}; -if (typeof BigInt === "function") { - Long.fromBigInt = function fromBigInt(value, unsigned) { - var lowBits = Number(BigInt.asIntN(32, value)); - var highBits = Number(BigInt.asIntN(32, value >> BigInt(32))); - return fromBits(lowBits, highBits, unsigned); - }; - Long.fromValue = function fromValueWithBigInt(value, unsigned) { - if (typeof value === "bigint") return Long.fromBigInt(value, unsigned); - return fromValue(value, unsigned); - }; - LongPrototype.toBigInt = function toBigInt() { - var lowBigInt = BigInt(this.low >>> 0); - var highBigInt = BigInt(this.unsigned ? this.high >>> 0 : this.high); - return highBigInt << BigInt(32) | lowBigInt; - }; + if (allVersions.length > 0) { + const latestToolcacheVersion = allVersions[0]; + logger.info( + `CLI version ${latestToolcacheVersion} is the latest version in the toolcache.` + ); + return latestToolcacheVersion; + } + return void 0; } -var long_default = Long; +function isReservedToolsValue(tools) { + return CODEQL_BUNDLE_VERSION_ALIAS.includes(tools) || CODEQL_NIGHTLY_TOOLS_INPUTS.includes(tools) || tools === CODEQL_TOOLCACHE_INPUT; +} + +// src/tracer-config.ts +async function shouldEnableIndirectTracing(codeql, config) { + if (config.buildMode === "none" /* None */) { + return false; + } + if (config.buildMode === "autobuild" /* Autobuild */) { + return false; + } + return asyncSome(config.languages, (l) => codeql.isTracedLanguage(l)); +} + +// src/codeql.ts +var cachedCodeQL = void 0; +var CODEQL_MINIMUM_VERSION = "2.17.6"; +var CODEQL_NEXT_MINIMUM_VERSION = "2.17.6"; +var GHES_VERSION_MOST_RECENTLY_DEPRECATED = "3.13"; +var GHES_MOST_RECENT_DEPRECATION_DATE = "2025-06-19"; +var EXTRACTION_DEBUG_MODE_VERBOSITY = "progress++"; +var CODEQL_VERSION_CACHE_CLEANUP = "2.17.1"; +async function setupCodeQL(toolsInput, apiDetails, tempDir, variant, defaultCliVersion, features, logger, checkVersion) { + try { + const { + codeqlFolder, + toolsDownloadStatusReport, + toolsSource, + toolsVersion, + zstdAvailability + } = await setupCodeQLBundle( + toolsInput, + apiDetails, + tempDir, + variant, + defaultCliVersion, + features, + logger + ); + logger.debug( + `Bundle download status report: ${JSON.stringify( + toolsDownloadStatusReport + )}` + ); + let codeqlCmd = path11.join(codeqlFolder, "codeql", "codeql"); + if (process.platform === "win32") { + codeqlCmd += ".exe"; + } else if (process.platform !== "linux" && process.platform !== "darwin") { + throw new ConfigurationError( + `Unsupported platform: ${process.platform}` + ); + } + cachedCodeQL = await getCodeQLForCmd(codeqlCmd, checkVersion); + return { + codeql: 
cachedCodeQL, + toolsDownloadStatusReport, + toolsSource, + toolsVersion, + zstdAvailability + }; + } catch (rawError) { + const e = wrapApiConfigurationError(rawError); + const ErrorClass = e instanceof ConfigurationError || e instanceof Error && e.message.includes("ENOSPC") ? ConfigurationError : Error; + throw new ErrorClass( + `Unable to download and extract CodeQL CLI: ${getErrorMessage(e)}${e instanceof Error && e.stack ? ` -// src/fingerprints.ts -var tab = " ".charCodeAt(0); -var space = " ".charCodeAt(0); -var lf = "\n".charCodeAt(0); -var cr = "\r".charCodeAt(0); -var EOF = 65535; -var BLOCK_SIZE = 100; -var MOD = long_default.fromInt(37); -function computeFirstMod() { - let firstMod = long_default.ONE; - for (let i = 0; i < BLOCK_SIZE; i++) { - firstMod = firstMod.multiply(MOD); +Details: ${e.stack}` : ""}` + ); } - return firstMod; } -async function hash(callback, filepath) { - const window2 = Array(BLOCK_SIZE).fill(0); - const lineNumbers = Array(BLOCK_SIZE).fill(-1); - let hashRaw = long_default.ZERO; - const firstMod = computeFirstMod(); - let index = 0; - let lineNumber = 0; - let lineStart = true; - let prevCR = false; - const hashCounts = {}; - const outputHash = function() { - const hashValue = hashRaw.toUnsigned().toString(16); - if (!hashCounts[hashValue]) { - hashCounts[hashValue] = 0; - } - hashCounts[hashValue]++; - callback(lineNumbers[index], `${hashValue}:${hashCounts[hashValue]}`); - lineNumbers[index] = -1; - }; - const updateHash = function(current) { - const begin = window2[index]; - window2[index] = current; - hashRaw = MOD.multiply(hashRaw).add(long_default.fromInt(current)).subtract(firstMod.multiply(long_default.fromInt(begin))); - index = (index + 1) % BLOCK_SIZE; - }; - const processCharacter = function(current) { - if (current === space || current === tab || prevCR && current === lf) { - prevCR = false; - return; - } - if (current === cr) { - current = lf; - prevCR = true; - } else { - prevCR = false; - } - if (lineNumbers[index] !== -1) { - outputHash(); - } - if (lineStart) { - lineStart = false; - lineNumber++; - lineNumbers[index] = lineNumber; - } - if (current === lf) { - lineStart = true; +async function getCodeQL(cmd) { + if (cachedCodeQL === void 0) { + cachedCodeQL = await getCodeQLForCmd(cmd, true); + } + return cachedCodeQL; +} +async function getCodeQLForCmd(cmd, checkVersion) { + const codeql = { + getPath() { + return cmd; + }, + async getVersion() { + let result = getCachedCodeQlVersion(); + if (result === void 0) { + const output = await runCli(cmd, ["version", "--format=json"], { + noStreamStdout: true + }); + try { + result = JSON.parse(output); + } catch { + throw Error( + `Invalid JSON output from \`version --format=json\`: ${output}` + ); + } + cacheCodeQlVersion(result); + } + return result; + }, + async printVersion() { + await runCli(cmd, ["version", "--format=json"]); + }, + async supportsFeature(feature) { + return isSupportedToolsFeature(await this.getVersion(), feature); + }, + async isTracedLanguage(language) { + const extractorPath = await this.resolveExtractor(language); + const tracingConfigPath = path11.join( + extractorPath, + "tools", + "tracing-config.lua" + ); + return fs11.existsSync(tracingConfigPath); + }, + async isScannedLanguage(language) { + return !await this.isTracedLanguage(language); + }, + async databaseInitCluster(config, sourceRoot, processName, qlconfigFile, logger) { + const extraArgs = config.languages.map( + (language) => `--language=${language}` + ); + if (await 
shouldEnableIndirectTracing(codeql, config)) { + extraArgs.push("--begin-tracing"); + extraArgs.push(...await getTrapCachingExtractorConfigArgs(config)); + extraArgs.push(`--trace-process-name=${processName}`); + } + const codeScanningConfigFile = await writeCodeScanningConfigFile( + config, + logger + ); + const externalRepositoryToken = getOptionalInput( + "external-repository-token" + ); + extraArgs.push(`--codescanning-config=${codeScanningConfigFile}`); + if (externalRepositoryToken) { + extraArgs.push("--external-repository-token-stdin"); + } + if (config.buildMode !== void 0) { + extraArgs.push(`--build-mode=${config.buildMode}`); + } + if (qlconfigFile !== void 0) { + extraArgs.push(`--qlconfig-file=${qlconfigFile}`); + } + const overwriteFlag = isSupportedToolsFeature( + await this.getVersion(), + "forceOverwrite" /* ForceOverwrite */ + ) ? "--force-overwrite" : "--overwrite"; + const overlayDatabaseMode = config.overlayDatabaseMode; + if (overlayDatabaseMode === "overlay" /* Overlay */) { + const overlayChangesFile = await writeOverlayChangesFile( + config, + sourceRoot, + logger + ); + extraArgs.push(`--overlay-changes=${overlayChangesFile}`); + } else if (overlayDatabaseMode === "overlay-base" /* OverlayBase */) { + extraArgs.push("--overlay-base"); + } + const baselineFilesOptions = config.enableFileCoverageInformation ? [ + "--calculate-language-specific-baseline", + "--sublanguage-file-coverage" + ] : ["--no-calculate-baseline"]; + await runCli( + cmd, + [ + "database", + "init", + ...overlayDatabaseMode === "overlay" /* Overlay */ ? [] : [overwriteFlag], + "--db-cluster", + config.dbLocation, + `--source-root=${sourceRoot}`, + ...baselineFilesOptions, + "--extractor-include-aliases", + ...extraArgs, + ...getExtraOptionsFromEnv(["database", "init"], { + // Some user configs specify `--no-calculate-baseline` as an additional + // argument to `codeql database init`. Therefore ignore the baseline file + // options here to avoid specifying the same argument twice and erroring. + ignoringOptions: ["--overwrite", ...baselineFilesOptions] + }) + ], + { stdin: externalRepositoryToken } + ); + if (overlayDatabaseMode === "overlay-base" /* OverlayBase */) { + await writeBaseDatabaseOidsFile(config, sourceRoot); + } + }, + async runAutobuild(config, language) { + applyAutobuildAzurePipelinesTimeoutFix(); + const autobuildCmd = path11.join( + await this.resolveExtractor(language), + "tools", + process.platform === "win32" ? 
"autobuild.cmd" : "autobuild.sh" + ); + if (config.debugMode) { + process.env["CODEQL_VERBOSITY" /* CLI_VERBOSITY */] = process.env["CODEQL_VERBOSITY" /* CLI_VERBOSITY */] || EXTRACTION_DEBUG_MODE_VERBOSITY; + } + await runCli(autobuildCmd); + }, + async extractScannedLanguage(config, language) { + await runCli(cmd, [ + "database", + "trace-command", + "--index-traceless-dbs", + ...await getTrapCachingExtractorConfigArgsForLang(config, language), + ...getExtractionVerbosityArguments(config.debugMode), + ...getExtraOptionsFromEnv(["database", "trace-command"]), + getCodeQLDatabasePath(config, language) + ]); + }, + async extractUsingBuildMode(config, language) { + if (config.buildMode === "autobuild" /* Autobuild */) { + applyAutobuildAzurePipelinesTimeoutFix(); + } + try { + await runCli(cmd, [ + "database", + "trace-command", + "--use-build-mode", + "--working-dir", + process.cwd(), + ...await getTrapCachingExtractorConfigArgsForLang(config, language), + ...getExtractionVerbosityArguments(config.debugMode), + ...getExtraOptionsFromEnv(["database", "trace-command"]), + getCodeQLDatabasePath(config, language) + ]); + } catch (e) { + if (config.buildMode === "autobuild" /* Autobuild */) { + const prefix = `We were unable to automatically build your code. Please change the build mode for this language to manual and specify build steps for your project. See ${"https://docs.github.com/en/code-security/code-scanning/troubleshooting-code-scanning/automatic-build-failed" /* AUTOMATIC_BUILD_FAILED */} for more information.`; + throw new ConfigurationError(`${prefix} ${getErrorMessage(e)}`); + } else { + throw e; + } + } + }, + async finalizeDatabase(databasePath, threadsFlag, memoryFlag, enableDebugLogging) { + const args = [ + "database", + "finalize", + "--finalize-dataset", + threadsFlag, + memoryFlag, + ...getExtractionVerbosityArguments(enableDebugLogging), + ...getExtraOptionsFromEnv(["database", "finalize"]), + databasePath + ]; + await runCli(cmd, args); + }, + async resolveLanguages() { + const codeqlArgs = [ + "resolve", + "languages", + "--format=json", + ...getExtraOptionsFromEnv(["resolve", "languages"]) + ]; + const output = await runCli(cmd, codeqlArgs); + try { + return JSON.parse(output); + } catch (e) { + throw new Error( + `Unexpected output from codeql resolve languages: ${e}` + ); + } + }, + async betterResolveLanguages({ + filterToLanguagesWithQueries + } = { filterToLanguagesWithQueries: false }) { + const codeqlArgs = [ + "resolve", + "languages", + "--format=betterjson", + "--extractor-options-verbosity=4", + "--extractor-include-aliases", + ...filterToLanguagesWithQueries ? 
["--filter-to-languages-with-queries"] : [], + ...getExtraOptionsFromEnv(["resolve", "languages"]) + ]; + const output = await runCli(cmd, codeqlArgs); + try { + return JSON.parse(output); + } catch (e) { + throw new Error( + `Unexpected output from codeql resolve languages with --format=betterjson: ${e}` + ); + } + }, + async resolveBuildEnvironment(workingDir, language) { + const codeqlArgs = [ + "resolve", + "build-environment", + `--language=${language}`, + "--extractor-include-aliases", + ...getExtraOptionsFromEnv(["resolve", "build-environment"]) + ]; + if (workingDir !== void 0) { + codeqlArgs.push("--working-dir", workingDir); + } + const output = await runCli(cmd, codeqlArgs); + try { + return JSON.parse(output); + } catch (e) { + throw new Error( + `Unexpected output from codeql resolve build-environment: ${e} in +${output}` + ); + } + }, + async databaseRunQueries(databasePath, flags, queries = []) { + const codeqlArgs = [ + "database", + "run-queries", + ...flags, + databasePath, + "--min-disk-free=1024", + // Try to leave at least 1GB free + "-v", + ...queries, + ...getExtraOptionsFromEnv(["database", "run-queries"], { + ignoringOptions: ["--expect-discarded-cache"] + }) + ]; + await runCli(cmd, codeqlArgs); + }, + async databaseInterpretResults(databasePath, querySuitePaths, sarifFile, threadsFlag, verbosityFlag, sarifRunPropertyFlag, automationDetailsId, config, features) { + const shouldExportDiagnostics = await features.getValue( + "export_diagnostics_enabled" /* ExportDiagnosticsEnabled */, + this + ); + const codeqlArgs = [ + "database", + "interpret-results", + threadsFlag, + "--format=sarif-latest", + verbosityFlag, + `--output=${sarifFile}`, + "--print-diagnostics-summary", + "--print-metrics-summary", + "--sarif-add-baseline-file-info", + `--sarif-codescanning-config=${getGeneratedCodeScanningConfigPath( + config + )}`, + "--sarif-group-rules-by-pack", + "--sarif-include-query-help=always", + "--sublanguage-file-coverage", + ...await getJobRunUuidSarifOptions(this), + ...getExtraOptionsFromEnv(["database", "interpret-results"]) + ]; + if (sarifRunPropertyFlag !== void 0) { + codeqlArgs.push(sarifRunPropertyFlag); + } + if (automationDetailsId !== void 0) { + codeqlArgs.push("--sarif-category", automationDetailsId); + } + if (shouldExportDiagnostics) { + codeqlArgs.push("--sarif-include-diagnostics"); + } else { + codeqlArgs.push("--no-sarif-include-diagnostics"); + } + codeqlArgs.push(databasePath); + if (querySuitePaths) { + codeqlArgs.push(...querySuitePaths); + } + return await runCli(cmd, codeqlArgs, { + noStreamStdout: true + }); + }, + async databaseCleanupCluster(config, cleanupLevel) { + const cacheCleanupFlag = await codeQlVersionAtLeast( + this, + CODEQL_VERSION_CACHE_CLEANUP + ) ? "--cache-cleanup" : "--mode"; + for (const language of config.languages) { + const databasePath = getCodeQLDatabasePath(config, language); + const codeqlArgs = [ + "database", + "cleanup", + databasePath, + `${cacheCleanupFlag}=${cleanupLevel}`, + ...getExtraOptionsFromEnv(["database", "cleanup"]) + ]; + await runCli(cmd, codeqlArgs); + } + }, + async databaseBundle(databasePath, outputFilePath, databaseName, includeDiagnostics, alsoIncludeRelativePaths) { + const includeDiagnosticsArgs = includeDiagnostics ? 
["--include-diagnostics"] : []; + const args = [ + "database", + "bundle", + databasePath, + `--output=${outputFilePath}`, + `--name=${databaseName}`, + ...includeDiagnosticsArgs, + ...getExtraOptionsFromEnv(["database", "bundle"], { + ignoringOptions: includeDiagnosticsArgs + }) + ]; + if (await this.supportsFeature("bundleSupportsIncludeOption" /* BundleSupportsIncludeOption */)) { + args.push( + ...alsoIncludeRelativePaths.flatMap((relativePath) => [ + "--include", + relativePath + ]) + ); + } + await new toolrunner3.ToolRunner(cmd, args).exec(); + }, + async databaseExportDiagnostics(databasePath, sarifFile, automationDetailsId) { + const args = [ + "database", + "export-diagnostics", + `${databasePath}`, + "--db-cluster", + // Database is always a cluster for CodeQL versions that support diagnostics. + "--format=sarif-latest", + `--output=${sarifFile}`, + "--sarif-include-diagnostics", + // ExportDiagnosticsEnabled is always true if this command is run. + "-vvv", + ...getExtraOptionsFromEnv(["diagnostics", "export"]) + ]; + if (automationDetailsId !== void 0) { + args.push("--sarif-category", automationDetailsId); + } + await new toolrunner3.ToolRunner(cmd, args).exec(); + }, + async diagnosticsExport(sarifFile, automationDetailsId, config) { + const args = [ + "diagnostics", + "export", + "--format=sarif-latest", + `--output=${sarifFile}`, + `--sarif-codescanning-config=${getGeneratedCodeScanningConfigPath( + config + )}`, + ...getExtraOptionsFromEnv(["diagnostics", "export"]) + ]; + if (automationDetailsId !== void 0) { + args.push("--sarif-category", automationDetailsId); + } + await new toolrunner3.ToolRunner(cmd, args).exec(); + }, + async resolveExtractor(language) { + let extractorPath = ""; + await new toolrunner3.ToolRunner( + cmd, + [ + "resolve", + "extractor", + "--format=json", + `--language=${language}`, + "--extractor-include-aliases", + ...getExtraOptionsFromEnv(["resolve", "extractor"]) + ], + { + silent: true, + listeners: { + stdout: (data) => { + extractorPath += data.toString(); + }, + stderr: (data) => { + process.stderr.write(data); + } + } + } + ).exec(); + return JSON.parse(extractorPath); + }, + async resolveQueriesStartingPacks(queries) { + const codeqlArgs = [ + "resolve", + "queries", + "--format=startingpacks", + ...getExtraOptionsFromEnv(["resolve", "queries"]), + ...queries + ]; + const output = await runCli(cmd, codeqlArgs, { noStreamStdout: true }); + try { + return JSON.parse(output); + } catch (e) { + throw new Error( + `Unexpected output from codeql resolve queries --format=startingpacks: ${e}` + ); + } + }, + async resolveDatabase(databasePath) { + const codeqlArgs = [ + "resolve", + "database", + databasePath, + "--format=json", + ...getExtraOptionsFromEnv(["resolve", "database"]) + ]; + const output = await runCli(cmd, codeqlArgs, { noStreamStdout: true }); + try { + return JSON.parse(output); + } catch (e) { + throw new Error( + `Unexpected output from codeql resolve database --format=json: ${e}` + ); + } + }, + async mergeResults(sarifFiles, outputFile, { + mergeRunsFromEqualCategory = false + }) { + const args = [ + "github", + "merge-results", + "--output", + outputFile, + ...getExtraOptionsFromEnv(["github", "merge-results"]) + ]; + for (const sarifFile of sarifFiles) { + args.push("--sarif", sarifFile); + } + if (mergeRunsFromEqualCategory) { + args.push("--sarif-merge-runs-from-equal-category"); + } + await runCli(cmd, args); } - updateHash(current); }; - const readStream = fs11.createReadStream(filepath, "utf8"); - for await (const data 
of readStream) { - for (let i = 0; i < data.length; ++i) { - processCharacter(data.charCodeAt(i)); - } - } - processCharacter(EOF); - for (let i = 0; i < BLOCK_SIZE; i++) { - if (lineNumbers[index] !== -1) { - outputHash(); - } - updateHash(0); + if (checkVersion && !await codeQlVersionAtLeast(codeql, CODEQL_MINIMUM_VERSION)) { + throw new ConfigurationError( + `Expected a CodeQL CLI with version at least ${CODEQL_MINIMUM_VERSION} but got version ${(await codeql.getVersion()).version}` + ); + } else if (checkVersion && process.env["CODEQL_ACTION_SUPPRESS_DEPRECATED_SOON_WARNING" /* SUPPRESS_DEPRECATED_SOON_WARNING */] !== "true" && !await codeQlVersionAtLeast(codeql, CODEQL_NEXT_MINIMUM_VERSION)) { + const result = await codeql.getVersion(); + core11.warning( + `CodeQL CLI version ${result.version} was discontinued on ${GHES_MOST_RECENT_DEPRECATION_DATE} alongside GitHub Enterprise Server ${GHES_VERSION_MOST_RECENTLY_DEPRECATED} and will not be supported by the next minor release of the CodeQL Action. Please update to CodeQL CLI version ${CODEQL_NEXT_MINIMUM_VERSION} or later. For instance, if you have specified a custom version of the CLI using the 'tools' input to the 'init' Action, you can remove this input to use the default version. + +Alternatively, if you want to continue using CodeQL CLI version ${result.version}, you can replace 'github/codeql-action/*@v${getActionVersion().split(".")[0]}' by 'github/codeql-action/*@v${getActionVersion()}' in your code scanning workflow to continue using this version of the CodeQL Action.` + ); + core11.exportVariable("CODEQL_ACTION_SUPPRESS_DEPRECATED_SOON_WARNING" /* SUPPRESS_DEPRECATED_SOON_WARNING */, "true"); } + return codeql; } -function locationUpdateCallback(result, location, logger) { - let locationStartLine = location.physicalLocation?.region?.startLine; - if (locationStartLine === void 0) { - locationStartLine = 1; - } - return function(lineNumber, hashValue) { - if (locationStartLine !== lineNumber) { - return; - } - if (!result.partialFingerprints) { - result.partialFingerprints = {}; - } - const existingFingerprint = result.partialFingerprints.primaryLocationLineHash; - if (!existingFingerprint) { - result.partialFingerprints.primaryLocationLineHash = hashValue; - } else if (existingFingerprint !== hashValue) { - logger.warning( - `Calculated fingerprint of ${hashValue} for file ${location.physicalLocation.artifactLocation.uri} line ${lineNumber}, but found existing inconsistent fingerprint value ${existingFingerprint}` - ); - } - }; +function getExtraOptionsFromEnv(paths, { ignoringOptions } = {}) { + const options = getExtraOptionsEnvParam(); + return getExtraOptions(options, paths, []).filter( + (option) => !ignoringOptions?.includes(option) + ); } -function resolveUriToFile(location, artifacts, sourceRoot, logger) { - if (!location.uri && location.index !== void 0) { - if (typeof location.index !== "number" || location.index < 0 || location.index >= artifacts.length || typeof artifacts[location.index].location !== "object") { - logger.debug(`Ignoring location as index "${location.index}" is invalid`); - return void 0; - } - location = artifacts[location.index].location; +function asExtraOptions(options, pathInfo) { + if (options === void 0) { + return []; } - if (typeof location.uri !== "string") { - logger.debug(`Ignoring location as URI "${location.uri}" is invalid`); - return void 0; + if (!Array.isArray(options)) { + const msg = `The extra options for '${pathInfo.join( + "." 
+ )}' ('${JSON.stringify(options)}') are not in an array.`; + throw new Error(msg); } - let uri; + return options.map((o) => { + const t = typeof o; + if (t !== "string" && t !== "number" && t !== "boolean") { + const msg = `The extra option for '${pathInfo.join( + "." + )}' ('${JSON.stringify(o)}') is not a primitive value.`; + throw new Error(msg); + } + return `${o}`; + }); +} +function getExtraOptions(options, paths, pathInfo) { + const all = asExtraOptions(options?.["*"], pathInfo.concat("*")); + const specific = paths.length === 0 ? asExtraOptions(options, pathInfo) : getExtraOptions( + options?.[paths[0]], + paths?.slice(1), + pathInfo.concat(paths[0]) + ); + return all.concat(specific); +} +async function runCli(cmd, args = [], opts = {}) { try { - uri = decodeURIComponent(location.uri); - } catch { - logger.debug(`Ignoring location as URI "${location.uri}" is invalid`); - return void 0; - } - const fileUriPrefix = "file://"; - if (uri.startsWith(fileUriPrefix)) { - uri = uri.substring(fileUriPrefix.length); - } - if (uri.indexOf("://") !== -1) { - logger.debug( - `Ignoring location URI "${uri}" as the scheme is not recognised` - ); - return void 0; - } - const srcRootPrefix = `${sourceRoot}/`; - if (uri.startsWith("/") && !uri.startsWith(srcRootPrefix)) { - logger.debug( - `Ignoring location URI "${uri}" as it is outside of the src root` - ); - return void 0; - } - if (!import_path2.default.isAbsolute(uri)) { - uri = srcRootPrefix + uri; - } - if (!fs11.existsSync(uri)) { - logger.debug(`Unable to compute fingerprint for non-existent file: ${uri}`); - return void 0; - } - if (fs11.statSync(uri).isDirectory()) { - logger.debug(`Unable to compute fingerprint for directory: ${uri}`); - return void 0; + return await runTool(cmd, args, opts); + } catch (e) { + if (e instanceof CommandInvocationError) { + throw wrapCliConfigurationError(new CliError(e)); + } + throw e; } - return uri; } -async function addFingerprints(sarif, sourceRoot, logger) { +async function writeCodeScanningConfigFile(config, logger) { + const codeScanningConfigFile = getGeneratedCodeScanningConfigPath(config); + const augmentedConfig = appendExtraQueryExclusions( + config.extraQueryExclusions, + config.computedConfig + ); logger.info( - `Adding fingerprints to SARIF file. 
See ${"https://docs.github.com/en/code-security/reference/code-scanning/sarif-support-for-code-scanning#data-for-preventing-duplicated-alerts" /* TRACK_CODE_SCANNING_ALERTS_ACROSS_RUNS */} for more information.` + `Writing augmented user configuration file to ${codeScanningConfigFile}` ); - const callbacksByFile = {}; - for (const run2 of sarif.runs || []) { - const artifacts = run2.artifacts || []; - for (const result of run2.results || []) { - const primaryLocation = (result.locations || [])[0]; - if (!primaryLocation?.physicalLocation?.artifactLocation) { - logger.debug( - `Unable to compute fingerprint for invalid location: ${JSON.stringify( - primaryLocation - )}` - ); - continue; - } - if (primaryLocation?.physicalLocation?.region?.startLine === void 0) { - continue; - } - const filepath = resolveUriToFile( - primaryLocation.physicalLocation.artifactLocation, - artifacts, - sourceRoot, - logger - ); - if (!filepath) { - continue; - } - if (!callbacksByFile[filepath]) { - callbacksByFile[filepath] = []; - } - callbacksByFile[filepath].push( - locationUpdateCallback(result, primaryLocation, logger) - ); - } - } - for (const [filepath, callbacks] of Object.entries(callbacksByFile)) { - const teeCallback = function(lineNumber, hashValue) { - for (const c of Object.values(callbacks)) { - c(lineNumber, hashValue); - } - }; - await hash(teeCallback, filepath); - } - return sarif; + logger.startGroup("Augmented user configuration file contents"); + logger.info(dump(augmentedConfig)); + logger.endGroup(); + fs11.writeFileSync(codeScanningConfigFile, dump(augmentedConfig)); + return codeScanningConfigFile; +} +var TRAP_CACHE_SIZE_MB = 1024; +async function getTrapCachingExtractorConfigArgs(config) { + const result = []; + for (const language of config.languages) + result.push( + await getTrapCachingExtractorConfigArgsForLang(config, language) + ); + return result.flat(); +} +async function getTrapCachingExtractorConfigArgsForLang(config, language) { + const cacheDir = config.trapCaches[language]; + if (cacheDir === void 0) return []; + const write = await isAnalyzingDefaultBranch(); + return [ + `-O=${language}.trap.cache.dir=${cacheDir}`, + `-O=${language}.trap.cache.bound=${TRAP_CACHE_SIZE_MB}`, + `-O=${language}.trap.cache.write=${write}` + ]; +} +function getGeneratedCodeScanningConfigPath(config) { + return path11.resolve(config.tempDir, "user-config.yaml"); +} +function getExtractionVerbosityArguments(enableDebugLogging) { + return enableDebugLogging ? [`--verbosity=${EXTRACTION_DEBUG_MODE_VERBOSITY}`] : []; +} +function applyAutobuildAzurePipelinesTimeoutFix() { + const javaToolOptions = process.env["JAVA_TOOL_OPTIONS"] || ""; + process.env["JAVA_TOOL_OPTIONS"] = [ + ...javaToolOptions.split(/\s+/), + "-Dhttp.keepAlive=false", + "-Dmaven.wagon.http.pool=false" + ].join(" "); +} +async function getJobRunUuidSarifOptions(codeql) { + const jobRunUuid = process.env["JOB_RUN_UUID" /* JOB_RUN_UUID */]; + return jobRunUuid && await codeql.supportsFeature( + "databaseInterpretResultsSupportsSarifRunProperty" /* DatabaseInterpretResultsSupportsSarifRunProperty */ + ) ? 
[`--sarif-run-property=jobRunUuid=${jobRunUuid}`] : []; } // src/init.ts -var toolrunner4 = __toESM(require_toolrunner()); -var io5 = __toESM(require_io()); async function initCodeQL(toolsInput, apiDetails, tempDir, variant, defaultCliVersion, features, logger) { logger.startGroup("Setup CodeQL tools"); const { - codeql: codeql2, + codeql, toolsDownloadStatusReport, toolsSource, toolsVersion, @@ -110732,10 +110734,10 @@ async function initCodeQL(toolsInput, apiDetails, tempDir, variant, defaultCliVe logger, true ); - await codeql2.printVersion(); + await codeql.printVersion(); logger.endGroup(); return { - codeql: codeql2, + codeql, toolsDownloadStatusReport, toolsSource, toolsVersion, @@ -111368,6 +111370,20 @@ function filterAlertsByDiffRange(logger, sarif) { } // src/upload-sarif.ts +var cachedCodeQL2; +async function getOrInitCodeQL(logger, gitHubVersion, features, config) { + if (cachedCodeQL2 !== void 0) return cachedCodeQL2; + if (config !== void 0) { + cachedCodeQL2 = await getCodeQL(config.codeQLCmd); + } else { + cachedCodeQL2 = await minimalInitCodeQL( + logger, + gitHubVersion, + features + ); + } + return cachedCodeQL2; +} async function postProcessAndUploadSarif(logger, tempPath, features, getCodeQL2, uploadKind, checkoutPath, sarifPath, category, postProcessedOutputPath) { const sarifGroups = await getGroupedSarifFilePaths( logger, @@ -111424,20 +111440,6 @@ async function sendSuccessStatusReport(startedAt, uploadStats, logger) { await sendStatusReport(statusReport); } } -var codeql; -async function getOrInitCodeQL(logger, gitHubVersion, features, config) { - if (codeql !== void 0) return codeql; - if (config !== void 0) { - codeql = await getCodeQL(config.codeQLCmd); - } else { - codeql = await minimalInitCodeQL( - logger, - gitHubVersion, - features - ); - } - return codeql; -} async function run(startedAt) { const logger = getActionsLogger(); try { diff --git a/src/upload-sarif-action.ts b/src/upload-sarif-action.ts index 737396dd62..4cc4bfc49d 100644 --- a/src/upload-sarif-action.ts +++ b/src/upload-sarif-action.ts @@ -3,9 +3,8 @@ import * as core from "@actions/core"; import * as actionsUtil from "./actions-util"; import * as analyses from "./analyses"; import { getGitHubVersion } from "./api-client"; -import { CodeQL, getCodeQL } from "./codeql"; -import { Config, getConfig } from "./config-utils"; -import { FeatureEnablement, initFeatures } from "./feature-flags"; +import { getConfig } from "./config-utils"; +import { initFeatures } from "./feature-flags"; import { Logger, getActionsLogger } from "./logging"; import { getRepositoryNwo } from "./repository"; import { @@ -18,10 +17,9 @@ import { isThirdPartyAnalysis, } from "./status-report"; import * as upload_lib from "./upload-lib"; -import { postProcessAndUploadSarif } from "./upload-sarif"; +import { getOrInitCodeQL, postProcessAndUploadSarif } from "./upload-sarif"; import { ConfigurationError, - GitHubVersion, checkActionVersion, checkDiskUsage, getErrorMessage, @@ -56,35 +54,6 @@ async function sendSuccessStatusReport( } } -/** The cached `CodeQL` instance, if any. */ -let codeql: CodeQL | undefined; - -/** Get or initialise a `CodeQL` instance for use by the `upload-sarif` action. */ -async function getOrInitCodeQL( - logger: Logger, - gitHubVersion: GitHubVersion, - features: FeatureEnablement, - config: Config | undefined, -): Promise { - // Return the cached instance, if we have one. 
-  if (codeql !== undefined) return codeql;
-
-  // If we have been able to load a `Config` from an earlier CodeQL Action step in the job,
-  // then use the CodeQL executable that we have used previously. Otherwise, initialise the
-  // CLI specifically for `upload-sarif`. Either way, we cache the instance.
-  if (config !== undefined) {
-    codeql = await getCodeQL(config.codeQLCmd);
-  } else {
-    codeql = await upload_lib.minimalInitCodeQL(
-      logger,
-      gitHubVersion,
-      features,
-    );
-  }
-
-  return codeql;
-}
-
 async function run(startedAt: Date) {
   // To capture errors appropriately, keep as much code within the try-catch as
   // possible, and only use safe functions outside.
diff --git a/src/upload-sarif.ts b/src/upload-sarif.ts
index 372b2d1f47..f704c2c28d 100644
--- a/src/upload-sarif.ts
+++ b/src/upload-sarif.ts
@@ -1,15 +1,47 @@
 import { UploadKind } from "./actions-util";
 import * as analyses from "./analyses";
+import type { CodeQL } from "./codeql";
+import * as codeql from "./codeql";
+import { Config } from "./config-utils";
 import { FeatureEnablement } from "./feature-flags";
 import { Logger } from "./logging";
 import * as upload_lib from "./upload-lib";
-import { unsafeEntriesInvariant } from "./util";
+import { GitHubVersion, unsafeEntriesInvariant } from "./util";
 
 // Maps analysis kinds to SARIF IDs.
 export type UploadSarifResults = Partial<
   Record
 >;
 
+/** The cached `CodeQL` instance, if any. */
+let cachedCodeQL: CodeQL | undefined;
+
+/** Get or initialise a `CodeQL` instance for use by the `upload-sarif` action. */
+export async function getOrInitCodeQL(
+  logger: Logger,
+  gitHubVersion: GitHubVersion,
+  features: FeatureEnablement,
+  config: Config | undefined,
+): Promise<CodeQL> {
+  // Return the cached instance, if we have one.
+  if (cachedCodeQL !== undefined) return cachedCodeQL;
+
+  // If we have been able to load a `Config` from an earlier CodeQL Action step in the job,
+  // then use the CodeQL executable that we have used previously. Otherwise, initialise the
+  // CLI specifically for `upload-sarif`. Either way, we cache the instance.
+  if (config !== undefined) {
+    cachedCodeQL = await codeql.getCodeQL(config.codeQLCmd);
+  } else {
+    cachedCodeQL = await upload_lib.minimalInitCodeQL(
+      logger,
+      gitHubVersion,
+      features,
+    );
+  }
+
+  return cachedCodeQL;
+}
+
 /**
  * Finds SARIF files in `sarifPath`, post-processes them, and uploads them to the appropriate services.
  *
From 6d90f4c71ec0bf27b0fab6ea0cb6dc4ef7e9cbea Mon Sep 17 00:00:00 2001
Gale" Date: Tue, 24 Feb 2026 20:09:10 +0000 Subject: [PATCH 5/6] Avoid using a global for `getOrInitCodeQL` --- lib/upload-sarif-action.js | 14 +++++++------- src/upload-sarif-action.ts | 9 +++++++-- src/upload-sarif.ts | 17 ++++++++++------- 3 files changed, 24 insertions(+), 16 deletions(-) diff --git a/lib/upload-sarif-action.js b/lib/upload-sarif-action.js index d36868fa24..d9e0fea11f 100644 --- a/lib/upload-sarif-action.js +++ b/lib/upload-sarif-action.js @@ -111370,19 +111370,18 @@ function filterAlertsByDiffRange(logger, sarif) { } // src/upload-sarif.ts -var cachedCodeQL2; -async function getOrInitCodeQL(logger, gitHubVersion, features, config) { - if (cachedCodeQL2 !== void 0) return cachedCodeQL2; +async function getOrInitCodeQL(actionState, logger, gitHubVersion, features, config) { + if (actionState.cachedCodeQL !== void 0) return actionState.cachedCodeQL; if (config !== void 0) { - cachedCodeQL2 = await getCodeQL(config.codeQLCmd); + actionState.cachedCodeQL = await getCodeQL(config.codeQLCmd); } else { - cachedCodeQL2 = await minimalInitCodeQL( + actionState.cachedCodeQL = await minimalInitCodeQL( logger, gitHubVersion, features ); } - return cachedCodeQL2; + return actionState.cachedCodeQL; } async function postProcessAndUploadSarif(logger, tempPath, features, getCodeQL2, uploadKind, checkoutPath, sarifPath, category, postProcessedOutputPath) { const sarifGroups = await getGroupedSarifFilePaths( @@ -111442,6 +111441,7 @@ async function sendSuccessStatusReport(startedAt, uploadStats, logger) { } async function run(startedAt) { const logger = getActionsLogger(); + const state = { cachedCodeQL: void 0 }; try { initializeEnvironment(getActionVersion()); const gitHubVersion = await getGitHubVersion(); @@ -111477,7 +111477,7 @@ async function run(startedAt) { logger, tempDir, features, - () => getOrInitCodeQL(logger, gitHubVersion, features, config), + () => getOrInitCodeQL(state, logger, gitHubVersion, features, config), "always", checkoutPath, sarifPath, diff --git a/src/upload-sarif-action.ts b/src/upload-sarif-action.ts index 4cc4bfc49d..86894cce1a 100644 --- a/src/upload-sarif-action.ts +++ b/src/upload-sarif-action.ts @@ -17,7 +17,11 @@ import { isThirdPartyAnalysis, } from "./status-report"; import * as upload_lib from "./upload-lib"; -import { getOrInitCodeQL, postProcessAndUploadSarif } from "./upload-sarif"; +import { + getOrInitCodeQL, + postProcessAndUploadSarif, + UploadSarifState, +} from "./upload-sarif"; import { ConfigurationError, checkActionVersion, @@ -59,6 +63,7 @@ async function run(startedAt: Date) { // possible, and only use safe functions outside. const logger = getActionsLogger(); + const state: UploadSarifState = { cachedCodeQL: undefined }; try { initializeEnvironment(actionsUtil.getActionVersion()); @@ -107,7 +112,7 @@ async function run(startedAt: Date) { logger, tempDir, features, - () => getOrInitCodeQL(logger, gitHubVersion, features, config), + () => getOrInitCodeQL(state, logger, gitHubVersion, features, config), "always", checkoutPath, sarifPath, diff --git a/src/upload-sarif.ts b/src/upload-sarif.ts index f704c2c28d..95e2bf807b 100644 --- a/src/upload-sarif.ts +++ b/src/upload-sarif.ts @@ -8,38 +8,41 @@ import { Logger } from "./logging"; import * as upload_lib from "./upload-lib"; import { GitHubVersion, unsafeEntriesInvariant } from "./util"; +export interface UploadSarifState { + /** The cached `CodeQL` instance, if any. */ + cachedCodeQL: CodeQL | undefined; +} + // Maps analysis kinds to SARIF IDs. 
 export type UploadSarifResults = Partial<
   Record
 >;
 
-/** The cached `CodeQL` instance, if any. */
-let cachedCodeQL: CodeQL | undefined;
-
 /** Get or initialise a `CodeQL` instance for use by the `upload-sarif` action. */
 export async function getOrInitCodeQL(
+  actionState: UploadSarifState,
   logger: Logger,
   gitHubVersion: GitHubVersion,
   features: FeatureEnablement,
   config: Config | undefined,
 ): Promise<CodeQL> {
   // Return the cached instance, if we have one.
-  if (cachedCodeQL !== undefined) return cachedCodeQL;
+  if (actionState.cachedCodeQL !== undefined) return actionState.cachedCodeQL;
 
   // If we have been able to load a `Config` from an earlier CodeQL Action step in the job,
   // then use the CodeQL executable that we have used previously. Otherwise, initialise the
   // CLI specifically for `upload-sarif`. Either way, we cache the instance.
   if (config !== undefined) {
-    cachedCodeQL = await codeql.getCodeQL(config.codeQLCmd);
+    actionState.cachedCodeQL = await codeql.getCodeQL(config.codeQLCmd);
   } else {
-    cachedCodeQL = await upload_lib.minimalInitCodeQL(
+    actionState.cachedCodeQL = await upload_lib.minimalInitCodeQL(
       logger,
       gitHubVersion,
       features,
     );
   }
 
-  return cachedCodeQL;
+  return actionState.cachedCodeQL;
 }
 
 /**
From 26812c842e185b4121e5137e2725901cf3f0fe3d Mon Sep 17 00:00:00 2001
From: "Michael B. Gale"
Date: Tue, 24 Feb 2026 20:21:30 +0000
Subject: [PATCH 6/6] Add tests for getOrInitCodeQL

---
 src/upload-sarif.test.ts | 90 +++++++++++++++++++++++++++++++++++++++-
 1 file changed, 88 insertions(+), 2 deletions(-)

diff --git a/src/upload-sarif.test.ts b/src/upload-sarif.test.ts
index 601dbfd46d..e0caaa8bf6 100644
--- a/src/upload-sarif.test.ts
+++ b/src/upload-sarif.test.ts
@@ -6,15 +6,101 @@ import * as sinon from "sinon";
 import { AnalysisKind, getAnalysisConfig } from "./analyses";
 import { getCodeQLForTesting } from "./codeql";
+import * as codeql from "./codeql";
 import { getRunnerLogger } from "./logging";
-import { createFeatures, setupTests } from "./testing-utils";
+import { createFeatures, createTestConfig, setupTests } from "./testing-utils";
 import { UploadResult } from "./upload-lib";
 import * as uploadLib from "./upload-lib";
-import { postProcessAndUploadSarif } from "./upload-sarif";
+import {
+  getOrInitCodeQL,
+  postProcessAndUploadSarif,
+  UploadSarifState,
+} from "./upload-sarif";
 import * as util from "./util";
 
 setupTests(test);
 
+test("getOrInitCodeQL - gets cached CodeQL instance when available", async (t) => {
+  const cachedCodeQL = await getCodeQLForTesting();
+  const getCodeQL = sinon.stub(codeql, "getCodeQL").resolves(undefined);
+  const minimalInitCodeQL = sinon
+    .stub(uploadLib, "minimalInitCodeQL")
+    .resolves(undefined);
+
+  const result = await getOrInitCodeQL(
+    { cachedCodeQL },
+    getRunnerLogger(true),
+    { type: util.GitHubVariant.GHES, version: "3.0" },
+    createFeatures([]),
+    undefined,
+  );
+
+  // Neither of the two functions to get a CodeQL instance were called.
+  t.true(getCodeQL.notCalled);
+  t.true(minimalInitCodeQL.notCalled);
+
+  // But we have an instance that refers to the same object as the one we put into the state.
+  t.truthy(result);
+  t.is(result, cachedCodeQL);
+});
+
+test("getOrInitCodeQL - uses minimalInitCodeQL when there's no config", async (t) => {
+  const newInstance = await getCodeQLForTesting();
+  const getCodeQL = sinon.stub(codeql, "getCodeQL").resolves(undefined);
+  const minimalInitCodeQL = sinon
+    .stub(uploadLib, "minimalInitCodeQL")
+    .resolves(newInstance);
+
+  const state: UploadSarifState = { cachedCodeQL: undefined };
+  const result = await getOrInitCodeQL(
+    state,
+    getRunnerLogger(true),
+    { type: util.GitHubVariant.GHES, version: "3.0" },
+    createFeatures([]),
+    undefined,
+  );
+
+  // Check that the right function was called.
+  t.true(getCodeQL.notCalled);
+  t.true(minimalInitCodeQL.calledOnce);
+
+  // And that we received the instance that we expected.
+  t.truthy(result);
+  t.is(result, newInstance);
+
+  // And that it was cached.
+  t.is(state.cachedCodeQL, newInstance);
+});
+
+test("getOrInitCodeQL - uses getCodeQL when there's a config", async (t) => {
+  const newInstance = await getCodeQLForTesting();
+  const getCodeQL = sinon.stub(codeql, "getCodeQL").resolves(newInstance);
+  const minimalInitCodeQL = sinon
+    .stub(uploadLib, "minimalInitCodeQL")
+    .resolves(undefined);
+  const config = createTestConfig({});
+
+  const state: UploadSarifState = { cachedCodeQL: undefined };
+  const result = await getOrInitCodeQL(
+    state,
+    getRunnerLogger(true),
+    { type: util.GitHubVariant.GHES, version: "3.0" },
+    createFeatures([]),
+    config,
+  );
+
+  // Check that the right function was called.
+  t.true(getCodeQL.calledOnce);
+  t.true(minimalInitCodeQL.notCalled);
+
+  // And that we received the instance that we expected.
+  t.truthy(result);
+  t.is(result, newInstance);
+
+  // And that it was cached.
+  t.is(state.cachedCodeQL, newInstance);
+});
+
 interface UploadSarifExpectedResult {
   uploadResult?: UploadResult;
   expectedFiles?: string[];