From 1ba48b83f24b0e1b99652e26f72ed0bb80fde578 Mon Sep 17 00:00:00 2001 From: Aditya Maru Date: Wed, 24 Jul 2024 00:42:33 +0100 Subject: [PATCH] *: integrate the blacksmith cache --- dist/index.js | 7274 +++++++++++++++++++++++++++++++++++++++++++-- package-lock.json | 928 +++++- package.json | 4 +- 3 files changed, 7952 insertions(+), 254 deletions(-) diff --git a/dist/index.js b/dist/index.js index 4afc06f31..abd3551e7 100644 --- a/dist/index.js +++ b/dist/index.js @@ -45,6 +45,7 @@ const path = __importStar(__nccwpck_require__(1017)); const utils = __importStar(__nccwpck_require__(1518)); const cacheHttpClient = __importStar(__nccwpck_require__(8245)); const tar_1 = __nccwpck_require__(6490); +const cacheHttpClient_1 = __nccwpck_require__(8245); class ValidationError extends Error { constructor(message) { super(message); @@ -84,6 +85,31 @@ function isFeatureAvailable() { return !!process.env['ACTIONS_CACHE_URL']; } exports.isFeatureAvailable = isFeatureAvailable; +const promiseWithTimeout = (timeoutMs, promise) => __awaiter(void 0, void 0, void 0, function* () { + let timeoutHandle; + const timeoutPromise = new Promise(resolve => { + timeoutHandle = setTimeout(() => resolve('timeout'), timeoutMs); + }); + return Promise.race([promise, timeoutPromise]).then(result => { + clearTimeout(timeoutHandle); + return result; + }); +}); +function reportFailure() { + return __awaiter(this, void 0, void 0, function* () { + try { + core.info('Reporting failure to api.blacksmith.sh'); + const message = `${process.env.GITHUB_JOB} failed for ${process.env.GITHUB_REPOSITORY} with run ID: ${process.env.GITHUB_RUN_ID}; Sender: ${process.env.GITHUB_TRIGGERING_ACTOR}`; + const httpClient = (0, cacheHttpClient_1.createHttpClient)(); + yield promiseWithTimeout(10000, httpClient.postJson((0, cacheHttpClient_1.getCacheApiUrl)('report-failed'), { + message + })); + } + catch (error) { + core.warning('Failed to report failure to api.blacksmith.sh'); + } + }); +} /** * Restores cache from keys * @@ -143,13 +169,21 @@ function restoreCache(paths, primaryKey, restoreKeys, options, enableCrossOsArch } else { // Supress all non-validation cache related errors because caching should be optional - core.warning(`Failed to restore: ${error.message}`); + if (error.message.includes(`Cache service responded with 404`)) { + core.info(`Did not get a cache hit; proceeding as an uncached run`); + } + else { + core.warning(`Failed to restore: ${error.message}`); + yield reportFailure(); + } } } finally { // Try to delete the archive to save space try { - yield utils.unlinkFile(archivePath); + const before = Date.now(); + yield unlinkWithTimeout(archivePath, 5000); + core.info(`cleaning up archive took ${Date.now() - before}ms`); } catch (error) { core.debug(`Failed to delete archive: ${error}`); @@ -159,6 +193,27 @@ function restoreCache(paths, primaryKey, restoreKeys, options, enableCrossOsArch }); } exports.restoreCache = restoreCache; +function unlinkWithTimeout(path, timeoutMs) { + return __awaiter(this, void 0, void 0, function* () { + const timeout = new Promise((_, reject) => { + setTimeout(() => { + reject(new Error('Unlink operation timed out')); + }, timeoutMs); + }); + try { + yield Promise.race([utils.unlinkFile(path), timeout]); + } + catch (error) { + if (error.message === 'Unlink operation timed out') { + core.warning(`Unlink operation exceeded the timeout of ${timeoutMs}ms`); + } + else { + core.debug(`Failed to delete archive: ${error}`); + } + throw error; + } + }); +} /** * Saves a list of files with the 
specified key * @@ -169,7 +224,7 @@ exports.restoreCache = restoreCache; * @returns number returns cacheId if the cache was saved successfully and throws an error if save fails */ function saveCache(paths, key, options, enableCrossOsArchive = false) { - var _a, _b, _c, _d, _e; + var _a, _b, _c, _d, _e, _f, _g, _h, _j; return __awaiter(this, void 0, void 0, function* () { checkPaths(paths); checkKey(key); @@ -189,12 +244,12 @@ function saveCache(paths, key, options, enableCrossOsArchive = false) { if (core.isDebug()) { yield (0, tar_1.listTar)(archivePath, compressionMethod); } - const fileSizeLimit = 10 * 1024 * 1024 * 1024; // 10GB per repo limit + const fileSizeLimit = 25 * 1024 * 1024 * 1024; // 25GB per repo limit const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); core.debug(`File Size: ${archiveFileSize}`); // For GHES, this check will take place in ReserveCache API with enterprise file size limit if (archiveFileSize > fileSizeLimit && !utils.isGhes()) { - throw new Error(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 10GB limit, not saving cache.`); + throw new Error(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 25GB limit, not saving cache.`); } core.debug('Reserving Cache'); const reserveCacheResponse = yield cacheHttpClient.reserveCache(key, paths, { @@ -212,7 +267,7 @@ function saveCache(paths, key, options, enableCrossOsArchive = false) { throw new ReserveCacheError(`Unable to reserve cache with key ${key}, another job may be creating this cache. More details: ${(_e = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.error) === null || _e === void 0 ? void 0 : _e.message}`); } core.debug(`Saving Cache (ID: ${cacheId})`); - yield cacheHttpClient.saveCache(cacheId, archivePath, options); + yield cacheHttpClient.saveCache(cacheId, archivePath, (_g = (_f = reserveCacheResponse.result) === null || _f === void 0 ? void 0 : _f.uploadUrls) !== null && _g !== void 0 ? _g : [], (_j = (_h = reserveCacheResponse.result) === null || _h === void 0 ? void 0 : _h.uploadId) !== null && _j !== void 0 ? 
_j : ''); } catch (error) { const typedError = error; @@ -221,6 +276,7 @@ function saveCache(paths, key, options, enableCrossOsArchive = false) { } else if (typedError.name === ReserveCacheError.name) { core.info(`Failed to save: ${typedError.message}`); + core.debug(JSON.stringify(error)); } else { core.warning(`Failed to save: ${typedError.message}`); @@ -281,7 +337,7 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge }); }; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.saveCache = exports.reserveCache = exports.downloadCache = exports.getCacheEntry = exports.getCacheVersion = void 0; +exports.saveCache = exports.reserveCache = exports.downloadCache = exports.getCacheEntry = exports.getCacheVersion = exports.createHttpClient = exports.getCacheApiUrl = void 0; const core = __importStar(__nccwpck_require__(2186)); const http_client_1 = __nccwpck_require__(6255); const auth_1 = __nccwpck_require__(5526); @@ -294,30 +350,34 @@ const options_1 = __nccwpck_require__(6215); const requestUtils_1 = __nccwpck_require__(3981); const versionSalt = '1.0'; function getCacheApiUrl(resource) { - const baseUrl = process.env['ACTIONS_CACHE_URL'] || ''; + const baseUrl = process.env['BLACKSMITH_CACHE_URL'] || 'https://api.blacksmith.sh/cache'; if (!baseUrl) { throw new Error('Cache Service Url not found, unable to restore cache.'); } - const url = `${baseUrl}_apis/artifactcache/${resource}`; - core.debug(`Resource Url: ${url}`); + const url = `${baseUrl}/${resource}`; + core.debug(`Blacksmith cache resource URL: ${url}; version: 3.2.40`); return url; } +exports.getCacheApiUrl = getCacheApiUrl; function createAcceptHeader(type, apiVersion) { return `${type};api-version=${apiVersion}`; } function getRequestOptions() { + core.debug(`Setting GITHUB_REPO_NAME: ${process.env['GITHUB_REPO_NAME']}`); const requestOptions = { headers: { - Accept: createAcceptHeader('application/json', '6.0-preview.1') + Accept: createAcceptHeader('application/json', '6.0-preview.1'), + 'X-Github-Repo-Name': process.env['GITHUB_REPO_NAME'] } }; return requestOptions; } function createHttpClient() { - const token = process.env['ACTIONS_RUNTIME_TOKEN'] || ''; - const bearerCredentialHandler = new auth_1.BearerCredentialHandler(token); - return new http_client_1.HttpClient('actions/cache', [bearerCredentialHandler], getRequestOptions()); + const token = process.env['BLACKSMITH_CACHE_TOKEN']; + const bearerCredentialHandler = new auth_1.BearerCredentialHandler(token !== null && token !== void 0 ? token : ''); + return new http_client_1.HttpClient('useblacksmith/cache', [bearerCredentialHandler], getRequestOptions()); } +exports.createHttpClient = createHttpClient; function getCacheVersion(paths, compressionMethod, enableCrossOsArchive = false) { // don't pass changes upstream const components = paths.slice(); @@ -339,7 +399,7 @@ function getCacheEntry(keys, paths, options) { return __awaiter(this, void 0, void 0, function* () { const httpClient = createHttpClient(); const version = getCacheVersion(paths, options === null || options === void 0 ? void 0 : options.compressionMethod, options === null || options === void 0 ? 
void 0 : options.enableCrossOsArchive); - const resource = `cache?keys=${encodeURIComponent(keys.join(','))}&version=${version}`; + const resource = `?keys=${encodeURIComponent(keys.join(','))}&version=${version}`; const response = yield (0, requestUtils_1.retryTypedResponse)('getCacheEntry', () => __awaiter(this, void 0, void 0, function* () { return httpClient.getJson(getCacheApiUrl(resource)); })); // Cache not found if (response.statusCode === 204) { @@ -400,7 +460,12 @@ function downloadCache(archiveLocation, archivePath, options) { } } else { +<<<<<<< HEAD yield (0, downloadUtils_1.downloadCacheHttpClient)(archiveLocation, archivePath); +======= + yield (0, downloadUtils_1.downloadCacheHttpClientConcurrent)(archiveLocation, archivePath, downloadOptions); + // await downloadCacheAxiosMultiPart(archiveLocation, archivePath) +>>>>>>> 81f8ab7 (*: integrate the blacksmith cache) } }); } @@ -430,77 +495,92 @@ function getContentRange(start, end) { // Content-Range: bytes 0-199/* return `bytes ${start}-${end}/*`; } -function uploadChunk(httpClient, resourceUrl, openStream, start, end) { +function uploadChunk(resourceUrl, openStream, start, end) { return __awaiter(this, void 0, void 0, function* () { core.debug(`Uploading chunk of size ${end - start + 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`); const additionalHeaders = { 'Content-Type': 'application/octet-stream', - 'Content-Range': getContentRange(start, end) + 'Content-Length': end - start + 1 }; + const s3HttpClient = new http_client_1.HttpClient('useblacksmith/cache'); const uploadChunkResponse = yield (0, requestUtils_1.retryHttpClientResponse)(`uploadChunk (start: ${start}, end: ${end})`, () => __awaiter(this, void 0, void 0, function* () { - return httpClient.sendStream('PATCH', resourceUrl, openStream(), additionalHeaders); + return s3HttpClient.sendStream('PUT', resourceUrl, openStream(), additionalHeaders); })); if (!(0, requestUtils_1.isSuccessStatusCode)(uploadChunkResponse.message.statusCode)) { + core.debug(`Upload chunk failed with status message: ${JSON.stringify(uploadChunkResponse.message.statusMessage)}`); + core.debug(`Upload chunk failed with headers: ${JSON.stringify(uploadChunkResponse.message.headers)}`); + core.debug(`Upload chunk failed with response body: ${yield uploadChunkResponse.readBody()}`); throw new Error(`Cache service responded with ${uploadChunkResponse.message.statusCode} during upload chunk.`); } + // eslint-disable-next-line @typescript-eslint/no-non-null-assertion + return uploadChunkResponse.message.headers.etag; }); } -function uploadFile(httpClient, cacheId, archivePath, options) { +function uploadFile(archivePath, urls) { return __awaiter(this, void 0, void 0, function* () { // Upload Chunks + core.debug(`archivePath: ${archivePath}`); const fileSize = utils.getArchiveFileSizeInBytes(archivePath); - const resourceUrl = getCacheApiUrl(`caches/${cacheId.toString()}`); const fd = fs.openSync(archivePath, 'r'); - const uploadOptions = (0, options_1.getUploadOptions)(options); - const concurrency = utils.assertDefined('uploadConcurrency', uploadOptions.uploadConcurrency); - const maxChunkSize = utils.assertDefined('uploadChunkSize', uploadOptions.uploadChunkSize); - const parallelUploads = [...new Array(concurrency).keys()]; + const maxChunkSize = 25 * 1024 * 1024; // Matches the chunkSize in our cache service. 
core.debug('Awaiting all uploads'); - let offset = 0; + let eTags = []; try { - yield Promise.all(parallelUploads.map(() => __awaiter(this, void 0, void 0, function* () { - while (offset < fileSize) { - const chunkSize = Math.min(fileSize - offset, maxChunkSize); - const start = offset; - const end = offset + chunkSize - 1; - offset += maxChunkSize; - yield uploadChunk(httpClient, resourceUrl, () => fs - .createReadStream(archivePath, { - fd, - start, - end, - autoClose: false - }) - .on('error', error => { - throw new Error(`Cache upload failed because file read failed with ${error.message}`); - }), start, end); - } + eTags = yield Promise.all(urls.map((url, index) => __awaiter(this, void 0, void 0, function* () { + const offset = index * maxChunkSize; + const chunkSize = Math.min(fileSize - offset, maxChunkSize); + const start = offset; + let end = offset + chunkSize - 1; + if (chunkSize !== maxChunkSize) { + end = fileSize - 1; + } + core.debug(`Uploading chunk to ${url}: ${start}-${end}/${fileSize}`); + const eTag = yield uploadChunk(url, () => fs + .createReadStream(archivePath, { + fd, + start, + end, + autoClose: false + }) + .on('error', error => { + throw new Error(`Cache upload failed because file read failed with ${error.message}`); + }), start, end); + core.debug(`Upload to ${url} complete`); + return eTag !== null && eTag !== void 0 ? eTag : ''; }))); } + catch (error) { + core.debug(`Cache upload failed: ${JSON.stringify(error)}`); + throw error; + } finally { fs.closeSync(fd); } - return; + return eTags; }); } -function commitCache(httpClient, cacheId, filesize) { +function commitCache(httpClient, cacheId, filesize, eTags, uploadId) { return __awaiter(this, void 0, void 0, function* () { - const commitCacheRequest = { size: filesize }; + const commitCacheRequest = { + size: filesize, + eTags, + uploadId + }; return yield (0, requestUtils_1.retryTypedResponse)('commitCache', () => __awaiter(this, void 0, void 0, function* () { return httpClient.postJson(getCacheApiUrl(`caches/${cacheId.toString()}`), commitCacheRequest); })); }); } -function saveCache(cacheId, archivePath, options) { +function saveCache(cacheId, archivePath, urls, uploadId) { return __awaiter(this, void 0, void 0, function* () { const httpClient = createHttpClient(); core.debug('Upload cache'); - yield uploadFile(httpClient, cacheId, archivePath, options); + const eTags = yield uploadFile(archivePath, urls); // Commit Cache core.debug('Commiting cache'); const cacheSize = utils.getArchiveFileSizeInBytes(archivePath); core.info(`Cache Size: ~${Math.round(cacheSize / (1024 * 1024))} MB (${cacheSize} B)`); - const commitCacheResponse = yield commitCache(httpClient, cacheId, cacheSize); + const commitCacheResponse = yield commitCache(httpClient, cacheId, cacheSize, eTags, uploadId); if (!(0, requestUtils_1.isSuccessStatusCode)(commitCacheResponse.statusCode)) { throw new Error(`Cache service responded with ${commitCacheResponse.statusCode} during commit cache.`); } @@ -609,6 +689,7 @@ function resolvePaths(patterns) { implicitDescendants: false }); try { +<<<<<<< HEAD for (var _e = true, _f = __asyncValues(globber.globGenerator()), _g; _g = yield _f.next(), _a = _g.done, !_a; _e = true) { _c = _g.value; _e = false; @@ -621,9 +702,28 @@ function resolvePaths(patterns) { if (relativeFile === '') { // path.relative returns empty string if workspace and file are equal paths.push('.'); +======= + for (var _e = true, _f = __asyncValues(globber.globGenerator()), _g; _g = yield _f.next(), _a = _g.done, !_a;) { + _c = 
_g.value; + _e = false; + try { + const file = _c; + const relativeFile = path + .relative(workspace, file) + .replace(new RegExp(`\\${path.sep}`, 'g'), '/'); + core.debug(`Matched: ${relativeFile}`); + // Paths are made relative so the tar entries are all relative to the root of the workspace. + if (relativeFile === '') { + // path.relative returns empty string if workspace and file are equal + paths.push('.'); + } + else { + paths.push(`${relativeFile}`); + } +>>>>>>> 81f8ab7 (*: integrate the blacksmith cache) } - else { - paths.push(`${relativeFile}`); + finally { + _e = true; } } } @@ -797,8 +897,15 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge step((generator = generator.apply(thisArg, _arguments || [])).next()); }); }; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; Object.defineProperty(exports, "__esModule", ({ value: true })); +<<<<<<< HEAD exports.downloadCacheStorageSDK = exports.downloadCacheHttpClientConcurrent = exports.downloadCacheHttpClient = exports.DownloadProgress = void 0; +======= +exports.downloadCacheStorageSDK = exports.downloadCacheHttpClientConcurrent = exports.downloadCacheHttpClient = exports.downloadCacheAxiosMultiPart = exports.DownloadProgress = void 0; +>>>>>>> 81f8ab7 (*: integrate the blacksmith cache) const core = __importStar(__nccwpck_require__(2186)); const http_client_1 = __nccwpck_require__(6255); const storage_blob_1 = __nccwpck_require__(4100); @@ -806,20 +913,57 @@ const buffer = __importStar(__nccwpck_require__(4300)); const fs = __importStar(__nccwpck_require__(7147)); const stream = __importStar(__nccwpck_require__(2781)); const util = __importStar(__nccwpck_require__(3837)); -const utils = __importStar(__nccwpck_require__(1518)); const constants_1 = __nccwpck_require__(8840); const requestUtils_1 = __nccwpck_require__(3981); const abort_controller_1 = __nccwpck_require__(2557); +const axios_retry_1 = __importDefault(__nccwpck_require__(8709)); +const axios_1 = __importDefault(__nccwpck_require__(8757)); +const cacheHttpClient_1 = __nccwpck_require__(8245); /** * Pipes the body of a HTTP response to a stream * * @param response the HTTP response * @param output the writable stream */ -function pipeResponseToStream(response, output) { +function pipeResponseToStream(response, output, progress) { return __awaiter(this, void 0, void 0, function* () { const pipeline = util.promisify(stream.pipeline); - yield pipeline(response.message, output); + const reportProgress = new stream.Transform({ + transform(chunk, _encoding, callback) { + if (progress) { + progress.setReceivedBytes(progress.getTransferredBytes() + chunk.length); + } + this.push(chunk); + callback(); + } + }); + yield pipeline(response.message, reportProgress, output); + }); +} +function pipeAxiosResponseToStream(response, output, progress) { + return __awaiter(this, void 0, void 0, function* () { + const reportProgress = new stream.Transform({ + transform(chunk, _encoding, callback) { + if (progress) { + progress.setReceivedBytes(progress.getTransferredBytes() + chunk.length); + } + this.push(chunk); + callback(); + } + }); + yield response.data.pipe(reportProgress).pipe(output); + }); +} +function reportStall() { + return __awaiter(this, void 0, void 0, function* () { + try { + core.debug('Reporting stall to api.blacksmith.sh'); + const httpClient = (0, cacheHttpClient_1.createHttpClient)(); + yield promiseWithTimeout(10000, httpClient.postJson((0, 
cacheHttpClient_1.getCacheApiUrl)('report-stall'), {})); + } + catch (error) { + core.warning('Failed to report failure to api.blacksmith.sh'); + } }); } /** @@ -833,6 +977,7 @@ class DownloadProgress { this.segmentOffset = 0; this.receivedBytes = 0; this.displayedComplete = false; + this.highWaterTime = 0; this.startTime = Date.now(); } /** @@ -892,6 +1037,7 @@ class DownloadProgress { */ onProgress() { return (progress) => { + this.highWaterTime = Date.now(); this.setReceivedBytes(progress.loadedBytes); }; } @@ -923,6 +1069,112 @@ class DownloadProgress { } } exports.DownloadProgress = DownloadProgress; +function downloadCacheAxiosMultiPart(archiveLocation, archivePath) { + return __awaiter(this, void 0, void 0, function* () { + const CONCURRENCY = 10; + core.info(`Downloading with ${CONCURRENCY} concurrent requests`); + // Open a file descriptor for the cache file + const fdesc = yield fs.promises.open(archivePath, 'w+'); + // Set file permissions so that other users can untar the cache + yield fdesc.chmod(0o644); + let progressLogger; + // Configure axios-retry + (0, axios_retry_1.default)(axios_1.default, { + retries: 3, + // No retry delay for axios-retry. + shouldResetTimeout: true, + retryCondition: error => { + var _a; + // Retry on all errors except 404. + return ((_a = error.response) === null || _a === void 0 ? void 0 : _a.status) !== 404; + } + }); + try { + core.debug(`Downloading from ${archiveLocation} to ${archivePath}`); + const metadataResponse = yield axios_1.default.get(archiveLocation, { + headers: { Range: 'bytes=0-1' } + }); + const contentRangeHeader = metadataResponse.headers['content-range']; + if (!contentRangeHeader) { + throw new Error('Content-Range is not defined; unable to determine file size'); + } + // Parse the total file size from the Content-Range header + const fileSize = parseInt(contentRangeHeader.split('/')[1]); + if (isNaN(fileSize)) { + throw new Error(`Content-Range is not a number; unable to determine file size: ${contentRangeHeader}`); + } + core.info(`Cached file size: ${fileSize}`); + // Truncate the file to the correct size + yield fdesc.truncate(fileSize); + yield fdesc.sync(); + // Now that we've truncated the file to the correct size, we can close the file descriptor. + yield fdesc.close(); + progressLogger = new DownloadProgress(fileSize); + progressLogger.startDisplayTimer(); + core.info(`Downloading ${archivePath}`); + // Divvy up the download into chunks based on CONCURRENCY + const chunkSize = Math.ceil(fileSize / CONCURRENCY); + const chunkRanges = []; + const fileDescriptors = []; + for (let i = 0; i < CONCURRENCY; i++) { + const start = i * chunkSize; + const end = i === CONCURRENCY - 1 ? 
fileSize - 1 : (i + 1) * chunkSize - 1; + chunkRanges.push(`bytes=${start}-${end}`); + fileDescriptors.push(yield fs.promises.open(archivePath, 'r+')); + } + const downloads = chunkRanges.map((range, index) => __awaiter(this, void 0, void 0, function* () { + core.debug(`Downloading range: ${range}`); + const response = yield axios_1.default.get(archiveLocation, { + headers: { Range: range }, + responseType: 'stream' + }); + const reportProgress = new stream.Transform({ + transform(chunk, _encoding, callback) { + if (progressLogger) { + progressLogger.setReceivedBytes(progressLogger.getTransferredBytes() + chunk.length); + } + this.push(chunk); + callback(); + } + }); + const chunkFileDesc = fileDescriptors[index]; + try { + const finished = util.promisify(stream.finished); + const writer = fs.createWriteStream(archivePath, { + fd: chunkFileDesc.fd, + start: parseInt(range.split('=')[1].split('-')[0]), + autoClose: false + }); + yield response.data.pipe(reportProgress).pipe(writer); + yield finished(writer); + } + catch (err) { + core.warning(`Range ${range} failed to download: ${err.message}`); + throw err; + } + finally { + if (chunkFileDesc) { + try { + yield chunkFileDesc.close(); + } + catch (err) { + core.warning(`Failed to close file descriptor: ${err}`); + } + } + } + })); + yield Promise.all(downloads); + } + catch (err) { + core.warning(`Failed to download cache: ${err.message}`); + throw err; + } + finally { + progressLogger === null || progressLogger === void 0 ? void 0 : progressLogger.stopDisplayTimer(true); + } + }); +} +exports.downloadCacheAxiosMultiPart = downloadCacheAxiosMultiPart; /** * Download the cache using the Actions toolkit http-client * @@ -931,26 +1183,84 @@ exports.DownloadProgress = DownloadProgress; */ function downloadCacheHttpClient(archiveLocation, archivePath) { return __awaiter(this, void 0, void 0, function* () { - const writeStream = fs.createWriteStream(archivePath); - const httpClient = new http_client_1.HttpClient('actions/cache'); - const downloadResponse = yield (0, requestUtils_1.retryHttpClientResponse)('downloadCache', () => __awaiter(this, void 0, void 0, function* () { return httpClient.get(archiveLocation); })); - // Abort download if no traffic received over the socket. - downloadResponse.message.socket.setTimeout(constants_1.SocketTimeout, () => { - downloadResponse.message.destroy(); - core.debug(`Aborting download, socket timed out after ${constants_1.SocketTimeout} ms`); - }); - yield pipeResponseToStream(downloadResponse, writeStream); - // Validate download size. - const contentLengthHeader = downloadResponse.message.headers['content-length']; - if (contentLengthHeader) { - const expectedLength = parseInt(contentLengthHeader); - const actualLength = utils.getArchiveFileSizeInBytes(archivePath); - if (actualLength !== expectedLength) { - throw new Error(`Incomplete download. 
Expected file size: ${expectedLength}, actual file size: ${actualLength}`); - } + const CONCURRENCY = 1; + const fdesc = yield fs.promises.open(archivePath, 'w+'); + // Set file permissions so that other users can untar the cache + yield fdesc.chmod(0o644); + let progressLogger; + try { + core.debug(`Downloading from ${archiveLocation} to ${archivePath}`); + const httpClient = new http_client_1.HttpClient('useblacksmith/cache'); + const metadataResponse = yield (0, requestUtils_1.retryHttpClientResponse)('downloadCache', () => __awaiter(this, void 0, void 0, function* () { + return httpClient.get(archiveLocation, { + Range: 'bytes=0-1' + }); + })); + // Abort download if no traffic received over the socket. + metadataResponse.message.socket.setTimeout(constants_1.SocketTimeout, () => { + metadataResponse.message.destroy(); + core.debug(`Aborting download, socket timed out after ${constants_1.SocketTimeout} ms`); + }); + const contentRangeHeader = metadataResponse.message.headers['content-range']; + if (!contentRangeHeader) { + throw new Error('Content-Range is not defined; unable to determine file size'); + } + // Parse the total file size from the Content-Range header + const fileSize = parseInt(contentRangeHeader.split('/')[1]); + if (isNaN(fileSize)) { + throw new Error(`Content-Range is not a number; unable to determine file size: ${contentRangeHeader}`); + } + core.debug(`fileSize: ${fileSize}`); + // Truncate the file to the correct size + yield fdesc.truncate(fileSize); + yield fdesc.sync(); + progressLogger = new DownloadProgress(fileSize); + progressLogger.startDisplayTimer(); + core.info(`Downloading ${archivePath}`); + // Divvy up the download into chunks based on CONCURRENCY + const chunkSize = Math.ceil(fileSize / CONCURRENCY); + const chunkRanges = []; + for (let i = 0; i < CONCURRENCY; i++) { + const start = i * chunkSize; + const end = i === CONCURRENCY - 1 ? fileSize - 1 : (i + 1) * chunkSize - 1; + chunkRanges.push(`bytes=${start}-${end}`); + } + const downloads = chunkRanges.map((range) => __awaiter(this, void 0, void 0, function* () { + core.debug(`Downloading range: ${range}`); + const response = yield (0, requestUtils_1.retryHttpClientResponse)('downloadCache', () => __awaiter(this, void 0, void 0, function* () { + return httpClient.get(archiveLocation, { + Range: range + }); + })); + const writeStream = fs.createWriteStream(archivePath, { + fd: fdesc.fd, + start: parseInt(range.split('=')[1].split('-')[0]), + autoClose: false + }); + yield pipeResponseToStream(response, writeStream, progressLogger); + core.debug(`Finished downloading range: ${range}`); + })); + yield Promise.all(downloads); } - else { - core.debug('Unable to validate download, no Content-Length header'); + catch (err) { + core.warning(`Failed to download cache: ${err}`); + throw err; + } + finally { + // Stop the progress logger regardless of whether the download succeeded or failed. + // Not doing this will cause the entire action to halt if the download fails. + progressLogger === null || progressLogger === void 0 ? void 0 : progressLogger.stopDisplayTimer(); + try { + // NB: We're unsure why we're sometimes seeing a "EBADF: Bad file descriptor" error here. + // It seems to be related to the fact that the file descriptor is closed before all + // the chunks are written to it. This is a workaround to avoid the error. + yield new Promise(resolve => setTimeout(resolve, 1000)); + yield fdesc.close(); + } + catch (err) { + // Intentionally swallow any errors in closing the file descriptor. 
+ core.warning(`Failed to close file descriptor: ${err}`); + } } }); } @@ -964,11 +1274,20 @@ exports.downloadCacheHttpClient = downloadCacheHttpClient; function downloadCacheHttpClientConcurrent(archiveLocation, archivePath, options) { var _a; return __awaiter(this, void 0, void 0, function* () { +<<<<<<< HEAD const archiveDescriptor = yield fs.promises.open(archivePath, 'w'); +======= + core.info('Downloading from cache using Blacksmith Actions http-client'); + const archiveDescriptor = yield fs.promises.open(archivePath, 'w+'); + // Set file permissions so that other users can untar the cache + yield archiveDescriptor.chmod(0o644); + core.debug(`Downloading from ${archiveLocation} to ${archivePath}`); +>>>>>>> 81f8ab7 (*: integrate the blacksmith cache) const httpClient = new http_client_1.HttpClient('actions/cache', undefined, { socketTimeout: options.timeoutInMs, keepAlive: true }); +<<<<<<< HEAD try { const res = yield (0, requestUtils_1.retryHttpClientResponse)('downloadCacheMetadata', () => __awaiter(this, void 0, void 0, function* () { return yield httpClient.request('HEAD', archiveLocation, null, {}); })); const lengthHeader = res.message.headers['content-length']; @@ -981,6 +1300,37 @@ function downloadCacheHttpClientConcurrent(archiveLocation, archivePath, options } const downloads = []; const blockSize = 4 * 1024 * 1024; +======= + let progress; + const stallTimeout = setTimeout(() => { + reportStall(); + }, 300000); + stallTimeout.unref(); // Don't keep the process alive if the download is stalled. + try { + const metadataResponse = yield (0, requestUtils_1.retryHttpClientResponse)('downloadCache', () => __awaiter(this, void 0, void 0, function* () { + return httpClient.get(archiveLocation, { + Range: 'bytes=0-1' + }); + })); + // Abort download if no traffic received over the socket. 
+ metadataResponse.message.socket.setTimeout(constants_1.SocketTimeout, () => { + metadataResponse.message.destroy(); + core.debug(`Aborting download, socket timed out after ${constants_1.SocketTimeout} ms`); + }); + const contentRangeHeader = metadataResponse.message.headers['content-range']; + if (!contentRangeHeader) { + throw new Error('Content-Range is not defined; unable to determine file size'); + } + // Parse the total file size from the Content-Range header + const length = parseInt(contentRangeHeader.split('/')[1]); + if (isNaN(length)) { + throw new Error(`Content-Range is not a number; unable to determine file size: ${contentRangeHeader}`); + } + progress = new DownloadProgress(length); + progress.startDisplayTimer(); + const downloads = []; + const blockSize = 2 * 1024 * 1024; +>>>>>>> 81f8ab7 (*: integrate the blacksmith cache) for (let offset = 0; offset < length; offset += blockSize) { const count = Math.min(blockSize, length - offset); downloads.push({ @@ -994,14 +1344,24 @@ function downloadCacheHttpClientConcurrent(archiveLocation, archivePath, options downloads.reverse(); let actives = 0; let bytesDownloaded = 0; +<<<<<<< HEAD const progress = new DownloadProgress(length); progress.startDisplayTimer(); +======= +>>>>>>> 81f8ab7 (*: integrate the blacksmith cache) const progressFn = progress.onProgress(); const activeDownloads = []; let nextDownload; const waitAndWrite = () => __awaiter(this, void 0, void 0, function* () { const segment = yield Promise.race(Object.values(activeDownloads)); +<<<<<<< HEAD yield archiveDescriptor.write(segment.buffer, 0, segment.count, segment.offset); +======= + const result = yield promiseWithTimeout(10000, archiveDescriptor.write(segment.buffer, 0, segment.count, segment.offset)); + if (result === 'timeout') { + throw new Error('Unable to download from cache using Blacksmith Actions http-client'); + } +>>>>>>> 81f8ab7 (*: integrate the blacksmith cache) actives--; delete activeDownloads[segment.offset]; bytesDownloaded += segment.count; @@ -1010,7 +1370,11 @@ function downloadCacheHttpClientConcurrent(archiveLocation, archivePath, options while ((nextDownload = downloads.pop())) { activeDownloads[nextDownload.offset] = nextDownload.promiseGetter(); actives++; +<<<<<<< HEAD if (actives >= ((_a = options.downloadConcurrency) !== null && _a !== void 0 ? _a : 10)) { +======= + if (actives >= ((_a = options.downloadConcurrency) !== null && _a !== void 0 ? _a : 12)) { +>>>>>>> 81f8ab7 (*: integrate the blacksmith cache) yield waitAndWrite(); } } @@ -1019,6 +1383,14 @@ function downloadCacheHttpClientConcurrent(archiveLocation, archivePath, options } } finally { +<<<<<<< HEAD +======= + // Stop the progress logger regardless of whether the download succeeded or failed. 
+ if (progress) { + progress.stopDisplayTimer(); + } + clearTimeout(stallTimeout); +>>>>>>> 81f8ab7 (*: integrate the blacksmith cache) httpClient.dispose(); yield archiveDescriptor.close(); } @@ -1031,7 +1403,11 @@ function downloadSegmentRetry(httpClient, archiveLocation, offset, count) { let failures = 0; while (true) { try { +<<<<<<< HEAD const timeout = 30000; +======= + const timeout = 15000; +>>>>>>> 81f8ab7 (*: integrate the blacksmith cache) const result = yield promiseWithTimeout(timeout, downloadSegment(httpClient, archiveLocation, offset, count)); if (typeof result === 'string') { throw new Error('downloadSegmentRetry failed due to timeout'); @@ -1043,6 +1419,12 @@ function downloadSegmentRetry(httpClient, archiveLocation, offset, count) { throw err; } failures++; +<<<<<<< HEAD +======= + // Jitter a bit before retrying + yield new Promise(resolve => setTimeout(resolve, Math.random() * 300)); + core.info(`Retrying download segment ${offset} of ${count} (${failures} of ${retries})`); +>>>>>>> 81f8ab7 (*: integrate the blacksmith cache) } } }); @@ -1264,6 +1646,7 @@ function retryTypedResponse(name, method, maxAttempts = constants_1.DefaultRetry // If the error object contains the statusCode property, extract it and return // an TypedResponse so it can be processed by the retry logic. (error) => { + core.debug(`Error occurred during ${name}: ${JSON.stringify(error)}`); if (error instanceof http_client_1.HttpClientError) { return { statusCode: error.statusCode, @@ -1631,7 +2014,11 @@ function getDownloadOptions(copy) { const result = { useAzureSdk: false, concurrentBlobDownloads: true, +<<<<<<< HEAD downloadConcurrency: 8, +======= + downloadConcurrency: 10, +>>>>>>> 81f8ab7 (*: integrate the blacksmith cache) timeoutInMs: 30000, segmentTimeoutInMs: 600000, lookupOnly: false @@ -5242,7 +5629,11 @@ class HttpClient { } const usingSsl = parsedUrl.protocol === 'https:'; proxyAgent = new undici_1.ProxyAgent(Object.assign({ uri: proxyUrl.href, pipelining: !this._keepAlive ? 0 : 1 }, ((proxyUrl.username || proxyUrl.password) && { +<<<<<<< HEAD token: `Basic ${Buffer.from(`${proxyUrl.username}:${proxyUrl.password}`).toString('base64')}` +======= + token: `${proxyUrl.username}:${proxyUrl.password}` +>>>>>>> 81f8ab7 (*: integrate the blacksmith cache) }))); this._proxyAgentDispatcher = proxyAgent; if (usingSsl && this._ignoreSslError) { @@ -5356,11 +5747,19 @@ function getProxyUrl(reqUrl) { })(); if (proxyVar) { try { +<<<<<<< HEAD return new DecodedURL(proxyVar); } catch (_a) { if (!proxyVar.startsWith('http://') && !proxyVar.startsWith('https://')) return new DecodedURL(`http://${proxyVar}`); +======= + return new URL(proxyVar); + } + catch (_a) { + if (!proxyVar.startsWith('http://') && !proxyVar.startsWith('https://')) + return new URL(`http://${proxyVar}`); +>>>>>>> 81f8ab7 (*: integrate the blacksmith cache) } } else { @@ -5419,6 +5818,7 @@ function isLoopbackAddress(host) { hostLower.startsWith('[::1]') || hostLower.startsWith('[0:0:0:0:0:0:0:1]')); } +<<<<<<< HEAD class DecodedURL extends URL { constructor(url, base) { super(url, base); @@ -5432,6 +5832,8 @@ class DecodedURL extends URL { return this._decodedPassword; } } +======= +>>>>>>> 81f8ab7 (*: integrate the blacksmith cache) //# sourceMappingURL=proxy.js.map /***/ }), @@ -42551,13 +42953,19 @@ var ValueType; * limitations under the License. 
*/ Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.createNoopMeter = exports.NOOP_OBSERVABLE_UP_DOWN_COUNTER_METRIC = exports.NOOP_OBSERVABLE_GAUGE_METRIC = exports.NOOP_OBSERVABLE_COUNTER_METRIC = exports.NOOP_UP_DOWN_COUNTER_METRIC = exports.NOOP_HISTOGRAM_METRIC = exports.NOOP_COUNTER_METRIC = exports.NOOP_METER = exports.NoopObservableUpDownCounterMetric = exports.NoopObservableGaugeMetric = exports.NoopObservableCounterMetric = exports.NoopObservableMetric = exports.NoopHistogramMetric = exports.NoopUpDownCounterMetric = exports.NoopCounterMetric = exports.NoopMetric = exports.NoopMeter = void 0; +exports.createNoopMeter = exports.NOOP_OBSERVABLE_UP_DOWN_COUNTER_METRIC = exports.NOOP_OBSERVABLE_GAUGE_METRIC = exports.NOOP_OBSERVABLE_COUNTER_METRIC = exports.NOOP_UP_DOWN_COUNTER_METRIC = exports.NOOP_HISTOGRAM_METRIC = exports.NOOP_GAUGE_METRIC = exports.NOOP_COUNTER_METRIC = exports.NOOP_METER = exports.NoopObservableUpDownCounterMetric = exports.NoopObservableGaugeMetric = exports.NoopObservableCounterMetric = exports.NoopObservableMetric = exports.NoopHistogramMetric = exports.NoopGaugeMetric = exports.NoopUpDownCounterMetric = exports.NoopCounterMetric = exports.NoopMetric = exports.NoopMeter = void 0; /** * NoopMeter is a noop implementation of the {@link Meter} interface. It reuses * constant NoopMetrics for all of its methods. */ class NoopMeter { constructor() { } + /** + * @see {@link Meter.createGauge} + */ + createGauge(_name, _options) { + return exports.NOOP_GAUGE_METRIC; + } /** * @see {@link Meter.createHistogram} */ @@ -42615,6 +43023,10 @@ class NoopUpDownCounterMetric extends NoopMetric { add(_value, _attributes) { } } exports.NoopUpDownCounterMetric = NoopUpDownCounterMetric; +class NoopGaugeMetric extends NoopMetric { + record(_value, _attributes) { } +} +exports.NoopGaugeMetric = NoopGaugeMetric; class NoopHistogramMetric extends NoopMetric { record(_value, _attributes) { } } @@ -42636,6 +43048,7 @@ exports.NoopObservableUpDownCounterMetric = NoopObservableUpDownCounterMetric; exports.NOOP_METER = new NoopMeter(); // Synchronous instruments exports.NOOP_COUNTER_METRIC = new NoopCounterMetric(); +exports.NOOP_GAUGE_METRIC = new NoopGaugeMetric(); exports.NOOP_HISTOGRAM_METRIC = new NoopHistogramMetric(); exports.NOOP_UP_DOWN_COUNTER_METRIC = new NoopUpDownCounterMetric(); // Asynchronous instruments @@ -42991,6 +43404,12 @@ class NonRecordingSpan { addEvent(_name, _attributes) { return this; } + addLink(_link) { + return this; + } + addLinks(_links) { + return this; + } // By default does nothing setStatus(_status) { return this; @@ -43818,7 +44237,7 @@ var TraceFlags; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.VERSION = void 0; // this is autogenerated file, see scripts/version-update.js -exports.VERSION = '1.4.1'; +exports.VERSION = '1.9.0'; //# sourceMappingURL=version.js.map /***/ }), @@ -45746,230 +46165,755 @@ function descending(a, b) /***/ }), -/***/ 9417: -/***/ ((module) => { +/***/ 1403: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -"use strict"; +var CombinedStream = __nccwpck_require__(5443); +var util = __nccwpck_require__(3837); +var path = __nccwpck_require__(1017); +var http = __nccwpck_require__(3685); +var https = __nccwpck_require__(5687); +var parseUrl = (__nccwpck_require__(7310).parse); +var fs = __nccwpck_require__(7147); +var Stream = (__nccwpck_require__(2781).Stream); +var mime = __nccwpck_require__(3583); +var asynckit = __nccwpck_require__(4812); +var 
populate = __nccwpck_require__(7027); -module.exports = balanced; -function balanced(a, b, str) { - if (a instanceof RegExp) a = maybeMatch(a, str); - if (b instanceof RegExp) b = maybeMatch(b, str); +// Public API +module.exports = FormData; - var r = range(a, b, str); +// make it a Stream +util.inherits(FormData, CombinedStream); - return r && { - start: r[0], - end: r[1], - pre: str.slice(0, r[0]), - body: str.slice(r[0] + a.length, r[1]), - post: str.slice(r[1] + b.length) - }; -} +/** + * Create readable "multipart/form-data" streams. + * Can be used to submit forms + * and file uploads to other web applications. + * + * @constructor + * @param {Object} options - Properties to be added/overriden for FormData and CombinedStream + */ +function FormData(options) { + if (!(this instanceof FormData)) { + return new FormData(options); + } -function maybeMatch(reg, str) { - var m = str.match(reg); - return m ? m[0] : null; -} + this._overheadLength = 0; + this._valueLength = 0; + this._valuesToMeasure = []; -balanced.range = range; -function range(a, b, str) { - var begs, beg, left, right, result; - var ai = str.indexOf(a); - var bi = str.indexOf(b, ai + 1); - var i = ai; + CombinedStream.call(this); - if (ai >= 0 && bi > 0) { - if(a===b) { - return [ai, bi]; - } - begs = []; - left = str.length; + options = options || {}; + for (var option in options) { + this[option] = options[option]; + } +} - while (i >= 0 && !result) { - if (i == ai) { - begs.push(i); - ai = str.indexOf(a, i + 1); - } else if (begs.length == 1) { - result = [ begs.pop(), bi ]; - } else { - beg = begs.pop(); - if (beg < left) { - left = beg; - right = bi; - } +FormData.LINE_BREAK = '\r\n'; +FormData.DEFAULT_CONTENT_TYPE = 'application/octet-stream'; - bi = str.indexOf(b, i + 1); - } +FormData.prototype.append = function(field, value, options) { - i = ai < bi && ai >= 0 ? ai : bi; - } + options = options || {}; - if (begs.length) { - result = [ left, right ]; - } + // allow filename as single option + if (typeof options == 'string') { + options = {filename: options}; } - return result; -} + var append = CombinedStream.prototype.append.bind(this); + // all that streamy business can't handle numbers + if (typeof value == 'number') { + value = '' + value; + } -/***/ }), + // https://github.com/felixge/node-form-data/issues/38 + if (util.isArray(value)) { + // Please convert your array into string + // the way web server expects it + this._error(new Error('Arrays are not supported.')); + return; + } -/***/ 3682: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + var header = this._multiPartHeader(field, value, options); + var footer = this._multiPartFooter(); -var register = __nccwpck_require__(4670); -var addHook = __nccwpck_require__(5549); -var removeHook = __nccwpck_require__(6819); + append(header); + append(value); + append(footer); -// bind with array of arguments: https://stackoverflow.com/a/21792913 -var bind = Function.bind; -var bindable = bind.bind(bind); + // pass along options.knownLength + this._trackLength(header, value, options); +}; -function bindApi(hook, state, name) { - var removeHookRef = bindable(removeHook, null).apply( - null, - name ? [state, name] : [state] - ); - hook.api = { remove: removeHookRef }; - hook.remove = removeHookRef; - ["before", "error", "after", "wrap"].forEach(function (kind) { - var args = name ? 
[state, kind, name] : [state, kind]; - hook[kind] = hook.api[kind] = bindable(addHook, null).apply(null, args); - }); -} +FormData.prototype._trackLength = function(header, value, options) { + var valueLength = 0; -function HookSingular() { - var singularHookName = "h"; - var singularHookState = { - registry: {}, - }; - var singularHook = register.bind(null, singularHookState, singularHookName); - bindApi(singularHook, singularHookState, singularHookName); - return singularHook; -} + // used w/ getLengthSync(), when length is known. + // e.g. for streaming directly from a remote server, + // w/ a known file a size, and not wanting to wait for + // incoming file to finish to get its size. + if (options.knownLength != null) { + valueLength += +options.knownLength; + } else if (Buffer.isBuffer(value)) { + valueLength = value.length; + } else if (typeof value === 'string') { + valueLength = Buffer.byteLength(value); + } -function HookCollection() { - var state = { - registry: {}, - }; + this._valueLength += valueLength; - var hook = register.bind(null, state); - bindApi(hook, state); + // @check why add CRLF? does this account for custom/multiple CRLFs? + this._overheadLength += + Buffer.byteLength(header) + + FormData.LINE_BREAK.length; - return hook; -} + // empty or either doesn't have path or not an http response or not a stream + if (!value || ( !value.path && !(value.readable && value.hasOwnProperty('httpVersion')) && !(value instanceof Stream))) { + return; + } -var collectionHookDeprecationMessageDisplayed = false; -function Hook() { - if (!collectionHookDeprecationMessageDisplayed) { - console.warn( - '[before-after-hook]: "Hook()" repurposing warning, use "Hook.Collection()". Read more: https://git.io/upgrade-before-after-hook-to-1.4' - ); - collectionHookDeprecationMessageDisplayed = true; + // no need to bother with the length + if (!options.knownLength) { + this._valuesToMeasure.push(value); } - return HookCollection(); -} +}; -Hook.Singular = HookSingular.bind(); -Hook.Collection = HookCollection.bind(); +FormData.prototype._lengthRetriever = function(value, callback) { -module.exports = Hook; -// expose constructors as a named property for TypeScript -module.exports.Hook = Hook; -module.exports.Singular = Hook.Singular; -module.exports.Collection = Hook.Collection; + if (value.hasOwnProperty('fd')) { + // take read range into a account + // `end` = Infinity –> read file till the end + // + // TODO: Looks like there is bug in Node fs.createReadStream + // it doesn't respect `end` options without `start` options + // Fix it when node fixes it. + // https://github.com/joyent/node/issues/7819 + if (value.end != undefined && value.end != Infinity && value.start != undefined) { -/***/ }), + // when end specified + // no need to calculate range + // inclusive, starts with 0 + callback(null, value.end + 1 - (value.start ? value.start : 0)); -/***/ 5549: -/***/ ((module) => { + // not that fast snoopy + } else { + // still need to fetch file size from fs + fs.stat(value.path, function(err, stat) { -module.exports = addHook; + var fileSize; -function addHook(state, kind, name, hook) { - var orig = hook; - if (!state.registry[name]) { - state.registry[name] = []; - } + if (err) { + callback(err); + return; + } - if (kind === "before") { - hook = function (method, options) { - return Promise.resolve() - .then(orig.bind(null, options)) - .then(method.bind(null, options)); - }; - } + // update final size based on the range options + fileSize = stat.size - (value.start ? 
value.start : 0); + callback(null, fileSize); + }); + } - if (kind === "after") { - hook = function (method, options) { - var result; - return Promise.resolve() - .then(method.bind(null, options)) - .then(function (result_) { - result = result_; - return orig(result, options); - }) - .then(function () { - return result; - }); - }; + // or http response + } else if (value.hasOwnProperty('httpVersion')) { + callback(null, +value.headers['content-length']); + + // or request stream http://github.com/mikeal/request + } else if (value.hasOwnProperty('httpModule')) { + // wait till response come back + value.on('response', function(response) { + value.pause(); + callback(null, +response.headers['content-length']); + }); + value.resume(); + + // something else + } else { + callback('Unknown stream'); } +}; - if (kind === "error") { - hook = function (method, options) { - return Promise.resolve() - .then(method.bind(null, options)) - .catch(function (error) { - return orig(error, options); - }); - }; +FormData.prototype._multiPartHeader = function(field, value, options) { + // custom header specified (as string)? + // it becomes responsible for boundary + // (e.g. to handle extra CRLFs on .NET servers) + if (typeof options.header == 'string') { + return options.header; } - state.registry[name].push({ - hook: hook, - orig: orig, - }); -} + var contentDisposition = this._getContentDisposition(value, options); + var contentType = this._getContentType(value, options); + var contents = ''; + var headers = { + // add custom disposition as third element or keep it two elements if not + 'Content-Disposition': ['form-data', 'name="' + field + '"'].concat(contentDisposition || []), + // if no content type. allow it to be empty array + 'Content-Type': [].concat(contentType || []) + }; -/***/ }), + // allow custom headers. + if (typeof options.header == 'object') { + populate(headers, options.header); + } -/***/ 4670: -/***/ ((module) => { + var header; + for (var prop in headers) { + if (!headers.hasOwnProperty(prop)) continue; + header = headers[prop]; -module.exports = register; + // skip nullish headers. + if (header == null) { + continue; + } -function register(state, name, method, options) { - if (typeof method !== "function") { - throw new Error("method for before hook must be a function"); - } + // convert all headers to arrays. + if (!Array.isArray(header)) { + header = [header]; + } - if (!options) { - options = {}; + // add non-empty headers. 
+ if (header.length) { + contents += prop + ': ' + header.join('; ') + FormData.LINE_BREAK; + } } - if (Array.isArray(name)) { - return name.reverse().reduce(function (callback, name) { - return register.bind(null, state, name, callback, options); - }, method)(); - } + return '--' + this.getBoundary() + FormData.LINE_BREAK + contents + FormData.LINE_BREAK; +}; - return Promise.resolve().then(function () { - if (!state.registry[name]) { - return method(options); - } +FormData.prototype._getContentDisposition = function(value, options) { - return state.registry[name].reduce(function (method, registered) { - return registered.hook.bind(null, method, options); - }, method)(); - }); -} + var filename + , contentDisposition + ; + if (typeof options.filepath === 'string') { + // custom filepath for relative paths + filename = path.normalize(options.filepath).replace(/\\/g, '/'); + } else if (options.filename || value.name || value.path) { + // custom filename take precedence + // formidable and the browser add a name property + // fs- and request- streams have path property + filename = path.basename(options.filename || value.name || value.path); + } else if (value.readable && value.hasOwnProperty('httpVersion')) { + // or try http response + filename = path.basename(value.client._httpMessage.path || ''); + } -/***/ }), + if (filename) { + contentDisposition = 'filename="' + filename + '"'; + } + + return contentDisposition; +}; + +FormData.prototype._getContentType = function(value, options) { + + // use custom content-type above all + var contentType = options.contentType; + + // or try `name` from formidable, browser + if (!contentType && value.name) { + contentType = mime.lookup(value.name); + } + + // or try `path` from fs-, request- streams + if (!contentType && value.path) { + contentType = mime.lookup(value.path); + } + + // or if it's http-reponse + if (!contentType && value.readable && value.hasOwnProperty('httpVersion')) { + contentType = value.headers['content-type']; + } + + // or guess it from the filepath or filename + if (!contentType && (options.filepath || options.filename)) { + contentType = mime.lookup(options.filepath || options.filename); + } + + // fallback to the default content type if `value` is not simple value + if (!contentType && typeof value == 'object') { + contentType = FormData.DEFAULT_CONTENT_TYPE; + } + + return contentType; +}; + +FormData.prototype._multiPartFooter = function() { + return function(next) { + var footer = FormData.LINE_BREAK; + + var lastPart = (this._streams.length === 0); + if (lastPart) { + footer += this._lastBoundary(); + } + + next(footer); + }.bind(this); +}; + +FormData.prototype._lastBoundary = function() { + return '--' + this.getBoundary() + '--' + FormData.LINE_BREAK; +}; + +FormData.prototype.getHeaders = function(userHeaders) { + var header; + var formHeaders = { + 'content-type': 'multipart/form-data; boundary=' + this.getBoundary() + }; + + for (header in userHeaders) { + if (userHeaders.hasOwnProperty(header)) { + formHeaders[header.toLowerCase()] = userHeaders[header]; + } + } + + return formHeaders; +}; + +FormData.prototype.setBoundary = function(boundary) { + this._boundary = boundary; +}; + +FormData.prototype.getBoundary = function() { + if (!this._boundary) { + this._generateBoundary(); + } + + return this._boundary; +}; + +FormData.prototype.getBuffer = function() { + var dataBuffer = new Buffer.alloc( 0 ); + var boundary = this.getBoundary(); + + // Create the form content. Add Line breaks to the end of data. 
+ for (var i = 0, len = this._streams.length; i < len; i++) { + if (typeof this._streams[i] !== 'function') { + + // Add content to the buffer. + if(Buffer.isBuffer(this._streams[i])) { + dataBuffer = Buffer.concat( [dataBuffer, this._streams[i]]); + }else { + dataBuffer = Buffer.concat( [dataBuffer, Buffer.from(this._streams[i])]); + } + + // Add break after content. + if (typeof this._streams[i] !== 'string' || this._streams[i].substring( 2, boundary.length + 2 ) !== boundary) { + dataBuffer = Buffer.concat( [dataBuffer, Buffer.from(FormData.LINE_BREAK)] ); + } + } + } + + // Add the footer and return the Buffer object. + return Buffer.concat( [dataBuffer, Buffer.from(this._lastBoundary())] ); +}; + +FormData.prototype._generateBoundary = function() { + // This generates a 50 character boundary similar to those used by Firefox. + // They are optimized for boyer-moore parsing. + var boundary = '--------------------------'; + for (var i = 0; i < 24; i++) { + boundary += Math.floor(Math.random() * 10).toString(16); + } + + this._boundary = boundary; +}; + +// Note: getLengthSync DOESN'T calculate streams length +// As workaround one can calculate file size manually +// and add it as knownLength option +FormData.prototype.getLengthSync = function() { + var knownLength = this._overheadLength + this._valueLength; + + // Don't get confused, there are 3 "internal" streams for each keyval pair + // so it basically checks if there is any value added to the form + if (this._streams.length) { + knownLength += this._lastBoundary().length; + } + + // https://github.com/form-data/form-data/issues/40 + if (!this.hasKnownLength()) { + // Some async length retrievers are present + // therefore synchronous length calculation is false. + // Please use getLength(callback) to get proper length + this._error(new Error('Cannot calculate proper length in synchronous way.')); + } + + return knownLength; +}; + +// Public API to check if length of added values is known +// https://github.com/form-data/form-data/issues/196 +// https://github.com/form-data/form-data/issues/262 +FormData.prototype.hasKnownLength = function() { + var hasKnownLength = true; + + if (this._valuesToMeasure.length) { + hasKnownLength = false; + } + + return hasKnownLength; +}; + +FormData.prototype.getLength = function(cb) { + var knownLength = this._overheadLength + this._valueLength; + + if (this._streams.length) { + knownLength += this._lastBoundary().length; + } + + if (!this._valuesToMeasure.length) { + process.nextTick(cb.bind(this, null, knownLength)); + return; + } + + asynckit.parallel(this._valuesToMeasure, this._lengthRetriever, function(err, values) { + if (err) { + cb(err); + return; + } + + values.forEach(function(length) { + knownLength += length; + }); + + cb(null, knownLength); + }); +}; + +FormData.prototype.submit = function(params, cb) { + var request + , options + , defaults = {method: 'post'} + ; + + // parse provided url if it's string + // or treat it as options object + if (typeof params == 'string') { + + params = parseUrl(params); + options = populate({ + port: params.port, + path: params.pathname, + host: params.hostname, + protocol: params.protocol + }, defaults); + + // use custom params + } else { + + options = populate(params, defaults); + // if no port provided use default one + if (!options.port) { + options.port = options.protocol == 'https:' ? 
443 : 80; + } + } + + // put that good code in getHeaders to some use + options.headers = this.getHeaders(params.headers); + + // https if specified, fallback to http in any other case + if (options.protocol == 'https:') { + request = https.request(options); + } else { + request = http.request(options); + } + + // get content length and fire away + this.getLength(function(err, length) { + if (err && err !== 'Unknown stream') { + this._error(err); + return; + } + + // add content length + if (length) { + request.setHeader('Content-Length', length); + } + + this.pipe(request); + if (cb) { + var onResponse; + + var callback = function (error, responce) { + request.removeListener('error', callback); + request.removeListener('response', onResponse); + + return cb.call(this, error, responce); + }; + + onResponse = callback.bind(this, null); + + request.on('error', callback); + request.on('response', onResponse); + } + }.bind(this)); + + return request; +}; + +FormData.prototype._error = function(err) { + if (!this.error) { + this.error = err; + this.pause(); + this.emit('error', err); + } +}; + +FormData.prototype.toString = function () { + return '[object FormData]'; +}; + + +/***/ }), + +/***/ 7027: +/***/ ((module) => { + +// populates missing values +module.exports = function(dst, src) { + + Object.keys(src).forEach(function(prop) + { + dst[prop] = dst[prop] || src[prop]; + }); + + return dst; +}; + + +/***/ }), + +/***/ 9417: +/***/ ((module) => { + +"use strict"; + +module.exports = balanced; +function balanced(a, b, str) { + if (a instanceof RegExp) a = maybeMatch(a, str); + if (b instanceof RegExp) b = maybeMatch(b, str); + + var r = range(a, b, str); + + return r && { + start: r[0], + end: r[1], + pre: str.slice(0, r[0]), + body: str.slice(r[0] + a.length, r[1]), + post: str.slice(r[1] + b.length) + }; +} + +function maybeMatch(reg, str) { + var m = str.match(reg); + return m ? m[0] : null; +} + +balanced.range = range; +function range(a, b, str) { + var begs, beg, left, right, result; + var ai = str.indexOf(a); + var bi = str.indexOf(b, ai + 1); + var i = ai; + + if (ai >= 0 && bi > 0) { + if(a===b) { + return [ai, bi]; + } + begs = []; + left = str.length; + + while (i >= 0 && !result) { + if (i == ai) { + begs.push(i); + ai = str.indexOf(a, i + 1); + } else if (begs.length == 1) { + result = [ begs.pop(), bi ]; + } else { + beg = begs.pop(); + if (beg < left) { + left = beg; + right = bi; + } + + bi = str.indexOf(b, i + 1); + } + + i = ai < bi && ai >= 0 ? ai : bi; + } + + if (begs.length) { + result = [ left, right ]; + } + } + + return result; +} + + +/***/ }), + +/***/ 3682: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var register = __nccwpck_require__(4670); +var addHook = __nccwpck_require__(5549); +var removeHook = __nccwpck_require__(6819); + +// bind with array of arguments: https://stackoverflow.com/a/21792913 +var bind = Function.bind; +var bindable = bind.bind(bind); + +function bindApi(hook, state, name) { + var removeHookRef = bindable(removeHook, null).apply( + null, + name ? [state, name] : [state] + ); + hook.api = { remove: removeHookRef }; + hook.remove = removeHookRef; + ["before", "error", "after", "wrap"].forEach(function (kind) { + var args = name ? 
[state, kind, name] : [state, kind]; + hook[kind] = hook.api[kind] = bindable(addHook, null).apply(null, args); + }); +} + +function HookSingular() { + var singularHookName = "h"; + var singularHookState = { + registry: {}, + }; + var singularHook = register.bind(null, singularHookState, singularHookName); + bindApi(singularHook, singularHookState, singularHookName); + return singularHook; +} + +function HookCollection() { + var state = { + registry: {}, + }; + + var hook = register.bind(null, state); + bindApi(hook, state); + + return hook; +} + +var collectionHookDeprecationMessageDisplayed = false; +function Hook() { + if (!collectionHookDeprecationMessageDisplayed) { + console.warn( + '[before-after-hook]: "Hook()" repurposing warning, use "Hook.Collection()". Read more: https://git.io/upgrade-before-after-hook-to-1.4' + ); + collectionHookDeprecationMessageDisplayed = true; + } + return HookCollection(); +} + +Hook.Singular = HookSingular.bind(); +Hook.Collection = HookCollection.bind(); + +module.exports = Hook; +// expose constructors as a named property for TypeScript +module.exports.Hook = Hook; +module.exports.Singular = Hook.Singular; +module.exports.Collection = Hook.Collection; + + +/***/ }), + +/***/ 5549: +/***/ ((module) => { + +module.exports = addHook; + +function addHook(state, kind, name, hook) { + var orig = hook; + if (!state.registry[name]) { + state.registry[name] = []; + } + + if (kind === "before") { + hook = function (method, options) { + return Promise.resolve() + .then(orig.bind(null, options)) + .then(method.bind(null, options)); + }; + } + + if (kind === "after") { + hook = function (method, options) { + var result; + return Promise.resolve() + .then(method.bind(null, options)) + .then(function (result_) { + result = result_; + return orig(result, options); + }) + .then(function () { + return result; + }); + }; + } + + if (kind === "error") { + hook = function (method, options) { + return Promise.resolve() + .then(method.bind(null, options)) + .catch(function (error) { + return orig(error, options); + }); + }; + } + + state.registry[name].push({ + hook: hook, + orig: orig, + }); +} + + +/***/ }), + +/***/ 4670: +/***/ ((module) => { + +module.exports = register; + +function register(state, name, method, options) { + if (typeof method !== "function") { + throw new Error("method for before hook must be a function"); + } + + if (!options) { + options = {}; + } + + if (Array.isArray(name)) { + return name.reverse().reduce(function (callback, name) { + return register.bind(null, state, name, callback, options); + }, method)(); + } + + return Promise.resolve().then(function () { + if (!state.registry[name]) { + return method(options); + } + + return state.registry[name].reduce(function (method, registered) { + return registered.hook.bind(null, method, options); + }, method)(); + }); +} + + +/***/ }), /***/ 6819: /***/ ((module) => { @@ -49847,6 +50791,707 @@ function readPackageJSON(dir) { } +/***/ }), + +/***/ 1133: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var debug; + +module.exports = function () { + if (!debug) { + try { + /* eslint global-require: off */ + debug = __nccwpck_require__(8237)("follow-redirects"); + } + catch (error) { /* */ } + if (typeof debug !== "function") { + debug = function () { /* */ }; + } + } + debug.apply(null, arguments); +}; + + +/***/ }), + +/***/ 7707: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var url = __nccwpck_require__(7310); +var URL = url.URL; +var http = 
__nccwpck_require__(3685); +var https = __nccwpck_require__(5687); +var Writable = (__nccwpck_require__(2781).Writable); +var assert = __nccwpck_require__(9491); +var debug = __nccwpck_require__(1133); + +// Whether to use the native URL object or the legacy url module +var useNativeURL = false; +try { + assert(new URL()); +} +catch (error) { + useNativeURL = error.code === "ERR_INVALID_URL"; +} + +// URL fields to preserve in copy operations +var preservedUrlFields = [ + "auth", + "host", + "hostname", + "href", + "path", + "pathname", + "port", + "protocol", + "query", + "search", + "hash", +]; + +// Create handlers that pass events from native requests +var events = ["abort", "aborted", "connect", "error", "socket", "timeout"]; +var eventHandlers = Object.create(null); +events.forEach(function (event) { + eventHandlers[event] = function (arg1, arg2, arg3) { + this._redirectable.emit(event, arg1, arg2, arg3); + }; +}); + +// Error types with codes +var InvalidUrlError = createErrorType( + "ERR_INVALID_URL", + "Invalid URL", + TypeError +); +var RedirectionError = createErrorType( + "ERR_FR_REDIRECTION_FAILURE", + "Redirected request failed" +); +var TooManyRedirectsError = createErrorType( + "ERR_FR_TOO_MANY_REDIRECTS", + "Maximum number of redirects exceeded", + RedirectionError +); +var MaxBodyLengthExceededError = createErrorType( + "ERR_FR_MAX_BODY_LENGTH_EXCEEDED", + "Request body larger than maxBodyLength limit" +); +var WriteAfterEndError = createErrorType( + "ERR_STREAM_WRITE_AFTER_END", + "write after end" +); + +// istanbul ignore next +var destroy = Writable.prototype.destroy || noop; + +// An HTTP(S) request that can be redirected +function RedirectableRequest(options, responseCallback) { + // Initialize the request + Writable.call(this); + this._sanitizeOptions(options); + this._options = options; + this._ended = false; + this._ending = false; + this._redirectCount = 0; + this._redirects = []; + this._requestBodyLength = 0; + this._requestBodyBuffers = []; + + // Attach a callback if passed + if (responseCallback) { + this.on("response", responseCallback); + } + + // React to responses of native requests + var self = this; + this._onNativeResponse = function (response) { + try { + self._processResponse(response); + } + catch (cause) { + self.emit("error", cause instanceof RedirectionError ? 
+ cause : new RedirectionError({ cause: cause })); + } + }; + + // Perform the first request + this._performRequest(); +} +RedirectableRequest.prototype = Object.create(Writable.prototype); + +RedirectableRequest.prototype.abort = function () { + destroyRequest(this._currentRequest); + this._currentRequest.abort(); + this.emit("abort"); +}; + +RedirectableRequest.prototype.destroy = function (error) { + destroyRequest(this._currentRequest, error); + destroy.call(this, error); + return this; +}; + +// Writes buffered data to the current native request +RedirectableRequest.prototype.write = function (data, encoding, callback) { + // Writing is not allowed if end has been called + if (this._ending) { + throw new WriteAfterEndError(); + } + + // Validate input and shift parameters if necessary + if (!isString(data) && !isBuffer(data)) { + throw new TypeError("data should be a string, Buffer or Uint8Array"); + } + if (isFunction(encoding)) { + callback = encoding; + encoding = null; + } + + // Ignore empty buffers, since writing them doesn't invoke the callback + // https://github.com/nodejs/node/issues/22066 + if (data.length === 0) { + if (callback) { + callback(); + } + return; + } + // Only write when we don't exceed the maximum body length + if (this._requestBodyLength + data.length <= this._options.maxBodyLength) { + this._requestBodyLength += data.length; + this._requestBodyBuffers.push({ data: data, encoding: encoding }); + this._currentRequest.write(data, encoding, callback); + } + // Error when we exceed the maximum body length + else { + this.emit("error", new MaxBodyLengthExceededError()); + this.abort(); + } +}; + +// Ends the current native request +RedirectableRequest.prototype.end = function (data, encoding, callback) { + // Shift parameters if necessary + if (isFunction(data)) { + callback = data; + data = encoding = null; + } + else if (isFunction(encoding)) { + callback = encoding; + encoding = null; + } + + // Write data if needed and end + if (!data) { + this._ended = this._ending = true; + this._currentRequest.end(null, null, callback); + } + else { + var self = this; + var currentRequest = this._currentRequest; + this.write(data, encoding, function () { + self._ended = true; + currentRequest.end(null, null, callback); + }); + this._ending = true; + } +}; + +// Sets a header value on the current native request +RedirectableRequest.prototype.setHeader = function (name, value) { + this._options.headers[name] = value; + this._currentRequest.setHeader(name, value); +}; + +// Clears a header value on the current native request +RedirectableRequest.prototype.removeHeader = function (name) { + delete this._options.headers[name]; + this._currentRequest.removeHeader(name); +}; + +// Global timeout for all underlying requests +RedirectableRequest.prototype.setTimeout = function (msecs, callback) { + var self = this; + + // Destroys the socket on timeout + function destroyOnTimeout(socket) { + socket.setTimeout(msecs); + socket.removeListener("timeout", socket.destroy); + socket.addListener("timeout", socket.destroy); + } + + // Sets up a timer to trigger a timeout event + function startTimer(socket) { + if (self._timeout) { + clearTimeout(self._timeout); + } + self._timeout = setTimeout(function () { + self.emit("timeout"); + clearTimer(); + }, msecs); + destroyOnTimeout(socket); + } + + // Stops a timeout from triggering + function clearTimer() { + // Clear the timeout + if (self._timeout) { + clearTimeout(self._timeout); + self._timeout = null; + } + + // Clean up all attached 
listeners + self.removeListener("abort", clearTimer); + self.removeListener("error", clearTimer); + self.removeListener("response", clearTimer); + self.removeListener("close", clearTimer); + if (callback) { + self.removeListener("timeout", callback); + } + if (!self.socket) { + self._currentRequest.removeListener("socket", startTimer); + } + } + + // Attach callback if passed + if (callback) { + this.on("timeout", callback); + } + + // Start the timer if or when the socket is opened + if (this.socket) { + startTimer(this.socket); + } + else { + this._currentRequest.once("socket", startTimer); + } + + // Clean up on events + this.on("socket", destroyOnTimeout); + this.on("abort", clearTimer); + this.on("error", clearTimer); + this.on("response", clearTimer); + this.on("close", clearTimer); + + return this; +}; + +// Proxy all other public ClientRequest methods +[ + "flushHeaders", "getHeader", + "setNoDelay", "setSocketKeepAlive", +].forEach(function (method) { + RedirectableRequest.prototype[method] = function (a, b) { + return this._currentRequest[method](a, b); + }; +}); + +// Proxy all public ClientRequest properties +["aborted", "connection", "socket"].forEach(function (property) { + Object.defineProperty(RedirectableRequest.prototype, property, { + get: function () { return this._currentRequest[property]; }, + }); +}); + +RedirectableRequest.prototype._sanitizeOptions = function (options) { + // Ensure headers are always present + if (!options.headers) { + options.headers = {}; + } + + // Since http.request treats host as an alias of hostname, + // but the url module interprets host as hostname plus port, + // eliminate the host property to avoid confusion. + if (options.host) { + // Use hostname if set, because it has precedence + if (!options.hostname) { + options.hostname = options.host; + } + delete options.host; + } + + // Complete the URL object when necessary + if (!options.pathname && options.path) { + var searchPos = options.path.indexOf("?"); + if (searchPos < 0) { + options.pathname = options.path; + } + else { + options.pathname = options.path.substring(0, searchPos); + options.search = options.path.substring(searchPos); + } + } +}; + + +// Executes the next native request (initial or redirect) +RedirectableRequest.prototype._performRequest = function () { + // Load the native protocol + var protocol = this._options.protocol; + var nativeProtocol = this._options.nativeProtocols[protocol]; + if (!nativeProtocol) { + throw new TypeError("Unsupported protocol " + protocol); + } + + // If specified, use the agent corresponding to the protocol + // (HTTP and HTTPS use different types of agents) + if (this._options.agents) { + var scheme = protocol.slice(0, -1); + this._options.agent = this._options.agents[scheme]; + } + + // Create the native request and set up its event handlers + var request = this._currentRequest = + nativeProtocol.request(this._options, this._onNativeResponse); + request._redirectable = this; + for (var event of events) { + request.on(event, eventHandlers[event]); + } + + // RFC7230§5.3.1: When making a request directly to an origin server, […] + // a client MUST send only the absolute path […] as the request-target. + this._currentUrl = /^\//.test(this._options.path) ? + url.format(this._options) : + // When making a request to a proxy, […] + // a client MUST send the target URI in absolute-form […]. 
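// [Editor's note: illustrative aside, not part of the upstream patch.] The ternary that resumes
// on the next line tracks the two request-target forms named in the RFC comment above
// ("api.example.test" is a made-up host for illustration):
//   // origin-form (direct request):  path === '/v2/cache'                      -> _currentUrl = url.format(options)
//   // absolute-form (via a proxy):   path === 'http://api.example.test/v2/...' -> _currentUrl = options.path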
+ this._options.path; + + // End a redirected request + // (The first request must be ended explicitly with RedirectableRequest#end) + if (this._isRedirect) { + // Write the request entity and end + var i = 0; + var self = this; + var buffers = this._requestBodyBuffers; + (function writeNext(error) { + // Only write if this request has not been redirected yet + /* istanbul ignore else */ + if (request === self._currentRequest) { + // Report any write errors + /* istanbul ignore if */ + if (error) { + self.emit("error", error); + } + // Write the next buffer if there are still left + else if (i < buffers.length) { + var buffer = buffers[i++]; + /* istanbul ignore else */ + if (!request.finished) { + request.write(buffer.data, buffer.encoding, writeNext); + } + } + // End the request if `end` has been called on us + else if (self._ended) { + request.end(); + } + } + }()); + } +}; + +// Processes a response from the current native request +RedirectableRequest.prototype._processResponse = function (response) { + // Store the redirected response + var statusCode = response.statusCode; + if (this._options.trackRedirects) { + this._redirects.push({ + url: this._currentUrl, + headers: response.headers, + statusCode: statusCode, + }); + } + + // RFC7231§6.4: The 3xx (Redirection) class of status code indicates + // that further action needs to be taken by the user agent in order to + // fulfill the request. If a Location header field is provided, + // the user agent MAY automatically redirect its request to the URI + // referenced by the Location field value, + // even if the specific status code is not understood. + + // If the response is not a redirect; return it as-is + var location = response.headers.location; + if (!location || this._options.followRedirects === false || + statusCode < 300 || statusCode >= 400) { + response.responseUrl = this._currentUrl; + response.redirects = this._redirects; + this.emit("response", response); + + // Clean up + this._requestBodyBuffers = []; + return; + } + + // The response is a redirect, so abort the current request + destroyRequest(this._currentRequest); + // Discard the remainder of the response to avoid waiting for data + response.destroy(); + + // RFC7231§6.4: A client SHOULD detect and intervene + // in cyclical redirections (i.e., "infinite" redirection loops). + if (++this._redirectCount > this._options.maxRedirects) { + throw new TooManyRedirectsError(); + } + + // Store the request headers if applicable + var requestHeaders; + var beforeRedirect = this._options.beforeRedirect; + if (beforeRedirect) { + requestHeaders = Object.assign({ + // The Host header was set by nativeProtocol.request + Host: response.req.getHeader("host"), + }, this._options.headers); + } + + // RFC7231§6.4: Automatic redirection needs to done with + // care for methods not known to be safe, […] + // RFC7231§6.4.2–3: For historical reasons, a user agent MAY change + // the request method from POST to GET for the subsequent request. 
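// [Editor's note: illustrative aside, not part of the upstream patch.] Concretely, the branch
// below rewrites the method and drops the request body for the historical 301/302-on-POST case
// and for 303 responses to any non-GET/HEAD method, e.g.:
//   POST /login  -> 302 Location: /home   => followed as GET /home, Content-* headers removed
//   PUT  /things -> 303 Location: /things => followed as GET /things
// Other redirects (301/302 on GET, 307/308 on anything) keep the original method and body.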
+ var method = this._options.method; + if ((statusCode === 301 || statusCode === 302) && this._options.method === "POST" || + // RFC7231§6.4.4: The 303 (See Other) status code indicates that + // the server is redirecting the user agent to a different resource […] + // A user agent can perform a retrieval request targeting that URI + // (a GET or HEAD request if using HTTP) […] + (statusCode === 303) && !/^(?:GET|HEAD)$/.test(this._options.method)) { + this._options.method = "GET"; + // Drop a possible entity and headers related to it + this._requestBodyBuffers = []; + removeMatchingHeaders(/^content-/i, this._options.headers); + } + + // Drop the Host header, as the redirect might lead to a different host + var currentHostHeader = removeMatchingHeaders(/^host$/i, this._options.headers); + + // If the redirect is relative, carry over the host of the last request + var currentUrlParts = parseUrl(this._currentUrl); + var currentHost = currentHostHeader || currentUrlParts.host; + var currentUrl = /^\w+:/.test(location) ? this._currentUrl : + url.format(Object.assign(currentUrlParts, { host: currentHost })); + + // Create the redirected request + var redirectUrl = resolveUrl(location, currentUrl); + debug("redirecting to", redirectUrl.href); + this._isRedirect = true; + spreadUrlObject(redirectUrl, this._options); + + // Drop confidential headers when redirecting to a less secure protocol + // or to a different domain that is not a superdomain + if (redirectUrl.protocol !== currentUrlParts.protocol && + redirectUrl.protocol !== "https:" || + redirectUrl.host !== currentHost && + !isSubdomain(redirectUrl.host, currentHost)) { + removeMatchingHeaders(/^(?:(?:proxy-)?authorization|cookie)$/i, this._options.headers); + } + + // Evaluate the beforeRedirect callback + if (isFunction(beforeRedirect)) { + var responseDetails = { + headers: response.headers, + statusCode: statusCode, + }; + var requestDetails = { + url: currentUrl, + method: method, + headers: requestHeaders, + }; + beforeRedirect(this._options, responseDetails, requestDetails); + this._sanitizeOptions(this._options); + } + + // Perform the redirected request + this._performRequest(); +}; + +// Wraps the key/value object of protocols with redirect functionality +function wrap(protocols) { + // Default settings + var exports = { + maxRedirects: 21, + maxBodyLength: 10 * 1024 * 1024, + }; + + // Wrap each protocol + var nativeProtocols = {}; + Object.keys(protocols).forEach(function (scheme) { + var protocol = scheme + ":"; + var nativeProtocol = nativeProtocols[protocol] = protocols[scheme]; + var wrappedProtocol = exports[scheme] = Object.create(nativeProtocol); + + // Executes a request, following redirects + function request(input, options, callback) { + // Parse parameters, ensuring that input is an object + if (isURL(input)) { + input = spreadUrlObject(input); + } + else if (isString(input)) { + input = spreadUrlObject(parseUrl(input)); + } + else { + callback = options; + options = validateUrl(input); + input = { protocol: protocol }; + } + if (isFunction(options)) { + callback = options; + options = null; + } + + // Set defaults + options = Object.assign({ + maxRedirects: exports.maxRedirects, + maxBodyLength: exports.maxBodyLength, + }, input, options); + options.nativeProtocols = nativeProtocols; + if (!isString(options.host) && !isString(options.hostname)) { + options.hostname = "::1"; + } + + assert.equal(options.protocol, protocol, "protocol mismatch"); + debug("options", options); + return new RedirectableRequest(options, 
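// [Editor's note: illustrative aside, not part of the upstream patch.] wrap() above exposes
// maxRedirects: 21 and maxBodyLength: 10 MiB as module-wide defaults, and the Object.assign()
// in request() lets callers override them per request; roughly, for the wrapped https export:
//   followRedirects.https.request({ host: 'example.test', path: '/', maxRedirects: 5 }, cb);
// Exceeding the limit surfaces as ERR_FR_TOO_MANY_REDIRECTS (see TooManyRedirectsError above).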
callback); + } + + // Executes a GET request, following redirects + function get(input, options, callback) { + var wrappedRequest = wrappedProtocol.request(input, options, callback); + wrappedRequest.end(); + return wrappedRequest; + } + + // Expose the properties on the wrapped protocol + Object.defineProperties(wrappedProtocol, { + request: { value: request, configurable: true, enumerable: true, writable: true }, + get: { value: get, configurable: true, enumerable: true, writable: true }, + }); + }); + return exports; +} + +function noop() { /* empty */ } + +function parseUrl(input) { + var parsed; + /* istanbul ignore else */ + if (useNativeURL) { + parsed = new URL(input); + } + else { + // Ensure the URL is valid and absolute + parsed = validateUrl(url.parse(input)); + if (!isString(parsed.protocol)) { + throw new InvalidUrlError({ input }); + } + } + return parsed; +} + +function resolveUrl(relative, base) { + /* istanbul ignore next */ + return useNativeURL ? new URL(relative, base) : parseUrl(url.resolve(base, relative)); +} + +function validateUrl(input) { + if (/^\[/.test(input.hostname) && !/^\[[:0-9a-f]+\]$/i.test(input.hostname)) { + throw new InvalidUrlError({ input: input.href || input }); + } + if (/^\[/.test(input.host) && !/^\[[:0-9a-f]+\](:\d+)?$/i.test(input.host)) { + throw new InvalidUrlError({ input: input.href || input }); + } + return input; +} + +function spreadUrlObject(urlObject, target) { + var spread = target || {}; + for (var key of preservedUrlFields) { + spread[key] = urlObject[key]; + } + + // Fix IPv6 hostname + if (spread.hostname.startsWith("[")) { + spread.hostname = spread.hostname.slice(1, -1); + } + // Ensure port is a number + if (spread.port !== "") { + spread.port = Number(spread.port); + } + // Concatenate path + spread.path = spread.search ? spread.pathname + spread.search : spread.pathname; + + return spread; +} + +function removeMatchingHeaders(regex, headers) { + var lastValue; + for (var header in headers) { + if (regex.test(header)) { + lastValue = headers[header]; + delete headers[header]; + } + } + return (lastValue === null || typeof lastValue === "undefined") ? + undefined : String(lastValue).trim(); +} + +function createErrorType(code, message, baseClass) { + // Create constructor + function CustomError(properties) { + Error.captureStackTrace(this, this.constructor); + Object.assign(this, properties || {}); + this.code = code; + this.message = this.cause ? message + ": " + this.cause.message : message; + } + + // Attach constructor and set default properties + CustomError.prototype = new (baseClass || Error)(); + Object.defineProperties(CustomError.prototype, { + constructor: { + value: CustomError, + enumerable: false, + }, + name: { + value: "Error [" + code + "]", + enumerable: false, + }, + }); + return CustomError; +} + +function destroyRequest(request, error) { + for (var event of events) { + request.removeListener(event, eventHandlers[event]); + } + request.on("error", noop); + request.destroy(error); +} + +function isSubdomain(subdomain, domain) { + assert(isString(subdomain) && isString(domain)); + var dot = subdomain.length - domain.length - 1; + return dot > 0 && subdomain[dot] === "." 
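// [Editor's note: illustrative aside, not part of the upstream patch.] The dot-position check in
// the return statement that resumes below is what keeps credential headers from leaking to
// look-alike hosts during a cross-host redirect:
//   isSubdomain('api.example.com', 'example.com')  // true  -> Authorization/Cookie kept
//   isSubdomain('badexample.com',  'example.com')  // false -> confidential headers dropped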
&& subdomain.endsWith(domain); +} + +function isString(value) { + return typeof value === "string" || value instanceof String; +} + +function isFunction(value) { + return typeof value === "function"; +} + +function isBuffer(value) { + return typeof value === "object" && ("length" in value); +} + +function isURL(value) { + return URL && value instanceof URL; +} + +// Exports +module.exports = wrap({ http: http, https: https }); +module.exports.wrap = wrap; + + /***/ }), /***/ 6457: @@ -55153,6 +56798,53 @@ function isPlainObject(o) { exports.isPlainObject = isPlainObject; +/***/ }), + +/***/ 841: +/***/ ((module) => { + +"use strict"; + + +const denyList = new Set([ + 'ENOTFOUND', + 'ENETUNREACH', + + // SSL errors from https://github.com/nodejs/node/blob/fc8e3e2cdc521978351de257030db0076d79e0ab/src/crypto/crypto_common.cc#L301-L328 + 'UNABLE_TO_GET_ISSUER_CERT', + 'UNABLE_TO_GET_CRL', + 'UNABLE_TO_DECRYPT_CERT_SIGNATURE', + 'UNABLE_TO_DECRYPT_CRL_SIGNATURE', + 'UNABLE_TO_DECODE_ISSUER_PUBLIC_KEY', + 'CERT_SIGNATURE_FAILURE', + 'CRL_SIGNATURE_FAILURE', + 'CERT_NOT_YET_VALID', + 'CERT_HAS_EXPIRED', + 'CRL_NOT_YET_VALID', + 'CRL_HAS_EXPIRED', + 'ERROR_IN_CERT_NOT_BEFORE_FIELD', + 'ERROR_IN_CERT_NOT_AFTER_FIELD', + 'ERROR_IN_CRL_LAST_UPDATE_FIELD', + 'ERROR_IN_CRL_NEXT_UPDATE_FIELD', + 'OUT_OF_MEM', + 'DEPTH_ZERO_SELF_SIGNED_CERT', + 'SELF_SIGNED_CERT_IN_CHAIN', + 'UNABLE_TO_GET_ISSUER_CERT_LOCALLY', + 'UNABLE_TO_VERIFY_LEAF_SIGNATURE', + 'CERT_CHAIN_TOO_LONG', + 'CERT_REVOKED', + 'INVALID_CA', + 'PATH_LENGTH_EXCEEDED', + 'INVALID_PURPOSE', + 'CERT_UNTRUSTED', + 'CERT_REJECTED', + 'HOSTNAME_MISMATCH' +]); + +// TODO: Use `error?.code` when targeting Node.js 14 +module.exports = error => !denyList.has(error && error.code); + + /***/ }), /***/ 2820: @@ -61583,6 +63275,122 @@ exports.wrapOutput = (input, state = {}, options = {}) => { }; +/***/ }), + +/***/ 3329: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +var parseUrl = (__nccwpck_require__(7310).parse); + +var DEFAULT_PORTS = { + ftp: 21, + gopher: 70, + http: 80, + https: 443, + ws: 80, + wss: 443, +}; + +var stringEndsWith = String.prototype.endsWith || function(s) { + return s.length <= this.length && + this.indexOf(s, this.length - s.length) !== -1; +}; + +/** + * @param {string|object} url - The URL, or the result from url.parse. + * @return {string} The URL of the proxy that should handle the request to the + * given URL. If no proxy is set, this will be an empty string. + */ +function getProxyForUrl(url) { + var parsedUrl = typeof url === 'string' ? parseUrl(url) : url || {}; + var proto = parsedUrl.protocol; + var hostname = parsedUrl.host; + var port = parsedUrl.port; + if (typeof hostname !== 'string' || !hostname || typeof proto !== 'string') { + return ''; // Don't proxy URLs without a valid scheme or host. + } + + proto = proto.split(':', 1)[0]; + // Stripping ports in this way instead of using parsedUrl.hostname to make + // sure that the brackets around IPv6 addresses are kept. + hostname = hostname.replace(/:\d*$/, ''); + port = parseInt(port) || DEFAULT_PORTS[proto] || 0; + if (!shouldProxy(hostname, port)) { + return ''; // Don't proxy URLs that match NO_PROXY. + } + + var proxy = + getEnv('npm_config_' + proto + '_proxy') || + getEnv(proto + '_proxy') || + getEnv('npm_config_proxy') || + getEnv('all_proxy'); + if (proxy && proxy.indexOf('://') === -1) { + // Missing scheme in proxy, default to the requested URL's scheme. 
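// [Editor's note: illustrative aside, not part of the upstream patch.] getProxyForUrl() above
// resolves the proxy from npm_config_<proto>_proxy, then <proto>_proxy, then npm_config_proxy,
// then all_proxy (upper- or lower-case), honoring NO_PROXY; the statement below then prefixes
// the requested scheme when the variable has none. For example, with HTTPS_PROXY=proxy.local:3128
// a request to https://registry.example.test resolves to "https://proxy.local:3128"
// (both hosts are made up for illustration).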
+ proxy = proto + '://' + proxy; + } + return proxy; +} + +/** + * Determines whether a given URL should be proxied. + * + * @param {string} hostname - The host name of the URL. + * @param {number} port - The effective port of the URL. + * @returns {boolean} Whether the given URL should be proxied. + * @private + */ +function shouldProxy(hostname, port) { + var NO_PROXY = + (getEnv('npm_config_no_proxy') || getEnv('no_proxy')).toLowerCase(); + if (!NO_PROXY) { + return true; // Always proxy if NO_PROXY is not set. + } + if (NO_PROXY === '*') { + return false; // Never proxy if wildcard is set. + } + + return NO_PROXY.split(/[,\s]/).every(function(proxy) { + if (!proxy) { + return true; // Skip zero-length hosts. + } + var parsedProxy = proxy.match(/^(.+):(\d+)$/); + var parsedProxyHostname = parsedProxy ? parsedProxy[1] : proxy; + var parsedProxyPort = parsedProxy ? parseInt(parsedProxy[2]) : 0; + if (parsedProxyPort && parsedProxyPort !== port) { + return true; // Skip if ports don't match. + } + + if (!/^[.*]/.test(parsedProxyHostname)) { + // No wildcards, so stop proxying if there is an exact match. + return hostname !== parsedProxyHostname; + } + + if (parsedProxyHostname.charAt(0) === '*') { + // Remove leading wildcard. + parsedProxyHostname = parsedProxyHostname.slice(1); + } + // Stop proxying if the hostname ends with the no_proxy host. + return !stringEndsWith.call(hostname, parsedProxyHostname); + }); +} + +/** + * Get the value for an environment variable. + * + * @param {string} key - The name of the environment variable. + * @return {string} The value of the environment variable. + * @private + */ +function getEnv(key) { + return process.env[key.toLowerCase()] || process.env[key.toUpperCase()] || ''; +} + +exports.getProxyForUrl = getProxyForUrl; + + /***/ }), /***/ 8341: @@ -73177,7 +74985,11 @@ class Request { } if (!extractBody) { +<<<<<<< HEAD extractBody = (__nccwpck_require__(1472).extractBody) +======= + extractBody = (__nccwpck_require__(9990).extractBody) +>>>>>>> 81f8ab7 (*: integrate the blacksmith cache) } const [bodyStream, contentType] = extractBody(body) @@ -74304,7 +76116,11 @@ module.exports = Dispatcher /***/ }), +<<<<<<< HEAD /***/ 1472: +======= +/***/ 9990: +>>>>>>> 81f8ab7 (*: integrate the blacksmith cache) /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; @@ -77030,7 +78846,11 @@ const { } = __nccwpck_require__(2538) const { kState, kHeaders, kGuard, kRealm } = __nccwpck_require__(5861) const assert = __nccwpck_require__(9491) +<<<<<<< HEAD const { safelyExtractBody } = __nccwpck_require__(1472) +======= +const { safelyExtractBody } = __nccwpck_require__(9990) +>>>>>>> 81f8ab7 (*: integrate the blacksmith cache) const { redirectStatusSet, nullBodyStatus, @@ -79144,7 +80964,11 @@ module.exports = { +<<<<<<< HEAD const { extractBody, mixinBody, cloneBody } = __nccwpck_require__(1472) +======= +const { extractBody, mixinBody, cloneBody } = __nccwpck_require__(9990) +>>>>>>> 81f8ab7 (*: integrate the blacksmith cache) const { Headers, fill: fillHeaders, HeadersList } = __nccwpck_require__(554) const { FinalizationRegistry } = __nccwpck_require__(6436)() const util = __nccwpck_require__(3983) @@ -80097,7 +81921,11 @@ module.exports = { Request, makeRequest } const { Headers, HeadersList, fill } = __nccwpck_require__(554) +<<<<<<< HEAD const { extractBody, cloneBody, mixinBody } = __nccwpck_require__(1472) +======= +const { extractBody, cloneBody, mixinBody } = __nccwpck_require__(9990) +>>>>>>> 81f8ab7 (*: integrate the 
blacksmith cache) const util = __nccwpck_require__(3983) const { kEnumerableProperty } = util const { @@ -97450,6 +99278,4956 @@ module.exports = parseParams /***/ }), +<<<<<<< HEAD +======= +/***/ 8709: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.DEFAULT_OPTIONS = exports.exponentialDelay = exports.retryAfter = exports.isNetworkOrIdempotentRequestError = exports.isIdempotentRequestError = exports.isSafeRequestError = exports.isRetryableError = exports.isNetworkError = exports.namespace = void 0; +const is_retry_allowed_1 = __importDefault(__nccwpck_require__(841)); +exports.namespace = 'axios-retry'; +function isNetworkError(error) { + const CODE_EXCLUDE_LIST = ['ERR_CANCELED', 'ECONNABORTED']; + if (error.response) { + return false; + } + if (!error.code) { + return false; + } + // Prevents retrying timed out & cancelled requests + if (CODE_EXCLUDE_LIST.includes(error.code)) { + return false; + } + // Prevents retrying unsafe errors + return (0, is_retry_allowed_1.default)(error); +} +exports.isNetworkError = isNetworkError; +const SAFE_HTTP_METHODS = ['get', 'head', 'options']; +const IDEMPOTENT_HTTP_METHODS = SAFE_HTTP_METHODS.concat(['put', 'delete']); +function isRetryableError(error) { + return (error.code !== 'ECONNABORTED' && + (!error.response || + error.response.status === 429 || + (error.response.status >= 500 && error.response.status <= 599))); +} +exports.isRetryableError = isRetryableError; +function isSafeRequestError(error) { + var _a; + if (!((_a = error.config) === null || _a === void 0 ? void 0 : _a.method)) { + // Cannot determine if the request can be retried + return false; + } + return isRetryableError(error) && SAFE_HTTP_METHODS.indexOf(error.config.method) !== -1; +} +exports.isSafeRequestError = isSafeRequestError; +function isIdempotentRequestError(error) { + var _a; + if (!((_a = error.config) === null || _a === void 0 ? void 0 : _a.method)) { + // Cannot determine if the request can be retried + return false; + } + return isRetryableError(error) && IDEMPOTENT_HTTP_METHODS.indexOf(error.config.method) !== -1; +} +exports.isIdempotentRequestError = isIdempotentRequestError; +function isNetworkOrIdempotentRequestError(error) { + return isNetworkError(error) || isIdempotentRequestError(error); +} +exports.isNetworkOrIdempotentRequestError = isNetworkOrIdempotentRequestError; +function retryAfter(error = undefined) { + var _a; + const retryAfterHeader = (_a = error === null || error === void 0 ? void 0 : error.response) === null || _a === void 0 ? 
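// [Editor's note: illustrative aside, not part of the upstream patch.] The retryAfter() helper,
// whose header lookup continues below, accepts both forms of the Retry-After response header and
// normalises them to milliseconds, clamped at zero:
//   'Retry-After: 120'                           -> 120000
//   'Retry-After: Wed, 21 Oct 2025 07:28:00 GMT' -> ms remaining until that date (0 if past)
// The retryDelay helpers treat this value as a lower bound, so server-requested back-off is honored.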
void 0 : _a.headers['retry-after']; + if (!retryAfterHeader) { + return 0; + } + // if the retry after header is a number, convert it to milliseconds + let retryAfterMs = (Number(retryAfterHeader) || 0) * 1000; + // If the retry after header is a date, get the number of milliseconds until that date + if (retryAfterMs === 0) { + retryAfterMs = (new Date(retryAfterHeader).valueOf() || 0) - Date.now(); + } + return Math.max(0, retryAfterMs); +} +exports.retryAfter = retryAfter; +function noDelay(_retryNumber = 0, error = undefined) { + return Math.max(0, retryAfter(error)); +} +function exponentialDelay(retryNumber = 0, error = undefined, delayFactor = 100) { + const calculatedDelay = Math.pow(2, retryNumber) * delayFactor; + const delay = Math.max(calculatedDelay, retryAfter(error)); + const randomSum = delay * 0.2 * Math.random(); // 0-20% of the delay + return delay + randomSum; +} +exports.exponentialDelay = exponentialDelay; +exports.DEFAULT_OPTIONS = { + retries: 3, + retryCondition: isNetworkOrIdempotentRequestError, + retryDelay: noDelay, + shouldResetTimeout: false, + onRetry: () => { }, + onMaxRetryTimesExceeded: () => { }, + validateResponse: null +}; +function getRequestOptions(config, defaultOptions) { + return Object.assign(Object.assign(Object.assign({}, exports.DEFAULT_OPTIONS), defaultOptions), config[exports.namespace]); +} +function setCurrentState(config, defaultOptions, resetLastRequestTime = false) { + const currentState = getRequestOptions(config, defaultOptions || {}); + currentState.retryCount = currentState.retryCount || 0; + if (!currentState.lastRequestTime || resetLastRequestTime) { + currentState.lastRequestTime = Date.now(); + } + config[exports.namespace] = currentState; + return currentState; +} +function fixConfig(axiosInstance, config) { + // @ts-ignore + if (axiosInstance.defaults.agent === config.agent) { + // @ts-ignore + delete config.agent; + } + if (axiosInstance.defaults.httpAgent === config.httpAgent) { + delete config.httpAgent; + } + if (axiosInstance.defaults.httpsAgent === config.httpsAgent) { + delete config.httpsAgent; + } +} +function shouldRetry(currentState, error) { + return __awaiter(this, void 0, void 0, function* () { + const { retries, retryCondition } = currentState; + const shouldRetryOrPromise = (currentState.retryCount || 0) < retries && retryCondition(error); + // This could be a promise + if (typeof shouldRetryOrPromise === 'object') { + try { + const shouldRetryPromiseResult = yield shouldRetryOrPromise; + // keep return true unless shouldRetryPromiseResult return false for compatibility + return shouldRetryPromiseResult !== false; + } + catch (_err) { + return false; + } + } + return shouldRetryOrPromise; + }); +} +function handleRetry(axiosInstance, currentState, error, config) { + var _a; + return __awaiter(this, void 0, void 0, function* () { + currentState.retryCount += 1; + const { retryDelay, shouldResetTimeout, onRetry } = currentState; + const delay = retryDelay(currentState.retryCount, error); + // Axios fails merging this configuration to the default configuration because it has an issue + // with circular structures: https://github.com/mzabriskie/axios/issues/370 + fixConfig(axiosInstance, config); + if (!shouldResetTimeout && config.timeout && currentState.lastRequestTime) { + const lastRequestDuration = Date.now() - currentState.lastRequestTime; + const timeout = config.timeout - lastRequestDuration - delay; + if (timeout <= 0) { + return Promise.reject(error); + } + config.timeout = timeout; + } + 
config.transformRequest = [(data) => data]; + yield onRetry(currentState.retryCount, error, config); + if ((_a = config.signal) === null || _a === void 0 ? void 0 : _a.aborted) { + return Promise.resolve(axiosInstance(config)); + } + return new Promise((resolve) => { + var _a; + const abortListener = () => { + clearTimeout(timeout); + resolve(axiosInstance(config)); + }; + const timeout = setTimeout(() => { + var _a; + resolve(axiosInstance(config)); + if ((_a = config.signal) === null || _a === void 0 ? void 0 : _a.removeEventListener) { + config.signal.removeEventListener('abort', abortListener); + } + }, delay); + if ((_a = config.signal) === null || _a === void 0 ? void 0 : _a.addEventListener) { + config.signal.addEventListener('abort', abortListener, { once: true }); + } + }); + }); +} +function handleMaxRetryTimesExceeded(currentState, error) { + return __awaiter(this, void 0, void 0, function* () { + if (currentState.retryCount >= currentState.retries) + yield currentState.onMaxRetryTimesExceeded(error, currentState.retryCount); + }); +} +const axiosRetry = (axiosInstance, defaultOptions) => { + const requestInterceptorId = axiosInstance.interceptors.request.use((config) => { + var _a; + setCurrentState(config, defaultOptions, true); + if ((_a = config[exports.namespace]) === null || _a === void 0 ? void 0 : _a.validateResponse) { + // by setting this, all HTTP responses will be go through the error interceptor first + config.validateStatus = () => false; + } + return config; + }); + const responseInterceptorId = axiosInstance.interceptors.response.use(null, (error) => __awaiter(void 0, void 0, void 0, function* () { + var _a; + const { config } = error; + // If we have no information to retry the request + if (!config) { + return Promise.reject(error); + } + const currentState = setCurrentState(config, defaultOptions); + if (error.response && ((_a = currentState.validateResponse) === null || _a === void 0 ? void 0 : _a.call(currentState, error.response))) { + // no issue with response + return error.response; + } + if (yield shouldRetry(currentState, error)) { + return handleRetry(axiosInstance, currentState, error, config); + } + yield handleMaxRetryTimesExceeded(currentState, error); + return Promise.reject(error); + })); + return { requestInterceptorId, responseInterceptorId }; +}; +// Compatibility with CommonJS +axiosRetry.isNetworkError = isNetworkError; +axiosRetry.isSafeRequestError = isSafeRequestError; +axiosRetry.isIdempotentRequestError = isIdempotentRequestError; +axiosRetry.isNetworkOrIdempotentRequestError = isNetworkOrIdempotentRequestError; +axiosRetry.exponentialDelay = exponentialDelay; +axiosRetry.isRetryableError = isRetryableError; +exports["default"] = axiosRetry; + + +/***/ }), + +/***/ 8757: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; +// Axios v1.7.2 Copyright (c) 2024 Matt Zabriskie and contributors + + +const FormData$1 = __nccwpck_require__(1403); +const url = __nccwpck_require__(7310); +const proxyFromEnv = __nccwpck_require__(3329); +const http = __nccwpck_require__(3685); +const https = __nccwpck_require__(5687); +const util = __nccwpck_require__(3837); +const followRedirects = __nccwpck_require__(7707); +const zlib = __nccwpck_require__(9796); +const stream = __nccwpck_require__(2781); +const events = __nccwpck_require__(2361); + +function _interopDefaultLegacy (e) { return e && typeof e === 'object' && 'default' in e ? 
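// [Editor's note: illustrative aside, not part of the upstream patch.] The axios-retry module
// that closes just above is consumed by installing its interceptors on an axios instance; the
// conventional call shape is:
//   axiosRetry(axios, { retries: 3, retryDelay: axiosRetry.exponentialDelay });
// By default only network errors and idempotent requests are retried
// (DEFAULT_OPTIONS.retryCondition = isNetworkOrIdempotentRequestError).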
e : { 'default': e }; } + +const FormData__default = /*#__PURE__*/_interopDefaultLegacy(FormData$1); +const url__default = /*#__PURE__*/_interopDefaultLegacy(url); +const http__default = /*#__PURE__*/_interopDefaultLegacy(http); +const https__default = /*#__PURE__*/_interopDefaultLegacy(https); +const util__default = /*#__PURE__*/_interopDefaultLegacy(util); +const followRedirects__default = /*#__PURE__*/_interopDefaultLegacy(followRedirects); +const zlib__default = /*#__PURE__*/_interopDefaultLegacy(zlib); +const stream__default = /*#__PURE__*/_interopDefaultLegacy(stream); + +function bind(fn, thisArg) { + return function wrap() { + return fn.apply(thisArg, arguments); + }; +} + +// utils is a library of generic helper functions non-specific to axios + +const {toString} = Object.prototype; +const {getPrototypeOf} = Object; + +const kindOf = (cache => thing => { + const str = toString.call(thing); + return cache[str] || (cache[str] = str.slice(8, -1).toLowerCase()); +})(Object.create(null)); + +const kindOfTest = (type) => { + type = type.toLowerCase(); + return (thing) => kindOf(thing) === type +}; + +const typeOfTest = type => thing => typeof thing === type; + +/** + * Determine if a value is an Array + * + * @param {Object} val The value to test + * + * @returns {boolean} True if value is an Array, otherwise false + */ +const {isArray} = Array; + +/** + * Determine if a value is undefined + * + * @param {*} val The value to test + * + * @returns {boolean} True if the value is undefined, otherwise false + */ +const isUndefined = typeOfTest('undefined'); + +/** + * Determine if a value is a Buffer + * + * @param {*} val The value to test + * + * @returns {boolean} True if value is a Buffer, otherwise false + */ +function isBuffer(val) { + return val !== null && !isUndefined(val) && val.constructor !== null && !isUndefined(val.constructor) + && isFunction(val.constructor.isBuffer) && val.constructor.isBuffer(val); +} + +/** + * Determine if a value is an ArrayBuffer + * + * @param {*} val The value to test + * + * @returns {boolean} True if value is an ArrayBuffer, otherwise false + */ +const isArrayBuffer = kindOfTest('ArrayBuffer'); + + +/** + * Determine if a value is a view on an ArrayBuffer + * + * @param {*} val The value to test + * + * @returns {boolean} True if value is a view on an ArrayBuffer, otherwise false + */ +function isArrayBufferView(val) { + let result; + if ((typeof ArrayBuffer !== 'undefined') && (ArrayBuffer.isView)) { + result = ArrayBuffer.isView(val); + } else { + result = (val) && (val.buffer) && (isArrayBuffer(val.buffer)); + } + return result; +} + +/** + * Determine if a value is a String + * + * @param {*} val The value to test + * + * @returns {boolean} True if value is a String, otherwise false + */ +const isString = typeOfTest('string'); + +/** + * Determine if a value is a Function + * + * @param {*} val The value to test + * @returns {boolean} True if value is a Function, otherwise false + */ +const isFunction = typeOfTest('function'); + +/** + * Determine if a value is a Number + * + * @param {*} val The value to test + * + * @returns {boolean} True if value is a Number, otherwise false + */ +const isNumber = typeOfTest('number'); + +/** + * Determine if a value is an Object + * + * @param {*} thing The value to test + * + * @returns {boolean} True if value is an Object, otherwise false + */ +const isObject = (thing) => thing !== null && typeof thing === 'object'; + +/** + * Determine if a value is a Boolean + * + * @param {*} thing The value to test + 
* @returns {boolean} True if value is a Boolean, otherwise false + */ +const isBoolean = thing => thing === true || thing === false; + +/** + * Determine if a value is a plain Object + * + * @param {*} val The value to test + * + * @returns {boolean} True if value is a plain Object, otherwise false + */ +const isPlainObject = (val) => { + if (kindOf(val) !== 'object') { + return false; + } + + const prototype = getPrototypeOf(val); + return (prototype === null || prototype === Object.prototype || Object.getPrototypeOf(prototype) === null) && !(Symbol.toStringTag in val) && !(Symbol.iterator in val); +}; + +/** + * Determine if a value is a Date + * + * @param {*} val The value to test + * + * @returns {boolean} True if value is a Date, otherwise false + */ +const isDate = kindOfTest('Date'); + +/** + * Determine if a value is a File + * + * @param {*} val The value to test + * + * @returns {boolean} True if value is a File, otherwise false + */ +const isFile = kindOfTest('File'); + +/** + * Determine if a value is a Blob + * + * @param {*} val The value to test + * + * @returns {boolean} True if value is a Blob, otherwise false + */ +const isBlob = kindOfTest('Blob'); + +/** + * Determine if a value is a FileList + * + * @param {*} val The value to test + * + * @returns {boolean} True if value is a File, otherwise false + */ +const isFileList = kindOfTest('FileList'); + +/** + * Determine if a value is a Stream + * + * @param {*} val The value to test + * + * @returns {boolean} True if value is a Stream, otherwise false + */ +const isStream = (val) => isObject(val) && isFunction(val.pipe); + +/** + * Determine if a value is a FormData + * + * @param {*} thing The value to test + * + * @returns {boolean} True if value is an FormData, otherwise false + */ +const isFormData = (thing) => { + let kind; + return thing && ( + (typeof FormData === 'function' && thing instanceof FormData) || ( + isFunction(thing.append) && ( + (kind = kindOf(thing)) === 'formdata' || + // detect form-data instance + (kind === 'object' && isFunction(thing.toString) && thing.toString() === '[object FormData]') + ) + ) + ) +}; + +/** + * Determine if a value is a URLSearchParams object + * + * @param {*} val The value to test + * + * @returns {boolean} True if value is a URLSearchParams object, otherwise false + */ +const isURLSearchParams = kindOfTest('URLSearchParams'); + +const [isReadableStream, isRequest, isResponse, isHeaders] = ['ReadableStream', 'Request', 'Response', 'Headers'].map(kindOfTest); + +/** + * Trim excess whitespace off the beginning and end of a string + * + * @param {String} str The String to trim + * + * @returns {String} The String freed of excess whitespace + */ +const trim = (str) => str.trim ? + str.trim() : str.replace(/^[\s\uFEFF\xA0]+|[\s\uFEFF\xA0]+$/g, ''); + +/** + * Iterate over an Array or an Object invoking a function for each item. + * + * If `obj` is an Array callback will be called passing + * the value, index, and complete array for each item. + * + * If 'obj' is an Object callback will be called passing + * the value, key, and complete object for each property. 
+ * + * @param {Object|Array} obj The object to iterate + * @param {Function} fn The callback to invoke for each item + * + * @param {Boolean} [allOwnKeys = false] + * @returns {any} + */ +function forEach(obj, fn, {allOwnKeys = false} = {}) { + // Don't bother if no value provided + if (obj === null || typeof obj === 'undefined') { + return; + } + + let i; + let l; + + // Force an array if not already something iterable + if (typeof obj !== 'object') { + /*eslint no-param-reassign:0*/ + obj = [obj]; + } + + if (isArray(obj)) { + // Iterate over array values + for (i = 0, l = obj.length; i < l; i++) { + fn.call(null, obj[i], i, obj); + } + } else { + // Iterate over object keys + const keys = allOwnKeys ? Object.getOwnPropertyNames(obj) : Object.keys(obj); + const len = keys.length; + let key; + + for (i = 0; i < len; i++) { + key = keys[i]; + fn.call(null, obj[key], key, obj); + } + } +} + +function findKey(obj, key) { + key = key.toLowerCase(); + const keys = Object.keys(obj); + let i = keys.length; + let _key; + while (i-- > 0) { + _key = keys[i]; + if (key === _key.toLowerCase()) { + return _key; + } + } + return null; +} + +const _global = (() => { + /*eslint no-undef:0*/ + if (typeof globalThis !== "undefined") return globalThis; + return typeof self !== "undefined" ? self : (typeof window !== 'undefined' ? window : global) +})(); + +const isContextDefined = (context) => !isUndefined(context) && context !== _global; + +/** + * Accepts varargs expecting each argument to be an object, then + * immutably merges the properties of each object and returns result. + * + * When multiple objects contain the same key the later object in + * the arguments list will take precedence. + * + * Example: + * + * ```js + * var result = merge({foo: 123}, {foo: 456}); + * console.log(result.foo); // outputs 456 + * ``` + * + * @param {Object} obj1 Object to merge + * + * @returns {Object} Result of all merge properties + */ +function merge(/* obj1, obj2, obj3, ... */) { + const {caseless} = isContextDefined(this) && this || {}; + const result = {}; + const assignValue = (val, key) => { + const targetKey = caseless && findKey(result, key) || key; + if (isPlainObject(result[targetKey]) && isPlainObject(val)) { + result[targetKey] = merge(result[targetKey], val); + } else if (isPlainObject(val)) { + result[targetKey] = merge({}, val); + } else if (isArray(val)) { + result[targetKey] = val.slice(); + } else { + result[targetKey] = val; + } + }; + + for (let i = 0, l = arguments.length; i < l; i++) { + arguments[i] && forEach(arguments[i], assignValue); + } + return result; +} + +/** + * Extends object a by mutably adding to it the properties of object b. + * + * @param {Object} a The object to be extended + * @param {Object} b The object to copy properties from + * @param {Object} thisArg The object to bind function to + * + * @param {Boolean} [allOwnKeys] + * @returns {Object} The resulting value of object a + */ +const extend = (a, b, thisArg, {allOwnKeys}= {}) => { + forEach(b, (val, key) => { + if (thisArg && isFunction(val)) { + a[key] = bind(val, thisArg); + } else { + a[key] = val; + } + }, {allOwnKeys}); + return a; +}; + +/** + * Remove byte order marker. 
This catches EF BB BF (the UTF-8 BOM) + * + * @param {string} content with BOM + * + * @returns {string} content value without BOM + */ +const stripBOM = (content) => { + if (content.charCodeAt(0) === 0xFEFF) { + content = content.slice(1); + } + return content; +}; + +/** + * Inherit the prototype methods from one constructor into another + * @param {function} constructor + * @param {function} superConstructor + * @param {object} [props] + * @param {object} [descriptors] + * + * @returns {void} + */ +const inherits = (constructor, superConstructor, props, descriptors) => { + constructor.prototype = Object.create(superConstructor.prototype, descriptors); + constructor.prototype.constructor = constructor; + Object.defineProperty(constructor, 'super', { + value: superConstructor.prototype + }); + props && Object.assign(constructor.prototype, props); +}; + +/** + * Resolve object with deep prototype chain to a flat object + * @param {Object} sourceObj source object + * @param {Object} [destObj] + * @param {Function|Boolean} [filter] + * @param {Function} [propFilter] + * + * @returns {Object} + */ +const toFlatObject = (sourceObj, destObj, filter, propFilter) => { + let props; + let i; + let prop; + const merged = {}; + + destObj = destObj || {}; + // eslint-disable-next-line no-eq-null,eqeqeq + if (sourceObj == null) return destObj; + + do { + props = Object.getOwnPropertyNames(sourceObj); + i = props.length; + while (i-- > 0) { + prop = props[i]; + if ((!propFilter || propFilter(prop, sourceObj, destObj)) && !merged[prop]) { + destObj[prop] = sourceObj[prop]; + merged[prop] = true; + } + } + sourceObj = filter !== false && getPrototypeOf(sourceObj); + } while (sourceObj && (!filter || filter(sourceObj, destObj)) && sourceObj !== Object.prototype); + + return destObj; +}; + +/** + * Determines whether a string ends with the characters of a specified string + * + * @param {String} str + * @param {String} searchString + * @param {Number} [position= 0] + * + * @returns {boolean} + */ +const endsWith = (str, searchString, position) => { + str = String(str); + if (position === undefined || position > str.length) { + position = str.length; + } + position -= searchString.length; + const lastIndex = str.indexOf(searchString, position); + return lastIndex !== -1 && lastIndex === position; +}; + + +/** + * Returns new array from array like object or null if failed + * + * @param {*} [thing] + * + * @returns {?Array} + */ +const toArray = (thing) => { + if (!thing) return null; + if (isArray(thing)) return thing; + let i = thing.length; + if (!isNumber(i)) return null; + const arr = new Array(i); + while (i-- > 0) { + arr[i] = thing[i]; + } + return arr; +}; + +/** + * Checking if the Uint8Array exists and if it does, it returns a function that checks if the + * thing passed in is an instance of Uint8Array + * + * @param {TypedArray} + * + * @returns {Array} + */ +// eslint-disable-next-line func-names +const isTypedArray = (TypedArray => { + // eslint-disable-next-line func-names + return thing => { + return TypedArray && thing instanceof TypedArray; + }; +})(typeof Uint8Array !== 'undefined' && getPrototypeOf(Uint8Array)); + +/** + * For each entry in the object, call the function with the key and value. + * + * @param {Object} obj - The object to iterate over. + * @param {Function} fn - The function to call for each entry. 
+ * + * @returns {void} + */ +const forEachEntry = (obj, fn) => { + const generator = obj && obj[Symbol.iterator]; + + const iterator = generator.call(obj); + + let result; + + while ((result = iterator.next()) && !result.done) { + const pair = result.value; + fn.call(obj, pair[0], pair[1]); + } +}; + +/** + * It takes a regular expression and a string, and returns an array of all the matches + * + * @param {string} regExp - The regular expression to match against. + * @param {string} str - The string to search. + * + * @returns {Array} + */ +const matchAll = (regExp, str) => { + let matches; + const arr = []; + + while ((matches = regExp.exec(str)) !== null) { + arr.push(matches); + } + + return arr; +}; + +/* Checking if the kindOfTest function returns true when passed an HTMLFormElement. */ +const isHTMLForm = kindOfTest('HTMLFormElement'); + +const toCamelCase = str => { + return str.toLowerCase().replace(/[-_\s]([a-z\d])(\w*)/g, + function replacer(m, p1, p2) { + return p1.toUpperCase() + p2; + } + ); +}; + +/* Creating a function that will check if an object has a property. */ +const hasOwnProperty = (({hasOwnProperty}) => (obj, prop) => hasOwnProperty.call(obj, prop))(Object.prototype); + +/** + * Determine if a value is a RegExp object + * + * @param {*} val The value to test + * + * @returns {boolean} True if value is a RegExp object, otherwise false + */ +const isRegExp = kindOfTest('RegExp'); + +const reduceDescriptors = (obj, reducer) => { + const descriptors = Object.getOwnPropertyDescriptors(obj); + const reducedDescriptors = {}; + + forEach(descriptors, (descriptor, name) => { + let ret; + if ((ret = reducer(descriptor, name, obj)) !== false) { + reducedDescriptors[name] = ret || descriptor; + } + }); + + Object.defineProperties(obj, reducedDescriptors); +}; + +/** + * Makes all methods read-only + * @param {Object} obj + */ + +const freezeMethods = (obj) => { + reduceDescriptors(obj, (descriptor, name) => { + // skip restricted props in strict mode + if (isFunction(obj) && ['arguments', 'caller', 'callee'].indexOf(name) !== -1) { + return false; + } + + const value = obj[name]; + + if (!isFunction(value)) return; + + descriptor.enumerable = false; + + if ('writable' in descriptor) { + descriptor.writable = false; + return; + } + + if (!descriptor.set) { + descriptor.set = () => { + throw Error('Can not rewrite read-only method \'' + name + '\''); + }; + } + }); +}; + +const toObjectSet = (arrayOrString, delimiter) => { + const obj = {}; + + const define = (arr) => { + arr.forEach(value => { + obj[value] = true; + }); + }; + + isArray(arrayOrString) ? define(arrayOrString) : define(String(arrayOrString).split(delimiter)); + + return obj; +}; + +const noop = () => {}; + +const toFiniteNumber = (value, defaultValue) => { + return value != null && Number.isFinite(value = +value) ? value : defaultValue; +}; + +const ALPHA = 'abcdefghijklmnopqrstuvwxyz'; + +const DIGIT = '0123456789'; + +const ALPHABET = { + DIGIT, + ALPHA, + ALPHA_DIGIT: ALPHA + ALPHA.toUpperCase() + DIGIT +}; + +const generateString = (size = 16, alphabet = ALPHABET.ALPHA_DIGIT) => { + let str = ''; + const {length} = alphabet; + while (size--) { + str += alphabet[Math.random() * length|0]; + } + + return str; +}; + +/** + * If the thing is a FormData object, return true, otherwise return false. + * + * @param {unknown} thing - The thing to check. 
+ * + * @returns {boolean} + */ +function isSpecCompliantForm(thing) { + return !!(thing && isFunction(thing.append) && thing[Symbol.toStringTag] === 'FormData' && thing[Symbol.iterator]); +} + +const toJSONObject = (obj) => { + const stack = new Array(10); + + const visit = (source, i) => { + + if (isObject(source)) { + if (stack.indexOf(source) >= 0) { + return; + } + + if(!('toJSON' in source)) { + stack[i] = source; + const target = isArray(source) ? [] : {}; + + forEach(source, (value, key) => { + const reducedValue = visit(value, i + 1); + !isUndefined(reducedValue) && (target[key] = reducedValue); + }); + + stack[i] = undefined; + + return target; + } + } + + return source; + }; + + return visit(obj, 0); +}; + +const isAsyncFn = kindOfTest('AsyncFunction'); + +const isThenable = (thing) => + thing && (isObject(thing) || isFunction(thing)) && isFunction(thing.then) && isFunction(thing.catch); + +const utils$1 = { + isArray, + isArrayBuffer, + isBuffer, + isFormData, + isArrayBufferView, + isString, + isNumber, + isBoolean, + isObject, + isPlainObject, + isReadableStream, + isRequest, + isResponse, + isHeaders, + isUndefined, + isDate, + isFile, + isBlob, + isRegExp, + isFunction, + isStream, + isURLSearchParams, + isTypedArray, + isFileList, + forEach, + merge, + extend, + trim, + stripBOM, + inherits, + toFlatObject, + kindOf, + kindOfTest, + endsWith, + toArray, + forEachEntry, + matchAll, + isHTMLForm, + hasOwnProperty, + hasOwnProp: hasOwnProperty, // an alias to avoid ESLint no-prototype-builtins detection + reduceDescriptors, + freezeMethods, + toObjectSet, + toCamelCase, + noop, + toFiniteNumber, + findKey, + global: _global, + isContextDefined, + ALPHABET, + generateString, + isSpecCompliantForm, + toJSONObject, + isAsyncFn, + isThenable +}; + +/** + * Create an Error with the specified message, config, error code, request and response. + * + * @param {string} message The error message. + * @param {string} [code] The error code (for example, 'ECONNABORTED'). + * @param {Object} [config] The config. + * @param {Object} [request] The request. + * @param {Object} [response] The response. + * + * @returns {Error} The created error. + */ +function AxiosError(message, code, config, request, response) { + Error.call(this); + + if (Error.captureStackTrace) { + Error.captureStackTrace(this, this.constructor); + } else { + this.stack = (new Error()).stack; + } + + this.message = message; + this.name = 'AxiosError'; + code && (this.code = code); + config && (this.config = config); + request && (this.request = request); + response && (this.response = response); +} + +utils$1.inherits(AxiosError, Error, { + toJSON: function toJSON() { + return { + // Standard + message: this.message, + name: this.name, + // Microsoft + description: this.description, + number: this.number, + // Mozilla + fileName: this.fileName, + lineNumber: this.lineNumber, + columnNumber: this.columnNumber, + stack: this.stack, + // Axios + config: utils$1.toJSONObject(this.config), + code: this.code, + status: this.response && this.response.status ? 
this.response.status : null + }; + } +}); + +const prototype$1 = AxiosError.prototype; +const descriptors = {}; + +[ + 'ERR_BAD_OPTION_VALUE', + 'ERR_BAD_OPTION', + 'ECONNABORTED', + 'ETIMEDOUT', + 'ERR_NETWORK', + 'ERR_FR_TOO_MANY_REDIRECTS', + 'ERR_DEPRECATED', + 'ERR_BAD_RESPONSE', + 'ERR_BAD_REQUEST', + 'ERR_CANCELED', + 'ERR_NOT_SUPPORT', + 'ERR_INVALID_URL' +// eslint-disable-next-line func-names +].forEach(code => { + descriptors[code] = {value: code}; +}); + +Object.defineProperties(AxiosError, descriptors); +Object.defineProperty(prototype$1, 'isAxiosError', {value: true}); + +// eslint-disable-next-line func-names +AxiosError.from = (error, code, config, request, response, customProps) => { + const axiosError = Object.create(prototype$1); + + utils$1.toFlatObject(error, axiosError, function filter(obj) { + return obj !== Error.prototype; + }, prop => { + return prop !== 'isAxiosError'; + }); + + AxiosError.call(axiosError, error.message, code, config, request, response); + + axiosError.cause = error; + + axiosError.name = error.name; + + customProps && Object.assign(axiosError, customProps); + + return axiosError; +}; + +/** + * Determines if the given thing is a array or js object. + * + * @param {string} thing - The object or array to be visited. + * + * @returns {boolean} + */ +function isVisitable(thing) { + return utils$1.isPlainObject(thing) || utils$1.isArray(thing); +} + +/** + * It removes the brackets from the end of a string + * + * @param {string} key - The key of the parameter. + * + * @returns {string} the key without the brackets. + */ +function removeBrackets(key) { + return utils$1.endsWith(key, '[]') ? key.slice(0, -2) : key; +} + +/** + * It takes a path, a key, and a boolean, and returns a string + * + * @param {string} path - The path to the current key. + * @param {string} key - The key of the current object being iterated over. + * @param {string} dots - If true, the key will be rendered with dots instead of brackets. + * + * @returns {string} The path to the current key. + */ +function renderKey(path, key, dots) { + if (!path) return key; + return path.concat(key).map(function each(token, i) { + // eslint-disable-next-line no-param-reassign + token = removeBrackets(token); + return !dots && i ? '[' + token + ']' : token; + }).join(dots ? '.' : ''); +} + +/** + * If the array is an array and none of its elements are visitable, then it's a flat array. + * + * @param {Array} arr - The array to check + * + * @returns {boolean} + */ +function isFlatArray(arr) { + return utils$1.isArray(arr) && !arr.some(isVisitable); +} + +const predicates = utils$1.toFlatObject(utils$1, {}, null, function filter(prop) { + return /^is[A-Z]/.test(prop); +}); + +/** + * Convert a data object to FormData + * + * @param {Object} obj + * @param {?Object} [formData] + * @param {?Object} [options] + * @param {Function} [options.visitor] + * @param {Boolean} [options.metaTokens = true] + * @param {Boolean} [options.dots = false] + * @param {?Boolean} [options.indexes = false] + * + * @returns {Object} + **/ + +/** + * It converts an object into a FormData object + * + * @param {Object} obj - The object to convert to form data. + * @param {string} formData - The FormData object to append to. 
+ * @param {Object} options + * + * @returns + */ +function toFormData(obj, formData, options) { + if (!utils$1.isObject(obj)) { + throw new TypeError('target must be an object'); + } + + // eslint-disable-next-line no-param-reassign + formData = formData || new (FormData__default["default"] || FormData)(); + + // eslint-disable-next-line no-param-reassign + options = utils$1.toFlatObject(options, { + metaTokens: true, + dots: false, + indexes: false + }, false, function defined(option, source) { + // eslint-disable-next-line no-eq-null,eqeqeq + return !utils$1.isUndefined(source[option]); + }); + + const metaTokens = options.metaTokens; + // eslint-disable-next-line no-use-before-define + const visitor = options.visitor || defaultVisitor; + const dots = options.dots; + const indexes = options.indexes; + const _Blob = options.Blob || typeof Blob !== 'undefined' && Blob; + const useBlob = _Blob && utils$1.isSpecCompliantForm(formData); + + if (!utils$1.isFunction(visitor)) { + throw new TypeError('visitor must be a function'); + } + + function convertValue(value) { + if (value === null) return ''; + + if (utils$1.isDate(value)) { + return value.toISOString(); + } + + if (!useBlob && utils$1.isBlob(value)) { + throw new AxiosError('Blob is not supported. Use a Buffer instead.'); + } + + if (utils$1.isArrayBuffer(value) || utils$1.isTypedArray(value)) { + return useBlob && typeof Blob === 'function' ? new Blob([value]) : Buffer.from(value); + } + + return value; + } + + /** + * Default visitor. + * + * @param {*} value + * @param {String|Number} key + * @param {Array} path + * @this {FormData} + * + * @returns {boolean} return true to visit the each prop of the value recursively + */ + function defaultVisitor(value, key, path) { + let arr = value; + + if (value && !path && typeof value === 'object') { + if (utils$1.endsWith(key, '{}')) { + // eslint-disable-next-line no-param-reassign + key = metaTokens ? key : key.slice(0, -2); + // eslint-disable-next-line no-param-reassign + value = JSON.stringify(value); + } else if ( + (utils$1.isArray(value) && isFlatArray(value)) || + ((utils$1.isFileList(value) || utils$1.endsWith(key, '[]')) && (arr = utils$1.toArray(value)) + )) { + // eslint-disable-next-line no-param-reassign + key = removeBrackets(key); + + arr.forEach(function each(el, index) { + !(utils$1.isUndefined(el) || el === null) && formData.append( + // eslint-disable-next-line no-nested-ternary + indexes === true ? renderKey([key], index, dots) : (indexes === null ? key : key + '[]'), + convertValue(el) + ); + }); + return false; + } + } + + if (isVisitable(value)) { + return true; + } + + formData.append(renderKey(path, key, dots), convertValue(value)); + + return false; + } + + const stack = []; + + const exposedHelpers = Object.assign(predicates, { + defaultVisitor, + convertValue, + isVisitable + }); + + function build(value, path) { + if (utils$1.isUndefined(value)) return; + + if (stack.indexOf(value) !== -1) { + throw Error('Circular reference detected in ' + path.join('.')); + } + + stack.push(value); + + utils$1.forEach(value, function each(el, key) { + const result = !(utils$1.isUndefined(el) || el === null) && visitor.call( + formData, el, utils$1.isString(key) ? key.trim() : key, path, exposedHelpers + ); + + if (result === true) { + build(el, path ? 
path.concat(key) : [key]); + } + }); + + stack.pop(); + } + + if (!utils$1.isObject(obj)) { + throw new TypeError('data must be an object'); + } + + build(obj); + + return formData; +} + +/** + * It encodes a string by replacing all characters that are not in the unreserved set with + * their percent-encoded equivalents + * + * @param {string} str - The string to encode. + * + * @returns {string} The encoded string. + */ +function encode$1(str) { + const charMap = { + '!': '%21', + "'": '%27', + '(': '%28', + ')': '%29', + '~': '%7E', + '%20': '+', + '%00': '\x00' + }; + return encodeURIComponent(str).replace(/[!'()~]|%20|%00/g, function replacer(match) { + return charMap[match]; + }); +} + +/** + * It takes a params object and converts it to a FormData object + * + * @param {Object} params - The parameters to be converted to a FormData object. + * @param {Object} options - The options object passed to the Axios constructor. + * + * @returns {void} + */ +function AxiosURLSearchParams(params, options) { + this._pairs = []; + + params && toFormData(params, this, options); +} + +const prototype = AxiosURLSearchParams.prototype; + +prototype.append = function append(name, value) { + this._pairs.push([name, value]); +}; + +prototype.toString = function toString(encoder) { + const _encode = encoder ? function(value) { + return encoder.call(this, value, encode$1); + } : encode$1; + + return this._pairs.map(function each(pair) { + return _encode(pair[0]) + '=' + _encode(pair[1]); + }, '').join('&'); +}; + +/** + * It replaces all instances of the characters `:`, `$`, `,`, `+`, `[`, and `]` with their + * URI encoded counterparts + * + * @param {string} val The value to be encoded. + * + * @returns {string} The encoded value. + */ +function encode(val) { + return encodeURIComponent(val). + replace(/%3A/gi, ':'). + replace(/%24/g, '$'). + replace(/%2C/gi, ','). + replace(/%20/g, '+'). + replace(/%5B/gi, '['). + replace(/%5D/gi, ']'); +} + +/** + * Build a URL by appending params to the end + * + * @param {string} url The base of the url (e.g., http://www.google.com) + * @param {object} [params] The params to be appended + * @param {?object} options + * + * @returns {string} The formatted url + */ +function buildURL(url, params, options) { + /*eslint no-param-reassign:0*/ + if (!params) { + return url; + } + + const _encode = options && options.encode || encode; + + const serializeFn = options && options.serialize; + + let serializedParams; + + if (serializeFn) { + serializedParams = serializeFn(params, options); + } else { + serializedParams = utils$1.isURLSearchParams(params) ? + params.toString() : + new AxiosURLSearchParams(params, options).toString(_encode); + } + + if (serializedParams) { + const hashmarkIndex = url.indexOf("#"); + + if (hashmarkIndex !== -1) { + url = url.slice(0, hashmarkIndex); + } + url += (url.indexOf('?') === -1 ? '?' : '&') + serializedParams; + } + + return url; +} + +class InterceptorManager { + constructor() { + this.handlers = []; + } + + /** + * Add a new interceptor to the stack + * + * @param {Function} fulfilled The function to handle `then` for a `Promise` + * @param {Function} rejected The function to handle `reject` for a `Promise` + * + * @return {Number} An ID used to remove interceptor later + */ + use(fulfilled, rejected, options) { + this.handlers.push({ + fulfilled, + rejected, + synchronous: options ? options.synchronous : false, + runWhen: options ? 
options.runWhen : null + }); + return this.handlers.length - 1; + } + + /** + * Remove an interceptor from the stack + * + * @param {Number} id The ID that was returned by `use` + * + * @returns {Boolean} `true` if the interceptor was removed, `false` otherwise + */ + eject(id) { + if (this.handlers[id]) { + this.handlers[id] = null; + } + } + + /** + * Clear all interceptors from the stack + * + * @returns {void} + */ + clear() { + if (this.handlers) { + this.handlers = []; + } + } + + /** + * Iterate over all the registered interceptors + * + * This method is particularly useful for skipping over any + * interceptors that may have become `null` calling `eject`. + * + * @param {Function} fn The function to call for each interceptor + * + * @returns {void} + */ + forEach(fn) { + utils$1.forEach(this.handlers, function forEachHandler(h) { + if (h !== null) { + fn(h); + } + }); + } +} + +const InterceptorManager$1 = InterceptorManager; + +const transitionalDefaults = { + silentJSONParsing: true, + forcedJSONParsing: true, + clarifyTimeoutError: false +}; + +const URLSearchParams = url__default["default"].URLSearchParams; + +const platform$1 = { + isNode: true, + classes: { + URLSearchParams, + FormData: FormData__default["default"], + Blob: typeof Blob !== 'undefined' && Blob || null + }, + protocols: [ 'http', 'https', 'file', 'data' ] +}; + +const hasBrowserEnv = typeof window !== 'undefined' && typeof document !== 'undefined'; + +/** + * Determine if we're running in a standard browser environment + * + * This allows axios to run in a web worker, and react-native. + * Both environments support XMLHttpRequest, but not fully standard globals. + * + * web workers: + * typeof window -> undefined + * typeof document -> undefined + * + * react-native: + * navigator.product -> 'ReactNative' + * nativescript + * navigator.product -> 'NativeScript' or 'NS' + * + * @returns {boolean} + */ +const hasStandardBrowserEnv = ( + (product) => { + return hasBrowserEnv && ['ReactNative', 'NativeScript', 'NS'].indexOf(product) < 0 + })(typeof navigator !== 'undefined' && navigator.product); + +/** + * Determine if we're running in a standard browser webWorker environment + * + * Although the `isStandardBrowserEnv` method indicates that + * `allows axios to run in a web worker`, the WebWorker will still be + * filtered out due to its judgment standard + * `typeof window !== 'undefined' && typeof document !== 'undefined'`. 
+ * This leads to a problem when axios post `FormData` in webWorker + */ +const hasStandardBrowserWebWorkerEnv = (() => { + return ( + typeof WorkerGlobalScope !== 'undefined' && + // eslint-disable-next-line no-undef + self instanceof WorkerGlobalScope && + typeof self.importScripts === 'function' + ); +})(); + +const origin = hasBrowserEnv && window.location.href || 'http://localhost'; + +const utils = /*#__PURE__*/Object.freeze({ + __proto__: null, + hasBrowserEnv: hasBrowserEnv, + hasStandardBrowserWebWorkerEnv: hasStandardBrowserWebWorkerEnv, + hasStandardBrowserEnv: hasStandardBrowserEnv, + origin: origin +}); + +const platform = { + ...utils, + ...platform$1 +}; + +function toURLEncodedForm(data, options) { + return toFormData(data, new platform.classes.URLSearchParams(), Object.assign({ + visitor: function(value, key, path, helpers) { + if (platform.isNode && utils$1.isBuffer(value)) { + this.append(key, value.toString('base64')); + return false; + } + + return helpers.defaultVisitor.apply(this, arguments); + } + }, options)); +} + +/** + * It takes a string like `foo[x][y][z]` and returns an array like `['foo', 'x', 'y', 'z'] + * + * @param {string} name - The name of the property to get. + * + * @returns An array of strings. + */ +function parsePropPath(name) { + // foo[x][y][z] + // foo.x.y.z + // foo-x-y-z + // foo x y z + return utils$1.matchAll(/\w+|\[(\w*)]/g, name).map(match => { + return match[0] === '[]' ? '' : match[1] || match[0]; + }); +} + +/** + * Convert an array to an object. + * + * @param {Array} arr - The array to convert to an object. + * + * @returns An object with the same keys and values as the array. + */ +function arrayToObject(arr) { + const obj = {}; + const keys = Object.keys(arr); + let i; + const len = keys.length; + let key; + for (i = 0; i < len; i++) { + key = keys[i]; + obj[key] = arr[key]; + } + return obj; +} + +/** + * It takes a FormData object and returns a JavaScript object + * + * @param {string} formData The FormData object to convert to JSON. + * + * @returns {Object | null} The converted object. + */ +function formDataToJSON(formData) { + function buildPath(path, value, target, index) { + let name = path[index++]; + + if (name === '__proto__') return true; + + const isNumericKey = Number.isFinite(+name); + const isLast = index >= path.length; + name = !name && utils$1.isArray(target) ? target.length : name; + + if (isLast) { + if (utils$1.hasOwnProp(target, name)) { + target[name] = [target[name], value]; + } else { + target[name] = value; + } + + return !isNumericKey; + } + + if (!target[name] || !utils$1.isObject(target[name])) { + target[name] = []; + } + + const result = buildPath(path, value, target[name], index); + + if (result && utils$1.isArray(target[name])) { + target[name] = arrayToObject(target[name]); + } + + return !isNumericKey; + } + + if (utils$1.isFormData(formData) && utils$1.isFunction(formData.entries)) { + const obj = {}; + + utils$1.forEachEntry(formData, (name, value) => { + buildPath(parsePropPath(name), value, obj, 0); + }); + + return obj; + } + + return null; +} + +/** + * It takes a string, tries to parse it, and if it fails, it returns the stringified version + * of the input + * + * @param {any} rawValue - The value to be stringified. + * @param {Function} parser - A function that parses a string into a JavaScript object. + * @param {Function} encoder - A function that takes a value and returns a string. + * + * @returns {string} A stringified version of the rawValue. 
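+ *
+ * Editor's note (illustrative sketch, not part of the upstream axios source):
+ *   stringifySafely('{"a":1}')  // already valid JSON -> returned trimmed, unchanged
+ *   stringifySafely('hello')    // JSON.parse throws SyntaxError -> re-encoded as '"hello"'
+ *   stringifySafely({a: 1})     // non-string input -> JSON.stringify -> '{"a":1}'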
+ */ +function stringifySafely(rawValue, parser, encoder) { + if (utils$1.isString(rawValue)) { + try { + (parser || JSON.parse)(rawValue); + return utils$1.trim(rawValue); + } catch (e) { + if (e.name !== 'SyntaxError') { + throw e; + } + } + } + + return (encoder || JSON.stringify)(rawValue); +} + +const defaults = { + + transitional: transitionalDefaults, + + adapter: ['xhr', 'http', 'fetch'], + + transformRequest: [function transformRequest(data, headers) { + const contentType = headers.getContentType() || ''; + const hasJSONContentType = contentType.indexOf('application/json') > -1; + const isObjectPayload = utils$1.isObject(data); + + if (isObjectPayload && utils$1.isHTMLForm(data)) { + data = new FormData(data); + } + + const isFormData = utils$1.isFormData(data); + + if (isFormData) { + return hasJSONContentType ? JSON.stringify(formDataToJSON(data)) : data; + } + + if (utils$1.isArrayBuffer(data) || + utils$1.isBuffer(data) || + utils$1.isStream(data) || + utils$1.isFile(data) || + utils$1.isBlob(data) || + utils$1.isReadableStream(data) + ) { + return data; + } + if (utils$1.isArrayBufferView(data)) { + return data.buffer; + } + if (utils$1.isURLSearchParams(data)) { + headers.setContentType('application/x-www-form-urlencoded;charset=utf-8', false); + return data.toString(); + } + + let isFileList; + + if (isObjectPayload) { + if (contentType.indexOf('application/x-www-form-urlencoded') > -1) { + return toURLEncodedForm(data, this.formSerializer).toString(); + } + + if ((isFileList = utils$1.isFileList(data)) || contentType.indexOf('multipart/form-data') > -1) { + const _FormData = this.env && this.env.FormData; + + return toFormData( + isFileList ? {'files[]': data} : data, + _FormData && new _FormData(), + this.formSerializer + ); + } + } + + if (isObjectPayload || hasJSONContentType ) { + headers.setContentType('application/json', false); + return stringifySafely(data); + } + + return data; + }], + + transformResponse: [function transformResponse(data) { + const transitional = this.transitional || defaults.transitional; + const forcedJSONParsing = transitional && transitional.forcedJSONParsing; + const JSONRequested = this.responseType === 'json'; + + if (utils$1.isResponse(data) || utils$1.isReadableStream(data)) { + return data; + } + + if (data && utils$1.isString(data) && ((forcedJSONParsing && !this.responseType) || JSONRequested)) { + const silentJSONParsing = transitional && transitional.silentJSONParsing; + const strictJSONParsing = !silentJSONParsing && JSONRequested; + + try { + return JSON.parse(data); + } catch (e) { + if (strictJSONParsing) { + if (e.name === 'SyntaxError') { + throw AxiosError.from(e, AxiosError.ERR_BAD_RESPONSE, this, null, this.response); + } + throw e; + } + } + } + + return data; + }], + + /** + * A timeout in milliseconds to abort a request. If set to 0 (default) a + * timeout is not created. + */ + timeout: 0, + + xsrfCookieName: 'XSRF-TOKEN', + xsrfHeaderName: 'X-XSRF-TOKEN', + + maxContentLength: -1, + maxBodyLength: -1, + + env: { + FormData: platform.classes.FormData, + Blob: platform.classes.Blob + }, + + validateStatus: function validateStatus(status) { + return status >= 200 && status < 300; + }, + + headers: { + common: { + 'Accept': 'application/json, text/plain, */*', + 'Content-Type': undefined + } + } +}; + +utils$1.forEach(['delete', 'get', 'head', 'post', 'put', 'patch'], (method) => { + defaults.headers[method] = {}; +}); + +const defaults$1 = defaults; + +// RawAxiosHeaders whose duplicates are ignored by node +// c.f. 
https://nodejs.org/api/http.html#http_message_headers +const ignoreDuplicateOf = utils$1.toObjectSet([ + 'age', 'authorization', 'content-length', 'content-type', 'etag', + 'expires', 'from', 'host', 'if-modified-since', 'if-unmodified-since', + 'last-modified', 'location', 'max-forwards', 'proxy-authorization', + 'referer', 'retry-after', 'user-agent' +]); + +/** + * Parse headers into an object + * + * ``` + * Date: Wed, 27 Aug 2014 08:58:49 GMT + * Content-Type: application/json + * Connection: keep-alive + * Transfer-Encoding: chunked + * ``` + * + * @param {String} rawHeaders Headers needing to be parsed + * + * @returns {Object} Headers parsed into an object + */ +const parseHeaders = rawHeaders => { + const parsed = {}; + let key; + let val; + let i; + + rawHeaders && rawHeaders.split('\n').forEach(function parser(line) { + i = line.indexOf(':'); + key = line.substring(0, i).trim().toLowerCase(); + val = line.substring(i + 1).trim(); + + if (!key || (parsed[key] && ignoreDuplicateOf[key])) { + return; + } + + if (key === 'set-cookie') { + if (parsed[key]) { + parsed[key].push(val); + } else { + parsed[key] = [val]; + } + } else { + parsed[key] = parsed[key] ? parsed[key] + ', ' + val : val; + } + }); + + return parsed; +}; + +const $internals = Symbol('internals'); + +function normalizeHeader(header) { + return header && String(header).trim().toLowerCase(); +} + +function normalizeValue(value) { + if (value === false || value == null) { + return value; + } + + return utils$1.isArray(value) ? value.map(normalizeValue) : String(value); +} + +function parseTokens(str) { + const tokens = Object.create(null); + const tokensRE = /([^\s,;=]+)\s*(?:=\s*([^,;]+))?/g; + let match; + + while ((match = tokensRE.exec(str))) { + tokens[match[1]] = match[2]; + } + + return tokens; +} + +const isValidHeaderName = (str) => /^[-_a-zA-Z0-9^`|~,!#$%&'*+.]+$/.test(str.trim()); + +function matchHeaderValue(context, value, header, filter, isHeaderNameFilter) { + if (utils$1.isFunction(filter)) { + return filter.call(this, value, header); + } + + if (isHeaderNameFilter) { + value = header; + } + + if (!utils$1.isString(value)) return; + + if (utils$1.isString(filter)) { + return value.indexOf(filter) !== -1; + } + + if (utils$1.isRegExp(filter)) { + return filter.test(value); + } +} + +function formatHeader(header) { + return header.trim() + .toLowerCase().replace(/([a-z\d])(\w*)/g, (w, char, str) => { + return char.toUpperCase() + str; + }); +} + +function buildAccessors(obj, header) { + const accessorName = utils$1.toCamelCase(' ' + header); + + ['get', 'set', 'has'].forEach(methodName => { + Object.defineProperty(obj, methodName + accessorName, { + value: function(arg1, arg2, arg3) { + return this[methodName].call(this, header, arg1, arg2, arg3); + }, + configurable: true + }); + }); +} + +class AxiosHeaders { + constructor(headers) { + headers && this.set(headers); + } + + set(header, valueOrRewrite, rewrite) { + const self = this; + + function setHeader(_value, _header, _rewrite) { + const lHeader = normalizeHeader(_header); + + if (!lHeader) { + throw new Error('header name must be a non-empty string'); + } + + const key = utils$1.findKey(self, lHeader); + + if(!key || self[key] === undefined || _rewrite === true || (_rewrite === undefined && self[key] !== false)) { + self[key || _header] = normalizeValue(_value); + } + } + + const setHeaders = (headers, _rewrite) => + utils$1.forEach(headers, (_value, _header) => setHeader(_value, _header, _rewrite)); + + if (utils$1.isPlainObject(header) || header 
instanceof this.constructor) { + setHeaders(header, valueOrRewrite); + } else if(utils$1.isString(header) && (header = header.trim()) && !isValidHeaderName(header)) { + setHeaders(parseHeaders(header), valueOrRewrite); + } else if (utils$1.isHeaders(header)) { + for (const [key, value] of header.entries()) { + setHeader(value, key, rewrite); + } + } else { + header != null && setHeader(valueOrRewrite, header, rewrite); + } + + return this; + } + + get(header, parser) { + header = normalizeHeader(header); + + if (header) { + const key = utils$1.findKey(this, header); + + if (key) { + const value = this[key]; + + if (!parser) { + return value; + } + + if (parser === true) { + return parseTokens(value); + } + + if (utils$1.isFunction(parser)) { + return parser.call(this, value, key); + } + + if (utils$1.isRegExp(parser)) { + return parser.exec(value); + } + + throw new TypeError('parser must be boolean|regexp|function'); + } + } + } + + has(header, matcher) { + header = normalizeHeader(header); + + if (header) { + const key = utils$1.findKey(this, header); + + return !!(key && this[key] !== undefined && (!matcher || matchHeaderValue(this, this[key], key, matcher))); + } + + return false; + } + + delete(header, matcher) { + const self = this; + let deleted = false; + + function deleteHeader(_header) { + _header = normalizeHeader(_header); + + if (_header) { + const key = utils$1.findKey(self, _header); + + if (key && (!matcher || matchHeaderValue(self, self[key], key, matcher))) { + delete self[key]; + + deleted = true; + } + } + } + + if (utils$1.isArray(header)) { + header.forEach(deleteHeader); + } else { + deleteHeader(header); + } + + return deleted; + } + + clear(matcher) { + const keys = Object.keys(this); + let i = keys.length; + let deleted = false; + + while (i--) { + const key = keys[i]; + if(!matcher || matchHeaderValue(this, this[key], key, matcher, true)) { + delete this[key]; + deleted = true; + } + } + + return deleted; + } + + normalize(format) { + const self = this; + const headers = {}; + + utils$1.forEach(this, (value, header) => { + const key = utils$1.findKey(headers, header); + + if (key) { + self[key] = normalizeValue(value); + delete self[header]; + return; + } + + const normalized = format ? formatHeader(header) : String(header).trim(); + + if (normalized !== header) { + delete self[header]; + } + + self[normalized] = normalizeValue(value); + + headers[normalized] = true; + }); + + return this; + } + + concat(...targets) { + return this.constructor.concat(this, ...targets); + } + + toJSON(asStrings) { + const obj = Object.create(null); + + utils$1.forEach(this, (value, header) => { + value != null && value !== false && (obj[header] = asStrings && utils$1.isArray(value) ? value.join(', ') : value); + }); + + return obj; + } + + [Symbol.iterator]() { + return Object.entries(this.toJSON())[Symbol.iterator](); + } + + toString() { + return Object.entries(this.toJSON()).map(([header, value]) => header + ': ' + value).join('\n'); + } + + get [Symbol.toStringTag]() { + return 'AxiosHeaders'; + } + + static from(thing) { + return thing instanceof this ? 
thing : new this(thing); + } + + static concat(first, ...targets) { + const computed = new this(first); + + targets.forEach((target) => computed.set(target)); + + return computed; + } + + static accessor(header) { + const internals = this[$internals] = (this[$internals] = { + accessors: {} + }); + + const accessors = internals.accessors; + const prototype = this.prototype; + + function defineAccessor(_header) { + const lHeader = normalizeHeader(_header); + + if (!accessors[lHeader]) { + buildAccessors(prototype, _header); + accessors[lHeader] = true; + } + } + + utils$1.isArray(header) ? header.forEach(defineAccessor) : defineAccessor(header); + + return this; + } +} + +AxiosHeaders.accessor(['Content-Type', 'Content-Length', 'Accept', 'Accept-Encoding', 'User-Agent', 'Authorization']); + +// reserved names hotfix +utils$1.reduceDescriptors(AxiosHeaders.prototype, ({value}, key) => { + let mapped = key[0].toUpperCase() + key.slice(1); // map `set` => `Set` + return { + get: () => value, + set(headerValue) { + this[mapped] = headerValue; + } + } +}); + +utils$1.freezeMethods(AxiosHeaders); + +const AxiosHeaders$1 = AxiosHeaders; + +/** + * Transform the data for a request or a response + * + * @param {Array|Function} fns A single function or Array of functions + * @param {?Object} response The response object + * + * @returns {*} The resulting transformed data + */ +function transformData(fns, response) { + const config = this || defaults$1; + const context = response || config; + const headers = AxiosHeaders$1.from(context.headers); + let data = context.data; + + utils$1.forEach(fns, function transform(fn) { + data = fn.call(config, data, headers.normalize(), response ? response.status : undefined); + }); + + headers.normalize(); + + return data; +} + +function isCancel(value) { + return !!(value && value.__CANCEL__); +} + +/** + * A `CanceledError` is an object that is thrown when an operation is canceled. + * + * @param {string=} message The message. + * @param {Object=} config The config. + * @param {Object=} request The request. + * + * @returns {CanceledError} The created error. + */ +function CanceledError(message, config, request) { + // eslint-disable-next-line no-eq-null,eqeqeq + AxiosError.call(this, message == null ? 'canceled' : message, AxiosError.ERR_CANCELED, config, request); + this.name = 'CanceledError'; +} + +utils$1.inherits(CanceledError, AxiosError, { + __CANCEL__: true +}); + +/** + * Resolve or reject a Promise based on response status. + * + * @param {Function} resolve A function that resolves the promise. + * @param {Function} reject A function that rejects the promise. + * @param {object} response The response. + * + * @returns {object} The response. + */ +function settle(resolve, reject, response) { + const validateStatus = response.config.validateStatus; + if (!response.status || !validateStatus || validateStatus(response.status)) { + resolve(response); + } else { + reject(new AxiosError( + 'Request failed with status code ' + response.status, + [AxiosError.ERR_BAD_REQUEST, AxiosError.ERR_BAD_RESPONSE][Math.floor(response.status / 100) - 4], + response.config, + response.request, + response + )); + } +} + +/** + * Determines whether the specified URL is absolute + * + * @param {string} url The URL to test + * + * @returns {boolean} True if the specified URL is absolute, otherwise false + */ +function isAbsoluteURL(url) { + // A URL is considered absolute if it begins with "://" or "//" (protocol-relative URL). 
+ // RFC 3986 defines scheme name as a sequence of characters beginning with a letter and followed + // by any combination of letters, digits, plus, period, or hyphen. + return /^([a-z][a-z\d+\-.]*:)?\/\//i.test(url); +} + +/** + * Creates a new URL by combining the specified URLs + * + * @param {string} baseURL The base URL + * @param {string} relativeURL The relative URL + * + * @returns {string} The combined URL + */ +function combineURLs(baseURL, relativeURL) { + return relativeURL + ? baseURL.replace(/\/?\/$/, '') + '/' + relativeURL.replace(/^\/+/, '') + : baseURL; +} + +/** + * Creates a new URL by combining the baseURL with the requestedURL, + * only when the requestedURL is not already an absolute URL. + * If the requestURL is absolute, this function returns the requestedURL untouched. + * + * @param {string} baseURL The base URL + * @param {string} requestedURL Absolute or relative URL to combine + * + * @returns {string} The combined full path + */ +function buildFullPath(baseURL, requestedURL) { + if (baseURL && !isAbsoluteURL(requestedURL)) { + return combineURLs(baseURL, requestedURL); + } + return requestedURL; +} + +const VERSION = "1.7.2"; + +function parseProtocol(url) { + const match = /^([-+\w]{1,25})(:?\/\/|:)/.exec(url); + return match && match[1] || ''; +} + +const DATA_URL_PATTERN = /^(?:([^;]+);)?(?:[^;]+;)?(base64|),([\s\S]*)$/; + +/** + * Parse data uri to a Buffer or Blob + * + * @param {String} uri + * @param {?Boolean} asBlob + * @param {?Object} options + * @param {?Function} options.Blob + * + * @returns {Buffer|Blob} + */ +function fromDataURI(uri, asBlob, options) { + const _Blob = options && options.Blob || platform.classes.Blob; + const protocol = parseProtocol(uri); + + if (asBlob === undefined && _Blob) { + asBlob = true; + } + + if (protocol === 'data') { + uri = protocol.length ? uri.slice(protocol.length + 1) : uri; + + const match = DATA_URL_PATTERN.exec(uri); + + if (!match) { + throw new AxiosError('Invalid URL', AxiosError.ERR_INVALID_URL); + } + + const mime = match[1]; + const isBase64 = match[2]; + const body = match[3]; + const buffer = Buffer.from(decodeURIComponent(body), isBase64 ? 'base64' : 'utf8'); + + if (asBlob) { + if (!_Blob) { + throw new AxiosError('Blob is not supported', AxiosError.ERR_NOT_SUPPORT); + } + + return new _Blob([buffer], {type: mime}); + } + + return buffer; + } + + throw new AxiosError('Unsupported protocol ' + protocol, AxiosError.ERR_NOT_SUPPORT); +} + +/** + * Throttle decorator + * @param {Function} fn + * @param {Number} freq + * @return {Function} + */ +function throttle(fn, freq) { + let timestamp = 0; + const threshold = 1000 / freq; + let timer = null; + return function throttled() { + const force = this === true; + + const now = Date.now(); + if (force || now - timestamp > threshold) { + if (timer) { + clearTimeout(timer); + timer = null; + } + timestamp = now; + return fn.apply(null, arguments); + } + if (!timer) { + timer = setTimeout(() => { + timer = null; + timestamp = Date.now(); + return fn.apply(null, arguments); + }, threshold - (now - timestamp)); + } + }; +} + +/** + * Calculate data maxRate + * @param {Number} [samplesCount= 10] + * @param {Number} [min= 1000] + * @returns {Function} + */ +function speedometer(samplesCount, min) { + samplesCount = samplesCount || 10; + const bytes = new Array(samplesCount); + const timestamps = new Array(samplesCount); + let head = 0; + let tail = 0; + let firstSampleTS; + + min = min !== undefined ? 
min : 1000; + + return function push(chunkLength) { + const now = Date.now(); + + const startedAt = timestamps[tail]; + + if (!firstSampleTS) { + firstSampleTS = now; + } + + bytes[head] = chunkLength; + timestamps[head] = now; + + let i = tail; + let bytesCount = 0; + + while (i !== head) { + bytesCount += bytes[i++]; + i = i % samplesCount; + } + + head = (head + 1) % samplesCount; + + if (head === tail) { + tail = (tail + 1) % samplesCount; + } + + if (now - firstSampleTS < min) { + return; + } + + const passed = startedAt && now - startedAt; + + return passed ? Math.round(bytesCount * 1000 / passed) : undefined; + }; +} + +const kInternals = Symbol('internals'); + +class AxiosTransformStream extends stream__default["default"].Transform{ + constructor(options) { + options = utils$1.toFlatObject(options, { + maxRate: 0, + chunkSize: 64 * 1024, + minChunkSize: 100, + timeWindow: 500, + ticksRate: 2, + samplesCount: 15 + }, null, (prop, source) => { + return !utils$1.isUndefined(source[prop]); + }); + + super({ + readableHighWaterMark: options.chunkSize + }); + + const self = this; + + const internals = this[kInternals] = { + length: options.length, + timeWindow: options.timeWindow, + ticksRate: options.ticksRate, + chunkSize: options.chunkSize, + maxRate: options.maxRate, + minChunkSize: options.minChunkSize, + bytesSeen: 0, + isCaptured: false, + notifiedBytesLoaded: 0, + ts: Date.now(), + bytes: 0, + onReadCallback: null + }; + + const _speedometer = speedometer(internals.ticksRate * options.samplesCount, internals.timeWindow); + + this.on('newListener', event => { + if (event === 'progress') { + if (!internals.isCaptured) { + internals.isCaptured = true; + } + } + }); + + let bytesNotified = 0; + + internals.updateProgress = throttle(function throttledHandler() { + const totalBytes = internals.length; + const bytesTransferred = internals.bytesSeen; + const progressBytes = bytesTransferred - bytesNotified; + if (!progressBytes || self.destroyed) return; + + const rate = _speedometer(progressBytes); + + bytesNotified = bytesTransferred; + + process.nextTick(() => { + self.emit('progress', { + loaded: bytesTransferred, + total: totalBytes, + progress: totalBytes ? (bytesTransferred / totalBytes) : undefined, + bytes: progressBytes, + rate: rate ? rate : undefined, + estimated: rate && totalBytes && bytesTransferred <= totalBytes ? + (totalBytes - bytesTransferred) / rate : undefined, + lengthComputable: totalBytes != null + }); + }); + }, internals.ticksRate); + + const onFinish = () => { + internals.updateProgress.call(true); + }; + + this.once('end', onFinish); + this.once('error', onFinish); + } + + _read(size) { + const internals = this[kInternals]; + + if (internals.onReadCallback) { + internals.onReadCallback(); + } + + return super._read(size); + } + + _transform(chunk, encoding, callback) { + const self = this; + const internals = this[kInternals]; + const maxRate = internals.maxRate; + + const readableHighWaterMark = this.readableHighWaterMark; + + const timeWindow = internals.timeWindow; + + const divider = 1000 / timeWindow; + const bytesThreshold = (maxRate / divider); + const minChunkSize = internals.minChunkSize !== false ? 
Math.max(internals.minChunkSize, bytesThreshold * 0.01) : 0; + + function pushChunk(_chunk, _callback) { + const bytes = Buffer.byteLength(_chunk); + internals.bytesSeen += bytes; + internals.bytes += bytes; + + if (internals.isCaptured) { + internals.updateProgress(); + } + + if (self.push(_chunk)) { + process.nextTick(_callback); + } else { + internals.onReadCallback = () => { + internals.onReadCallback = null; + process.nextTick(_callback); + }; + } + } + + const transformChunk = (_chunk, _callback) => { + const chunkSize = Buffer.byteLength(_chunk); + let chunkRemainder = null; + let maxChunkSize = readableHighWaterMark; + let bytesLeft; + let passed = 0; + + if (maxRate) { + const now = Date.now(); + + if (!internals.ts || (passed = (now - internals.ts)) >= timeWindow) { + internals.ts = now; + bytesLeft = bytesThreshold - internals.bytes; + internals.bytes = bytesLeft < 0 ? -bytesLeft : 0; + passed = 0; + } + + bytesLeft = bytesThreshold - internals.bytes; + } + + if (maxRate) { + if (bytesLeft <= 0) { + // next time window + return setTimeout(() => { + _callback(null, _chunk); + }, timeWindow - passed); + } + + if (bytesLeft < maxChunkSize) { + maxChunkSize = bytesLeft; + } + } + + if (maxChunkSize && chunkSize > maxChunkSize && (chunkSize - maxChunkSize) > minChunkSize) { + chunkRemainder = _chunk.subarray(maxChunkSize); + _chunk = _chunk.subarray(0, maxChunkSize); + } + + pushChunk(_chunk, chunkRemainder ? () => { + process.nextTick(_callback, null, chunkRemainder); + } : _callback); + }; + + transformChunk(chunk, function transformNextChunk(err, _chunk) { + if (err) { + return callback(err); + } + + if (_chunk) { + transformChunk(_chunk, transformNextChunk); + } else { + callback(null); + } + }); + } + + setLength(length) { + this[kInternals].length = +length; + return this; + } +} + +const AxiosTransformStream$1 = AxiosTransformStream; + +const {asyncIterator} = Symbol; + +const readBlob = async function* (blob) { + if (blob.stream) { + yield* blob.stream(); + } else if (blob.arrayBuffer) { + yield await blob.arrayBuffer(); + } else if (blob[asyncIterator]) { + yield* blob[asyncIterator](); + } else { + yield blob; + } +}; + +const readBlob$1 = readBlob; + +const BOUNDARY_ALPHABET = utils$1.ALPHABET.ALPHA_DIGIT + '-_'; + +const textEncoder = new util.TextEncoder(); + +const CRLF = '\r\n'; +const CRLF_BYTES = textEncoder.encode(CRLF); +const CRLF_BYTES_COUNT = 2; + +class FormDataPart { + constructor(name, value) { + const {escapeName} = this.constructor; + const isStringValue = utils$1.isString(value); + + let headers = `Content-Disposition: form-data; name="${escapeName(name)}"${ + !isStringValue && value.name ? `; filename="${escapeName(value.name)}"` : '' + }${CRLF}`; + + if (isStringValue) { + value = textEncoder.encode(String(value).replace(/\r?\n|\r\n?/g, CRLF)); + } else { + headers += `Content-Type: ${value.type || "application/octet-stream"}${CRLF}`; + } + + this.headers = textEncoder.encode(headers + CRLF); + + this.contentLength = isStringValue ? 
value.byteLength : value.size; + + this.size = this.headers.byteLength + this.contentLength + CRLF_BYTES_COUNT; + + this.name = name; + this.value = value; + } + + async *encode(){ + yield this.headers; + + const {value} = this; + + if(utils$1.isTypedArray(value)) { + yield value; + } else { + yield* readBlob$1(value); + } + + yield CRLF_BYTES; + } + + static escapeName(name) { + return String(name).replace(/[\r\n"]/g, (match) => ({ + '\r' : '%0D', + '\n' : '%0A', + '"' : '%22', + }[match])); + } +} + +const formDataToStream = (form, headersHandler, options) => { + const { + tag = 'form-data-boundary', + size = 25, + boundary = tag + '-' + utils$1.generateString(size, BOUNDARY_ALPHABET) + } = options || {}; + + if(!utils$1.isFormData(form)) { + throw TypeError('FormData instance required'); + } + + if (boundary.length < 1 || boundary.length > 70) { + throw Error('boundary must be 10-70 characters long') + } + + const boundaryBytes = textEncoder.encode('--' + boundary + CRLF); + const footerBytes = textEncoder.encode('--' + boundary + '--' + CRLF + CRLF); + let contentLength = footerBytes.byteLength; + + const parts = Array.from(form.entries()).map(([name, value]) => { + const part = new FormDataPart(name, value); + contentLength += part.size; + return part; + }); + + contentLength += boundaryBytes.byteLength * parts.length; + + contentLength = utils$1.toFiniteNumber(contentLength); + + const computedHeaders = { + 'Content-Type': `multipart/form-data; boundary=${boundary}` + }; + + if (Number.isFinite(contentLength)) { + computedHeaders['Content-Length'] = contentLength; + } + + headersHandler && headersHandler(computedHeaders); + + return stream.Readable.from((async function *() { + for(const part of parts) { + yield boundaryBytes; + yield* part.encode(); + } + + yield footerBytes; + })()); +}; + +const formDataToStream$1 = formDataToStream; + +class ZlibHeaderTransformStream extends stream__default["default"].Transform { + __transform(chunk, encoding, callback) { + this.push(chunk); + callback(); + } + + _transform(chunk, encoding, callback) { + if (chunk.length !== 0) { + this._transform = this.__transform; + + // Add Default Compression headers if no zlib headers are present + if (chunk[0] !== 120) { // Hex: 78 + const header = Buffer.alloc(2); + header[0] = 120; // Hex: 78 + header[1] = 156; // Hex: 9C + this.push(header, encoding); + } + } + + this.__transform(chunk, encoding, callback); + } +} + +const ZlibHeaderTransformStream$1 = ZlibHeaderTransformStream; + +const callbackify = (fn, reducer) => { + return utils$1.isAsyncFn(fn) ? function (...args) { + const cb = args.pop(); + fn.apply(this, args).then((value) => { + try { + reducer ? 
cb(null, ...reducer(value)) : cb(null, value); + } catch (err) { + cb(err); + } + }, cb); + } : fn; +}; + +const callbackify$1 = callbackify; + +const zlibOptions = { + flush: zlib__default["default"].constants.Z_SYNC_FLUSH, + finishFlush: zlib__default["default"].constants.Z_SYNC_FLUSH +}; + +const brotliOptions = { + flush: zlib__default["default"].constants.BROTLI_OPERATION_FLUSH, + finishFlush: zlib__default["default"].constants.BROTLI_OPERATION_FLUSH +}; + +const isBrotliSupported = utils$1.isFunction(zlib__default["default"].createBrotliDecompress); + +const {http: httpFollow, https: httpsFollow} = followRedirects__default["default"]; + +const isHttps = /https:?/; + +const supportedProtocols = platform.protocols.map(protocol => { + return protocol + ':'; +}); + +/** + * If the proxy or config beforeRedirects functions are defined, call them with the options + * object. + * + * @param {Object} options - The options object that was passed to the request. + * + * @returns {Object} + */ +function dispatchBeforeRedirect(options, responseDetails) { + if (options.beforeRedirects.proxy) { + options.beforeRedirects.proxy(options); + } + if (options.beforeRedirects.config) { + options.beforeRedirects.config(options, responseDetails); + } +} + +/** + * If the proxy or config afterRedirects functions are defined, call them with the options + * + * @param {http.ClientRequestArgs} options + * @param {AxiosProxyConfig} configProxy configuration from Axios options object + * @param {string} location + * + * @returns {http.ClientRequestArgs} + */ +function setProxy(options, configProxy, location) { + let proxy = configProxy; + if (!proxy && proxy !== false) { + const proxyUrl = proxyFromEnv.getProxyForUrl(location); + if (proxyUrl) { + proxy = new URL(proxyUrl); + } + } + if (proxy) { + // Basic proxy authorization + if (proxy.username) { + proxy.auth = (proxy.username || '') + ':' + (proxy.password || ''); + } + + if (proxy.auth) { + // Support proxy auth object form + if (proxy.auth.username || proxy.auth.password) { + proxy.auth = (proxy.auth.username || '') + ':' + (proxy.auth.password || ''); + } + const base64 = Buffer + .from(proxy.auth, 'utf8') + .toString('base64'); + options.headers['Proxy-Authorization'] = 'Basic ' + base64; + } + + options.headers.host = options.hostname + (options.port ? ':' + options.port : ''); + const proxyHost = proxy.hostname || proxy.host; + options.hostname = proxyHost; + // Replace 'host' since options is not a URL object + options.host = proxyHost; + options.port = proxy.port; + options.path = location; + if (proxy.protocol) { + options.protocol = proxy.protocol.includes(':') ? proxy.protocol : `${proxy.protocol}:`; + } + } + + options.beforeRedirects.proxy = function beforeRedirect(redirectOptions) { + // Configure proxy for redirected request, passing the original config proxy to apply + // the exact same logic as if the redirected request was performed by axios directly. 
+ setProxy(redirectOptions, configProxy, redirectOptions.href); + }; +} + +const isHttpAdapterSupported = typeof process !== 'undefined' && utils$1.kindOf(process) === 'process'; + +// temporary hotfix + +const wrapAsync = (asyncExecutor) => { + return new Promise((resolve, reject) => { + let onDone; + let isDone; + + const done = (value, isRejected) => { + if (isDone) return; + isDone = true; + onDone && onDone(value, isRejected); + }; + + const _resolve = (value) => { + done(value); + resolve(value); + }; + + const _reject = (reason) => { + done(reason, true); + reject(reason); + }; + + asyncExecutor(_resolve, _reject, (onDoneHandler) => (onDone = onDoneHandler)).catch(_reject); + }) +}; + +const resolveFamily = ({address, family}) => { + if (!utils$1.isString(address)) { + throw TypeError('address must be a string'); + } + return ({ + address, + family: family || (address.indexOf('.') < 0 ? 6 : 4) + }); +}; + +const buildAddressEntry = (address, family) => resolveFamily(utils$1.isObject(address) ? address : {address, family}); + +/*eslint consistent-return:0*/ +const httpAdapter = isHttpAdapterSupported && function httpAdapter(config) { + return wrapAsync(async function dispatchHttpRequest(resolve, reject, onDone) { + let {data, lookup, family} = config; + const {responseType, responseEncoding} = config; + const method = config.method.toUpperCase(); + let isDone; + let rejected = false; + let req; + + if (lookup) { + const _lookup = callbackify$1(lookup, (value) => utils$1.isArray(value) ? value : [value]); + // hotfix to support opt.all option which is required for node 20.x + lookup = (hostname, opt, cb) => { + _lookup(hostname, opt, (err, arg0, arg1) => { + if (err) { + return cb(err); + } + + const addresses = utils$1.isArray(arg0) ? arg0.map(addr => buildAddressEntry(addr)) : [buildAddressEntry(arg0, arg1)]; + + opt.all ? cb(err, addresses) : cb(err, addresses[0].address, addresses[0].family); + }); + }; + } + + // temporary internal emitter until the AxiosRequest class will be implemented + const emitter = new events.EventEmitter(); + + const onFinished = () => { + if (config.cancelToken) { + config.cancelToken.unsubscribe(abort); + } + + if (config.signal) { + config.signal.removeEventListener('abort', abort); + } + + emitter.removeAllListeners(); + }; + + onDone((value, isRejected) => { + isDone = true; + if (isRejected) { + rejected = true; + onFinished(); + } + }); + + function abort(reason) { + emitter.emit('abort', !reason || reason.type ? new CanceledError(null, config, req) : reason); + } + + emitter.once('abort', reject); + + if (config.cancelToken || config.signal) { + config.cancelToken && config.cancelToken.subscribe(abort); + if (config.signal) { + config.signal.aborted ? 
abort() : config.signal.addEventListener('abort', abort); + } + } + + // Parse url + const fullPath = buildFullPath(config.baseURL, config.url); + const parsed = new URL(fullPath, 'http://localhost'); + const protocol = parsed.protocol || supportedProtocols[0]; + + if (protocol === 'data:') { + let convertedData; + + if (method !== 'GET') { + return settle(resolve, reject, { + status: 405, + statusText: 'method not allowed', + headers: {}, + config + }); + } + + try { + convertedData = fromDataURI(config.url, responseType === 'blob', { + Blob: config.env && config.env.Blob + }); + } catch (err) { + throw AxiosError.from(err, AxiosError.ERR_BAD_REQUEST, config); + } + + if (responseType === 'text') { + convertedData = convertedData.toString(responseEncoding); + + if (!responseEncoding || responseEncoding === 'utf8') { + convertedData = utils$1.stripBOM(convertedData); + } + } else if (responseType === 'stream') { + convertedData = stream__default["default"].Readable.from(convertedData); + } + + return settle(resolve, reject, { + data: convertedData, + status: 200, + statusText: 'OK', + headers: new AxiosHeaders$1(), + config + }); + } + + if (supportedProtocols.indexOf(protocol) === -1) { + return reject(new AxiosError( + 'Unsupported protocol ' + protocol, + AxiosError.ERR_BAD_REQUEST, + config + )); + } + + const headers = AxiosHeaders$1.from(config.headers).normalize(); + + // Set User-Agent (required by some servers) + // See https://github.com/axios/axios/issues/69 + // User-Agent is specified; handle case where no UA header is desired + // Only set header if it hasn't been set in config + headers.set('User-Agent', 'axios/' + VERSION, false); + + const onDownloadProgress = config.onDownloadProgress; + const onUploadProgress = config.onUploadProgress; + const maxRate = config.maxRate; + let maxUploadRate = undefined; + let maxDownloadRate = undefined; + + // support for spec compliant FormData objects + if (utils$1.isSpecCompliantForm(data)) { + const userBoundary = headers.getContentType(/boundary=([-_\w\d]{10,70})/i); + + data = formDataToStream$1(data, (formHeaders) => { + headers.set(formHeaders); + }, { + tag: `axios-${VERSION}-boundary`, + boundary: userBoundary && userBoundary[1] || undefined + }); + // support for https://www.npmjs.com/package/form-data api + } else if (utils$1.isFormData(data) && utils$1.isFunction(data.getHeaders)) { + headers.set(data.getHeaders()); + + if (!headers.hasContentLength()) { + try { + const knownLength = await util__default["default"].promisify(data.getLength).call(data); + Number.isFinite(knownLength) && knownLength >= 0 && headers.setContentLength(knownLength); + /*eslint no-empty:0*/ + } catch (e) { + } + } + } else if (utils$1.isBlob(data)) { + data.size && headers.setContentType(data.type || 'application/octet-stream'); + headers.setContentLength(data.size || 0); + data = stream__default["default"].Readable.from(readBlob$1(data)); + } else if (data && !utils$1.isStream(data)) { + if (Buffer.isBuffer(data)) ; else if (utils$1.isArrayBuffer(data)) { + data = Buffer.from(new Uint8Array(data)); + } else if (utils$1.isString(data)) { + data = Buffer.from(data, 'utf-8'); + } else { + return reject(new AxiosError( + 'Data after transformation must be a string, an ArrayBuffer, a Buffer, or a Stream', + AxiosError.ERR_BAD_REQUEST, + config + )); + } + + // Add Content-Length header if data exists + headers.setContentLength(data.length, false); + + if (config.maxBodyLength > -1 && data.length > config.maxBodyLength) { + return reject(new AxiosError( + 
'Request body larger than maxBodyLength limit', + AxiosError.ERR_BAD_REQUEST, + config + )); + } + } + + const contentLength = utils$1.toFiniteNumber(headers.getContentLength()); + + if (utils$1.isArray(maxRate)) { + maxUploadRate = maxRate[0]; + maxDownloadRate = maxRate[1]; + } else { + maxUploadRate = maxDownloadRate = maxRate; + } + + if (data && (onUploadProgress || maxUploadRate)) { + if (!utils$1.isStream(data)) { + data = stream__default["default"].Readable.from(data, {objectMode: false}); + } + + data = stream__default["default"].pipeline([data, new AxiosTransformStream$1({ + length: contentLength, + maxRate: utils$1.toFiniteNumber(maxUploadRate) + })], utils$1.noop); + + onUploadProgress && data.on('progress', progress => { + onUploadProgress(Object.assign(progress, { + upload: true + })); + }); + } + + // HTTP basic authentication + let auth = undefined; + if (config.auth) { + const username = config.auth.username || ''; + const password = config.auth.password || ''; + auth = username + ':' + password; + } + + if (!auth && parsed.username) { + const urlUsername = parsed.username; + const urlPassword = parsed.password; + auth = urlUsername + ':' + urlPassword; + } + + auth && headers.delete('authorization'); + + let path; + + try { + path = buildURL( + parsed.pathname + parsed.search, + config.params, + config.paramsSerializer + ).replace(/^\?/, ''); + } catch (err) { + const customErr = new Error(err.message); + customErr.config = config; + customErr.url = config.url; + customErr.exists = true; + return reject(customErr); + } + + headers.set( + 'Accept-Encoding', + 'gzip, compress, deflate' + (isBrotliSupported ? ', br' : ''), false + ); + + const options = { + path, + method: method, + headers: headers.toJSON(), + agents: { http: config.httpAgent, https: config.httpsAgent }, + auth, + protocol, + family, + beforeRedirect: dispatchBeforeRedirect, + beforeRedirects: {} + }; + + // cacheable-lookup integration hotfix + !utils$1.isUndefined(lookup) && (options.lookup = lookup); + + if (config.socketPath) { + options.socketPath = config.socketPath; + } else { + options.hostname = parsed.hostname; + options.port = parsed.port; + setProxy(options, config.proxy, protocol + '//' + parsed.hostname + (parsed.port ? ':' + parsed.port : '') + options.path); + } + + let transport; + const isHttpsRequest = isHttps.test(options.protocol); + options.agent = isHttpsRequest ? config.httpsAgent : config.httpAgent; + if (config.transport) { + transport = config.transport; + } else if (config.maxRedirects === 0) { + transport = isHttpsRequest ? https__default["default"] : http__default["default"]; + } else { + if (config.maxRedirects) { + options.maxRedirects = config.maxRedirects; + } + if (config.beforeRedirect) { + options.beforeRedirects.config = config.beforeRedirect; + } + transport = isHttpsRequest ? 
httpsFollow : httpFollow; + } + + if (config.maxBodyLength > -1) { + options.maxBodyLength = config.maxBodyLength; + } else { + // follow-redirects does not skip comparison, so it should always succeed for axios -1 unlimited + options.maxBodyLength = Infinity; + } + + if (config.insecureHTTPParser) { + options.insecureHTTPParser = config.insecureHTTPParser; + } + + // Create the request + req = transport.request(options, function handleResponse(res) { + if (req.destroyed) return; + + const streams = [res]; + + const responseLength = +res.headers['content-length']; + + if (onDownloadProgress) { + const transformStream = new AxiosTransformStream$1({ + length: utils$1.toFiniteNumber(responseLength), + maxRate: utils$1.toFiniteNumber(maxDownloadRate) + }); + + onDownloadProgress && transformStream.on('progress', progress => { + onDownloadProgress(Object.assign(progress, { + download: true + })); + }); + + streams.push(transformStream); + } + + // decompress the response body transparently if required + let responseStream = res; + + // return the last request in case of redirects + const lastRequest = res.req || req; + + // if decompress disabled we should not decompress + if (config.decompress !== false && res.headers['content-encoding']) { + // if no content, but headers still say that it is encoded, + // remove the header not confuse downstream operations + if (method === 'HEAD' || res.statusCode === 204) { + delete res.headers['content-encoding']; + } + + switch ((res.headers['content-encoding'] || '').toLowerCase()) { + /*eslint default-case:0*/ + case 'gzip': + case 'x-gzip': + case 'compress': + case 'x-compress': + // add the unzipper to the body stream processing pipeline + streams.push(zlib__default["default"].createUnzip(zlibOptions)); + + // remove the content-encoding in order to not confuse downstream operations + delete res.headers['content-encoding']; + break; + case 'deflate': + streams.push(new ZlibHeaderTransformStream$1()); + + // add the unzipper to the body stream processing pipeline + streams.push(zlib__default["default"].createUnzip(zlibOptions)); + + // remove the content-encoding in order to not confuse downstream operations + delete res.headers['content-encoding']; + break; + case 'br': + if (isBrotliSupported) { + streams.push(zlib__default["default"].createBrotliDecompress(brotliOptions)); + delete res.headers['content-encoding']; + } + } + } + + responseStream = streams.length > 1 ? 
stream__default["default"].pipeline(streams, utils$1.noop) : streams[0]; + + const offListeners = stream__default["default"].finished(responseStream, () => { + offListeners(); + onFinished(); + }); + + const response = { + status: res.statusCode, + statusText: res.statusMessage, + headers: new AxiosHeaders$1(res.headers), + config, + request: lastRequest + }; + + if (responseType === 'stream') { + response.data = responseStream; + settle(resolve, reject, response); + } else { + const responseBuffer = []; + let totalResponseBytes = 0; + + responseStream.on('data', function handleStreamData(chunk) { + responseBuffer.push(chunk); + totalResponseBytes += chunk.length; + + // make sure the content length is not over the maxContentLength if specified + if (config.maxContentLength > -1 && totalResponseBytes > config.maxContentLength) { + // stream.destroy() emit aborted event before calling reject() on Node.js v16 + rejected = true; + responseStream.destroy(); + reject(new AxiosError('maxContentLength size of ' + config.maxContentLength + ' exceeded', + AxiosError.ERR_BAD_RESPONSE, config, lastRequest)); + } + }); + + responseStream.on('aborted', function handlerStreamAborted() { + if (rejected) { + return; + } + + const err = new AxiosError( + 'maxContentLength size of ' + config.maxContentLength + ' exceeded', + AxiosError.ERR_BAD_RESPONSE, + config, + lastRequest + ); + responseStream.destroy(err); + reject(err); + }); + + responseStream.on('error', function handleStreamError(err) { + if (req.destroyed) return; + reject(AxiosError.from(err, null, config, lastRequest)); + }); + + responseStream.on('end', function handleStreamEnd() { + try { + let responseData = responseBuffer.length === 1 ? responseBuffer[0] : Buffer.concat(responseBuffer); + if (responseType !== 'arraybuffer') { + responseData = responseData.toString(responseEncoding); + if (!responseEncoding || responseEncoding === 'utf8') { + responseData = utils$1.stripBOM(responseData); + } + } + response.data = responseData; + } catch (err) { + return reject(AxiosError.from(err, null, config, response.request, response)); + } + settle(resolve, reject, response); + }); + } + + emitter.once('abort', err => { + if (!responseStream.destroyed) { + responseStream.emit('error', err); + responseStream.destroy(); + } + }); + }); + + emitter.once('abort', err => { + reject(err); + req.destroy(err); + }); + + // Handle errors + req.on('error', function handleRequestError(err) { + // @todo remove + // if (req.aborted && err.code !== AxiosError.ERR_FR_TOO_MANY_REDIRECTS) return; + reject(AxiosError.from(err, null, config, req)); + }); + + // set tcp keep alive to prevent drop connection by peer + req.on('socket', function handleRequestSocket(socket) { + // default interval of sending ack packet is 1 minute + socket.setKeepAlive(true, 1000 * 60); + }); + + // Handle request timeout + if (config.timeout) { + // This is forcing a int timeout to avoid problems if the `req` interface doesn't handle other types. + const timeout = parseInt(config.timeout, 10); + + if (Number.isNaN(timeout)) { + reject(new AxiosError( + 'error trying to parse `config.timeout` to int', + AxiosError.ERR_BAD_OPTION_VALUE, + config, + req + )); + + return; + } + + // Sometime, the response will be very slow, and does not respond, the connect event will be block by event loop system. + // And timer callback will be fired, and abort() will be invoked before connection, then get "socket hang up" and code ECONNRESET. 
+ // At this time, if we have a large number of request, nodejs will hang up some socket on background. and the number will up and up. + // And then these socket which be hang up will devouring CPU little by little. + // ClientRequest.setTimeout will be fired on the specify milliseconds, and can make sure that abort() will be fired after connect. + req.setTimeout(timeout, function handleRequestTimeout() { + if (isDone) return; + let timeoutErrorMessage = config.timeout ? 'timeout of ' + config.timeout + 'ms exceeded' : 'timeout exceeded'; + const transitional = config.transitional || transitionalDefaults; + if (config.timeoutErrorMessage) { + timeoutErrorMessage = config.timeoutErrorMessage; + } + reject(new AxiosError( + timeoutErrorMessage, + transitional.clarifyTimeoutError ? AxiosError.ETIMEDOUT : AxiosError.ECONNABORTED, + config, + req + )); + abort(); + }); + } + + + // Send the request + if (utils$1.isStream(data)) { + let ended = false; + let errored = false; + + data.on('end', () => { + ended = true; + }); + + data.once('error', err => { + errored = true; + req.destroy(err); + }); + + data.on('close', () => { + if (!ended && !errored) { + abort(new CanceledError('Request stream has been aborted', config, req)); + } + }); + + data.pipe(req); + } else { + req.end(data); + } + }); +}; + +const progressEventReducer = (listener, isDownloadStream, freq = 3) => { + let bytesNotified = 0; + const _speedometer = speedometer(50, 250); + + return throttle(e => { + const loaded = e.loaded; + const total = e.lengthComputable ? e.total : undefined; + const progressBytes = loaded - bytesNotified; + const rate = _speedometer(progressBytes); + const inRange = loaded <= total; + + bytesNotified = loaded; + + const data = { + loaded, + total, + progress: total ? (loaded / total) : undefined, + bytes: progressBytes, + rate: rate ? rate : undefined, + estimated: rate && total && inRange ? (total - loaded) / rate : undefined, + event: e, + lengthComputable: total != null + }; + + data[isDownloadStream ? 'download' : 'upload'] = true; + + listener(data); + }, freq); +}; + +const isURLSameOrigin = platform.hasStandardBrowserEnv ? + +// Standard browser envs have full support of the APIs needed to test +// whether the request URL is of the same origin as current location. + (function standardBrowserEnv() { + const msie = /(msie|trident)/i.test(navigator.userAgent); + const urlParsingNode = document.createElement('a'); + let originURL; + + /** + * Parse a URL to discover its components + * + * @param {String} url The URL to be parsed + * @returns {Object} + */ + function resolveURL(url) { + let href = url; + + if (msie) { + // IE needs attribute set twice to normalize properties + urlParsingNode.setAttribute('href', href); + href = urlParsingNode.href; + } + + urlParsingNode.setAttribute('href', href); + + // urlParsingNode provides the UrlUtils interface - http://url.spec.whatwg.org/#urlutils + return { + href: urlParsingNode.href, + protocol: urlParsingNode.protocol ? urlParsingNode.protocol.replace(/:$/, '') : '', + host: urlParsingNode.host, + search: urlParsingNode.search ? urlParsingNode.search.replace(/^\?/, '') : '', + hash: urlParsingNode.hash ? urlParsingNode.hash.replace(/^#/, '') : '', + hostname: urlParsingNode.hostname, + port: urlParsingNode.port, + pathname: (urlParsingNode.pathname.charAt(0) === '/') ? 
+ urlParsingNode.pathname : + '/' + urlParsingNode.pathname + }; + } + + originURL = resolveURL(window.location.href); + + /** + * Determine if a URL shares the same origin as the current location + * + * @param {String} requestURL The URL to test + * @returns {boolean} True if URL shares the same origin, otherwise false + */ + return function isURLSameOrigin(requestURL) { + const parsed = (utils$1.isString(requestURL)) ? resolveURL(requestURL) : requestURL; + return (parsed.protocol === originURL.protocol && + parsed.host === originURL.host); + }; + })() : + + // Non standard browser envs (web workers, react-native) lack needed support. + (function nonStandardBrowserEnv() { + return function isURLSameOrigin() { + return true; + }; + })(); + +const cookies = platform.hasStandardBrowserEnv ? + + // Standard browser envs support document.cookie + { + write(name, value, expires, path, domain, secure) { + const cookie = [name + '=' + encodeURIComponent(value)]; + + utils$1.isNumber(expires) && cookie.push('expires=' + new Date(expires).toGMTString()); + + utils$1.isString(path) && cookie.push('path=' + path); + + utils$1.isString(domain) && cookie.push('domain=' + domain); + + secure === true && cookie.push('secure'); + + document.cookie = cookie.join('; '); + }, + + read(name) { + const match = document.cookie.match(new RegExp('(^|;\\s*)(' + name + ')=([^;]*)')); + return (match ? decodeURIComponent(match[3]) : null); + }, + + remove(name) { + this.write(name, '', Date.now() - 86400000); + } + } + + : + + // Non-standard browser env (web workers, react-native) lack needed support. + { + write() {}, + read() { + return null; + }, + remove() {} + }; + +const headersToObject = (thing) => thing instanceof AxiosHeaders$1 ? { ...thing } : thing; + +/** + * Config-specific merge-function which creates a new config-object + * by merging two configuration objects together. 
+ * + * @param {Object} config1 + * @param {Object} config2 + * + * @returns {Object} New object resulting from merging config2 to config1 + */ +function mergeConfig(config1, config2) { + // eslint-disable-next-line no-param-reassign + config2 = config2 || {}; + const config = {}; + + function getMergedValue(target, source, caseless) { + if (utils$1.isPlainObject(target) && utils$1.isPlainObject(source)) { + return utils$1.merge.call({caseless}, target, source); + } else if (utils$1.isPlainObject(source)) { + return utils$1.merge({}, source); + } else if (utils$1.isArray(source)) { + return source.slice(); + } + return source; + } + + // eslint-disable-next-line consistent-return + function mergeDeepProperties(a, b, caseless) { + if (!utils$1.isUndefined(b)) { + return getMergedValue(a, b, caseless); + } else if (!utils$1.isUndefined(a)) { + return getMergedValue(undefined, a, caseless); + } + } + + // eslint-disable-next-line consistent-return + function valueFromConfig2(a, b) { + if (!utils$1.isUndefined(b)) { + return getMergedValue(undefined, b); + } + } + + // eslint-disable-next-line consistent-return + function defaultToConfig2(a, b) { + if (!utils$1.isUndefined(b)) { + return getMergedValue(undefined, b); + } else if (!utils$1.isUndefined(a)) { + return getMergedValue(undefined, a); + } + } + + // eslint-disable-next-line consistent-return + function mergeDirectKeys(a, b, prop) { + if (prop in config2) { + return getMergedValue(a, b); + } else if (prop in config1) { + return getMergedValue(undefined, a); + } + } + + const mergeMap = { + url: valueFromConfig2, + method: valueFromConfig2, + data: valueFromConfig2, + baseURL: defaultToConfig2, + transformRequest: defaultToConfig2, + transformResponse: defaultToConfig2, + paramsSerializer: defaultToConfig2, + timeout: defaultToConfig2, + timeoutMessage: defaultToConfig2, + withCredentials: defaultToConfig2, + withXSRFToken: defaultToConfig2, + adapter: defaultToConfig2, + responseType: defaultToConfig2, + xsrfCookieName: defaultToConfig2, + xsrfHeaderName: defaultToConfig2, + onUploadProgress: defaultToConfig2, + onDownloadProgress: defaultToConfig2, + decompress: defaultToConfig2, + maxContentLength: defaultToConfig2, + maxBodyLength: defaultToConfig2, + beforeRedirect: defaultToConfig2, + transport: defaultToConfig2, + httpAgent: defaultToConfig2, + httpsAgent: defaultToConfig2, + cancelToken: defaultToConfig2, + socketPath: defaultToConfig2, + responseEncoding: defaultToConfig2, + validateStatus: mergeDirectKeys, + headers: (a, b) => mergeDeepProperties(headersToObject(a), headersToObject(b), true) + }; + + utils$1.forEach(Object.keys(Object.assign({}, config1, config2)), function computeConfigValue(prop) { + const merge = mergeMap[prop] || mergeDeepProperties; + const configValue = merge(config1[prop], config2[prop], prop); + (utils$1.isUndefined(configValue) && merge !== mergeDirectKeys) || (config[prop] = configValue); + }); + + return config; +} + +const resolveConfig = (config) => { + const newConfig = mergeConfig({}, config); + + let {data, withXSRFToken, xsrfHeaderName, xsrfCookieName, headers, auth} = newConfig; + + newConfig.headers = headers = AxiosHeaders$1.from(headers); + + newConfig.url = buildURL(buildFullPath(newConfig.baseURL, newConfig.url), config.params, config.paramsSerializer); + + // HTTP basic authentication + if (auth) { + headers.set('Authorization', 'Basic ' + + btoa((auth.username || '') + ':' + (auth.password ? 
unescape(encodeURIComponent(auth.password)) : '')) + ); + } + + let contentType; + + if (utils$1.isFormData(data)) { + if (platform.hasStandardBrowserEnv || platform.hasStandardBrowserWebWorkerEnv) { + headers.setContentType(undefined); // Let the browser set it + } else if ((contentType = headers.getContentType()) !== false) { + // fix semicolon duplication issue for ReactNative FormData implementation + const [type, ...tokens] = contentType ? contentType.split(';').map(token => token.trim()).filter(Boolean) : []; + headers.setContentType([type || 'multipart/form-data', ...tokens].join('; ')); + } + } + + // Add xsrf header + // This is only done if running in a standard browser environment. + // Specifically not if we're in a web worker, or react-native. + + if (platform.hasStandardBrowserEnv) { + withXSRFToken && utils$1.isFunction(withXSRFToken) && (withXSRFToken = withXSRFToken(newConfig)); + + if (withXSRFToken || (withXSRFToken !== false && isURLSameOrigin(newConfig.url))) { + // Add xsrf header + const xsrfValue = xsrfHeaderName && xsrfCookieName && cookies.read(xsrfCookieName); + + if (xsrfValue) { + headers.set(xsrfHeaderName, xsrfValue); + } + } + } + + return newConfig; +}; + +const isXHRAdapterSupported = typeof XMLHttpRequest !== 'undefined'; + +const xhrAdapter = isXHRAdapterSupported && function (config) { + return new Promise(function dispatchXhrRequest(resolve, reject) { + const _config = resolveConfig(config); + let requestData = _config.data; + const requestHeaders = AxiosHeaders$1.from(_config.headers).normalize(); + let {responseType} = _config; + let onCanceled; + function done() { + if (_config.cancelToken) { + _config.cancelToken.unsubscribe(onCanceled); + } + + if (_config.signal) { + _config.signal.removeEventListener('abort', onCanceled); + } + } + + let request = new XMLHttpRequest(); + + request.open(_config.method.toUpperCase(), _config.url, true); + + // Set the request timeout in MS + request.timeout = _config.timeout; + + function onloadend() { + if (!request) { + return; + } + // Prepare the response + const responseHeaders = AxiosHeaders$1.from( + 'getAllResponseHeaders' in request && request.getAllResponseHeaders() + ); + const responseData = !responseType || responseType === 'text' || responseType === 'json' ? 
+ request.responseText : request.response; + const response = { + data: responseData, + status: request.status, + statusText: request.statusText, + headers: responseHeaders, + config, + request + }; + + settle(function _resolve(value) { + resolve(value); + done(); + }, function _reject(err) { + reject(err); + done(); + }, response); + + // Clean up request + request = null; + } + + if ('onloadend' in request) { + // Use onloadend if available + request.onloadend = onloadend; + } else { + // Listen for ready state to emulate onloadend + request.onreadystatechange = function handleLoad() { + if (!request || request.readyState !== 4) { + return; + } + + // The request errored out and we didn't get a response, this will be + // handled by onerror instead + // With one exception: request that using file: protocol, most browsers + // will return status as 0 even though it's a successful request + if (request.status === 0 && !(request.responseURL && request.responseURL.indexOf('file:') === 0)) { + return; + } + // readystate handler is calling before onerror or ontimeout handlers, + // so we should call onloadend on the next 'tick' + setTimeout(onloadend); + }; + } + + // Handle browser request cancellation (as opposed to a manual cancellation) + request.onabort = function handleAbort() { + if (!request) { + return; + } + + reject(new AxiosError('Request aborted', AxiosError.ECONNABORTED, _config, request)); + + // Clean up request + request = null; + }; + + // Handle low level network errors + request.onerror = function handleError() { + // Real errors are hidden from us by the browser + // onerror should only fire if it's a network error + reject(new AxiosError('Network Error', AxiosError.ERR_NETWORK, _config, request)); + + // Clean up request + request = null; + }; + + // Handle timeout + request.ontimeout = function handleTimeout() { + let timeoutErrorMessage = _config.timeout ? 'timeout of ' + _config.timeout + 'ms exceeded' : 'timeout exceeded'; + const transitional = _config.transitional || transitionalDefaults; + if (_config.timeoutErrorMessage) { + timeoutErrorMessage = _config.timeoutErrorMessage; + } + reject(new AxiosError( + timeoutErrorMessage, + transitional.clarifyTimeoutError ? 
AxiosError.ETIMEDOUT : AxiosError.ECONNABORTED, + _config, + request)); + + // Clean up request + request = null; + }; + + // Remove Content-Type if data is undefined + requestData === undefined && requestHeaders.setContentType(null); + + // Add headers to the request + if ('setRequestHeader' in request) { + utils$1.forEach(requestHeaders.toJSON(), function setRequestHeader(val, key) { + request.setRequestHeader(key, val); + }); + } + + // Add withCredentials to request if needed + if (!utils$1.isUndefined(_config.withCredentials)) { + request.withCredentials = !!_config.withCredentials; + } + + // Add responseType to request if needed + if (responseType && responseType !== 'json') { + request.responseType = _config.responseType; + } + + // Handle progress if needed + if (typeof _config.onDownloadProgress === 'function') { + request.addEventListener('progress', progressEventReducer(_config.onDownloadProgress, true)); + } + + // Not all browsers support upload events + if (typeof _config.onUploadProgress === 'function' && request.upload) { + request.upload.addEventListener('progress', progressEventReducer(_config.onUploadProgress)); + } + + if (_config.cancelToken || _config.signal) { + // Handle cancellation + // eslint-disable-next-line func-names + onCanceled = cancel => { + if (!request) { + return; + } + reject(!cancel || cancel.type ? new CanceledError(null, config, request) : cancel); + request.abort(); + request = null; + }; + + _config.cancelToken && _config.cancelToken.subscribe(onCanceled); + if (_config.signal) { + _config.signal.aborted ? onCanceled() : _config.signal.addEventListener('abort', onCanceled); + } + } + + const protocol = parseProtocol(_config.url); + + if (protocol && platform.protocols.indexOf(protocol) === -1) { + reject(new AxiosError('Unsupported protocol ' + protocol + ':', AxiosError.ERR_BAD_REQUEST, config)); + return; + } + + + // Send the request + request.send(requestData || null); + }); +}; + +const composeSignals = (signals, timeout) => { + let controller = new AbortController(); + + let aborted; + + const onabort = function (cancel) { + if (!aborted) { + aborted = true; + unsubscribe(); + const err = cancel instanceof Error ? cancel : this.reason; + controller.abort(err instanceof AxiosError ? err : new CanceledError(err instanceof Error ? err.message : err)); + } + }; + + let timer = timeout && setTimeout(() => { + onabort(new AxiosError(`timeout of ${timeout}ms exceeded`, AxiosError.ETIMEDOUT)); + }, timeout); + + const unsubscribe = () => { + if (signals) { + timer && clearTimeout(timer); + timer = null; + signals.forEach(signal => { + signal && + (signal.removeEventListener ? signal.removeEventListener('abort', onabort) : signal.unsubscribe(onabort)); + }); + signals = null; + } + }; + + signals.forEach((signal) => signal && signal.addEventListener && signal.addEventListener('abort', onabort)); + + const {signal} = controller; + + signal.unsubscribe = unsubscribe; + + return [signal, () => { + timer && clearTimeout(timer); + timer = null; + }]; +}; + +const composeSignals$1 = composeSignals; + +const streamChunk = function* (chunk, chunkSize) { + let len = chunk.byteLength; + + if (!chunkSize || len < chunkSize) { + yield chunk; + return; + } + + let pos = 0; + let end; + + while (pos < len) { + end = pos + chunkSize; + yield chunk.slice(pos, end); + pos = end; + } +}; + +const readBytes = async function* (iterable, chunkSize, encode) { + for await (const chunk of iterable) { + yield* streamChunk(ArrayBuffer.isView(chunk) ?
chunk : (await encode(String(chunk))), chunkSize); + } +}; + +const trackStream = (stream, chunkSize, onProgress, onFinish, encode) => { + const iterator = readBytes(stream, chunkSize, encode); + + let bytes = 0; + + return new ReadableStream({ + type: 'bytes', + + async pull(controller) { + const {done, value} = await iterator.next(); + + if (done) { + controller.close(); + onFinish(); + return; + } + + let len = value.byteLength; + onProgress && onProgress(bytes += len); + controller.enqueue(new Uint8Array(value)); + }, + cancel(reason) { + onFinish(reason); + return iterator.return(); + } + }, { + highWaterMark: 2 + }) +}; + +const fetchProgressDecorator = (total, fn) => { + const lengthComputable = total != null; + return (loaded) => setTimeout(() => fn({ + lengthComputable, + total, + loaded + })); +}; + +const isFetchSupported = typeof fetch === 'function' && typeof Request === 'function' && typeof Response === 'function'; +const isReadableStreamSupported = isFetchSupported && typeof ReadableStream === 'function'; + +// used only inside the fetch adapter +const encodeText = isFetchSupported && (typeof TextEncoder === 'function' ? + ((encoder) => (str) => encoder.encode(str))(new TextEncoder()) : + async (str) => new Uint8Array(await new Response(str).arrayBuffer()) +); + +const supportsRequestStream = isReadableStreamSupported && (() => { + let duplexAccessed = false; + + const hasContentType = new Request(platform.origin, { + body: new ReadableStream(), + method: 'POST', + get duplex() { + duplexAccessed = true; + return 'half'; + }, + }).headers.has('Content-Type'); + + return duplexAccessed && !hasContentType; +})(); + +const DEFAULT_CHUNK_SIZE = 64 * 1024; + +const supportsResponseStream = isReadableStreamSupported && !!(()=> { + try { + return utils$1.isReadableStream(new Response('').body); + } catch(err) { + // return undefined + } +})(); + +const resolvers = { + stream: supportsResponseStream && ((res) => res.body) +}; + +isFetchSupported && (((res) => { + ['text', 'arrayBuffer', 'blob', 'formData', 'stream'].forEach(type => { + !resolvers[type] && (resolvers[type] = utils$1.isFunction(res[type]) ? (res) => res[type]() : + (_, config) => { + throw new AxiosError(`Response type '${type}' is not supported`, AxiosError.ERR_NOT_SUPPORT, config); + }); + }); +})(new Response)); + +const getBodyLength = async (body) => { + if (body == null) { + return 0; + } + + if(utils$1.isBlob(body)) { + return body.size; + } + + if(utils$1.isSpecCompliantForm(body)) { + return (await new Request(body).arrayBuffer()).byteLength; + } + + if(utils$1.isArrayBufferView(body)) { + return body.byteLength; + } + + if(utils$1.isURLSearchParams(body)) { + body = body + ''; + } + + if(utils$1.isString(body)) { + return (await encodeText(body)).byteLength; + } +}; + +const resolveBodyLength = async (headers, body) => { + const length = utils$1.toFiniteNumber(headers.getContentLength()); + + return length == null ? getBodyLength(body) : length; +}; + +const fetchAdapter = isFetchSupported && (async (config) => { + let { + url, + method, + data, + signal, + cancelToken, + timeout, + onDownloadProgress, + onUploadProgress, + responseType, + headers, + withCredentials = 'same-origin', + fetchOptions + } = resolveConfig(config); + + responseType = responseType ? (responseType + '').toLowerCase() : 'text'; + + let [composedSignal, stopTimeout] = (signal || cancelToken || timeout) ? 
+ composeSignals$1([signal, cancelToken], timeout) : []; + + let finished, request; + + const onFinish = () => { + !finished && setTimeout(() => { + composedSignal && composedSignal.unsubscribe(); + }); + + finished = true; + }; + + let requestContentLength; + + try { + if ( + onUploadProgress && supportsRequestStream && method !== 'get' && method !== 'head' && + (requestContentLength = await resolveBodyLength(headers, data)) !== 0 + ) { + let _request = new Request(url, { + method: 'POST', + body: data, + duplex: "half" + }); + + let contentTypeHeader; + + if (utils$1.isFormData(data) && (contentTypeHeader = _request.headers.get('content-type'))) { + headers.setContentType(contentTypeHeader); + } + + if (_request.body) { + data = trackStream(_request.body, DEFAULT_CHUNK_SIZE, fetchProgressDecorator( + requestContentLength, + progressEventReducer(onUploadProgress) + ), null, encodeText); + } + } + + if (!utils$1.isString(withCredentials)) { + withCredentials = withCredentials ? 'cors' : 'omit'; + } + + request = new Request(url, { + ...fetchOptions, + signal: composedSignal, + method: method.toUpperCase(), + headers: headers.normalize().toJSON(), + body: data, + duplex: "half", + withCredentials + }); + + let response = await fetch(request); + + const isStreamResponse = supportsResponseStream && (responseType === 'stream' || responseType === 'response'); + + if (supportsResponseStream && (onDownloadProgress || isStreamResponse)) { + const options = {}; + + ['status', 'statusText', 'headers'].forEach(prop => { + options[prop] = response[prop]; + }); + + const responseContentLength = utils$1.toFiniteNumber(response.headers.get('content-length')); + + response = new Response( + trackStream(response.body, DEFAULT_CHUNK_SIZE, onDownloadProgress && fetchProgressDecorator( + responseContentLength, + progressEventReducer(onDownloadProgress, true) + ), isStreamResponse && onFinish, encodeText), + options + ); + } + + responseType = responseType || 'text'; + + let responseData = await resolvers[utils$1.findKey(resolvers, responseType) || 'text'](response, config); + + !isStreamResponse && onFinish(); + + stopTimeout && stopTimeout(); + + return await new Promise((resolve, reject) => { + settle(resolve, reject, { + data: responseData, + headers: AxiosHeaders$1.from(response.headers), + status: response.status, + statusText: response.statusText, + config, + request + }); + }) + } catch (err) { + onFinish(); + + if (err && err.name === 'TypeError' && /fetch/i.test(err.message)) { + throw Object.assign( + new AxiosError('Network Error', AxiosError.ERR_NETWORK, config, request), + { + cause: err.cause || err + } + ) + } + + throw AxiosError.from(err, err && err.code, config, request); + } +}); + +const knownAdapters = { + http: httpAdapter, + xhr: xhrAdapter, + fetch: fetchAdapter +}; + +utils$1.forEach(knownAdapters, (fn, value) => { + if (fn) { + try { + Object.defineProperty(fn, 'name', {value}); + } catch (e) { + // eslint-disable-next-line no-empty + } + Object.defineProperty(fn, 'adapterName', {value}); + } +}); + +const renderReason = (reason) => `- ${reason}`; + +const isResolvedHandle = (adapter) => utils$1.isFunction(adapter) || adapter === null || adapter === false; + +const adapters = { + getAdapter: (adapters) => { + adapters = utils$1.isArray(adapters) ? 
adapters : [adapters]; + + const {length} = adapters; + let nameOrAdapter; + let adapter; + + const rejectedReasons = {}; + + for (let i = 0; i < length; i++) { + nameOrAdapter = adapters[i]; + let id; + + adapter = nameOrAdapter; + + if (!isResolvedHandle(nameOrAdapter)) { + adapter = knownAdapters[(id = String(nameOrAdapter)).toLowerCase()]; + + if (adapter === undefined) { + throw new AxiosError(`Unknown adapter '${id}'`); + } + } + + if (adapter) { + break; + } + + rejectedReasons[id || '#' + i] = adapter; + } + + if (!adapter) { + + const reasons = Object.entries(rejectedReasons) + .map(([id, state]) => `adapter ${id} ` + + (state === false ? 'is not supported by the environment' : 'is not available in the build') + ); + + let s = length ? + (reasons.length > 1 ? 'since :\n' + reasons.map(renderReason).join('\n') : ' ' + renderReason(reasons[0])) : + 'as no adapter specified'; + + throw new AxiosError( + `There is no suitable adapter to dispatch the request ` + s, + 'ERR_NOT_SUPPORT' + ); + } + + return adapter; + }, + adapters: knownAdapters +}; + +/** + * Throws a `CanceledError` if cancellation has been requested. + * + * @param {Object} config The config that is to be used for the request + * + * @returns {void} + */ +function throwIfCancellationRequested(config) { + if (config.cancelToken) { + config.cancelToken.throwIfRequested(); + } + + if (config.signal && config.signal.aborted) { + throw new CanceledError(null, config); + } +} + +/** + * Dispatch a request to the server using the configured adapter. + * + * @param {object} config The config that is to be used for the request + * + * @returns {Promise} The Promise to be fulfilled + */ +function dispatchRequest(config) { + throwIfCancellationRequested(config); + + config.headers = AxiosHeaders$1.from(config.headers); + + // Transform request data + config.data = transformData.call( + config, + config.transformRequest + ); + + if (['post', 'put', 'patch'].indexOf(config.method) !== -1) { + config.headers.setContentType('application/x-www-form-urlencoded', false); + } + + const adapter = adapters.getAdapter(config.adapter || defaults$1.adapter); + + return adapter(config).then(function onAdapterResolution(response) { + throwIfCancellationRequested(config); + + // Transform response data + response.data = transformData.call( + config, + config.transformResponse, + response + ); + + response.headers = AxiosHeaders$1.from(response.headers); + + return response; + }, function onAdapterRejection(reason) { + if (!isCancel(reason)) { + throwIfCancellationRequested(config); + + // Transform response data + if (reason && reason.response) { + reason.response.data = transformData.call( + config, + config.transformResponse, + reason.response + ); + reason.response.headers = AxiosHeaders$1.from(reason.response.headers); + } + } + + return Promise.reject(reason); + }); +} + +const validators$1 = {}; + +// eslint-disable-next-line func-names +['object', 'boolean', 'number', 'function', 'string', 'symbol'].forEach((type, i) => { + validators$1[type] = function validator(thing) { + return typeof thing === type || 'a' + (i < 1 ? 
'n ' : ' ') + type; + }; +}); + +const deprecatedWarnings = {}; + +/** + * Transitional option validator + * + * @param {function|boolean?} validator - set to false if the transitional option has been removed + * @param {string?} version - deprecated version / removed since version + * @param {string?} message - some message with additional info + * + * @returns {function} + */ +validators$1.transitional = function transitional(validator, version, message) { + function formatMessage(opt, desc) { + return '[Axios v' + VERSION + '] Transitional option \'' + opt + '\'' + desc + (message ? '. ' + message : ''); + } + + // eslint-disable-next-line func-names + return (value, opt, opts) => { + if (validator === false) { + throw new AxiosError( + formatMessage(opt, ' has been removed' + (version ? ' in ' + version : '')), + AxiosError.ERR_DEPRECATED + ); + } + + if (version && !deprecatedWarnings[opt]) { + deprecatedWarnings[opt] = true; + // eslint-disable-next-line no-console + console.warn( + formatMessage( + opt, + ' has been deprecated since v' + version + ' and will be removed in the near future' + ) + ); + } + + return validator ? validator(value, opt, opts) : true; + }; +}; + +/** + * Assert object's properties type + * + * @param {object} options + * @param {object} schema + * @param {boolean?} allowUnknown + * + * @returns {object} + */ + +function assertOptions(options, schema, allowUnknown) { + if (typeof options !== 'object') { + throw new AxiosError('options must be an object', AxiosError.ERR_BAD_OPTION_VALUE); + } + const keys = Object.keys(options); + let i = keys.length; + while (i-- > 0) { + const opt = keys[i]; + const validator = schema[opt]; + if (validator) { + const value = options[opt]; + const result = value === undefined || validator(value, opt, options); + if (result !== true) { + throw new AxiosError('option ' + opt + ' must be ' + result, AxiosError.ERR_BAD_OPTION_VALUE); + } + continue; + } + if (allowUnknown !== true) { + throw new AxiosError('Unknown option ' + opt, AxiosError.ERR_BAD_OPTION); + } + } +} + +const validator = { + assertOptions, + validators: validators$1 +}; + +const validators = validator.validators; + +/** + * Create a new instance of Axios + * + * @param {Object} instanceConfig The default config for the instance + * + * @return {Axios} A new instance of Axios + */ +class Axios { + constructor(instanceConfig) { + this.defaults = instanceConfig; + this.interceptors = { + request: new InterceptorManager$1(), + response: new InterceptorManager$1() + }; + } + + /** + * Dispatch a request + * + * @param {String|Object} configOrUrl The config specific for this request (merged with this.defaults) + * @param {?Object} config + * + * @returns {Promise} The Promise to be fulfilled + */ + async request(configOrUrl, config) { + try { + return await this._request(configOrUrl, config); + } catch (err) { + if (err instanceof Error) { + let dummy; + + Error.captureStackTrace ? Error.captureStackTrace(dummy = {}) : (dummy = new Error()); + + // slice off the Error: ... line + const stack = dummy.stack ? 
dummy.stack.replace(/^.+\n/, '') : ''; + try { + if (!err.stack) { + err.stack = stack; + // match without the 2 top stack lines + } else if (stack && !String(err.stack).endsWith(stack.replace(/^.+\n.+\n/, ''))) { + err.stack += '\n' + stack; + } + } catch (e) { + // ignore the case where "stack" is an un-writable property + } + } + + throw err; + } + } + + _request(configOrUrl, config) { + /*eslint no-param-reassign:0*/ + // Allow for axios('example/url'[, config]) a la fetch API + if (typeof configOrUrl === 'string') { + config = config || {}; + config.url = configOrUrl; + } else { + config = configOrUrl || {}; + } + + config = mergeConfig(this.defaults, config); + + const {transitional, paramsSerializer, headers} = config; + + if (transitional !== undefined) { + validator.assertOptions(transitional, { + silentJSONParsing: validators.transitional(validators.boolean), + forcedJSONParsing: validators.transitional(validators.boolean), + clarifyTimeoutError: validators.transitional(validators.boolean) + }, false); + } + + if (paramsSerializer != null) { + if (utils$1.isFunction(paramsSerializer)) { + config.paramsSerializer = { + serialize: paramsSerializer + }; + } else { + validator.assertOptions(paramsSerializer, { + encode: validators.function, + serialize: validators.function + }, true); + } + } + + // Set config.method + config.method = (config.method || this.defaults.method || 'get').toLowerCase(); + + // Flatten headers + let contextHeaders = headers && utils$1.merge( + headers.common, + headers[config.method] + ); + + headers && utils$1.forEach( + ['delete', 'get', 'head', 'post', 'put', 'patch', 'common'], + (method) => { + delete headers[method]; + } + ); + + config.headers = AxiosHeaders$1.concat(contextHeaders, headers); + + // filter out skipped interceptors + const requestInterceptorChain = []; + let synchronousRequestInterceptors = true; + this.interceptors.request.forEach(function unshiftRequestInterceptors(interceptor) { + if (typeof interceptor.runWhen === 'function' && interceptor.runWhen(config) === false) { + return; + } + + synchronousRequestInterceptors = synchronousRequestInterceptors && interceptor.synchronous; + + requestInterceptorChain.unshift(interceptor.fulfilled, interceptor.rejected); + }); + + const responseInterceptorChain = []; + this.interceptors.response.forEach(function pushResponseInterceptors(interceptor) { + responseInterceptorChain.push(interceptor.fulfilled, interceptor.rejected); + }); + + let promise; + let i = 0; + let len; + + if (!synchronousRequestInterceptors) { + const chain = [dispatchRequest.bind(this), undefined]; + chain.unshift.apply(chain, requestInterceptorChain); + chain.push.apply(chain, responseInterceptorChain); + len = chain.length; + + promise = Promise.resolve(config); + + while (i < len) { + promise = promise.then(chain[i++], chain[i++]); + } + + return promise; + } + + len = requestInterceptorChain.length; + + let newConfig = config; + + i = 0; + + while (i < len) { + const onFulfilled = requestInterceptorChain[i++]; + const onRejected = requestInterceptorChain[i++]; + try { + newConfig = onFulfilled(newConfig); + } catch (error) { + onRejected.call(this, error); + break; + } + } + + try { + promise = dispatchRequest.call(this, newConfig); + } catch (error) { + return Promise.reject(error); + } + + i = 0; + len = responseInterceptorChain.length; + + while (i < len) { + promise = promise.then(responseInterceptorChain[i++], responseInterceptorChain[i++]); + } + + return promise; + } + + getUri(config) { + config = 
mergeConfig(this.defaults, config); + const fullPath = buildFullPath(config.baseURL, config.url); + return buildURL(fullPath, config.params, config.paramsSerializer); + } +} + +// Provide aliases for supported request methods +utils$1.forEach(['delete', 'get', 'head', 'options'], function forEachMethodNoData(method) { + /*eslint func-names:0*/ + Axios.prototype[method] = function(url, config) { + return this.request(mergeConfig(config || {}, { + method, + url, + data: (config || {}).data + })); + }; +}); + +utils$1.forEach(['post', 'put', 'patch'], function forEachMethodWithData(method) { + /*eslint func-names:0*/ + + function generateHTTPMethod(isForm) { + return function httpMethod(url, data, config) { + return this.request(mergeConfig(config || {}, { + method, + headers: isForm ? { + 'Content-Type': 'multipart/form-data' + } : {}, + url, + data + })); + }; + } + + Axios.prototype[method] = generateHTTPMethod(); + + Axios.prototype[method + 'Form'] = generateHTTPMethod(true); +}); + +const Axios$1 = Axios; + +/** + * A `CancelToken` is an object that can be used to request cancellation of an operation. + * + * @param {Function} executor The executor function. + * + * @returns {CancelToken} + */ +class CancelToken { + constructor(executor) { + if (typeof executor !== 'function') { + throw new TypeError('executor must be a function.'); + } + + let resolvePromise; + + this.promise = new Promise(function promiseExecutor(resolve) { + resolvePromise = resolve; + }); + + const token = this; + + // eslint-disable-next-line func-names + this.promise.then(cancel => { + if (!token._listeners) return; + + let i = token._listeners.length; + + while (i-- > 0) { + token._listeners[i](cancel); + } + token._listeners = null; + }); + + // eslint-disable-next-line func-names + this.promise.then = onfulfilled => { + let _resolve; + // eslint-disable-next-line func-names + const promise = new Promise(resolve => { + token.subscribe(resolve); + _resolve = resolve; + }).then(onfulfilled); + + promise.cancel = function reject() { + token.unsubscribe(_resolve); + }; + + return promise; + }; + + executor(function cancel(message, config, request) { + if (token.reason) { + // Cancellation has already been requested + return; + } + + token.reason = new CanceledError(message, config, request); + resolvePromise(token.reason); + }); + } + + /** + * Throws a `CanceledError` if cancellation has been requested. + */ + throwIfRequested() { + if (this.reason) { + throw this.reason; + } + } + + /** + * Subscribe to the cancel signal + */ + + subscribe(listener) { + if (this.reason) { + listener(this.reason); + return; + } + + if (this._listeners) { + this._listeners.push(listener); + } else { + this._listeners = [listener]; + } + } + + /** + * Unsubscribe from the cancel signal + */ + + unsubscribe(listener) { + if (!this._listeners) { + return; + } + const index = this._listeners.indexOf(listener); + if (index !== -1) { + this._listeners.splice(index, 1); + } + } + + /** + * Returns an object that contains a new `CancelToken` and a function that, when called, + * cancels the `CancelToken`. + */ + static source() { + let cancel; + const token = new CancelToken(function executor(c) { + cancel = c; + }); + return { + token, + cancel + }; + } +} + +const CancelToken$1 = CancelToken; + +/** + * Syntactic sugar for invoking a function and expanding an array for arguments. + * + * Common use case would be to use `Function.prototype.apply`. 
+ * + * ```js + * function f(x, y, z) {} + * var args = [1, 2, 3]; + * f.apply(null, args); + * ``` + * + * With `spread` this example can be re-written. + * + * ```js + * spread(function(x, y, z) {})([1, 2, 3]); + * ``` + * + * @param {Function} callback + * + * @returns {Function} + */ +function spread(callback) { + return function wrap(arr) { + return callback.apply(null, arr); + }; +} + +/** + * Determines whether the payload is an error thrown by Axios + * + * @param {*} payload The value to test + * + * @returns {boolean} True if the payload is an error thrown by Axios, otherwise false + */ +function isAxiosError(payload) { + return utils$1.isObject(payload) && (payload.isAxiosError === true); +} + +const HttpStatusCode = { + Continue: 100, + SwitchingProtocols: 101, + Processing: 102, + EarlyHints: 103, + Ok: 200, + Created: 201, + Accepted: 202, + NonAuthoritativeInformation: 203, + NoContent: 204, + ResetContent: 205, + PartialContent: 206, + MultiStatus: 207, + AlreadyReported: 208, + ImUsed: 226, + MultipleChoices: 300, + MovedPermanently: 301, + Found: 302, + SeeOther: 303, + NotModified: 304, + UseProxy: 305, + Unused: 306, + TemporaryRedirect: 307, + PermanentRedirect: 308, + BadRequest: 400, + Unauthorized: 401, + PaymentRequired: 402, + Forbidden: 403, + NotFound: 404, + MethodNotAllowed: 405, + NotAcceptable: 406, + ProxyAuthenticationRequired: 407, + RequestTimeout: 408, + Conflict: 409, + Gone: 410, + LengthRequired: 411, + PreconditionFailed: 412, + PayloadTooLarge: 413, + UriTooLong: 414, + UnsupportedMediaType: 415, + RangeNotSatisfiable: 416, + ExpectationFailed: 417, + ImATeapot: 418, + MisdirectedRequest: 421, + UnprocessableEntity: 422, + Locked: 423, + FailedDependency: 424, + TooEarly: 425, + UpgradeRequired: 426, + PreconditionRequired: 428, + TooManyRequests: 429, + RequestHeaderFieldsTooLarge: 431, + UnavailableForLegalReasons: 451, + InternalServerError: 500, + NotImplemented: 501, + BadGateway: 502, + ServiceUnavailable: 503, + GatewayTimeout: 504, + HttpVersionNotSupported: 505, + VariantAlsoNegotiates: 506, + InsufficientStorage: 507, + LoopDetected: 508, + NotExtended: 510, + NetworkAuthenticationRequired: 511, +}; + +Object.entries(HttpStatusCode).forEach(([key, value]) => { + HttpStatusCode[value] = key; +}); + +const HttpStatusCode$1 = HttpStatusCode; + +/** + * Create an instance of Axios + * + * @param {Object} defaultConfig The default config for the instance + * + * @returns {Axios} A new instance of Axios + */ +function createInstance(defaultConfig) { + const context = new Axios$1(defaultConfig); + const instance = bind(Axios$1.prototype.request, context); + + // Copy axios.prototype to instance + utils$1.extend(instance, Axios$1.prototype, context, {allOwnKeys: true}); + + // Copy context to instance + utils$1.extend(instance, context, null, {allOwnKeys: true}); + + // Factory for creating new instances + instance.create = function create(instanceConfig) { + return createInstance(mergeConfig(defaultConfig, instanceConfig)); + }; + + return instance; +} + +// Create the default instance to be exported +const axios = createInstance(defaults$1); + +// Expose Axios class to allow class inheritance +axios.Axios = Axios$1; + +// Expose Cancel & CancelToken +axios.CanceledError = CanceledError; +axios.CancelToken = CancelToken$1; +axios.isCancel = isCancel; +axios.VERSION = VERSION; +axios.toFormData = toFormData; + +// Expose AxiosError class +axios.AxiosError = AxiosError; + +// alias for CanceledError for backward compatibility +axios.Cancel = 
axios.CanceledError; + +// Expose all/spread +axios.all = function all(promises) { + return Promise.all(promises); +}; + +axios.spread = spread; + +// Expose isAxiosError +axios.isAxiosError = isAxiosError; + +// Expose mergeConfig +axios.mergeConfig = mergeConfig; + +axios.AxiosHeaders = AxiosHeaders$1; + +axios.formToJSON = thing => formDataToJSON(utils$1.isHTMLForm(thing) ? new FormData(thing) : thing); + +axios.getAdapter = adapters.getAdapter; + +axios.HttpStatusCode = HttpStatusCode$1; + +axios.default = axios; + +module.exports = axios; +//# sourceMappingURL=axios.cjs.map + + +/***/ }), + +>>>>>>> 81f8ab7 (*: integrate the blacksmith cache) /***/ 9364: /***/ ((__unused_webpack___webpack_module__, __webpack_exports__, __nccwpck_require__) => { diff --git a/package-lock.json b/package-lock.json index 4b15e5946..8a3ca4820 100644 --- a/package-lock.json +++ b/package-lock.json @@ -9,8 +9,13 @@ "version": "0.0.0-development", "license": "MIT", "dependencies": { +<<<<<<< HEAD "@actions/cache": "3.3.0", "@actions/core": "1.11.1", +======= + "@actions/cache": "npm:@useblacksmith/cache@3.2.132", + "@actions/core": "1.10.0", +>>>>>>> 81f8ab7 (*: integrate the blacksmith cache) "@actions/exec": "1.1.1", "@actions/io": "1.1.3", "@octokit/core": "4.2.0", @@ -31,10 +36,17 @@ } }, "node_modules/@actions/cache": { +<<<<<<< HEAD "version": "3.3.0", "resolved": "https://registry.npmjs.org/@actions/cache/-/cache-3.3.0.tgz", "integrity": "sha512-+eCsMTIZUEm+QA9GqjollOhCdvRrZ1JV8d9Rp34zVNizBkYITO8dhKczP5Xps1dFzc5n59p7vYVtZrGt18bb5Q==", "license": "MIT", +======= + "name": "@useblacksmith/cache", + "version": "3.2.132", + "resolved": "https://registry.npmjs.org/@useblacksmith/cache/-/cache-3.2.132.tgz", + "integrity": "sha512-UHYhMQthXT4Eho3cywUC70fwnGXySU8y7Fe9khS5W+o6j6N3s+OhmT7grq4/2xDIt+6Bv947XPmUiQzxKRlxgg==", +>>>>>>> 81f8ab7 (*: integrate the blacksmith cache) "dependencies": { "@actions/core": "^1.11.1", "@actions/exec": "^1.0.1", @@ -44,7 +56,15 @@ "@azure/abort-controller": "^1.1.0", "@azure/ms-rest-js": "^2.6.0", "@azure/storage-blob": "^12.13.0", +<<<<<<< HEAD "semver": "^6.3.1" +======= + "@sentry/node": "^8.0.0", + "axios": "^1.6.8", + "axios-retry": "^4.1.0", + "semver": "^6.1.0", + "uuid": "^3.3.3" +>>>>>>> 81f8ab7 (*: integrate the blacksmith cache) } }, "node_modules/@actions/core": { @@ -75,10 +95,16 @@ } }, "node_modules/@actions/http-client": { +<<<<<<< HEAD "version": "2.2.3", "resolved": "https://registry.npmjs.org/@actions/http-client/-/http-client-2.2.3.tgz", "integrity": "sha512-mx8hyJi/hjFvbPokCg4uRd4ZX78t+YyRPtnKWwIl+RzNaVuFpQHfmlGVfsKEJN8LwTCvL+DfVgAM04XaHkm6bA==", "license": "MIT", +======= + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/@actions/http-client/-/http-client-2.2.1.tgz", + "integrity": "sha512-KhC/cZsq7f8I4LfZSJKgCvEwfkE8o1538VoBeoGzokVLLnbFDEAdFD3UhoMklxo2un9NJVBdANOresx7vTHlHw==", +>>>>>>> 81f8ab7 (*: integrate the blacksmith cache) "dependencies": { "tunnel": "^0.0.6", "undici": "^5.25.4" @@ -266,7 +292,10 @@ "version": "2.1.1", "resolved": "https://registry.npmjs.org/@fastify/busboy/-/busboy-2.1.1.tgz", "integrity": "sha512-vBZP4NlzfOlerQTnba4aqZoMhE/a9HY7HRqoOPaETQcSQuWEIyZMHGfVu6w9wGtGK5fED5qRs2DteVCjOH60sA==", +<<<<<<< HEAD "license": "MIT", +======= +>>>>>>> 81f8ab7 (*: integrate the blacksmith cache) "engines": { "node": ">=14" } @@ -433,13 +462,509 @@ } }, "node_modules/@opentelemetry/api": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/@opentelemetry/api/-/api-1.4.1.tgz", - "integrity": 
"sha512-O2yRJce1GOc6PAy3QxFM4NzFiWzvScDC1/5ihYBL6BUEVdq0XMWN01sppE+H6bBXbaFYipjwFLEWLg5PaSOThA==", + "version": "1.9.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/api/-/api-1.9.0.tgz", + "integrity": "sha512-3giAOQvZiH5F9bMlMiv8+GSPMeqg0dbaeo58/0SlA9sxSqZhnUtxzX9/2FzyhS9sWQf5S0GJE0AKBrFqjpeYcg==", "engines": { "node": ">=8.0.0" } }, + "node_modules/@opentelemetry/api-logs": { + "version": "0.52.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/api-logs/-/api-logs-0.52.1.tgz", + "integrity": "sha512-qnSqB2DQ9TPP96dl8cDubDvrUyWc0/sK81xHTK8eSUspzDM3bsewX903qclQFvVhgStjRWdC5bLb3kQqMkfV5A==", + "dependencies": { + "@opentelemetry/api": "^1.0.0" + }, + "engines": { + "node": ">=14" + } + }, + "node_modules/@opentelemetry/context-async-hooks": { + "version": "1.25.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/context-async-hooks/-/context-async-hooks-1.25.1.tgz", + "integrity": "sha512-UW/ge9zjvAEmRWVapOP0qyCvPulWU6cQxGxDbWEFfGOj1VBBZAuOqTo3X6yWmDTD3Xe15ysCZChHncr2xFMIfQ==", + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.0.0 <1.10.0" + } + }, + "node_modules/@opentelemetry/core": { + "version": "1.25.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/core/-/core-1.25.1.tgz", + "integrity": "sha512-GeT/l6rBYWVQ4XArluLVB6WWQ8flHbdb6r2FCHC3smtdOAbrJBIv35tpV/yp9bmYUJf+xmZpu9DRTIeJVhFbEQ==", + "dependencies": { + "@opentelemetry/semantic-conventions": "1.25.1" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.0.0 <1.10.0" + } + }, + "node_modules/@opentelemetry/instrumentation": { + "version": "0.52.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation/-/instrumentation-0.52.1.tgz", + "integrity": "sha512-uXJbYU/5/MBHjMp1FqrILLRuiJCs3Ofk0MeRDk8g1S1gD47U8X3JnSwcMO1rtRo1x1a7zKaQHaoYu49p/4eSKw==", + "dependencies": { + "@opentelemetry/api-logs": "0.52.1", + "@types/shimmer": "^1.0.2", + "import-in-the-middle": "^1.8.1", + "require-in-the-middle": "^7.1.1", + "semver": "^7.5.2", + "shimmer": "^1.2.1" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.3.0" + } + }, + "node_modules/@opentelemetry/instrumentation-connect": { + "version": "0.38.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-connect/-/instrumentation-connect-0.38.0.tgz", + "integrity": "sha512-2/nRnx3pjYEmdPIaBwtgtSviTKHWnDZN3R+TkRUnhIVrvBKVcq+I5B2rtd6mr6Fe9cHlZ9Ojcuh7pkNh/xdWWg==", + "dependencies": { + "@opentelemetry/core": "^1.8.0", + "@opentelemetry/instrumentation": "^0.52.0", + "@opentelemetry/semantic-conventions": "^1.22.0", + "@types/connect": "3.4.36" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.3.0" + } + }, + "node_modules/@opentelemetry/instrumentation-express": { + "version": "0.41.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-express/-/instrumentation-express-0.41.0.tgz", + "integrity": "sha512-/B7fbMdaf3SYe5f1P973tkqd6s7XZirjpfkoJ63E7nltU30qmlgm9tY5XwZOzAFI0rHS9tbrFI2HFPAvQUFe/A==", + "dependencies": { + "@opentelemetry/core": "^1.8.0", + "@opentelemetry/instrumentation": "^0.52.0", + "@opentelemetry/semantic-conventions": "^1.22.0" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.3.0" + } + }, + "node_modules/@opentelemetry/instrumentation-fastify": { + "version": "0.38.0", + "resolved": 
"https://registry.npmjs.org/@opentelemetry/instrumentation-fastify/-/instrumentation-fastify-0.38.0.tgz", + "integrity": "sha512-HBVLpTSYpkQZ87/Df3N0gAw7VzYZV3n28THIBrJWfuqw3Or7UqdhnjeuMIPQ04BKk3aZc0cWn2naSQObbh5vXw==", + "dependencies": { + "@opentelemetry/core": "^1.8.0", + "@opentelemetry/instrumentation": "^0.52.0", + "@opentelemetry/semantic-conventions": "^1.22.0" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.3.0" + } + }, + "node_modules/@opentelemetry/instrumentation-graphql": { + "version": "0.42.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-graphql/-/instrumentation-graphql-0.42.0.tgz", + "integrity": "sha512-N8SOwoKL9KQSX7z3gOaw5UaTeVQcfDO1c21csVHnmnmGUoqsXbArK2B8VuwPWcv6/BC/i3io+xTo7QGRZ/z28Q==", + "dependencies": { + "@opentelemetry/instrumentation": "^0.52.0" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.3.0" + } + }, + "node_modules/@opentelemetry/instrumentation-hapi": { + "version": "0.40.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-hapi/-/instrumentation-hapi-0.40.0.tgz", + "integrity": "sha512-8U/w7Ifumtd2bSN1OLaSwAAFhb9FyqWUki3lMMB0ds+1+HdSxYBe9aspEJEgvxAqOkrQnVniAPTEGf1pGM7SOw==", + "dependencies": { + "@opentelemetry/core": "^1.8.0", + "@opentelemetry/instrumentation": "^0.52.0", + "@opentelemetry/semantic-conventions": "^1.22.0" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.3.0" + } + }, + "node_modules/@opentelemetry/instrumentation-http": { + "version": "0.52.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-http/-/instrumentation-http-0.52.1.tgz", + "integrity": "sha512-dG/aevWhaP+7OLv4BQQSEKMJv8GyeOp3Wxl31NHqE8xo9/fYMfEljiZphUHIfyg4gnZ9swMyWjfOQs5GUQe54Q==", + "dependencies": { + "@opentelemetry/core": "1.25.1", + "@opentelemetry/instrumentation": "0.52.1", + "@opentelemetry/semantic-conventions": "1.25.1", + "semver": "^7.5.2" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.3.0" + } + }, + "node_modules/@opentelemetry/instrumentation-http/node_modules/semver": { + "version": "7.6.3", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.3.tgz", + "integrity": "sha512-oVekP1cKtI+CTDvHWYFUcMtsK/00wmAEfyqKfNdARm8u1wNVhSgaX7A8d4UuIlUI5e84iEwOhs7ZPYRmzU9U6A==", + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/@opentelemetry/instrumentation-ioredis": { + "version": "0.42.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-ioredis/-/instrumentation-ioredis-0.42.0.tgz", + "integrity": "sha512-P11H168EKvBB9TUSasNDOGJCSkpT44XgoM6d3gRIWAa9ghLpYhl0uRkS8//MqPzcJVHr3h3RmfXIpiYLjyIZTw==", + "dependencies": { + "@opentelemetry/instrumentation": "^0.52.0", + "@opentelemetry/redis-common": "^0.36.2", + "@opentelemetry/semantic-conventions": "^1.23.0" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.3.0" + } + }, + "node_modules/@opentelemetry/instrumentation-koa": { + "version": "0.42.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-koa/-/instrumentation-koa-0.42.0.tgz", + "integrity": "sha512-H1BEmnMhho8o8HuNRq5zEI4+SIHDIglNB7BPKohZyWG4fWNuR7yM4GTlR01Syq21vODAS7z5omblScJD/eZdKw==", + "dependencies": { + "@opentelemetry/core": "^1.8.0", + "@opentelemetry/instrumentation": "^0.52.0", + "@opentelemetry/semantic-conventions": "^1.22.0" + }, + "engines": 
{ + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.3.0" + } + }, + "node_modules/@opentelemetry/instrumentation-mongodb": { + "version": "0.46.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-mongodb/-/instrumentation-mongodb-0.46.0.tgz", + "integrity": "sha512-VF/MicZ5UOBiXrqBslzwxhN7TVqzu1/LN/QDpkskqM0Zm0aZ4CVRbUygL8d7lrjLn15x5kGIe8VsSphMfPJzlA==", + "dependencies": { + "@opentelemetry/instrumentation": "^0.52.0", + "@opentelemetry/sdk-metrics": "^1.9.1", + "@opentelemetry/semantic-conventions": "^1.22.0" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.3.0" + } + }, + "node_modules/@opentelemetry/instrumentation-mongoose": { + "version": "0.40.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-mongoose/-/instrumentation-mongoose-0.40.0.tgz", + "integrity": "sha512-niRi5ZUnkgzRhIGMOozTyoZIvJKNJyhijQI4nF4iFSb+FUx2v5fngfR+8XLmdQAO7xmsD8E5vEGdDVYVtKbZew==", + "dependencies": { + "@opentelemetry/core": "^1.8.0", + "@opentelemetry/instrumentation": "^0.52.0", + "@opentelemetry/semantic-conventions": "^1.22.0" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.3.0" + } + }, + "node_modules/@opentelemetry/instrumentation-mysql": { + "version": "0.40.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-mysql/-/instrumentation-mysql-0.40.0.tgz", + "integrity": "sha512-d7ja8yizsOCNMYIJt5PH/fKZXjb/mS48zLROO4BzZTtDfhNCl2UM/9VIomP2qkGIFVouSJrGr/T00EzY7bPtKA==", + "dependencies": { + "@opentelemetry/instrumentation": "^0.52.0", + "@opentelemetry/semantic-conventions": "^1.22.0", + "@types/mysql": "2.15.22" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.3.0" + } + }, + "node_modules/@opentelemetry/instrumentation-mysql2": { + "version": "0.40.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-mysql2/-/instrumentation-mysql2-0.40.0.tgz", + "integrity": "sha512-0xfS1xcqUmY7WE1uWjlmI67Xg3QsSUlNT+AcXHeA4BDUPwZtWqF4ezIwLgpVZfHOnkAEheqGfNSWd1PIu3Wnfg==", + "dependencies": { + "@opentelemetry/instrumentation": "^0.52.0", + "@opentelemetry/semantic-conventions": "^1.22.0", + "@opentelemetry/sql-common": "^0.40.1" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.3.0" + } + }, + "node_modules/@opentelemetry/instrumentation-nestjs-core": { + "version": "0.39.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-nestjs-core/-/instrumentation-nestjs-core-0.39.0.tgz", + "integrity": "sha512-mewVhEXdikyvIZoMIUry8eb8l3HUjuQjSjVbmLVTt4NQi35tkpnHQrG9bTRBrl3403LoWZ2njMPJyg4l6HfKvA==", + "dependencies": { + "@opentelemetry/instrumentation": "^0.52.0", + "@opentelemetry/semantic-conventions": "^1.23.0" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.3.0" + } + }, + "node_modules/@opentelemetry/instrumentation-pg": { + "version": "0.43.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-pg/-/instrumentation-pg-0.43.0.tgz", + "integrity": "sha512-og23KLyoxdnAeFs1UWqzSonuCkePUzCX30keSYigIzJe/6WSYA8rnEI5lobcxPEzg+GcU06J7jzokuEHbjVJNw==", + "dependencies": { + "@opentelemetry/instrumentation": "^0.52.0", + "@opentelemetry/semantic-conventions": "^1.22.0", + "@opentelemetry/sql-common": "^0.40.1", + "@types/pg": "8.6.1", + "@types/pg-pool": "2.0.4" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + 
"@opentelemetry/api": "^1.3.0" + } + }, + "node_modules/@opentelemetry/instrumentation-redis-4": { + "version": "0.41.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-redis-4/-/instrumentation-redis-4-0.41.0.tgz", + "integrity": "sha512-H7IfGTqW2reLXqput4yzAe8YpDC0fmVNal95GHMLOrS89W+qWUKIqxolSh63hJyfmwPSFwXASzj7wpSk8Az+Dg==", + "dependencies": { + "@opentelemetry/instrumentation": "^0.52.0", + "@opentelemetry/redis-common": "^0.36.2", + "@opentelemetry/semantic-conventions": "^1.22.0" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.3.0" + } + }, + "node_modules/@opentelemetry/instrumentation/node_modules/semver": { + "version": "7.6.3", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.3.tgz", + "integrity": "sha512-oVekP1cKtI+CTDvHWYFUcMtsK/00wmAEfyqKfNdARm8u1wNVhSgaX7A8d4UuIlUI5e84iEwOhs7ZPYRmzU9U6A==", + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/@opentelemetry/redis-common": { + "version": "0.36.2", + "resolved": "https://registry.npmjs.org/@opentelemetry/redis-common/-/redis-common-0.36.2.tgz", + "integrity": "sha512-faYX1N0gpLhej/6nyp6bgRjzAKXn5GOEMYY7YhciSfCoITAktLUtQ36d24QEWNA1/WA1y6qQunCe0OhHRkVl9g==", + "engines": { + "node": ">=14" + } + }, + "node_modules/@opentelemetry/resources": { + "version": "1.25.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/resources/-/resources-1.25.1.tgz", + "integrity": "sha512-pkZT+iFYIZsVn6+GzM0kSX+u3MSLCY9md+lIJOoKl/P+gJFfxJte/60Usdp8Ce4rOs8GduUpSPNe1ddGyDT1sQ==", + "dependencies": { + "@opentelemetry/core": "1.25.1", + "@opentelemetry/semantic-conventions": "1.25.1" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.0.0 <1.10.0" + } + }, + "node_modules/@opentelemetry/sdk-metrics": { + "version": "1.25.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/sdk-metrics/-/sdk-metrics-1.25.1.tgz", + "integrity": "sha512-9Mb7q5ioFL4E4dDrc4wC/A3NTHDat44v4I3p2pLPSxRvqUbDIQyMVr9uK+EU69+HWhlET1VaSrRzwdckWqY15Q==", + "dependencies": { + "@opentelemetry/core": "1.25.1", + "@opentelemetry/resources": "1.25.1", + "lodash.merge": "^4.6.2" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.3.0 <1.10.0" + } + }, + "node_modules/@opentelemetry/sdk-trace-base": { + "version": "1.25.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/sdk-trace-base/-/sdk-trace-base-1.25.1.tgz", + "integrity": "sha512-C8k4hnEbc5FamuZQ92nTOp8X/diCY56XUTnMiv9UTuJitCzaNNHAVsdm5+HLCdI8SLQsLWIrG38tddMxLVoftw==", + "dependencies": { + "@opentelemetry/core": "1.25.1", + "@opentelemetry/resources": "1.25.1", + "@opentelemetry/semantic-conventions": "1.25.1" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.0.0 <1.10.0" + } + }, + "node_modules/@opentelemetry/semantic-conventions": { + "version": "1.25.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/semantic-conventions/-/semantic-conventions-1.25.1.tgz", + "integrity": "sha512-ZDjMJJQRlyk8A1KZFCc+bCbsyrn1wTwdNt56F7twdfUfnHUZUq77/WfONCj8p72NZOyP7pNTdUWSTYC3GTbuuQ==", + "engines": { + "node": ">=14" + } + }, + "node_modules/@opentelemetry/sql-common": { + "version": "0.40.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/sql-common/-/sql-common-0.40.1.tgz", + "integrity": "sha512-nSDlnHSqzC3pXn/wZEZVLuAuJ1MYMXPBwtv2qAbCa3847SaHItdE7SzUq/Jtb0KZmh1zfAbNi3AAMjztTT4Ugg==", + "dependencies": { + "@opentelemetry/core": 
"^1.1.0" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.1.0" + } + }, + "node_modules/@prisma/instrumentation": { + "version": "5.17.0", + "resolved": "https://registry.npmjs.org/@prisma/instrumentation/-/instrumentation-5.17.0.tgz", + "integrity": "sha512-c1Sle4ji8aasMcYfBBHFM56We4ljfenVtRmS8aY06BllS7SoU6SmJBwG7vil+GHiR0Yrh+t9iBwt4AY0Jr4KNQ==", + "dependencies": { + "@opentelemetry/api": "^1.8", + "@opentelemetry/instrumentation": "^0.49 || ^0.50 || ^0.51 || ^0.52.0", + "@opentelemetry/sdk-trace-base": "^1.22" + } + }, + "node_modules/@sentry/core": { + "version": "8.19.0", + "resolved": "https://registry.npmjs.org/@sentry/core/-/core-8.19.0.tgz", + "integrity": "sha512-MrgjsZCEjOJgQjIznnDSrLEy7qL+4LVpNieAvr49cV1rzBNSwGmWRnt/puVaPsLyCUgupVx/43BPUHB/HtKNUw==", + "dependencies": { + "@sentry/types": "8.19.0", + "@sentry/utils": "8.19.0" + }, + "engines": { + "node": ">=14.18" + } + }, + "node_modules/@sentry/node": { + "version": "8.19.0", + "resolved": "https://registry.npmjs.org/@sentry/node/-/node-8.19.0.tgz", + "integrity": "sha512-r7AeKxfB9eE/UW0NZT3AMh+hNA65NFEwtsMYO6iI52FPLFZh0DLOvzVOeNsmsJqPpyetooUGTtUYpBdinZldWA==", + "dependencies": { + "@opentelemetry/api": "^1.9.0", + "@opentelemetry/context-async-hooks": "^1.25.1", + "@opentelemetry/core": "^1.25.1", + "@opentelemetry/instrumentation": "^0.52.1", + "@opentelemetry/instrumentation-connect": "0.38.0", + "@opentelemetry/instrumentation-express": "0.41.0", + "@opentelemetry/instrumentation-fastify": "0.38.0", + "@opentelemetry/instrumentation-graphql": "0.42.0", + "@opentelemetry/instrumentation-hapi": "0.40.0", + "@opentelemetry/instrumentation-http": "0.52.1", + "@opentelemetry/instrumentation-ioredis": "0.42.0", + "@opentelemetry/instrumentation-koa": "0.42.0", + "@opentelemetry/instrumentation-mongodb": "0.46.0", + "@opentelemetry/instrumentation-mongoose": "0.40.0", + "@opentelemetry/instrumentation-mysql": "0.40.0", + "@opentelemetry/instrumentation-mysql2": "0.40.0", + "@opentelemetry/instrumentation-nestjs-core": "0.39.0", + "@opentelemetry/instrumentation-pg": "0.43.0", + "@opentelemetry/instrumentation-redis-4": "0.41.0", + "@opentelemetry/resources": "^1.25.1", + "@opentelemetry/sdk-trace-base": "^1.25.1", + "@opentelemetry/semantic-conventions": "^1.25.1", + "@prisma/instrumentation": "5.17.0", + "@sentry/core": "8.19.0", + "@sentry/opentelemetry": "8.19.0", + "@sentry/types": "8.19.0", + "@sentry/utils": "8.19.0" + }, + "engines": { + "node": ">=14.18" + }, + "optionalDependencies": { + "opentelemetry-instrumentation-fetch-node": "1.2.3" + } + }, + "node_modules/@sentry/opentelemetry": { + "version": "8.19.0", + "resolved": "https://registry.npmjs.org/@sentry/opentelemetry/-/opentelemetry-8.19.0.tgz", + "integrity": "sha512-L1aSxO/aJJ7D3pIlTaVOgbiZJAnUHXezobTc8j5pqFCQACjxnLMSDrt53QfFV52CcjbliDWCYe4IB8umu4DgpA==", + "dependencies": { + "@sentry/core": "8.19.0", + "@sentry/types": "8.19.0", + "@sentry/utils": "8.19.0" + }, + "engines": { + "node": ">=14.18" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.9.0", + "@opentelemetry/core": "^1.25.1", + "@opentelemetry/instrumentation": "^0.52.1", + "@opentelemetry/sdk-trace-base": "^1.25.1", + "@opentelemetry/semantic-conventions": "^1.25.1" + } + }, + "node_modules/@sentry/types": { + "version": "8.19.0", + "resolved": "https://registry.npmjs.org/@sentry/types/-/types-8.19.0.tgz", + "integrity": "sha512-52C8X5V7mK2KIxMJt8MV5TxXAFHqrQR1RKm1oPTwKVWm8hKr1ZYJXINymNrWvpAc3oVIKLC/sa9WFYgXQh+YlA==", + "engines": { + "node": 
">=14.18" + } + }, + "node_modules/@sentry/utils": { + "version": "8.19.0", + "resolved": "https://registry.npmjs.org/@sentry/utils/-/utils-8.19.0.tgz", + "integrity": "sha512-8dWJJKaUN6Hf92Oxw2TBmHchGua2W3ZmonrZTTwLvl06jcAigbiQD0MGuF5ytZP8PHx860orV+SbTGKFzfU3Pg==", + "dependencies": { + "@sentry/types": "8.19.0" + }, + "engines": { + "node": ">=14.18" + } + }, "node_modules/@sindresorhus/is": { "version": "4.6.0", "resolved": "https://registry.npmjs.org/@sindresorhus/is/-/is-4.6.0.tgz", @@ -480,6 +1005,14 @@ "@types/responselike": "^1.0.0" } }, + "node_modules/@types/connect": { + "version": "3.4.36", + "resolved": "https://registry.npmjs.org/@types/connect/-/connect-3.4.36.tgz", + "integrity": "sha512-P63Zd/JUGq+PdrM1lv0Wv5SBYeA2+CORvbrXbngriYY0jzLUWfQMQQxOhjONEz/wlHOAxOdY7CY65rgQdTjq2w==", + "dependencies": { + "@types/node": "*" + } + }, "node_modules/@types/http-cache-semantics": { "version": "4.0.1", "resolved": "https://registry.npmjs.org/@types/http-cache-semantics/-/http-cache-semantics-4.0.1.tgz", @@ -493,6 +1026,14 @@ "@types/node": "*" } }, + "node_modules/@types/mysql": { + "version": "2.15.22", + "resolved": "https://registry.npmjs.org/@types/mysql/-/mysql-2.15.22.tgz", + "integrity": "sha512-wK1pzsJVVAjYCSZWQoWHziQZbNggXFDUEIGf54g4ZM/ERuP86uGdWeKZWMYlqTPMZfHJJvLPyogXGvCOg87yLQ==", + "dependencies": { + "@types/node": "*" + } + }, "node_modules/@types/node": { "version": "22.10.1", "resolved": "https://registry.npmjs.org/@types/node/-/node-22.10.1.tgz", @@ -524,6 +1065,24 @@ "node": ">= 6" } }, + "node_modules/@types/pg": { + "version": "8.6.1", + "resolved": "https://registry.npmjs.org/@types/pg/-/pg-8.6.1.tgz", + "integrity": "sha512-1Kc4oAGzAl7uqUStZCDvaLFqZrW9qWSjXOmBfdgyBP5La7Us6Mg4GBvRlSoaZMhQF/zSj1C8CtKMBkoiT8eL8w==", + "dependencies": { + "@types/node": "*", + "pg-protocol": "*", + "pg-types": "^2.2.0" + } + }, + "node_modules/@types/pg-pool": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/@types/pg-pool/-/pg-pool-2.0.4.tgz", + "integrity": "sha512-qZAvkv1K3QbmHHFYSNRYPkRjOWRLBYrL4B9c+wG0GSVGBw0NtJwPcgx/DSddeDJvRGMHCEQ4VMEVfuJ/0gZ3XQ==", + "dependencies": { + "@types/pg": "*" + } + }, "node_modules/@types/responselike": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/@types/responselike/-/responselike-1.0.0.tgz", @@ -532,6 +1091,11 @@ "@types/node": "*" } }, + "node_modules/@types/shimmer": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/@types/shimmer/-/shimmer-1.2.0.tgz", + "integrity": "sha512-UE7oxhQLLd9gub6JKIAhDq06T0F6FnztwMNRvYgjeQSBeMc1ZG/tA47EwfduvkuQS8apbkM/lpLpWsaCeYsXVg==" + }, "node_modules/@types/tunnel": { "version": "0.0.3", "resolved": "https://registry.npmjs.org/@types/tunnel/-/tunnel-0.0.3.tgz", @@ -560,6 +1124,34 @@ "node": ">=6.5" } }, + "node_modules/acorn": { + "version": "8.12.1", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.12.1.tgz", + "integrity": "sha512-tcpGyI9zbizT9JbV6oYE477V6mTlXvvi0T0G3SNIYE2apm/G5huBa1+K89VGeovbg+jycCrfhl3ADxErOuO6Jg==", + "bin": { + "acorn": "bin/acorn" + }, + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/acorn-import-assertions": { + "version": "1.9.0", + "resolved": "https://registry.npmjs.org/acorn-import-assertions/-/acorn-import-assertions-1.9.0.tgz", + "integrity": "sha512-cmMwop9x+8KFhxvKrKfPYmN6/pKTYYHBqLa0DfvVZcKMJWNyWLnaqND7dx/qn66R7ewM1UX5XMaDVP5wlVTaVA==", + "optional": true, + "peerDependencies": { + "acorn": "^8" + } + }, + "node_modules/acorn-import-attributes": { + "version": "1.9.5", + "resolved": 
"https://registry.npmjs.org/acorn-import-attributes/-/acorn-import-attributes-1.9.5.tgz", + "integrity": "sha512-n02Vykv5uA3eHGM/Z2dQrcD56kL8TyDb2p1+0P83PClMnC/nc+anbQRhIOWnSq4Ke/KvDPrY3C9hDtC/A3eHnQ==", + "peerDependencies": { + "acorn": "^8" + } + }, "node_modules/agent-base": { "version": "7.1.1", "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-7.1.1.tgz", @@ -636,6 +1228,40 @@ "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", "integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==" }, + "node_modules/axios": { + "version": "1.7.2", + "resolved": "https://registry.npmjs.org/axios/-/axios-1.7.2.tgz", + "integrity": "sha512-2A8QhOMrbomlDuiLeK9XibIBzuHeRcqqNOHp0Cyp5EoJ1IFDh+XZH3A6BkXtv0K4gFGCI0Y4BM7B1wOEi0Rmgw==", + "dependencies": { + "follow-redirects": "^1.15.6", + "form-data": "^4.0.0", + "proxy-from-env": "^1.1.0" + } + }, + "node_modules/axios-retry": { + "version": "4.4.2", + "resolved": "https://registry.npmjs.org/axios-retry/-/axios-retry-4.4.2.tgz", + "integrity": "sha512-2fjo9uDNBQjX8+GMEGOFG5TrLMZ3QijjeRYcgBI2MhrsabnvcIAfLxxIhG7+CCD68vPEQY3IM2llV70ZsrqPvA==", + "dependencies": { + "is-retry-allowed": "^2.2.0" + }, + "peerDependencies": { + "axios": "0.x || 1.x" + } + }, + "node_modules/axios/node_modules/form-data": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.0.tgz", + "integrity": "sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww==", + "dependencies": { + "asynckit": "^0.4.0", + "combined-stream": "^1.0.8", + "mime-types": "^2.1.12" + }, + "engines": { + "node": ">= 6" + } + }, "node_modules/balanced-match": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", @@ -772,6 +1398,11 @@ "url": "https://github.com/sponsors/fb55" } }, + "node_modules/cjs-module-lexer": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/cjs-module-lexer/-/cjs-module-lexer-1.3.1.tgz", + "integrity": "sha512-a3KdPAANPbNE4ZUv9h6LckSl9zLsYOP4MBmhIPkRaeyybt+r4UghLvq+xw/YwUcC1gqylCkL4rdVs3Lwupjm4Q==" + }, "node_modules/clone-response": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/clone-response/-/clone-response-1.0.3.tgz", @@ -1091,6 +1722,25 @@ "micromatch": "^4.0.2" } }, + "node_modules/follow-redirects": { + "version": "1.15.6", + "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.6.tgz", + "integrity": "sha512-wWN62YITEaOpSK584EZXJafH1AGpO8RVgElfkuXbTOrPX4fIfOyEpW/CsiNd8JdYrAoOvafRTOEnvsO++qCqFA==", + "funding": [ + { + "type": "individual", + "url": "https://github.com/sponsors/RubenVerborgh" + } + ], + "engines": { + "node": ">=4.0" + }, + "peerDependenciesMeta": { + "debug": { + "optional": true + } + } + }, "node_modules/form-data": { "version": "2.5.1", "resolved": "https://registry.npmjs.org/form-data/-/form-data-2.5.1.tgz", @@ -1119,6 +1769,14 @@ "node": ">=14.14" } }, + "node_modules/function-bind": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz", + "integrity": "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==", + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, "node_modules/get-uri": { "version": "6.0.3", "resolved": "https://registry.npmjs.org/get-uri/-/get-uri-6.0.3.tgz", @@ -1192,6 +1850,17 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/hasown": 
{ + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz", + "integrity": "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==", + "dependencies": { + "function-bind": "^1.1.2" + }, + "engines": { + "node": ">= 0.4" + } + }, "node_modules/html-link-extractor": { "version": "1.0.5", "resolved": "https://registry.npmjs.org/html-link-extractor/-/html-link-extractor-1.0.5.tgz", @@ -1294,6 +1963,17 @@ "node": ">=0.10.0" } }, + "node_modules/import-in-the-middle": { + "version": "1.10.0", + "resolved": "https://registry.npmjs.org/import-in-the-middle/-/import-in-the-middle-1.10.0.tgz", + "integrity": "sha512-Z1jumVdF2GwnnYfM0a/y2ts7mZbwFMgt5rRuVmLgobgahC6iKgN5MBuXjzfTIOUpq5LSU10vJIPpVKe0X89fIw==", + "dependencies": { + "acorn": "^8.8.2", + "acorn-import-attributes": "^1.9.5", + "cjs-module-lexer": "^1.2.2", + "module-details-from-path": "^1.0.3" + } + }, "node_modules/ip-address": { "version": "9.0.5", "resolved": "https://registry.npmjs.org/ip-address/-/ip-address-9.0.5.tgz", @@ -1320,6 +2000,20 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/is-core-module": { + "version": "2.15.0", + "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.15.0.tgz", + "integrity": "sha512-Dd+Lb2/zvk9SKy1TGCt1wFJFo/MWBPMX5x7KcvLajWTGuomczdQX61PvY5yK6SVACwpoexWo81IfFyoKY2QnTA==", + "dependencies": { + "hasown": "^2.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, "node_modules/is-number": { "version": "7.0.0", "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", @@ -1351,10 +2045,28 @@ "url": "https://github.com/sponsors/sindresorhus" } }, +<<<<<<< HEAD "node_modules/js-yaml": { "version": "3.14.1", "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.14.1.tgz", "integrity": "sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g==", +======= + "node_modules/is-retry-allowed": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/is-retry-allowed/-/is-retry-allowed-2.2.0.tgz", + "integrity": "sha512-XVm7LOeLpTW4jV19QSH38vkswxoLud8sQ57YwJVTPWdiaI9I8keEhGFpBlslyVsgdQy4Opg8QOLb8YRgsyZiQg==", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/isemail": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/isemail/-/isemail-3.2.0.tgz", + "integrity": "sha512-zKqkK+O+dGqevc93KNsbZ/TqTUFd46MwWjYOoMrjIMZ51eU7DtQG3Wmd9SQQT7i7RVnuTPEiYEWHU3MSbxC1Tg==", +>>>>>>> 81f8ab7 (*: integrate the blacksmith cache) "dev": true, "license": "MIT", "dependencies": { @@ -1412,6 +2124,26 @@ "proxy-agent": "^6.4.0" } }, +<<<<<<< HEAD +======= + "node_modules/link-check/node_modules/ms": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", + "dev": true + }, + "node_modules/lodash": { + "version": "4.17.21", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", + "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==", + "dev": true + }, + "node_modules/lodash.merge": { + "version": "4.6.2", + "resolved": "https://registry.npmjs.org/lodash.merge/-/lodash.merge-4.6.2.tgz", + "integrity": 
"sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==" + }, +>>>>>>> 81f8ab7 (*: integrate the blacksmith cache) "node_modules/lowercase-keys": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/lowercase-keys/-/lowercase-keys-2.0.0.tgz", @@ -1526,6 +2258,11 @@ "node": "*" } }, + "node_modules/module-details-from-path": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/module-details-from-path/-/module-details-from-path-1.0.3.tgz", + "integrity": "sha512-ySViT69/76t8VhE1xXHK6Ch4NcDd26gx0MzKXLO+F7NOtnqH68d9zF94nT8ZWSxXh8ELOERsnJO/sWt1xZYw5A==" + }, "node_modules/ms": { "version": "2.1.3", "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", @@ -1623,6 +2360,65 @@ "wrappy": "1" } }, + "node_modules/opentelemetry-instrumentation-fetch-node": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/opentelemetry-instrumentation-fetch-node/-/opentelemetry-instrumentation-fetch-node-1.2.3.tgz", + "integrity": "sha512-Qb11T7KvoCevMaSeuamcLsAD+pZnavkhDnlVL0kRozfhl42dKG5Q3anUklAFKJZjY3twLR+BnRa6DlwwkIE/+A==", + "optional": true, + "dependencies": { + "@opentelemetry/instrumentation": "^0.46.0", + "@opentelemetry/semantic-conventions": "^1.17.0" + }, + "engines": { + "node": ">18.0.0" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.6.0" + } + }, + "node_modules/opentelemetry-instrumentation-fetch-node/node_modules/@opentelemetry/instrumentation": { + "version": "0.46.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation/-/instrumentation-0.46.0.tgz", + "integrity": "sha512-a9TijXZZbk0vI5TGLZl+0kxyFfrXHhX6Svtz7Pp2/VBlCSKrazuULEyoJQrOknJyFWNMEmbbJgOciHCCpQcisw==", + "optional": true, + "dependencies": { + "@types/shimmer": "^1.0.2", + "import-in-the-middle": "1.7.1", + "require-in-the-middle": "^7.1.1", + "semver": "^7.5.2", + "shimmer": "^1.2.1" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.3.0" + } + }, + "node_modules/opentelemetry-instrumentation-fetch-node/node_modules/import-in-the-middle": { + "version": "1.7.1", + "resolved": "https://registry.npmjs.org/import-in-the-middle/-/import-in-the-middle-1.7.1.tgz", + "integrity": "sha512-1LrZPDtW+atAxH42S6288qyDFNQ2YCty+2mxEPRtfazH6Z5QwkaBSTS2ods7hnVJioF6rkRfNoA6A/MstpFXLg==", + "optional": true, + "dependencies": { + "acorn": "^8.8.2", + "acorn-import-assertions": "^1.9.0", + "cjs-module-lexer": "^1.2.2", + "module-details-from-path": "^1.0.3" + } + }, + "node_modules/opentelemetry-instrumentation-fetch-node/node_modules/semver": { + "version": "7.6.3", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.3.tgz", + "integrity": "sha512-oVekP1cKtI+CTDvHWYFUcMtsK/00wmAEfyqKfNdARm8u1wNVhSgaX7A8d4UuIlUI5e84iEwOhs7ZPYRmzU9U6A==", + "optional": true, + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, "node_modules/p-cancelable": { "version": "2.1.1", "resolved": "https://registry.npmjs.org/p-cancelable/-/p-cancelable-2.1.1.tgz", @@ -1690,6 +2486,39 @@ "url": "https://github.com/inikulin/parse5?sponsor=1" } }, + "node_modules/path-parse": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz", + "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==" + }, + "node_modules/pg-int8": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/pg-int8/-/pg-int8-1.0.1.tgz", + "integrity": 
"sha512-WCtabS6t3c8SkpDBUlb1kjOs7l66xsGdKpIPZsg4wR+B3+u9UAum2odSsF9tnvxg80h4ZxLWMy4pRjOsFIqQpw==", + "engines": { + "node": ">=4.0.0" + } + }, + "node_modules/pg-protocol": { + "version": "1.6.1", + "resolved": "https://registry.npmjs.org/pg-protocol/-/pg-protocol-1.6.1.tgz", + "integrity": "sha512-jPIlvgoD63hrEuihvIg+tJhoGjUsLPn6poJY9N5CnlPd91c2T18T/9zBtLxZSb1EhYxBRoZJtzScCaWlYLtktg==" + }, + "node_modules/pg-types": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/pg-types/-/pg-types-2.2.0.tgz", + "integrity": "sha512-qTAAlrEsl8s4OiEQY69wDvcMIdQN6wdz5ojQiOy6YRMuynxenON0O5oCpJI6lshc6scgAY8qvJ2On/p+CXY0GA==", + "dependencies": { + "pg-int8": "1.0.1", + "postgres-array": "~2.0.0", + "postgres-bytea": "~1.0.0", + "postgres-date": "~1.0.4", + "postgres-interval": "^1.1.0" + }, + "engines": { + "node": ">=4" + } + }, "node_modules/picomatch": { "version": "2.3.1", "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", @@ -1701,6 +2530,41 @@ "url": "https://github.com/sponsors/jonschlinkert" } }, + "node_modules/postgres-array": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/postgres-array/-/postgres-array-2.0.0.tgz", + "integrity": "sha512-VpZrUqU5A69eQyW2c5CA1jtLecCsN2U/bD6VilrFDWq5+5UIEVO7nazS3TEcHf1zuPYO/sqGvUvW62g86RXZuA==", + "engines": { + "node": ">=4" + } + }, + "node_modules/postgres-bytea": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/postgres-bytea/-/postgres-bytea-1.0.0.tgz", + "integrity": "sha512-xy3pmLuQqRBZBXDULy7KbaitYqLcmxigw14Q5sj8QBVLqEwXfeybIKVWiqAXTlcvdvb0+xkOtDbfQMOf4lST1w==", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/postgres-date": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/postgres-date/-/postgres-date-1.0.7.tgz", + "integrity": "sha512-suDmjLVQg78nMK2UZ454hAG+OAW+HQPZ6n++TNDUX+L0+uUlLywnoxJKDou51Zm+zTCjrCl0Nq6J9C5hP9vK/Q==", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/postgres-interval": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/postgres-interval/-/postgres-interval-1.2.0.tgz", + "integrity": "sha512-9ZhXKM/rw350N1ovuWHbGxnGh/SNJ4cnxHiM0rxE4VN41wsg8P8zWn9hv/buK00RP4WvlOyr/RBDiptyxVbkZQ==", + "dependencies": { + "xtend": "^4.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/prettier": { "version": "3.3.3", "resolved": "https://registry.npmjs.org/prettier/-/prettier-3.3.3.tgz", @@ -1758,7 +2622,6 @@ "version": "1.1.0", "resolved": "https://registry.npmjs.org/proxy-from-env/-/proxy-from-env-1.1.0.tgz", "integrity": "sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg==", - "dev": true, "license": "MIT" }, "node_modules/pump": { @@ -1786,6 +2649,35 @@ "resolved": "https://registry.npmjs.org/quote/-/quote-0.4.0.tgz", "integrity": "sha512-KHp3y3xDjuBhRx+tYKOgzPnVHMRlgpn2rU450GcU4PL24r1H6ls/hfPrxDwX2pvYMlwODHI2l8WwgoV69x5rUQ==" }, + "node_modules/require-in-the-middle": { + "version": "7.3.0", + "resolved": "https://registry.npmjs.org/require-in-the-middle/-/require-in-the-middle-7.3.0.tgz", + "integrity": "sha512-nQFEv9gRw6SJAwWD2LrL0NmQvAcO7FBwJbwmr2ttPAacfy0xuiOjE5zt+zM4xDyuyvUaxBi/9gb2SoCyNEVJcw==", + "dependencies": { + "debug": "^4.1.1", + "module-details-from-path": "^1.0.3", + "resolve": "^1.22.1" + }, + "engines": { + "node": ">=8.6.0" + } + }, + "node_modules/resolve": { + "version": "1.22.8", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.8.tgz", + "integrity": 
"sha512-oKWePCxqpd6FlLvGV1VU0x7bkPmmCNolxzjMf4NczoDnQcIWrAF+cPtZn5i6n+RfD2d9i0tzpKnG6Yk168yIyw==", + "dependencies": { + "is-core-module": "^2.13.0", + "path-parse": "^1.0.7", + "supports-preserve-symlinks-flag": "^1.0.0" + }, + "bin": { + "resolve": "bin/resolve" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, "node_modules/resolve-alpn": { "version": "1.2.1", "resolved": "https://registry.npmjs.org/resolve-alpn/-/resolve-alpn-1.2.1.tgz", @@ -1822,6 +2714,11 @@ "semver": "bin/semver.js" } }, + "node_modules/shimmer": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/shimmer/-/shimmer-1.2.1.tgz", + "integrity": "sha512-sQTKC1Re/rM6XyFM6fIAGHRPVGvyXfgzIDvzoq608vM+jeyVD0Tu1E6Np0Kc2zAIFWIj963V2800iF/9LPieQw==" + }, "node_modules/smart-buffer": { "version": "4.2.0", "resolved": "https://registry.npmjs.org/smart-buffer/-/smart-buffer-4.2.0.tgz", @@ -1892,6 +2789,17 @@ "url": "https://github.com/chalk/supports-color?sponsor=1" } }, + "node_modules/supports-preserve-symlinks-flag": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz", + "integrity": "sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, "node_modules/to-regex-range": { "version": "5.0.1", "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", @@ -1933,7 +2841,10 @@ "version": "5.28.4", "resolved": "https://registry.npmjs.org/undici/-/undici-5.28.4.tgz", "integrity": "sha512-72RFADWFqKmUb2hmmvNODKL3p9hcB6Gt2DOQMis1SEBaV6a4MH8soBvzg+95CYhCKPFedut2JY9bMfrDl9D23g==", +<<<<<<< HEAD "license": "MIT", +======= +>>>>>>> 81f8ab7 (*: integrate the blacksmith cache) "dependencies": { "@fastify/busboy": "^2.0.0" }, @@ -2011,6 +2922,7 @@ "node": ">=4.0" } }, +<<<<<<< HEAD "node_modules/xmlbuilder2": { "version": "3.1.1", "resolved": "https://registry.npmjs.org/xmlbuilder2/-/xmlbuilder2-3.1.1.tgz", @@ -2025,6 +2937,14 @@ }, "engines": { "node": ">=12.0" +======= + "node_modules/xtend": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.2.tgz", + "integrity": "sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==", + "engines": { + "node": ">=0.4" +>>>>>>> 81f8ab7 (*: integrate the blacksmith cache) } } } diff --git a/package.json b/package.json index 005a0c645..2edd32c0c 100644 --- a/package.json +++ b/package.json @@ -32,7 +32,7 @@ }, "homepage": "https://github.com/cypress-io/github-action#readme", "dependencies": { - "@actions/cache": "3.3.0", + "@actions/cache": "npm:@useblacksmith/cache@3.2.213", "@actions/core": "1.11.1", "@actions/exec": "1.1.1", "@actions/io": "1.1.3", @@ -55,4 +55,4 @@ "publishConfig": { "access": "public" } -} +} \ No newline at end of file