From 57f0e3f198251983d70fc3c83e71b973822da983 Mon Sep 17 00:00:00 2001 From: Jason Orendorff Date: Mon, 12 Jun 2023 12:42:28 -0500 Subject: [PATCH 1/4] Remove actions to add new PRs and issues to a project board The project doesn't seem to exist, so this always fails. --- .github/workflows/issue-opened-workflow.yml | 6 ------ .github/workflows/pr-opened-workflow.yml | 6 ------ 2 files changed, 12 deletions(-) diff --git a/.github/workflows/issue-opened-workflow.yml b/.github/workflows/issue-opened-workflow.yml index b127d98..185eb1d 100644 --- a/.github/workflows/issue-opened-workflow.yml +++ b/.github/workflows/issue-opened-workflow.yml @@ -14,9 +14,3 @@ jobs: - name: add_assignees run: | curl -X POST -H "Accept: application/vnd.github+json" -H "Authorization: Bearer ${{ secrets.GITHUB_TOKEN}}" https://api.github.com/repos/${{github.repository}}/issues/${{ github.event.issue.number}}/assignees -d '{"assignees":["${{steps.oncall.outputs.CURRENT}}"]}' - - - uses: actions/add-to-project@v0.4.0 - name: Add to Project Board - with: - project-url: https://github.com/orgs/actions/projects/12 - github-token: ${{ secrets.CACHE_BOARD_TOKEN }} diff --git a/.github/workflows/pr-opened-workflow.yml b/.github/workflows/pr-opened-workflow.yml index cdd375a..3346d9e 100644 --- a/.github/workflows/pr-opened-workflow.yml +++ b/.github/workflows/pr-opened-workflow.yml @@ -18,9 +18,3 @@ jobs: - name: Add Assignee run: | curl -X POST -H "Accept: application/vnd.github+json" -H "Authorization: Bearer ${{ secrets.GITHUB_TOKEN}}" https://api.github.com/repos/${{github.repository}}/issues/${{ github.event.pull_request.number}}/assignees -d '{"assignees":["${{steps.oncall.outputs.CURRENT}}"]}' - - - uses: actions/add-to-project@v0.4.0 - name: Add to Project Board - with: - project-url: https://github.com/orgs/actions/projects/12 - github-token: ${{ secrets.CACHE_BOARD_TOKEN }} From f7ebb81a3f195b4fb88dab7c14e2f7aff52045aa Mon Sep 17 00:00:00 2001 From: Chad Kimes <1936066+chkimes@users.noreply.github.com> Date: Wed, 9 Aug 2023 10:36:51 -0400 Subject: [PATCH 2/4] Consume latest toolkit and fix dangling promise bug (#1217) * Consume latest toolkit and fix dangling promise bug * Pass earlyExit parameter to run method so tests don't hang * Pass earlyExit parameter to run method so tests don't hang * Refactor restore files to have better patterns for testing * style --- .licenses/npm/@actions/cache.dep.yml | 2 +- .licenses/npm/@actions/http-client.dep.yml | 2 +- __tests__/restore.test.ts | 16 +- __tests__/restoreImpl.test.ts | 30 +-- __tests__/restoreOnly.test.ts | 10 +- dist/restore-only/index.js | 281 +++++++++++++++++---- dist/restore/index.js | 281 +++++++++++++++++---- dist/save-only/index.js | 221 +++++++++++++--- dist/save/index.js | 221 +++++++++++++--- package-lock.json | 30 +-- package.json | 2 +- src/restore.ts | 11 +- src/restoreImpl.ts | 43 +++- src/restoreOnly.ts | 11 +- 14 files changed, 922 insertions(+), 239 deletions(-) diff --git a/.licenses/npm/@actions/cache.dep.yml b/.licenses/npm/@actions/cache.dep.yml index 9abc53e..69b7928 100644 --- a/.licenses/npm/@actions/cache.dep.yml +++ b/.licenses/npm/@actions/cache.dep.yml @@ -1,6 +1,6 @@ --- name: "@actions/cache" -version: 3.2.1 +version: 3.2.2 type: npm summary: homepage: diff --git a/.licenses/npm/@actions/http-client.dep.yml b/.licenses/npm/@actions/http-client.dep.yml index 5c60ad3..53743c1 100644 --- a/.licenses/npm/@actions/http-client.dep.yml +++ b/.licenses/npm/@actions/http-client.dep.yml @@ -1,6 +1,6 @@ --- name: "@actions/http-client" 
-version: 2.0.1 +version: 2.1.1 type: npm summary: Actions Http Client homepage: https://github.com/actions/toolkit/tree/main/packages/http-client diff --git a/__tests__/restore.test.ts b/__tests__/restore.test.ts index c51c293..250f7ef 100644 --- a/__tests__/restore.test.ts +++ b/__tests__/restore.test.ts @@ -2,7 +2,7 @@ import * as cache from "@actions/cache"; import * as core from "@actions/core"; import { Events, RefKey } from "../src/constants"; -import run from "../src/restore"; +import { restoreRun } from "../src/restoreImpl"; import * as actionUtils from "../src/utils/actionUtils"; import * as testUtils from "../src/utils/testUtils"; @@ -71,7 +71,7 @@ test("restore with no cache found", async () => { return Promise.resolve(undefined); }); - await run(); + await restoreRun(); expect(restoreCacheMock).toHaveBeenCalledTimes(1); expect(restoreCacheMock).toHaveBeenCalledWith( @@ -114,7 +114,7 @@ test("restore with restore keys and no cache found", async () => { return Promise.resolve(undefined); }); - await run(); + await restoreRun(); expect(restoreCacheMock).toHaveBeenCalledTimes(1); expect(restoreCacheMock).toHaveBeenCalledWith( @@ -156,7 +156,7 @@ test("restore with cache found for key", async () => { return Promise.resolve(key); }); - await run(); + await restoreRun(); expect(restoreCacheMock).toHaveBeenCalledTimes(1); expect(restoreCacheMock).toHaveBeenCalledWith( @@ -201,7 +201,7 @@ test("restore with cache found for restore key", async () => { return Promise.resolve(restoreKey); }); - await run(); + await restoreRun(); expect(restoreCacheMock).toHaveBeenCalledTimes(1); expect(restoreCacheMock).toHaveBeenCalledWith( @@ -246,7 +246,7 @@ test("Fail restore when fail on cache miss is enabled and primary + restore keys return Promise.resolve(undefined); }); - await run(); + await restoreRun(); expect(restoreCacheMock).toHaveBeenCalledTimes(1); expect(restoreCacheMock).toHaveBeenCalledWith( @@ -289,7 +289,7 @@ test("restore when fail on cache miss is enabled and primary key doesn't match r return Promise.resolve(restoreKey); }); - await run(); + await restoreRun(); expect(restoreCacheMock).toHaveBeenCalledTimes(1); expect(restoreCacheMock).toHaveBeenCalledWith( @@ -335,7 +335,7 @@ test("restore with fail on cache miss disabled and no cache found", async () => return Promise.resolve(undefined); }); - await run(); + await restoreRun(); expect(restoreCacheMock).toHaveBeenCalledTimes(1); expect(restoreCacheMock).toHaveBeenCalledWith( diff --git a/__tests__/restoreImpl.test.ts b/__tests__/restoreImpl.test.ts index d6f13ba..8bab894 100644 --- a/__tests__/restoreImpl.test.ts +++ b/__tests__/restoreImpl.test.ts @@ -2,7 +2,7 @@ import * as cache from "@actions/cache"; import * as core from "@actions/core"; import { Events, Inputs, RefKey } from "../src/constants"; -import run from "../src/restoreImpl"; +import { restoreImpl } from "../src/restoreImpl"; import { StateProvider } from "../src/stateProvider"; import * as actionUtils from "../src/utils/actionUtils"; import * as testUtils from "../src/utils/testUtils"; @@ -60,7 +60,7 @@ test("restore with invalid event outputs warning", async () => { const invalidEvent = "commit_comment"; process.env[Events.Key] = invalidEvent; delete process.env[RefKey]; - await run(new StateProvider()); + await restoreImpl(new StateProvider()); expect(logWarningMock).toHaveBeenCalledWith( `Event Validation Error: The event type ${invalidEvent} is not supported because it's not tied to a branch or tag ref.` ); @@ -76,7 +76,7 @@ test("restore without AC available 
should no-op", async () => { const restoreCacheMock = jest.spyOn(cache, "restoreCache"); const setCacheHitOutputMock = jest.spyOn(core, "setOutput"); - await run(new StateProvider()); + await restoreImpl(new StateProvider()); expect(restoreCacheMock).toHaveBeenCalledTimes(0); expect(setCacheHitOutputMock).toHaveBeenCalledTimes(1); @@ -92,7 +92,7 @@ test("restore on GHES without AC available should no-op", async () => { const restoreCacheMock = jest.spyOn(cache, "restoreCache"); const setCacheHitOutputMock = jest.spyOn(core, "setOutput"); - await run(new StateProvider()); + await restoreImpl(new StateProvider()); expect(restoreCacheMock).toHaveBeenCalledTimes(0); expect(setCacheHitOutputMock).toHaveBeenCalledTimes(1); @@ -119,7 +119,7 @@ test("restore on GHES with AC available ", async () => { return Promise.resolve(key); }); - await run(new StateProvider()); + await restoreImpl(new StateProvider()); expect(restoreCacheMock).toHaveBeenCalledTimes(1); expect(restoreCacheMock).toHaveBeenCalledWith( @@ -143,7 +143,7 @@ test("restore on GHES with AC available ", async () => { test("restore with no path should fail", async () => { const failedMock = jest.spyOn(core, "setFailed"); const restoreCacheMock = jest.spyOn(cache, "restoreCache"); - await run(new StateProvider()); + await restoreImpl(new StateProvider()); expect(restoreCacheMock).toHaveBeenCalledTimes(0); // this input isn't necessary for restore b/c tarball contains entries relative to workspace expect(failedMock).not.toHaveBeenCalledWith( @@ -155,7 +155,7 @@ test("restore with no key", async () => { testUtils.setInput(Inputs.Path, "node_modules"); const failedMock = jest.spyOn(core, "setFailed"); const restoreCacheMock = jest.spyOn(cache, "restoreCache"); - await run(new StateProvider()); + await restoreImpl(new StateProvider()); expect(restoreCacheMock).toHaveBeenCalledTimes(0); expect(failedMock).toHaveBeenCalledWith( "Input required and not supplied: key" @@ -174,7 +174,7 @@ test("restore with too many keys should fail", async () => { }); const failedMock = jest.spyOn(core, "setFailed"); const restoreCacheMock = jest.spyOn(cache, "restoreCache"); - await run(new StateProvider()); + await restoreImpl(new StateProvider()); expect(restoreCacheMock).toHaveBeenCalledTimes(1); expect(restoreCacheMock).toHaveBeenCalledWith( [path], @@ -200,7 +200,7 @@ test("restore with large key should fail", async () => { }); const failedMock = jest.spyOn(core, "setFailed"); const restoreCacheMock = jest.spyOn(cache, "restoreCache"); - await run(new StateProvider()); + await restoreImpl(new StateProvider()); expect(restoreCacheMock).toHaveBeenCalledTimes(1); expect(restoreCacheMock).toHaveBeenCalledWith( [path], @@ -226,7 +226,7 @@ test("restore with invalid key should fail", async () => { }); const failedMock = jest.spyOn(core, "setFailed"); const restoreCacheMock = jest.spyOn(cache, "restoreCache"); - await run(new StateProvider()); + await restoreImpl(new StateProvider()); expect(restoreCacheMock).toHaveBeenCalledTimes(1); expect(restoreCacheMock).toHaveBeenCalledWith( [path], @@ -260,7 +260,7 @@ test("restore with no cache found", async () => { return Promise.resolve(undefined); }); - await run(new StateProvider()); + await restoreImpl(new StateProvider()); expect(restoreCacheMock).toHaveBeenCalledTimes(1); expect(restoreCacheMock).toHaveBeenCalledWith( @@ -301,7 +301,7 @@ test("restore with restore keys and no cache found", async () => { return Promise.resolve(undefined); }); - await run(new StateProvider()); + await restoreImpl(new 
StateProvider()); expect(restoreCacheMock).toHaveBeenCalledTimes(1); expect(restoreCacheMock).toHaveBeenCalledWith( @@ -341,7 +341,7 @@ test("restore with cache found for key", async () => { return Promise.resolve(key); }); - await run(new StateProvider()); + await restoreImpl(new StateProvider()); expect(restoreCacheMock).toHaveBeenCalledTimes(1); expect(restoreCacheMock).toHaveBeenCalledWith( @@ -383,7 +383,7 @@ test("restore with cache found for restore key", async () => { return Promise.resolve(restoreKey); }); - await run(new StateProvider()); + await restoreImpl(new StateProvider()); expect(restoreCacheMock).toHaveBeenCalledTimes(1); expect(restoreCacheMock).toHaveBeenCalledWith( @@ -424,7 +424,7 @@ test("restore with lookup-only set", async () => { return Promise.resolve(key); }); - await run(new StateProvider()); + await restoreImpl(new StateProvider()); expect(restoreCacheMock).toHaveBeenCalledTimes(1); expect(restoreCacheMock).toHaveBeenCalledWith( diff --git a/__tests__/restoreOnly.test.ts b/__tests__/restoreOnly.test.ts index 800c2e1..81e5bca 100644 --- a/__tests__/restoreOnly.test.ts +++ b/__tests__/restoreOnly.test.ts @@ -2,7 +2,7 @@ import * as cache from "@actions/cache"; import * as core from "@actions/core"; import { Events, RefKey } from "../src/constants"; -import run from "../src/restoreOnly"; +import { restoreOnlyRun } from "../src/restoreImpl"; import * as actionUtils from "../src/utils/actionUtils"; import * as testUtils from "../src/utils/testUtils"; @@ -72,7 +72,7 @@ test("restore with no cache found", async () => { return Promise.resolve(undefined); }); - await run(); + await restoreOnlyRun(); expect(restoreCacheMock).toHaveBeenCalledTimes(1); expect(restoreCacheMock).toHaveBeenCalledWith( @@ -114,7 +114,7 @@ test("restore with restore keys and no cache found", async () => { return Promise.resolve(undefined); }); - await run(); + await restoreOnlyRun(); expect(restoreCacheMock).toHaveBeenCalledTimes(1); expect(restoreCacheMock).toHaveBeenCalledWith( @@ -153,7 +153,7 @@ test("restore with cache found for key", async () => { return Promise.resolve(key); }); - await run(); + await restoreOnlyRun(); expect(restoreCacheMock).toHaveBeenCalledTimes(1); expect(restoreCacheMock).toHaveBeenCalledWith( @@ -196,7 +196,7 @@ test("restore with cache found for restore key", async () => { return Promise.resolve(restoreKey); }); - await run(); + await restoreOnlyRun(); expect(restoreCacheMock).toHaveBeenCalledTimes(1); expect(restoreCacheMock).toHaveBeenCalledWith( diff --git a/dist/restore-only/index.js b/dist/restore-only/index.js index b91b307..c536263 100644 --- a/dist/restore-only/index.js +++ b/dist/restore-only/index.js @@ -1127,35 +1127,42 @@ function getArchiveFileSizeInBytes(filePath) { } exports.getArchiveFileSizeInBytes = getArchiveFileSizeInBytes; function resolvePaths(patterns) { - var e_1, _a; - var _b; + var _a, e_1, _b, _c; + var _d; return __awaiter(this, void 0, void 0, function* () { const paths = []; - const workspace = (_b = process.env['GITHUB_WORKSPACE']) !== null && _b !== void 0 ? _b : process.cwd(); + const workspace = (_d = process.env['GITHUB_WORKSPACE']) !== null && _d !== void 0 ? 
_d : process.cwd(); const globber = yield glob.create(patterns.join('\n'), { implicitDescendants: false }); try { - for (var _c = __asyncValues(globber.globGenerator()), _d; _d = yield _c.next(), !_d.done;) { - const file = _d.value; - const relativeFile = path - .relative(workspace, file) - .replace(new RegExp(`\\${path.sep}`, 'g'), '/'); - core.debug(`Matched: ${relativeFile}`); - // Paths are made relative so the tar entries are all relative to the root of the workspace. - if (relativeFile === '') { - // path.relative returns empty string if workspace and file are equal - paths.push('.'); + for (var _e = true, _f = __asyncValues(globber.globGenerator()), _g; _g = yield _f.next(), _a = _g.done, !_a;) { + _c = _g.value; + _e = false; + try { + const file = _c; + const relativeFile = path + .relative(workspace, file) + .replace(new RegExp(`\\${path.sep}`, 'g'), '/'); + core.debug(`Matched: ${relativeFile}`); + // Paths are made relative so the tar entries are all relative to the root of the workspace. + if (relativeFile === '') { + // path.relative returns empty string if workspace and file are equal + paths.push('.'); + } + else { + paths.push(`${relativeFile}`); + } } - else { - paths.push(`${relativeFile}`); + finally { + _e = true; } } } catch (e_1_1) { e_1 = { error: e_1_1 }; } finally { try { - if (_d && !_d.done && (_a = _c.return)) yield _a.call(_c); + if (!_e && !_a && (_b = _f.return)) yield _b.call(_f); } finally { if (e_1) throw e_1.error; } } @@ -3394,10 +3401,7 @@ function getCacheVersion(paths, compressionMethod, enableCrossOsArchive = false) } // Add salt to cache version to support breaking changes in cache entry components.push(versionSalt); - return crypto - .createHash('sha256') - .update(components.join('|')) - .digest('hex'); + return crypto.createHash('sha256').update(components.join('|')).digest('hex'); } exports.getCacheVersion = getCacheVersion; function getCacheEntry(keys, paths, options) { @@ -3450,13 +3454,21 @@ function downloadCache(archiveLocation, archivePath, options) { return __awaiter(this, void 0, void 0, function* () { const archiveUrl = new url_1.URL(archiveLocation); const downloadOptions = (0, options_1.getDownloadOptions)(options); - if (downloadOptions.useAzureSdk && - archiveUrl.hostname.endsWith('.blob.core.windows.net')) { - // Use Azure storage SDK to download caches hosted on Azure to improve speed and reliability. - yield (0, downloadUtils_1.downloadCacheStorageSDK)(archiveLocation, archivePath, downloadOptions); + if (archiveUrl.hostname.endsWith('.blob.core.windows.net')) { + if (downloadOptions.useAzureSdk) { + // Use Azure storage SDK to download caches hosted on Azure to improve speed and reliability. + yield (0, downloadUtils_1.downloadCacheStorageSDK)(archiveLocation, archivePath, downloadOptions); + } + else if (downloadOptions.concurrentBlobDownloads) { + // Use concurrent implementation with HttpClient to work around blob SDK issue + yield (0, downloadUtils_1.downloadCacheHttpClientConcurrent)(archiveLocation, archivePath, downloadOptions); + } + else { + // Otherwise, download using the Actions http-client. + yield (0, downloadUtils_1.downloadCacheHttpClient)(archiveLocation, archivePath); + } } else { - // Otherwise, download using the Actions http-client. 
yield (0, downloadUtils_1.downloadCacheHttpClient)(archiveLocation, archivePath); } }); @@ -3489,9 +3501,7 @@ function getContentRange(start, end) { } function uploadChunk(httpClient, resourceUrl, openStream, start, end) { return __awaiter(this, void 0, void 0, function* () { - core.debug(`Uploading chunk of size ${end - - start + - 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`); + core.debug(`Uploading chunk of size ${end - start + 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`); const additionalHeaders = { 'Content-Type': 'application/octet-stream', 'Content-Range': getContentRange(start, end) @@ -4866,7 +4876,13 @@ function getProxyUrl(reqUrl) { } })(); if (proxyVar) { - return new URL(proxyVar); + try { + return new URL(proxyVar); + } + catch (_a) { + if (!proxyVar.startsWith('http://') && !proxyVar.startsWith('https://')) + return new URL(`http://${proxyVar}`); + } } else { return undefined; @@ -4877,6 +4893,10 @@ function checkBypass(reqUrl) { if (!reqUrl.hostname) { return false; } + const reqHost = reqUrl.hostname; + if (isLoopbackAddress(reqHost)) { + return true; + } const noProxy = process.env['no_proxy'] || process.env['NO_PROXY'] || ''; if (!noProxy) { return false; @@ -4902,13 +4922,24 @@ function checkBypass(reqUrl) { .split(',') .map(x => x.trim().toUpperCase()) .filter(x => x)) { - if (upperReqHosts.some(x => x === upperNoProxyItem)) { + if (upperNoProxyItem === '*' || + upperReqHosts.some(x => x === upperNoProxyItem || + x.endsWith(`.${upperNoProxyItem}`) || + (upperNoProxyItem.startsWith('.') && + x.endsWith(`${upperNoProxyItem}`)))) { return true; } } return false; } exports.checkBypass = checkBypass; +function isLoopbackAddress(host) { + const hostLower = host.toLowerCase(); + return (hostLower === 'localhost' || + hostLower.startsWith('127.') || + hostLower.startsWith('[::1]') || + hostLower.startsWith('[0:0:0:0:0:0:0:1]')); +} //# sourceMappingURL=proxy.js.map /***/ }), @@ -5557,7 +5588,7 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge }); }; Object.defineProperty(exports, "__esModule", { value: true }); -exports.downloadCacheStorageSDK = exports.downloadCacheHttpClient = exports.DownloadProgress = void 0; +exports.downloadCacheStorageSDK = exports.downloadCacheHttpClientConcurrent = exports.downloadCacheHttpClient = exports.DownloadProgress = void 0; const core = __importStar(__webpack_require__(470)); const http_client_1 = __webpack_require__(425); const storage_blob_1 = __webpack_require__(373); @@ -5714,6 +5745,115 @@ function downloadCacheHttpClient(archiveLocation, archivePath) { }); } exports.downloadCacheHttpClient = downloadCacheHttpClient; +/** + * Download the cache using the Actions toolkit http-client concurrently + * + * @param archiveLocation the URL for the cache + * @param archivePath the local path where the cache is saved + */ +function downloadCacheHttpClientConcurrent(archiveLocation, archivePath, options) { + var _a; + return __awaiter(this, void 0, void 0, function* () { + const archiveDescriptor = yield fs.promises.open(archivePath, 'w'); + const httpClient = new http_client_1.HttpClient('actions/cache', undefined, { + socketTimeout: options.timeoutInMs, + keepAlive: true + }); + try { + const res = yield (0, requestUtils_1.retryHttpClientResponse)('downloadCacheMetadata', () => __awaiter(this, void 0, void 0, function* () { return yield httpClient.request('HEAD', archiveLocation, null, {}); })); + const lengthHeader = 
res.message.headers['content-length']; + if (lengthHeader === undefined || lengthHeader === null) { + throw new Error('Content-Length not found on blob response'); + } + const length = parseInt(lengthHeader); + if (Number.isNaN(length)) { + throw new Error(`Could not interpret Content-Length: ${length}`); + } + const downloads = []; + const blockSize = 4 * 1024 * 1024; + for (let offset = 0; offset < length; offset += blockSize) { + const count = Math.min(blockSize, length - offset); + downloads.push({ + offset, + promiseGetter: () => __awaiter(this, void 0, void 0, function* () { + return yield downloadSegmentRetry(httpClient, archiveLocation, offset, count); + }) + }); + } + // reverse to use .pop instead of .shift + downloads.reverse(); + let actives = 0; + let bytesDownloaded = 0; + const progress = new DownloadProgress(length); + progress.startDisplayTimer(); + const progressFn = progress.onProgress(); + const activeDownloads = []; + let nextDownload; + const waitAndWrite = () => __awaiter(this, void 0, void 0, function* () { + const segment = yield Promise.race(Object.values(activeDownloads)); + yield archiveDescriptor.write(segment.buffer, 0, segment.count, segment.offset); + actives--; + delete activeDownloads[segment.offset]; + bytesDownloaded += segment.count; + progressFn({ loadedBytes: bytesDownloaded }); + }); + while ((nextDownload = downloads.pop())) { + activeDownloads[nextDownload.offset] = nextDownload.promiseGetter(); + actives++; + if (actives >= ((_a = options.downloadConcurrency) !== null && _a !== void 0 ? _a : 10)) { + yield waitAndWrite(); + } + } + while (actives > 0) { + yield waitAndWrite(); + } + } + finally { + httpClient.dispose(); + yield archiveDescriptor.close(); + } + }); +} +exports.downloadCacheHttpClientConcurrent = downloadCacheHttpClientConcurrent; +function downloadSegmentRetry(httpClient, archiveLocation, offset, count) { + return __awaiter(this, void 0, void 0, function* () { + const retries = 5; + let failures = 0; + while (true) { + try { + const timeout = 30000; + const result = yield promiseWithTimeout(timeout, downloadSegment(httpClient, archiveLocation, offset, count)); + if (typeof result === 'string') { + throw new Error('downloadSegmentRetry failed due to timeout'); + } + return result; + } + catch (err) { + if (failures >= retries) { + throw err; + } + failures++; + } + } + }); +} +function downloadSegment(httpClient, archiveLocation, offset, count) { + return __awaiter(this, void 0, void 0, function* () { + const partRes = yield (0, requestUtils_1.retryHttpClientResponse)('downloadCachePart', () => __awaiter(this, void 0, void 0, function* () { + return yield httpClient.get(archiveLocation, { + Range: `bytes=${offset}-${offset + count - 1}` + }); + })); + if (!partRes.readBodyBuffer) { + throw new Error('Expected HttpClientResponse to implement readBodyBuffer'); + } + return { + offset, + count, + buffer: yield partRes.readBodyBuffer() + }; + }); +} /** * Download the cache using the Azure Storage SDK. Only call this method if the * URL points to an Azure Storage endpoint. 
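The bundled __awaiter output above makes the new concurrent download path hard to follow. Its control flow reduces to the sketch below; this is a readable restatement, not the shipped code, and fetchSegment(offset, count) is a hypothetical stand-in for the patch's downloadSegment.

import { FileHandle } from "fs/promises";

interface Segment {
    offset: number;
    count: number;
    buffer: Buffer;
}

async function downloadConcurrent(
    length: number,
    fetchSegment: (offset: number, count: number) => Promise<Segment>, // hypothetical stand-in
    file: FileHandle,
    concurrency = 10
): Promise<void> {
    // Split the blob into 4 MiB ranges, queued back-to-front so we can pop().
    const blockSize = 4 * 1024 * 1024;
    const downloads: { offset: number; promiseGetter: () => Promise<Segment> }[] = [];
    for (let offset = 0; offset < length; offset += blockSize) {
        const count = Math.min(blockSize, length - offset);
        downloads.push({ offset, promiseGetter: () => fetchSegment(offset, count) });
    }
    downloads.reverse();

    const active: { [offset: number]: Promise<Segment> } = {};
    let actives = 0;

    // Race the in-flight requests; write whichever finishes first at its own
    // offset, so segments can complete and land on disk out of order.
    const waitAndWrite = async () => {
        const segment = await Promise.race(Object.values(active));
        await file.write(segment.buffer, 0, segment.count, segment.offset);
        delete active[segment.offset];
        actives--;
    };

    let next: { offset: number; promiseGetter: () => Promise<Segment> } | undefined;
    while ((next = downloads.pop())) {
        active[next.offset] = next.promiseGetter();
        actives++;
        if (actives >= concurrency) {
            await waitAndWrite(); // throttle: keep at most `concurrency` requests in flight
        }
    }
    while (actives > 0) {
        await waitAndWrite(); // drain the remaining requests
    }
}

Combined with the new getDownloadOptions defaults in this patch (useAzureSdk: false, concurrentBlobDownloads: true), this becomes the default download path for caches hosted on *.blob.core.windows.net.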
@@ -35837,6 +35977,19 @@ class HttpClientResponse { })); }); } + readBodyBuffer() { + return __awaiter(this, void 0, void 0, function* () { + return new Promise((resolve) => __awaiter(this, void 0, void 0, function* () { + const chunks = []; + this.message.on('data', (chunk) => { + chunks.push(chunk); + }); + this.message.on('end', () => { + resolve(Buffer.concat(chunks)); + }); + })); + }); + } } exports.HttpClientResponse = HttpClientResponse; function isHttps(requestUrl) { @@ -36901,28 +37054,9 @@ exports.default = { "use strict"; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; Object.defineProperty(exports, "__esModule", { value: true }); -const restoreImpl_1 = __importDefault(__webpack_require__(835)); -const stateProvider_1 = __webpack_require__(309); -function run() { - return __awaiter(this, void 0, void 0, function* () { - yield (0, restoreImpl_1.default)(new stateProvider_1.NullStateProvider()); - }); -} -run(); -exports.default = run; +const restoreImpl_1 = __webpack_require__(835); +(0, restoreImpl_1.restoreOnlyRun)(true); /***/ }), @@ -40310,7 +40444,8 @@ exports.getUploadOptions = getUploadOptions; */ function getDownloadOptions(copy) { const result = { - useAzureSdk: true, + useAzureSdk: false, + concurrentBlobDownloads: true, downloadConcurrency: 8, timeoutInMs: 30000, segmentTimeoutInMs: 600000, @@ -40320,6 +40455,9 @@ function getDownloadOptions(copy) { if (typeof copy.useAzureSdk === 'boolean') { result.useAzureSdk = copy.useAzureSdk; } + if (typeof copy.concurrentBlobDownloads === 'boolean') { + result.concurrentBlobDownloads = copy.concurrentBlobDownloads; + } if (typeof copy.downloadConcurrency === 'number') { result.downloadConcurrency = copy.downloadConcurrency; } @@ -49096,9 +49234,11 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge }); }; Object.defineProperty(exports, "__esModule", { value: true }); +exports.restoreRun = exports.restoreOnlyRun = exports.restoreImpl = void 0; const cache = __importStar(__webpack_require__(692)); const core = __importStar(__webpack_require__(470)); const constants_1 = __webpack_require__(694); +const stateProvider_1 = __webpack_require__(309); const utils = __importStar(__webpack_require__(360)); function restoreImpl(stateProvider) { return __awaiter(this, void 0, void 0, function* () { @@ -49149,7 +49289,40 @@ function restoreImpl(stateProvider) { } }); } -exports.default = restoreImpl; +exports.restoreImpl = restoreImpl; +function run(stateProvider, earlyExit) { + return __awaiter(this, void 0, void 0, function* () { + try { + yield restoreImpl(stateProvider); + } + catch (err) { + console.error(err); + if (earlyExit) { + process.exit(1); + } + } + // node will stay alive if any promises are not resolved, + // which is a possibility if HTTP requests are dangling + 
// due to retries or timeouts. We know that if we got here + // that all promises that we care about have successfully + // resolved, so simply exit with success. + if (earlyExit) { + process.exit(0); + } + }); +} +function restoreOnlyRun(earlyExit) { + return __awaiter(this, void 0, void 0, function* () { + yield run(new stateProvider_1.NullStateProvider(), earlyExit); + }); +} +exports.restoreOnlyRun = restoreOnlyRun; +function restoreRun(earlyExit) { + return __awaiter(this, void 0, void 0, function* () { + yield run(new stateProvider_1.StateProvider(), earlyExit); + }); +} +exports.restoreRun = restoreRun; /***/ }), diff --git a/dist/restore/index.js b/dist/restore/index.js index 3a855e4..d88eb2d 100644 --- a/dist/restore/index.js +++ b/dist/restore/index.js @@ -1127,35 +1127,42 @@ function getArchiveFileSizeInBytes(filePath) { } exports.getArchiveFileSizeInBytes = getArchiveFileSizeInBytes; function resolvePaths(patterns) { - var e_1, _a; - var _b; + var _a, e_1, _b, _c; + var _d; return __awaiter(this, void 0, void 0, function* () { const paths = []; - const workspace = (_b = process.env['GITHUB_WORKSPACE']) !== null && _b !== void 0 ? _b : process.cwd(); + const workspace = (_d = process.env['GITHUB_WORKSPACE']) !== null && _d !== void 0 ? _d : process.cwd(); const globber = yield glob.create(patterns.join('\n'), { implicitDescendants: false }); try { - for (var _c = __asyncValues(globber.globGenerator()), _d; _d = yield _c.next(), !_d.done;) { - const file = _d.value; - const relativeFile = path - .relative(workspace, file) - .replace(new RegExp(`\\${path.sep}`, 'g'), '/'); - core.debug(`Matched: ${relativeFile}`); - // Paths are made relative so the tar entries are all relative to the root of the workspace. - if (relativeFile === '') { - // path.relative returns empty string if workspace and file are equal - paths.push('.'); + for (var _e = true, _f = __asyncValues(globber.globGenerator()), _g; _g = yield _f.next(), _a = _g.done, !_a;) { + _c = _g.value; + _e = false; + try { + const file = _c; + const relativeFile = path + .relative(workspace, file) + .replace(new RegExp(`\\${path.sep}`, 'g'), '/'); + core.debug(`Matched: ${relativeFile}`); + // Paths are made relative so the tar entries are all relative to the root of the workspace. 
+ if (relativeFile === '') { + // path.relative returns empty string if workspace and file are equal + paths.push('.'); + } + else { + paths.push(`${relativeFile}`); + } } - else { - paths.push(`${relativeFile}`); + finally { + _e = true; } } } catch (e_1_1) { e_1 = { error: e_1_1 }; } finally { try { - if (_d && !_d.done && (_a = _c.return)) yield _a.call(_c); + if (!_e && !_a && (_b = _f.return)) yield _b.call(_f); } finally { if (e_1) throw e_1.error; } } @@ -3394,10 +3401,7 @@ function getCacheVersion(paths, compressionMethod, enableCrossOsArchive = false) } // Add salt to cache version to support breaking changes in cache entry components.push(versionSalt); - return crypto - .createHash('sha256') - .update(components.join('|')) - .digest('hex'); + return crypto.createHash('sha256').update(components.join('|')).digest('hex'); } exports.getCacheVersion = getCacheVersion; function getCacheEntry(keys, paths, options) { @@ -3450,13 +3454,21 @@ function downloadCache(archiveLocation, archivePath, options) { return __awaiter(this, void 0, void 0, function* () { const archiveUrl = new url_1.URL(archiveLocation); const downloadOptions = (0, options_1.getDownloadOptions)(options); - if (downloadOptions.useAzureSdk && - archiveUrl.hostname.endsWith('.blob.core.windows.net')) { - // Use Azure storage SDK to download caches hosted on Azure to improve speed and reliability. - yield (0, downloadUtils_1.downloadCacheStorageSDK)(archiveLocation, archivePath, downloadOptions); + if (archiveUrl.hostname.endsWith('.blob.core.windows.net')) { + if (downloadOptions.useAzureSdk) { + // Use Azure storage SDK to download caches hosted on Azure to improve speed and reliability. + yield (0, downloadUtils_1.downloadCacheStorageSDK)(archiveLocation, archivePath, downloadOptions); + } + else if (downloadOptions.concurrentBlobDownloads) { + // Use concurrent implementation with HttpClient to work around blob SDK issue + yield (0, downloadUtils_1.downloadCacheHttpClientConcurrent)(archiveLocation, archivePath, downloadOptions); + } + else { + // Otherwise, download using the Actions http-client. + yield (0, downloadUtils_1.downloadCacheHttpClient)(archiveLocation, archivePath); + } } else { - // Otherwise, download using the Actions http-client. 
yield (0, downloadUtils_1.downloadCacheHttpClient)(archiveLocation, archivePath); } }); @@ -3489,9 +3501,7 @@ function getContentRange(start, end) { } function uploadChunk(httpClient, resourceUrl, openStream, start, end) { return __awaiter(this, void 0, void 0, function* () { - core.debug(`Uploading chunk of size ${end - - start + - 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`); + core.debug(`Uploading chunk of size ${end - start + 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`); const additionalHeaders = { 'Content-Type': 'application/octet-stream', 'Content-Range': getContentRange(start, end) @@ -4866,7 +4876,13 @@ function getProxyUrl(reqUrl) { } })(); if (proxyVar) { - return new URL(proxyVar); + try { + return new URL(proxyVar); + } + catch (_a) { + if (!proxyVar.startsWith('http://') && !proxyVar.startsWith('https://')) + return new URL(`http://${proxyVar}`); + } } else { return undefined; @@ -4877,6 +4893,10 @@ function checkBypass(reqUrl) { if (!reqUrl.hostname) { return false; } + const reqHost = reqUrl.hostname; + if (isLoopbackAddress(reqHost)) { + return true; + } const noProxy = process.env['no_proxy'] || process.env['NO_PROXY'] || ''; if (!noProxy) { return false; @@ -4902,13 +4922,24 @@ function checkBypass(reqUrl) { .split(',') .map(x => x.trim().toUpperCase()) .filter(x => x)) { - if (upperReqHosts.some(x => x === upperNoProxyItem)) { + if (upperNoProxyItem === '*' || + upperReqHosts.some(x => x === upperNoProxyItem || + x.endsWith(`.${upperNoProxyItem}`) || + (upperNoProxyItem.startsWith('.') && + x.endsWith(`${upperNoProxyItem}`)))) { return true; } } return false; } exports.checkBypass = checkBypass; +function isLoopbackAddress(host) { + const hostLower = host.toLowerCase(); + return (hostLower === 'localhost' || + hostLower.startsWith('127.') || + hostLower.startsWith('[::1]') || + hostLower.startsWith('[0:0:0:0:0:0:0:1]')); +} //# sourceMappingURL=proxy.js.map /***/ }), @@ -5557,7 +5588,7 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge }); }; Object.defineProperty(exports, "__esModule", { value: true }); -exports.downloadCacheStorageSDK = exports.downloadCacheHttpClient = exports.DownloadProgress = void 0; +exports.downloadCacheStorageSDK = exports.downloadCacheHttpClientConcurrent = exports.downloadCacheHttpClient = exports.DownloadProgress = void 0; const core = __importStar(__webpack_require__(470)); const http_client_1 = __webpack_require__(425); const storage_blob_1 = __webpack_require__(373); @@ -5714,6 +5745,115 @@ function downloadCacheHttpClient(archiveLocation, archivePath) { }); } exports.downloadCacheHttpClient = downloadCacheHttpClient; +/** + * Download the cache using the Actions toolkit http-client concurrently + * + * @param archiveLocation the URL for the cache + * @param archivePath the local path where the cache is saved + */ +function downloadCacheHttpClientConcurrent(archiveLocation, archivePath, options) { + var _a; + return __awaiter(this, void 0, void 0, function* () { + const archiveDescriptor = yield fs.promises.open(archivePath, 'w'); + const httpClient = new http_client_1.HttpClient('actions/cache', undefined, { + socketTimeout: options.timeoutInMs, + keepAlive: true + }); + try { + const res = yield (0, requestUtils_1.retryHttpClientResponse)('downloadCacheMetadata', () => __awaiter(this, void 0, void 0, function* () { return yield httpClient.request('HEAD', archiveLocation, null, {}); })); + const lengthHeader = 
res.message.headers['content-length']; + if (lengthHeader === undefined || lengthHeader === null) { + throw new Error('Content-Length not found on blob response'); + } + const length = parseInt(lengthHeader); + if (Number.isNaN(length)) { + throw new Error(`Could not interpret Content-Length: ${length}`); + } + const downloads = []; + const blockSize = 4 * 1024 * 1024; + for (let offset = 0; offset < length; offset += blockSize) { + const count = Math.min(blockSize, length - offset); + downloads.push({ + offset, + promiseGetter: () => __awaiter(this, void 0, void 0, function* () { + return yield downloadSegmentRetry(httpClient, archiveLocation, offset, count); + }) + }); + } + // reverse to use .pop instead of .shift + downloads.reverse(); + let actives = 0; + let bytesDownloaded = 0; + const progress = new DownloadProgress(length); + progress.startDisplayTimer(); + const progressFn = progress.onProgress(); + const activeDownloads = []; + let nextDownload; + const waitAndWrite = () => __awaiter(this, void 0, void 0, function* () { + const segment = yield Promise.race(Object.values(activeDownloads)); + yield archiveDescriptor.write(segment.buffer, 0, segment.count, segment.offset); + actives--; + delete activeDownloads[segment.offset]; + bytesDownloaded += segment.count; + progressFn({ loadedBytes: bytesDownloaded }); + }); + while ((nextDownload = downloads.pop())) { + activeDownloads[nextDownload.offset] = nextDownload.promiseGetter(); + actives++; + if (actives >= ((_a = options.downloadConcurrency) !== null && _a !== void 0 ? _a : 10)) { + yield waitAndWrite(); + } + } + while (actives > 0) { + yield waitAndWrite(); + } + } + finally { + httpClient.dispose(); + yield archiveDescriptor.close(); + } + }); +} +exports.downloadCacheHttpClientConcurrent = downloadCacheHttpClientConcurrent; +function downloadSegmentRetry(httpClient, archiveLocation, offset, count) { + return __awaiter(this, void 0, void 0, function* () { + const retries = 5; + let failures = 0; + while (true) { + try { + const timeout = 30000; + const result = yield promiseWithTimeout(timeout, downloadSegment(httpClient, archiveLocation, offset, count)); + if (typeof result === 'string') { + throw new Error('downloadSegmentRetry failed due to timeout'); + } + return result; + } + catch (err) { + if (failures >= retries) { + throw err; + } + failures++; + } + } + }); +} +function downloadSegment(httpClient, archiveLocation, offset, count) { + return __awaiter(this, void 0, void 0, function* () { + const partRes = yield (0, requestUtils_1.retryHttpClientResponse)('downloadCachePart', () => __awaiter(this, void 0, void 0, function* () { + return yield httpClient.get(archiveLocation, { + Range: `bytes=${offset}-${offset + count - 1}` + }); + })); + if (!partRes.readBodyBuffer) { + throw new Error('Expected HttpClientResponse to implement readBodyBuffer'); + } + return { + offset, + count, + buffer: yield partRes.readBodyBuffer() + }; + }); +} /** * Download the cache using the Azure Storage SDK. Only call this method if the * URL points to an Azure Storage endpoint. 
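downloadSegmentRetry above calls a promiseWithTimeout helper that this diff does not show. Its exact shape is an assumption here, but the timed-out-result-is-a-string convention the caller checks for implies something like the following, with the retry loop distilled alongside it:

// Assumed shape of promiseWithTimeout: race the work against a timer that
// resolves to a sentinel string rather than rejecting.
function promiseWithTimeout<T>(timeoutMs: number, promise: Promise<T>): Promise<T | string> {
    let timer: NodeJS.Timeout | undefined;
    const timeout = new Promise<string>(resolve => {
        timer = setTimeout(() => resolve("timeout"), timeoutMs);
    });
    return Promise.race([promise, timeout]).finally(() => clearTimeout(timer));
}

// The retry loop distilled from downloadSegmentRetry: up to five retries,
// treating a timed-out (string) result as a failure like any thrown error.
async function withRetries<T>(work: () => Promise<T>, retries = 5, timeoutMs = 30_000): Promise<T> {
    let failures = 0;
    for (;;) {
        try {
            const result = await promiseWithTimeout(timeoutMs, work());
            if (typeof result === "string") {
                throw new Error("segment download timed out");
            }
            return result;
        } catch (err) {
            if (failures >= retries) {
                throw err;
            }
            failures++;
        }
    }
}

Note that a timed-out attempt abandons the losing promise rather than cancelling it; that is precisely the kind of dangling promise the run()/earlyExit change in restoreImpl is there to outlive.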
@@ -35745,6 +35885,19 @@ class HttpClientResponse { })); }); } + readBodyBuffer() { + return __awaiter(this, void 0, void 0, function* () { + return new Promise((resolve) => __awaiter(this, void 0, void 0, function* () { + const chunks = []; + this.message.on('data', (chunk) => { + chunks.push(chunk); + }); + this.message.on('end', () => { + resolve(Buffer.concat(chunks)); + }); + })); + }); + } } exports.HttpClientResponse = HttpClientResponse; function isHttps(requestUrl) { @@ -40281,7 +40434,8 @@ exports.getUploadOptions = getUploadOptions; */ function getDownloadOptions(copy) { const result = { - useAzureSdk: true, + useAzureSdk: false, + concurrentBlobDownloads: true, downloadConcurrency: 8, timeoutInMs: 30000, segmentTimeoutInMs: 600000, @@ -40291,6 +40445,9 @@ function getDownloadOptions(copy) { if (typeof copy.useAzureSdk === 'boolean') { result.useAzureSdk = copy.useAzureSdk; } + if (typeof copy.concurrentBlobDownloads === 'boolean') { + result.concurrentBlobDownloads = copy.concurrentBlobDownloads; + } if (typeof copy.downloadConcurrency === 'number') { result.downloadConcurrency = copy.downloadConcurrency; } @@ -47453,28 +47610,9 @@ module.exports = function(dst, src) { "use strict"; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? 
mod : { "default": mod }; -}; Object.defineProperty(exports, "__esModule", { value: true }); -const restoreImpl_1 = __importDefault(__webpack_require__(835)); -const stateProvider_1 = __webpack_require__(309); -function run() { - return __awaiter(this, void 0, void 0, function* () { - yield (0, restoreImpl_1.default)(new stateProvider_1.StateProvider()); - }); -} -run(); -exports.default = run; +const restoreImpl_1 = __webpack_require__(835); +(0, restoreImpl_1.restoreRun)(true); /***/ }), @@ -49096,9 +49234,11 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge }); }; Object.defineProperty(exports, "__esModule", { value: true }); +exports.restoreRun = exports.restoreOnlyRun = exports.restoreImpl = void 0; const cache = __importStar(__webpack_require__(692)); const core = __importStar(__webpack_require__(470)); const constants_1 = __webpack_require__(694); +const stateProvider_1 = __webpack_require__(309); const utils = __importStar(__webpack_require__(443)); function restoreImpl(stateProvider) { return __awaiter(this, void 0, void 0, function* () { @@ -49149,7 +49289,40 @@ function restoreImpl(stateProvider) { } }); } -exports.default = restoreImpl; +exports.restoreImpl = restoreImpl; +function run(stateProvider, earlyExit) { + return __awaiter(this, void 0, void 0, function* () { + try { + yield restoreImpl(stateProvider); + } + catch (err) { + console.error(err); + if (earlyExit) { + process.exit(1); + } + } + // node will stay alive if any promises are not resolved, + // which is a possibility if HTTP requests are dangling + // due to retries or timeouts. We know that if we got here + // that all promises that we care about have successfully + // resolved, so simply exit with success. + if (earlyExit) { + process.exit(0); + } + }); +} +function restoreOnlyRun(earlyExit) { + return __awaiter(this, void 0, void 0, function* () { + yield run(new stateProvider_1.NullStateProvider(), earlyExit); + }); +} +exports.restoreOnlyRun = restoreOnlyRun; +function restoreRun(earlyExit) { + return __awaiter(this, void 0, void 0, function* () { + yield run(new stateProvider_1.StateProvider(), earlyExit); + }); +} +exports.restoreRun = restoreRun; /***/ }), diff --git a/dist/save-only/index.js b/dist/save-only/index.js index 9c0599f..2f50001 100644 --- a/dist/save-only/index.js +++ b/dist/save-only/index.js @@ -1183,35 +1183,42 @@ function getArchiveFileSizeInBytes(filePath) { } exports.getArchiveFileSizeInBytes = getArchiveFileSizeInBytes; function resolvePaths(patterns) { - var e_1, _a; - var _b; + var _a, e_1, _b, _c; + var _d; return __awaiter(this, void 0, void 0, function* () { const paths = []; - const workspace = (_b = process.env['GITHUB_WORKSPACE']) !== null && _b !== void 0 ? _b : process.cwd(); + const workspace = (_d = process.env['GITHUB_WORKSPACE']) !== null && _d !== void 0 ? _d : process.cwd(); const globber = yield glob.create(patterns.join('\n'), { implicitDescendants: false }); try { - for (var _c = __asyncValues(globber.globGenerator()), _d; _d = yield _c.next(), !_d.done;) { - const file = _d.value; - const relativeFile = path - .relative(workspace, file) - .replace(new RegExp(`\\${path.sep}`, 'g'), '/'); - core.debug(`Matched: ${relativeFile}`); - // Paths are made relative so the tar entries are all relative to the root of the workspace. 
- if (relativeFile === '') { - // path.relative returns empty string if workspace and file are equal - paths.push('.'); + for (var _e = true, _f = __asyncValues(globber.globGenerator()), _g; _g = yield _f.next(), _a = _g.done, !_a;) { + _c = _g.value; + _e = false; + try { + const file = _c; + const relativeFile = path + .relative(workspace, file) + .replace(new RegExp(`\\${path.sep}`, 'g'), '/'); + core.debug(`Matched: ${relativeFile}`); + // Paths are made relative so the tar entries are all relative to the root of the workspace. + if (relativeFile === '') { + // path.relative returns empty string if workspace and file are equal + paths.push('.'); + } + else { + paths.push(`${relativeFile}`); + } } - else { - paths.push(`${relativeFile}`); + finally { + _e = true; } } } catch (e_1_1) { e_1 = { error: e_1_1 }; } finally { try { - if (_d && !_d.done && (_a = _c.return)) yield _a.call(_c); + if (!_e && !_a && (_b = _f.return)) yield _b.call(_f); } finally { if (e_1) throw e_1.error; } } @@ -3450,10 +3457,7 @@ function getCacheVersion(paths, compressionMethod, enableCrossOsArchive = false) } // Add salt to cache version to support breaking changes in cache entry components.push(versionSalt); - return crypto - .createHash('sha256') - .update(components.join('|')) - .digest('hex'); + return crypto.createHash('sha256').update(components.join('|')).digest('hex'); } exports.getCacheVersion = getCacheVersion; function getCacheEntry(keys, paths, options) { @@ -3506,13 +3510,21 @@ function downloadCache(archiveLocation, archivePath, options) { return __awaiter(this, void 0, void 0, function* () { const archiveUrl = new url_1.URL(archiveLocation); const downloadOptions = (0, options_1.getDownloadOptions)(options); - if (downloadOptions.useAzureSdk && - archiveUrl.hostname.endsWith('.blob.core.windows.net')) { - // Use Azure storage SDK to download caches hosted on Azure to improve speed and reliability. - yield (0, downloadUtils_1.downloadCacheStorageSDK)(archiveLocation, archivePath, downloadOptions); + if (archiveUrl.hostname.endsWith('.blob.core.windows.net')) { + if (downloadOptions.useAzureSdk) { + // Use Azure storage SDK to download caches hosted on Azure to improve speed and reliability. + yield (0, downloadUtils_1.downloadCacheStorageSDK)(archiveLocation, archivePath, downloadOptions); + } + else if (downloadOptions.concurrentBlobDownloads) { + // Use concurrent implementation with HttpClient to work around blob SDK issue + yield (0, downloadUtils_1.downloadCacheHttpClientConcurrent)(archiveLocation, archivePath, downloadOptions); + } + else { + // Otherwise, download using the Actions http-client. + yield (0, downloadUtils_1.downloadCacheHttpClient)(archiveLocation, archivePath); + } } else { - // Otherwise, download using the Actions http-client. 
yield (0, downloadUtils_1.downloadCacheHttpClient)(archiveLocation, archivePath); } }); @@ -3545,9 +3557,7 @@ function getContentRange(start, end) { } function uploadChunk(httpClient, resourceUrl, openStream, start, end) { return __awaiter(this, void 0, void 0, function* () { - core.debug(`Uploading chunk of size ${end - - start + - 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`); + core.debug(`Uploading chunk of size ${end - start + 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`); const additionalHeaders = { 'Content-Type': 'application/octet-stream', 'Content-Range': getContentRange(start, end) @@ -4922,7 +4932,13 @@ function getProxyUrl(reqUrl) { } })(); if (proxyVar) { - return new URL(proxyVar); + try { + return new URL(proxyVar); + } + catch (_a) { + if (!proxyVar.startsWith('http://') && !proxyVar.startsWith('https://')) + return new URL(`http://${proxyVar}`); + } } else { return undefined; @@ -4933,6 +4949,10 @@ function checkBypass(reqUrl) { if (!reqUrl.hostname) { return false; } + const reqHost = reqUrl.hostname; + if (isLoopbackAddress(reqHost)) { + return true; + } const noProxy = process.env['no_proxy'] || process.env['NO_PROXY'] || ''; if (!noProxy) { return false; @@ -4958,13 +4978,24 @@ function checkBypass(reqUrl) { .split(',') .map(x => x.trim().toUpperCase()) .filter(x => x)) { - if (upperReqHosts.some(x => x === upperNoProxyItem)) { + if (upperNoProxyItem === '*' || + upperReqHosts.some(x => x === upperNoProxyItem || + x.endsWith(`.${upperNoProxyItem}`) || + (upperNoProxyItem.startsWith('.') && + x.endsWith(`${upperNoProxyItem}`)))) { return true; } } return false; } exports.checkBypass = checkBypass; +function isLoopbackAddress(host) { + const hostLower = host.toLowerCase(); + return (hostLower === 'localhost' || + hostLower.startsWith('127.') || + hostLower.startsWith('[::1]') || + hostLower.startsWith('[0:0:0:0:0:0:0:1]')); +} //# sourceMappingURL=proxy.js.map /***/ }), @@ -5613,7 +5644,7 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge }); }; Object.defineProperty(exports, "__esModule", { value: true }); -exports.downloadCacheStorageSDK = exports.downloadCacheHttpClient = exports.DownloadProgress = void 0; +exports.downloadCacheStorageSDK = exports.downloadCacheHttpClientConcurrent = exports.downloadCacheHttpClient = exports.DownloadProgress = void 0; const core = __importStar(__webpack_require__(470)); const http_client_1 = __webpack_require__(425); const storage_blob_1 = __webpack_require__(373); @@ -5770,6 +5801,115 @@ function downloadCacheHttpClient(archiveLocation, archivePath) { }); } exports.downloadCacheHttpClient = downloadCacheHttpClient; +/** + * Download the cache using the Actions toolkit http-client concurrently + * + * @param archiveLocation the URL for the cache + * @param archivePath the local path where the cache is saved + */ +function downloadCacheHttpClientConcurrent(archiveLocation, archivePath, options) { + var _a; + return __awaiter(this, void 0, void 0, function* () { + const archiveDescriptor = yield fs.promises.open(archivePath, 'w'); + const httpClient = new http_client_1.HttpClient('actions/cache', undefined, { + socketTimeout: options.timeoutInMs, + keepAlive: true + }); + try { + const res = yield (0, requestUtils_1.retryHttpClientResponse)('downloadCacheMetadata', () => __awaiter(this, void 0, void 0, function* () { return yield httpClient.request('HEAD', archiveLocation, null, {}); })); + const lengthHeader = 
res.message.headers['content-length']; + if (lengthHeader === undefined || lengthHeader === null) { + throw new Error('Content-Length not found on blob response'); + } + const length = parseInt(lengthHeader); + if (Number.isNaN(length)) { + throw new Error(`Could not interpret Content-Length: ${length}`); + } + const downloads = []; + const blockSize = 4 * 1024 * 1024; + for (let offset = 0; offset < length; offset += blockSize) { + const count = Math.min(blockSize, length - offset); + downloads.push({ + offset, + promiseGetter: () => __awaiter(this, void 0, void 0, function* () { + return yield downloadSegmentRetry(httpClient, archiveLocation, offset, count); + }) + }); + } + // reverse to use .pop instead of .shift + downloads.reverse(); + let actives = 0; + let bytesDownloaded = 0; + const progress = new DownloadProgress(length); + progress.startDisplayTimer(); + const progressFn = progress.onProgress(); + const activeDownloads = []; + let nextDownload; + const waitAndWrite = () => __awaiter(this, void 0, void 0, function* () { + const segment = yield Promise.race(Object.values(activeDownloads)); + yield archiveDescriptor.write(segment.buffer, 0, segment.count, segment.offset); + actives--; + delete activeDownloads[segment.offset]; + bytesDownloaded += segment.count; + progressFn({ loadedBytes: bytesDownloaded }); + }); + while ((nextDownload = downloads.pop())) { + activeDownloads[nextDownload.offset] = nextDownload.promiseGetter(); + actives++; + if (actives >= ((_a = options.downloadConcurrency) !== null && _a !== void 0 ? _a : 10)) { + yield waitAndWrite(); + } + } + while (actives > 0) { + yield waitAndWrite(); + } + } + finally { + httpClient.dispose(); + yield archiveDescriptor.close(); + } + }); +} +exports.downloadCacheHttpClientConcurrent = downloadCacheHttpClientConcurrent; +function downloadSegmentRetry(httpClient, archiveLocation, offset, count) { + return __awaiter(this, void 0, void 0, function* () { + const retries = 5; + let failures = 0; + while (true) { + try { + const timeout = 30000; + const result = yield promiseWithTimeout(timeout, downloadSegment(httpClient, archiveLocation, offset, count)); + if (typeof result === 'string') { + throw new Error('downloadSegmentRetry failed due to timeout'); + } + return result; + } + catch (err) { + if (failures >= retries) { + throw err; + } + failures++; + } + } + }); +} +function downloadSegment(httpClient, archiveLocation, offset, count) { + return __awaiter(this, void 0, void 0, function* () { + const partRes = yield (0, requestUtils_1.retryHttpClientResponse)('downloadCachePart', () => __awaiter(this, void 0, void 0, function* () { + return yield httpClient.get(archiveLocation, { + Range: `bytes=${offset}-${offset + count - 1}` + }); + })); + if (!partRes.readBodyBuffer) { + throw new Error('Expected HttpClientResponse to implement readBodyBuffer'); + } + return { + offset, + count, + buffer: yield partRes.readBodyBuffer() + }; + }); +} /** * Download the cache using the Azure Storage SDK. Only call this method if the * URL points to an Azure Storage endpoint. 
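The same http-client upgrade appears once per bundle; its checkBypass changes are easier to audit as a single condensed predicate. The sketch below folds in isLoopbackAddress and deliberately omits the real client's default-port handling, so treat it as an approximation rather than the library's API:

function bypassesProxy(reqUrl: URL, noProxy: string): boolean {
    const host = reqUrl.hostname.toLowerCase();
    // New behavior: loopback destinations never go through the proxy.
    if (
        host === "localhost" ||
        host.startsWith("127.") ||
        host.startsWith("[::1]") ||
        host.startsWith("[0:0:0:0:0:0:0:1]")
    ) {
        return true;
    }
    const upperHosts = [reqUrl.hostname.toUpperCase()];
    if (reqUrl.port) {
        upperHosts.push(`${reqUrl.hostname.toUpperCase()}:${reqUrl.port}`);
    }
    for (const item of noProxy.split(",").map(x => x.trim().toUpperCase()).filter(x => x)) {
        if (
            item === "*" || // new: wildcard bypasses everything
            upperHosts.some(
                h =>
                    h === item || // old behavior: exact match only
                    h.endsWith(`.${item}`) || // new: bare-domain suffix match
                    (item.startsWith(".") && h.endsWith(item)) // new: ".domain" suffix match
            )
        ) {
            return true;
        }
    }
    return false;
}

For example, bypassesProxy(new URL("http://127.0.0.1:8080/health"), "") and bypassesProxy(new URL("https://api.example.com/"), "example.com") both return true, where the previous code matched only exact hostnames. The companion getProxyUrl change is simpler: a proxy value without a scheme now gets http:// prefixed instead of throwing on new URL().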
@@ -35796,6 +35936,19 @@ class HttpClientResponse { })); }); } + readBodyBuffer() { + return __awaiter(this, void 0, void 0, function* () { + return new Promise((resolve) => __awaiter(this, void 0, void 0, function* () { + const chunks = []; + this.message.on('data', (chunk) => { + chunks.push(chunk); + }); + this.message.on('end', () => { + resolve(Buffer.concat(chunks)); + }); + })); + }); + } } exports.HttpClientResponse = HttpClientResponse; function isHttps(requestUrl) { @@ -40422,7 +40575,8 @@ exports.getUploadOptions = getUploadOptions; */ function getDownloadOptions(copy) { const result = { - useAzureSdk: true, + useAzureSdk: false, + concurrentBlobDownloads: true, downloadConcurrency: 8, timeoutInMs: 30000, segmentTimeoutInMs: 600000, @@ -40432,6 +40586,9 @@ function getDownloadOptions(copy) { if (typeof copy.useAzureSdk === 'boolean') { result.useAzureSdk = copy.useAzureSdk; } + if (typeof copy.concurrentBlobDownloads === 'boolean') { + result.concurrentBlobDownloads = copy.concurrentBlobDownloads; + } if (typeof copy.downloadConcurrency === 'number') { result.downloadConcurrency = copy.downloadConcurrency; } diff --git a/dist/save/index.js b/dist/save/index.js index becfe13..59a4459 100644 --- a/dist/save/index.js +++ b/dist/save/index.js @@ -1127,35 +1127,42 @@ function getArchiveFileSizeInBytes(filePath) { } exports.getArchiveFileSizeInBytes = getArchiveFileSizeInBytes; function resolvePaths(patterns) { - var e_1, _a; - var _b; + var _a, e_1, _b, _c; + var _d; return __awaiter(this, void 0, void 0, function* () { const paths = []; - const workspace = (_b = process.env['GITHUB_WORKSPACE']) !== null && _b !== void 0 ? _b : process.cwd(); + const workspace = (_d = process.env['GITHUB_WORKSPACE']) !== null && _d !== void 0 ? _d : process.cwd(); const globber = yield glob.create(patterns.join('\n'), { implicitDescendants: false }); try { - for (var _c = __asyncValues(globber.globGenerator()), _d; _d = yield _c.next(), !_d.done;) { - const file = _d.value; - const relativeFile = path - .relative(workspace, file) - .replace(new RegExp(`\\${path.sep}`, 'g'), '/'); - core.debug(`Matched: ${relativeFile}`); - // Paths are made relative so the tar entries are all relative to the root of the workspace. - if (relativeFile === '') { - // path.relative returns empty string if workspace and file are equal - paths.push('.'); + for (var _e = true, _f = __asyncValues(globber.globGenerator()), _g; _g = yield _f.next(), _a = _g.done, !_a;) { + _c = _g.value; + _e = false; + try { + const file = _c; + const relativeFile = path + .relative(workspace, file) + .replace(new RegExp(`\\${path.sep}`, 'g'), '/'); + core.debug(`Matched: ${relativeFile}`); + // Paths are made relative so the tar entries are all relative to the root of the workspace. 
+ if (relativeFile === '') { + // path.relative returns empty string if workspace and file are equal + paths.push('.'); + } + else { + paths.push(`${relativeFile}`); + } } - else { - paths.push(`${relativeFile}`); + finally { + _e = true; } } } catch (e_1_1) { e_1 = { error: e_1_1 }; } finally { try { - if (_d && !_d.done && (_a = _c.return)) yield _a.call(_c); + if (!_e && !_a && (_b = _f.return)) yield _b.call(_f); } finally { if (e_1) throw e_1.error; } } @@ -3394,10 +3401,7 @@ function getCacheVersion(paths, compressionMethod, enableCrossOsArchive = false) } // Add salt to cache version to support breaking changes in cache entry components.push(versionSalt); - return crypto - .createHash('sha256') - .update(components.join('|')) - .digest('hex'); + return crypto.createHash('sha256').update(components.join('|')).digest('hex'); } exports.getCacheVersion = getCacheVersion; function getCacheEntry(keys, paths, options) { @@ -3450,13 +3454,21 @@ function downloadCache(archiveLocation, archivePath, options) { return __awaiter(this, void 0, void 0, function* () { const archiveUrl = new url_1.URL(archiveLocation); const downloadOptions = (0, options_1.getDownloadOptions)(options); - if (downloadOptions.useAzureSdk && - archiveUrl.hostname.endsWith('.blob.core.windows.net')) { - // Use Azure storage SDK to download caches hosted on Azure to improve speed and reliability. - yield (0, downloadUtils_1.downloadCacheStorageSDK)(archiveLocation, archivePath, downloadOptions); + if (archiveUrl.hostname.endsWith('.blob.core.windows.net')) { + if (downloadOptions.useAzureSdk) { + // Use Azure storage SDK to download caches hosted on Azure to improve speed and reliability. + yield (0, downloadUtils_1.downloadCacheStorageSDK)(archiveLocation, archivePath, downloadOptions); + } + else if (downloadOptions.concurrentBlobDownloads) { + // Use concurrent implementation with HttpClient to work around blob SDK issue + yield (0, downloadUtils_1.downloadCacheHttpClientConcurrent)(archiveLocation, archivePath, downloadOptions); + } + else { + // Otherwise, download using the Actions http-client. + yield (0, downloadUtils_1.downloadCacheHttpClient)(archiveLocation, archivePath); + } } else { - // Otherwise, download using the Actions http-client. 
yield (0, downloadUtils_1.downloadCacheHttpClient)(archiveLocation, archivePath); } }); @@ -3489,9 +3501,7 @@ function getContentRange(start, end) { } function uploadChunk(httpClient, resourceUrl, openStream, start, end) { return __awaiter(this, void 0, void 0, function* () { - core.debug(`Uploading chunk of size ${end - - start + - 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`); + core.debug(`Uploading chunk of size ${end - start + 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`); const additionalHeaders = { 'Content-Type': 'application/octet-stream', 'Content-Range': getContentRange(start, end) @@ -4866,7 +4876,13 @@ function getProxyUrl(reqUrl) { } })(); if (proxyVar) { - return new URL(proxyVar); + try { + return new URL(proxyVar); + } + catch (_a) { + if (!proxyVar.startsWith('http://') && !proxyVar.startsWith('https://')) + return new URL(`http://${proxyVar}`); + } } else { return undefined; @@ -4877,6 +4893,10 @@ function checkBypass(reqUrl) { if (!reqUrl.hostname) { return false; } + const reqHost = reqUrl.hostname; + if (isLoopbackAddress(reqHost)) { + return true; + } const noProxy = process.env['no_proxy'] || process.env['NO_PROXY'] || ''; if (!noProxy) { return false; @@ -4902,13 +4922,24 @@ function checkBypass(reqUrl) { .split(',') .map(x => x.trim().toUpperCase()) .filter(x => x)) { - if (upperReqHosts.some(x => x === upperNoProxyItem)) { + if (upperNoProxyItem === '*' || + upperReqHosts.some(x => x === upperNoProxyItem || + x.endsWith(`.${upperNoProxyItem}`) || + (upperNoProxyItem.startsWith('.') && + x.endsWith(`${upperNoProxyItem}`)))) { return true; } } return false; } exports.checkBypass = checkBypass; +function isLoopbackAddress(host) { + const hostLower = host.toLowerCase(); + return (hostLower === 'localhost' || + hostLower.startsWith('127.') || + hostLower.startsWith('[::1]') || + hostLower.startsWith('[0:0:0:0:0:0:0:1]')); +} //# sourceMappingURL=proxy.js.map /***/ }), @@ -5557,7 +5588,7 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge }); }; Object.defineProperty(exports, "__esModule", { value: true }); -exports.downloadCacheStorageSDK = exports.downloadCacheHttpClient = exports.DownloadProgress = void 0; +exports.downloadCacheStorageSDK = exports.downloadCacheHttpClientConcurrent = exports.downloadCacheHttpClient = exports.DownloadProgress = void 0; const core = __importStar(__webpack_require__(470)); const http_client_1 = __webpack_require__(425); const storage_blob_1 = __webpack_require__(373); @@ -5714,6 +5745,115 @@ function downloadCacheHttpClient(archiveLocation, archivePath) { }); } exports.downloadCacheHttpClient = downloadCacheHttpClient; +/** + * Download the cache using the Actions toolkit http-client concurrently + * + * @param archiveLocation the URL for the cache + * @param archivePath the local path where the cache is saved + */ +function downloadCacheHttpClientConcurrent(archiveLocation, archivePath, options) { + var _a; + return __awaiter(this, void 0, void 0, function* () { + const archiveDescriptor = yield fs.promises.open(archivePath, 'w'); + const httpClient = new http_client_1.HttpClient('actions/cache', undefined, { + socketTimeout: options.timeoutInMs, + keepAlive: true + }); + try { + const res = yield (0, requestUtils_1.retryHttpClientResponse)('downloadCacheMetadata', () => __awaiter(this, void 0, void 0, function* () { return yield httpClient.request('HEAD', archiveLocation, null, {}); })); + const lengthHeader = 
res.message.headers['content-length']; + if (lengthHeader === undefined || lengthHeader === null) { + throw new Error('Content-Length not found on blob response'); + } + const length = parseInt(lengthHeader); + if (Number.isNaN(length)) { + throw new Error(`Could not interpret Content-Length: ${length}`); + } + const downloads = []; + const blockSize = 4 * 1024 * 1024; + for (let offset = 0; offset < length; offset += blockSize) { + const count = Math.min(blockSize, length - offset); + downloads.push({ + offset, + promiseGetter: () => __awaiter(this, void 0, void 0, function* () { + return yield downloadSegmentRetry(httpClient, archiveLocation, offset, count); + }) + }); + } + // reverse to use .pop instead of .shift + downloads.reverse(); + let actives = 0; + let bytesDownloaded = 0; + const progress = new DownloadProgress(length); + progress.startDisplayTimer(); + const progressFn = progress.onProgress(); + const activeDownloads = []; + let nextDownload; + const waitAndWrite = () => __awaiter(this, void 0, void 0, function* () { + const segment = yield Promise.race(Object.values(activeDownloads)); + yield archiveDescriptor.write(segment.buffer, 0, segment.count, segment.offset); + actives--; + delete activeDownloads[segment.offset]; + bytesDownloaded += segment.count; + progressFn({ loadedBytes: bytesDownloaded }); + }); + while ((nextDownload = downloads.pop())) { + activeDownloads[nextDownload.offset] = nextDownload.promiseGetter(); + actives++; + if (actives >= ((_a = options.downloadConcurrency) !== null && _a !== void 0 ? _a : 10)) { + yield waitAndWrite(); + } + } + while (actives > 0) { + yield waitAndWrite(); + } + } + finally { + httpClient.dispose(); + yield archiveDescriptor.close(); + } + }); +} +exports.downloadCacheHttpClientConcurrent = downloadCacheHttpClientConcurrent; +function downloadSegmentRetry(httpClient, archiveLocation, offset, count) { + return __awaiter(this, void 0, void 0, function* () { + const retries = 5; + let failures = 0; + while (true) { + try { + const timeout = 30000; + const result = yield promiseWithTimeout(timeout, downloadSegment(httpClient, archiveLocation, offset, count)); + if (typeof result === 'string') { + throw new Error('downloadSegmentRetry failed due to timeout'); + } + return result; + } + catch (err) { + if (failures >= retries) { + throw err; + } + failures++; + } + } + }); +} +function downloadSegment(httpClient, archiveLocation, offset, count) { + return __awaiter(this, void 0, void 0, function* () { + const partRes = yield (0, requestUtils_1.retryHttpClientResponse)('downloadCachePart', () => __awaiter(this, void 0, void 0, function* () { + return yield httpClient.get(archiveLocation, { + Range: `bytes=${offset}-${offset + count - 1}` + }); + })); + if (!partRes.readBodyBuffer) { + throw new Error('Expected HttpClientResponse to implement readBodyBuffer'); + } + return { + offset, + count, + buffer: yield partRes.readBodyBuffer() + }; + }); +} /** * Download the cache using the Azure Storage SDK. Only call this method if the * URL points to an Azure Storage endpoint. 
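
A note for reviewers before the bundle continues: `downloadCacheHttpClientConcurrent` above splits the blob into 4 MiB ranged GETs, keeps a bounded number of requests in flight, and uses `Promise.race` to write whichever segment finishes first at its recorded offset. Stripped of the toolkit plumbing, the scheduling pattern is roughly the sketch below; `downloadConcurrent` and `fetchSegment` are illustrative names, not toolkit API.

    import type { FileHandle } from "fs/promises";

    interface Segment {
        offset: number;
        count: number;
        buffer: Buffer;
    }

    // Sketch of the bounded-concurrency scheduling used above. fetchSegment
    // stands in for the ranged GET that downloadSegmentRetry performs.
    async function downloadConcurrent(
        file: FileHandle,
        length: number,
        concurrency: number,
        fetchSegment: (offset: number, count: number) => Promise<Segment>
    ): Promise<void> {
        const blockSize = 4 * 1024 * 1024; // 4 MiB blocks, as in the bundled code
        const queue: Array<{ offset: number; count: number }> = [];
        for (let offset = 0; offset < length; offset += blockSize) {
            queue.push({ offset, count: Math.min(blockSize, length - offset) });
        }

        // In-flight downloads keyed by offset, so the winner of each race can
        // be removed once its bytes are written at the right position.
        const active = new Map<number, Promise<Segment>>();
        const drainOne = async (): Promise<void> => {
            const done = await Promise.race(active.values());
            active.delete(done.offset);
            await file.write(done.buffer, 0, done.count, done.offset);
        };

        for (const { offset, count } of queue) {
            active.set(offset, fetchSegment(offset, count));
            if (active.size >= concurrency) {
                await drainOne(); // cap the number of concurrent requests
            }
        }
        while (active.size > 0) {
            await drainOne(); // flush the remaining segments
        }
    }

The bundled version additionally wraps each segment in downloadSegmentRetry, racing every attempt against a 30-second promiseWithTimeout and retrying up to five times, so a single stalled connection no longer wedges the whole download.
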
@@ -35740,6 +35880,19 @@ class HttpClientResponse { })); }); } + readBodyBuffer() { + return __awaiter(this, void 0, void 0, function* () { + return new Promise((resolve) => __awaiter(this, void 0, void 0, function* () { + const chunks = []; + this.message.on('data', (chunk) => { + chunks.push(chunk); + }); + this.message.on('end', () => { + resolve(Buffer.concat(chunks)); + }); + })); + }); + } } exports.HttpClientResponse = HttpClientResponse; function isHttps(requestUrl) { @@ -40366,7 +40519,8 @@ exports.getUploadOptions = getUploadOptions; */ function getDownloadOptions(copy) { const result = { - useAzureSdk: true, + useAzureSdk: false, + concurrentBlobDownloads: true, downloadConcurrency: 8, timeoutInMs: 30000, segmentTimeoutInMs: 600000, @@ -40376,6 +40530,9 @@ function getDownloadOptions(copy) { if (typeof copy.useAzureSdk === 'boolean') { result.useAzureSdk = copy.useAzureSdk; } + if (typeof copy.concurrentBlobDownloads === 'boolean') { + result.concurrentBlobDownloads = copy.concurrentBlobDownloads; + } if (typeof copy.downloadConcurrency === 'number') { result.downloadConcurrency = copy.downloadConcurrency; } diff --git a/package-lock.json b/package-lock.json index c7f83ee..050ff43 100644 --- a/package-lock.json +++ b/package-lock.json @@ -9,7 +9,7 @@ "version": "3.3.1", "license": "MIT", "dependencies": { - "@actions/cache": "^3.2.1", + "@actions/cache": "^3.2.2", "@actions/core": "^1.10.0", "@actions/exec": "^1.1.1", "@actions/io": "^1.1.2" @@ -36,14 +36,14 @@ } }, "node_modules/@actions/cache": { - "version": "3.2.1", - "resolved": "https://registry.npmjs.org/@actions/cache/-/cache-3.2.1.tgz", - "integrity": "sha512-QurbMiY//02+0kN1adJkMHN44RcZ5kAXfhSnKUZmtSmhMTNqLitGArG1xOkt93NNyByTlLGAc5wIOF/dZ2ENOQ==", + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/@actions/cache/-/cache-3.2.2.tgz", + "integrity": "sha512-6D0Jq5JrLZRQ3VApeQwQkkV20ZZXjXsHNYXd9VjNUdi9E0h93wESpxfMJ2JWLCUCgHNLcfY0v3GjNM+2FdRMlg==", "dependencies": { "@actions/core": "^1.10.0", "@actions/exec": "^1.0.1", "@actions/glob": "^0.1.0", - "@actions/http-client": "^2.0.1", + "@actions/http-client": "^2.1.1", "@actions/io": "^1.0.1", "@azure/abort-controller": "^1.1.0", "@azure/ms-rest-js": "^2.6.0", @@ -87,9 +87,9 @@ } }, "node_modules/@actions/http-client": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/@actions/http-client/-/http-client-2.0.1.tgz", - "integrity": "sha512-PIXiMVtz6VvyaRsGY268qvj57hXQEpsYogYOu2nrQhlf+XCGmZstmuZBbAybUl1nQGnvS1k1eEsQ69ZoD7xlSw==", + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/@actions/http-client/-/http-client-2.1.1.tgz", + "integrity": "sha512-qhrkRMB40bbbLo7gF+0vu+X+UawOvQQqNAA/5Unx774RS8poaOhThDOG6BGmxvAnxhQnDp2BG/ZUm65xZILTpw==", "dependencies": { "tunnel": "^0.0.6" } @@ -9707,14 +9707,14 @@ }, "dependencies": { "@actions/cache": { - "version": "3.2.1", - "resolved": "https://registry.npmjs.org/@actions/cache/-/cache-3.2.1.tgz", - "integrity": "sha512-QurbMiY//02+0kN1adJkMHN44RcZ5kAXfhSnKUZmtSmhMTNqLitGArG1xOkt93NNyByTlLGAc5wIOF/dZ2ENOQ==", + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/@actions/cache/-/cache-3.2.2.tgz", + "integrity": "sha512-6D0Jq5JrLZRQ3VApeQwQkkV20ZZXjXsHNYXd9VjNUdi9E0h93wESpxfMJ2JWLCUCgHNLcfY0v3GjNM+2FdRMlg==", "requires": { "@actions/core": "^1.10.0", "@actions/exec": "^1.0.1", "@actions/glob": "^0.1.0", - "@actions/http-client": "^2.0.1", + "@actions/http-client": "^2.1.1", "@actions/io": "^1.0.1", "@azure/abort-controller": "^1.1.0", "@azure/ms-rest-js": "^2.6.0", @@ -9757,9 +9757,9 @@ } 
},
     "@actions/http-client": {
-      "version": "2.0.1",
-      "resolved": "https://registry.npmjs.org/@actions/http-client/-/http-client-2.0.1.tgz",
-      "integrity": "sha512-PIXiMVtz6VvyaRsGY268qvj57hXQEpsYogYOu2nrQhlf+XCGmZstmuZBbAybUl1nQGnvS1k1eEsQ69ZoD7xlSw==",
+      "version": "2.1.1",
+      "resolved": "https://registry.npmjs.org/@actions/http-client/-/http-client-2.1.1.tgz",
+      "integrity": "sha512-qhrkRMB40bbbLo7gF+0vu+X+UawOvQQqNAA/5Unx774RS8poaOhThDOG6BGmxvAnxhQnDp2BG/ZUm65xZILTpw==",
       "requires": {
         "tunnel": "^0.0.6"
       }
diff --git a/package.json b/package.json
index 83fb161..1dcf874 100644
--- a/package.json
+++ b/package.json
@@ -23,7 +23,7 @@
   "author": "GitHub",
   "license": "MIT",
   "dependencies": {
-    "@actions/cache": "^3.2.1",
+    "@actions/cache": "^3.2.2",
     "@actions/core": "^1.10.0",
     "@actions/exec": "^1.1.1",
     "@actions/io": "^1.1.2"
diff --git a/src/restore.ts b/src/restore.ts
index 96a527a..4c53c55 100644
--- a/src/restore.ts
+++ b/src/restore.ts
@@ -1,10 +1,3 @@
-import restoreImpl from "./restoreImpl";
-import { StateProvider } from "./stateProvider";
+import { restoreRun } from "./restoreImpl";

-async function run(): Promise<void> {
-    await restoreImpl(new StateProvider());
-}
-
-run();
-
-export default run;
+restoreRun(true);
diff --git a/src/restoreImpl.ts b/src/restoreImpl.ts
index 797bc74..0aff57a 100644
--- a/src/restoreImpl.ts
+++ b/src/restoreImpl.ts
@@ -2,10 +2,14 @@ import * as cache from "@actions/cache";
 import * as core from "@actions/core";

 import { Events, Inputs, Outputs, State } from "./constants";
-import { IStateProvider } from "./stateProvider";
+import {
+    IStateProvider,
+    NullStateProvider,
+    StateProvider
+} from "./stateProvider";
 import * as utils from "./utils/actionUtils";

-async function restoreImpl(
+export async function restoreImpl(
     stateProvider: IStateProvider
 ): Promise<void> {
     try {
@@ -82,4 +86,37 @@
     }
 }

-export default restoreImpl;
+async function run(
+    stateProvider: IStateProvider,
+    earlyExit: boolean | undefined
+): Promise<void> {
+    try {
+        await restoreImpl(stateProvider);
+    } catch (err) {
+        console.error(err);
+        if (earlyExit) {
+            process.exit(1);
+        }
+    }
+
+    // node will stay alive if any promises are not resolved,
+    // which is a possibility if HTTP requests are dangling
+    // due to retries or timeouts. We know that if we got here
+    // that all promises that we care about have successfully
+    // resolved, so simply exit with success.
+    if (earlyExit) {
+        process.exit(0);
+    }
+}
+
+export async function restoreOnlyRun(
+    earlyExit?: boolean | undefined
+): Promise<void> {
+    await run(new NullStateProvider(), earlyExit);
+}
+
+export async function restoreRun(
+    earlyExit?: boolean | undefined
+): Promise<void> {
+    await run(new StateProvider(), earlyExit);
+}
diff --git a/src/restoreOnly.ts b/src/restoreOnly.ts
index af93e6d..c773a4c 100644
--- a/src/restoreOnly.ts
+++ b/src/restoreOnly.ts
@@ -1,10 +1,3 @@
-import restoreImpl from "./restoreImpl";
-import { NullStateProvider } from "./stateProvider";
+import { restoreOnlyRun } from "./restoreImpl";

-async function run(): Promise<void> {
-    await restoreImpl(new NullStateProvider());
-}
-
-run();
-
-export default run;
+restoreOnlyRun(true);

From 667d8fdfa213683cee9fcaad43be0f25322bef02 Mon Sep 17 00:00:00 2001
From: bethanyj28
Date: Wed, 6 Sep 2023 14:15:33 -0400
Subject: [PATCH 3/4] bump action version to 3.3.2

---
 package-lock.json | 4 ++--
 package.json      | 2 +-
 2 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/package-lock.json b/package-lock.json
index 050ff43..5179d9b 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -1,12 +1,12 @@
 {
   "name": "cache",
-  "version": "3.3.1",
+  "version": "3.3.2",
   "lockfileVersion": 2,
   "requires": true,
   "packages": {
     "": {
       "name": "cache",
-      "version": "3.3.1",
+      "version": "3.3.2",
       "license": "MIT",
       "dependencies": {
         "@actions/cache": "^3.2.2",
diff --git a/package.json b/package.json
index 1dcf874..daa7b26 100644
--- a/package.json
+++ b/package.json
@@ -1,6 +1,6 @@
 {
   "name": "cache",
-  "version": "3.3.1",
+  "version": "3.3.2",
   "private": true,
   "description": "Cache dependencies and build outputs",
   "main": "dist/restore/index.js",

From 17e2888746ae4cdbac78cbb1a45a157d310c0e53 Mon Sep 17 00:00:00 2001
From: bethanyj28
Date: Wed, 6 Sep 2023 14:41:49 -0400
Subject: [PATCH 4/4] Add to RELEASES.md

---
 RELEASES.md | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/RELEASES.md b/RELEASES.md
index d4016e9..5efe301 100644
--- a/RELEASES.md
+++ b/RELEASES.md
@@ -107,3 +107,7 @@
 ### 3.3.1

 - Reduced segment size to 128MB and segment timeout to 10 minutes to fail fast in case the cache download is stuck.
+
+### 3.3.2
+
+- Fixes bug with Azure SDK causing blob downloads to get stuck.
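
A few notes on pieces of this series that are easy to miss inside the bundled dist/ output. First, the dangling-promise fix: Node stays alive as long as any promise or socket is pending, so a stray HTTP retry inside the toolkit could hang the action after its real work had finished. The run() wrapper added to src/restoreImpl.ts in patch 2 therefore exits explicitly. Reduced to its essentials, with doWork standing in for restoreImpl(stateProvider):

    // Condensed form of the run() wrapper from src/restoreImpl.ts above;
    // doWork is a hypothetical stand-in for restoreImpl(stateProvider).
    async function run(
        doWork: () => Promise<void>,
        earlyExit?: boolean
    ): Promise<void> {
        try {
            await doWork();
        } catch (err) {
            console.error(err);
            if (earlyExit) {
                process.exit(1); // fail the step without waiting on stray promises
            }
        }
        // Everything awaited above has resolved, so exit explicitly rather
        // than letting a dangling socket or retry timer keep the process alive.
        if (earlyExit) {
            process.exit(0);
        }
    }

The entry points (src/restore.ts, src/restoreOnly.ts) pass earlyExit = true, while the tests call restoreRun() and restoreOnlyRun() with it unset so Jest keeps control of the process.
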
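Second, getDownloadOptions now defaults to useAzureSdk: false with concurrentBlobDownloads: true, so Azure-hosted caches take the new concurrent http-client path unless a caller opts back in. A usage sketch for consumers of @actions/cache (the paths and key below are illustrative only):

    import * as cache from "@actions/cache";

    async function example(): Promise<void> {
        // Defaults come from getDownloadOptions above; pass options to override.
        const hit = await cache.restoreCache(
            ["node_modules"],          // illustrative cache paths
            "npm-linux-abc123",        // illustrative primary key
            [],
            { useAzureSdk: true }      // opt back into the Azure Storage SDK path
        );
        console.log(hit ? `restored ${hit}` : "cache miss");
    }
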
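Finally, the http-client 2.1.1 bump bundled in dist/ also changes proxy handling: loopback hosts now always bypass the proxy, and NO_PROXY entries match as domain suffixes, with * matching everything. Restated approximately (the bundled checkBypass additionally matches host:port variants and compares upper-cased):

    // Simplified restatement of the bypass rules in the bundled proxy.js;
    // bypassesProxy is an illustrative name, not the exported API.
    function bypassesProxy(hostname: string, noProxy: string): boolean {
        const host = hostname.toLowerCase();
        const loopback =
            host === "localhost" ||
            host.startsWith("127.") ||
            host.startsWith("[::1]") ||
            host.startsWith("[0:0:0:0:0:0:0:1]");
        if (loopback) {
            return true; // loopback traffic never goes through the proxy
        }
        return noProxy
            .split(",")
            .map(x => x.trim().toLowerCase())
            .filter(x => x)
            .some(
                item =>
                    item === "*" ||              // wildcard bypasses everything
                    host === item ||             // exact hostname match
                    host.endsWith(`.${item}`) || // subdomain of a listed domain
                    (item.startsWith(".") && host.endsWith(item)) // ".example.com" form
            );
    }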