From 940f3d7cf195ba83374c77632d1e2cbb2f24ae68 Mon Sep 17 00:00:00 2001 From: Marc Mueller <30130371+cdce8p@users.noreply.github.com> Date: Thu, 9 Mar 2023 13:30:28 +0100 Subject: [PATCH] Add `lookup-only` option (#1041) * Add new actions/cache version (with dryRun support) * Add dry-run option * Changes after rebase * Update readme * Rename option to lookup-only * Update test name * Update package.json + changelog * Update README * Update custom package version * Update custom package version * Update @actions/cache to 3.2.0 * Code review * Update log statement * Move test case --------- Co-authored-by: Sankalp Kotewar <98868223+kotewar@users.noreply.github.com> --- .licenses/npm/@actions/cache.dep.yml | 2 +- README.md | 2 + RELEASES.md | 3 + __tests__/restore.test.ts | 40 ++++++++-- __tests__/restoreImpl.test.ts | 107 +++++++++++++++++++++++++-- __tests__/restoreOnly.test.ts | 28 ++++++- action.yml | 4 + dist/restore-only/index.js | 24 +++++- dist/restore/index.js | 24 +++++- dist/save-only/index.js | 14 +++- dist/save/index.js | 14 +++- package-lock.json | 18 ++--- package.json | 4 +- restore/README.md | 3 +- restore/action.yml | 4 + src/constants.ts | 3 +- src/restoreImpl.ts | 9 ++- src/utils/testUtils.ts | 4 + 18 files changed, 260 insertions(+), 47 deletions(-) diff --git a/.licenses/npm/@actions/cache.dep.yml b/.licenses/npm/@actions/cache.dep.yml index ddbc316..bde674f 100644 --- a/.licenses/npm/@actions/cache.dep.yml +++ b/.licenses/npm/@actions/cache.dep.yml @@ -1,6 +1,6 @@ --- name: "@actions/cache" -version: 3.1.4 +version: 3.2.0 type: npm summary: homepage: diff --git a/README.md b/README.md index b509076..9852657 100644 --- a/README.md +++ b/README.md @@ -32,6 +32,7 @@ See ["Caching dependencies to speed up workflows"](https://docs.github.com/en/ac * Support cross-os caching as an opt-in feature. See [Cross OS caching](./tips-and-workarounds.md#cross-os-cache) for more info. * Added option to fail job on cache miss. See [Exit workflow on cache miss](./restore/README.md#exit-workflow-on-cache-miss) for more info. * Fix zstd not being used after zstd version upgrade to 1.5.4 on hosted runners +* Added option to lookup cache without downloading it. See the [v2 README.md](https://github.com/actions/cache/blob/v2/README.md) for older updates. @@ -52,6 +53,7 @@ If you are using a `self-hosted` Windows runner, `GNU tar` and `zstd` are requir * `restore-keys` - An ordered list of prefix-matched keys to use for restoring stale cache if no cache hit occurred for key. * `enableCrossOsArchive` - An optional boolean when enabled, allows Windows runners to save or restore caches that can be restored or saved respectively on other platforms. Default: `false` * `fail-on-cache-miss` - Fail the workflow if cache entry is not found. Default: `false` +* `lookup-only` - Skip downloading cache. Only check if cache entry exists. Default: `false` #### Environment Variables diff --git a/RELEASES.md b/RELEASES.md index a06ec0f..8239921 100644 --- a/RELEASES.md +++ b/RELEASES.md @@ -76,3 +76,6 @@ ### 3.2.6 - Fix zstd not being used after zstd version upgrade to 1.5.4 on hosted runners. + +### 3.3.0 +- Added option to lookup cache without downloading it. 
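For illustration, here is a minimal sketch of how the new input might be used in a cache-warming job, i.e. a job whose only purpose is to keep an entry populated and which does not itself need the cached files (since `lookup-only` never downloads them). The step ids, path, and key format below are hypothetical and not part of this change:

```yaml
# Hypothetical cache-warming steps (ids, path, and key are illustrative).
# If a matching entry already exists, skip the install entirely; otherwise
# install so the post step of actions/cache saves a fresh entry on job end.
- name: Check for existing dependency cache
  id: dep-cache
  uses: actions/cache@v3
  with:
    path: node_modules
    key: node-modules-${{ hashFiles('package-lock.json') }}
    lookup-only: 'true'

- name: Install dependencies to populate the cache
  if: steps.dep-cache.outputs.cache-hit != 'true'
  run: npm ci
```

Because the lookup sets the same `cache-hit` output and saved state as a normal restore, the existing post-job save logic still only uploads when no exact match was found.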
diff --git a/__tests__/restore.test.ts b/__tests__/restore.test.ts index 5d7eaab..c51c293 100644 --- a/__tests__/restore.test.ts +++ b/__tests__/restore.test.ts @@ -74,7 +74,15 @@ test("restore with no cache found", async () => { await run(); expect(restoreCacheMock).toHaveBeenCalledTimes(1); - expect(restoreCacheMock).toHaveBeenCalledWith([path], key, [], {}, false); + expect(restoreCacheMock).toHaveBeenCalledWith( + [path], + key, + [], + { + lookupOnly: false + }, + false + ); expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key); expect(stateMock).toHaveBeenCalledTimes(1); @@ -113,7 +121,9 @@ test("restore with restore keys and no cache found", async () => { [path], key, [restoreKey], - {}, + { + lookupOnly: false + }, false ); @@ -149,7 +159,15 @@ test("restore with cache found for key", async () => { await run(); expect(restoreCacheMock).toHaveBeenCalledTimes(1); - expect(restoreCacheMock).toHaveBeenCalledWith([path], key, [], {}, false); + expect(restoreCacheMock).toHaveBeenCalledWith( + [path], + key, + [], + { + lookupOnly: false + }, + false + ); expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key); expect(stateMock).toHaveBeenCalledWith("CACHE_RESULT", key); @@ -190,7 +208,9 @@ test("restore with cache found for restore key", async () => { [path], key, [restoreKey], - {}, + { + lookupOnly: false + }, false ); @@ -233,7 +253,9 @@ test("Fail restore when fail on cache miss is enabled and primary + restore keys [path], key, [restoreKey], - {}, + { + lookupOnly: false + }, false ); @@ -274,7 +296,9 @@ test("restore when fail on cache miss is enabled and primary key doesn't match r [path], key, [restoreKey], - {}, + { + lookupOnly: false + }, false ); @@ -318,7 +342,9 @@ test("restore with fail on cache miss disabled and no cache found", async () => [path], key, [restoreKey], - {}, + { + lookupOnly: false + }, false ); diff --git a/__tests__/restoreImpl.test.ts b/__tests__/restoreImpl.test.ts index 9bc4fc3..d6f13ba 100644 --- a/__tests__/restoreImpl.test.ts +++ b/__tests__/restoreImpl.test.ts @@ -122,7 +122,15 @@ test("restore on GHES with AC available ", async () => { await run(new StateProvider()); expect(restoreCacheMock).toHaveBeenCalledTimes(1); - expect(restoreCacheMock).toHaveBeenCalledWith([path], key, [], {}, false); + expect(restoreCacheMock).toHaveBeenCalledWith( + [path], + key, + [], + { + lookupOnly: false + }, + false + ); expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key); expect(setCacheHitOutputMock).toHaveBeenCalledTimes(1); @@ -172,7 +180,9 @@ test("restore with too many keys should fail", async () => { [path], key, restoreKeys, - {}, + { + lookupOnly: false + }, false ); expect(failedMock).toHaveBeenCalledWith( @@ -192,7 +202,15 @@ test("restore with large key should fail", async () => { const restoreCacheMock = jest.spyOn(cache, "restoreCache"); await run(new StateProvider()); expect(restoreCacheMock).toHaveBeenCalledTimes(1); - expect(restoreCacheMock).toHaveBeenCalledWith([path], key, [], {}, false); + expect(restoreCacheMock).toHaveBeenCalledWith( + [path], + key, + [], + { + lookupOnly: false + }, + false + ); expect(failedMock).toHaveBeenCalledWith( `Key Validation Error: ${key} cannot be larger than 512 characters.` ); @@ -210,7 +228,15 @@ test("restore with invalid key should fail", async () => { const restoreCacheMock = jest.spyOn(cache, "restoreCache"); await run(new StateProvider()); expect(restoreCacheMock).toHaveBeenCalledTimes(1); - expect(restoreCacheMock).toHaveBeenCalledWith([path], key, [], {}, false); + 
expect(restoreCacheMock).toHaveBeenCalledWith( + [path], + key, + [], + { + lookupOnly: false + }, + false + ); expect(failedMock).toHaveBeenCalledWith( `Key Validation Error: ${key} cannot contain commas.` ); @@ -237,7 +263,15 @@ test("restore with no cache found", async () => { await run(new StateProvider()); expect(restoreCacheMock).toHaveBeenCalledTimes(1); - expect(restoreCacheMock).toHaveBeenCalledWith([path], key, [], {}, false); + expect(restoreCacheMock).toHaveBeenCalledWith( + [path], + key, + [], + { + lookupOnly: false + }, + false + ); expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key); expect(failedMock).toHaveBeenCalledTimes(0); @@ -274,7 +308,9 @@ test("restore with restore keys and no cache found", async () => { [path], key, [restoreKey], - {}, + { + lookupOnly: false + }, false ); @@ -308,7 +344,15 @@ test("restore with cache found for key", async () => { await run(new StateProvider()); expect(restoreCacheMock).toHaveBeenCalledTimes(1); - expect(restoreCacheMock).toHaveBeenCalledWith([path], key, [], {}, false); + expect(restoreCacheMock).toHaveBeenCalledWith( + [path], + key, + [], + { + lookupOnly: false + }, + false + ); expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key); expect(setCacheHitOutputMock).toHaveBeenCalledTimes(1); @@ -346,7 +390,9 @@ test("restore with cache found for restore key", async () => { [path], key, [restoreKey], - {}, + { + lookupOnly: false + }, false ); @@ -358,3 +404,48 @@ test("restore with cache found for restore key", async () => { ); expect(failedMock).toHaveBeenCalledTimes(0); }); + +test("restore with lookup-only set", async () => { + const path = "node_modules"; + const key = "node-test"; + testUtils.setInputs({ + path: path, + key, + lookupOnly: true + }); + + const infoMock = jest.spyOn(core, "info"); + const failedMock = jest.spyOn(core, "setFailed"); + const stateMock = jest.spyOn(core, "saveState"); + const setCacheHitOutputMock = jest.spyOn(core, "setOutput"); + const restoreCacheMock = jest + .spyOn(cache, "restoreCache") + .mockImplementationOnce(() => { + return Promise.resolve(key); + }); + + await run(new StateProvider()); + + expect(restoreCacheMock).toHaveBeenCalledTimes(1); + expect(restoreCacheMock).toHaveBeenCalledWith( + [path], + key, + [], + { + lookupOnly: true + }, + false + ); + + expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key); + expect(stateMock).toHaveBeenCalledWith("CACHE_RESULT", key); + expect(stateMock).toHaveBeenCalledTimes(2); + + expect(setCacheHitOutputMock).toHaveBeenCalledTimes(1); + expect(setCacheHitOutputMock).toHaveBeenCalledWith("cache-hit", "true"); + + expect(infoMock).toHaveBeenCalledWith( + `Cache found and can be restored from key: ${key}` + ); + expect(failedMock).toHaveBeenCalledTimes(0); +}); diff --git a/__tests__/restoreOnly.test.ts b/__tests__/restoreOnly.test.ts index ab69914..800c2e1 100644 --- a/__tests__/restoreOnly.test.ts +++ b/__tests__/restoreOnly.test.ts @@ -75,7 +75,15 @@ test("restore with no cache found", async () => { await run(); expect(restoreCacheMock).toHaveBeenCalledTimes(1); - expect(restoreCacheMock).toHaveBeenCalledWith([path], key, [], {}, false); + expect(restoreCacheMock).toHaveBeenCalledWith( + [path], + key, + [], + { + lookupOnly: false + }, + false + ); expect(outputMock).toHaveBeenCalledWith("cache-primary-key", key); expect(outputMock).toHaveBeenCalledTimes(1); @@ -113,7 +121,9 @@ test("restore with restore keys and no cache found", async () => { [path], key, [restoreKey], - {}, + { + lookupOnly: false + }, false ); @@ -146,7 +156,15 @@ 
test("restore with cache found for key", async () => { await run(); expect(restoreCacheMock).toHaveBeenCalledTimes(1); - expect(restoreCacheMock).toHaveBeenCalledWith([path], key, [], {}, false); + expect(restoreCacheMock).toHaveBeenCalledWith( + [path], + key, + [], + { + lookupOnly: false + }, + false + ); expect(outputMock).toHaveBeenCalledWith("cache-primary-key", key); expect(outputMock).toHaveBeenCalledWith("cache-hit", "true"); @@ -185,7 +203,9 @@ test("restore with cache found for restore key", async () => { [path], key, [restoreKey], - {}, + { + lookupOnly: false + }, false ); diff --git a/action.yml b/action.yml index 7afb9c0..5c6fa87 100644 --- a/action.yml +++ b/action.yml @@ -22,6 +22,10 @@ inputs: description: 'Fail the workflow if cache entry is not found' default: 'false' required: false + lookup-only: + description: 'Check if a cache entry exists for the given input(s) (key, restore-keys) without downloading the cache' + default: 'false' + required: false outputs: cache-hit: description: 'A boolean value to indicate an exact match was found for the primary key' diff --git a/dist/restore-only/index.js b/dist/restore-only/index.js index 0949366..883dfbc 100644 --- a/dist/restore-only/index.js +++ b/dist/restore-only/index.js @@ -4975,7 +4975,8 @@ var Inputs; Inputs["RestoreKeys"] = "restore-keys"; Inputs["UploadChunkSize"] = "upload-chunk-size"; Inputs["EnableCrossOsArchive"] = "enableCrossOsArchive"; - Inputs["FailOnCacheMiss"] = "fail-on-cache-miss"; // Input for cache, restore action + Inputs["FailOnCacheMiss"] = "fail-on-cache-miss"; + Inputs["LookupOnly"] = "lookup-only"; // Input for cache, restore action })(Inputs = exports.Inputs || (exports.Inputs = {})); var Outputs; (function (Outputs) { @@ -41806,7 +41807,8 @@ function getDownloadOptions(copy) { useAzureSdk: true, downloadConcurrency: 8, timeoutInMs: 30000, - segmentTimeoutInMs: 3600000 + segmentTimeoutInMs: 3600000, + lookupOnly: false }; if (copy) { if (typeof copy.useAzureSdk === 'boolean') { @@ -41821,6 +41823,9 @@ function getDownloadOptions(copy) { if (typeof copy.segmentTimeoutInMs === 'number') { result.segmentTimeoutInMs = copy.segmentTimeoutInMs; } + if (typeof copy.lookupOnly === 'boolean') { + result.lookupOnly = copy.lookupOnly; + } } const segmentDownloadTimeoutMins = process.env['SEGMENT_DOWNLOAD_TIMEOUT_MINS']; if (segmentDownloadTimeoutMins && @@ -41833,6 +41838,7 @@ function getDownloadOptions(copy) { core.debug(`Request timeout (ms): ${result.timeoutInMs}`); core.debug(`Cache segment download timeout mins env var: ${process.env['SEGMENT_DOWNLOAD_TIMEOUT_MINS']}`); core.debug(`Segment download timeout (ms): ${result.segmentTimeoutInMs}`); + core.debug(`Lookup only: ${result.lookupOnly}`); return result; } exports.getDownloadOptions = getDownloadOptions; @@ -47281,6 +47287,10 @@ function restoreCache(paths, primaryKey, restoreKeys, options, enableCrossOsArch // Cache not found return undefined; } + if (options === null || options === void 0 ? 
void 0 : options.lookupOnly) { + core.info('Lookup only - skipping download'); + return cacheEntry.cacheKey; + } archivePath = path.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); core.debug(`Archive Path: ${archivePath}`); // Download the cache from the cache entry @@ -50494,7 +50504,8 @@ function restoreImpl(stateProvider) { }); const enableCrossOsArchive = utils.getInputAsBool(constants_1.Inputs.EnableCrossOsArchive); const failOnCacheMiss = utils.getInputAsBool(constants_1.Inputs.FailOnCacheMiss); - const cacheKey = yield cache.restoreCache(cachePaths, primaryKey, restoreKeys, {}, enableCrossOsArchive); + const lookupOnly = utils.getInputAsBool(constants_1.Inputs.LookupOnly); + const cacheKey = yield cache.restoreCache(cachePaths, primaryKey, restoreKeys, { lookupOnly: lookupOnly }, enableCrossOsArchive); if (!cacheKey) { if (failOnCacheMiss) { throw new Error(`Failed to restore cache entry. Exiting as fail-on-cache-miss is set. Input key: ${primaryKey}`); @@ -50509,7 +50520,12 @@ function restoreImpl(stateProvider) { stateProvider.setState(constants_1.State.CacheMatchedKey, cacheKey); const isExactKeyMatch = utils.isExactKeyMatch(core.getInput(constants_1.Inputs.Key, { required: true }), cacheKey); core.setOutput(constants_1.Outputs.CacheHit, isExactKeyMatch.toString()); - core.info(`Cache restored from key: ${cacheKey}`); + if (lookupOnly) { + core.info(`Cache found and can be restored from key: ${cacheKey}`); + } + else { + core.info(`Cache restored from key: ${cacheKey}`); + } return cacheKey; } catch (error) { diff --git a/dist/restore/index.js b/dist/restore/index.js index 6767d7b..2fe23bc 100644 --- a/dist/restore/index.js +++ b/dist/restore/index.js @@ -4975,7 +4975,8 @@ var Inputs; Inputs["RestoreKeys"] = "restore-keys"; Inputs["UploadChunkSize"] = "upload-chunk-size"; Inputs["EnableCrossOsArchive"] = "enableCrossOsArchive"; - Inputs["FailOnCacheMiss"] = "fail-on-cache-miss"; // Input for cache, restore action + Inputs["FailOnCacheMiss"] = "fail-on-cache-miss"; + Inputs["LookupOnly"] = "lookup-only"; // Input for cache, restore action })(Inputs = exports.Inputs || (exports.Inputs = {})); var Outputs; (function (Outputs) { @@ -41777,7 +41778,8 @@ function getDownloadOptions(copy) { useAzureSdk: true, downloadConcurrency: 8, timeoutInMs: 30000, - segmentTimeoutInMs: 3600000 + segmentTimeoutInMs: 3600000, + lookupOnly: false }; if (copy) { if (typeof copy.useAzureSdk === 'boolean') { @@ -41792,6 +41794,9 @@ function getDownloadOptions(copy) { if (typeof copy.segmentTimeoutInMs === 'number') { result.segmentTimeoutInMs = copy.segmentTimeoutInMs; } + if (typeof copy.lookupOnly === 'boolean') { + result.lookupOnly = copy.lookupOnly; + } } const segmentDownloadTimeoutMins = process.env['SEGMENT_DOWNLOAD_TIMEOUT_MINS']; if (segmentDownloadTimeoutMins && @@ -41804,6 +41809,7 @@ function getDownloadOptions(copy) { core.debug(`Request timeout (ms): ${result.timeoutInMs}`); core.debug(`Cache segment download timeout mins env var: ${process.env['SEGMENT_DOWNLOAD_TIMEOUT_MINS']}`); core.debug(`Segment download timeout (ms): ${result.segmentTimeoutInMs}`); + core.debug(`Lookup only: ${result.lookupOnly}`); return result; } exports.getDownloadOptions = getDownloadOptions; @@ -47252,6 +47258,10 @@ function restoreCache(paths, primaryKey, restoreKeys, options, enableCrossOsArch // Cache not found return undefined; } + if (options === null || options === void 0 ? 
void 0 : options.lookupOnly) { + core.info('Lookup only - skipping download'); + return cacheEntry.cacheKey; + } archivePath = path.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); core.debug(`Archive Path: ${archivePath}`); // Download the cache from the cache entry @@ -50494,7 +50504,8 @@ function restoreImpl(stateProvider) { }); const enableCrossOsArchive = utils.getInputAsBool(constants_1.Inputs.EnableCrossOsArchive); const failOnCacheMiss = utils.getInputAsBool(constants_1.Inputs.FailOnCacheMiss); - const cacheKey = yield cache.restoreCache(cachePaths, primaryKey, restoreKeys, {}, enableCrossOsArchive); + const lookupOnly = utils.getInputAsBool(constants_1.Inputs.LookupOnly); + const cacheKey = yield cache.restoreCache(cachePaths, primaryKey, restoreKeys, { lookupOnly: lookupOnly }, enableCrossOsArchive); if (!cacheKey) { if (failOnCacheMiss) { throw new Error(`Failed to restore cache entry. Exiting as fail-on-cache-miss is set. Input key: ${primaryKey}`); @@ -50509,7 +50520,12 @@ function restoreImpl(stateProvider) { stateProvider.setState(constants_1.State.CacheMatchedKey, cacheKey); const isExactKeyMatch = utils.isExactKeyMatch(core.getInput(constants_1.Inputs.Key, { required: true }), cacheKey); core.setOutput(constants_1.Outputs.CacheHit, isExactKeyMatch.toString()); - core.info(`Cache restored from key: ${cacheKey}`); + if (lookupOnly) { + core.info(`Cache found and can be restored from key: ${cacheKey}`); + } + else { + core.info(`Cache restored from key: ${cacheKey}`); + } return cacheKey; } catch (error) { diff --git a/dist/save-only/index.js b/dist/save-only/index.js index 139d8ba..2330a76 100644 --- a/dist/save-only/index.js +++ b/dist/save-only/index.js @@ -5031,7 +5031,8 @@ var Inputs; Inputs["RestoreKeys"] = "restore-keys"; Inputs["UploadChunkSize"] = "upload-chunk-size"; Inputs["EnableCrossOsArchive"] = "enableCrossOsArchive"; - Inputs["FailOnCacheMiss"] = "fail-on-cache-miss"; // Input for cache, restore action + Inputs["FailOnCacheMiss"] = "fail-on-cache-miss"; + Inputs["LookupOnly"] = "lookup-only"; // Input for cache, restore action })(Inputs = exports.Inputs || (exports.Inputs = {})); var Outputs; (function (Outputs) { @@ -41918,7 +41919,8 @@ function getDownloadOptions(copy) { useAzureSdk: true, downloadConcurrency: 8, timeoutInMs: 30000, - segmentTimeoutInMs: 3600000 + segmentTimeoutInMs: 3600000, + lookupOnly: false }; if (copy) { if (typeof copy.useAzureSdk === 'boolean') { @@ -41933,6 +41935,9 @@ function getDownloadOptions(copy) { if (typeof copy.segmentTimeoutInMs === 'number') { result.segmentTimeoutInMs = copy.segmentTimeoutInMs; } + if (typeof copy.lookupOnly === 'boolean') { + result.lookupOnly = copy.lookupOnly; + } } const segmentDownloadTimeoutMins = process.env['SEGMENT_DOWNLOAD_TIMEOUT_MINS']; if (segmentDownloadTimeoutMins && @@ -41945,6 +41950,7 @@ function getDownloadOptions(copy) { core.debug(`Request timeout (ms): ${result.timeoutInMs}`); core.debug(`Cache segment download timeout mins env var: ${process.env['SEGMENT_DOWNLOAD_TIMEOUT_MINS']}`); core.debug(`Segment download timeout (ms): ${result.segmentTimeoutInMs}`); + core.debug(`Lookup only: ${result.lookupOnly}`); return result; } exports.getDownloadOptions = getDownloadOptions; @@ -47393,6 +47399,10 @@ function restoreCache(paths, primaryKey, restoreKeys, options, enableCrossOsArch // Cache not found return undefined; } + if (options === null || options === void 0 ? 
void 0 : options.lookupOnly) { + core.info('Lookup only - skipping download'); + return cacheEntry.cacheKey; + } archivePath = path.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); core.debug(`Archive Path: ${archivePath}`); // Download the cache from the cache entry diff --git a/dist/save/index.js b/dist/save/index.js index cd23f84..9685ccb 100644 --- a/dist/save/index.js +++ b/dist/save/index.js @@ -4975,7 +4975,8 @@ var Inputs; Inputs["RestoreKeys"] = "restore-keys"; Inputs["UploadChunkSize"] = "upload-chunk-size"; Inputs["EnableCrossOsArchive"] = "enableCrossOsArchive"; - Inputs["FailOnCacheMiss"] = "fail-on-cache-miss"; // Input for cache, restore action + Inputs["FailOnCacheMiss"] = "fail-on-cache-miss"; + Inputs["LookupOnly"] = "lookup-only"; // Input for cache, restore action })(Inputs = exports.Inputs || (exports.Inputs = {})); var Outputs; (function (Outputs) { @@ -41862,7 +41863,8 @@ function getDownloadOptions(copy) { useAzureSdk: true, downloadConcurrency: 8, timeoutInMs: 30000, - segmentTimeoutInMs: 3600000 + segmentTimeoutInMs: 3600000, + lookupOnly: false }; if (copy) { if (typeof copy.useAzureSdk === 'boolean') { @@ -41877,6 +41879,9 @@ function getDownloadOptions(copy) { if (typeof copy.segmentTimeoutInMs === 'number') { result.segmentTimeoutInMs = copy.segmentTimeoutInMs; } + if (typeof copy.lookupOnly === 'boolean') { + result.lookupOnly = copy.lookupOnly; + } } const segmentDownloadTimeoutMins = process.env['SEGMENT_DOWNLOAD_TIMEOUT_MINS']; if (segmentDownloadTimeoutMins && @@ -41889,6 +41894,7 @@ function getDownloadOptions(copy) { core.debug(`Request timeout (ms): ${result.timeoutInMs}`); core.debug(`Cache segment download timeout mins env var: ${process.env['SEGMENT_DOWNLOAD_TIMEOUT_MINS']}`); core.debug(`Segment download timeout (ms): ${result.segmentTimeoutInMs}`); + core.debug(`Lookup only: ${result.lookupOnly}`); return result; } exports.getDownloadOptions = getDownloadOptions; @@ -47366,6 +47372,10 @@ function restoreCache(paths, primaryKey, restoreKeys, options, enableCrossOsArch // Cache not found return undefined; } + if (options === null || options === void 0 ? 
void 0 : options.lookupOnly) { + core.info('Lookup only - skipping download'); + return cacheEntry.cacheKey; + } archivePath = path.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); core.debug(`Archive Path: ${archivePath}`); // Download the cache from the cache entry diff --git a/package-lock.json b/package-lock.json index 12ccc83..7751fc2 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,15 +1,15 @@ { "name": "cache", - "version": "3.2.6", + "version": "3.3.0", "lockfileVersion": 2, "requires": true, "packages": { "": { "name": "cache", - "version": "3.2.6", + "version": "3.3.0", "license": "MIT", "dependencies": { - "@actions/cache": "^3.1.4", + "@actions/cache": "^3.2.0", "@actions/core": "^1.10.0", "@actions/exec": "^1.1.1", "@actions/io": "^1.1.2" @@ -36,9 +36,9 @@ } }, "node_modules/@actions/cache": { - "version": "3.1.4", - "resolved": "https://registry.npmjs.org/@actions/cache/-/cache-3.1.4.tgz", - "integrity": "sha512-Uh9wsz7SxunfyqF3UY/wfHI81z97CYQrZs4NU+whzYd0N8emTaloB+XtrAq46X2RbQEOBjF6R090jKQpX4coGg==", + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/@actions/cache/-/cache-3.2.0.tgz", + "integrity": "sha512-bCjN0+gPLaZZbpOoeK/1ve7J5MO+zv8FpcdKOWF3Tb9to0bWDpvgn9D2c/lC22oPUtHnCWQhLNVMfsWF4OBhNw==", "dependencies": { "@actions/core": "^1.10.0", "@actions/exec": "^1.0.1", @@ -9722,9 +9722,9 @@ }, "dependencies": { "@actions/cache": { - "version": "3.1.4", - "resolved": "https://registry.npmjs.org/@actions/cache/-/cache-3.1.4.tgz", - "integrity": "sha512-Uh9wsz7SxunfyqF3UY/wfHI81z97CYQrZs4NU+whzYd0N8emTaloB+XtrAq46X2RbQEOBjF6R090jKQpX4coGg==", + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/@actions/cache/-/cache-3.2.0.tgz", + "integrity": "sha512-bCjN0+gPLaZZbpOoeK/1ve7J5MO+zv8FpcdKOWF3Tb9to0bWDpvgn9D2c/lC22oPUtHnCWQhLNVMfsWF4OBhNw==", "requires": { "@actions/core": "^1.10.0", "@actions/exec": "^1.0.1", diff --git a/package.json b/package.json index 89c0c3b..2ace307 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "cache", - "version": "3.2.6", + "version": "3.3.0", "private": true, "description": "Cache dependencies and build outputs", "main": "dist/restore/index.js", @@ -23,7 +23,7 @@ "author": "GitHub", "license": "MIT", "dependencies": { - "@actions/cache": "^3.1.4", + "@actions/cache": "^3.2.0", "@actions/core": "^1.10.0", "@actions/exec": "^1.1.1", "@actions/io": "^1.1.2" diff --git a/restore/README.md b/restore/README.md index 168835c..6571aef 100644 --- a/restore/README.md +++ b/restore/README.md @@ -9,7 +9,8 @@ The restore action restores a cache. It works similarly to the `cache` action ex * `key` - An explicit key for a cache entry. See [creating a cache key](../README.md#creating-a-cache-key). * `path` - A list of files, directories, and wildcard patterns to restore. See [`@actions/glob`](https://github.com/actions/toolkit/tree/main/packages/glob) for supported patterns. * `restore-keys` - An ordered list of prefix-matched keys to use for restoring stale cache if no cache hit occurred for key. -* `fail-on-cache-miss` - Fail the workflow if cache entry is not found. Default: false +* `fail-on-cache-miss` - Fail the workflow if cache entry is not found. Default: `false` +* `lookup-only` - Check if a cache entry exists for the given input(s) (key, restore-keys) without downloading the cache. 
Default: `false` ### Outputs diff --git a/restore/action.yml b/restore/action.yml index 5de91f2..21be5f0 100644 --- a/restore/action.yml +++ b/restore/action.yml @@ -19,6 +19,10 @@ inputs: description: 'Fail the workflow if cache entry is not found' default: 'false' required: false + lookup-only: + description: 'Check if a cache entry exists for the given input(s) (key, restore-keys) without downloading the cache' + default: 'false' + required: false outputs: cache-hit: description: 'A boolean value to indicate an exact match was found for the primary key' diff --git a/src/constants.ts b/src/constants.ts index 4de3845..0158ae0 100644 --- a/src/constants.ts +++ b/src/constants.ts @@ -4,7 +4,8 @@ export enum Inputs { RestoreKeys = "restore-keys", // Input for cache, restore action UploadChunkSize = "upload-chunk-size", // Input for cache, save action EnableCrossOsArchive = "enableCrossOsArchive", // Input for cache, restore, save action - FailOnCacheMiss = "fail-on-cache-miss" // Input for cache, restore action + FailOnCacheMiss = "fail-on-cache-miss", // Input for cache, restore action + LookupOnly = "lookup-only" // Input for cache, restore action } export enum Outputs { diff --git a/src/restoreImpl.ts b/src/restoreImpl.ts index 3ae1dbd..797bc74 100644 --- a/src/restoreImpl.ts +++ b/src/restoreImpl.ts @@ -35,12 +35,13 @@ async function restoreImpl( Inputs.EnableCrossOsArchive ); const failOnCacheMiss = utils.getInputAsBool(Inputs.FailOnCacheMiss); + const lookupOnly = utils.getInputAsBool(Inputs.LookupOnly); const cacheKey = await cache.restoreCache( cachePaths, primaryKey, restoreKeys, - {}, + { lookupOnly: lookupOnly }, enableCrossOsArchive ); @@ -69,7 +70,11 @@ async function restoreImpl( ); core.setOutput(Outputs.CacheHit, isExactKeyMatch.toString()); - core.info(`Cache restored from key: ${cacheKey}`); + if (lookupOnly) { + core.info(`Cache found and can be restored from key: ${cacheKey}`); + } else { + core.info(`Cache restored from key: ${cacheKey}`); + } return cacheKey; } catch (error: unknown) { diff --git a/src/utils/testUtils.ts b/src/utils/testUtils.ts index 18447c0..ba0670b 100644 --- a/src/utils/testUtils.ts +++ b/src/utils/testUtils.ts @@ -15,6 +15,7 @@ interface CacheInput { restoreKeys?: string[]; enableCrossOsArchive?: boolean; failOnCacheMiss?: boolean; + lookupOnly?: boolean; } export function setInputs(input: CacheInput): void { @@ -29,6 +30,8 @@ export function setInputs(input: CacheInput): void { ); input.failOnCacheMiss !== undefined && setInput(Inputs.FailOnCacheMiss, input.failOnCacheMiss.toString()); + input.lookupOnly !== undefined && + setInput(Inputs.LookupOnly, input.lookupOnly.toString()); } export function clearInputs(): void { @@ -38,4 +41,5 @@ export function clearInputs(): void { delete process.env[getInputName(Inputs.UploadChunkSize)]; delete process.env[getInputName(Inputs.EnableCrossOsArchive)]; delete process.env[getInputName(Inputs.FailOnCacheMiss)]; + delete process.env[getInputName(Inputs.LookupOnly)]; }
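As a rough end-to-end sketch of the cross-job pattern this option enables, assuming the `restore` and `save` sub-actions shipped under the v3 tag; the workflow name, job names, paths, and key format are illustrative only and not taken from this PR:

```yaml
# Hypothetical workflow: a cheap check job looks up the cache entry without
# downloading it, and the expensive build job only runs when it is missing.
name: build-if-needed
on: push

jobs:
  check-cache:
    runs-on: ubuntu-latest
    outputs:
      cache-hit: ${{ steps.lookup.outputs.cache-hit }}
    steps:
      - name: Look up build cache without downloading it
        id: lookup
        uses: actions/cache/restore@v3
        with:
          path: dist
          key: build-${{ github.sha }}
          lookup-only: 'true'

  build:
    needs: check-cache
    if: needs.check-cache.outputs.cache-hit != 'true'
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - name: Build
        run: npm ci && npm run build
      - name: Save build output to the cache
        uses: actions/cache/save@v3
        with:
          path: dist
          key: build-${{ github.sha }}
```

The check job stays fast because no archive is transferred; only the `cache-hit` output is produced and forwarded to the dependent job.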