Mirror of https://code.forgejo.org/actions/cache.git (synced 2025-04-19 19:46:17 +02:00)
Enhancement: Allow usage when GITHUB_REF or ACTIONS_CACHE_REF are defined
This commit is contained in:
parent 16a133d9a7
commit 77fd223211
7 changed files with 792 additions and 638 deletions
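
In short, this commit replaces the single RefKey constant (GITHUB_REF) with a RefKeys list and rewrites isValidEvent() to accept a run whenever any listed ref variable is defined; the keys are checked in order, so the first one present in the environment decides the result, and the tests are parametrized over the same list. A condensed sketch of the new helper, assembled from the constants and actionUtils hunks further down:

export const RefKeys = ["ACTIONS_CACHE_REF", "GITHUB_REF"];

// Cache token authorized for events where a reference is defined
export function isValidEvent(): boolean {
    for (const refKey of RefKeys) {
        if (refKey in process.env) {
            // The first key present in the environment decides the outcome.
            return Boolean(process.env[refKey]);
        }
    }
    return false;
}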
@@ -4,7 +4,7 @@ import { promises as fs } from "fs";
 import * as os from "os";
 import * as path from "path";

-import { Events, Outputs, RefKey, State } from "../src/constants";
+import { Events, Outputs, RefKeys, State } from "../src/constants";
 import { ArtifactCacheEntry } from "../src/contracts";
 import * as actionUtils from "../src/utils/actionUtils";

@@ -19,7 +19,8 @@ function getTempDir(): string {
 afterEach(() => {
     delete process.env[Events.Key];
-    delete process.env[RefKey];
+
+    RefKeys.forEach(refKey => delete process.env[refKey]);
 });

 afterAll(async () => {

@@ -326,16 +327,23 @@ test("resolvePaths exclusion pattern returns not found", async () => {
     }
 });

-test("isValidEvent returns true for event that has a ref", () => {
-    const event = Events.Push;
-    process.env[Events.Key] = event;
-    process.env[RefKey] = "ref/heads/feature";
-
-    const isValidEvent = actionUtils.isValidEvent();
-
-    expect(isValidEvent).toBe(true);
-});
+const refKeySet = RefKeys.map(refKey => {
+    return [refKey];
+});
+
+test.each(refKeySet)(
+    "isValidEvent returns true for event that has a ref",
+    refKey => {
+        const event = Events.Push;
+        process.env[Events.Key] = event;
+        process.env[refKey] = "ref/heads/feature";
+
+        const isValidEvent = actionUtils.isValidEvent();
+
+        expect(isValidEvent).toBe(true);
+    }
+);

 test("unlinkFile unlinks file", async () => {
     const testDirectory = await fs.mkdtemp("unlinkFileTest");
     const testFile = path.join(testDirectory, "test.txt");
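Throughout the test suites the change swaps single tests for Jest's test.each, which runs one test per row of the supplied table; here each row carries a ref key (and, in the restore and save suites, a matching ref value). A minimal standalone illustration of the pattern, assuming the same imports the test files use:

import * as actionUtils from "../src/utils/actionUtils";
import { RefKeys } from "../src/constants";

// One row per parametrized case; %s in the title is filled from the row.
test.each(RefKeys.map(refKey => [refKey]))(
    "isValidEvent returns true when %s holds a ref",
    refKey => {
        process.env[refKey] = "refs/heads/feature";
        expect(actionUtils.isValidEvent()).toBe(true);
        delete process.env[refKey];
    }
);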
@@ -7,7 +7,7 @@ import {
     CompressionMethod,
     Events,
     Inputs,
-    RefKey
+    RefKeys
 } from "../src/constants";
 import { ArtifactCacheEntry } from "../src/contracts";
 import run from "../src/restore";

@@ -40,13 +40,17 @@ beforeAll(() => {
 beforeEach(() => {
     process.env[Events.Key] = Events.Push;
-    process.env[RefKey] = "refs/heads/feature-branch";
 });

 afterEach(() => {
     testUtils.clearInputs();
     delete process.env[Events.Key];
-    delete process.env[RefKey];
+
+    RefKeys.forEach(refKey => delete process.env[refKey]);
 });

+const refKeySet = RefKeys.map(refKey => {
+    return [refKey, `refs/heads/feature/${refKey.toLowerCase()}`];
+});
+
 test("restore with invalid event outputs warning", async () => {

@@ -54,7 +58,6 @@ test("restore with invalid event outputs warning", async () => {
     const failedMock = jest.spyOn(core, "setFailed");
     const invalidEvent = "commit_comment";
     process.env[Events.Key] = invalidEvent;
-    delete process.env[RefKey];
     await run();
     expect(logWarningMock).toHaveBeenCalledWith(
         `Event Validation Error: The event type ${invalidEvent} is not supported because it's not tied to a branch or tag ref.`
|
@ -62,16 +65,23 @@ test("restore with invalid event outputs warning", async () => {
|
|||
expect(failedMock).toHaveBeenCalledTimes(0);
|
||||
});
|
||||
|
||||
test("restore with no path should fail", async () => {
|
||||
const failedMock = jest.spyOn(core, "setFailed");
|
||||
await run();
|
||||
// this input isn't necessary for restore b/c tarball contains entries relative to workspace
|
||||
expect(failedMock).not.toHaveBeenCalledWith(
|
||||
"Input required and not supplied: path"
|
||||
);
|
||||
});
|
||||
test.each(refKeySet)(
|
||||
"restore with no path should fail",
|
||||
async (refKey, ref) => {
|
||||
process.env[refKey] = ref;
|
||||
|
||||
const failedMock = jest.spyOn(core, "setFailed");
|
||||
await run();
|
||||
// this input isn't necessary for restore b/c tarball contains entries relative to workspace
|
||||
expect(failedMock).not.toHaveBeenCalledWith(
|
||||
"Input required and not supplied: path"
|
||||
);
|
||||
}
|
||||
);
|
||||
|
||||
test.each(refKeySet)("restore with no key", async (refKey, ref) => {
|
||||
process.env[refKey] = ref;
|
||||
|
||||
test("restore with no key", async () => {
|
||||
testUtils.setInput(Inputs.Path, "node_modules");
|
||||
const failedMock = jest.spyOn(core, "setFailed");
|
||||
await run();
|
||||
|
@ -80,48 +90,65 @@ test("restore with no key", async () => {
|
|||
);
|
||||
});
|
||||
|
||||
test("restore with too many keys should fail", async () => {
|
||||
const key = "node-test";
|
||||
const restoreKeys = [...Array(20).keys()].map(x => x.toString());
|
||||
testUtils.setInputs({
|
||||
path: "node_modules",
|
||||
key,
|
||||
restoreKeys
|
||||
});
|
||||
const failedMock = jest.spyOn(core, "setFailed");
|
||||
await run();
|
||||
expect(failedMock).toHaveBeenCalledWith(
|
||||
`Key Validation Error: Keys are limited to a maximum of 10.`
|
||||
);
|
||||
});
|
||||
test.each(refKeySet)(
|
||||
"restore with too many keys should fail",
|
||||
async (refKey, ref) => {
|
||||
process.env[refKey] = ref;
|
||||
|
||||
test("restore with large key should fail", async () => {
|
||||
const key = "foo".repeat(512); // Over the 512 character limit
|
||||
testUtils.setInputs({
|
||||
path: "node_modules",
|
||||
key
|
||||
});
|
||||
const failedMock = jest.spyOn(core, "setFailed");
|
||||
await run();
|
||||
expect(failedMock).toHaveBeenCalledWith(
|
||||
`Key Validation Error: ${key} cannot be larger than 512 characters.`
|
||||
);
|
||||
});
|
||||
const key = "node-test";
|
||||
const restoreKeys = [...Array(20).keys()].map(x => x.toString());
|
||||
testUtils.setInputs({
|
||||
path: "node_modules",
|
||||
key,
|
||||
restoreKeys
|
||||
});
|
||||
const failedMock = jest.spyOn(core, "setFailed");
|
||||
await run();
|
||||
expect(failedMock).toHaveBeenCalledWith(
|
||||
`Key Validation Error: Keys are limited to a maximum of 10.`
|
||||
);
|
||||
}
|
||||
);
|
||||
|
||||
test("restore with invalid key should fail", async () => {
|
||||
const key = "comma,comma";
|
||||
testUtils.setInputs({
|
||||
path: "node_modules",
|
||||
key
|
||||
});
|
||||
const failedMock = jest.spyOn(core, "setFailed");
|
||||
await run();
|
||||
expect(failedMock).toHaveBeenCalledWith(
|
||||
`Key Validation Error: ${key} cannot contain commas.`
|
||||
);
|
||||
});
|
||||
test.each(refKeySet)(
|
||||
"restore with large key should fail",
|
||||
async (refKey, ref) => {
|
||||
process.env[refKey] = ref;
|
||||
|
||||
const key = "foo".repeat(512); // Over the 512 character limit
|
||||
testUtils.setInputs({
|
||||
path: "node_modules",
|
||||
key
|
||||
});
|
||||
const failedMock = jest.spyOn(core, "setFailed");
|
||||
await run();
|
||||
expect(failedMock).toHaveBeenCalledWith(
|
||||
`Key Validation Error: ${key} cannot be larger than 512 characters.`
|
||||
);
|
||||
}
|
||||
);
|
||||
|
||||
test.each(refKeySet)(
|
||||
"restore with invalid key should fail",
|
||||
async (refKey, ref) => {
|
||||
process.env[refKey] = ref;
|
||||
|
||||
const key = "comma,comma";
|
||||
testUtils.setInputs({
|
||||
path: "node_modules",
|
||||
key
|
||||
});
|
||||
const failedMock = jest.spyOn(core, "setFailed");
|
||||
await run();
|
||||
expect(failedMock).toHaveBeenCalledWith(
|
||||
`Key Validation Error: ${key} cannot contain commas.`
|
||||
);
|
||||
}
|
||||
);
|
||||
|
||||
test.each(refKeySet)("restore with no cache found", async (refKey, ref) => {
|
||||
process.env[refKey] = ref;
|
||||
|
||||
test("restore with no cache found", async () => {
|
||||
const key = "node-test";
|
||||
testUtils.setInputs({
|
||||
path: "node_modules",
|
||||
|
@ -147,287 +174,330 @@ test("restore with no cache found", async () => {
|
|||
);
|
||||
});
|
||||
|
||||
test("restore with server error should fail", async () => {
|
||||
const key = "node-test";
|
||||
testUtils.setInputs({
|
||||
path: "node_modules",
|
||||
key
|
||||
});
|
||||
test.each(refKeySet)(
|
||||
"restore with server error should fail",
|
||||
async (refKey, ref) => {
|
||||
process.env[refKey] = ref;
|
||||
|
||||
const logWarningMock = jest.spyOn(actionUtils, "logWarning");
|
||||
const failedMock = jest.spyOn(core, "setFailed");
|
||||
const stateMock = jest.spyOn(core, "saveState");
|
||||
const key = "node-test";
|
||||
testUtils.setInputs({
|
||||
path: "node_modules",
|
||||
key
|
||||
});
|
||||
|
||||
const clientMock = jest.spyOn(cacheHttpClient, "getCacheEntry");
|
||||
clientMock.mockImplementation(() => {
|
||||
throw new Error("HTTP Error Occurred");
|
||||
});
|
||||
const logWarningMock = jest.spyOn(actionUtils, "logWarning");
|
||||
const failedMock = jest.spyOn(core, "setFailed");
|
||||
const stateMock = jest.spyOn(core, "saveState");
|
||||
|
||||
const setCacheHitOutputMock = jest.spyOn(actionUtils, "setCacheHitOutput");
|
||||
const clientMock = jest.spyOn(cacheHttpClient, "getCacheEntry");
|
||||
clientMock.mockImplementation(() => {
|
||||
throw new Error("HTTP Error Occurred");
|
||||
});
|
||||
|
||||
await run();
|
||||
const setCacheHitOutputMock = jest.spyOn(
|
||||
actionUtils,
|
||||
"setCacheHitOutput"
|
||||
);
|
||||
|
||||
expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key);
|
||||
await run();
|
||||
|
||||
expect(logWarningMock).toHaveBeenCalledTimes(1);
|
||||
expect(logWarningMock).toHaveBeenCalledWith("HTTP Error Occurred");
|
||||
expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key);
|
||||
|
||||
expect(setCacheHitOutputMock).toHaveBeenCalledTimes(1);
|
||||
expect(setCacheHitOutputMock).toHaveBeenCalledWith(false);
|
||||
expect(logWarningMock).toHaveBeenCalledTimes(1);
|
||||
expect(logWarningMock).toHaveBeenCalledWith("HTTP Error Occurred");
|
||||
|
||||
expect(failedMock).toHaveBeenCalledTimes(0);
|
||||
});
|
||||
expect(setCacheHitOutputMock).toHaveBeenCalledTimes(1);
|
||||
expect(setCacheHitOutputMock).toHaveBeenCalledWith(false);
|
||||
|
||||
test("restore with restore keys and no cache found", async () => {
|
||||
const key = "node-test";
|
||||
const restoreKey = "node-";
|
||||
testUtils.setInputs({
|
||||
path: "node_modules",
|
||||
key,
|
||||
restoreKeys: [restoreKey]
|
||||
});
|
||||
expect(failedMock).toHaveBeenCalledTimes(0);
|
||||
}
|
||||
);
|
||||
|
||||
const infoMock = jest.spyOn(core, "info");
|
||||
const failedMock = jest.spyOn(core, "setFailed");
|
||||
const stateMock = jest.spyOn(core, "saveState");
|
||||
test.each(refKeySet)(
|
||||
"restore with restore keys and no cache found",
|
||||
async (refKey, ref) => {
|
||||
process.env[refKey] = ref;
|
||||
|
||||
const clientMock = jest.spyOn(cacheHttpClient, "getCacheEntry");
|
||||
clientMock.mockImplementation(() => {
|
||||
return Promise.resolve(null);
|
||||
});
|
||||
const key = "node-test";
|
||||
const restoreKey = "node-";
|
||||
testUtils.setInputs({
|
||||
path: "node_modules",
|
||||
key,
|
||||
restoreKeys: [restoreKey]
|
||||
});
|
||||
|
||||
await run();
|
||||
const infoMock = jest.spyOn(core, "info");
|
||||
const failedMock = jest.spyOn(core, "setFailed");
|
||||
const stateMock = jest.spyOn(core, "saveState");
|
||||
|
||||
expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key);
|
||||
expect(failedMock).toHaveBeenCalledTimes(0);
|
||||
const clientMock = jest.spyOn(cacheHttpClient, "getCacheEntry");
|
||||
clientMock.mockImplementation(() => {
|
||||
return Promise.resolve(null);
|
||||
});
|
||||
|
||||
expect(infoMock).toHaveBeenCalledWith(
|
||||
`Cache not found for input keys: ${key}, ${restoreKey}`
|
||||
);
|
||||
});
|
||||
await run();
|
||||
|
||||
test("restore with gzip compressed cache found", async () => {
|
||||
const key = "node-test";
|
||||
testUtils.setInputs({
|
||||
path: "node_modules",
|
||||
key
|
||||
});
|
||||
expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key);
|
||||
expect(failedMock).toHaveBeenCalledTimes(0);
|
||||
|
||||
const infoMock = jest.spyOn(core, "info");
|
||||
const failedMock = jest.spyOn(core, "setFailed");
|
||||
const stateMock = jest.spyOn(core, "saveState");
|
||||
expect(infoMock).toHaveBeenCalledWith(
|
||||
`Cache not found for input keys: ${key}, ${restoreKey}`
|
||||
);
|
||||
}
|
||||
);
|
||||
|
||||
const cacheEntry: ArtifactCacheEntry = {
|
||||
cacheKey: key,
|
||||
scope: "refs/heads/master",
|
||||
archiveLocation: "www.actionscache.test/download"
|
||||
};
|
||||
const getCacheMock = jest.spyOn(cacheHttpClient, "getCacheEntry");
|
||||
getCacheMock.mockImplementation(() => {
|
||||
return Promise.resolve(cacheEntry);
|
||||
});
|
||||
const tempPath = "/foo/bar";
|
||||
test.each(refKeySet)(
|
||||
"restore with gzip compressed cache found",
|
||||
async (refKey, ref) => {
|
||||
process.env[refKey] = ref;
|
||||
|
||||
const createTempDirectoryMock = jest.spyOn(
|
||||
actionUtils,
|
||||
"createTempDirectory"
|
||||
);
|
||||
createTempDirectoryMock.mockImplementation(() => {
|
||||
return Promise.resolve(tempPath);
|
||||
});
|
||||
const key = "node-test";
|
||||
testUtils.setInputs({
|
||||
path: "node_modules",
|
||||
key
|
||||
});
|
||||
|
||||
const archivePath = path.join(tempPath, CacheFilename.Gzip);
|
||||
const setCacheStateMock = jest.spyOn(actionUtils, "setCacheState");
|
||||
const downloadCacheMock = jest.spyOn(cacheHttpClient, "downloadCache");
|
||||
const infoMock = jest.spyOn(core, "info");
|
||||
const failedMock = jest.spyOn(core, "setFailed");
|
||||
const stateMock = jest.spyOn(core, "saveState");
|
||||
|
||||
const fileSize = 142;
|
||||
const getArchiveFileSizeMock = jest
|
||||
.spyOn(actionUtils, "getArchiveFileSize")
|
||||
.mockReturnValue(fileSize);
|
||||
const cacheEntry: ArtifactCacheEntry = {
|
||||
cacheKey: key,
|
||||
scope: "refs/heads/master",
|
||||
archiveLocation: "www.actionscache.test/download"
|
||||
};
|
||||
const getCacheMock = jest.spyOn(cacheHttpClient, "getCacheEntry");
|
||||
getCacheMock.mockImplementation(() => {
|
||||
return Promise.resolve(cacheEntry);
|
||||
});
|
||||
const tempPath = "/foo/bar";
|
||||
|
||||
const extractTarMock = jest.spyOn(tar, "extractTar");
|
||||
const unlinkFileMock = jest.spyOn(actionUtils, "unlinkFile");
|
||||
const setCacheHitOutputMock = jest.spyOn(actionUtils, "setCacheHitOutput");
|
||||
const createTempDirectoryMock = jest.spyOn(
|
||||
actionUtils,
|
||||
"createTempDirectory"
|
||||
);
|
||||
createTempDirectoryMock.mockImplementation(() => {
|
||||
return Promise.resolve(tempPath);
|
||||
});
|
||||
|
||||
const compression = CompressionMethod.Gzip;
|
||||
const getCompressionMock = jest
|
||||
.spyOn(actionUtils, "getCompressionMethod")
|
||||
.mockReturnValue(Promise.resolve(compression));
|
||||
const archivePath = path.join(tempPath, CacheFilename.Gzip);
|
||||
const setCacheStateMock = jest.spyOn(actionUtils, "setCacheState");
|
||||
const downloadCacheMock = jest.spyOn(cacheHttpClient, "downloadCache");
|
||||
|
||||
await run();
|
||||
const fileSize = 142;
|
||||
const getArchiveFileSizeMock = jest
|
||||
.spyOn(actionUtils, "getArchiveFileSize")
|
||||
.mockReturnValue(fileSize);
|
||||
|
||||
expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key);
|
||||
expect(getCacheMock).toHaveBeenCalledWith([key], {
|
||||
compressionMethod: compression
|
||||
});
|
||||
expect(setCacheStateMock).toHaveBeenCalledWith(cacheEntry);
|
||||
expect(createTempDirectoryMock).toHaveBeenCalledTimes(1);
|
||||
expect(downloadCacheMock).toHaveBeenCalledWith(
|
||||
cacheEntry.archiveLocation,
|
||||
archivePath
|
||||
);
|
||||
expect(getArchiveFileSizeMock).toHaveBeenCalledWith(archivePath);
|
||||
const extractTarMock = jest.spyOn(tar, "extractTar");
|
||||
const unlinkFileMock = jest.spyOn(actionUtils, "unlinkFile");
|
||||
const setCacheHitOutputMock = jest.spyOn(
|
||||
actionUtils,
|
||||
"setCacheHitOutput"
|
||||
);
|
||||
|
||||
expect(extractTarMock).toHaveBeenCalledTimes(1);
|
||||
expect(extractTarMock).toHaveBeenCalledWith(archivePath, compression);
|
||||
const compression = CompressionMethod.Gzip;
|
||||
const getCompressionMock = jest
|
||||
.spyOn(actionUtils, "getCompressionMethod")
|
||||
.mockReturnValue(Promise.resolve(compression));
|
||||
|
||||
expect(unlinkFileMock).toHaveBeenCalledTimes(1);
|
||||
expect(unlinkFileMock).toHaveBeenCalledWith(archivePath);
|
||||
await run();
|
||||
|
||||
expect(setCacheHitOutputMock).toHaveBeenCalledTimes(1);
|
||||
expect(setCacheHitOutputMock).toHaveBeenCalledWith(true);
|
||||
expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key);
|
||||
expect(getCacheMock).toHaveBeenCalledWith([key], {
|
||||
compressionMethod: compression
|
||||
});
|
||||
expect(setCacheStateMock).toHaveBeenCalledWith(cacheEntry);
|
||||
expect(createTempDirectoryMock).toHaveBeenCalledTimes(1);
|
||||
expect(downloadCacheMock).toHaveBeenCalledWith(
|
||||
cacheEntry.archiveLocation,
|
||||
archivePath
|
||||
);
|
||||
expect(getArchiveFileSizeMock).toHaveBeenCalledWith(archivePath);
|
||||
|
||||
expect(infoMock).toHaveBeenCalledWith(`Cache restored from key: ${key}`);
|
||||
expect(failedMock).toHaveBeenCalledTimes(0);
|
||||
expect(getCompressionMock).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
expect(extractTarMock).toHaveBeenCalledTimes(1);
|
||||
expect(extractTarMock).toHaveBeenCalledWith(archivePath, compression);
|
||||
|
||||
test("restore with a pull request event and zstd compressed cache found", async () => {
|
||||
const key = "node-test";
|
||||
testUtils.setInputs({
|
||||
path: "node_modules",
|
||||
key
|
||||
});
|
||||
expect(unlinkFileMock).toHaveBeenCalledTimes(1);
|
||||
expect(unlinkFileMock).toHaveBeenCalledWith(archivePath);
|
||||
|
||||
process.env[Events.Key] = Events.PullRequest;
|
||||
expect(setCacheHitOutputMock).toHaveBeenCalledTimes(1);
|
||||
expect(setCacheHitOutputMock).toHaveBeenCalledWith(true);
|
||||
|
||||
const infoMock = jest.spyOn(core, "info");
|
||||
const failedMock = jest.spyOn(core, "setFailed");
|
||||
const stateMock = jest.spyOn(core, "saveState");
|
||||
expect(infoMock).toHaveBeenCalledWith(
|
||||
`Cache restored from key: ${key}`
|
||||
);
|
||||
expect(failedMock).toHaveBeenCalledTimes(0);
|
||||
expect(getCompressionMock).toHaveBeenCalledTimes(1);
|
||||
}
|
||||
);
|
||||
|
||||
const cacheEntry: ArtifactCacheEntry = {
|
||||
cacheKey: key,
|
||||
scope: "refs/heads/master",
|
||||
archiveLocation: "www.actionscache.test/download"
|
||||
};
|
||||
const getCacheMock = jest.spyOn(cacheHttpClient, "getCacheEntry");
|
||||
getCacheMock.mockImplementation(() => {
|
||||
return Promise.resolve(cacheEntry);
|
||||
});
|
||||
const tempPath = "/foo/bar";
|
||||
test.each(refKeySet)(
|
||||
"restore with a pull request event and zstd compressed cache found",
|
||||
async (refKey, ref) => {
|
||||
process.env[refKey] = ref;
|
||||
|
||||
const createTempDirectoryMock = jest.spyOn(
|
||||
actionUtils,
|
||||
"createTempDirectory"
|
||||
);
|
||||
createTempDirectoryMock.mockImplementation(() => {
|
||||
return Promise.resolve(tempPath);
|
||||
});
|
||||
const key = "node-test";
|
||||
testUtils.setInputs({
|
||||
path: "node_modules",
|
||||
key
|
||||
});
|
||||
|
||||
const archivePath = path.join(tempPath, CacheFilename.Zstd);
|
||||
const setCacheStateMock = jest.spyOn(actionUtils, "setCacheState");
|
||||
const downloadCacheMock = jest.spyOn(cacheHttpClient, "downloadCache");
|
||||
process.env[Events.Key] = Events.PullRequest;
|
||||
|
||||
const fileSize = 62915000;
|
||||
const getArchiveFileSizeMock = jest
|
||||
.spyOn(actionUtils, "getArchiveFileSize")
|
||||
.mockReturnValue(fileSize);
|
||||
const infoMock = jest.spyOn(core, "info");
|
||||
const failedMock = jest.spyOn(core, "setFailed");
|
||||
const stateMock = jest.spyOn(core, "saveState");
|
||||
|
||||
const extractTarMock = jest.spyOn(tar, "extractTar");
|
||||
const setCacheHitOutputMock = jest.spyOn(actionUtils, "setCacheHitOutput");
|
||||
const compression = CompressionMethod.Zstd;
|
||||
const getCompressionMock = jest
|
||||
.spyOn(actionUtils, "getCompressionMethod")
|
||||
.mockReturnValue(Promise.resolve(compression));
|
||||
const cacheEntry: ArtifactCacheEntry = {
|
||||
cacheKey: key,
|
||||
scope: "refs/heads/master",
|
||||
archiveLocation: "www.actionscache.test/download"
|
||||
};
|
||||
const getCacheMock = jest.spyOn(cacheHttpClient, "getCacheEntry");
|
||||
getCacheMock.mockImplementation(() => {
|
||||
return Promise.resolve(cacheEntry);
|
||||
});
|
||||
const tempPath = "/foo/bar";
|
||||
|
||||
await run();
|
||||
const createTempDirectoryMock = jest.spyOn(
|
||||
actionUtils,
|
||||
"createTempDirectory"
|
||||
);
|
||||
createTempDirectoryMock.mockImplementation(() => {
|
||||
return Promise.resolve(tempPath);
|
||||
});
|
||||
|
||||
expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key);
|
||||
expect(getCacheMock).toHaveBeenCalledWith([key], {
|
||||
compressionMethod: compression
|
||||
});
|
||||
expect(setCacheStateMock).toHaveBeenCalledWith(cacheEntry);
|
||||
expect(createTempDirectoryMock).toHaveBeenCalledTimes(1);
|
||||
expect(downloadCacheMock).toHaveBeenCalledWith(
|
||||
cacheEntry.archiveLocation,
|
||||
archivePath
|
||||
);
|
||||
expect(getArchiveFileSizeMock).toHaveBeenCalledWith(archivePath);
|
||||
expect(infoMock).toHaveBeenCalledWith(`Cache Size: ~60 MB (62915000 B)`);
|
||||
const archivePath = path.join(tempPath, CacheFilename.Zstd);
|
||||
const setCacheStateMock = jest.spyOn(actionUtils, "setCacheState");
|
||||
const downloadCacheMock = jest.spyOn(cacheHttpClient, "downloadCache");
|
||||
|
||||
expect(extractTarMock).toHaveBeenCalledTimes(1);
|
||||
expect(extractTarMock).toHaveBeenCalledWith(archivePath, compression);
|
||||
const fileSize = 62915000;
|
||||
const getArchiveFileSizeMock = jest
|
||||
.spyOn(actionUtils, "getArchiveFileSize")
|
||||
.mockReturnValue(fileSize);
|
||||
|
||||
expect(setCacheHitOutputMock).toHaveBeenCalledTimes(1);
|
||||
expect(setCacheHitOutputMock).toHaveBeenCalledWith(true);
|
||||
const extractTarMock = jest.spyOn(tar, "extractTar");
|
||||
const setCacheHitOutputMock = jest.spyOn(
|
||||
actionUtils,
|
||||
"setCacheHitOutput"
|
||||
);
|
||||
const compression = CompressionMethod.Zstd;
|
||||
const getCompressionMock = jest
|
||||
.spyOn(actionUtils, "getCompressionMethod")
|
||||
.mockReturnValue(Promise.resolve(compression));
|
||||
|
||||
expect(infoMock).toHaveBeenCalledWith(`Cache restored from key: ${key}`);
|
||||
expect(failedMock).toHaveBeenCalledTimes(0);
|
||||
expect(getCompressionMock).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
await run();
|
||||
|
||||
test("restore with cache found for restore key", async () => {
|
||||
const key = "node-test";
|
||||
const restoreKey = "node-";
|
||||
testUtils.setInputs({
|
||||
path: "node_modules",
|
||||
key,
|
||||
restoreKeys: [restoreKey]
|
||||
});
|
||||
expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key);
|
||||
expect(getCacheMock).toHaveBeenCalledWith([key], {
|
||||
compressionMethod: compression
|
||||
});
|
||||
expect(setCacheStateMock).toHaveBeenCalledWith(cacheEntry);
|
||||
expect(createTempDirectoryMock).toHaveBeenCalledTimes(1);
|
||||
expect(downloadCacheMock).toHaveBeenCalledWith(
|
||||
cacheEntry.archiveLocation,
|
||||
archivePath
|
||||
);
|
||||
expect(getArchiveFileSizeMock).toHaveBeenCalledWith(archivePath);
|
||||
expect(infoMock).toHaveBeenCalledWith(
|
||||
`Cache Size: ~60 MB (62915000 B)`
|
||||
);
|
||||
|
||||
const infoMock = jest.spyOn(core, "info");
|
||||
const failedMock = jest.spyOn(core, "setFailed");
|
||||
const stateMock = jest.spyOn(core, "saveState");
|
||||
expect(extractTarMock).toHaveBeenCalledTimes(1);
|
||||
expect(extractTarMock).toHaveBeenCalledWith(archivePath, compression);
|
||||
|
||||
const cacheEntry: ArtifactCacheEntry = {
|
||||
cacheKey: restoreKey,
|
||||
scope: "refs/heads/master",
|
||||
archiveLocation: "www.actionscache.test/download"
|
||||
};
|
||||
const getCacheMock = jest.spyOn(cacheHttpClient, "getCacheEntry");
|
||||
getCacheMock.mockImplementation(() => {
|
||||
return Promise.resolve(cacheEntry);
|
||||
});
|
||||
const tempPath = "/foo/bar";
|
||||
expect(setCacheHitOutputMock).toHaveBeenCalledTimes(1);
|
||||
expect(setCacheHitOutputMock).toHaveBeenCalledWith(true);
|
||||
|
||||
const createTempDirectoryMock = jest.spyOn(
|
||||
actionUtils,
|
||||
"createTempDirectory"
|
||||
);
|
||||
createTempDirectoryMock.mockImplementation(() => {
|
||||
return Promise.resolve(tempPath);
|
||||
});
|
||||
expect(infoMock).toHaveBeenCalledWith(
|
||||
`Cache restored from key: ${key}`
|
||||
);
|
||||
expect(failedMock).toHaveBeenCalledTimes(0);
|
||||
expect(getCompressionMock).toHaveBeenCalledTimes(1);
|
||||
}
|
||||
);
|
||||
|
||||
const archivePath = path.join(tempPath, CacheFilename.Zstd);
|
||||
const setCacheStateMock = jest.spyOn(actionUtils, "setCacheState");
|
||||
const downloadCacheMock = jest.spyOn(cacheHttpClient, "downloadCache");
|
||||
test.each(refKeySet)(
|
||||
"restore with cache found for restore key",
|
||||
async (refKey, ref) => {
|
||||
process.env[refKey] = ref;
|
||||
|
||||
const fileSize = 142;
|
||||
const getArchiveFileSizeMock = jest
|
||||
.spyOn(actionUtils, "getArchiveFileSize")
|
||||
.mockReturnValue(fileSize);
|
||||
const key = "node-test";
|
||||
const restoreKey = "node-";
|
||||
testUtils.setInputs({
|
||||
path: "node_modules",
|
||||
key,
|
||||
restoreKeys: [restoreKey]
|
||||
});
|
||||
|
||||
const extractTarMock = jest.spyOn(tar, "extractTar");
|
||||
const setCacheHitOutputMock = jest.spyOn(actionUtils, "setCacheHitOutput");
|
||||
const compression = CompressionMethod.Zstd;
|
||||
const getCompressionMock = jest
|
||||
.spyOn(actionUtils, "getCompressionMethod")
|
||||
.mockReturnValue(Promise.resolve(compression));
|
||||
const infoMock = jest.spyOn(core, "info");
|
||||
const failedMock = jest.spyOn(core, "setFailed");
|
||||
const stateMock = jest.spyOn(core, "saveState");
|
||||
|
||||
await run();
|
||||
const cacheEntry: ArtifactCacheEntry = {
|
||||
cacheKey: restoreKey,
|
||||
scope: "refs/heads/master",
|
||||
archiveLocation: "www.actionscache.test/download"
|
||||
};
|
||||
const getCacheMock = jest.spyOn(cacheHttpClient, "getCacheEntry");
|
||||
getCacheMock.mockImplementation(() => {
|
||||
return Promise.resolve(cacheEntry);
|
||||
});
|
||||
const tempPath = "/foo/bar";
|
||||
|
||||
expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key);
|
||||
expect(getCacheMock).toHaveBeenCalledWith([key, restoreKey], {
|
||||
compressionMethod: compression
|
||||
});
|
||||
expect(setCacheStateMock).toHaveBeenCalledWith(cacheEntry);
|
||||
expect(createTempDirectoryMock).toHaveBeenCalledTimes(1);
|
||||
expect(downloadCacheMock).toHaveBeenCalledWith(
|
||||
cacheEntry.archiveLocation,
|
||||
archivePath
|
||||
);
|
||||
expect(getArchiveFileSizeMock).toHaveBeenCalledWith(archivePath);
|
||||
expect(infoMock).toHaveBeenCalledWith(`Cache Size: ~0 MB (142 B)`);
|
||||
const createTempDirectoryMock = jest.spyOn(
|
||||
actionUtils,
|
||||
"createTempDirectory"
|
||||
);
|
||||
createTempDirectoryMock.mockImplementation(() => {
|
||||
return Promise.resolve(tempPath);
|
||||
});
|
||||
|
||||
expect(extractTarMock).toHaveBeenCalledTimes(1);
|
||||
expect(extractTarMock).toHaveBeenCalledWith(archivePath, compression);
|
||||
const archivePath = path.join(tempPath, CacheFilename.Zstd);
|
||||
const setCacheStateMock = jest.spyOn(actionUtils, "setCacheState");
|
||||
const downloadCacheMock = jest.spyOn(cacheHttpClient, "downloadCache");
|
||||
|
||||
expect(setCacheHitOutputMock).toHaveBeenCalledTimes(1);
|
||||
expect(setCacheHitOutputMock).toHaveBeenCalledWith(false);
|
||||
const fileSize = 142;
|
||||
const getArchiveFileSizeMock = jest
|
||||
.spyOn(actionUtils, "getArchiveFileSize")
|
||||
.mockReturnValue(fileSize);
|
||||
|
||||
expect(infoMock).toHaveBeenCalledWith(
|
||||
`Cache restored from key: ${restoreKey}`
|
||||
);
|
||||
expect(failedMock).toHaveBeenCalledTimes(0);
|
||||
expect(getCompressionMock).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
const extractTarMock = jest.spyOn(tar, "extractTar");
|
||||
const setCacheHitOutputMock = jest.spyOn(
|
||||
actionUtils,
|
||||
"setCacheHitOutput"
|
||||
);
|
||||
const compression = CompressionMethod.Zstd;
|
||||
const getCompressionMock = jest
|
||||
.spyOn(actionUtils, "getCompressionMethod")
|
||||
.mockReturnValue(Promise.resolve(compression));
|
||||
|
||||
await run();
|
||||
|
||||
expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key);
|
||||
expect(getCacheMock).toHaveBeenCalledWith([key, restoreKey], {
|
||||
compressionMethod: compression
|
||||
});
|
||||
expect(setCacheStateMock).toHaveBeenCalledWith(cacheEntry);
|
||||
expect(createTempDirectoryMock).toHaveBeenCalledTimes(1);
|
||||
expect(downloadCacheMock).toHaveBeenCalledWith(
|
||||
cacheEntry.archiveLocation,
|
||||
archivePath
|
||||
);
|
||||
expect(getArchiveFileSizeMock).toHaveBeenCalledWith(archivePath);
|
||||
expect(infoMock).toHaveBeenCalledWith(`Cache Size: ~0 MB (142 B)`);
|
||||
|
||||
expect(extractTarMock).toHaveBeenCalledTimes(1);
|
||||
expect(extractTarMock).toHaveBeenCalledWith(archivePath, compression);
|
||||
|
||||
expect(setCacheHitOutputMock).toHaveBeenCalledTimes(1);
|
||||
expect(setCacheHitOutputMock).toHaveBeenCalledWith(false);
|
||||
|
||||
expect(infoMock).toHaveBeenCalledWith(
|
||||
`Cache restored from key: ${restoreKey}`
|
||||
);
|
||||
expect(failedMock).toHaveBeenCalledTimes(0);
|
||||
expect(getCompressionMock).toHaveBeenCalledTimes(1);
|
||||
}
|
||||
);
|
||||
|
|
|
@@ -7,7 +7,7 @@ import {
     CompressionMethod,
     Events,
     Inputs,
-    RefKey
+    RefKeys
 } from "../src/constants";
 import { ArtifactCacheEntry } from "../src/contracts";
 import run from "../src/save";

@@ -60,368 +60,418 @@ beforeAll(() => {
 beforeEach(() => {
     process.env[Events.Key] = Events.Push;
-    process.env[RefKey] = "refs/heads/feature-branch";
 });

 afterEach(() => {
     testUtils.clearInputs();
     delete process.env[Events.Key];
-    delete process.env[RefKey];
+
+    RefKeys.forEach(refKey => delete process.env[refKey]);
 });
|
||||
|
||||
test("save with invalid event outputs warning", async () => {
|
||||
const logWarningMock = jest.spyOn(actionUtils, "logWarning");
|
||||
const failedMock = jest.spyOn(core, "setFailed");
|
||||
const invalidEvent = "commit_comment";
|
||||
process.env[Events.Key] = invalidEvent;
|
||||
delete process.env[RefKey];
|
||||
await run();
|
||||
expect(logWarningMock).toHaveBeenCalledWith(
|
||||
`Event Validation Error: The event type ${invalidEvent} is not supported because it's not tied to a branch or tag ref.`
|
||||
);
|
||||
expect(failedMock).toHaveBeenCalledTimes(0);
|
||||
const refKeySet = RefKeys.map(refKey => {
|
||||
return [refKey, `refs/heads/feature/${refKey.toLowerCase()}`];
|
||||
});
|
||||
|
||||
test("save with no primary key in state outputs warning", async () => {
|
||||
const logWarningMock = jest.spyOn(actionUtils, "logWarning");
|
||||
const failedMock = jest.spyOn(core, "setFailed");
|
||||
test.each(refKeySet)(
|
||||
"save with invalid event outputs warning",
|
||||
async refKey => {
|
||||
const logWarningMock = jest.spyOn(actionUtils, "logWarning");
|
||||
const failedMock = jest.spyOn(core, "setFailed");
|
||||
const invalidEvent = "commit_comment";
|
||||
process.env[Events.Key] = invalidEvent;
|
||||
delete process.env[refKey];
|
||||
await run();
|
||||
expect(logWarningMock).toHaveBeenCalledWith(
|
||||
`Event Validation Error: The event type ${invalidEvent} is not supported because it's not tied to a branch or tag ref.`
|
||||
);
|
||||
expect(failedMock).toHaveBeenCalledTimes(0);
|
||||
}
|
||||
);
|
||||
|
||||
const cacheEntry: ArtifactCacheEntry = {
|
||||
cacheKey: "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43",
|
||||
scope: "refs/heads/master",
|
||||
creationTime: "2019-11-13T19:18:02+00:00",
|
||||
archiveLocation: "www.actionscache.test/download"
|
||||
};
|
||||
test.each(refKeySet)(
|
||||
"save with no primary key in state outputs warning",
|
||||
async (refKey, ref) => {
|
||||
process.env[refKey] = ref;
|
||||
|
||||
jest.spyOn(core, "getState")
|
||||
// Cache Entry State
|
||||
.mockImplementationOnce(() => {
|
||||
return JSON.stringify(cacheEntry);
|
||||
})
|
||||
// Cache Key State
|
||||
.mockImplementationOnce(() => {
|
||||
return "";
|
||||
const logWarningMock = jest.spyOn(actionUtils, "logWarning");
|
||||
const failedMock = jest.spyOn(core, "setFailed");
|
||||
|
||||
const cacheEntry: ArtifactCacheEntry = {
|
||||
cacheKey: "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43",
|
||||
scope: "refs/heads/master",
|
||||
creationTime: "2019-11-13T19:18:02+00:00",
|
||||
archiveLocation: "www.actionscache.test/download"
|
||||
};
|
||||
|
||||
jest.spyOn(core, "getState")
|
||||
// Cache Entry State
|
||||
.mockImplementationOnce(() => {
|
||||
return JSON.stringify(cacheEntry);
|
||||
})
|
||||
// Cache Key State
|
||||
.mockImplementationOnce(() => {
|
||||
return "";
|
||||
});
|
||||
|
||||
await run();
|
||||
|
||||
expect(logWarningMock).toHaveBeenCalledWith(
|
||||
`Error retrieving key from state.`
|
||||
);
|
||||
expect(logWarningMock).toHaveBeenCalledTimes(1);
|
||||
expect(failedMock).toHaveBeenCalledTimes(0);
|
||||
}
|
||||
);
|
||||
|
||||
test.each(refKeySet)(
|
||||
"save with exact match returns early",
|
||||
async (refKey, ref) => {
|
||||
process.env[refKey] = ref;
|
||||
|
||||
const infoMock = jest.spyOn(core, "info");
|
||||
const failedMock = jest.spyOn(core, "setFailed");
|
||||
|
||||
const primaryKey =
|
||||
"Linux-node-bb828da54c148048dd17899ba9fda624811cfb43";
|
||||
const cacheEntry: ArtifactCacheEntry = {
|
||||
cacheKey: primaryKey,
|
||||
scope: "refs/heads/master",
|
||||
creationTime: "2019-11-13T19:18:02+00:00",
|
||||
archiveLocation: "www.actionscache.test/download"
|
||||
};
|
||||
|
||||
jest.spyOn(core, "getState")
|
||||
// Cache Entry State
|
||||
.mockImplementationOnce(() => {
|
||||
return JSON.stringify(cacheEntry);
|
||||
})
|
||||
// Cache Key State
|
||||
.mockImplementationOnce(() => {
|
||||
return primaryKey;
|
||||
});
|
||||
|
||||
const createTarMock = jest.spyOn(tar, "createTar");
|
||||
|
||||
await run();
|
||||
|
||||
expect(infoMock).toHaveBeenCalledWith(
|
||||
`Cache hit occurred on the primary key ${primaryKey}, not saving cache.`
|
||||
);
|
||||
|
||||
expect(createTarMock).toHaveBeenCalledTimes(0);
|
||||
|
||||
expect(failedMock).toHaveBeenCalledTimes(0);
|
||||
}
|
||||
);
|
||||
|
||||
test.each(refKeySet)(
|
||||
"save with missing input outputs warning",
|
||||
async (refKey, ref) => {
|
||||
process.env[refKey] = ref;
|
||||
|
||||
const logWarningMock = jest.spyOn(actionUtils, "logWarning");
|
||||
const failedMock = jest.spyOn(core, "setFailed");
|
||||
|
||||
const primaryKey =
|
||||
"Linux-node-bb828da54c148048dd17899ba9fda624811cfb43";
|
||||
const cacheEntry: ArtifactCacheEntry = {
|
||||
cacheKey: "Linux-node-",
|
||||
scope: "refs/heads/master",
|
||||
creationTime: "2019-11-13T19:18:02+00:00",
|
||||
archiveLocation: "www.actionscache.test/download"
|
||||
};
|
||||
|
||||
jest.spyOn(core, "getState")
|
||||
// Cache Entry State
|
||||
.mockImplementationOnce(() => {
|
||||
return JSON.stringify(cacheEntry);
|
||||
})
|
||||
// Cache Key State
|
||||
.mockImplementationOnce(() => {
|
||||
return primaryKey;
|
||||
});
|
||||
|
||||
await run();
|
||||
|
||||
expect(logWarningMock).toHaveBeenCalledWith(
|
||||
"Input required and not supplied: path"
|
||||
);
|
||||
expect(logWarningMock).toHaveBeenCalledTimes(1);
|
||||
expect(failedMock).toHaveBeenCalledTimes(0);
|
||||
}
|
||||
);
|
||||
|
||||
test.each(refKeySet)(
|
||||
"save with large cache outputs warning",
|
||||
async (refKey, ref) => {
|
||||
process.env[refKey] = ref;
|
||||
|
||||
const logWarningMock = jest.spyOn(actionUtils, "logWarning");
|
||||
const failedMock = jest.spyOn(core, "setFailed");
|
||||
|
||||
const primaryKey =
|
||||
"Linux-node-bb828da54c148048dd17899ba9fda624811cfb43";
|
||||
const cacheEntry: ArtifactCacheEntry = {
|
||||
cacheKey: "Linux-node-",
|
||||
scope: "refs/heads/master",
|
||||
creationTime: "2019-11-13T19:18:02+00:00",
|
||||
archiveLocation: "www.actionscache.test/download"
|
||||
};
|
||||
|
||||
jest.spyOn(core, "getState")
|
||||
// Cache Entry State
|
||||
.mockImplementationOnce(() => {
|
||||
return JSON.stringify(cacheEntry);
|
||||
})
|
||||
// Cache Key State
|
||||
.mockImplementationOnce(() => {
|
||||
return primaryKey;
|
||||
});
|
||||
|
||||
const inputPath = "node_modules";
|
||||
const cachePaths = [path.resolve(inputPath)];
|
||||
testUtils.setInput(Inputs.Path, inputPath);
|
||||
|
||||
const createTarMock = jest.spyOn(tar, "createTar");
|
||||
|
||||
const cacheSize = 6 * 1024 * 1024 * 1024; //~6GB, over the 5GB limit
|
||||
jest.spyOn(actionUtils, "getArchiveFileSize").mockImplementationOnce(
|
||||
() => {
|
||||
return cacheSize;
|
||||
}
|
||||
);
|
||||
const compression = CompressionMethod.Gzip;
|
||||
const getCompressionMock = jest
|
||||
.spyOn(actionUtils, "getCompressionMethod")
|
||||
.mockReturnValue(Promise.resolve(compression));
|
||||
|
||||
await run();
|
||||
|
||||
const archiveFolder = "/foo/bar";
|
||||
|
||||
expect(createTarMock).toHaveBeenCalledTimes(1);
|
||||
expect(createTarMock).toHaveBeenCalledWith(
|
||||
archiveFolder,
|
||||
cachePaths,
|
||||
compression
|
||||
);
|
||||
expect(logWarningMock).toHaveBeenCalledTimes(1);
|
||||
expect(logWarningMock).toHaveBeenCalledWith(
|
||||
"Cache size of ~6144 MB (6442450944 B) is over the 5GB limit, not saving cache."
|
||||
);
|
||||
expect(failedMock).toHaveBeenCalledTimes(0);
|
||||
expect(getCompressionMock).toHaveBeenCalledTimes(1);
|
||||
}
|
||||
);
|
||||
|
||||
test.each(refKeySet)(
|
||||
"save with reserve cache failure outputs warning",
|
||||
async (refKey, ref) => {
|
||||
process.env[refKey] = ref;
|
||||
|
||||
const infoMock = jest.spyOn(core, "info");
|
||||
const logWarningMock = jest.spyOn(actionUtils, "logWarning");
|
||||
const failedMock = jest.spyOn(core, "setFailed");
|
||||
|
||||
const primaryKey =
|
||||
"Linux-node-bb828da54c148048dd17899ba9fda624811cfb43";
|
||||
const cacheEntry: ArtifactCacheEntry = {
|
||||
cacheKey: "Linux-node-",
|
||||
scope: "refs/heads/master",
|
||||
creationTime: "2019-11-13T19:18:02+00:00",
|
||||
archiveLocation: "www.actionscache.test/download"
|
||||
};
|
||||
|
||||
jest.spyOn(core, "getState")
|
||||
// Cache Entry State
|
||||
.mockImplementationOnce(() => {
|
||||
return JSON.stringify(cacheEntry);
|
||||
})
|
||||
// Cache Key State
|
||||
.mockImplementationOnce(() => {
|
||||
return primaryKey;
|
||||
});
|
||||
|
||||
const inputPath = "node_modules";
|
||||
testUtils.setInput(Inputs.Path, inputPath);
|
||||
|
||||
const reserveCacheMock = jest
|
||||
.spyOn(cacheHttpClient, "reserveCache")
|
||||
.mockImplementationOnce(() => {
|
||||
return Promise.resolve(-1);
|
||||
});
|
||||
|
||||
const createTarMock = jest.spyOn(tar, "createTar");
|
||||
const saveCacheMock = jest.spyOn(cacheHttpClient, "saveCache");
|
||||
const compression = CompressionMethod.Zstd;
|
||||
const getCompressionMock = jest
|
||||
.spyOn(actionUtils, "getCompressionMethod")
|
||||
.mockReturnValue(Promise.resolve(compression));
|
||||
|
||||
await run();
|
||||
|
||||
expect(reserveCacheMock).toHaveBeenCalledTimes(1);
|
||||
expect(reserveCacheMock).toHaveBeenCalledWith(primaryKey, {
|
||||
compressionMethod: compression
|
||||
});
|
||||
|
||||
await run();
|
||||
expect(infoMock).toHaveBeenCalledWith(
|
||||
`Unable to reserve cache with key ${primaryKey}, another job may be creating this cache.`
|
||||
);
|
||||
|
||||
expect(logWarningMock).toHaveBeenCalledWith(
|
||||
`Error retrieving key from state.`
|
||||
);
|
||||
expect(logWarningMock).toHaveBeenCalledTimes(1);
|
||||
expect(failedMock).toHaveBeenCalledTimes(0);
|
||||
});
|
||||
expect(createTarMock).toHaveBeenCalledTimes(0);
|
||||
expect(saveCacheMock).toHaveBeenCalledTimes(0);
|
||||
expect(logWarningMock).toHaveBeenCalledTimes(0);
|
||||
expect(failedMock).toHaveBeenCalledTimes(0);
|
||||
expect(getCompressionMock).toHaveBeenCalledTimes(1);
|
||||
}
|
||||
);
|
||||
|
||||
test("save with exact match returns early", async () => {
|
||||
const infoMock = jest.spyOn(core, "info");
|
||||
const failedMock = jest.spyOn(core, "setFailed");
|
||||
test.each(refKeySet)(
|
||||
"save with server error outputs warning",
|
||||
async (refKey, ref) => {
|
||||
process.env[refKey] = ref;
|
||||
|
||||
const primaryKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43";
|
||||
const cacheEntry: ArtifactCacheEntry = {
|
||||
cacheKey: primaryKey,
|
||||
scope: "refs/heads/master",
|
||||
creationTime: "2019-11-13T19:18:02+00:00",
|
||||
archiveLocation: "www.actionscache.test/download"
|
||||
};
|
||||
const logWarningMock = jest.spyOn(actionUtils, "logWarning");
|
||||
const failedMock = jest.spyOn(core, "setFailed");
|
||||
|
||||
jest.spyOn(core, "getState")
|
||||
// Cache Entry State
|
||||
.mockImplementationOnce(() => {
|
||||
return JSON.stringify(cacheEntry);
|
||||
})
|
||||
// Cache Key State
|
||||
.mockImplementationOnce(() => {
|
||||
return primaryKey;
|
||||
const primaryKey =
|
||||
"Linux-node-bb828da54c148048dd17899ba9fda624811cfb43";
|
||||
const cacheEntry: ArtifactCacheEntry = {
|
||||
cacheKey: "Linux-node-",
|
||||
scope: "refs/heads/master",
|
||||
creationTime: "2019-11-13T19:18:02+00:00",
|
||||
archiveLocation: "www.actionscache.test/download"
|
||||
};
|
||||
|
||||
jest.spyOn(core, "getState")
|
||||
// Cache Entry State
|
||||
.mockImplementationOnce(() => {
|
||||
return JSON.stringify(cacheEntry);
|
||||
})
|
||||
// Cache Key State
|
||||
.mockImplementationOnce(() => {
|
||||
return primaryKey;
|
||||
});
|
||||
|
||||
const inputPath = "node_modules";
|
||||
const cachePaths = [path.resolve(inputPath)];
|
||||
testUtils.setInput(Inputs.Path, inputPath);
|
||||
|
||||
const cacheId = 4;
|
||||
const reserveCacheMock = jest
|
||||
.spyOn(cacheHttpClient, "reserveCache")
|
||||
.mockImplementationOnce(() => {
|
||||
return Promise.resolve(cacheId);
|
||||
});
|
||||
|
||||
const createTarMock = jest.spyOn(tar, "createTar");
|
||||
|
||||
const saveCacheMock = jest
|
||||
.spyOn(cacheHttpClient, "saveCache")
|
||||
.mockImplementationOnce(() => {
|
||||
throw new Error("HTTP Error Occurred");
|
||||
});
|
||||
const compression = CompressionMethod.Zstd;
|
||||
const getCompressionMock = jest
|
||||
.spyOn(actionUtils, "getCompressionMethod")
|
||||
.mockReturnValue(Promise.resolve(compression));
|
||||
|
||||
await run();
|
||||
|
||||
expect(reserveCacheMock).toHaveBeenCalledTimes(1);
|
||||
expect(reserveCacheMock).toHaveBeenCalledWith(primaryKey, {
|
||||
compressionMethod: compression
|
||||
});
|
||||
|
||||
const createTarMock = jest.spyOn(tar, "createTar");
|
||||
const archiveFolder = "/foo/bar";
|
||||
const archiveFile = path.join(archiveFolder, CacheFilename.Zstd);
|
||||
|
||||
await run();
|
||||
expect(createTarMock).toHaveBeenCalledTimes(1);
|
||||
expect(createTarMock).toHaveBeenCalledWith(
|
||||
archiveFolder,
|
||||
cachePaths,
|
||||
compression
|
||||
);
|
||||
|
||||
expect(infoMock).toHaveBeenCalledWith(
|
||||
`Cache hit occurred on the primary key ${primaryKey}, not saving cache.`
|
||||
);
|
||||
expect(saveCacheMock).toHaveBeenCalledTimes(1);
|
||||
expect(saveCacheMock).toHaveBeenCalledWith(cacheId, archiveFile);
|
||||
|
||||
expect(createTarMock).toHaveBeenCalledTimes(0);
|
||||
expect(logWarningMock).toHaveBeenCalledTimes(1);
|
||||
expect(logWarningMock).toHaveBeenCalledWith("HTTP Error Occurred");
|
||||
|
||||
expect(failedMock).toHaveBeenCalledTimes(0);
|
||||
});
|
||||
expect(failedMock).toHaveBeenCalledTimes(0);
|
||||
expect(getCompressionMock).toHaveBeenCalledTimes(1);
|
||||
}
|
||||
);
|
||||
|
||||
test("save with missing input outputs warning", async () => {
|
||||
const logWarningMock = jest.spyOn(actionUtils, "logWarning");
|
||||
const failedMock = jest.spyOn(core, "setFailed");
|
||||
test.each(refKeySet)(
|
||||
"save with valid inputs uploads a cache",
|
||||
async (refKey, ref) => {
|
||||
process.env[refKey] = ref;
|
||||
|
||||
const primaryKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43";
|
||||
const cacheEntry: ArtifactCacheEntry = {
|
||||
cacheKey: "Linux-node-",
|
||||
scope: "refs/heads/master",
|
||||
creationTime: "2019-11-13T19:18:02+00:00",
|
||||
archiveLocation: "www.actionscache.test/download"
|
||||
};
|
||||
const failedMock = jest.spyOn(core, "setFailed");
|
||||
|
||||
jest.spyOn(core, "getState")
|
||||
// Cache Entry State
|
||||
.mockImplementationOnce(() => {
|
||||
return JSON.stringify(cacheEntry);
|
||||
})
|
||||
// Cache Key State
|
||||
.mockImplementationOnce(() => {
|
||||
return primaryKey;
|
||||
const primaryKey =
|
||||
"Linux-node-bb828da54c148048dd17899ba9fda624811cfb43";
|
||||
const cacheEntry: ArtifactCacheEntry = {
|
||||
cacheKey: "Linux-node-",
|
||||
scope: "refs/heads/master",
|
||||
creationTime: "2019-11-13T19:18:02+00:00",
|
||||
archiveLocation: "www.actionscache.test/download"
|
||||
};
|
||||
|
||||
jest.spyOn(core, "getState")
|
||||
// Cache Entry State
|
||||
.mockImplementationOnce(() => {
|
||||
return JSON.stringify(cacheEntry);
|
||||
})
|
||||
// Cache Key State
|
||||
.mockImplementationOnce(() => {
|
||||
return primaryKey;
|
||||
});
|
||||
|
||||
const inputPath = "node_modules";
|
||||
const cachePaths = [path.resolve(inputPath)];
|
||||
testUtils.setInput(Inputs.Path, inputPath);
|
||||
|
||||
const cacheId = 4;
|
||||
const reserveCacheMock = jest
|
||||
.spyOn(cacheHttpClient, "reserveCache")
|
||||
.mockImplementationOnce(() => {
|
||||
return Promise.resolve(cacheId);
|
||||
});
|
||||
|
||||
const createTarMock = jest.spyOn(tar, "createTar");
|
||||
|
||||
const saveCacheMock = jest.spyOn(cacheHttpClient, "saveCache");
|
||||
const compression = CompressionMethod.Zstd;
|
||||
const getCompressionMock = jest
|
||||
.spyOn(actionUtils, "getCompressionMethod")
|
||||
.mockReturnValue(Promise.resolve(compression));
|
||||
|
||||
await run();
|
||||
|
||||
expect(reserveCacheMock).toHaveBeenCalledTimes(1);
|
||||
expect(reserveCacheMock).toHaveBeenCalledWith(primaryKey, {
|
||||
compressionMethod: compression
|
||||
});
|
||||
|
||||
await run();
|
||||
const archiveFolder = "/foo/bar";
|
||||
const archiveFile = path.join(archiveFolder, CacheFilename.Zstd);
|
||||
|
||||
expect(logWarningMock).toHaveBeenCalledWith(
|
||||
"Input required and not supplied: path"
|
||||
);
|
||||
expect(logWarningMock).toHaveBeenCalledTimes(1);
|
||||
expect(failedMock).toHaveBeenCalledTimes(0);
|
||||
});
|
||||
expect(createTarMock).toHaveBeenCalledTimes(1);
|
||||
expect(createTarMock).toHaveBeenCalledWith(
|
||||
archiveFolder,
|
||||
cachePaths,
|
||||
compression
|
||||
);
|
||||
|
||||
test("save with large cache outputs warning", async () => {
|
||||
const logWarningMock = jest.spyOn(actionUtils, "logWarning");
|
||||
const failedMock = jest.spyOn(core, "setFailed");
|
||||
expect(saveCacheMock).toHaveBeenCalledTimes(1);
|
||||
expect(saveCacheMock).toHaveBeenCalledWith(cacheId, archiveFile);
|
||||
|
||||
const primaryKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43";
|
||||
const cacheEntry: ArtifactCacheEntry = {
|
||||
cacheKey: "Linux-node-",
|
||||
scope: "refs/heads/master",
|
||||
creationTime: "2019-11-13T19:18:02+00:00",
|
||||
archiveLocation: "www.actionscache.test/download"
|
||||
};
|
||||
|
||||
jest.spyOn(core, "getState")
|
||||
// Cache Entry State
|
||||
.mockImplementationOnce(() => {
|
||||
return JSON.stringify(cacheEntry);
|
||||
})
|
||||
// Cache Key State
|
||||
.mockImplementationOnce(() => {
|
||||
return primaryKey;
|
||||
});
|
||||
|
||||
const inputPath = "node_modules";
|
||||
const cachePaths = [path.resolve(inputPath)];
|
||||
testUtils.setInput(Inputs.Path, inputPath);
|
||||
|
||||
const createTarMock = jest.spyOn(tar, "createTar");
|
||||
|
||||
const cacheSize = 6 * 1024 * 1024 * 1024; //~6GB, over the 5GB limit
|
||||
jest.spyOn(actionUtils, "getArchiveFileSize").mockImplementationOnce(() => {
|
||||
return cacheSize;
|
||||
});
|
||||
const compression = CompressionMethod.Gzip;
|
||||
const getCompressionMock = jest
|
||||
.spyOn(actionUtils, "getCompressionMethod")
|
||||
.mockReturnValue(Promise.resolve(compression));
|
||||
|
||||
await run();
|
||||
|
||||
const archiveFolder = "/foo/bar";
|
||||
|
||||
expect(createTarMock).toHaveBeenCalledTimes(1);
|
||||
expect(createTarMock).toHaveBeenCalledWith(
|
||||
archiveFolder,
|
||||
cachePaths,
|
||||
compression
|
||||
);
|
||||
expect(logWarningMock).toHaveBeenCalledTimes(1);
|
||||
expect(logWarningMock).toHaveBeenCalledWith(
|
||||
"Cache size of ~6144 MB (6442450944 B) is over the 5GB limit, not saving cache."
|
||||
);
|
||||
expect(failedMock).toHaveBeenCalledTimes(0);
|
||||
expect(getCompressionMock).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
|
||||
test("save with reserve cache failure outputs warning", async () => {
|
||||
const infoMock = jest.spyOn(core, "info");
|
||||
const logWarningMock = jest.spyOn(actionUtils, "logWarning");
|
||||
const failedMock = jest.spyOn(core, "setFailed");
|
||||
|
||||
const primaryKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43";
|
||||
const cacheEntry: ArtifactCacheEntry = {
|
||||
cacheKey: "Linux-node-",
|
||||
scope: "refs/heads/master",
|
||||
creationTime: "2019-11-13T19:18:02+00:00",
|
||||
archiveLocation: "www.actionscache.test/download"
|
||||
};
|
||||
|
||||
jest.spyOn(core, "getState")
|
||||
// Cache Entry State
|
||||
.mockImplementationOnce(() => {
|
||||
return JSON.stringify(cacheEntry);
|
||||
})
|
||||
// Cache Key State
|
||||
.mockImplementationOnce(() => {
|
||||
return primaryKey;
|
||||
});
|
||||
|
||||
const inputPath = "node_modules";
|
||||
testUtils.setInput(Inputs.Path, inputPath);
|
||||
|
||||
const reserveCacheMock = jest
|
||||
.spyOn(cacheHttpClient, "reserveCache")
|
||||
.mockImplementationOnce(() => {
|
||||
return Promise.resolve(-1);
|
||||
});
|
||||
|
||||
const createTarMock = jest.spyOn(tar, "createTar");
|
||||
const saveCacheMock = jest.spyOn(cacheHttpClient, "saveCache");
|
||||
const compression = CompressionMethod.Zstd;
|
||||
const getCompressionMock = jest
|
||||
.spyOn(actionUtils, "getCompressionMethod")
|
||||
.mockReturnValue(Promise.resolve(compression));
|
||||
|
||||
await run();
|
||||
|
||||
expect(reserveCacheMock).toHaveBeenCalledTimes(1);
|
||||
expect(reserveCacheMock).toHaveBeenCalledWith(primaryKey, {
|
||||
compressionMethod: compression
|
||||
});
|
||||
|
||||
expect(infoMock).toHaveBeenCalledWith(
|
||||
`Unable to reserve cache with key ${primaryKey}, another job may be creating this cache.`
|
||||
);
|
||||
|
||||
expect(createTarMock).toHaveBeenCalledTimes(0);
|
||||
expect(saveCacheMock).toHaveBeenCalledTimes(0);
|
||||
expect(logWarningMock).toHaveBeenCalledTimes(0);
|
||||
expect(failedMock).toHaveBeenCalledTimes(0);
|
||||
expect(getCompressionMock).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
|
||||
test("save with server error outputs warning", async () => {
|
||||
const logWarningMock = jest.spyOn(actionUtils, "logWarning");
|
||||
const failedMock = jest.spyOn(core, "setFailed");
|
||||
|
||||
const primaryKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43";
|
||||
const cacheEntry: ArtifactCacheEntry = {
|
||||
cacheKey: "Linux-node-",
|
||||
scope: "refs/heads/master",
|
||||
creationTime: "2019-11-13T19:18:02+00:00",
|
||||
archiveLocation: "www.actionscache.test/download"
|
||||
};
|
||||
|
||||
jest.spyOn(core, "getState")
|
||||
// Cache Entry State
|
||||
.mockImplementationOnce(() => {
|
||||
return JSON.stringify(cacheEntry);
|
||||
})
|
||||
// Cache Key State
|
||||
.mockImplementationOnce(() => {
|
||||
return primaryKey;
|
||||
});
|
||||
|
||||
const inputPath = "node_modules";
|
||||
const cachePaths = [path.resolve(inputPath)];
|
||||
testUtils.setInput(Inputs.Path, inputPath);
|
||||
|
||||
const cacheId = 4;
|
||||
const reserveCacheMock = jest
|
||||
.spyOn(cacheHttpClient, "reserveCache")
|
||||
.mockImplementationOnce(() => {
|
||||
return Promise.resolve(cacheId);
|
||||
});
|
||||
|
||||
const createTarMock = jest.spyOn(tar, "createTar");
|
||||
|
||||
const saveCacheMock = jest
|
||||
.spyOn(cacheHttpClient, "saveCache")
|
||||
.mockImplementationOnce(() => {
|
||||
throw new Error("HTTP Error Occurred");
|
||||
});
|
||||
const compression = CompressionMethod.Zstd;
|
||||
const getCompressionMock = jest
|
||||
.spyOn(actionUtils, "getCompressionMethod")
|
||||
.mockReturnValue(Promise.resolve(compression));
|
||||
|
||||
await run();
|
||||
|
||||
expect(reserveCacheMock).toHaveBeenCalledTimes(1);
|
||||
expect(reserveCacheMock).toHaveBeenCalledWith(primaryKey, {
|
||||
compressionMethod: compression
|
||||
});
|
||||
|
||||
const archiveFolder = "/foo/bar";
|
||||
const archiveFile = path.join(archiveFolder, CacheFilename.Zstd);
|
||||
|
||||
expect(createTarMock).toHaveBeenCalledTimes(1);
|
||||
expect(createTarMock).toHaveBeenCalledWith(
|
||||
archiveFolder,
|
||||
cachePaths,
|
||||
compression
|
||||
);
|
||||
|
||||
expect(saveCacheMock).toHaveBeenCalledTimes(1);
|
||||
expect(saveCacheMock).toHaveBeenCalledWith(cacheId, archiveFile);
|
||||
|
||||
expect(logWarningMock).toHaveBeenCalledTimes(1);
|
||||
expect(logWarningMock).toHaveBeenCalledWith("HTTP Error Occurred");
|
||||
|
||||
expect(failedMock).toHaveBeenCalledTimes(0);
|
||||
expect(getCompressionMock).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
|
||||
test("save with valid inputs uploads a cache", async () => {
|
||||
const failedMock = jest.spyOn(core, "setFailed");
|
||||
|
||||
const primaryKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43";
|
||||
const cacheEntry: ArtifactCacheEntry = {
|
||||
cacheKey: "Linux-node-",
|
||||
scope: "refs/heads/master",
|
||||
creationTime: "2019-11-13T19:18:02+00:00",
|
||||
archiveLocation: "www.actionscache.test/download"
|
||||
};
|
||||
|
||||
jest.spyOn(core, "getState")
|
||||
// Cache Entry State
|
||||
.mockImplementationOnce(() => {
|
||||
return JSON.stringify(cacheEntry);
|
||||
})
|
||||
// Cache Key State
|
||||
.mockImplementationOnce(() => {
|
||||
return primaryKey;
|
||||
});
|
||||
|
||||
const inputPath = "node_modules";
|
||||
const cachePaths = [path.resolve(inputPath)];
|
||||
testUtils.setInput(Inputs.Path, inputPath);
|
||||
|
||||
const cacheId = 4;
|
||||
const reserveCacheMock = jest
|
||||
.spyOn(cacheHttpClient, "reserveCache")
|
||||
.mockImplementationOnce(() => {
|
||||
return Promise.resolve(cacheId);
|
||||
});
|
||||
|
||||
const createTarMock = jest.spyOn(tar, "createTar");
|
||||
|
||||
const saveCacheMock = jest.spyOn(cacheHttpClient, "saveCache");
|
||||
const compression = CompressionMethod.Zstd;
|
||||
const getCompressionMock = jest
|
||||
.spyOn(actionUtils, "getCompressionMethod")
|
||||
.mockReturnValue(Promise.resolve(compression));
|
||||
|
||||
await run();
|
||||
|
||||
expect(reserveCacheMock).toHaveBeenCalledTimes(1);
|
||||
expect(reserveCacheMock).toHaveBeenCalledWith(primaryKey, {
|
||||
compressionMethod: compression
|
||||
});
|
||||
|
||||
const archiveFolder = "/foo/bar";
|
||||
const archiveFile = path.join(archiveFolder, CacheFilename.Zstd);
|
||||
|
||||
expect(createTarMock).toHaveBeenCalledTimes(1);
|
||||
expect(createTarMock).toHaveBeenCalledWith(
|
||||
archiveFolder,
|
||||
cachePaths,
|
||||
compression
|
||||
);
|
||||
|
||||
expect(saveCacheMock).toHaveBeenCalledTimes(1);
|
||||
expect(saveCacheMock).toHaveBeenCalledWith(cacheId, archiveFile);
|
||||
|
||||
expect(failedMock).toHaveBeenCalledTimes(0);
|
||||
expect(getCompressionMock).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
expect(failedMock).toHaveBeenCalledTimes(0);
|
||||
expect(getCompressionMock).toHaveBeenCalledTimes(1);
|
||||
}
|
||||
);
|
||||
|
|
15  dist/restore/index.js  (vendored)

@@ -3345,10 +3345,16 @@ function resolvePaths(patterns) {
     });
 }
 exports.resolvePaths = resolvePaths;
-// Cache token authorized for all events that are tied to a ref
+// Cache token authorized for events where a reference is defined
 // See GitHub Context https://help.github.com/actions/automating-your-workflow-with-github-actions/contexts-and-expression-syntax-for-github-actions#github-context
 function isValidEvent() {
-    return constants_1.RefKey in process.env && Boolean(process.env[constants_1.RefKey]);
+    for (let i = 0; i < constants_1.RefKeys.length; i++) {
+        let refKey = constants_1.RefKeys[i];
+        if (refKey in process.env) {
+            return Boolean(process.env[refKey]);
+        }
+    }
+    return false;
 }
 exports.isValidEvent = isValidEvent;
 function unlinkFile(path) {

@@ -4607,7 +4613,10 @@ var CompressionMethod;
 // over the socket during this period, the socket is destroyed and the download
 // is aborted.
 exports.SocketTimeout = 5000;
-exports.RefKey = "GITHUB_REF";
+exports.RefKeys = [
+    "ACTIONS_CACHE_REF",
+    "GITHUB_REF",
+];

 /***/ }),
15  dist/save/index.js  (vendored)

@@ -3345,10 +3345,16 @@ function resolvePaths(patterns) {
     });
 }
 exports.resolvePaths = resolvePaths;
-// Cache token authorized for all events that are tied to a ref
+// Cache token authorized for events where a reference is defined
 // See GitHub Context https://help.github.com/actions/automating-your-workflow-with-github-actions/contexts-and-expression-syntax-for-github-actions#github-context
 function isValidEvent() {
-    return constants_1.RefKey in process.env && Boolean(process.env[constants_1.RefKey]);
+    for (let i = 0; i < constants_1.RefKeys.length; i++) {
+        let refKey = constants_1.RefKeys[i];
+        if (refKey in process.env) {
+            return Boolean(process.env[refKey]);
+        }
+    }
+    return false;
 }
 exports.isValidEvent = isValidEvent;
 function unlinkFile(path) {

@@ -4694,7 +4700,10 @@ var CompressionMethod;
 // over the socket during this period, the socket is destroyed and the download
 // is aborted.
 exports.SocketTimeout = 5000;
-exports.RefKey = "GITHUB_REF";
+exports.RefKeys = [
+    "ACTIONS_CACHE_REF",
+    "GITHUB_REF",
+];

 /***/ }),
@@ -34,4 +34,4 @@ export enum CompressionMethod {
 // is aborted.
 export const SocketTimeout = 5000;

-export const RefKey = "GITHUB_REF";
+export const RefKeys = ["ACTIONS_CACHE_REF", "GITHUB_REF"];
@@ -11,7 +11,7 @@ import {
     CacheFilename,
     CompressionMethod,
     Outputs,
-    RefKey,
+    RefKeys,
     State
 } from "../constants";
 import { ArtifactCacheEntry } from "../contracts";

@@ -108,10 +108,18 @@ export async function resolvePaths(patterns: string[]): Promise<string[]> {
     return paths;
 }

-// Cache token authorized for all events that are tied to a ref
+// Cache token authorized for events where a reference is defined
 // See GitHub Context https://help.github.com/actions/automating-your-workflow-with-github-actions/contexts-and-expression-syntax-for-github-actions#github-context
 export function isValidEvent(): boolean {
-    return RefKey in process.env && Boolean(process.env[RefKey]);
+    for (let i = 0; i < RefKeys.length; i++) {
+        let refKey = RefKeys[i];
+
+        if (refKey in process.env) {
+            return Boolean(process.env[refKey])
+        }
+    }
+
+    return false;
 }

 export function unlinkFile(path: fs.PathLike): Promise<void> {
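One consequence of the loop above, worth noting when both variables are exported: the first key found in the environment decides the result, so a defined-but-empty ACTIONS_CACHE_REF makes isValidEvent() return false even if GITHUB_REF is set. A small Jest-style sketch of that behavior (illustrative only, not part of this diff):

import * as actionUtils from "../src/utils/actionUtils";
import { RefKeys } from "../src/constants";

afterEach(() => {
    RefKeys.forEach(refKey => delete process.env[refKey]);
});

test("an empty ACTIONS_CACHE_REF short-circuits the check", () => {
    process.env["ACTIONS_CACHE_REF"] = ""; // present but empty, so Boolean("") is returned
    process.env["GITHUB_REF"] = "refs/heads/main";

    expect(actionUtils.isValidEvent()).toBe(false);
});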