Mirror of https://code.forgejo.org/actions/cache.git (synced 2024-11-24 04:29:16 +01:00)
Cache multiple paths and add glob pattern support (#212)
* Allow for multiple line-delimited paths to cache
* Add initial minimatch support
* Use @actions/glob for pattern matching
* Cache multiple entries using --files-from tar input
  (intermediate commits squashed into this change: remove known failing test; Quote tar paths;
  Add salt to test cache; Try reading input files from manifest; bump salt; Run test on macos;
  more testing; Run caching tests on 3 platforms; Run tests on self-hosted;
  Apparently cant reference hosted runners by name; Bump salt; wait for some time after save;
  more timing out; smarter waiting; Cache in tmp dir that won't be deleted;
  Use child_process instead of actions/exec; Revert tempDir hack; bump salt; more logging;
  More console logging; Use filepath to with cacheHttpClient; Test cache restoration;
  Revert temp dir hack; debug logging; clean up cache.yml; testing; Bump salt;
  change debug output; build actions)
* unit test coverage for caching multiple dirs
* Ensure there's a locateable test folder at homedir
* Clean up code
* Version cache with all inputs
* Unit test getCacheVersion
* Include keys in getCacheEntry request
* Clean import orders
* Use fs promises in actionUtils tests
* Update import order to fix linter errors
* Fix remaining linter error
* Remove platform-specific test code
* Add lerna example for caching multiple dirs
* Lerna example updated to v2

Co-authored-by: Josh Gross <joshmgross@github.com>
This commit is contained in:
parent 22d71e33ad, commit eb78578266
16 changed files with 4820 additions and 160 deletions
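The user-facing change: the `path` input now accepts several newline-delimited patterns. A minimal TypeScript sketch of the parsing this commit adds in src/save.ts (the sample input value is hypothetical):

```ts
// Hypothetical value of the multi-line `path` input from a workflow file.
const rawInput = "node_modules\n*/*/node_modules\n";

// Mirrors the parsing this commit adds in src/save.ts: one pattern per line, blanks dropped.
const patterns: string[] = rawInput.split("\n").filter(x => x !== "");

console.log(patterns); // ["node_modules", "*/*/node_modules"]
```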
__tests__/actionUtils.test.ts
@@ -1,5 +1,6 @@
 import * as core from "@actions/core";
-import * as fs from "fs";
+import * as io from "@actions/io";
+import { promises as fs } from "fs";
 import * as os from "os";
 import * as path from "path";
 
@@ -7,13 +8,24 @@ import { Events, Outputs, State } from "../src/constants";
 import { ArtifactCacheEntry } from "../src/contracts";
 import * as actionUtils from "../src/utils/actionUtils";
 
+import uuid = require("uuid");
+
 jest.mock("@actions/core");
 jest.mock("os");
 
+function getTempDir(): string {
+    return path.join(__dirname, "_temp", "actionUtils");
+}
+
 afterEach(() => {
     delete process.env[Events.Key];
 });
 
+afterAll(async () => {
+    delete process.env["GITHUB_WORKSPACE"];
+    await io.rmRF(getTempDir());
+});
+
 test("getArchiveFileSize returns file size", () => {
     const filePath = path.join(__dirname, "__fixtures__", "helloWorld.txt");
 
@@ -182,17 +194,43 @@ test("isValidEvent returns false for unknown event", () => {
     expect(isValidEvent).toBe(false);
 });
 
-test("resolvePath with no ~ in path", () => {
-    const filePath = ".cache/yarn";
+test("resolvePaths with no ~ in path", async () => {
+    const filePath = ".cache";
 
-    const resolvedPath = actionUtils.resolvePath(filePath);
+    // Create the following layout:
+    // cwd
+    // cwd/.cache
+    // cwd/.cache/file.txt
 
-    const expectedPath = path.resolve(filePath);
-    expect(resolvedPath).toBe(expectedPath);
+    const root = path.join(getTempDir(), "no-tilde");
+    // tarball entries will be relative to workspace
+    process.env["GITHUB_WORKSPACE"] = root;
+
+    await fs.mkdir(root, { recursive: true });
+    const cache = path.join(root, ".cache");
+    await fs.mkdir(cache, { recursive: true });
+    await fs.writeFile(path.join(cache, "file.txt"), "cached");
+
+    const originalCwd = process.cwd();
+
+    try {
+        process.chdir(root);
+
+        const resolvedPath = await actionUtils.resolvePaths([filePath]);
+
+        const expectedPath = [filePath];
+        expect(resolvedPath).toStrictEqual(expectedPath);
+    } finally {
+        process.chdir(originalCwd);
+    }
 });
 
-test("resolvePath with ~ in path", () => {
-    const filePath = "~/.cache/yarn";
+test("resolvePaths with ~ in path", async () => {
+    const cacheDir = uuid();
+    const filePath = `~/${cacheDir}`;
+    // Create the following layout:
+    // ~/uuid
+    // ~/uuid/file.txt
 
     const homedir = jest.requireActual("os").homedir();
     const homedirMock = jest.spyOn(os, "homedir");
@@ -200,24 +238,93 @@ test("resolvePath with ~ in path", () => {
         return homedir;
     });
 
-    const resolvedPath = actionUtils.resolvePath(filePath);
+    const target = path.join(homedir, cacheDir);
+    await fs.mkdir(target, { recursive: true });
+    await fs.writeFile(path.join(target, "file.txt"), "cached");
 
-    const expectedPath = path.join(homedir, ".cache/yarn");
-    expect(resolvedPath).toBe(expectedPath);
+    const root = getTempDir();
+    process.env["GITHUB_WORKSPACE"] = root;
+
+    try {
+        const resolvedPath = await actionUtils.resolvePaths([filePath]);
+
+        const expectedPath = [path.relative(root, target)];
+        expect(resolvedPath).toStrictEqual(expectedPath);
+    } finally {
+        await io.rmRF(target);
+    }
 });
 
-test("resolvePath with home not found", () => {
+test("resolvePaths with home not found", async () => {
     const filePath = "~/.cache/yarn";
     const homedirMock = jest.spyOn(os, "homedir");
     homedirMock.mockImplementation(() => {
         return "";
     });
 
-    expect(() => actionUtils.resolvePath(filePath)).toThrow(
-        "Unable to resolve `~` to HOME"
+    await expect(actionUtils.resolvePaths([filePath])).rejects.toThrow(
+        "Unable to determine HOME directory"
     );
 });
 
+test("resolvePaths inclusion pattern returns found", async () => {
+    const pattern = "*.ts";
+    // Create the following layout:
+    // inclusion-patterns
+    // inclusion-patterns/miss.txt
+    // inclusion-patterns/test.ts
+
+    const root = path.join(getTempDir(), "inclusion-patterns");
+    // tarball entries will be relative to workspace
+    process.env["GITHUB_WORKSPACE"] = root;
+
+    await fs.mkdir(root, { recursive: true });
+    await fs.writeFile(path.join(root, "miss.txt"), "no match");
+    await fs.writeFile(path.join(root, "test.ts"), "match");
+
+    const originalCwd = process.cwd();
+
+    try {
+        process.chdir(root);
+
+        const resolvedPath = await actionUtils.resolvePaths([pattern]);
+
+        const expectedPath = ["test.ts"];
+        expect(resolvedPath).toStrictEqual(expectedPath);
+    } finally {
+        process.chdir(originalCwd);
+    }
+});
+
+test("resolvePaths exclusion pattern returns not found", async () => {
+    const patterns = ["*.ts", "!test.ts"];
+    // Create the following layout:
+    // exclusion-patterns
+    // exclusion-patterns/miss.txt
+    // exclusion-patterns/test.ts
+
+    const root = path.join(getTempDir(), "exclusion-patterns");
+    // tarball entries will be relative to workspace
+    process.env["GITHUB_WORKSPACE"] = root;
+
+    await fs.mkdir(root, { recursive: true });
+    await fs.writeFile(path.join(root, "miss.txt"), "no match");
+    await fs.writeFile(path.join(root, "test.ts"), "no match");
+
+    const originalCwd = process.cwd();
+
+    try {
+        process.chdir(root);
+
+        const resolvedPath = await actionUtils.resolvePaths(patterns);
+
+        const expectedPath = [];
+        expect(resolvedPath).toStrictEqual(expectedPath);
+    } finally {
+        process.chdir(originalCwd);
+    }
+});
+
 test("isValidEvent returns true for push event", () => {
     const event = Events.Push;
     process.env[Events.Key] = event;
@@ -237,13 +344,14 @@ test("isValidEvent returns true for pull request event", () => {
 });
 
 test("unlinkFile unlinks file", async () => {
-    const testDirectory = fs.mkdtempSync("unlinkFileTest");
+    const testDirectory = await fs.mkdtemp("unlinkFileTest");
     const testFile = path.join(testDirectory, "test.txt");
-    fs.writeFileSync(testFile, "hello world");
+    await fs.writeFile(testFile, "hello world");
 
     await actionUtils.unlinkFile(testFile);
 
-    expect(fs.existsSync(testFile)).toBe(false);
+    // This should throw as testFile should not exist
+    await expect(fs.stat(testFile)).rejects.toThrow();
 
-    fs.rmdirSync(testDirectory);
+    await fs.rmdir(testDirectory);
 });
__tests__/cacheHttpsClient.test.ts (new file, 21 lines)
@@ -0,0 +1,21 @@
+import { getCacheVersion } from "../src/cacheHttpClient";
+import { Inputs } from "../src/constants";
+import * as testUtils from "../src/utils/testUtils";
+
+afterEach(() => {
+    testUtils.clearInputs();
+});
+
+test("getCacheVersion with path input returns version", async () => {
+    testUtils.setInput(Inputs.Path, "node_modules");
+
+    const result = getCacheVersion();
+
+    expect(result).toEqual(
+        "b3e0c6cb5ecf32614eeb2997d905b9c297046d7cbf69062698f25b14b4cb0985"
+    );
+});
+
+test("getCacheVersion with no input throws", async () => {
+    expect(() => getCacheVersion()).toThrow();
+});
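Where the expected hash above comes from: getCacheVersion (added to src/cacheHttpClient.ts later in this diff) hashes the `path` input joined with a version salt. A sketch that should reproduce the asserted value, assuming the salt "1.0" defined in that file:

```ts
import * as crypto from "crypto";

// [path input, versionSalt]; the salt "1.0" comes from src/cacheHttpClient.ts below.
const components = ["node_modules", "1.0"];

const version = crypto
    .createHash("sha256")
    .update(components.join("|"))
    .digest("hex");

console.log(version); // expected to equal the hash asserted in the test above
```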
__tests__/restore.test.ts
@@ -14,10 +14,6 @@ jest.mock("../src/tar");
 jest.mock("../src/utils/actionUtils");
 
 beforeAll(() => {
-    jest.spyOn(actionUtils, "resolvePath").mockImplementation(filePath => {
-        return path.resolve(filePath);
-    });
-
     jest.spyOn(actionUtils, "isExactKeyMatch").mockImplementation(
         (key, cacheResult) => {
             const actualUtils = jest.requireActual("../src/utils/actionUtils");
@@ -60,7 +56,8 @@ test("restore with invalid event outputs warning", async () => {
 test("restore with no path should fail", async () => {
     const failedMock = jest.spyOn(core, "setFailed");
     await run();
-    expect(failedMock).toHaveBeenCalledWith(
+    // this input isn't necessary for restore b/c tarball contains entries relative to workspace
+    expect(failedMock).not.toHaveBeenCalledWith(
         "Input required and not supplied: path"
     );
 });
@@ -202,7 +199,6 @@ test("restore with restore keys and no cache found", async () => {
 
 test("restore with cache found", async () => {
     const key = "node-test";
-    const cachePath = path.resolve("node_modules");
     testUtils.setInputs({
         path: "node_modules",
         key
@@ -257,7 +253,7 @@ test("restore with cache found", async () => {
     expect(getArchiveFileSizeMock).toHaveBeenCalledWith(archivePath);
 
     expect(extractTarMock).toHaveBeenCalledTimes(1);
-    expect(extractTarMock).toHaveBeenCalledWith(archivePath, cachePath);
+    expect(extractTarMock).toHaveBeenCalledWith(archivePath);
 
     expect(unlinkFileMock).toHaveBeenCalledTimes(1);
     expect(unlinkFileMock).toHaveBeenCalledWith(archivePath);
@@ -271,7 +267,6 @@ test("restore with cache found", async () => {
 
 test("restore with a pull request event and cache found", async () => {
     const key = "node-test";
-    const cachePath = path.resolve("node_modules");
     testUtils.setInputs({
         path: "node_modules",
         key
@@ -328,7 +323,7 @@ test("restore with a pull request event and cache found", async () => {
     expect(infoMock).toHaveBeenCalledWith(`Cache Size: ~60 MB (62915000 B)`);
 
     expect(extractTarMock).toHaveBeenCalledTimes(1);
-    expect(extractTarMock).toHaveBeenCalledWith(archivePath, cachePath);
+    expect(extractTarMock).toHaveBeenCalledWith(archivePath);
 
     expect(setCacheHitOutputMock).toHaveBeenCalledTimes(1);
     expect(setCacheHitOutputMock).toHaveBeenCalledWith(true);
@@ -340,7 +335,6 @@ test("restore with a pull request event and cache found", async () => {
 test("restore with cache found for restore key", async () => {
     const key = "node-test";
     const restoreKey = "node-";
-    const cachePath = path.resolve("node_modules");
     testUtils.setInputs({
         path: "node_modules",
         key,
@@ -396,7 +390,7 @@ test("restore with cache found for restore key", async () => {
     expect(infoMock).toHaveBeenCalledWith(`Cache Size: ~0 MB (142 B)`);
 
     expect(extractTarMock).toHaveBeenCalledTimes(1);
-    expect(extractTarMock).toHaveBeenCalledWith(archivePath, cachePath);
+    expect(extractTarMock).toHaveBeenCalledWith(archivePath);
 
     expect(setCacheHitOutputMock).toHaveBeenCalledTimes(1);
     expect(setCacheHitOutputMock).toHaveBeenCalledWith(false);
__tests__/save.test.ts
@@ -2,7 +2,7 @@ import * as core from "@actions/core";
 import * as path from "path";
 
 import * as cacheHttpClient from "../src/cacheHttpClient";
-import { Events, Inputs } from "../src/constants";
+import { CacheFilename, Events, Inputs } from "../src/constants";
 import { ArtifactCacheEntry } from "../src/contracts";
 import run from "../src/save";
 import * as tar from "../src/tar";
@@ -41,9 +41,11 @@ beforeAll(() => {
         return actualUtils.getSupportedEvents();
     });
 
-    jest.spyOn(actionUtils, "resolvePath").mockImplementation(filePath => {
-        return path.resolve(filePath);
-    });
+    jest.spyOn(actionUtils, "resolvePaths").mockImplementation(
+        async filePaths => {
+            return filePaths.map(x => path.resolve(x));
+        }
+    );
 
     jest.spyOn(actionUtils, "createTempDirectory").mockImplementation(() => {
         return Promise.resolve("/foo/bar");
@@ -190,7 +192,7 @@ test("save with large cache outputs warning", async () => {
     });
 
     const inputPath = "node_modules";
-    const cachePath = path.resolve(inputPath);
+    const cachePaths = [path.resolve(inputPath)];
     testUtils.setInput(Inputs.Path, inputPath);
 
     const createTarMock = jest.spyOn(tar, "createTar");
@@ -202,10 +204,10 @@ test("save with large cache outputs warning", async () => {
 
     await run();
 
-    const archivePath = path.join("/foo/bar", "cache.tgz");
+    const archiveFolder = "/foo/bar";
 
     expect(createTarMock).toHaveBeenCalledTimes(1);
-    expect(createTarMock).toHaveBeenCalledWith(archivePath, cachePath);
+    expect(createTarMock).toHaveBeenCalledWith(archiveFolder, cachePaths);
 
     expect(logWarningMock).toHaveBeenCalledTimes(1);
     expect(logWarningMock).toHaveBeenCalledWith(
@@ -289,7 +291,7 @@ test("save with server error outputs warning", async () => {
     });
 
     const inputPath = "node_modules";
-    const cachePath = path.resolve(inputPath);
+    const cachePaths = [path.resolve(inputPath)];
    testUtils.setInput(Inputs.Path, inputPath);
 
     const cacheId = 4;
@@ -312,13 +314,14 @@ test("save with server error outputs warning", async () => {
     expect(reserveCacheMock).toHaveBeenCalledTimes(1);
     expect(reserveCacheMock).toHaveBeenCalledWith(primaryKey);
 
-    const archivePath = path.join("/foo/bar", "cache.tgz");
+    const archiveFolder = "/foo/bar";
+    const archiveFile = path.join(archiveFolder, CacheFilename);
 
     expect(createTarMock).toHaveBeenCalledTimes(1);
-    expect(createTarMock).toHaveBeenCalledWith(archivePath, cachePath);
+    expect(createTarMock).toHaveBeenCalledWith(archiveFolder, cachePaths);
 
     expect(saveCacheMock).toHaveBeenCalledTimes(1);
-    expect(saveCacheMock).toHaveBeenCalledWith(cacheId, archivePath);
+    expect(saveCacheMock).toHaveBeenCalledWith(cacheId, archiveFile);
 
     expect(logWarningMock).toHaveBeenCalledTimes(1);
     expect(logWarningMock).toHaveBeenCalledWith("HTTP Error Occurred");
@@ -348,7 +351,7 @@ test("save with valid inputs uploads a cache", async () => {
     });
 
     const inputPath = "node_modules";
-    const cachePath = path.resolve(inputPath);
+    const cachePaths = [path.resolve(inputPath)];
     testUtils.setInput(Inputs.Path, inputPath);
 
     const cacheId = 4;
@@ -367,13 +370,14 @@ test("save with valid inputs uploads a cache", async () => {
     expect(reserveCacheMock).toHaveBeenCalledTimes(1);
     expect(reserveCacheMock).toHaveBeenCalledWith(primaryKey);
 
-    const archivePath = path.join("/foo/bar", "cache.tgz");
+    const archiveFolder = "/foo/bar";
+    const archiveFile = path.join(archiveFolder, CacheFilename);
 
     expect(createTarMock).toHaveBeenCalledTimes(1);
-    expect(createTarMock).toHaveBeenCalledWith(archivePath, cachePath);
+    expect(createTarMock).toHaveBeenCalledWith(archiveFolder, cachePaths);
 
     expect(saveCacheMock).toHaveBeenCalledTimes(1);
-    expect(saveCacheMock).toHaveBeenCalledWith(cacheId, archivePath);
+    expect(saveCacheMock).toHaveBeenCalledWith(cacheId, archiveFile);
 
     expect(failedMock).toHaveBeenCalledTimes(0);
 });
__tests__/tar.test.ts
@@ -1,15 +1,30 @@
 import * as exec from "@actions/exec";
 import * as io from "@actions/io";
-import { existsSync } from "fs";
+import { promises as fs } from "fs";
+import * as path from "path";
 
+import { CacheFilename } from "../src/constants";
 import * as tar from "../src/tar";
 
 jest.mock("@actions/exec");
 jest.mock("@actions/io");
 
-beforeAll(() => {
+function getTempDir(): string {
+    return path.join(__dirname, "_temp", "tar");
+}
+
+beforeAll(async () => {
     jest.spyOn(io, "which").mockImplementation(tool => {
         return Promise.resolve(tool);
     });
+
+    process.env["GITHUB_WORKSPACE"] = process.cwd();
+    await jest.requireActual("@actions/io").rmRF(getTempDir());
+});
+
+afterAll(async () => {
+    delete process.env["GITHUB_WORKSPACE"];
+    await jest.requireActual("@actions/io").rmRF(getTempDir());
 });
 
 test("extract tar", async () => {
@@ -17,43 +32,54 @@ test("extract tar", async () => {
     const execMock = jest.spyOn(exec, "exec");
 
     const archivePath = "cache.tar";
-    const targetDirectory = "~/.npm/cache";
-    await tar.extractTar(archivePath, targetDirectory);
+    const workspace = process.env["GITHUB_WORKSPACE"];
 
-    expect(mkdirMock).toHaveBeenCalledWith(targetDirectory);
+    await tar.extractTar(archivePath);
+
+    expect(mkdirMock).toHaveBeenCalledWith(workspace);
 
     const IS_WINDOWS = process.platform === "win32";
     const tarPath = IS_WINDOWS
         ? `${process.env["windir"]}\\System32\\tar.exe`
         : "tar";
     expect(execMock).toHaveBeenCalledTimes(1);
-    expect(execMock).toHaveBeenCalledWith(`"${tarPath}"`, [
-        "-xz",
-        "-f",
-        archivePath,
-        "-C",
-        targetDirectory
-    ]);
+    expect(execMock).toHaveBeenCalledWith(
+        `"${tarPath}"`,
+        ["-xz", "-f", archivePath, "-P", "-C", workspace],
+        { cwd: undefined }
+    );
 });
 
 test("create tar", async () => {
     const execMock = jest.spyOn(exec, "exec");
 
-    const archivePath = "cache.tar";
-    const sourceDirectory = "~/.npm/cache";
-    await tar.createTar(archivePath, sourceDirectory);
+    const archiveFolder = getTempDir();
+    const workspace = process.env["GITHUB_WORKSPACE"];
+    const sourceDirectories = ["~/.npm/cache", `${workspace}/dist`];
+
+    await fs.mkdir(archiveFolder, { recursive: true });
+
+    await tar.createTar(archiveFolder, sourceDirectories);
 
     const IS_WINDOWS = process.platform === "win32";
     const tarPath = IS_WINDOWS
         ? `${process.env["windir"]}\\System32\\tar.exe`
        : "tar";
 
     expect(execMock).toHaveBeenCalledTimes(1);
-    expect(execMock).toHaveBeenCalledWith(`"${tarPath}"`, [
-        "-cz",
-        "-f",
-        archivePath,
-        "-C",
-        sourceDirectory,
-        "."
-    ]);
+    expect(execMock).toHaveBeenCalledWith(
+        `"${tarPath}"`,
+        [
+            "-cz",
+            "-f",
+            CacheFilename,
+            "-C",
+            workspace,
+            "--files-from",
+            "manifest.txt"
+        ],
+        {
+            cwd: archiveFolder
+        }
+    );
 });
dist/restore/index.js (vendored, 2264 lines changed): diff suppressed because it is too large.
dist/save/index.js (vendored, 2273 lines changed): diff suppressed because it is too large.
examples.md (13 lines changed)
@@ -12,6 +12,7 @@
 - [Windows](#windows)
 - [Using multiple systems and `npm config`](#using-multiple-systems-and-npm-config)
 - [Node - Yarn](#node---yarn)
+- [Node - Lerna](#node---lerna)
 - [OCaml/Reason - esy](#ocamlreason---esy)
 - [PHP - Composer](#php---composer)
 - [Python - pip](#python---pip)
@@ -184,6 +185,18 @@ The yarn cache directory will depend on your operating system and version of `yarn`
           ${{ runner.os }}-yarn-
 ```
 
+## Node - Lerna
+
+```yaml
+- name: restore lerna
+  uses: actions/cache@v2
+  with:
+    path: |
+      node_modules
+      */*/node_modules
+    key: ${{ runner.os }}-${{ hashFiles('yarn.lock') }}
+```
+
 ## OCaml/Reason - esy
 Esy allows you to export built dependencies and import pre-built dependencies.
 ```yaml
package-lock.json (generated, 17 lines changed)
@@ -14,6 +14,15 @@
             "resolved": "https://registry.npmjs.org/@actions/exec/-/exec-1.0.1.tgz",
             "integrity": "sha512-nvFkxwiicvpzNiCBF4wFBDfnBvi7xp/as7LE1hBxBxKG2L29+gkIPBiLKMVORL+Hg3JNf07AKRfl0V5djoypjQ=="
         },
+        "@actions/glob": {
+            "version": "0.1.0",
+            "resolved": "https://registry.npmjs.org/@actions/glob/-/glob-0.1.0.tgz",
+            "integrity": "sha512-lx8SzyQ2FE9+UUvjqY1f28QbTJv+w8qP7kHHbfQRhphrlcx0Mdmm1tZdGJzfxv1jxREa/sLW4Oy8CbGQKCJySA==",
+            "requires": {
+                "@actions/core": "^1.2.0",
+                "minimatch": "^3.0.4"
+            }
+        },
         "@actions/http-client": {
             "version": "1.0.6",
             "resolved": "https://registry.npmjs.org/@actions/http-client/-/http-client-1.0.6.tgz",
@@ -1436,8 +1445,7 @@
         "balanced-match": {
             "version": "1.0.0",
             "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.0.tgz",
-            "integrity": "sha1-ibTRmasr7kneFk6gK4nORi1xt2c=",
-            "dev": true
+            "integrity": "sha1-ibTRmasr7kneFk6gK4nORi1xt2c="
         },
         "base": {
             "version": "0.11.2",
@@ -1513,7 +1521,6 @@
             "version": "1.1.11",
             "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz",
             "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==",
-            "dev": true,
             "requires": {
                 "balanced-match": "^1.0.0",
                 "concat-map": "0.0.1"
@@ -1800,8 +1807,7 @@
         "concat-map": {
             "version": "0.0.1",
             "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz",
-            "integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=",
-            "dev": true
+            "integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s="
         },
         "contains-path": {
             "version": "0.1.0",
@@ -7156,7 +7162,6 @@
             "version": "3.0.4",
             "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz",
             "integrity": "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==",
-            "dev": true,
             "requires": {
                 "brace-expansion": "^1.1.7"
             }
package.json
@@ -25,6 +25,7 @@
     "dependencies": {
         "@actions/core": "^1.2.0",
         "@actions/exec": "^1.0.1",
+        "@actions/glob": "^0.1.0",
         "@actions/http-client": "^1.0.6",
         "@actions/io": "^1.0.1",
         "uuid": "^3.3.3"
src/cacheHttpClient.ts
@@ -6,8 +6,10 @@ import {
     IRequestOptions,
     ITypedResponse
 } from "@actions/http-client/interfaces";
+import * as crypto from "crypto";
 import * as fs from "fs";
 
+import { Inputs } from "./constants";
 import {
     ArtifactCacheEntry,
     CommitCacheRequest,
@@ -16,6 +18,8 @@ import {
 } from "./contracts";
 import * as utils from "./utils/actionUtils";
 
+const versionSalt = "1.0";
+
 function isSuccessStatusCode(statusCode?: number): boolean {
     if (!statusCode) {
         return false;
@@ -78,11 +82,27 @@ function createHttpClient(): HttpClient {
     );
 }
 
+export function getCacheVersion(): string {
+    // Add salt to cache version to support breaking changes in cache entry
+    const components = [
+        core.getInput(Inputs.Path, { required: true }),
+        versionSalt
+    ];
+
+    return crypto
+        .createHash("sha256")
+        .update(components.join("|"))
+        .digest("hex");
+}
+
 export async function getCacheEntry(
     keys: string[]
 ): Promise<ArtifactCacheEntry | null> {
     const httpClient = createHttpClient();
-    const resource = `cache?keys=${encodeURIComponent(keys.join(","))}`;
+    const version = getCacheVersion();
+    const resource = `cache?keys=${encodeURIComponent(
+        keys.join(",")
+    )}&version=${version}`;
 
     const response = await httpClient.getJson<ArtifactCacheEntry>(
         getCacheApiUrl(resource)
@@ -130,9 +150,11 @@ export async function downloadCache(
 // Reserve Cache
 export async function reserveCache(key: string): Promise<number> {
     const httpClient = createHttpClient();
+    const version = getCacheVersion();
 
     const reserveCacheRequest: ReserveCacheRequest = {
-        key
+        key,
+        version
     };
     const response = await httpClient.postJson<ReserveCacheResponse>(
         getCacheApiUrl("caches"),
src/constants.ts
@@ -18,3 +18,5 @@ export enum Events {
     Push = "push",
     PullRequest = "pull_request"
 }
+
+export const CacheFilename = "cache.tgz";
src/restore.ts
@@ -20,11 +20,6 @@ async function run(): Promise<void> {
             return;
         }
 
-        const cachePath = utils.resolvePath(
-            core.getInput(Inputs.Path, { required: true })
-        );
-        core.debug(`Cache Path: ${cachePath}`);
-
         const primaryKey = core.getInput(Inputs.Key, { required: true });
         core.saveState(State.CacheKey, primaryKey);
 
@@ -89,7 +84,7 @@ async function run(): Promise<void> {
                 )} MB (${archiveFileSize} B)`
             );
 
-            await extractTar(archivePath, cachePath);
+            await extractTar(archivePath);
         } finally {
             // Try to delete the archive to save space
             try {
src/save.ts (21 lines changed)
@@ -2,7 +2,7 @@ import * as core from "@actions/core";
 import * as path from "path";
 
 import * as cacheHttpClient from "./cacheHttpClient";
-import { Events, Inputs, State } from "./constants";
+import { CacheFilename, Events, Inputs, State } from "./constants";
 import { createTar } from "./tar";
 import * as utils from "./utils/actionUtils";
 
@@ -44,18 +44,21 @@ async function run(): Promise<void> {
             return;
         }
         core.debug(`Cache ID: ${cacheId}`);
-        const cachePath = utils.resolvePath(
-            core.getInput(Inputs.Path, { required: true })
+        const cachePaths = await utils.resolvePaths(
+            core
+                .getInput(Inputs.Path, { required: true })
+                .split("\n")
+                .filter(x => x !== "")
         );
-        core.debug(`Cache Path: ${cachePath}`);
 
-        const archivePath = path.join(
-            await utils.createTempDirectory(),
-            "cache.tgz"
-        );
+        core.debug("Cache Paths:");
+        core.debug(`${JSON.stringify(cachePaths)}`);
+
+        const archiveFolder = await utils.createTempDirectory();
+        const archivePath = path.join(archiveFolder, CacheFilename);
         core.debug(`Archive Path: ${archivePath}`);
 
-        await createTar(archivePath, cachePath);
+        await createTar(archiveFolder, cachePaths);
 
         const fileSizeLimit = 5 * 1024 * 1024 * 1024; // 5GB per repo limit
         const archiveFileSize = utils.getArchiveFileSize(archivePath);
src/tar.ts (47 lines changed)
@@ -1,6 +1,9 @@
 import { exec } from "@actions/exec";
 import * as io from "@actions/io";
-import { existsSync } from "fs";
+import { existsSync, writeFileSync } from "fs";
+import * as path from "path";
 
+import { CacheFilename } from "./constants";
+
 async function getTarPath(): Promise<string> {
     // Explicitly use BSD Tar on Windows
@@ -14,9 +17,9 @@ async function getTarPath(): Promise<string> {
     return await io.which("tar", true);
 }
 
-async function execTar(args: string[]): Promise<void> {
+async function execTar(args: string[], cwd?: string): Promise<void> {
     try {
-        await exec(`"${await getTarPath()}"`, args);
+        await exec(`"${await getTarPath()}"`, args, { cwd: cwd });
     } catch (error) {
         const IS_WINDOWS = process.platform === "win32";
         if (IS_WINDOWS) {
@@ -28,20 +31,38 @@ async function execTar(args: string[]): Promise<void> {
     }
 }
 
-export async function extractTar(
-    archivePath: string,
-    targetDirectory: string
-): Promise<void> {
+function getWorkingDirectory(): string {
+    return process.env["GITHUB_WORKSPACE"] ?? process.cwd();
+}
+
+export async function extractTar(archivePath: string): Promise<void> {
     // Create directory to extract tar into
-    await io.mkdirP(targetDirectory);
-    const args = ["-xz", "-f", archivePath, "-C", targetDirectory];
+    const workingDirectory = getWorkingDirectory();
+    await io.mkdirP(workingDirectory);
+    const args = ["-xz", "-f", archivePath, "-P", "-C", workingDirectory];
     await execTar(args);
 }
 
 export async function createTar(
-    archivePath: string,
-    sourceDirectory: string
+    archiveFolder: string,
+    sourceDirectories: string[]
 ): Promise<void> {
-    const args = ["-cz", "-f", archivePath, "-C", sourceDirectory, "."];
-    await execTar(args);
+    // Write source directories to manifest.txt to avoid command length limits
+    const manifestFilename = "manifest.txt";
+    writeFileSync(
+        path.join(archiveFolder, manifestFilename),
+        sourceDirectories.join("\n")
+    );
+
+    const workingDirectory = getWorkingDirectory();
+    const args = [
+        "-cz",
+        "-f",
+        CacheFilename,
+        "-C",
+        workingDirectory,
+        "--files-from",
+        manifestFilename
+    ];
+    await execTar(args, archiveFolder);
 }
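For reference, a self-contained sketch of the manifest-based archive step that createTar now performs; the folder and directory values here are hypothetical:

```ts
import { promises as fs } from "fs";
import * as path from "path";

// Hypothetical values: a temp folder for the archive and the resolved cache paths.
const archiveFolder = "/tmp/cache-archive";
const sourceDirectories = ["node_modules", "packages/a/node_modules"];

async function buildTarArgs(): Promise<string[]> {
    // Long path lists go into manifest.txt instead of the command line,
    // which avoids OS argument-length limits.
    await fs.mkdir(archiveFolder, { recursive: true });
    await fs.writeFile(
        path.join(archiveFolder, "manifest.txt"),
        sourceDirectories.join("\n")
    );

    // tar runs with cwd = archiveFolder, so cache.tgz and manifest.txt are bare
    // file names, while -C makes entries relative to the workspace.
    const workspace = process.env["GITHUB_WORKSPACE"] ?? process.cwd();
    return ["-cz", "-f", "cache.tgz", "-C", workspace, "--files-from", "manifest.txt"];
}

buildTarArgs().then(args => console.log("tar " + args.join(" ")));
```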
src/utils/actionUtils.ts
@@ -1,7 +1,7 @@
 import * as core from "@actions/core";
+import * as glob from "@actions/glob";
 import * as io from "@actions/io";
 import * as fs from "fs";
-import * as os from "os";
 import * as path from "path";
 import * as util from "util";
 import * as uuidV4 from "uuid/v4";
@@ -29,6 +29,7 @@ export async function createTempDirectory(): Promise<string> {
         }
         tempDirectory = path.join(baseLocation, "actions", "temp");
     }
+
     const dest = path.join(tempDirectory, uuidV4.default());
     await io.mkdirP(dest);
     return dest;
@@ -83,16 +84,21 @@ export function logWarning(message: string): void {
     core.info(`${warningPrefix}${message}`);
 }
 
-export function resolvePath(filePath: string): string {
-    if (filePath[0] === "~") {
-        const home = os.homedir();
-        if (!home) {
-            throw new Error("Unable to resolve `~` to HOME");
-        }
-        return path.join(home, filePath.slice(1));
+export async function resolvePaths(patterns: string[]): Promise<string[]> {
+    const paths: string[] = [];
+    const workspace = process.env["GITHUB_WORKSPACE"] ?? process.cwd();
+    const globber = await glob.create(patterns.join("\n"), {
+        implicitDescendants: false
+    });
+
+    for await (const file of globber.globGenerator()) {
+        const relativeFile = path.relative(workspace, file);
+        core.debug(`Matched: ${relativeFile}`);
+        // Paths are made relative so the tar entries are all relative to the root of the workspace.
+        paths.push(`${relativeFile}`);
     }
 
-    return path.resolve(filePath);
+    return paths;
 }
 
 export function getSupportedEvents(): string[] {
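A usage sketch of the new resolvePaths flow with @actions/glob; the patterns are hypothetical and GITHUB_WORKSPACE is assumed to be set by the runner:

```ts
import * as glob from "@actions/glob";
import * as path from "path";

// Hypothetical patterns; GITHUB_WORKSPACE is assumed to be set by the runner.
async function demoResolvePaths(): Promise<string[]> {
    const workspace = process.env["GITHUB_WORKSPACE"] ?? process.cwd();
    const globber = await glob.create("node_modules\n*/*/node_modules", {
        implicitDescendants: false // match the directories themselves, not every file inside
    });

    const paths: string[] = [];
    for await (const file of globber.globGenerator()) {
        // Relative entries keep the tarball portable across runners.
        paths.push(path.relative(workspace, file));
    }
    return paths;
}

demoResolvePaths().then(paths => console.log(paths));
```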