From e0d1942524af0e5f0301d424622e800e5a6f8abc Mon Sep 17 00:00:00 2001
From: Ethan Dennis
Date: Thu, 5 Mar 2020 14:48:42 -0800
Subject: [PATCH] Cache multiple entries using --files-from tar input

Remove known failing test
Quote tar paths
Add salt to test cache
Try reading input files from manifest
Bump salt
Run test on macOS
More testing
Run caching tests on 3 platforms
Run tests on self-hosted
Apparently can't reference hosted runners by name
Bump salt
Wait for some time after save
More timing out
Smarter waiting
Cache in tmp dir that won't be deleted
Use child_process instead of actions/exec
Revert tempDir hack
Bump salt
More logging
More console logging
Use filepath with cacheHttpClient
Test cache restoration
Revert temp dir hack
Debug logging
Clean up cache.yml
Testing
Bump salt
Change debug output
Build actions
---
 .github/workflows/cache.yml    | 17 +++------
 .github/workflows/workflow.yml | 10 ++---
 __tests__/actionUtils.test.ts  | 16 +++++---
 __tests__/save.test.ts         |  2 +-
 dist/restore/index.js          | 62 ++++++++++++++++++++++--------
 dist/save/index.js             | 69 ++++++++++++++++++++++++----------
 salt.txt                       |  1 +
 src/constants.ts               |  2 +
 src/save.ts                    | 12 +++---
 src/tar.ts                     | 27 +++++++++----
 src/utils/actionUtils.ts       | 19 ++++++----
 11 files changed, 157 insertions(+), 80 deletions(-)
 create mode 100644 salt.txt

diff --git a/.github/workflows/cache.yml b/.github/workflows/cache.yml
index 7fba68e..c3ff3e1 100644
--- a/.github/workflows/cache.yml
+++ b/.github/workflows/cache.yml
@@ -6,7 +6,6 @@ on:
       - master
 
 jobs:
-  # Build and unit test
   build:
     runs-on: self-hosted
     steps:
@@ -16,19 +15,15 @@ jobs:
       uses: actions/setup-node@v1
       with:
         node-version: '12.x'
-    - run: npm install
-    - run: npm run build
     - name: Restore npm cache
       uses: ./
+      id: cache
       with:
         path: |
          node_modules
          dist
-          ~/Desktop/cache-me
-        key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }}
-    - name: Prettier Format Check
-      run: npm run format-check
-    - name: ESLint Check
-      run: npm run lint
-    - name: Build & Test
-      run: npm run test
\ No newline at end of file
+        key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }}-${{ hashFiles('salt.txt') }}
+    - run: npm install
+      if: steps.cache.outputs.cache-hit != 'true'
+    - run: npm run build
+      if: steps.cache.outputs.cache-hit != 'true'
\ No newline at end of file
diff --git a/.github/workflows/workflow.yml b/.github/workflows/workflow.yml
index c394023..0b19474 100644
--- a/.github/workflows/workflow.yml
+++ b/.github/workflows/workflow.yml
@@ -1,11 +1,11 @@
 name: Tests
 
 on:
-  pull_request:
-    branches:
-      - master
-    paths-ignore:
-      - '**.md'
+  # pull_request:
+  #   branches:
+  #     - master
+  #   paths-ignore:
+  #     - '**.md'
   push:
     branches:
       - master
diff --git a/__tests__/actionUtils.test.ts b/__tests__/actionUtils.test.ts
index 34370ec..7292001 100644
--- a/__tests__/actionUtils.test.ts
+++ b/__tests__/actionUtils.test.ts
@@ -1,4 +1,5 @@
 import * as core from "@actions/core";
+import * as glob from "@actions/glob";
 import * as os from "os";
 import * as path from "path";
 
@@ -181,16 +182,17 @@ test("isValidEvent returns false for unknown event", () => {
     expect(isValidEvent).toBe(false);
 });
 
-test("expandPaths with no ~ in path", () => {
+test("resolvePaths with no ~ in path", async () => {
+    // TODO: these test paths will need to exist
     const filePath = ".cache/yarn";
 
-    const resolvedPath = actionUtils.expandPaths([filePath]);
+    const resolvedPath = await actionUtils.resolvePaths([filePath]);
 
     const expectedPath = [path.resolve(filePath)];
     expect(resolvedPath).toStrictEqual(expectedPath);
 });
 
-test("expandPaths with ~ in path", () => {
+test("resolvePaths with ~ in path", async () => {
     const filePath = "~/.cache/yarn";
 
     const homedir = jest.requireActual("os").homedir();
@@ -199,20 +201,22 @@ test("expandPaths with ~ in path", () => {
         return homedir;
     });
 
-    const resolvedPath = actionUtils.expandPaths([filePath]);
+    const resolvedPath = await actionUtils.resolvePaths([filePath]);
 
     const expectedPath = [path.join(homedir, ".cache/yarn")];
     expect(resolvedPath).toStrictEqual(expectedPath);
 });
 
-test("expandPaths with home not found", () => {
+test("resolvePaths with home not found", () => {
     const filePath = "~/.cache/yarn";
     const homedirMock = jest.spyOn(os, "homedir");
     homedirMock.mockImplementation(() => {
         return "";
     });
+    // const globMock = jest.spyOn(glob, "homedir");
+    // globMock.mockImplementation(() => "");
 
-    expect(() => actionUtils.expandPaths([filePath])).toThrow(
+    expect(async () => await actionUtils.resolvePaths([filePath])).toThrow(
         "Unable to resolve `~` to HOME"
     );
 });
diff --git a/__tests__/save.test.ts b/__tests__/save.test.ts
index ce97d52..8b0af81 100644
--- a/__tests__/save.test.ts
+++ b/__tests__/save.test.ts
@@ -40,7 +40,7 @@ beforeAll(() => {
         return actualUtils.getSupportedEvents();
     });
 
-    jest.spyOn(actionUtils, "expandPaths").mockImplementation(
+    jest.spyOn(actionUtils, "resolvePaths").mockImplementation(
         async filePaths => {
             return filePaths.map(x => path.resolve(x));
         }
diff --git a/dist/restore/index.js b/dist/restore/index.js
index 86bd7c2..0ddb919 100644
--- a/dist/restore/index.js
+++ b/dist/restore/index.js
@@ -3151,6 +3151,13 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
         step((generator = generator.apply(thisArg, _arguments || [])).next());
     });
 };
+var __asyncValues = (this && this.__asyncValues) || function (o) {
+    if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined.");
+    var m = o[Symbol.asyncIterator], i;
+    return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i);
+    function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; }
+    function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); }
+};
 var __importStar = (this && this.__importStar) || function (mod) {
     if (mod && mod.__esModule) return mod;
     var result = {};
@@ -3233,19 +3240,35 @@ function logWarning(message) {
     core.info(`${warningPrefix}${message}`);
 }
 exports.logWarning = logWarning;
-function expandPaths(patterns) {
-    var _a;
+function resolvePaths(patterns) {
+    var e_1, _a;
+    var _b;
     return __awaiter(this, void 0, void 0, function* () {
         const paths = [];
-        const workspace = (_a = process.env["GITHUB_WORKSPACE"], (_a !== null && _a !== void 0 ? _a : process.cwd()));
-        const globber = yield glob.create(patterns.join("\n"));
-        const files = yield globber.glob();
-        paths.push(...files);
-        // Convert paths to relative paths here?
-        return paths.map(x => path.relative(workspace, x));
+        const workspace = (_b = process.env["GITHUB_WORKSPACE"], (_b !== null && _b !== void 0 ? _b : process.cwd()));
+        const globber = yield glob.create(patterns.join("\n"), {
+            implicitDescendants: false
+        });
+        try {
+            for (var _c = __asyncValues(globber.globGenerator()), _d; _d = yield _c.next(), !_d.done;) {
+                const file = _d.value;
+                const relativeFile = path.relative(workspace, file);
+                core.debug(`Matched: ${relativeFile}`);
+                // Paths are made relative so the tar entries are all relative to the root of the workspace.
+                paths.push(`${relativeFile}`);
+            }
+        }
+        catch (e_1_1) { e_1 = { error: e_1_1 }; }
+        finally {
+            try {
+                if (_d && !_d.done && (_a = _c.return)) yield _a.call(_c);
+            }
+            finally { if (e_1) throw e_1.error; }
+        }
+        return paths;
     });
 }
-exports.expandPaths = expandPaths;
+exports.resolvePaths = resolvePaths;
 function getSupportedEvents() {
     return [constants_1.Events.Push, constants_1.Events.PullRequest];
 }
@@ -4421,6 +4444,7 @@ var Events;
     Events["Push"] = "push";
     Events["PullRequest"] = "pull_request";
 })(Events = exports.Events || (exports.Events = {}));
+exports.CacheFilename = "cache.tgz";
 
 
 /***/ }),
@@ -4901,8 +4925,10 @@ var __importStar = (this && this.__importStar) || function (mod) {
     return result;
 };
 Object.defineProperty(exports, "__esModule", { value: true });
-const exec_1 = __webpack_require__(986);
 const io = __importStar(__webpack_require__(1));
+const path = __importStar(__webpack_require__(622));
+const constants_1 = __webpack_require__(694);
+const exec_1 = __webpack_require__(986);
 const fs_1 = __webpack_require__(747);
 function getTarPath() {
     return __awaiter(this, void 0, void 0, function* () {
@@ -4917,13 +4943,14 @@ function getTarPath() {
         return yield io.which("tar", true);
     });
 }
-function execTar(args) {
+function execTar(args, cwd) {
     var _a, _b;
     return __awaiter(this, void 0, void 0, function* () {
         try {
-            yield exec_1.exec(`"${yield getTarPath()}"`, args);
+            yield exec_1.exec(`"${yield getTarPath()}"`, args, { cwd: cwd });
         }
         catch (error) {
+            console.log("error", error);
             const IS_WINDOWS = process.platform === "win32";
             if (IS_WINDOWS) {
                 throw new Error(`Tar failed with error: ${(_a = error) === null || _a === void 0 ? void 0 : _a.message}. Ensure BSD tar is installed and on the PATH.`);
@@ -4946,19 +4973,22 @@ function extractTar(archivePath) {
     });
 }
 exports.extractTar = extractTar;
-function createTar(archivePath, sourceDirectories) {
+function createTar(archiveFolder, sourceDirectories) {
     return __awaiter(this, void 0, void 0, function* () {
         // TODO: will want to stream sourceDirectories into tar
+        const manifestFilename = "manifest.txt";
+        fs_1.writeFileSync(path.join(archiveFolder, manifestFilename), sourceDirectories.join("\n"));
         const workingDirectory = getWorkingDirectory();
         const args = [
             "-cz",
             "-f",
-            archivePath,
+            constants_1.CacheFilename,
             "-C",
             workingDirectory,
-            sourceDirectories.join(" ")
+            "--files-from",
+            manifestFilename
         ];
-        yield execTar(args);
+        yield execTar(args, archiveFolder);
     });
 }
 exports.createTar = createTar;
diff --git a/dist/save/index.js b/dist/save/index.js
index e8e7583..a037876 100644
--- a/dist/save/index.js
+++ b/dist/save/index.js
@@ -3151,6 +3151,13 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
         step((generator = generator.apply(thisArg, _arguments || [])).next());
     });
 };
+var __asyncValues = (this && this.__asyncValues) || function (o) {
+    if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined.");
+    var m = o[Symbol.asyncIterator], i;
+    return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i);
+    function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; }
+    function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); }
+};
 var __importStar = (this && this.__importStar) || function (mod) {
     if (mod && mod.__esModule) return mod;
     var result = {};
@@ -3233,19 +3240,35 @@ function logWarning(message) {
     core.info(`${warningPrefix}${message}`);
 }
 exports.logWarning = logWarning;
-function expandPaths(patterns) {
-    var _a;
+function resolvePaths(patterns) {
+    var e_1, _a;
+    var _b;
     return __awaiter(this, void 0, void 0, function* () {
         const paths = [];
-        const workspace = (_a = process.env["GITHUB_WORKSPACE"], (_a !== null && _a !== void 0 ? _a : process.cwd()));
-        const globber = yield glob.create(patterns.join("\n"));
-        const files = yield globber.glob();
-        paths.push(...files);
-        // Convert paths to relative paths here?
-        return paths.map(x => path.relative(workspace, x));
+        const workspace = (_b = process.env["GITHUB_WORKSPACE"], (_b !== null && _b !== void 0 ? _b : process.cwd()));
+        const globber = yield glob.create(patterns.join("\n"), {
+            implicitDescendants: false
+        });
+        try {
+            for (var _c = __asyncValues(globber.globGenerator()), _d; _d = yield _c.next(), !_d.done;) {
+                const file = _d.value;
+                const relativeFile = path.relative(workspace, file);
+                core.debug(`Matched: ${relativeFile}`);
+                // Paths are made relative so the tar entries are all relative to the root of the workspace.
+                paths.push(`${relativeFile}`);
+            }
+        }
+        catch (e_1_1) { e_1 = { error: e_1_1 }; }
+        finally {
+            try {
+                if (_d && !_d.done && (_a = _c.return)) yield _a.call(_c);
+            }
+            finally { if (e_1) throw e_1.error; }
+        }
+        return paths;
     });
 }
-exports.expandPaths = expandPaths;
+exports.resolvePaths = resolvePaths;
 function getSupportedEvents() {
     return [constants_1.Events.Push, constants_1.Events.PullRequest];
 }
@@ -4449,15 +4472,16 @@ function run() {
                 return;
             }
             core.debug(`Cache ID: ${cacheId}`);
-            const cachePaths = yield utils.expandPaths(core
+            const cachePaths = yield utils.resolvePaths(core
                 .getInput(constants_1.Inputs.Path)
                 .split("\n")
                 .filter(x => x !== ""));
             core.debug("Cache Paths:");
             core.debug(`${JSON.stringify(cachePaths)}`);
-            const archivePath = path.join(yield utils.createTempDirectory(), "cache.tgz");
+            const archiveFolder = yield utils.createTempDirectory();
+            const archivePath = path.join(archiveFolder, constants_1.CacheFilename);
             core.debug(`Archive Path: ${archivePath}`);
-            yield tar_1.createTar(archivePath, cachePaths);
+            yield tar_1.createTar(archiveFolder, cachePaths);
             const fileSizeLimit = 5 * 1024 * 1024 * 1024; // 5GB per repo limit
             const archiveFileSize = utils.getArchiveFileSize(archivePath);
             core.debug(`File Size: ${archiveFileSize}`);
@@ -4506,6 +4530,7 @@ var Events;
     Events["Push"] = "push";
     Events["PullRequest"] = "pull_request";
 })(Events = exports.Events || (exports.Events = {}));
+exports.CacheFilename = "cache.tgz";
 
 
 /***/ }),
@@ -4888,8 +4913,10 @@ var __importStar = (this && this.__importStar) || function (mod) {
     return result;
 };
 Object.defineProperty(exports, "__esModule", { value: true });
-const exec_1 = __webpack_require__(986);
 const io = __importStar(__webpack_require__(1));
+const path = __importStar(__webpack_require__(622));
+const constants_1 = __webpack_require__(694);
+const exec_1 = __webpack_require__(986);
 const fs_1 = __webpack_require__(747);
 function getTarPath() {
     return __awaiter(this, void 0, void 0, function* () {
@@ -4904,13 +4931,14 @@ function getTarPath() {
         return yield io.which("tar", true);
     });
 }
-function execTar(args) {
+function execTar(args, cwd) {
     var _a, _b;
     return __awaiter(this, void 0, void 0, function* () {
         try {
-            yield exec_1.exec(`"${yield getTarPath()}"`, args);
+            yield exec_1.exec(`"${yield getTarPath()}"`, args, { cwd: cwd });
         }
         catch (error) {
+            console.log("error", error);
             const IS_WINDOWS = process.platform === "win32";
             if (IS_WINDOWS) {
                 throw new Error(`Tar failed with error: ${(_a = error) === null || _a === void 0 ? void 0 : _a.message}. Ensure BSD tar is installed and on the PATH.`);
@@ -4933,19 +4961,22 @@ function extractTar(archivePath) {
     });
 }
 exports.extractTar = extractTar;
-function createTar(archivePath, sourceDirectories) {
+function createTar(archiveFolder, sourceDirectories) {
     return __awaiter(this, void 0, void 0, function* () {
         // TODO: will want to stream sourceDirectories into tar
+        const manifestFilename = "manifest.txt";
+        fs_1.writeFileSync(path.join(archiveFolder, manifestFilename), sourceDirectories.join("\n"));
         const workingDirectory = getWorkingDirectory();
         const args = [
             "-cz",
             "-f",
-            archivePath,
+            constants_1.CacheFilename,
             "-C",
             workingDirectory,
-            sourceDirectories.join(" ")
+            "--files-from",
+            manifestFilename
         ];
-        yield execTar(args);
+        yield execTar(args, archiveFolder);
     });
 }
 exports.createTar = createTar;
diff --git a/salt.txt b/salt.txt
new file mode 100644
index 0000000..c5d6d97
--- /dev/null
+++ b/salt.txt
@@ -0,0 +1 @@
+Fri Mar 6 11:28:08 PST 2020
diff --git a/src/constants.ts b/src/constants.ts
index 5f26e8c..2b78f62 100644
--- a/src/constants.ts
+++ b/src/constants.ts
@@ -18,3 +18,5 @@ export enum Events {
     Push = "push",
     PullRequest = "pull_request"
 }
+
+export const CacheFilename = "cache.tgz";
diff --git a/src/save.ts b/src/save.ts
index f50712f..89a3b23 100644
--- a/src/save.ts
+++ b/src/save.ts
@@ -1,7 +1,7 @@
 import * as core from "@actions/core";
 import * as path from "path";
 import * as cacheHttpClient from "./cacheHttpClient";
-import { Events, Inputs, State } from "./constants";
+import { Events, Inputs, State, CacheFilename } from "./constants";
 import { createTar } from "./tar";
 import * as utils from "./utils/actionUtils";
 
@@ -43,7 +43,7 @@ async function run(): Promise<void> {
             return;
         }
         core.debug(`Cache ID: ${cacheId}`);
-        const cachePaths = await utils.expandPaths(
+        const cachePaths = await utils.resolvePaths(
             core
                 .getInput(Inputs.Path)
                 .split("\n")
@@ -53,13 +53,11 @@ async function run(): Promise<void> {
         core.debug("Cache Paths:");
         core.debug(`${JSON.stringify(cachePaths)}`);
 
-        const archivePath = path.join(
-            await utils.createTempDirectory(),
-            "cache.tgz"
-        );
+        const archiveFolder = await utils.createTempDirectory();
+        const archivePath = path.join(archiveFolder, CacheFilename);
         core.debug(`Archive Path: ${archivePath}`);
 
-        await createTar(archivePath, cachePaths);
+        await createTar(archiveFolder, cachePaths);
 
         const fileSizeLimit = 5 * 1024 * 1024 * 1024; // 5GB per repo limit
         const archiveFileSize = utils.getArchiveFileSize(archivePath);
diff --git a/src/tar.ts b/src/tar.ts
index 0d91bfc..b275215 100644
--- a/src/tar.ts
+++ b/src/tar.ts
@@ -1,6 +1,8 @@
-import { exec } from "@actions/exec";
 import * as io from "@actions/io";
-import { existsSync } from "fs";
+import * as path from "path";
+import { CacheFilename } from "./constants";
+import { exec } from "@actions/exec";
+import { existsSync, writeFileSync } from "fs";
 
 async function getTarPath(): Promise<string> {
     // Explicitly use BSD Tar on Windows
@@ -14,10 +16,12 @@ async function getTarPath(): Promise<string> {
     return await io.which("tar", true);
 }
 
-async function execTar(args: string[]): Promise<void> {
+async function execTar(args: string[], cwd?: string): Promise<void> {
     try {
-        await exec(`"${await getTarPath()}"`, args);
+        await exec(`"${await getTarPath()}"`, args, { cwd: cwd });
     } catch (error) {
+        console.log("error", error);
+
         const IS_WINDOWS = process.platform === "win32";
         if (IS_WINDOWS) {
             throw new Error(
@@ -41,18 +45,25 @@ export async function extractTar(archivePath: string): Promise<void> {
 }
 
 export async function createTar(
-    archivePath: string,
+    archiveFolder: string,
     sourceDirectories: string[]
 ): Promise<void> {
     // TODO: will want to stream sourceDirectories into tar
+    const manifestFilename = "manifest.txt";
+    writeFileSync(
+        path.join(archiveFolder, manifestFilename),
+        sourceDirectories.join("\n")
+    );
+
     const workingDirectory = getWorkingDirectory();
     const args = [
         "-cz",
         "-f",
-        archivePath,
+        CacheFilename,
         "-C",
         workingDirectory,
-        sourceDirectories.join(" ")
+        "--files-from",
+        manifestFilename
     ];
-    await execTar(args);
+    await execTar(args, archiveFolder);
 }
diff --git a/src/utils/actionUtils.ts b/src/utils/actionUtils.ts
index 6f5e462..692b42c 100644
--- a/src/utils/actionUtils.ts
+++ b/src/utils/actionUtils.ts
@@ -28,6 +28,7 @@ export async function createTempDirectory(): Promise<string> {
         }
         tempDirectory = path.join(baseLocation, "actions", "temp");
     }
+
     const dest = path.join(tempDirectory, uuidV4.default());
     await io.mkdirP(dest);
     return dest;
@@ -82,17 +83,21 @@ export function logWarning(message: string): void {
     core.info(`${warningPrefix}${message}`);
 }
 
-export async function expandPaths(patterns: string[]): Promise<string[]> {
+export async function resolvePaths(patterns: string[]): Promise<string[]> {
     const paths: string[] = [];
     const workspace = process.env["GITHUB_WORKSPACE"] ?? process.cwd();
+    const globber = await glob.create(patterns.join("\n"), {
+        implicitDescendants: false
+    });
 
-    const globber = await glob.create(patterns.join("\n"));
-    const files = await globber.glob();
+    for await (const file of globber.globGenerator()) {
+        const relativeFile = path.relative(workspace, file);
+        core.debug(`Matched: ${relativeFile}`);
+        // Paths are made relative so the tar entries are all relative to the root of the workspace.
+        paths.push(`${relativeFile}`);
+    }
 
-    paths.push(...files);
-
-    // Convert paths to relative paths here?
-    return paths.map(x => path.relative(workspace, x));
+    return paths;
 }
 
 export function getSupportedEvents(): string[] {
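
Reviewer note (not part of the patch): the sketch below condenses the save-side flow this change wires together: expand the `path` input globs into workspace-relative entries, write them to a manifest, and hand the manifest to tar via --files-from. It mirrors resolvePaths in src/utils/actionUtils.ts and createTar in src/tar.ts above. The bare "tar" command (standing in for the patch's getTarPath() lookup), the staging folder, and the example patterns are illustrative assumptions, not the action's exact configuration.

    import { exec } from "@actions/exec";
    import * as glob from "@actions/glob";
    import { writeFileSync } from "fs";
    import * as path from "path";

    // Mirrors resolvePaths: expand glob patterns into workspace-relative paths.
    async function resolvePaths(patterns: string[]): Promise<string[]> {
        const workspace = process.env["GITHUB_WORKSPACE"] ?? process.cwd();
        const globber = await glob.create(patterns.join("\n"), {
            implicitDescendants: false
        });
        const paths: string[] = [];
        for await (const file of globber.globGenerator()) {
            // Relative entries keep the archive portable across workspace roots.
            paths.push(path.relative(workspace, file));
        }
        return paths;
    }

    // Mirrors createTar: list the entries in a manifest inside the staging
    // folder and let tar read the list instead of taking paths as arguments.
    async function createTar(
        archiveFolder: string,
        sourceDirectories: string[]
    ): Promise<void> {
        const manifestFilename = "manifest.txt";
        writeFileSync(
            path.join(archiveFolder, manifestFilename),
            sourceDirectories.join("\n")
        );
        const workspace = process.env["GITHUB_WORKSPACE"] ?? process.cwd();
        // Assumption: tar is on the PATH; the patch resolves it via getTarPath().
        await exec(
            `"tar"`,
            [
                "-cz",
                "-f",
                "cache.tgz", // created inside archiveFolder because of cwd below
                "-C",
                workspace, // manifest entries are relative to this root
                "--files-from",
                manifestFilename
            ],
            { cwd: archiveFolder }
        );
    }

    // Hypothetical usage: stage the archive in a temp folder, then upload it.
    // resolvePaths(["node_modules", "dist"]).then(paths =>
    //     createTar("/tmp/cache-staging", paths)
    // );

Feeding tar a manifest instead of the old sourceDirectories.join(" ") is the heart of the fix: a single space-joined argument breaks on paths containing spaces and runs into argument-length limits, while one entry per manifest line handles both, and relative entries let the same archive be restored into any workspace root.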