Mirror of https://code.forgejo.org/actions/cache.git, synced 2024-11-24 04:29:16 +01:00
eb78578266
* Allow for multiple line-delimited paths to cache
* Add initial minimatch support
* Use @actions/glob for pattern matching
* Cache multiple entries using --files-from tar input
  (squashed work-in-progress commits: remove known failing test Quote tar paths Add salt to test cache Try reading input files from manifest bump salt Run test on macos more testing Run caching tests on 3 platforms Run tests on self-hosted Apparently cant reference hosted runners by name Bump salt wait for some time after save more timing out smarter waiting Cache in tmp dir that won't be deleted Use child_process instead of actions/exec Revert tempDir hack bump salt more logging More console logging Use filepath to with cacheHttpClient Test cache restoration Revert temp dir hack debug logging clean up cache.yml testing Bump salt change debug output build actions)
* unit test coverage for caching multiple dirs
* Ensure there's a locateable test folder at homedir
* Clean up code
* Version cache with all inputs
* Unit test getCacheVersion
* Include keys in getCacheEntry request
* Clean import orders
* Use fs promises in actionUtils tests
* Update import order for to fix linter errors
* Fix remaining linter error
* Remove platform-specific test code
* Add lerna example for caching multiple dirs
* Lerna example updated to v2

Co-Authored-By: Josh Gross <joshmgross@github.com>
Co-authored-by: Josh Gross <joshmgross@github.com>
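As a rough illustration of the multi-path input described above, resolving a line-delimited `path` input into concrete directories with @actions/glob might look like the sketch below (a minimal sketch with a hypothetical helper name, resolvePaths; the action's real resolution logic lives in its utility code, not in the file shown on this page):

import * as core from "@actions/core";
import * as glob from "@actions/glob";

// Hypothetical sketch: expand the multi-line `path` input into concrete paths.
async function resolvePaths(): Promise<string[]> {
    const patterns = core
        .getInput("path", { required: true })
        .split("\n")
        .map(pattern => pattern.trim())
        .filter(pattern => pattern !== "");
    const globber = await glob.create(patterns.join("\n"));
    return await globber.glob();
}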
321 lines
9 KiB
TypeScript
import * as core from "@actions/core";
import { HttpClient, HttpCodes } from "@actions/http-client";
import { BearerCredentialHandler } from "@actions/http-client/auth";
import {
    IHttpClientResponse,
    IRequestOptions,
    ITypedResponse
} from "@actions/http-client/interfaces";
import * as crypto from "crypto";
import * as fs from "fs";

import { Inputs } from "./constants";
import {
    ArtifactCacheEntry,
    CommitCacheRequest,
    ReserveCacheRequest,
    ReserveCacheResponse
} from "./contracts";
import * as utils from "./utils/actionUtils";

const versionSalt = "1.0";

function isSuccessStatusCode(statusCode?: number): boolean {
    if (!statusCode) {
        return false;
    }
    return statusCode >= 200 && statusCode < 300;
}

function isRetryableStatusCode(statusCode?: number): boolean {
    if (!statusCode) {
        return false;
    }
    const retryableStatusCodes = [
        HttpCodes.BadGateway,
        HttpCodes.ServiceUnavailable,
        HttpCodes.GatewayTimeout
    ];
    return retryableStatusCodes.includes(statusCode);
}

function getCacheApiUrl(resource: string): string {
    // Ideally we just use ACTIONS_CACHE_URL
    const baseUrl: string = (
        process.env["ACTIONS_CACHE_URL"] ||
        process.env["ACTIONS_RUNTIME_URL"] ||
        ""
    ).replace("pipelines", "artifactcache");
    if (!baseUrl) {
        throw new Error(
            "Cache Service Url not found, unable to restore cache."
        );
    }

    const url = `${baseUrl}_apis/artifactcache/${resource}`;
    core.debug(`Resource Url: ${url}`);
    return url;
}

function createAcceptHeader(type: string, apiVersion: string): string {
    return `${type};api-version=${apiVersion}`;
}

function getRequestOptions(): IRequestOptions {
    const requestOptions: IRequestOptions = {
        headers: {
            Accept: createAcceptHeader("application/json", "6.0-preview.1")
        }
    };

    return requestOptions;
}

function createHttpClient(): HttpClient {
    const token = process.env["ACTIONS_RUNTIME_TOKEN"] || "";
    const bearerCredentialHandler = new BearerCredentialHandler(token);

    return new HttpClient(
        "actions/cache",
        [bearerCredentialHandler],
        getRequestOptions()
    );
}

export function getCacheVersion(): string {
    // Add salt to cache version to support breaking changes in cache entry
    const components = [
        core.getInput(Inputs.Path, { required: true }),
        versionSalt
    ];
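    // Editor's note (illustrative example, not in the original source): with a
    // `path` input of "node_modules", the version computed below is
    // sha256("node_modules" + "|" + "1.0"); changing either the path input or
    // the salt yields a different version, and the version is sent with every
    // cache lookup and reservation in this file.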

    return crypto
        .createHash("sha256")
        .update(components.join("|"))
        .digest("hex");
}

export async function getCacheEntry(
    keys: string[]
): Promise<ArtifactCacheEntry | null> {
    const httpClient = createHttpClient();
    const version = getCacheVersion();
    const resource = `cache?keys=${encodeURIComponent(
        keys.join(",")
    )}&version=${version}`;

    const response = await httpClient.getJson<ArtifactCacheEntry>(
        getCacheApiUrl(resource)
    );
    if (response.statusCode === 204) {
        return null;
    }
    if (!isSuccessStatusCode(response.statusCode)) {
        throw new Error(`Cache service responded with ${response.statusCode}`);
    }

    const cacheResult = response.result;
    const cacheDownloadUrl = cacheResult?.archiveLocation;
    if (!cacheDownloadUrl) {
        throw new Error("Cache not found.");
    }
    core.setSecret(cacheDownloadUrl);
    core.debug(`Cache Result:`);
    core.debug(JSON.stringify(cacheResult));

    return cacheResult;
}

async function pipeResponseToStream(
    response: IHttpClientResponse,
    stream: NodeJS.WritableStream
): Promise<void> {
    return new Promise(resolve => {
        response.message.pipe(stream).on("close", () => {
            resolve();
        });
    });
}

export async function downloadCache(
    archiveLocation: string,
    archivePath: string
): Promise<void> {
    const stream = fs.createWriteStream(archivePath);
    const httpClient = new HttpClient("actions/cache");
    const downloadResponse = await httpClient.get(archiveLocation);
    await pipeResponseToStream(downloadResponse, stream);
}

// Reserve Cache
export async function reserveCache(key: string): Promise<number> {
    const httpClient = createHttpClient();
    const version = getCacheVersion();

    const reserveCacheRequest: ReserveCacheRequest = {
        key,
        version
    };
    const response = await httpClient.postJson<ReserveCacheResponse>(
        getCacheApiUrl("caches"),
        reserveCacheRequest
    );
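    // Editor's note: a missing result (or missing cacheId) falls back to -1
    // below, which callers can treat as "no cache entry was reserved".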
    return response?.result?.cacheId ?? -1;
}

function getContentRange(start: number, end: number): string {
    // Format: `bytes start-end/filesize
    // start and end are inclusive
    // filesize can be *
    // For a 200 byte chunk starting at byte 0:
    // Content-Range: bytes 0-199/*
    return `bytes ${start}-${end}/*`;
}

async function uploadChunk(
    httpClient: HttpClient,
    resourceUrl: string,
    data: NodeJS.ReadableStream,
    start: number,
    end: number
): Promise<void> {
    core.debug(
        `Uploading chunk of size ${end -
            start +
            1} bytes at offset ${start} with content range: ${getContentRange(
            start,
            end
        )}`
    );
    const additionalHeaders = {
        "Content-Type": "application/octet-stream",
        "Content-Range": getContentRange(start, end)
    };

    const uploadChunkRequest = async (): Promise<IHttpClientResponse> => {
        return await httpClient.sendStream(
            "PATCH",
            resourceUrl,
            data,
            additionalHeaders
        );
    };

    const response = await uploadChunkRequest();
    if (isSuccessStatusCode(response.message.statusCode)) {
        return;
    }

    if (isRetryableStatusCode(response.message.statusCode)) {
        core.debug(
            `Received ${response.message.statusCode}, retrying chunk at offset ${start}.`
        );
        const retryResponse = await uploadChunkRequest();
        if (isSuccessStatusCode(retryResponse.message.statusCode)) {
            return;
        }
    }

    throw new Error(
        `Cache service responded with ${response.message.statusCode} during chunk upload.`
    );
}

function parseEnvNumber(key: string): number | undefined {
    const value = Number(process.env[key]);
    if (Number.isNaN(value) || value < 0) {
        return undefined;
    }
    return value;
}

async function uploadFile(
    httpClient: HttpClient,
    cacheId: number,
    archivePath: string
): Promise<void> {
    // Upload Chunks
    const fileSize = fs.statSync(archivePath).size;
    const resourceUrl = getCacheApiUrl(`caches/${cacheId.toString()}`);
    const fd = fs.openSync(archivePath, "r");

    const concurrency = parseEnvNumber("CACHE_UPLOAD_CONCURRENCY") ?? 4; // # of HTTP requests in parallel
    const MAX_CHUNK_SIZE =
        parseEnvNumber("CACHE_UPLOAD_CHUNK_SIZE") ?? 32 * 1024 * 1024; // 32 MB Chunks
    core.debug(`Concurrency: ${concurrency} and Chunk Size: ${MAX_CHUNK_SIZE}`);
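    // Editor's note (illustrative, not in the original source): with the
    // defaults above, a hypothetical 96 MiB (100663296-byte) archive is split
    // into three 32 MiB chunks with inclusive Content-Range offsets
    // 0-33554431, 33554432-67108863 and 67108864-100663295; up to
    // `concurrency` workers (default 4) pull chunks from the shared offset
    // below until the whole file has been claimed.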

    const parallelUploads = [...new Array(concurrency).keys()];
    core.debug("Awaiting all uploads");
    let offset = 0;

    try {
        await Promise.all(
            parallelUploads.map(async () => {
                while (offset < fileSize) {
                    const chunkSize = Math.min(
                        fileSize - offset,
                        MAX_CHUNK_SIZE
                    );
                    const start = offset;
                    const end = offset + chunkSize - 1;
                    offset += MAX_CHUNK_SIZE;
                    const chunk = fs.createReadStream(archivePath, {
                        fd,
                        start,
                        end,
                        autoClose: false
                    });

                    await uploadChunk(
                        httpClient,
                        resourceUrl,
                        chunk,
                        start,
                        end
                    );
                }
            })
        );
    } finally {
        fs.closeSync(fd);
    }
    return;
}

async function commitCache(
    httpClient: HttpClient,
    cacheId: number,
    filesize: number
): Promise<ITypedResponse<null>> {
    const commitCacheRequest: CommitCacheRequest = { size: filesize };
    return await httpClient.postJson<null>(
        getCacheApiUrl(`caches/${cacheId.toString()}`),
        commitCacheRequest
    );
}

export async function saveCache(
    cacheId: number,
    archivePath: string
): Promise<void> {
    const httpClient = createHttpClient();

    core.debug("Upload cache");
    await uploadFile(httpClient, cacheId, archivePath);

    // Commit Cache
    core.debug("Committing cache");
    const cacheSize = utils.getArchiveFileSize(archivePath);
    const commitCacheResponse = await commitCache(
        httpClient,
        cacheId,
        cacheSize
    );
    if (!isSuccessStatusCode(commitCacheResponse.statusCode)) {
        throw new Error(
            `Cache service responded with ${commitCacheResponse.statusCode} during commit cache.`
        );
    }

    core.info("Cache saved successfully");
}
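For orientation, here is a minimal sketch (hypothetical calling code, not part of this file) of how the exports above are typically wired together by the action's restore and save entry points; the real entry points also handle creating and extracting the tar archive:

import * as cacheHttpClient from "./cacheHttpClient";

// Restore side: look up an entry for the given keys, then download the archive.
async function restoreSketch(keys: string[], archivePath: string): Promise<void> {
    const entry = await cacheHttpClient.getCacheEntry(keys);
    if (entry?.archiveLocation) {
        await cacheHttpClient.downloadCache(entry.archiveLocation, archivePath);
    }
}

// Save side: reserve a cache ID, then upload the archive in chunks and commit it.
async function saveSketch(key: string, archivePath: string): Promise<void> {
    const cacheId = await cacheHttpClient.reserveCache(key);
    if (cacheId >= 0) {
        await cacheHttpClient.saveCache(cacheId, archivePath);
    }
}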