1
0
Fork 0
mirror of https://code.forgejo.org/actions/cache.git synced 2025-04-22 12:46:17 +02:00

Setup GCS as primary cache option. Fallback to GH if GCS not setup

Basically, do exactly what GH did to save and restore cache with the
exception being that the files are stored on GCS.
This commit is contained in:
Dany Sam 2025-04-10 22:55:13 +05:30
parent 5a3ec84eff
commit 6ec565b197
10 changed files with 1063 additions and 7448 deletions

View file

@ -26,6 +26,13 @@ inputs:
description: 'Check if a cache entry exists for the given input(s) (key, restore-keys) without downloading the cache' description: 'Check if a cache entry exists for the given input(s) (key, restore-keys) without downloading the cache'
default: 'false' default: 'false'
required: false required: false
gcs-bucket:
description: 'Google Cloud Storage bucket name to use for caching. When provided, GCS will be used as the cache backend.'
required: false
gcs-path-prefix:
description: 'Optional prefix path within the GCS bucket for cache files'
required: false
default: 'github-cache'
save-always: save-always:
description: 'Run the post step to save the cache even if another step before fails' description: 'Run the post step to save the cache even if another step before fails'
default: 'false' default: 'false'

8202
package-lock.json generated

File diff suppressed because it is too large Load diff

View file

@ -26,7 +26,8 @@
"@actions/cache": "^4.0.3", "@actions/cache": "^4.0.3",
"@actions/core": "^1.11.1", "@actions/core": "^1.11.1",
"@actions/exec": "^1.1.1", "@actions/exec": "^1.1.1",
"@actions/io": "^1.1.3" "@actions/io": "^1.1.3",
"@google-cloud/storage": "^7.16.0"
}, },
"devDependencies": { "devDependencies": {
"@types/jest": "^27.5.2", "@types/jest": "^27.5.2",
@ -46,6 +47,6 @@
"nock": "^13.2.9", "nock": "^13.2.9",
"prettier": "^2.8.0", "prettier": "^2.8.0",
"ts-jest": "^28.0.8", "ts-jest": "^28.0.8",
"typescript": "^4.9.3" "typescript": "^5.8.3"
} }
} }

View file

@ -23,6 +23,13 @@ inputs:
description: 'Check if a cache entry exists for the given input(s) (key, restore-keys) without downloading the cache' description: 'Check if a cache entry exists for the given input(s) (key, restore-keys) without downloading the cache'
default: 'false' default: 'false'
required: false required: false
gcs-bucket:
description: 'Google Cloud Storage bucket name to use for caching. When provided, GCS will be used as the cache backend.'
required: false
gcs-path-prefix:
description: 'Optional prefix path within the GCS bucket for cache files'
default: 'github-cache'
required: false
outputs: outputs:
cache-hit: cache-hit:
description: 'A boolean value to indicate an exact match was found for the primary key' description: 'A boolean value to indicate an exact match was found for the primary key'

View file

@ -15,6 +15,13 @@ inputs:
description: 'An optional boolean when enabled, allows windows runners to save caches that can be restored on other platforms' description: 'An optional boolean when enabled, allows windows runners to save caches that can be restored on other platforms'
default: 'false' default: 'false'
required: false required: false
gcs-bucket:
description: 'Google Cloud Storage bucket name to use for caching. When provided, GCS will be used as the cache backend.'
required: false
gcs-path-prefix:
description: 'Optional prefix path within the GCS bucket for cache files'
default: 'github-cache'
required: false
runs: runs:
using: 'node20' using: 'node20'
main: '../dist/save-only/index.js' main: '../dist/save-only/index.js'

View file

@ -5,7 +5,9 @@ export enum Inputs {
UploadChunkSize = "upload-chunk-size", // Input for cache, save action UploadChunkSize = "upload-chunk-size", // Input for cache, save action
EnableCrossOsArchive = "enableCrossOsArchive", // Input for cache, restore, save action EnableCrossOsArchive = "enableCrossOsArchive", // Input for cache, restore, save action
FailOnCacheMiss = "fail-on-cache-miss", // Input for cache, restore action FailOnCacheMiss = "fail-on-cache-miss", // Input for cache, restore action
LookupOnly = "lookup-only" // Input for cache, restore action LookupOnly = "lookup-only", // Input for cache, restore action
GCSBucket = "gcs-bucket", // Input for cache, restore, save action
GCSPathPrefix = "gcs-path-prefix" // Input for cache, restore, save action
} }
export enum Outputs { export enum Outputs {

View file

@ -1,4 +1,3 @@
import * as cache from "@actions/cache";
import * as core from "@actions/core"; import * as core from "@actions/core";
import { Events, Inputs, Outputs, State } from "./constants"; import { Events, Inputs, Outputs, State } from "./constants";
@ -8,6 +7,7 @@ import {
StateProvider StateProvider
} from "./stateProvider"; } from "./stateProvider";
import * as utils from "./utils/actionUtils"; import * as utils from "./utils/actionUtils";
import * as cache from "./utils/gcsCache";
export async function restoreImpl( export async function restoreImpl(
stateProvider: IStateProvider, stateProvider: IStateProvider,

View file

@ -1,4 +1,3 @@
import * as cache from "@actions/cache";
import * as core from "@actions/core"; import * as core from "@actions/core";
import { Events, Inputs, State } from "./constants"; import { Events, Inputs, State } from "./constants";
@ -8,6 +7,7 @@ import {
StateProvider StateProvider
} from "./stateProvider"; } from "./stateProvider";
import * as utils from "./utils/actionUtils"; import * as utils from "./utils/actionUtils";
import * as cache from "./utils/gcsCache";
// Catch and log any unhandled exceptions. These exceptions can leak out of the uploadChunk method in // Catch and log any unhandled exceptions. These exceptions can leak out of the uploadChunk method in
// @actions/toolkit when a failed upload closes the file descriptor causing any in-process reads to // @actions/toolkit when a failed upload closes the file descriptor causing any in-process reads to

View file

@ -1,7 +1,7 @@
import * as cache from "@actions/cache"; import * as cache from "@actions/cache";
import * as core from "@actions/core"; import * as core from "@actions/core";
import { RefKey } from "../constants"; import { Inputs, RefKey } from "../constants";
export function isGhes(): boolean { export function isGhes(): boolean {
const ghUrl = new URL( const ghUrl = new URL(
@ -66,7 +66,29 @@ export function getInputAsBool(
return result.toLowerCase() === "true"; return result.toLowerCase() === "true";
} }
// Check if GCS is configured and available.
// GCS is considered available when a bucket name was supplied via the
// `gcs-bucket` input; otherwise the caller should use the GitHub cache.
export function isGCSAvailable(): boolean {
    try {
        if (core.getInput(Inputs.GCSBucket)) {
            return true;
        }
        core.info("GCS bucket name not provided, falling back to GitHub cache");
        return false;
    } catch (error) {
        logWarning(`Failed to check GCS availability: ${(error as Error).message}`);
        return false;
    }
}
export function isCacheFeatureAvailable(): boolean { export function isCacheFeatureAvailable(): boolean {
// Check if GCS cache is available
if (isGCSAvailable()) {
return true;
}
// Otherwise, check GitHub cache
if (cache.isFeatureAvailable()) { if (cache.isFeatureAvailable()) {
return true; return true;
} }

251
src/utils/gcsCache.ts Normal file
View file

@ -0,0 +1,251 @@
import * as core from "@actions/core";
import * as path from "path";
import { Inputs } from "../constants";
import { isGCSAvailable } from "./actionUtils";
import * as utils from "@actions/cache/lib/internal/cacheUtils"
import { Storage } from "@google-cloud/storage";
import * as cache from "@actions/cache";
import { DownloadOptions, UploadOptions } from '@actions/cache/lib/options'
import { createTar, extractTar, listTar } from "@actions/cache/lib/internal/tar"
import { CompressionMethod } from "@actions/cache/lib/internal/constants";
const DEFAULT_PATH_PREFIX = "github-cache"
// Builds a GCS client using Application Default Credentials.
// Returns null instead of throwing so callers can degrade gracefully
// to the GitHub cache backend.
function getGCSClient(): Storage | null {
    try {
        core.info("Initializing GCS client");
        const client = new Storage();
        return client;
    } catch (error) {
        core.warning(`Failed to initialize GCS client: ${(error as Error).message}`);
        return null;
    }
}
/**
 * Restores a cache entry, preferring GCS when a bucket is configured and
 * falling back to the GitHub cache backend when GCS is unconfigured,
 * misses, or errors.
 *
 * @returns the matched cache key (GCS object path or GitHub cache key),
 *          or undefined on a total miss.
 */
export async function restoreCache(
    paths: string[],
    primaryKey: string,
    restoreKeys?: string[],
    options?: DownloadOptions,
    enableCrossOsArchive?: boolean
): Promise<string | undefined> {
    if (isGCSAvailable()) {
        try {
            const matchedKey = await restoreFromGCS(
                paths,
                primaryKey,
                restoreKeys,
                options
            );
            if (matchedKey) {
                core.info(`Cache restored from GCS with key: ${matchedKey}`);
                return matchedKey;
            }
            core.info("Cache not found in GCS, falling back to GitHub cache");
        } catch (error) {
            core.warning(`Failed to restore from GCS: ${(error as Error).message}`);
            core.info("Falling back to GitHub cache");
        }
    }

    // GCS not configured, missed, or failed — delegate to GitHub cache.
    return await cache.restoreCache(
        paths,
        primaryKey,
        restoreKeys,
        options,
        enableCrossOsArchive
    );
}
/**
 * Saves a cache entry, preferring GCS when a bucket is configured and
 * falling back to the GitHub cache backend when GCS is unconfigured or
 * the GCS upload fails.
 *
 * @returns a positive cache id on GCS success, otherwise whatever the
 *          GitHub cache backend returns.
 */
export async function saveCache(
    paths: string[],
    key: string,
    options?: UploadOptions,
    enableCrossOsArchive?: boolean
): Promise<number> {
    if (isGCSAvailable()) {
        try {
            const result = await saveToGCS(paths, key);
            // BUG FIX: saveToGCS returns -1 on failure, and -1 is truthy.
            // A plain `if (result)` reported a failed upload as success and
            // skipped the GitHub fallback. Only a positive id is a success.
            if (result > 0) {
                core.info(`Cache saved to GCS with key: ${key}`);
                return result;
            }
            core.warning("Failed to save to GCS, falling back to GitHub cache");
        } catch (error) {
            core.warning(`Failed to save to GCS: ${(error as Error).message}`);
            core.info("Falling back to GitHub cache");
        }
    }

    // Fall back to GitHub cache
    return await cache.saveCache(
        paths,
        key,
        options,
        enableCrossOsArchive
    );
}
// Reports whether any cache backend is usable: either GCS is configured
// (bucket input present) or the GitHub cache service is reachable.
export function isFeatureAvailable(): boolean {
    if (isGCSAvailable()) {
        return true;
    }
    return cache.isFeatureAvailable();
}
/**
 * Looks up primaryKey then each restoreKey in the configured GCS bucket,
 * downloads the first matching archive, and extracts it into the workspace.
 *
 * @param _paths unused — the restored paths are implicit in the tar archive;
 *               kept for signature parity with @actions/cache restoreCache.
 * @returns the GCS object path of the matched entry, or undefined when no
 *          entry matched or the download/extract failed.
 */
async function restoreFromGCS(
    _paths: string[],
    primaryKey: string,
    restoreKeys: string[] = [],
    options?: DownloadOptions
): Promise<string | undefined> {
    const storage = getGCSClient();
    if (!storage) {
        return undefined;
    }

    const bucket = core.getInput(Inputs.GCSBucket);
    const pathPrefix = core.getInput(Inputs.GCSPathPrefix) || DEFAULT_PATH_PREFIX;
    const compressionMethod = await utils.getCompressionMethod();

    const keys = [primaryKey, ...restoreKeys];
    const gcsPath = await findFileOnGCS(storage, bucket, pathPrefix, keys, compressionMethod);
    if (!gcsPath) {
        core.info(`No matching cache found`);
        return undefined;
    }

    // If lookup only, report the match without downloading anything.
    if (options?.lookupOnly) {
        core.info(`Cache found in GCS with key: ${gcsPath}`);
        return gcsPath;
    }

    // Only allocate the temp archive location once we know a download is
    // actually needed (the original created it before the lookup, wasting
    // a temp dir on misses and lookup-only runs).
    const archiveFolder = await utils.createTempDirectory();
    const archivePath = path.join(
        archiveFolder,
        utils.getCacheFileName(compressionMethod)
    );

    try {
        core.info(`Downloading from GCS: ${bucket}/${gcsPath}`);
        const file = storage.bucket(bucket).file(gcsPath);
        await file.download({ destination: archivePath });

        if (core.isDebug()) {
            await listTar(archivePath, compressionMethod);
        }

        const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath);
        core.info(
            `Cache Size: ~${Math.round(
                archiveFileSize / (1024 * 1024)
            )} MB (${archiveFileSize} B)`
        );

        await extractTar(archivePath, compressionMethod);
        core.info("Cache restored successfully");
        return gcsPath;
    } catch (error) {
        core.warning(`Failed to restore: ${(error as Error).message}`);
        // Explicit miss on error (was an implicit fall-through before,
        // which violates noImplicitReturns).
        return undefined;
    } finally {
        try {
            await utils.unlinkFile(archivePath);
        } catch (error) {
            core.debug(`Failed to delete archive: ${error}`);
        }
    }
}
// Maps a cache key to its object path inside the bucket:
// "<prefix>/<key>.<archive-file-name>", where the file name/extension
// depends on the compression method (e.g. .tzst vs .tgz).
// Typed parameters replace the original untyped `any`s.
function getGCSPath(
    pathPrefix: string,
    key: string,
    compressionMethod: CompressionMethod
): string {
    return `${pathPrefix}/${key}.${utils.getCacheFileName(compressionMethod)}`;
}
/**
 * Archives the resolved cache paths with tar and uploads the archive to
 * the configured GCS bucket under "<prefix>/<key>.<ext>".
 *
 * @returns 1 on success, -1 on failure (no GCS client, or tar/upload error).
 * @throws when none of the requested paths exist — mirrors @actions/cache,
 *         and is caught by saveCache's fallback handler.
 */
async function saveToGCS(
    paths: string[],
    key: string
): Promise<number> {
    const storage = getGCSClient();
    if (!storage) {
        // Removed the dead `let cacheId = -1` mutation pattern: the value
        // was only ever returned immediately, never updated.
        return -1;
    }

    const bucket = core.getInput(Inputs.GCSBucket);
    const pathPrefix = core.getInput(Inputs.GCSPathPrefix) || DEFAULT_PATH_PREFIX;
    const compressionMethod = await utils.getCompressionMethod();

    const cachePaths = await utils.resolvePaths(paths);
    core.debug("Cache Paths:");
    core.debug(`${JSON.stringify(cachePaths)}`);
    if (cachePaths.length === 0) {
        throw new Error(
            `Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`
        );
    }

    const archiveFolder = await utils.createTempDirectory();
    const archivePath = path.join(
        archiveFolder,
        utils.getCacheFileName(compressionMethod)
    );
    core.debug(`Archive Path: ${archivePath}`);

    try {
        await createTar(archiveFolder, cachePaths, compressionMethod);
        if (core.isDebug()) {
            await listTar(archivePath, compressionMethod);
        }

        const gcsPath = getGCSPath(pathPrefix, key, compressionMethod);
        core.info(`Uploading to GCS: ${bucket}/${gcsPath}`);
        await storage.bucket(bucket).upload(archivePath, {
            destination: gcsPath,
            // Resumable uploads are the client default for large files;
            // kept explicit since cache archives can be hundreds of MB.
            resumable: true
        });
        return 1;
    } catch (error) {
        core.warning(`Error creating or uploading cache: ${(error as Error).message}`);
        return -1;
    } finally {
        try {
            await utils.unlinkFile(archivePath);
        } catch (error) {
            core.debug(`Failed to delete archive: ${error}`);
        }
    }
}
async function findFileOnGCS(
storage: Storage,
bucket: string,
pathPrefix: string,
keys: string[],
compressionMethod: CompressionMethod,
): Promise<string | undefined> {
for (const key of keys) {
const gcsPath = getGCSPath(pathPrefix, key, compressionMethod)
if (await checkFileExists(storage, bucket, gcsPath)) {
core.info(`Found file on bucket: ${bucket} with key: ${gcsPath}`)
return gcsPath
}
}
return undefined
}
// Thin wrapper over the GCS File.exists() API for a single object;
// exists() resolves to a one-element tuple [boolean].
async function checkFileExists(storage: Storage, bucket: string, path: string): Promise<boolean> {
    const response = await storage.bucket(bucket).file(path).exists();
    return response[0];
}