mirror of https://code.forgejo.org/actions/cache.git synced 2024-11-30 15:59:14 +01:00

update for new beta release

Sampark Sharma 2022-12-12 13:01:08 +00:00 committed by GitHub
parent 5a2b5e5714
commit a2137c625c
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
5 changed files with 112 additions and 103 deletions

@@ -46,3 +46,6 @@
 ### 3.1.0-beta.2
 - Added support for fallback to gzip to restore old caches on windows.
+
+### 3.1.0-beta.3
+- Bug fixes for bsdtar fallback if gnutar not available and gzip fallback if cache saved using old cache action on windows.
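
The beta.3 entry above covers two Windows-related fixes that the dist diffs below implement: falling back to BSD tar when GNU tar is unavailable, and retrying a restore with gzip when the cache was saved by an older version of the action. The gzip retry is roughly the following TypeScript sketch; the CacheEntry shape and the injected getCacheEntry parameter are illustrative stand-ins for the action's internal cache client, and only @actions/core is a real import.

import * as core from '@actions/core';

// Illustrative stand-ins for the action's internal cache client types.
interface CacheEntry {
  archiveLocation?: string;
}
type GetCacheEntry = (
  keys: string[],
  paths: string[],
  options: {compressionMethod: string}
) => Promise<CacheEntry | null>;

// Look up the entry with the preferred compression first; on Windows, retry
// with gzip so caches saved by older versions of the action can still be hit.
async function findEntry(
  getCacheEntry: GetCacheEntry, // hypothetical injected client
  keys: string[],
  paths: string[],
  compressionMethod: string
): Promise<{entry: CacheEntry; compressionMethod: string} | undefined> {
  let entry = await getCacheEntry(keys, paths, {compressionMethod});
  if (entry?.archiveLocation) {
    return {entry, compressionMethod};
  }
  if (process.platform === 'win32' && compressionMethod !== 'gzip') {
    // Old caches on Windows were written with gzip; retry with that method.
    entry = await getCacheEntry(keys, paths, {compressionMethod: 'gzip'});
    if (entry?.archiveLocation) {
      core.debug('Falling back to gzip for a cache saved by an older action version.');
      return {entry, compressionMethod: 'gzip'};
    }
  }
  return undefined; // cache not found
}

In the shipped bundles this logic is inlined in restoreCache; the last two hunks of each dist file below show the exact change.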

dist/restore/index.js vendored (95 changes)

@@ -3432,6 +3432,7 @@ function getCacheEntry(keys, paths, options) {
         const resource = `cache?keys=${encodeURIComponent(keys.join(','))}&version=${version}`;
         const response = yield requestUtils_1.retryTypedResponse('getCacheEntry', () => __awaiter(this, void 0, void 0, function* () { return httpClient.getJson(getCacheApiUrl(resource)); }));
         if (response.statusCode === 204) {
+            // Cache not found
             return null;
         }
         if (!requestUtils_1.isSuccessStatusCode(response.statusCode)) {
@@ -3440,6 +3441,7 @@ function getCacheEntry(keys, paths, options) {
         const cacheResult = response.result;
         const cacheDownloadUrl = cacheResult === null || cacheResult === void 0 ? void 0 : cacheResult.archiveLocation;
         if (!cacheDownloadUrl) {
+            // Cache achiveLocation not found. This should never happen, and hence bail out.
             throw new Error('Cache not found.');
         }
         core.setSecret(cacheDownloadUrl);
@@ -38034,7 +38036,7 @@ const path = __importStar(__webpack_require__(622));
 const utils = __importStar(__webpack_require__(15));
 const constants_1 = __webpack_require__(931);
 const IS_WINDOWS = process.platform === 'win32';
-// Function also mutates the args array. For non-mutation call with passing an empty array.
+// Returns tar path and type: BSD or GNU
 function getTarPath() {
     return __awaiter(this, void 0, void 0, function* () {
         switch (process.platform) {
@@ -38066,6 +38068,7 @@ function getTarPath() {
             default:
                 break;
         }
+        // Default assumption is GNU tar is present in path
         return {
             path: yield io.which('tar', true),
             type: constants_1.ArchiveToolType.GNU
@@ -38079,6 +38082,7 @@ function getTarArgs(tarPath, compressionMethod, type, archivePath = '') {
         const cacheFileName = utils.getCacheFileName(compressionMethod);
         const tarFile = 'cache.tar';
         const workingDirectory = getWorkingDirectory();
+        // Speficic args for BSD tar on windows for workaround
         const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD &&
             compressionMethod !== constants_1.CompressionMethod.Gzip &&
             IS_WINDOWS;
@@ -38116,8 +38120,10 @@ function getTarArgs(tarPath, compressionMethod, type, archivePath = '') {
         return args;
     });
 }
-function getArgs(compressionMethod, type, archivePath = '') {
+// Returns commands to run tar and compression program
+function getCommands(compressionMethod, type, archivePath = '') {
     return __awaiter(this, void 0, void 0, function* () {
+        let args;
         const tarPath = yield getTarPath();
         const tarArgs = yield getTarArgs(tarPath, compressionMethod, type, archivePath);
         const compressionArgs = type !== 'create'
@@ -38127,11 +38133,15 @@ function getArgs(compressionMethod, type, archivePath = '') {
             compressionMethod !== constants_1.CompressionMethod.Gzip &&
             IS_WINDOWS;
         if (BSD_TAR_ZSTD && type !== 'create') {
-            return [...compressionArgs, ...tarArgs].join(' ');
+            args = [[...compressionArgs].join(' '), [...tarArgs].join(' ')];
         }
         else {
-            return [...tarArgs, ...compressionArgs].join(' ');
+            args = [[...tarArgs].join(' '), [...compressionArgs].join(' ')];
         }
+        if (BSD_TAR_ZSTD) {
+            return args;
+        }
+        return [args.join(' ')];
     });
 }
 function getWorkingDirectory() {
@@ -38154,8 +38164,7 @@ function getDecompressionProgram(tarPath, compressionMethod, archivePath) {
                     ? [
                         'zstd -d --long=30 -o',
                         constants_1.TarFilename,
-                        archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
-                        '&&'
+                        archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/')
                     ]
                     : [
                         '--use-compress-program',
@@ -38166,8 +38175,7 @@ function getDecompressionProgram(tarPath, compressionMethod, archivePath) {
                     ? [
                         'zstd -d -o',
                         constants_1.TarFilename,
-                        archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
-                        '&&'
+                        archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/')
                     ]
                     : ['--use-compress-program', IS_WINDOWS ? '"zstd -d"' : 'unzstd'];
             default:
@@ -38175,6 +38183,7 @@ function getDecompressionProgram(tarPath, compressionMethod, archivePath) {
         }
     });
 }
+// Used for creating the archive
 // -T#: Compress using # working thread. If # is 0, attempt to detect and use the number of physical CPU cores.
 // zstdmt is equivalent to 'zstd -T0'
 // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit.
@@ -38190,7 +38199,6 @@ function getCompressionProgram(tarPath, compressionMethod) {
             case constants_1.CompressionMethod.Zstd:
                 return BSD_TAR_ZSTD
                     ? [
-                        '&&',
                         'zstd -T0 --long=30 -o',
                         cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
                         constants_1.TarFilename
@@ -38202,7 +38210,6 @@ function getCompressionProgram(tarPath, compressionMethod) {
             case constants_1.CompressionMethod.ZstdWithoutLong:
                 return BSD_TAR_ZSTD
                     ? [
-                        '&&',
                         'zstd -T0 -o',
                         cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
                         constants_1.TarFilename
@@ -38213,44 +38220,45 @@ function getCompressionProgram(tarPath, compressionMethod) {
         }
     });
 }
-function listTar(archivePath, compressionMethod) {
+// Executes all commands as separate processes
+function execCommands(commands, cwd) {
     return __awaiter(this, void 0, void 0, function* () {
-        const args = yield getArgs(compressionMethod, 'list', archivePath);
-        try {
-            yield exec_1.exec(args);
-        }
-        catch (error) {
-            throw new Error(`Tar failed with error: ${error === null || error === void 0 ? void 0 : error.message}`);
+        for (const command of commands) {
+            try {
+                yield exec_1.exec(command, undefined, { cwd });
+            }
+            catch (error) {
+                throw new Error(`${command.split(' ')[0]} failed with error: ${error === null || error === void 0 ? void 0 : error.message}`);
+            }
         }
     });
 }
+// List the contents of a tar
+function listTar(archivePath, compressionMethod) {
+    return __awaiter(this, void 0, void 0, function* () {
+        const commands = yield getCommands(compressionMethod, 'list', archivePath);
+        yield execCommands(commands);
+    });
+}
 exports.listTar = listTar;
+// Extract a tar
 function extractTar(archivePath, compressionMethod) {
     return __awaiter(this, void 0, void 0, function* () {
         // Create directory to extract tar into
         const workingDirectory = getWorkingDirectory();
         yield io.mkdirP(workingDirectory);
-        const args = yield getArgs(compressionMethod, 'extract', archivePath);
-        try {
-            yield exec_1.exec(args);
-        }
-        catch (error) {
-            throw new Error(`Tar failed with error: ${error === null || error === void 0 ? void 0 : error.message}`);
-        }
+        const commands = yield getCommands(compressionMethod, 'extract', archivePath);
+        yield execCommands(commands);
     });
 }
 exports.extractTar = extractTar;
+// Create a tar
 function createTar(archiveFolder, sourceDirectories, compressionMethod) {
     return __awaiter(this, void 0, void 0, function* () {
         // Write source directories to manifest.txt to avoid command length limits
         fs_1.writeFileSync(path.join(archiveFolder, constants_1.ManifestFilename), sourceDirectories.join('\n'));
-        const args = yield getArgs(compressionMethod, 'create');
-        try {
-            yield exec_1.exec(args, undefined, { cwd: archiveFolder });
-        }
-        catch (error) {
-            throw new Error(`Tar failed with error: ${error === null || error === void 0 ? void 0 : error.message}`);
-        }
+        const commands = yield getCommands(compressionMethod, 'create');
+        yield execCommands(commands, archiveFolder);
     });
 }
 exports.createTar = createTar;
@@ -47147,15 +47155,12 @@ function restoreCache(paths, primaryKey, restoreKeys, options) {
         let compressionMethod = yield utils.getCompressionMethod();
         let archivePath = '';
         try {
-            try {
-                // path are needed to compute version
-                cacheEntry = yield cacheHttpClient.getCacheEntry(keys, paths, {
-                    compressionMethod
-                });
-            }
-            catch (error) {
-                // This is to support the old cache entry created
-                // by the old version of the cache action on windows.
+            // path are needed to compute version
+            cacheEntry = yield cacheHttpClient.getCacheEntry(keys, paths, {
+                compressionMethod
+            });
+            if (!(cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.archiveLocation)) {
+                // This is to support the old cache entry created by gzip on windows.
                 if (process.platform === 'win32' &&
                     compressionMethod !== constants_1.CompressionMethod.Gzip) {
                     compressionMethod = constants_1.CompressionMethod.Gzip;
@@ -47163,17 +47168,15 @@ function restoreCache(paths, primaryKey, restoreKeys, options) {
                         compressionMethod
                     });
                     if (!(cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.archiveLocation)) {
-                        throw error;
+                        return undefined;
                     }
+                    core.debug("Couldn't find cache entry with zstd compression, falling back to gzip compression.");
                 }
                 else {
-                    throw error;
+                    // Cache not found
+                    return undefined;
                 }
             }
-            if (!(cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.archiveLocation)) {
-                // Cache not found
-                return undefined;
-            }
             archivePath = path.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod));
             core.debug(`Archive Path: ${archivePath}`);
             // Download the cache from the cache entry
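
The tar hunks above replace getArgs, which built one combined command string, with getCommands plus execCommands: on Windows with BSD tar and a zstd cache, the zstd step and the tar step are returned as two separate commands instead of being chained with '&&' inside one line, and execCommands runs them one after another as separate processes. A minimal TypeScript sketch of that shape follows; the command strings are illustrative, @actions/exec's exec is the only real API used, and for 'create' the order is reversed (tar first, then compress).

import {exec} from '@actions/exec';

// Build the command lines to run. For the BSD tar + zstd workaround on
// Windows the zstd step and the tar step stay separate; otherwise they are
// joined into a single command line as before.
function getCommands(tarCommand: string, zstdCommand: string, splitBsdTarZstd: boolean): string[] {
  if (splitBsdTarZstd) {
    return [zstdCommand, tarCommand]; // e.g. decompress to cache.tar first, then untar it
  }
  return [`${tarCommand} ${zstdCommand}`];
}

// Mirrors execCommands in the diff above: run each command as its own
// process and report which program failed instead of always blaming tar.
async function execCommands(commands: string[], cwd?: string): Promise<void> {
  for (const command of commands) {
    try {
      await exec(command, undefined, {cwd});
    } catch (error) {
      throw new Error(
        `${command.split(' ')[0]} failed with error: ${error instanceof Error ? error.message : error}`
      );
    }
  }
}

// Hypothetical usage for an extract on Windows with BSD tar and zstd:
// await execCommands(
//   getCommands('tar -xf cache.tar -P -C D:/a/work', 'zstd -d --long=30 -o cache.tar archive.tzst', true)
// );

Dropping the '&&' chaining is also why the lone '&&' entries disappear from getDecompressionProgram and getCompressionProgram in the hunks above.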

dist/save/index.js vendored (95 changes)

@@ -3432,6 +3432,7 @@ function getCacheEntry(keys, paths, options) {
         const resource = `cache?keys=${encodeURIComponent(keys.join(','))}&version=${version}`;
         const response = yield requestUtils_1.retryTypedResponse('getCacheEntry', () => __awaiter(this, void 0, void 0, function* () { return httpClient.getJson(getCacheApiUrl(resource)); }));
         if (response.statusCode === 204) {
+            // Cache not found
             return null;
         }
         if (!requestUtils_1.isSuccessStatusCode(response.statusCode)) {
@@ -3440,6 +3441,7 @@ function getCacheEntry(keys, paths, options) {
         const cacheResult = response.result;
         const cacheDownloadUrl = cacheResult === null || cacheResult === void 0 ? void 0 : cacheResult.archiveLocation;
         if (!cacheDownloadUrl) {
+            // Cache achiveLocation not found. This should never happen, and hence bail out.
             throw new Error('Cache not found.');
         }
         core.setSecret(cacheDownloadUrl);
@@ -38034,7 +38036,7 @@ const path = __importStar(__webpack_require__(622));
 const utils = __importStar(__webpack_require__(15));
 const constants_1 = __webpack_require__(931);
 const IS_WINDOWS = process.platform === 'win32';
-// Function also mutates the args array. For non-mutation call with passing an empty array.
+// Returns tar path and type: BSD or GNU
 function getTarPath() {
     return __awaiter(this, void 0, void 0, function* () {
         switch (process.platform) {
@@ -38066,6 +38068,7 @@ function getTarPath() {
             default:
                 break;
         }
+        // Default assumption is GNU tar is present in path
        return {
             path: yield io.which('tar', true),
             type: constants_1.ArchiveToolType.GNU
@@ -38079,6 +38082,7 @@ function getTarArgs(tarPath, compressionMethod, type, archivePath = '') {
         const cacheFileName = utils.getCacheFileName(compressionMethod);
         const tarFile = 'cache.tar';
         const workingDirectory = getWorkingDirectory();
+        // Speficic args for BSD tar on windows for workaround
         const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD &&
             compressionMethod !== constants_1.CompressionMethod.Gzip &&
             IS_WINDOWS;
@@ -38116,8 +38120,10 @@ function getTarArgs(tarPath, compressionMethod, type, archivePath = '') {
         return args;
     });
 }
-function getArgs(compressionMethod, type, archivePath = '') {
+// Returns commands to run tar and compression program
+function getCommands(compressionMethod, type, archivePath = '') {
     return __awaiter(this, void 0, void 0, function* () {
+        let args;
         const tarPath = yield getTarPath();
         const tarArgs = yield getTarArgs(tarPath, compressionMethod, type, archivePath);
         const compressionArgs = type !== 'create'
@@ -38127,11 +38133,15 @@ function getArgs(compressionMethod, type, archivePath = '') {
             compressionMethod !== constants_1.CompressionMethod.Gzip &&
             IS_WINDOWS;
         if (BSD_TAR_ZSTD && type !== 'create') {
-            return [...compressionArgs, ...tarArgs].join(' ');
+            args = [[...compressionArgs].join(' '), [...tarArgs].join(' ')];
         }
         else {
-            return [...tarArgs, ...compressionArgs].join(' ');
+            args = [[...tarArgs].join(' '), [...compressionArgs].join(' ')];
         }
+        if (BSD_TAR_ZSTD) {
+            return args;
+        }
+        return [args.join(' ')];
     });
 }
 function getWorkingDirectory() {
@@ -38154,8 +38164,7 @@ function getDecompressionProgram(tarPath, compressionMethod, archivePath) {
                     ? [
                         'zstd -d --long=30 -o',
                         constants_1.TarFilename,
-                        archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
-                        '&&'
+                        archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/')
                     ]
                     : [
                         '--use-compress-program',
@@ -38166,8 +38175,7 @@ function getDecompressionProgram(tarPath, compressionMethod, archivePath) {
                     ? [
                         'zstd -d -o',
                         constants_1.TarFilename,
-                        archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
-                        '&&'
+                        archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/')
                     ]
                     : ['--use-compress-program', IS_WINDOWS ? '"zstd -d"' : 'unzstd'];
             default:
@@ -38175,6 +38183,7 @@ function getDecompressionProgram(tarPath, compressionMethod, archivePath) {
         }
     });
 }
+// Used for creating the archive
 // -T#: Compress using # working thread. If # is 0, attempt to detect and use the number of physical CPU cores.
 // zstdmt is equivalent to 'zstd -T0'
 // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit.
@@ -38190,7 +38199,6 @@ function getCompressionProgram(tarPath, compressionMethod) {
             case constants_1.CompressionMethod.Zstd:
                 return BSD_TAR_ZSTD
                     ? [
-                        '&&',
                         'zstd -T0 --long=30 -o',
                         cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
                         constants_1.TarFilename
@@ -38202,7 +38210,6 @@ function getCompressionProgram(tarPath, compressionMethod) {
             case constants_1.CompressionMethod.ZstdWithoutLong:
                 return BSD_TAR_ZSTD
                     ? [
-                        '&&',
                         'zstd -T0 -o',
                         cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
                         constants_1.TarFilename
@@ -38213,44 +38220,45 @@ function getCompressionProgram(tarPath, compressionMethod) {
         }
     });
 }
-function listTar(archivePath, compressionMethod) {
+// Executes all commands as separate processes
+function execCommands(commands, cwd) {
     return __awaiter(this, void 0, void 0, function* () {
-        const args = yield getArgs(compressionMethod, 'list', archivePath);
-        try {
-            yield exec_1.exec(args);
-        }
-        catch (error) {
-            throw new Error(`Tar failed with error: ${error === null || error === void 0 ? void 0 : error.message}`);
+        for (const command of commands) {
+            try {
+                yield exec_1.exec(command, undefined, { cwd });
+            }
+            catch (error) {
+                throw new Error(`${command.split(' ')[0]} failed with error: ${error === null || error === void 0 ? void 0 : error.message}`);
+            }
         }
     });
 }
+// List the contents of a tar
+function listTar(archivePath, compressionMethod) {
+    return __awaiter(this, void 0, void 0, function* () {
+        const commands = yield getCommands(compressionMethod, 'list', archivePath);
+        yield execCommands(commands);
+    });
+}
 exports.listTar = listTar;
+// Extract a tar
 function extractTar(archivePath, compressionMethod) {
     return __awaiter(this, void 0, void 0, function* () {
         // Create directory to extract tar into
         const workingDirectory = getWorkingDirectory();
         yield io.mkdirP(workingDirectory);
-        const args = yield getArgs(compressionMethod, 'extract', archivePath);
-        try {
-            yield exec_1.exec(args);
-        }
-        catch (error) {
-            throw new Error(`Tar failed with error: ${error === null || error === void 0 ? void 0 : error.message}`);
-        }
+        const commands = yield getCommands(compressionMethod, 'extract', archivePath);
+        yield execCommands(commands);
     });
 }
 exports.extractTar = extractTar;
+// Create a tar
 function createTar(archiveFolder, sourceDirectories, compressionMethod) {
     return __awaiter(this, void 0, void 0, function* () {
         // Write source directories to manifest.txt to avoid command length limits
         fs_1.writeFileSync(path.join(archiveFolder, constants_1.ManifestFilename), sourceDirectories.join('\n'));
-        const args = yield getArgs(compressionMethod, 'create');
-        try {
-            yield exec_1.exec(args, undefined, { cwd: archiveFolder });
-        }
-        catch (error) {
-            throw new Error(`Tar failed with error: ${error === null || error === void 0 ? void 0 : error.message}`);
-        }
+        const commands = yield getCommands(compressionMethod, 'create');
+        yield execCommands(commands, archiveFolder);
     });
 }
 exports.createTar = createTar;
@@ -47233,15 +47241,12 @@ function restoreCache(paths, primaryKey, restoreKeys, options) {
         let compressionMethod = yield utils.getCompressionMethod();
         let archivePath = '';
         try {
-            try {
-                // path are needed to compute version
-                cacheEntry = yield cacheHttpClient.getCacheEntry(keys, paths, {
-                    compressionMethod
-                });
-            }
-            catch (error) {
-                // This is to support the old cache entry created
-                // by the old version of the cache action on windows.
+            // path are needed to compute version
+            cacheEntry = yield cacheHttpClient.getCacheEntry(keys, paths, {
+                compressionMethod
+            });
+            if (!(cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.archiveLocation)) {
+                // This is to support the old cache entry created by gzip on windows.
                 if (process.platform === 'win32' &&
                     compressionMethod !== constants_1.CompressionMethod.Gzip) {
                     compressionMethod = constants_1.CompressionMethod.Gzip;
@@ -47249,17 +47254,15 @@ function restoreCache(paths, primaryKey, restoreKeys, options) {
                         compressionMethod
                     });
                     if (!(cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.archiveLocation)) {
-                        throw error;
+                        return undefined;
                     }
+                    core.debug("Couldn't find cache entry with zstd compression, falling back to gzip compression.");
                 }
                 else {
-                    throw error;
+                    // Cache not found
+                    return undefined;
                 }
             }
-            if (!(cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.archiveLocation)) {
-                // Cache not found
-                return undefined;
-            }
             archivePath = path.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod));
             core.debug(`Archive Path: ${archivePath}`);
             // Download the cache from the cache entry

package-lock.json generated (18 changes)

@@ -1,15 +1,15 @@
 {
   "name": "cache",
-  "version": "3.1.0-beta.2",
+  "version": "3.1.0-beta.3",
   "lockfileVersion": 2,
   "requires": true,
   "packages": {
     "": {
       "name": "cache",
-      "version": "3.1.0-beta.2",
+      "version": "3.1.0-beta.3",
       "license": "MIT",
       "dependencies": {
-        "@actions/cache": "3.1.0-beta.2",
+        "@actions/cache": "3.1.0-beta.3",
         "@actions/core": "^1.10.0",
         "@actions/exec": "^1.1.1",
         "@actions/io": "^1.1.2"
@@ -36,9 +36,9 @@
       }
     },
     "node_modules/@actions/cache": {
-      "version": "3.1.0-beta.2",
-      "resolved": "https://registry.npmjs.org/@actions/cache/-/cache-3.1.0-beta.2.tgz",
-      "integrity": "sha512-xt9NLWPCh5WU9Z5ITeGpT5Nza/57wMXeLsGuNVcRCIVpPuNTf3Puj82vjZZQw4rGqiCCs+n4+hnkTcE9BKw2sw==",
+      "version": "3.1.0-beta.3",
+      "resolved": "https://registry.npmjs.org/@actions/cache/-/cache-3.1.0-beta.3.tgz",
+      "integrity": "sha512-71S1vd0WKLbC2lAe04pCYqTLBjSa8gURtiqnVBCYAt8QVBjOfwa2D3ESf2m8K2xjUxman/Yimdp7CPJDyFnxZg==",
       "dependencies": {
         "@actions/core": "^1.10.0",
         "@actions/exec": "^1.0.1",
@@ -9722,9 +9722,9 @@
     },
   "dependencies": {
     "@actions/cache": {
-      "version": "3.1.0-beta.2",
-      "resolved": "https://registry.npmjs.org/@actions/cache/-/cache-3.1.0-beta.2.tgz",
-      "integrity": "sha512-xt9NLWPCh5WU9Z5ITeGpT5Nza/57wMXeLsGuNVcRCIVpPuNTf3Puj82vjZZQw4rGqiCCs+n4+hnkTcE9BKw2sw==",
+      "version": "3.1.0-beta.3",
+      "resolved": "https://registry.npmjs.org/@actions/cache/-/cache-3.1.0-beta.3.tgz",
+      "integrity": "sha512-71S1vd0WKLbC2lAe04pCYqTLBjSa8gURtiqnVBCYAt8QVBjOfwa2D3ESf2m8K2xjUxman/Yimdp7CPJDyFnxZg==",
       "requires": {
         "@actions/core": "^1.10.0",
         "@actions/exec": "^1.0.1",

package.json

@@ -1,6 +1,6 @@
 {
   "name": "cache",
-  "version": "3.1.0-beta.2",
+  "version": "3.1.0-beta.3",
   "private": true,
   "description": "Cache dependencies and build outputs",
   "main": "dist/restore/index.js",
@@ -23,7 +23,7 @@
   "author": "GitHub",
   "license": "MIT",
   "dependencies": {
-    "@actions/cache": "3.1.0-beta.2",
+    "@actions/cache": "3.1.0-beta.3",
     "@actions/core": "^1.10.0",
     "@actions/exec": "^1.1.1",
     "@actions/io": "^1.1.2"