Mirror of https://code.forgejo.org/actions/cache.git (synced 2024-11-30 15:59:14 +01:00)
Add tests for restoreCacheV2

commit 57eada13e4 (parent 0023caa23c)
4 changed files with 644 additions and 832 deletions
dist/restore-only/index.js (vendored), 369 changes
@@ -5954,8 +5954,9 @@ exports.isFeatureAvailable = isFeatureAvailable;
  */
 function restoreCache(paths, primaryKey, restoreKeys, options, enableCrossOsArchive = false) {
     return __awaiter(this, void 0, void 0, function* () {
-        checkPaths(paths);
         const cacheServiceVersion = (0, config_1.getCacheServiceVersion)();
+        core.debug(`Cache service version: ${cacheServiceVersion}`);
+        checkPaths(paths);
         switch (cacheServiceVersion) {
             case 'v2':
                 return yield restoreCacheV2(paths, primaryKey, restoreKeys, options, enableCrossOsArchive);
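The hunk above reorders restoreCache so the cache service version is resolved and logged before path validation, then dispatches to restoreCacheV2 when the v2 backend is selected. The following TypeScript sketch restates that dispatch in un-transpiled form; the declared helpers and the v1 fallback branch are assumptions for illustration, since only the v2 case is visible in this hunk.

import * as core from '@actions/core'

// Assumed helper signatures; the real implementations live elsewhere in the bundle.
declare function getCacheServiceVersion(): string
declare function checkPaths(paths: string[]): void
declare function restoreCacheV1(paths: string[], primaryKey: string, restoreKeys?: string[], options?: object, enableCrossOsArchive?: boolean): Promise<string | undefined>
declare function restoreCacheV2(paths: string[], primaryKey: string, restoreKeys?: string[], options?: object, enableCrossOsArchive?: boolean): Promise<string | undefined>

export async function restoreCache(
  paths: string[],
  primaryKey: string,
  restoreKeys?: string[],
  options?: object,
  enableCrossOsArchive = false
): Promise<string | undefined> {
  const cacheServiceVersion = getCacheServiceVersion()
  core.debug(`Cache service version: ${cacheServiceVersion}`)
  checkPaths(paths)
  switch (cacheServiceVersion) {
    case 'v2':
      return await restoreCacheV2(paths, primaryKey, restoreKeys, options, enableCrossOsArchive)
    default:
      // The v1 fallback is assumed here; this hunk only shows the v2 case.
      return await restoreCacheV1(paths, primaryKey, restoreKeys, options, enableCrossOsArchive)
  }
}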
@@ -6078,12 +6079,13 @@ function restoreCacheV2(paths, primaryKey, restoreKeys, options, enableCrossOsAr
             core.info(`Cache hit for: ${request.key}`);
             if (options === null || options === void 0 ? void 0 : options.lookupOnly) {
                 core.info('Lookup only - skipping download');
-                return request.key;
+                return response.matchedKey;
             }
             archivePath = path.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod));
             core.debug(`Archive path: ${archivePath}`);
             core.debug(`Starting download of archive to: ${archivePath}`);
-            yield (0, download_cache_1.downloadCacheFile)(response.signedDownloadUrl, archivePath);
+            const downloadResponse = yield (0, download_cache_1.downloadCacheFile)(response.signedDownloadUrl, archivePath);
+            core.debug(`Download response status: ${downloadResponse._response.status}`);
             const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath);
             core.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`);
             if (core.isDebug()) {
@@ -6091,10 +6093,17 @@ function restoreCacheV2(paths, primaryKey, restoreKeys, options, enableCrossOsAr
             }
             yield (0, tar_1.extractTar)(archivePath, compressionMethod);
             core.info('Cache restored successfully');
-            return request.key;
+            return response.matchedKey;
         }
         catch (error) {
-            throw new Error(`Failed to restore: ${error.message}`);
+            const typedError = error;
+            if (typedError.name === ValidationError.name) {
+                throw error;
+            }
+            else {
+                // Supress all non-validation cache related errors because caching should be optional
+                core.warning(`Failed to restore: ${error.message}`);
+            }
         }
         finally {
             try {
@@ -6106,6 +6115,7 @@ function restoreCacheV2(paths, primaryKey, restoreKeys, options, enableCrossOsAr
                 core.debug(`Failed to delete archive: ${error}`);
             }
         }
+        return undefined;
     });
 }
 /**
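Two behavioral changes are visible in these hunks: restoreCacheV2 now returns the matchedKey reported by the service (which may be a restore-key match rather than the primary key), and failures other than ValidationError no longer throw; they are logged as warnings and the function falls through to return undefined. A condensed TypeScript sketch of that control flow, assuming the package's own ValidationError class and treating the lookup/download/extract sequence as an opaque callback:

import * as core from '@actions/core'

// Assumed to match the error class referenced by the bundled code above.
declare class ValidationError extends Error {}

async function restoreV2Sketch(doRestore: () => Promise<string>): Promise<string | undefined> {
  try {
    // doRestore stands in for the lookup/download/extract sequence and
    // resolves to response.matchedKey on success.
    return await doRestore()
  } catch (error) {
    const typedError = error as Error
    if (typedError.name === ValidationError.name) {
      throw error // invalid inputs should still fail the step
    }
    // Non-validation errors are suppressed because caching is best-effort.
    core.warning(`Failed to restore: ${typedError.message}`)
  }
  return undefined
}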
@@ -6269,7 +6279,15 @@ function saveCacheV2(paths, key, options, enableCrossOsArchive = false) {
         }
         catch (error) {
             const typedError = error;
-            core.warning(`Failed to save: ${typedError.message}`);
+            if (typedError.name === ValidationError.name) {
+                throw error;
+            }
+            else if (typedError.name === ReserveCacheError.name) {
+                core.info(`Failed to save: ${typedError.message}`);
+            }
+            else {
+                core.warning(`Failed to save: ${typedError.message}`);
+            }
         }
         finally {
             // Try to delete the archive to save space
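saveCacheV2 gains tiered error handling: ValidationError is rethrown, ReserveCacheError (typically meaning the key could not be reserved, for example because another job is already saving it) is reported at info level, and anything else only produces a warning. A minimal sketch of that tiering, with both error classes assumed to be the package's own:

import * as core from '@actions/core'

declare class ValidationError extends Error {}
declare class ReserveCacheError extends Error {}

function reportSaveFailure(error: Error): void {
  if (error.name === ValidationError.name) {
    throw error                                      // bad inputs: fail the step
  } else if (error.name === ReserveCacheError.name) {
    core.info(`Failed to save: ${error.message}`)    // key not reserved: informational
  } else {
    core.warning(`Failed to save: ${error.message}`) // everything else: warn and continue
  }
}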
@@ -6436,7 +6454,7 @@ exports.Timestamp = new Timestamp$Type();
 "use strict";
 
 Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports.CacheService = exports.LookupCacheEntryResponse_CacheEntry = exports.LookupCacheEntryResponse = exports.LookupCacheEntryRequest = exports.ListCacheEntriesResponse_CacheEntry = exports.ListCacheEntriesResponse = exports.ListCacheEntriesRequest = exports.DeleteCacheEntryResponse = exports.DeleteCacheEntryRequest = exports.GetCacheEntryDownloadURLResponse = exports.GetCacheEntryDownloadURLRequest = exports.FinalizeCacheEntryUploadResponse = exports.FinalizeCacheEntryUploadRequest = exports.CreateCacheEntryResponse = exports.CreateCacheEntryRequest = void 0;
+exports.CacheService = exports.LookupCacheEntryResponse = exports.LookupCacheEntryRequest = exports.ListCacheEntriesResponse = exports.ListCacheEntriesRequest = exports.DeleteCacheEntryResponse = exports.DeleteCacheEntryRequest = exports.GetCacheEntryDownloadURLResponse = exports.GetCacheEntryDownloadURLRequest = exports.FinalizeCacheEntryUploadResponse = exports.FinalizeCacheEntryUploadRequest = exports.CreateCacheEntryResponse = exports.CreateCacheEntryRequest = void 0;
 // @generated by protobuf-ts 2.9.1 with parameter long_type_string,client_none,generate_dependencies
 // @generated from protobuf file "results/api/v1/cache.proto" (package "github.actions.results.api.v1", syntax proto3)
 // tslint:disable
@@ -6446,7 +6464,7 @@ const runtime_2 = __nccwpck_require__(3503);
 const runtime_3 = __nccwpck_require__(3503);
 const runtime_4 = __nccwpck_require__(3503);
 const runtime_5 = __nccwpck_require__(3503);
-const timestamp_1 = __nccwpck_require__(8983);
+const cacheentry_1 = __nccwpck_require__(1309);
 const cachemetadata_1 = __nccwpck_require__(2773);
 // @generated message type with reflection information, may provide speed optimized methods
 class CreateCacheEntryRequest$Type extends runtime_5.MessageType {
@@ -6758,11 +6776,12 @@ class GetCacheEntryDownloadURLResponse$Type extends runtime_5.MessageType {
     constructor() {
         super("github.actions.results.api.v1.GetCacheEntryDownloadURLResponse", [
             { no: 1, name: "ok", kind: "scalar", T: 8 /*ScalarType.BOOL*/ },
-            { no: 2, name: "signed_download_url", kind: "scalar", T: 9 /*ScalarType.STRING*/ }
+            { no: 2, name: "signed_download_url", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
+            { no: 3, name: "matched_key", kind: "scalar", T: 9 /*ScalarType.STRING*/ }
         ]);
     }
     create(value) {
-        const message = { ok: false, signedDownloadUrl: "" };
+        const message = { ok: false, signedDownloadUrl: "", matchedKey: "" };
         globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
         if (value !== undefined)
             (0, runtime_3.reflectionMergePartial)(this, message, value);
@@ -6779,6 +6798,9 @@ class GetCacheEntryDownloadURLResponse$Type extends runtime_5.MessageType {
                 case /* string signed_download_url */ 2:
                     message.signedDownloadUrl = reader.string();
                     break;
+                case /* string matched_key */ 3:
+                    message.matchedKey = reader.string();
+                    break;
                 default:
                     let u = options.readUnknownField;
                     if (u === "throw")
@@ -6797,6 +6819,9 @@ class GetCacheEntryDownloadURLResponse$Type extends runtime_5.MessageType {
         /* string signed_download_url = 2; */
         if (message.signedDownloadUrl !== "")
             writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.signedDownloadUrl);
+        /* string matched_key = 3; */
+        if (message.matchedKey !== "")
+            writer.tag(3, runtime_1.WireType.LengthDelimited).string(message.matchedKey);
         let u = options.writeUnknownFields;
         if (u !== false)
             (u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
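The GetCacheEntryDownloadURLResponse message gains a third field, matched_key, which is what restoreCacheV2 now returns to its caller. A small TypeScript sketch of building and round-tripping the message with the protobuf-ts generated type; the import path is illustrative (the generated sources ship inside the @actions/cache bundle), and the URL and key values are made up:

// Illustrative import path; adjust to wherever the generated cache.proto types live.
import { GetCacheEntryDownloadURLResponse } from './generated/results/api/v1/cache'

const response = GetCacheEntryDownloadURLResponse.create({
  ok: true,
  signedDownloadUrl: 'https://example.com/cache/archive.tzst', // hypothetical value
  matchedKey: 'node-cache-linux-abc123'                        // hypothetical value
})

// protobuf-ts MessageType instances provide binary (de)serialization helpers.
const bytes = GetCacheEntryDownloadURLResponse.toBinary(response)
const decoded = GetCacheEntryDownloadURLResponse.fromBinary(bytes)
console.log(decoded.matchedKey) // "node-cache-linux-abc123"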
@@ -6980,7 +7005,7 @@ exports.ListCacheEntriesRequest = new ListCacheEntriesRequest$Type();
 class ListCacheEntriesResponse$Type extends runtime_5.MessageType {
     constructor() {
         super("github.actions.results.api.v1.ListCacheEntriesResponse", [
-            { no: 1, name: "entries", kind: "message", repeat: 1 /*RepeatType.PACKED*/, T: () => exports.ListCacheEntriesResponse_CacheEntry }
+            { no: 1, name: "entries", kind: "message", repeat: 1 /*RepeatType.PACKED*/, T: () => cacheentry_1.CacheEntry }
         ]);
     }
     create(value) {
@@ -6995,8 +7020,8 @@ class ListCacheEntriesResponse$Type extends runtime_5.MessageType {
         while (reader.pos < end) {
             let [fieldNo, wireType] = reader.tag();
             switch (fieldNo) {
-                case /* repeated github.actions.results.api.v1.ListCacheEntriesResponse.CacheEntry entries */ 1:
-                    message.entries.push(exports.ListCacheEntriesResponse_CacheEntry.internalBinaryRead(reader, reader.uint32(), options));
+                case /* repeated github.actions.results.entities.v1.CacheEntry entries */ 1:
+                    message.entries.push(cacheentry_1.CacheEntry.internalBinaryRead(reader, reader.uint32(), options));
                     break;
                 default:
                     let u = options.readUnknownField;
@@ -7010,9 +7035,9 @@ class ListCacheEntriesResponse$Type extends runtime_5.MessageType {
         return message;
     }
     internalBinaryWrite(message, writer, options) {
-        /* repeated github.actions.results.api.v1.ListCacheEntriesResponse.CacheEntry entries = 1; */
+        /* repeated github.actions.results.entities.v1.CacheEntry entries = 1; */
         for (let i = 0; i < message.entries.length; i++)
-            exports.ListCacheEntriesResponse_CacheEntry.internalBinaryWrite(message.entries[i], writer.tag(1, runtime_1.WireType.LengthDelimited).fork(), options).join();
+            cacheentry_1.CacheEntry.internalBinaryWrite(message.entries[i], writer.tag(1, runtime_1.WireType.LengthDelimited).fork(), options).join();
         let u = options.writeUnknownFields;
         if (u !== false)
             (u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
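ListCacheEntriesResponse now carries entries of the shared github.actions.results.entities.v1.CacheEntry type instead of a nested response-specific message. A brief sketch of constructing such a response with the generated types; the import paths and values are illustrative:

// Illustrative import paths for the protobuf-ts generated modules.
import { ListCacheEntriesResponse } from './generated/results/api/v1/cache'
import { CacheEntry } from './generated/results/entities/v1/cacheentry'

const listResponse = ListCacheEntriesResponse.create({
  entries: [
    // sizeBytes is a string because the code is generated with long_type_string.
    CacheEntry.create({ key: 'node-cache-linux-abc123', sizeBytes: '1048576' })
  ]
})
console.log(listResponse.entries.length) // 1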
@@ -7024,102 +7049,6 @@ class ListCacheEntriesResponse$Type extends runtime_5.MessageType {
  */
 exports.ListCacheEntriesResponse = new ListCacheEntriesResponse$Type();
 // @generated message type with reflection information, may provide speed optimized methods
-class ListCacheEntriesResponse_CacheEntry$Type extends runtime_5.MessageType {
-    constructor() {
-        super("github.actions.results.api.v1.ListCacheEntriesResponse.CacheEntry", [
-            { no: 1, name: "key", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
-            { no: 2, name: "hash", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
-            { no: 3, name: "size_bytes", kind: "scalar", T: 3 /*ScalarType.INT64*/ },
-            { no: 4, name: "scope", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
-            { no: 5, name: "version", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
-            { no: 6, name: "created_at", kind: "message", T: () => timestamp_1.Timestamp },
-            { no: 7, name: "last_accessed_at", kind: "message", T: () => timestamp_1.Timestamp },
-            { no: 8, name: "expires_at", kind: "message", T: () => timestamp_1.Timestamp }
-        ]);
-    }
-    create(value) {
-        const message = { key: "", hash: "", sizeBytes: "0", scope: "", version: "" };
-        globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
-        if (value !== undefined)
-            (0, runtime_3.reflectionMergePartial)(this, message, value);
-        return message;
-    }
-    internalBinaryRead(reader, length, options, target) {
-        let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
-        while (reader.pos < end) {
-            let [fieldNo, wireType] = reader.tag();
-            switch (fieldNo) {
-                case /* string key */ 1:
-                    message.key = reader.string();
-                    break;
-                case /* string hash */ 2:
-                    message.hash = reader.string();
-                    break;
-                case /* int64 size_bytes */ 3:
-                    message.sizeBytes = reader.int64().toString();
-                    break;
-                case /* string scope */ 4:
-                    message.scope = reader.string();
-                    break;
-                case /* string version */ 5:
-                    message.version = reader.string();
-                    break;
-                case /* google.protobuf.Timestamp created_at */ 6:
-                    message.createdAt = timestamp_1.Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.createdAt);
-                    break;
-                case /* google.protobuf.Timestamp last_accessed_at */ 7:
-                    message.lastAccessedAt = timestamp_1.Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.lastAccessedAt);
-                    break;
-                case /* google.protobuf.Timestamp expires_at */ 8:
-                    message.expiresAt = timestamp_1.Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.expiresAt);
-                    break;
-                default:
-                    let u = options.readUnknownField;
-                    if (u === "throw")
-                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
-                    let d = reader.skip(wireType);
-                    if (u !== false)
-                        (u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
-            }
-        }
-        return message;
-    }
-    internalBinaryWrite(message, writer, options) {
-        /* string key = 1; */
-        if (message.key !== "")
-            writer.tag(1, runtime_1.WireType.LengthDelimited).string(message.key);
-        /* string hash = 2; */
-        if (message.hash !== "")
-            writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.hash);
-        /* int64 size_bytes = 3; */
-        if (message.sizeBytes !== "0")
-            writer.tag(3, runtime_1.WireType.Varint).int64(message.sizeBytes);
-        /* string scope = 4; */
-        if (message.scope !== "")
-            writer.tag(4, runtime_1.WireType.LengthDelimited).string(message.scope);
-        /* string version = 5; */
-        if (message.version !== "")
-            writer.tag(5, runtime_1.WireType.LengthDelimited).string(message.version);
-        /* google.protobuf.Timestamp created_at = 6; */
-        if (message.createdAt)
-            timestamp_1.Timestamp.internalBinaryWrite(message.createdAt, writer.tag(6, runtime_1.WireType.LengthDelimited).fork(), options).join();
-        /* google.protobuf.Timestamp last_accessed_at = 7; */
-        if (message.lastAccessedAt)
-            timestamp_1.Timestamp.internalBinaryWrite(message.lastAccessedAt, writer.tag(7, runtime_1.WireType.LengthDelimited).fork(), options).join();
-        /* google.protobuf.Timestamp expires_at = 8; */
-        if (message.expiresAt)
-            timestamp_1.Timestamp.internalBinaryWrite(message.expiresAt, writer.tag(8, runtime_1.WireType.LengthDelimited).fork(), options).join();
-        let u = options.writeUnknownFields;
-        if (u !== false)
-            (u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
-        return writer;
-    }
-}
-/**
- * @generated MessageType for protobuf message github.actions.results.api.v1.ListCacheEntriesResponse.CacheEntry
- */
-exports.ListCacheEntriesResponse_CacheEntry = new ListCacheEntriesResponse_CacheEntry$Type();
-// @generated message type with reflection information, may provide speed optimized methods
 class LookupCacheEntryRequest$Type extends runtime_5.MessageType {
     constructor() {
         super("github.actions.results.api.v1.LookupCacheEntryRequest", [
@@ -7191,7 +7120,8 @@ exports.LookupCacheEntryRequest = new LookupCacheEntryRequest$Type();
 class LookupCacheEntryResponse$Type extends runtime_5.MessageType {
     constructor() {
         super("github.actions.results.api.v1.LookupCacheEntryResponse", [
-            { no: 1, name: "exists", kind: "scalar", T: 8 /*ScalarType.BOOL*/ }
+            { no: 1, name: "exists", kind: "scalar", T: 8 /*ScalarType.BOOL*/ },
+            { no: 2, name: "entry", kind: "message", T: () => cacheentry_1.CacheEntry }
         ]);
     }
     create(value) {
@@ -7209,6 +7139,9 @@ class LookupCacheEntryResponse$Type extends runtime_5.MessageType {
                 case /* bool exists */ 1:
                     message.exists = reader.bool();
                     break;
+                case /* github.actions.results.entities.v1.CacheEntry entry */ 2:
+                    message.entry = cacheentry_1.CacheEntry.internalBinaryRead(reader, reader.uint32(), options, message.entry);
+                    break;
                 default:
                     let u = options.readUnknownField;
                     if (u === "throw")
@@ -7224,6 +7157,9 @@ class LookupCacheEntryResponse$Type extends runtime_5.MessageType {
         /* bool exists = 1; */
         if (message.exists !== false)
             writer.tag(1, runtime_1.WireType.Varint).bool(message.exists);
+        /* github.actions.results.entities.v1.CacheEntry entry = 2; */
+        if (message.entry)
+            cacheentry_1.CacheEntry.internalBinaryWrite(message.entry, writer.tag(2, runtime_1.WireType.LengthDelimited).fork(), options).join();
         let u = options.writeUnknownFields;
         if (u !== false)
             (u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
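LookupCacheEntryResponse similarly gains an optional entry field of the shared CacheEntry type alongside the existing exists flag. A short sketch with the generated types; import paths and values are illustrative:

import { LookupCacheEntryResponse } from './generated/results/api/v1/cache'
import { CacheEntry } from './generated/results/entities/v1/cacheentry'

const lookup = LookupCacheEntryResponse.create({
  exists: true,
  entry: CacheEntry.create({ key: 'node-cache-linux-abc123', version: 'v2' }) // hypothetical values
})
if (lookup.exists && lookup.entry) {
  console.log(`found ${lookup.entry.key} (${lookup.entry.sizeBytes} bytes)`)
}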
@@ -7234,102 +7170,6 @@ class LookupCacheEntryResponse$Type extends runtime_5.MessageType {
  * @generated MessageType for protobuf message github.actions.results.api.v1.LookupCacheEntryResponse
  */
 exports.LookupCacheEntryResponse = new LookupCacheEntryResponse$Type();
-// @generated message type with reflection information, may provide speed optimized methods
-class LookupCacheEntryResponse_CacheEntry$Type extends runtime_5.MessageType {
-    constructor() {
-        super("github.actions.results.api.v1.LookupCacheEntryResponse.CacheEntry", [
-            { no: 1, name: "key", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
-            { no: 2, name: "hash", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
-            { no: 3, name: "size_bytes", kind: "scalar", T: 3 /*ScalarType.INT64*/ },
-            { no: 4, name: "scope", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
-            { no: 5, name: "version", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
-            { no: 6, name: "created_at", kind: "message", T: () => timestamp_1.Timestamp },
-            { no: 7, name: "last_accessed_at", kind: "message", T: () => timestamp_1.Timestamp },
-            { no: 8, name: "expires_at", kind: "message", T: () => timestamp_1.Timestamp }
-        ]);
-    }
-    create(value) {
-        const message = { key: "", hash: "", sizeBytes: "0", scope: "", version: "" };
-        globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
-        if (value !== undefined)
-            (0, runtime_3.reflectionMergePartial)(this, message, value);
-        return message;
-    }
-    internalBinaryRead(reader, length, options, target) {
-        let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
-        while (reader.pos < end) {
-            let [fieldNo, wireType] = reader.tag();
-            switch (fieldNo) {
-                case /* string key */ 1:
-                    message.key = reader.string();
-                    break;
-                case /* string hash */ 2:
-                    message.hash = reader.string();
-                    break;
-                case /* int64 size_bytes */ 3:
-                    message.sizeBytes = reader.int64().toString();
-                    break;
-                case /* string scope */ 4:
-                    message.scope = reader.string();
-                    break;
-                case /* string version */ 5:
-                    message.version = reader.string();
-                    break;
-                case /* google.protobuf.Timestamp created_at */ 6:
-                    message.createdAt = timestamp_1.Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.createdAt);
-                    break;
-                case /* google.protobuf.Timestamp last_accessed_at */ 7:
-                    message.lastAccessedAt = timestamp_1.Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.lastAccessedAt);
-                    break;
-                case /* google.protobuf.Timestamp expires_at */ 8:
-                    message.expiresAt = timestamp_1.Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.expiresAt);
-                    break;
-                default:
-                    let u = options.readUnknownField;
-                    if (u === "throw")
-                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
-                    let d = reader.skip(wireType);
-                    if (u !== false)
-                        (u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
-            }
-        }
-        return message;
-    }
-    internalBinaryWrite(message, writer, options) {
-        /* string key = 1; */
-        if (message.key !== "")
-            writer.tag(1, runtime_1.WireType.LengthDelimited).string(message.key);
-        /* string hash = 2; */
-        if (message.hash !== "")
-            writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.hash);
-        /* int64 size_bytes = 3; */
-        if (message.sizeBytes !== "0")
-            writer.tag(3, runtime_1.WireType.Varint).int64(message.sizeBytes);
-        /* string scope = 4; */
-        if (message.scope !== "")
-            writer.tag(4, runtime_1.WireType.LengthDelimited).string(message.scope);
-        /* string version = 5; */
-        if (message.version !== "")
-            writer.tag(5, runtime_1.WireType.LengthDelimited).string(message.version);
-        /* google.protobuf.Timestamp created_at = 6; */
-        if (message.createdAt)
-            timestamp_1.Timestamp.internalBinaryWrite(message.createdAt, writer.tag(6, runtime_1.WireType.LengthDelimited).fork(), options).join();
-        /* google.protobuf.Timestamp last_accessed_at = 7; */
-        if (message.lastAccessedAt)
-            timestamp_1.Timestamp.internalBinaryWrite(message.lastAccessedAt, writer.tag(7, runtime_1.WireType.LengthDelimited).fork(), options).join();
-        /* google.protobuf.Timestamp expires_at = 8; */
-        if (message.expiresAt)
-            timestamp_1.Timestamp.internalBinaryWrite(message.expiresAt, writer.tag(8, runtime_1.WireType.LengthDelimited).fork(), options).join();
-        let u = options.writeUnknownFields;
-        if (u !== false)
-            (u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
-        return writer;
-    }
-}
-/**
- * @generated MessageType for protobuf message github.actions.results.api.v1.LookupCacheEntryResponse.CacheEntry
- */
-exports.LookupCacheEntryResponse_CacheEntry = new LookupCacheEntryResponse_CacheEntry$Type();
 /**
  * @generated ServiceType for protobuf service github.actions.results.api.v1.CacheService
  */
@@ -7954,6 +7794,119 @@ function handleCacheServiceLookupCacheEntryProtobuf(ctx, service, data, intercep
 
 /***/ }),
 
+/***/ 1309:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.CacheEntry = void 0;
+const runtime_1 = __nccwpck_require__(3503);
+const runtime_2 = __nccwpck_require__(3503);
+const runtime_3 = __nccwpck_require__(3503);
+const runtime_4 = __nccwpck_require__(3503);
+const runtime_5 = __nccwpck_require__(3503);
+const timestamp_1 = __nccwpck_require__(8983);
+// @generated message type with reflection information, may provide speed optimized methods
+class CacheEntry$Type extends runtime_5.MessageType {
+    constructor() {
+        super("github.actions.results.entities.v1.CacheEntry", [
+            { no: 1, name: "key", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
+            { no: 2, name: "hash", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
+            { no: 3, name: "size_bytes", kind: "scalar", T: 3 /*ScalarType.INT64*/ },
+            { no: 4, name: "scope", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
+            { no: 5, name: "version", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
+            { no: 6, name: "created_at", kind: "message", T: () => timestamp_1.Timestamp },
+            { no: 7, name: "last_accessed_at", kind: "message", T: () => timestamp_1.Timestamp },
+            { no: 8, name: "expires_at", kind: "message", T: () => timestamp_1.Timestamp }
+        ]);
+    }
+    create(value) {
+        const message = { key: "", hash: "", sizeBytes: "0", scope: "", version: "" };
+        globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
+        if (value !== undefined)
+            (0, runtime_3.reflectionMergePartial)(this, message, value);
+        return message;
+    }
+    internalBinaryRead(reader, length, options, target) {
+        let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
+        while (reader.pos < end) {
+            let [fieldNo, wireType] = reader.tag();
+            switch (fieldNo) {
+                case /* string key */ 1:
+                    message.key = reader.string();
+                    break;
+                case /* string hash */ 2:
+                    message.hash = reader.string();
+                    break;
+                case /* int64 size_bytes */ 3:
+                    message.sizeBytes = reader.int64().toString();
+                    break;
+                case /* string scope */ 4:
+                    message.scope = reader.string();
+                    break;
+                case /* string version */ 5:
+                    message.version = reader.string();
+                    break;
+                case /* google.protobuf.Timestamp created_at */ 6:
+                    message.createdAt = timestamp_1.Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.createdAt);
+                    break;
+                case /* google.protobuf.Timestamp last_accessed_at */ 7:
+                    message.lastAccessedAt = timestamp_1.Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.lastAccessedAt);
+                    break;
+                case /* google.protobuf.Timestamp expires_at */ 8:
+                    message.expiresAt = timestamp_1.Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.expiresAt);
+                    break;
+                default:
+                    let u = options.readUnknownField;
+                    if (u === "throw")
+                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
+                    let d = reader.skip(wireType);
+                    if (u !== false)
+                        (u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
+            }
+        }
+        return message;
+    }
+    internalBinaryWrite(message, writer, options) {
+        /* string key = 1; */
+        if (message.key !== "")
+            writer.tag(1, runtime_1.WireType.LengthDelimited).string(message.key);
+        /* string hash = 2; */
+        if (message.hash !== "")
+            writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.hash);
+        /* int64 size_bytes = 3; */
+        if (message.sizeBytes !== "0")
+            writer.tag(3, runtime_1.WireType.Varint).int64(message.sizeBytes);
+        /* string scope = 4; */
+        if (message.scope !== "")
+            writer.tag(4, runtime_1.WireType.LengthDelimited).string(message.scope);
+        /* string version = 5; */
+        if (message.version !== "")
+            writer.tag(5, runtime_1.WireType.LengthDelimited).string(message.version);
+        /* google.protobuf.Timestamp created_at = 6; */
+        if (message.createdAt)
+            timestamp_1.Timestamp.internalBinaryWrite(message.createdAt, writer.tag(6, runtime_1.WireType.LengthDelimited).fork(), options).join();
+        /* google.protobuf.Timestamp last_accessed_at = 7; */
+        if (message.lastAccessedAt)
+            timestamp_1.Timestamp.internalBinaryWrite(message.lastAccessedAt, writer.tag(7, runtime_1.WireType.LengthDelimited).fork(), options).join();
+        /* google.protobuf.Timestamp expires_at = 8; */
+        if (message.expiresAt)
+            timestamp_1.Timestamp.internalBinaryWrite(message.expiresAt, writer.tag(8, runtime_1.WireType.LengthDelimited).fork(), options).join();
+        let u = options.writeUnknownFields;
+        if (u !== false)
+            (u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
+        return writer;
+    }
+}
+/**
+ * @generated MessageType for protobuf message github.actions.results.entities.v1.CacheEntry
+ */
+exports.CacheEntry = new CacheEntry$Type();
+//# sourceMappingURL=cacheentry.js.map
+
+/***/ }),
+
 /***/ 2773:
 /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
 
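The newly bundled module 1309 is the protobuf-ts output for results/entities/v1/cacheentry.proto, defining the shared CacheEntry message that the list and lookup responses above now reuse. A small TypeScript sketch of working with it; the import paths are illustrative, and note that size_bytes surfaces as a string because the code is generated with long_type_string:

import { CacheEntry } from './generated/results/entities/v1/cacheentry'
import { Timestamp } from './generated/google/protobuf/timestamp'

const entry = CacheEntry.create({
  key: 'node-cache-linux-abc123',   // hypothetical key
  version: 'v2',
  sizeBytes: '52428800',            // int64 mapped to string (long_type_string)
  createdAt: Timestamp.now()        // protobuf-ts well-known-type helper
})

// Binary round trip via the generated MessageType.
const decoded = CacheEntry.fromBinary(CacheEntry.toBinary(entry))
console.log(decoded.key, decoded.sizeBytes)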
dist/restore/index.js (vendored), 369 changes
(The diff for this file repeats, hunk for hunk, the changes shown above for dist/restore-only/index.js.)
|
|
||||||
if (message.scope !== "")
|
|
||||||
writer.tag(4, runtime_1.WireType.LengthDelimited).string(message.scope);
|
|
||||||
/* string version = 5; */
|
|
||||||
if (message.version !== "")
|
|
||||||
writer.tag(5, runtime_1.WireType.LengthDelimited).string(message.version);
|
|
||||||
/* google.protobuf.Timestamp created_at = 6; */
|
|
||||||
if (message.createdAt)
|
|
||||||
timestamp_1.Timestamp.internalBinaryWrite(message.createdAt, writer.tag(6, runtime_1.WireType.LengthDelimited).fork(), options).join();
|
|
||||||
/* google.protobuf.Timestamp last_accessed_at = 7; */
|
|
||||||
if (message.lastAccessedAt)
|
|
||||||
timestamp_1.Timestamp.internalBinaryWrite(message.lastAccessedAt, writer.tag(7, runtime_1.WireType.LengthDelimited).fork(), options).join();
|
|
||||||
/* google.protobuf.Timestamp expires_at = 8; */
|
|
||||||
if (message.expiresAt)
|
|
||||||
timestamp_1.Timestamp.internalBinaryWrite(message.expiresAt, writer.tag(8, runtime_1.WireType.LengthDelimited).fork(), options).join();
|
|
||||||
let u = options.writeUnknownFields;
|
|
||||||
if (u !== false)
|
|
||||||
(u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
|
|
||||||
return writer;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
/**
|
|
||||||
* @generated MessageType for protobuf message github.actions.results.api.v1.LookupCacheEntryResponse.CacheEntry
|
|
||||||
*/
|
|
||||||
exports.LookupCacheEntryResponse_CacheEntry = new LookupCacheEntryResponse_CacheEntry$Type();
|
|
||||||
/**
|
/**
|
||||||
* @generated ServiceType for protobuf service github.actions.results.api.v1.CacheService
|
* @generated ServiceType for protobuf service github.actions.results.api.v1.CacheService
|
||||||
*/
|
*/
|
||||||
|
@ -7954,6 +7794,119 @@ function handleCacheServiceLookupCacheEntryProtobuf(ctx, service, data, intercep
|
||||||
|
|
||||||
/***/ }),
|
/***/ }),
|
||||||
|
|
||||||
|
/***/ 1309:
|
||||||
|
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
|
||||||
|
|
||||||
|
"use strict";
|
||||||
|
|
||||||
|
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
||||||
|
exports.CacheEntry = void 0;
|
||||||
|
const runtime_1 = __nccwpck_require__(3503);
|
||||||
|
const runtime_2 = __nccwpck_require__(3503);
|
||||||
|
const runtime_3 = __nccwpck_require__(3503);
|
||||||
|
const runtime_4 = __nccwpck_require__(3503);
|
||||||
|
const runtime_5 = __nccwpck_require__(3503);
|
||||||
|
const timestamp_1 = __nccwpck_require__(8983);
|
||||||
|
// @generated message type with reflection information, may provide speed optimized methods
|
||||||
|
class CacheEntry$Type extends runtime_5.MessageType {
|
||||||
|
constructor() {
|
||||||
|
super("github.actions.results.entities.v1.CacheEntry", [
|
||||||
|
{ no: 1, name: "key", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
|
||||||
|
{ no: 2, name: "hash", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
|
||||||
|
{ no: 3, name: "size_bytes", kind: "scalar", T: 3 /*ScalarType.INT64*/ },
|
||||||
|
{ no: 4, name: "scope", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
|
||||||
|
{ no: 5, name: "version", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
|
||||||
|
{ no: 6, name: "created_at", kind: "message", T: () => timestamp_1.Timestamp },
|
||||||
|
{ no: 7, name: "last_accessed_at", kind: "message", T: () => timestamp_1.Timestamp },
|
||||||
|
{ no: 8, name: "expires_at", kind: "message", T: () => timestamp_1.Timestamp }
|
||||||
|
]);
|
||||||
|
}
|
||||||
|
create(value) {
|
||||||
|
const message = { key: "", hash: "", sizeBytes: "0", scope: "", version: "" };
|
||||||
|
globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
|
||||||
|
if (value !== undefined)
|
||||||
|
(0, runtime_3.reflectionMergePartial)(this, message, value);
|
||||||
|
return message;
|
||||||
|
}
|
||||||
|
internalBinaryRead(reader, length, options, target) {
|
||||||
|
let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
|
||||||
|
while (reader.pos < end) {
|
||||||
|
let [fieldNo, wireType] = reader.tag();
|
||||||
|
switch (fieldNo) {
|
||||||
|
case /* string key */ 1:
|
||||||
|
message.key = reader.string();
|
||||||
|
break;
|
||||||
|
case /* string hash */ 2:
|
||||||
|
message.hash = reader.string();
|
||||||
|
break;
|
||||||
|
case /* int64 size_bytes */ 3:
|
||||||
|
message.sizeBytes = reader.int64().toString();
|
||||||
|
break;
|
||||||
|
case /* string scope */ 4:
|
||||||
|
message.scope = reader.string();
|
||||||
|
break;
|
||||||
|
case /* string version */ 5:
|
||||||
|
message.version = reader.string();
|
||||||
|
break;
|
||||||
|
case /* google.protobuf.Timestamp created_at */ 6:
|
||||||
|
message.createdAt = timestamp_1.Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.createdAt);
|
||||||
|
break;
|
||||||
|
case /* google.protobuf.Timestamp last_accessed_at */ 7:
|
||||||
|
message.lastAccessedAt = timestamp_1.Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.lastAccessedAt);
|
||||||
|
break;
|
||||||
|
case /* google.protobuf.Timestamp expires_at */ 8:
|
||||||
|
message.expiresAt = timestamp_1.Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.expiresAt);
|
||||||
|
break;
|
||||||
|
default:
|
||||||
|
let u = options.readUnknownField;
|
||||||
|
if (u === "throw")
|
||||||
|
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
|
||||||
|
let d = reader.skip(wireType);
|
||||||
|
if (u !== false)
|
||||||
|
(u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return message;
|
||||||
|
}
|
||||||
|
internalBinaryWrite(message, writer, options) {
|
||||||
|
/* string key = 1; */
|
||||||
|
if (message.key !== "")
|
||||||
|
writer.tag(1, runtime_1.WireType.LengthDelimited).string(message.key);
|
||||||
|
/* string hash = 2; */
|
||||||
|
if (message.hash !== "")
|
||||||
|
writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.hash);
|
||||||
|
/* int64 size_bytes = 3; */
|
||||||
|
if (message.sizeBytes !== "0")
|
||||||
|
writer.tag(3, runtime_1.WireType.Varint).int64(message.sizeBytes);
|
||||||
|
/* string scope = 4; */
|
||||||
|
if (message.scope !== "")
|
||||||
|
writer.tag(4, runtime_1.WireType.LengthDelimited).string(message.scope);
|
||||||
|
/* string version = 5; */
|
||||||
|
if (message.version !== "")
|
||||||
|
writer.tag(5, runtime_1.WireType.LengthDelimited).string(message.version);
|
||||||
|
/* google.protobuf.Timestamp created_at = 6; */
|
||||||
|
if (message.createdAt)
|
||||||
|
timestamp_1.Timestamp.internalBinaryWrite(message.createdAt, writer.tag(6, runtime_1.WireType.LengthDelimited).fork(), options).join();
|
||||||
|
/* google.protobuf.Timestamp last_accessed_at = 7; */
|
||||||
|
if (message.lastAccessedAt)
|
||||||
|
timestamp_1.Timestamp.internalBinaryWrite(message.lastAccessedAt, writer.tag(7, runtime_1.WireType.LengthDelimited).fork(), options).join();
|
||||||
|
/* google.protobuf.Timestamp expires_at = 8; */
|
||||||
|
if (message.expiresAt)
|
||||||
|
timestamp_1.Timestamp.internalBinaryWrite(message.expiresAt, writer.tag(8, runtime_1.WireType.LengthDelimited).fork(), options).join();
|
||||||
|
let u = options.writeUnknownFields;
|
||||||
|
if (u !== false)
|
||||||
|
(u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
|
||||||
|
return writer;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* @generated MessageType for protobuf message github.actions.results.entities.v1.CacheEntry
|
||||||
|
*/
|
||||||
|
exports.CacheEntry = new CacheEntry$Type();
|
||||||
|
//# sourceMappingURL=cacheentry.js.map
|
||||||
|
|
||||||
|
/***/ }),
|
||||||
|
|
||||||
/***/ 2773:
|
/***/ 2773:
|
||||||
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
|
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
|
||||||
|
|
||||||
|
|
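The new webpack module 1309 added above is the generated protobuf-ts type for the shared github.actions.results.entities.v1.CacheEntry entity that the v2 cache API messages now reference. A minimal sketch of how such a generated type is driven (the import path and field values are assumptions; inside the bundle the import is __nccwpck_require__(1309)):

// Sketch only, not part of the diff: path and values below are hypothetical.
const { CacheEntry } = require('./generated/results/entities/v1/cacheentry'); // assumed path
const entry = CacheEntry.create({
    key: 'node-cache-linux-x64-abc123',
    version: '0d3d1e2f',
    sizeBytes: '1048576', // int64 is surfaced as a string because of the long_type_string option
    scope: 'refs/heads/main'
});
const bytes = CacheEntry.toBinary(entry);     // exercises internalBinaryWrite shown above
const decoded = CacheEntry.fromBinary(bytes); // exercises internalBinaryRead shown above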
369
dist/save-only/index.js
vendored
@@ -5954,8 +5954,9 @@ exports.isFeatureAvailable = isFeatureAvailable;
*/
function restoreCache(paths, primaryKey, restoreKeys, options, enableCrossOsArchive = false) {
return __awaiter(this, void 0, void 0, function* () {
-checkPaths(paths);
const cacheServiceVersion = (0, config_1.getCacheServiceVersion)();
+core.debug(`Cache service version: ${cacheServiceVersion}`);
+checkPaths(paths);
switch (cacheServiceVersion) {
case 'v2':
return yield restoreCacheV2(paths, primaryKey, restoreKeys, options, enableCrossOsArchive);
@@ -6078,12 +6079,13 @@ function restoreCacheV2(paths, primaryKey, restoreKeys, options, enableCrossOsAr
core.info(`Cache hit for: ${request.key}`);
if (options === null || options === void 0 ? void 0 : options.lookupOnly) {
core.info('Lookup only - skipping download');
-return request.key;
+return response.matchedKey;
}
archivePath = path.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod));
core.debug(`Archive path: ${archivePath}`);
core.debug(`Starting download of archive to: ${archivePath}`);
-yield (0, download_cache_1.downloadCacheFile)(response.signedDownloadUrl, archivePath);
+const downloadResponse = yield (0, download_cache_1.downloadCacheFile)(response.signedDownloadUrl, archivePath);
+core.debug(`Download response status: ${downloadResponse._response.status}`);
const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath);
core.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`);
if (core.isDebug()) {
@@ -6091,10 +6093,17 @@ function restoreCacheV2(paths, primaryKey, restoreKeys, options, enableCrossOsAr
}
yield (0, tar_1.extractTar)(archivePath, compressionMethod);
core.info('Cache restored successfully');
-return request.key;
+return response.matchedKey;
}
catch (error) {
-throw new Error(`Failed to restore: ${error.message}`);
+const typedError = error;
+if (typedError.name === ValidationError.name) {
+throw error;
+}
+else {
+// Supress all non-validation cache related errors because caching should be optional
+core.warning(`Failed to restore: ${error.message}`);
+}
}
finally {
try {
@@ -6106,6 +6115,7 @@ function restoreCacheV2(paths, primaryKey, restoreKeys, options, enableCrossOsAr
core.debug(`Failed to delete archive: ${error}`);
}
}
+return undefined;
});
}
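The rewritten catch block above stops restoreCacheV2 from throwing on every failure: only ValidationError is rethrown, anything else is logged as a warning and the function falls through to the new return undefined, so the job sees a cache miss instead of an error. A condensed, self-contained sketch of that control flow (names are illustrative, not the bundle's internals):

// Paraphrase of the hunks above; `downloadAndExtract` and `log` stand in for the real helpers.
async function restoreOutline(downloadAndExtract, response, log = console) {
    try {
        await downloadAndExtract();       // download the signed URL and extract the archive
        return response.matchedKey;       // the key that actually matched, not necessarily the requested one
    }
    catch (error) {
        if (error.name === 'ValidationError') {
            throw error;                  // invalid inputs should still fail the step
        }
        log.warn(`Failed to restore: ${error.message}`); // caching stays optional
    }
    finally {
        // the real code deletes the temporary archive here, best effort
    }
    return undefined;                     // callers treat undefined as a cache miss
}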
/**
@@ -6269,7 +6279,15 @@ function saveCacheV2(paths, key, options, enableCrossOsArchive = false) {
}
catch (error) {
const typedError = error;
-core.warning(`Failed to save: ${typedError.message}`);
+if (typedError.name === ValidationError.name) {
+throw error;
+}
+else if (typedError.name === ReserveCacheError.name) {
+core.info(`Failed to save: ${typedError.message}`);
+}
+else {
+core.warning(`Failed to save: ${typedError.message}`);
+}
}
finally {
// Try to delete the archive to save space
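saveCacheV2 gets a matching three-way policy: ValidationError is rethrown, ReserveCacheError (typically raised when another run has already reserved the same key) is reported at info level, and everything else becomes a warning. The policy in isolation, matching the errors by name as the bundle does (a sketch, with `log` standing in for @actions/core):

// Sketch of the save-side error policy shown above.
function reportSaveError(typedError, log) {
    if (typedError.name === 'ValidationError') {
        throw typedError;                                  // bad key or paths: fail the step
    }
    if (typedError.name === 'ReserveCacheError') {
        log.info(`Failed to save: ${typedError.message}`); // cache key already reserved elsewhere
        return;
    }
    log.warning(`Failed to save: ${typedError.message}`);  // any other failure is non-fatal
}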
@@ -6436,7 +6454,7 @@ exports.Timestamp = new Timestamp$Type();
"use strict";

Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports.CacheService = exports.LookupCacheEntryResponse_CacheEntry = exports.LookupCacheEntryResponse = exports.LookupCacheEntryRequest = exports.ListCacheEntriesResponse_CacheEntry = exports.ListCacheEntriesResponse = exports.ListCacheEntriesRequest = exports.DeleteCacheEntryResponse = exports.DeleteCacheEntryRequest = exports.GetCacheEntryDownloadURLResponse = exports.GetCacheEntryDownloadURLRequest = exports.FinalizeCacheEntryUploadResponse = exports.FinalizeCacheEntryUploadRequest = exports.CreateCacheEntryResponse = exports.CreateCacheEntryRequest = void 0;
+exports.CacheService = exports.LookupCacheEntryResponse = exports.LookupCacheEntryRequest = exports.ListCacheEntriesResponse = exports.ListCacheEntriesRequest = exports.DeleteCacheEntryResponse = exports.DeleteCacheEntryRequest = exports.GetCacheEntryDownloadURLResponse = exports.GetCacheEntryDownloadURLRequest = exports.FinalizeCacheEntryUploadResponse = exports.FinalizeCacheEntryUploadRequest = exports.CreateCacheEntryResponse = exports.CreateCacheEntryRequest = void 0;
// @generated by protobuf-ts 2.9.1 with parameter long_type_string,client_none,generate_dependencies
// @generated from protobuf file "results/api/v1/cache.proto" (package "github.actions.results.api.v1", syntax proto3)
// tslint:disable
@@ -6446,7 +6464,7 @@ const runtime_2 = __nccwpck_require__(3503);
const runtime_3 = __nccwpck_require__(3503);
const runtime_4 = __nccwpck_require__(3503);
const runtime_5 = __nccwpck_require__(3503);
-const timestamp_1 = __nccwpck_require__(8983);
+const cacheentry_1 = __nccwpck_require__(1309);
const cachemetadata_1 = __nccwpck_require__(2773);
// @generated message type with reflection information, may provide speed optimized methods
class CreateCacheEntryRequest$Type extends runtime_5.MessageType {
@@ -6758,11 +6776,12 @@ class GetCacheEntryDownloadURLResponse$Type extends runtime_5.MessageType {
constructor() {
super("github.actions.results.api.v1.GetCacheEntryDownloadURLResponse", [
{ no: 1, name: "ok", kind: "scalar", T: 8 /*ScalarType.BOOL*/ },
-{ no: 2, name: "signed_download_url", kind: "scalar", T: 9 /*ScalarType.STRING*/ }
+{ no: 2, name: "signed_download_url", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
+{ no: 3, name: "matched_key", kind: "scalar", T: 9 /*ScalarType.STRING*/ }
]);
}
create(value) {
-const message = { ok: false, signedDownloadUrl: "" };
+const message = { ok: false, signedDownloadUrl: "", matchedKey: "" };
globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined)
(0, runtime_3.reflectionMergePartial)(this, message, value);
@@ -6779,6 +6798,9 @@ class GetCacheEntryDownloadURLResponse$Type extends runtime_5.MessageType {
case /* string signed_download_url */ 2:
message.signedDownloadUrl = reader.string();
break;
+case /* string matched_key */ 3:
+message.matchedKey = reader.string();
+break;
default:
let u = options.readUnknownField;
if (u === "throw")
@@ -6797,6 +6819,9 @@ class GetCacheEntryDownloadURLResponse$Type extends runtime_5.MessageType {
/* string signed_download_url = 2; */
if (message.signedDownloadUrl !== "")
writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.signedDownloadUrl);
+/* string matched_key = 3; */
+if (message.matchedKey !== "")
+writer.tag(3, runtime_1.WireType.LengthDelimited).string(message.matchedKey);
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
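GetCacheEntryDownloadURLResponse gains a matched_key string (field 3), and restoreCacheV2 now returns response.matchedKey instead of echoing the requested key, so callers can tell whether the primary key or one of the restore keys produced the hit. A hedged sketch of a caller using it (the Twirp client method and request shape are assumptions, not verbatim from this diff):

// Illustrative only: `twirpClient.GetCacheEntryDownloadURL` and its request fields are assumed.
async function resolveHit(twirpClient, log, primaryKey, restoreKeys, version) {
    const response = await twirpClient.GetCacheEntryDownloadURL({ key: primaryKey, restoreKeys, version });
    if (!response.ok) {
        return undefined;                                    // no entry matched any key
    }
    if (response.matchedKey === primaryKey) {
        log.info(`Exact cache hit: ${response.matchedKey}`);
    }
    else {
        log.info(`Restored from fallback key: ${response.matchedKey}`); // a restore key matched instead
    }
    return response.matchedKey;
}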
@@ -6980,7 +7005,7 @@ exports.ListCacheEntriesRequest = new ListCacheEntriesRequest$Type();
class ListCacheEntriesResponse$Type extends runtime_5.MessageType {
constructor() {
super("github.actions.results.api.v1.ListCacheEntriesResponse", [
-{ no: 1, name: "entries", kind: "message", repeat: 1 /*RepeatType.PACKED*/, T: () => exports.ListCacheEntriesResponse_CacheEntry }
+{ no: 1, name: "entries", kind: "message", repeat: 1 /*RepeatType.PACKED*/, T: () => cacheentry_1.CacheEntry }
]);
}
create(value) {
@@ -6995,8 +7020,8 @@ class ListCacheEntriesResponse$Type extends runtime_5.MessageType {
while (reader.pos < end) {
let [fieldNo, wireType] = reader.tag();
switch (fieldNo) {
-case /* repeated github.actions.results.api.v1.ListCacheEntriesResponse.CacheEntry entries */ 1:
+case /* repeated github.actions.results.entities.v1.CacheEntry entries */ 1:
-message.entries.push(exports.ListCacheEntriesResponse_CacheEntry.internalBinaryRead(reader, reader.uint32(), options));
+message.entries.push(cacheentry_1.CacheEntry.internalBinaryRead(reader, reader.uint32(), options));
break;
default:
let u = options.readUnknownField;
@@ -7010,9 +7035,9 @@ class ListCacheEntriesResponse$Type extends runtime_5.MessageType {
return message;
}
internalBinaryWrite(message, writer, options) {
-/* repeated github.actions.results.api.v1.ListCacheEntriesResponse.CacheEntry entries = 1; */
+/* repeated github.actions.results.entities.v1.CacheEntry entries = 1; */
for (let i = 0; i < message.entries.length; i++)
-exports.ListCacheEntriesResponse_CacheEntry.internalBinaryWrite(message.entries[i], writer.tag(1, runtime_1.WireType.LengthDelimited).fork(), options).join();
+cacheentry_1.CacheEntry.internalBinaryWrite(message.entries[i], writer.tag(1, runtime_1.WireType.LengthDelimited).fork(), options).join();
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
@@ -7024,102 +7049,6 @@ class ListCacheEntriesResponse$Type extends runtime_5.MessageType {
*/
exports.ListCacheEntriesResponse = new ListCacheEntriesResponse$Type();
// @generated message type with reflection information, may provide speed optimized methods
-class ListCacheEntriesResponse_CacheEntry$Type extends runtime_5.MessageType {
-constructor() {
-super("github.actions.results.api.v1.ListCacheEntriesResponse.CacheEntry", [
-{ no: 1, name: "key", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
-{ no: 2, name: "hash", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
-{ no: 3, name: "size_bytes", kind: "scalar", T: 3 /*ScalarType.INT64*/ },
-{ no: 4, name: "scope", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
-{ no: 5, name: "version", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
-{ no: 6, name: "created_at", kind: "message", T: () => timestamp_1.Timestamp },
-{ no: 7, name: "last_accessed_at", kind: "message", T: () => timestamp_1.Timestamp },
-{ no: 8, name: "expires_at", kind: "message", T: () => timestamp_1.Timestamp }
-]);
-}
-create(value) {
-const message = { key: "", hash: "", sizeBytes: "0", scope: "", version: "" };
-globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
-if (value !== undefined)
-(0, runtime_3.reflectionMergePartial)(this, message, value);
-return message;
-}
-internalBinaryRead(reader, length, options, target) {
-let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
-while (reader.pos < end) {
-let [fieldNo, wireType] = reader.tag();
-switch (fieldNo) {
-case /* string key */ 1:
-message.key = reader.string();
-break;
-case /* string hash */ 2:
-message.hash = reader.string();
-break;
-case /* int64 size_bytes */ 3:
-message.sizeBytes = reader.int64().toString();
-break;
-case /* string scope */ 4:
-message.scope = reader.string();
-break;
-case /* string version */ 5:
-message.version = reader.string();
-break;
-case /* google.protobuf.Timestamp created_at */ 6:
-message.createdAt = timestamp_1.Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.createdAt);
-break;
-case /* google.protobuf.Timestamp last_accessed_at */ 7:
-message.lastAccessedAt = timestamp_1.Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.lastAccessedAt);
-break;
-case /* google.protobuf.Timestamp expires_at */ 8:
-message.expiresAt = timestamp_1.Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.expiresAt);
-break;
-default:
-let u = options.readUnknownField;
-if (u === "throw")
-throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
-let d = reader.skip(wireType);
-if (u !== false)
-(u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
-}
-}
-return message;
-}
-internalBinaryWrite(message, writer, options) {
-/* string key = 1; */
-if (message.key !== "")
-writer.tag(1, runtime_1.WireType.LengthDelimited).string(message.key);
-/* string hash = 2; */
-if (message.hash !== "")
-writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.hash);
-/* int64 size_bytes = 3; */
-if (message.sizeBytes !== "0")
-writer.tag(3, runtime_1.WireType.Varint).int64(message.sizeBytes);
-/* string scope = 4; */
-if (message.scope !== "")
-writer.tag(4, runtime_1.WireType.LengthDelimited).string(message.scope);
-/* string version = 5; */
-if (message.version !== "")
-writer.tag(5, runtime_1.WireType.LengthDelimited).string(message.version);
-/* google.protobuf.Timestamp created_at = 6; */
-if (message.createdAt)
-timestamp_1.Timestamp.internalBinaryWrite(message.createdAt, writer.tag(6, runtime_1.WireType.LengthDelimited).fork(), options).join();
-/* google.protobuf.Timestamp last_accessed_at = 7; */
-if (message.lastAccessedAt)
-timestamp_1.Timestamp.internalBinaryWrite(message.lastAccessedAt, writer.tag(7, runtime_1.WireType.LengthDelimited).fork(), options).join();
-/* google.protobuf.Timestamp expires_at = 8; */
-if (message.expiresAt)
-timestamp_1.Timestamp.internalBinaryWrite(message.expiresAt, writer.tag(8, runtime_1.WireType.LengthDelimited).fork(), options).join();
-let u = options.writeUnknownFields;
-if (u !== false)
-(u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
-return writer;
-}
-}
-/**
-* @generated MessageType for protobuf message github.actions.results.api.v1.ListCacheEntriesResponse.CacheEntry
-*/
-exports.ListCacheEntriesResponse_CacheEntry = new ListCacheEntriesResponse_CacheEntry$Type();
-// @generated message type with reflection information, may provide speed optimized methods
class LookupCacheEntryRequest$Type extends runtime_5.MessageType {
constructor() {
super("github.actions.results.api.v1.LookupCacheEntryRequest", [
@@ -7191,7 +7120,8 @@ exports.LookupCacheEntryRequest = new LookupCacheEntryRequest$Type();
class LookupCacheEntryResponse$Type extends runtime_5.MessageType {
constructor() {
super("github.actions.results.api.v1.LookupCacheEntryResponse", [
-{ no: 1, name: "exists", kind: "scalar", T: 8 /*ScalarType.BOOL*/ }
+{ no: 1, name: "exists", kind: "scalar", T: 8 /*ScalarType.BOOL*/ },
+{ no: 2, name: "entry", kind: "message", T: () => cacheentry_1.CacheEntry }
]);
}
create(value) {
@@ -7209,6 +7139,9 @@ class LookupCacheEntryResponse$Type extends runtime_5.MessageType {
case /* bool exists */ 1:
message.exists = reader.bool();
break;
+case /* github.actions.results.entities.v1.CacheEntry entry */ 2:
+message.entry = cacheentry_1.CacheEntry.internalBinaryRead(reader, reader.uint32(), options, message.entry);
+break;
default:
let u = options.readUnknownField;
if (u === "throw")
@@ -7224,6 +7157,9 @@ class LookupCacheEntryResponse$Type extends runtime_5.MessageType {
/* bool exists = 1; */
if (message.exists !== false)
writer.tag(1, runtime_1.WireType.Varint).bool(message.exists);
+/* github.actions.results.entities.v1.CacheEntry entry = 2; */
+if (message.entry)
+cacheentry_1.CacheEntry.internalBinaryWrite(message.entry, writer.tag(2, runtime_1.WireType.LengthDelimited).fork(), options).join();
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
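LookupCacheEntryResponse now carries the full shared entity next to the exists flag (entry, field 2) instead of a nested copy of the message. A small sketch of consuming a lookup result (the client method and request fields are assumptions; the entry fields come from the CacheEntry type in module 1309):

// Illustrative only: `twirpClient.LookupCacheEntry` and its request shape are assumed.
async function logLookup(twirpClient, log, key, version) {
    const lookup = await twirpClient.LookupCacheEntry({ key, version });
    if (lookup.exists && lookup.entry) {
        // entry.sizeBytes is a string (int64 with long_type_string); the *_at fields are Timestamp messages
        log.info(`Found ${lookup.entry.key}: ${lookup.entry.sizeBytes} bytes, scope ${lookup.entry.scope}`);
    }
    else {
        log.info(`No cache entry found for ${key}`);
    }
}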
@@ -7234,102 +7170,6 @@ class LookupCacheEntryResponse$Type extends runtime_5.MessageType {
* @generated MessageType for protobuf message github.actions.results.api.v1.LookupCacheEntryResponse
*/
exports.LookupCacheEntryResponse = new LookupCacheEntryResponse$Type();
-// @generated message type with reflection information, may provide speed optimized methods
-class LookupCacheEntryResponse_CacheEntry$Type extends runtime_5.MessageType {
-constructor() {
-super("github.actions.results.api.v1.LookupCacheEntryResponse.CacheEntry", [
-{ no: 1, name: "key", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
-{ no: 2, name: "hash", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
-{ no: 3, name: "size_bytes", kind: "scalar", T: 3 /*ScalarType.INT64*/ },
-{ no: 4, name: "scope", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
-{ no: 5, name: "version", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
-{ no: 6, name: "created_at", kind: "message", T: () => timestamp_1.Timestamp },
-{ no: 7, name: "last_accessed_at", kind: "message", T: () => timestamp_1.Timestamp },
-{ no: 8, name: "expires_at", kind: "message", T: () => timestamp_1.Timestamp }
-]);
-}
-create(value) {
-const message = { key: "", hash: "", sizeBytes: "0", scope: "", version: "" };
-globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
-if (value !== undefined)
-(0, runtime_3.reflectionMergePartial)(this, message, value);
-return message;
-}
-internalBinaryRead(reader, length, options, target) {
-let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
-while (reader.pos < end) {
-let [fieldNo, wireType] = reader.tag();
-switch (fieldNo) {
-case /* string key */ 1:
-message.key = reader.string();
-break;
-case /* string hash */ 2:
-message.hash = reader.string();
-break;
-case /* int64 size_bytes */ 3:
-message.sizeBytes = reader.int64().toString();
-break;
-case /* string scope */ 4:
-message.scope = reader.string();
-break;
-case /* string version */ 5:
-message.version = reader.string();
-break;
-case /* google.protobuf.Timestamp created_at */ 6:
-message.createdAt = timestamp_1.Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.createdAt);
-break;
-case /* google.protobuf.Timestamp last_accessed_at */ 7:
-message.lastAccessedAt = timestamp_1.Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.lastAccessedAt);
-break;
-case /* google.protobuf.Timestamp expires_at */ 8:
-message.expiresAt = timestamp_1.Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.expiresAt);
-break;
-default:
-let u = options.readUnknownField;
-if (u === "throw")
-throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
-let d = reader.skip(wireType);
-if (u !== false)
-(u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
-}
-}
-return message;
-}
-internalBinaryWrite(message, writer, options) {
-/* string key = 1; */
-if (message.key !== "")
-writer.tag(1, runtime_1.WireType.LengthDelimited).string(message.key);
-/* string hash = 2; */
-if (message.hash !== "")
-writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.hash);
-/* int64 size_bytes = 3; */
-if (message.sizeBytes !== "0")
-writer.tag(3, runtime_1.WireType.Varint).int64(message.sizeBytes);
-/* string scope = 4; */
-if (message.scope !== "")
-writer.tag(4, runtime_1.WireType.LengthDelimited).string(message.scope);
-/* string version = 5; */
-if (message.version !== "")
-writer.tag(5, runtime_1.WireType.LengthDelimited).string(message.version);
-/* google.protobuf.Timestamp created_at = 6; */
-if (message.createdAt)
-timestamp_1.Timestamp.internalBinaryWrite(message.createdAt, writer.tag(6, runtime_1.WireType.LengthDelimited).fork(), options).join();
-/* google.protobuf.Timestamp last_accessed_at = 7; */
-if (message.lastAccessedAt)
-timestamp_1.Timestamp.internalBinaryWrite(message.lastAccessedAt, writer.tag(7, runtime_1.WireType.LengthDelimited).fork(), options).join();
-/* google.protobuf.Timestamp expires_at = 8; */
-if (message.expiresAt)
-timestamp_1.Timestamp.internalBinaryWrite(message.expiresAt, writer.tag(8, runtime_1.WireType.LengthDelimited).fork(), options).join();
-let u = options.writeUnknownFields;
-if (u !== false)
-(u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
-return writer;
-}
-}
-/**
-* @generated MessageType for protobuf message github.actions.results.api.v1.LookupCacheEntryResponse.CacheEntry
-*/
-exports.LookupCacheEntryResponse_CacheEntry = new LookupCacheEntryResponse_CacheEntry$Type();
/**
* @generated ServiceType for protobuf service github.actions.results.api.v1.CacheService
*/
@@ -7954,6 +7794,119 @@ function handleCacheServiceLookupCacheEntryProtobuf(ctx, service, data, intercep

/***/ }),

+/***/ 1309:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.CacheEntry = void 0;
+const runtime_1 = __nccwpck_require__(3503);
+const runtime_2 = __nccwpck_require__(3503);
+const runtime_3 = __nccwpck_require__(3503);
+const runtime_4 = __nccwpck_require__(3503);
+const runtime_5 = __nccwpck_require__(3503);
+const timestamp_1 = __nccwpck_require__(8983);
+// @generated message type with reflection information, may provide speed optimized methods
+class CacheEntry$Type extends runtime_5.MessageType {
+constructor() {
+super("github.actions.results.entities.v1.CacheEntry", [
+{ no: 1, name: "key", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
+{ no: 2, name: "hash", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
+{ no: 3, name: "size_bytes", kind: "scalar", T: 3 /*ScalarType.INT64*/ },
+{ no: 4, name: "scope", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
+{ no: 5, name: "version", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
+{ no: 6, name: "created_at", kind: "message", T: () => timestamp_1.Timestamp },
+{ no: 7, name: "last_accessed_at", kind: "message", T: () => timestamp_1.Timestamp },
+{ no: 8, name: "expires_at", kind: "message", T: () => timestamp_1.Timestamp }
+]);
+}
+create(value) {
+const message = { key: "", hash: "", sizeBytes: "0", scope: "", version: "" };
+globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
+if (value !== undefined)
+(0, runtime_3.reflectionMergePartial)(this, message, value);
+return message;
+}
+internalBinaryRead(reader, length, options, target) {
+let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
+while (reader.pos < end) {
+let [fieldNo, wireType] = reader.tag();
+switch (fieldNo) {
+case /* string key */ 1:
+message.key = reader.string();
+break;
+case /* string hash */ 2:
+message.hash = reader.string();
+break;
+case /* int64 size_bytes */ 3:
+message.sizeBytes = reader.int64().toString();
+break;
+case /* string scope */ 4:
+message.scope = reader.string();
+break;
+case /* string version */ 5:
+message.version = reader.string();
+break;
+case /* google.protobuf.Timestamp created_at */ 6:
+message.createdAt = timestamp_1.Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.createdAt);
+break;
+case /* google.protobuf.Timestamp last_accessed_at */ 7:
+message.lastAccessedAt = timestamp_1.Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.lastAccessedAt);
+break;
+case /* google.protobuf.Timestamp expires_at */ 8:
+message.expiresAt = timestamp_1.Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.expiresAt);
+break;
+default:
+let u = options.readUnknownField;
+if (u === "throw")
+throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
+let d = reader.skip(wireType);
+if (u !== false)
+(u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
+}
+}
+return message;
+}
+internalBinaryWrite(message, writer, options) {
+/* string key = 1; */
+if (message.key !== "")
+writer.tag(1, runtime_1.WireType.LengthDelimited).string(message.key);
+/* string hash = 2; */
+if (message.hash !== "")
+writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.hash);
+/* int64 size_bytes = 3; */
+if (message.sizeBytes !== "0")
+writer.tag(3, runtime_1.WireType.Varint).int64(message.sizeBytes);
+/* string scope = 4; */
+if (message.scope !== "")
+writer.tag(4, runtime_1.WireType.LengthDelimited).string(message.scope);
+/* string version = 5; */
+if (message.version !== "")
+writer.tag(5, runtime_1.WireType.LengthDelimited).string(message.version);
+/* google.protobuf.Timestamp created_at = 6; */
+if (message.createdAt)
+timestamp_1.Timestamp.internalBinaryWrite(message.createdAt, writer.tag(6, runtime_1.WireType.LengthDelimited).fork(), options).join();
+/* google.protobuf.Timestamp last_accessed_at = 7; */
+if (message.lastAccessedAt)
+timestamp_1.Timestamp.internalBinaryWrite(message.lastAccessedAt, writer.tag(7, runtime_1.WireType.LengthDelimited).fork(), options).join();
+/* google.protobuf.Timestamp expires_at = 8; */
+if (message.expiresAt)
+timestamp_1.Timestamp.internalBinaryWrite(message.expiresAt, writer.tag(8, runtime_1.WireType.LengthDelimited).fork(), options).join();
+let u = options.writeUnknownFields;
+if (u !== false)
+(u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
+return writer;
+}
+}
+/**
+* @generated MessageType for protobuf message github.actions.results.entities.v1.CacheEntry
+*/
+exports.CacheEntry = new CacheEntry$Type();
+//# sourceMappingURL=cacheentry.js.map
+
+/***/ }),

/***/ 2773:
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {

369
dist/save/index.js
vendored
@ -5954,8 +5954,9 @@ exports.isFeatureAvailable = isFeatureAvailable;
|
||||||
*/
|
*/
|
||||||
function restoreCache(paths, primaryKey, restoreKeys, options, enableCrossOsArchive = false) {
|
function restoreCache(paths, primaryKey, restoreKeys, options, enableCrossOsArchive = false) {
|
||||||
return __awaiter(this, void 0, void 0, function* () {
|
return __awaiter(this, void 0, void 0, function* () {
|
||||||
checkPaths(paths);
|
|
||||||
const cacheServiceVersion = (0, config_1.getCacheServiceVersion)();
|
const cacheServiceVersion = (0, config_1.getCacheServiceVersion)();
|
||||||
|
core.debug(`Cache service version: ${cacheServiceVersion}`);
|
||||||
|
checkPaths(paths);
|
||||||
switch (cacheServiceVersion) {
|
switch (cacheServiceVersion) {
|
||||||
case 'v2':
|
case 'v2':
|
||||||
return yield restoreCacheV2(paths, primaryKey, restoreKeys, options, enableCrossOsArchive);
|
return yield restoreCacheV2(paths, primaryKey, restoreKeys, options, enableCrossOsArchive);
|
||||||
|
@ -6078,12 +6079,13 @@ function restoreCacheV2(paths, primaryKey, restoreKeys, options, enableCrossOsAr
|
||||||
core.info(`Cache hit for: ${request.key}`);
|
core.info(`Cache hit for: ${request.key}`);
|
||||||
if (options === null || options === void 0 ? void 0 : options.lookupOnly) {
|
if (options === null || options === void 0 ? void 0 : options.lookupOnly) {
|
||||||
core.info('Lookup only - skipping download');
|
core.info('Lookup only - skipping download');
|
||||||
return request.key;
|
return response.matchedKey;
|
||||||
}
|
}
|
||||||
archivePath = path.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod));
|
archivePath = path.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod));
|
||||||
core.debug(`Archive path: ${archivePath}`);
|
core.debug(`Archive path: ${archivePath}`);
|
||||||
core.debug(`Starting download of archive to: ${archivePath}`);
|
core.debug(`Starting download of archive to: ${archivePath}`);
|
||||||
yield (0, download_cache_1.downloadCacheFile)(response.signedDownloadUrl, archivePath);
|
const downloadResponse = yield (0, download_cache_1.downloadCacheFile)(response.signedDownloadUrl, archivePath);
|
||||||
|
core.debug(`Download response status: ${downloadResponse._response.status}`);
|
||||||
const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath);
|
const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath);
|
||||||
core.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`);
|
core.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`);
|
||||||
if (core.isDebug()) {
|
if (core.isDebug()) {
|
||||||
|
@ -6091,10 +6093,17 @@ function restoreCacheV2(paths, primaryKey, restoreKeys, options, enableCrossOsAr
|
||||||
}
|
}
|
||||||
yield (0, tar_1.extractTar)(archivePath, compressionMethod);
|
yield (0, tar_1.extractTar)(archivePath, compressionMethod);
|
||||||
core.info('Cache restored successfully');
|
core.info('Cache restored successfully');
|
||||||
return request.key;
|
return response.matchedKey;
|
||||||
}
|
}
|
||||||
catch (error) {
|
catch (error) {
|
||||||
throw new Error(`Failed to restore: ${error.message}`);
|
const typedError = error;
|
||||||
|
if (typedError.name === ValidationError.name) {
|
||||||
|
throw error;
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
// Supress all non-validation cache related errors because caching should be optional
|
||||||
|
core.warning(`Failed to restore: ${error.message}`);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
finally {
|
finally {
|
||||||
try {
|
try {
|
||||||
|
@ -6106,6 +6115,7 @@ function restoreCacheV2(paths, primaryKey, restoreKeys, options, enableCrossOsAr
|
||||||
core.debug(`Failed to delete archive: ${error}`);
|
core.debug(`Failed to delete archive: ${error}`);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
return undefined;
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
/**
|
/**
|
||||||
|
@ -6269,7 +6279,15 @@ function saveCacheV2(paths, key, options, enableCrossOsArchive = false) {
|
||||||
}
|
}
|
||||||
catch (error) {
|
catch (error) {
|
||||||
const typedError = error;
|
const typedError = error;
|
||||||
core.warning(`Failed to save: ${typedError.message}`);
|
if (typedError.name === ValidationError.name) {
|
||||||
|
throw error;
|
||||||
|
}
|
||||||
|
else if (typedError.name === ReserveCacheError.name) {
|
||||||
|
core.info(`Failed to save: ${typedError.message}`);
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
core.warning(`Failed to save: ${typedError.message}`);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
finally {
|
finally {
|
||||||
// Try to delete the archive to save space
|
// Try to delete the archive to save space
|
||||||
|
@ -6436,7 +6454,7 @@ exports.Timestamp = new Timestamp$Type();
|
||||||
"use strict";
|
"use strict";
|
||||||
|
|
||||||
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
||||||
exports.CacheService = exports.LookupCacheEntryResponse_CacheEntry = exports.LookupCacheEntryResponse = exports.LookupCacheEntryRequest = exports.ListCacheEntriesResponse_CacheEntry = exports.ListCacheEntriesResponse = exports.ListCacheEntriesRequest = exports.DeleteCacheEntryResponse = exports.DeleteCacheEntryRequest = exports.GetCacheEntryDownloadURLResponse = exports.GetCacheEntryDownloadURLRequest = exports.FinalizeCacheEntryUploadResponse = exports.FinalizeCacheEntryUploadRequest = exports.CreateCacheEntryResponse = exports.CreateCacheEntryRequest = void 0;
|
exports.CacheService = exports.LookupCacheEntryResponse = exports.LookupCacheEntryRequest = exports.ListCacheEntriesResponse = exports.ListCacheEntriesRequest = exports.DeleteCacheEntryResponse = exports.DeleteCacheEntryRequest = exports.GetCacheEntryDownloadURLResponse = exports.GetCacheEntryDownloadURLRequest = exports.FinalizeCacheEntryUploadResponse = exports.FinalizeCacheEntryUploadRequest = exports.CreateCacheEntryResponse = exports.CreateCacheEntryRequest = void 0;
|
||||||
// @generated by protobuf-ts 2.9.1 with parameter long_type_string,client_none,generate_dependencies
|
// @generated by protobuf-ts 2.9.1 with parameter long_type_string,client_none,generate_dependencies
|
||||||
// @generated from protobuf file "results/api/v1/cache.proto" (package "github.actions.results.api.v1", syntax proto3)
|
// @generated from protobuf file "results/api/v1/cache.proto" (package "github.actions.results.api.v1", syntax proto3)
|
||||||
// tslint:disable
|
// tslint:disable
|
||||||
|
@ -6446,7 +6464,7 @@ const runtime_2 = __nccwpck_require__(3503);
|
||||||
const runtime_3 = __nccwpck_require__(3503);
|
const runtime_3 = __nccwpck_require__(3503);
|
||||||
const runtime_4 = __nccwpck_require__(3503);
|
const runtime_4 = __nccwpck_require__(3503);
|
||||||
const runtime_5 = __nccwpck_require__(3503);
|
const runtime_5 = __nccwpck_require__(3503);
|
||||||
const timestamp_1 = __nccwpck_require__(8983);
|
const cacheentry_1 = __nccwpck_require__(1309);
|
||||||
const cachemetadata_1 = __nccwpck_require__(2773);
|
const cachemetadata_1 = __nccwpck_require__(2773);
|
||||||
// @generated message type with reflection information, may provide speed optimized methods
|
// @generated message type with reflection information, may provide speed optimized methods
|
||||||
class CreateCacheEntryRequest$Type extends runtime_5.MessageType {
|
class CreateCacheEntryRequest$Type extends runtime_5.MessageType {
|
||||||
|
@ -6758,11 +6776,12 @@ class GetCacheEntryDownloadURLResponse$Type extends runtime_5.MessageType {
|
||||||
constructor() {
|
constructor() {
|
||||||
super("github.actions.results.api.v1.GetCacheEntryDownloadURLResponse", [
|
super("github.actions.results.api.v1.GetCacheEntryDownloadURLResponse", [
|
||||||
{ no: 1, name: "ok", kind: "scalar", T: 8 /*ScalarType.BOOL*/ },
|
{ no: 1, name: "ok", kind: "scalar", T: 8 /*ScalarType.BOOL*/ },
|
||||||
{ no: 2, name: "signed_download_url", kind: "scalar", T: 9 /*ScalarType.STRING*/ }
|
{ no: 2, name: "signed_download_url", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
|
||||||
|
{ no: 3, name: "matched_key", kind: "scalar", T: 9 /*ScalarType.STRING*/ }
|
||||||
]);
|
]);
|
||||||
}
|
}
|
||||||
create(value) {
|
create(value) {
|
||||||
const message = { ok: false, signedDownloadUrl: "" };
|
const message = { ok: false, signedDownloadUrl: "", matchedKey: "" };
|
||||||
globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
|
globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
|
||||||
if (value !== undefined)
|
if (value !== undefined)
|
||||||
(0, runtime_3.reflectionMergePartial)(this, message, value);
|
(0, runtime_3.reflectionMergePartial)(this, message, value);
|
||||||
|
@ -6779,6 +6798,9 @@ class GetCacheEntryDownloadURLResponse$Type extends runtime_5.MessageType {
|
||||||
case /* string signed_download_url */ 2:
|
case /* string signed_download_url */ 2:
|
||||||
message.signedDownloadUrl = reader.string();
|
message.signedDownloadUrl = reader.string();
|
||||||
break;
|
break;
|
||||||
|
case /* string matched_key */ 3:
|
||||||
|
message.matchedKey = reader.string();
|
||||||
|
break;
|
||||||
default:
|
default:
|
||||||
let u = options.readUnknownField;
|
let u = options.readUnknownField;
|
||||||
if (u === "throw")
|
if (u === "throw")
|
||||||
|
@ -6797,6 +6819,9 @@ class GetCacheEntryDownloadURLResponse$Type extends runtime_5.MessageType {
|
||||||
/* string signed_download_url = 2; */
|
/* string signed_download_url = 2; */
|
||||||
if (message.signedDownloadUrl !== "")
|
if (message.signedDownloadUrl !== "")
|
||||||
writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.signedDownloadUrl);
|
writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.signedDownloadUrl);
|
||||||
|
/* string matched_key = 3; */
|
||||||
|
if (message.matchedKey !== "")
|
||||||
|
writer.tag(3, runtime_1.WireType.LengthDelimited).string(message.matchedKey);
|
||||||
let u = options.writeUnknownFields;
|
let u = options.writeUnknownFields;
|
||||||
if (u !== false)
|
if (u !== false)
|
||||||
(u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
|
(u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
|
||||||
|
@@ -6980,7 +7005,7 @@ exports.ListCacheEntriesRequest = new ListCacheEntriesRequest$Type();
 class ListCacheEntriesResponse$Type extends runtime_5.MessageType {
     constructor() {
         super("github.actions.results.api.v1.ListCacheEntriesResponse", [
-            { no: 1, name: "entries", kind: "message", repeat: 1 /*RepeatType.PACKED*/, T: () => exports.ListCacheEntriesResponse_CacheEntry }
+            { no: 1, name: "entries", kind: "message", repeat: 1 /*RepeatType.PACKED*/, T: () => cacheentry_1.CacheEntry }
         ]);
     }
     create(value) {
@@ -6995,8 +7020,8 @@ class ListCacheEntriesResponse$Type extends runtime_5.MessageType {
         while (reader.pos < end) {
             let [fieldNo, wireType] = reader.tag();
             switch (fieldNo) {
-                case /* repeated github.actions.results.api.v1.ListCacheEntriesResponse.CacheEntry entries */ 1:
-                    message.entries.push(exports.ListCacheEntriesResponse_CacheEntry.internalBinaryRead(reader, reader.uint32(), options));
+                case /* repeated github.actions.results.entities.v1.CacheEntry entries */ 1:
+                    message.entries.push(cacheentry_1.CacheEntry.internalBinaryRead(reader, reader.uint32(), options));
                     break;
                 default:
                     let u = options.readUnknownField;
@@ -7010,9 +7035,9 @@ class ListCacheEntriesResponse$Type extends runtime_5.MessageType {
         return message;
     }
     internalBinaryWrite(message, writer, options) {
-        /* repeated github.actions.results.api.v1.ListCacheEntriesResponse.CacheEntry entries = 1; */
+        /* repeated github.actions.results.entities.v1.CacheEntry entries = 1; */
         for (let i = 0; i < message.entries.length; i++)
-            exports.ListCacheEntriesResponse_CacheEntry.internalBinaryWrite(message.entries[i], writer.tag(1, runtime_1.WireType.LengthDelimited).fork(), options).join();
+            cacheentry_1.CacheEntry.internalBinaryWrite(message.entries[i], writer.tag(1, runtime_1.WireType.LengthDelimited).fork(), options).join();
         let u = options.writeUnknownFields;
         if (u !== false)
             (u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
@@ -7024,102 +7049,6 @@ class ListCacheEntriesResponse$Type extends runtime_5.MessageType {
  */
 exports.ListCacheEntriesResponse = new ListCacheEntriesResponse$Type();
 // @generated message type with reflection information, may provide speed optimized methods
-class ListCacheEntriesResponse_CacheEntry$Type extends runtime_5.MessageType {
-    constructor() {
-        super("github.actions.results.api.v1.ListCacheEntriesResponse.CacheEntry", [
-            { no: 1, name: "key", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
-            { no: 2, name: "hash", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
-            { no: 3, name: "size_bytes", kind: "scalar", T: 3 /*ScalarType.INT64*/ },
-            { no: 4, name: "scope", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
-            { no: 5, name: "version", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
-            { no: 6, name: "created_at", kind: "message", T: () => timestamp_1.Timestamp },
-            { no: 7, name: "last_accessed_at", kind: "message", T: () => timestamp_1.Timestamp },
-            { no: 8, name: "expires_at", kind: "message", T: () => timestamp_1.Timestamp }
-        ]);
-    }
-    create(value) {
-        const message = { key: "", hash: "", sizeBytes: "0", scope: "", version: "" };
-        globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
-        if (value !== undefined)
-            (0, runtime_3.reflectionMergePartial)(this, message, value);
-        return message;
-    }
-    internalBinaryRead(reader, length, options, target) {
-        let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
-        while (reader.pos < end) {
-            let [fieldNo, wireType] = reader.tag();
-            switch (fieldNo) {
-                case /* string key */ 1:
-                    message.key = reader.string();
-                    break;
-                case /* string hash */ 2:
-                    message.hash = reader.string();
-                    break;
-                case /* int64 size_bytes */ 3:
-                    message.sizeBytes = reader.int64().toString();
-                    break;
-                case /* string scope */ 4:
-                    message.scope = reader.string();
-                    break;
-                case /* string version */ 5:
-                    message.version = reader.string();
-                    break;
-                case /* google.protobuf.Timestamp created_at */ 6:
-                    message.createdAt = timestamp_1.Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.createdAt);
-                    break;
-                case /* google.protobuf.Timestamp last_accessed_at */ 7:
-                    message.lastAccessedAt = timestamp_1.Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.lastAccessedAt);
-                    break;
-                case /* google.protobuf.Timestamp expires_at */ 8:
-                    message.expiresAt = timestamp_1.Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.expiresAt);
-                    break;
-                default:
-                    let u = options.readUnknownField;
-                    if (u === "throw")
-                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
-                    let d = reader.skip(wireType);
-                    if (u !== false)
-                        (u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
-            }
-        }
-        return message;
-    }
-    internalBinaryWrite(message, writer, options) {
-        /* string key = 1; */
-        if (message.key !== "")
-            writer.tag(1, runtime_1.WireType.LengthDelimited).string(message.key);
-        /* string hash = 2; */
-        if (message.hash !== "")
-            writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.hash);
-        /* int64 size_bytes = 3; */
-        if (message.sizeBytes !== "0")
-            writer.tag(3, runtime_1.WireType.Varint).int64(message.sizeBytes);
-        /* string scope = 4; */
-        if (message.scope !== "")
-            writer.tag(4, runtime_1.WireType.LengthDelimited).string(message.scope);
-        /* string version = 5; */
-        if (message.version !== "")
-            writer.tag(5, runtime_1.WireType.LengthDelimited).string(message.version);
-        /* google.protobuf.Timestamp created_at = 6; */
-        if (message.createdAt)
-            timestamp_1.Timestamp.internalBinaryWrite(message.createdAt, writer.tag(6, runtime_1.WireType.LengthDelimited).fork(), options).join();
-        /* google.protobuf.Timestamp last_accessed_at = 7; */
-        if (message.lastAccessedAt)
-            timestamp_1.Timestamp.internalBinaryWrite(message.lastAccessedAt, writer.tag(7, runtime_1.WireType.LengthDelimited).fork(), options).join();
-        /* google.protobuf.Timestamp expires_at = 8; */
-        if (message.expiresAt)
-            timestamp_1.Timestamp.internalBinaryWrite(message.expiresAt, writer.tag(8, runtime_1.WireType.LengthDelimited).fork(), options).join();
-        let u = options.writeUnknownFields;
-        if (u !== false)
-            (u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
-        return writer;
-    }
-}
-/**
- * @generated MessageType for protobuf message github.actions.results.api.v1.ListCacheEntriesResponse.CacheEntry
- */
-exports.ListCacheEntriesResponse_CacheEntry = new ListCacheEntriesResponse_CacheEntry$Type();
-// @generated message type with reflection information, may provide speed optimized methods
 class LookupCacheEntryRequest$Type extends runtime_5.MessageType {
     constructor() {
         super("github.actions.results.api.v1.LookupCacheEntryRequest", [
@@ -7191,7 +7120,8 @@ exports.LookupCacheEntryRequest = new LookupCacheEntryRequest$Type();
 class LookupCacheEntryResponse$Type extends runtime_5.MessageType {
     constructor() {
         super("github.actions.results.api.v1.LookupCacheEntryResponse", [
-            { no: 1, name: "exists", kind: "scalar", T: 8 /*ScalarType.BOOL*/ }
+            { no: 1, name: "exists", kind: "scalar", T: 8 /*ScalarType.BOOL*/ },
+            { no: 2, name: "entry", kind: "message", T: () => cacheentry_1.CacheEntry }
         ]);
     }
     create(value) {
@@ -7209,6 +7139,9 @@ class LookupCacheEntryResponse$Type extends runtime_5.MessageType {
                 case /* bool exists */ 1:
                     message.exists = reader.bool();
                     break;
+                case /* github.actions.results.entities.v1.CacheEntry entry */ 2:
+                    message.entry = cacheentry_1.CacheEntry.internalBinaryRead(reader, reader.uint32(), options, message.entry);
+                    break;
                 default:
                     let u = options.readUnknownField;
                     if (u === "throw")
@@ -7224,6 +7157,9 @@ class LookupCacheEntryResponse$Type extends runtime_5.MessageType {
         /* bool exists = 1; */
         if (message.exists !== false)
             writer.tag(1, runtime_1.WireType.Varint).bool(message.exists);
+        /* github.actions.results.entities.v1.CacheEntry entry = 2; */
+        if (message.entry)
+            cacheentry_1.CacheEntry.internalBinaryWrite(message.entry, writer.tag(2, runtime_1.WireType.LengthDelimited).fork(), options).join();
         let u = options.writeUnknownFields;
         if (u !== false)
             (u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
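Note: LookupCacheEntryResponse now embeds the shared github.actions.results.entities.v1.CacheEntry in its optional entry field instead of a nested message. A hedged round-trip sketch, assuming LookupCacheEntryResponse and CacheEntry are the generated type objects from this bundle; the field values are illustrative placeholders.

// Hypothetical usage; the key, version, and size values are placeholders, not taken from the diff.
const lookup = LookupCacheEntryResponse.create({
    exists: true,
    entry: CacheEntry.create({ key: "node-cache-linux-abc123", version: "v2", sizeBytes: "1048576" })
});
const decoded = LookupCacheEntryResponse.fromBinary(LookupCacheEntryResponse.toBinary(lookup));
// decoded.exists === true; decoded.entry.key === "node-cache-linux-abc123"; entry stays undefined when field 2 is absent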
@@ -7234,102 +7170,6 @@ class LookupCacheEntryResponse$Type extends runtime_5.MessageType {
  * @generated MessageType for protobuf message github.actions.results.api.v1.LookupCacheEntryResponse
  */
 exports.LookupCacheEntryResponse = new LookupCacheEntryResponse$Type();
-// @generated message type with reflection information, may provide speed optimized methods
-class LookupCacheEntryResponse_CacheEntry$Type extends runtime_5.MessageType {
-    constructor() {
-        super("github.actions.results.api.v1.LookupCacheEntryResponse.CacheEntry", [
-            { no: 1, name: "key", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
-            { no: 2, name: "hash", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
-            { no: 3, name: "size_bytes", kind: "scalar", T: 3 /*ScalarType.INT64*/ },
-            { no: 4, name: "scope", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
-            { no: 5, name: "version", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
-            { no: 6, name: "created_at", kind: "message", T: () => timestamp_1.Timestamp },
-            { no: 7, name: "last_accessed_at", kind: "message", T: () => timestamp_1.Timestamp },
-            { no: 8, name: "expires_at", kind: "message", T: () => timestamp_1.Timestamp }
-        ]);
-    }
-    create(value) {
-        const message = { key: "", hash: "", sizeBytes: "0", scope: "", version: "" };
-        globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
-        if (value !== undefined)
-            (0, runtime_3.reflectionMergePartial)(this, message, value);
-        return message;
-    }
-    internalBinaryRead(reader, length, options, target) {
-        let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
-        while (reader.pos < end) {
-            let [fieldNo, wireType] = reader.tag();
-            switch (fieldNo) {
-                case /* string key */ 1:
-                    message.key = reader.string();
-                    break;
-                case /* string hash */ 2:
-                    message.hash = reader.string();
-                    break;
-                case /* int64 size_bytes */ 3:
-                    message.sizeBytes = reader.int64().toString();
-                    break;
-                case /* string scope */ 4:
-                    message.scope = reader.string();
-                    break;
-                case /* string version */ 5:
-                    message.version = reader.string();
-                    break;
-                case /* google.protobuf.Timestamp created_at */ 6:
-                    message.createdAt = timestamp_1.Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.createdAt);
-                    break;
-                case /* google.protobuf.Timestamp last_accessed_at */ 7:
-                    message.lastAccessedAt = timestamp_1.Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.lastAccessedAt);
-                    break;
-                case /* google.protobuf.Timestamp expires_at */ 8:
-                    message.expiresAt = timestamp_1.Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.expiresAt);
-                    break;
-                default:
-                    let u = options.readUnknownField;
-                    if (u === "throw")
-                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
-                    let d = reader.skip(wireType);
-                    if (u !== false)
-                        (u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
-            }
-        }
-        return message;
-    }
-    internalBinaryWrite(message, writer, options) {
-        /* string key = 1; */
-        if (message.key !== "")
-            writer.tag(1, runtime_1.WireType.LengthDelimited).string(message.key);
-        /* string hash = 2; */
-        if (message.hash !== "")
-            writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.hash);
-        /* int64 size_bytes = 3; */
-        if (message.sizeBytes !== "0")
-            writer.tag(3, runtime_1.WireType.Varint).int64(message.sizeBytes);
-        /* string scope = 4; */
-        if (message.scope !== "")
-            writer.tag(4, runtime_1.WireType.LengthDelimited).string(message.scope);
-        /* string version = 5; */
-        if (message.version !== "")
-            writer.tag(5, runtime_1.WireType.LengthDelimited).string(message.version);
-        /* google.protobuf.Timestamp created_at = 6; */
-        if (message.createdAt)
-            timestamp_1.Timestamp.internalBinaryWrite(message.createdAt, writer.tag(6, runtime_1.WireType.LengthDelimited).fork(), options).join();
-        /* google.protobuf.Timestamp last_accessed_at = 7; */
-        if (message.lastAccessedAt)
-            timestamp_1.Timestamp.internalBinaryWrite(message.lastAccessedAt, writer.tag(7, runtime_1.WireType.LengthDelimited).fork(), options).join();
-        /* google.protobuf.Timestamp expires_at = 8; */
-        if (message.expiresAt)
-            timestamp_1.Timestamp.internalBinaryWrite(message.expiresAt, writer.tag(8, runtime_1.WireType.LengthDelimited).fork(), options).join();
-        let u = options.writeUnknownFields;
-        if (u !== false)
-            (u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
-        return writer;
-    }
-}
-/**
- * @generated MessageType for protobuf message github.actions.results.api.v1.LookupCacheEntryResponse.CacheEntry
- */
-exports.LookupCacheEntryResponse_CacheEntry = new LookupCacheEntryResponse_CacheEntry$Type();
 /**
  * @generated ServiceType for protobuf service github.actions.results.api.v1.CacheService
  */
@@ -7954,6 +7794,119 @@ function handleCacheServiceLookupCacheEntryProtobuf(ctx, service, data, interceptors) {
 
 /***/ }),
 
+/***/ 1309:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.CacheEntry = void 0;
+const runtime_1 = __nccwpck_require__(3503);
+const runtime_2 = __nccwpck_require__(3503);
+const runtime_3 = __nccwpck_require__(3503);
+const runtime_4 = __nccwpck_require__(3503);
+const runtime_5 = __nccwpck_require__(3503);
+const timestamp_1 = __nccwpck_require__(8983);
+// @generated message type with reflection information, may provide speed optimized methods
+class CacheEntry$Type extends runtime_5.MessageType {
+    constructor() {
+        super("github.actions.results.entities.v1.CacheEntry", [
+            { no: 1, name: "key", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
+            { no: 2, name: "hash", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
+            { no: 3, name: "size_bytes", kind: "scalar", T: 3 /*ScalarType.INT64*/ },
+            { no: 4, name: "scope", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
+            { no: 5, name: "version", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
+            { no: 6, name: "created_at", kind: "message", T: () => timestamp_1.Timestamp },
+            { no: 7, name: "last_accessed_at", kind: "message", T: () => timestamp_1.Timestamp },
+            { no: 8, name: "expires_at", kind: "message", T: () => timestamp_1.Timestamp }
+        ]);
+    }
+    create(value) {
+        const message = { key: "", hash: "", sizeBytes: "0", scope: "", version: "" };
+        globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
+        if (value !== undefined)
+            (0, runtime_3.reflectionMergePartial)(this, message, value);
+        return message;
+    }
+    internalBinaryRead(reader, length, options, target) {
+        let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
+        while (reader.pos < end) {
+            let [fieldNo, wireType] = reader.tag();
+            switch (fieldNo) {
+                case /* string key */ 1:
+                    message.key = reader.string();
+                    break;
+                case /* string hash */ 2:
+                    message.hash = reader.string();
+                    break;
+                case /* int64 size_bytes */ 3:
+                    message.sizeBytes = reader.int64().toString();
+                    break;
+                case /* string scope */ 4:
+                    message.scope = reader.string();
+                    break;
+                case /* string version */ 5:
+                    message.version = reader.string();
+                    break;
+                case /* google.protobuf.Timestamp created_at */ 6:
+                    message.createdAt = timestamp_1.Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.createdAt);
+                    break;
+                case /* google.protobuf.Timestamp last_accessed_at */ 7:
+                    message.lastAccessedAt = timestamp_1.Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.lastAccessedAt);
+                    break;
+                case /* google.protobuf.Timestamp expires_at */ 8:
+                    message.expiresAt = timestamp_1.Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.expiresAt);
+                    break;
+                default:
+                    let u = options.readUnknownField;
+                    if (u === "throw")
+                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
+                    let d = reader.skip(wireType);
+                    if (u !== false)
+                        (u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
+            }
+        }
+        return message;
+    }
+    internalBinaryWrite(message, writer, options) {
+        /* string key = 1; */
+        if (message.key !== "")
+            writer.tag(1, runtime_1.WireType.LengthDelimited).string(message.key);
+        /* string hash = 2; */
+        if (message.hash !== "")
+            writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.hash);
+        /* int64 size_bytes = 3; */
+        if (message.sizeBytes !== "0")
+            writer.tag(3, runtime_1.WireType.Varint).int64(message.sizeBytes);
+        /* string scope = 4; */
+        if (message.scope !== "")
+            writer.tag(4, runtime_1.WireType.LengthDelimited).string(message.scope);
+        /* string version = 5; */
+        if (message.version !== "")
+            writer.tag(5, runtime_1.WireType.LengthDelimited).string(message.version);
+        /* google.protobuf.Timestamp created_at = 6; */
+        if (message.createdAt)
+            timestamp_1.Timestamp.internalBinaryWrite(message.createdAt, writer.tag(6, runtime_1.WireType.LengthDelimited).fork(), options).join();
+        /* google.protobuf.Timestamp last_accessed_at = 7; */
+        if (message.lastAccessedAt)
+            timestamp_1.Timestamp.internalBinaryWrite(message.lastAccessedAt, writer.tag(7, runtime_1.WireType.LengthDelimited).fork(), options).join();
+        /* google.protobuf.Timestamp expires_at = 8; */
+        if (message.expiresAt)
+            timestamp_1.Timestamp.internalBinaryWrite(message.expiresAt, writer.tag(8, runtime_1.WireType.LengthDelimited).fork(), options).join();
+        let u = options.writeUnknownFields;
+        if (u !== false)
+            (u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
+        return writer;
+    }
+}
+/**
+ * @generated MessageType for protobuf message github.actions.results.entities.v1.CacheEntry
+ */
+exports.CacheEntry = new CacheEntry$Type();
+//# sourceMappingURL=cacheentry.js.map
+
+/***/ }),
+
 /***/ 2773:
 /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
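Note: the hunk above adds the generated binding for the shared github.actions.results.entities.v1.CacheEntry entity as webpack module 1309 in this bundle. A minimal, hypothetical sketch of using it through the bundle's own require binding; the field values are placeholders.

// Hypothetical usage of the shared entity type via the module id shown in the diff above.
const cacheentry_1 = __nccwpck_require__(1309);
const entry = cacheentry_1.CacheEntry.create({
    key: "node-cache-linux-abc123",   // placeholder key
    version: "v2",                    // placeholder version
    sizeBytes: "1048576"              // int64 carried as a decimal string, per the generated create() defaults
});
const copy = cacheentry_1.CacheEntry.fromBinary(cacheentry_1.CacheEntry.toBinary(entry));
// copy.key === entry.key; the timestamp fields stay undefined until explicitly set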