Concurrency take 2
This commit is contained in:
parent ba6476e454
commit 289c5d2518
3 changed files with 80 additions and 119 deletions
65 dist/restore/index.js (vendored)
@@ -1615,53 +1615,40 @@ function commitCache(restClient, cacheId, filesize) {
         return yield restClient.create(`caches/${cacheId.toString()}`, commitCacheRequest, requestOptions);
     });
 }
-function parallelAwait(queue, concurrency) {
-    var _a;
+function uploadFile(restClient, cacheId, archivePath) {
     return __awaiter(this, void 0, void 0, function* () {
-        const workQueue = queue.reverse();
-        let completedWork = [];
-        let entries = queue.length;
-        while (entries > 0) {
-            if (entries < concurrency) {
-                completedWork.push(yield Promise.all(workQueue));
-            }
-            else {
-                let promises = [];
-                let i;
-                for (i = 0; i < concurrency; i++) {
-                    promises.push((_a = workQueue.pop(), (_a !== null && _a !== void 0 ? _a : Promise.resolve())));
-                }
-                completedWork.push(yield Promise.all(promises));
-            }
-        }
-        return completedWork;
+        // Upload Chunks
+        const fileSize = fs.statSync(archivePath).size;
+        const resourceUrl = getCacheApiUrl() + "caches/" + cacheId.toString();
+        const responses = [];
+        const fd = fs.openSync(archivePath, "r"); // Use the same fd for serial reads? Will this work for parallel too?
+        const concurrency = 4; // # of HTTP requests in parallel
+        const threads = new Array(concurrency);
+        core.debug("Awaiting all uploads");
+        let offset = 0;
+        Promise.all(threads.map(() => __awaiter(this, void 0, void 0, function* () {
+            while (offset < fileSize) {
+                const chunkSize = offset + MAX_CHUNK_SIZE > fileSize ? fileSize - offset : MAX_CHUNK_SIZE;
+                const start = offset;
+                const end = offset + chunkSize - 1;
+                offset += MAX_CHUNK_SIZE; // Do this before losing thread during await?
+                const chunk = fs.createReadStream(archivePath, { fd, start, end, autoClose: false });
+                responses.push(yield uploadChunk(restClient, resourceUrl, chunk, start, end));
+            }
+        })));
+        fs.closeSync(fd);
+        const failedResponse = responses.find(x => !isSuccessStatusCode(x.statusCode));
+        if (failedResponse) {
+            throw new Error(`Cache service responded with ${failedResponse.statusCode} during chunk upload.`);
+        }
+        return;
     });
 }
 function saveCache(cacheId, archivePath) {
     return __awaiter(this, void 0, void 0, function* () {
         const restClient = createRestClient();
-        core.debug("Uploading chunks");
-        // Upload Chunks
-        const fileSize = fs.statSync(archivePath).size;
-        const resourceUrl = getCacheApiUrl() + "caches/" + cacheId.toString();
-        const uploads = [];
-        const fd = fs.openSync(archivePath, "r"); // Use the same fd for serial reads? Will this work for parallel too?
-        let offset = 0;
-        while (offset < fileSize) {
-            const chunkSize = offset + MAX_CHUNK_SIZE > fileSize ? fileSize - offset : MAX_CHUNK_SIZE;
-            const end = offset + chunkSize - 1;
-            const chunk = fs.createReadStream(archivePath, { fd, start: offset, end, autoClose: false });
-            uploads.push(uploadChunk(restClient, resourceUrl, chunk, offset, end));
-            offset += MAX_CHUNK_SIZE;
-        }
-        core.debug("Awaiting all uploads");
-        const responses = yield parallelAwait(uploads, 4);
-        fs.closeSync(fd);
-        //const responses = await Promise.all(uploads);
-        const failedResponse = responses.find(x => !isSuccessStatusCode(x.statusCode));
-        if (failedResponse) {
-            throw new Error(`Cache service responded with ${failedResponse.statusCode} during chunk upload.`);
-        }
+        core.debug("Upload cache");
+        yield uploadFile(restClient, cacheId, archivePath);
         core.debug("Commiting cache");
         // Commit Cache
         const cacheSize = utils.getArchiveFileSize(archivePath);
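The hunk above replaces the batched parallelAwait helper with four cooperating async tasks that pull byte ranges off a shared offset. Note that the Promise.all(threads.map(...)) is never awaited, so fs.closeSync(fd) and the failure check can run before any chunk has finished uploading. A minimal TypeScript sketch of the same shared-offset worker pool with the pool awaited might look like the following; uploadChunk here is a hypothetical stub standing in for the real HTTP call, and the MAX_CHUNK_SIZE value is assumed, not taken from the repo:

import * as fs from "fs";

const MAX_CHUNK_SIZE = 4 * 1024 * 1024; // assumed chunk size, for illustration only

// Hypothetical stand-in for the real uploadChunk: PATCH one byte range
// to the cache service and resolve with the HTTP status code.
async function uploadChunk(
    resourceUrl: string,
    chunk: NodeJS.ReadableStream,
    start: number,
    end: number
): Promise<number> {
    return 204;
}

async function uploadFileSketch(resourceUrl: string, archivePath: string): Promise<void> {
    const fileSize = fs.statSync(archivePath).size;
    const fd = fs.openSync(archivePath, "r"); // one fd shared by all readers
    const concurrency = 4; // # of HTTP requests in parallel
    const statusCodes: number[] = [];
    let offset = 0;
    try {
        // Each "thread" is an async task; all of them share `offset`.
        // The range is claimed synchronously (read + increment) before the
        // first await, so no two tasks ever upload the same bytes.
        await Promise.all(
            new Array(concurrency).fill(0).map(async () => {
                while (offset < fileSize) {
                    const start = offset;
                    const end = Math.min(offset + MAX_CHUNK_SIZE, fileSize) - 1;
                    offset += MAX_CHUNK_SIZE; // claim the range before yielding
                    const chunk = fs.createReadStream(archivePath, { fd, start, end, autoClose: false });
                    statusCodes.push(await uploadChunk(resourceUrl, chunk, start, end));
                }
            })
        );
    } finally {
        fs.closeSync(fd); // close only after every worker has drained
    }
    const failed = statusCodes.find(code => code < 200 || code >= 300);
    if (failed) {
        throw new Error(`Cache service responded with ${failed} during chunk upload.`);
    }
}

Awaiting the pool before fs.closeSync(fd) also avoids the bad-file-descriptor errors the committed version would risk if a read stream touched the descriptor after it was closed.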
65 dist/save/index.js (vendored)

@@ -1615,53 +1615,40 @@ function commitCache(restClient, cacheId, filesize) {
         return yield restClient.create(`caches/${cacheId.toString()}`, commitCacheRequest, requestOptions);
     });
 }
-function parallelAwait(queue, concurrency) {
-    var _a;
+function uploadFile(restClient, cacheId, archivePath) {
     return __awaiter(this, void 0, void 0, function* () {
-        const workQueue = queue.reverse();
-        let completedWork = [];
-        let entries = queue.length;
-        while (entries > 0) {
-            if (entries < concurrency) {
-                completedWork.push(yield Promise.all(workQueue));
-            }
-            else {
-                let promises = [];
-                let i;
-                for (i = 0; i < concurrency; i++) {
-                    promises.push((_a = workQueue.pop(), (_a !== null && _a !== void 0 ? _a : Promise.resolve())));
-                }
-                completedWork.push(yield Promise.all(promises));
-            }
-        }
-        return completedWork;
+        // Upload Chunks
+        const fileSize = fs.statSync(archivePath).size;
+        const resourceUrl = getCacheApiUrl() + "caches/" + cacheId.toString();
+        const responses = [];
+        const fd = fs.openSync(archivePath, "r"); // Use the same fd for serial reads? Will this work for parallel too?
+        const concurrency = 4; // # of HTTP requests in parallel
+        const threads = new Array(concurrency);
+        core.debug("Awaiting all uploads");
+        let offset = 0;
+        Promise.all(threads.map(() => __awaiter(this, void 0, void 0, function* () {
+            while (offset < fileSize) {
+                const chunkSize = offset + MAX_CHUNK_SIZE > fileSize ? fileSize - offset : MAX_CHUNK_SIZE;
+                const start = offset;
+                const end = offset + chunkSize - 1;
+                offset += MAX_CHUNK_SIZE; // Do this before losing thread during await?
+                const chunk = fs.createReadStream(archivePath, { fd, start, end, autoClose: false });
+                responses.push(yield uploadChunk(restClient, resourceUrl, chunk, start, end));
+            }
+        })));
+        fs.closeSync(fd);
+        const failedResponse = responses.find(x => !isSuccessStatusCode(x.statusCode));
+        if (failedResponse) {
+            throw new Error(`Cache service responded with ${failedResponse.statusCode} during chunk upload.`);
+        }
+        return;
     });
 }
 function saveCache(cacheId, archivePath) {
     return __awaiter(this, void 0, void 0, function* () {
         const restClient = createRestClient();
-        core.debug("Uploading chunks");
-        // Upload Chunks
-        const fileSize = fs.statSync(archivePath).size;
-        const resourceUrl = getCacheApiUrl() + "caches/" + cacheId.toString();
-        const uploads = [];
-        const fd = fs.openSync(archivePath, "r"); // Use the same fd for serial reads? Will this work for parallel too?
-        let offset = 0;
-        while (offset < fileSize) {
-            const chunkSize = offset + MAX_CHUNK_SIZE > fileSize ? fileSize - offset : MAX_CHUNK_SIZE;
-            const end = offset + chunkSize - 1;
-            const chunk = fs.createReadStream(archivePath, { fd, start: offset, end, autoClose: false });
-            uploads.push(uploadChunk(restClient, resourceUrl, chunk, offset, end));
-            offset += MAX_CHUNK_SIZE;
-        }
-        core.debug("Awaiting all uploads");
-        const responses = yield parallelAwait(uploads, 4);
-        fs.closeSync(fd);
-        //const responses = await Promise.all(uploads);
-        const failedResponse = responses.find(x => !isSuccessStatusCode(x.statusCode));
-        if (failedResponse) {
-            throw new Error(`Cache service responded with ${failedResponse.statusCode} during chunk upload.`);
-        }
+        core.debug("Upload cache");
+        yield uploadFile(restClient, cacheId, archivePath);
         core.debug("Commiting cache");
         // Commit Cache
        const cacheSize = utils.getArchiveFileSize(archivePath);
src/cacheHttpClient.ts

@@ -174,55 +174,30 @@ async function commitCache(
     );
 }
 
-async function parallelAwait(queue: Promise<any>[], concurrency: number): Promise<any[]> {
-    const workQueue = queue.reverse();
-    let completedWork: any[] = [];
-    let entries = queue.length;
-    while (entries > 0) {
-        if (entries < concurrency) {
-            completedWork.push(await Promise.all(workQueue));
-        } else {
-            let promises: Promise<any>[] = [];
-            let i: number;
-            for (i = 0; i < concurrency; i++) {
-                promises.push(workQueue.pop() ?? Promise.resolve());
-            }
-            completedWork.push(await Promise.all(promises));
-        }
-    }
-
-    return completedWork;
-}
-
-export async function saveCache(
-    cacheId: number,
-    archivePath: string
-): Promise<void> {
-    const restClient = createRestClient();
-
-    core.debug("Uploading chunks");
+async function uploadFile(restClient: RestClient, cacheId: number, archivePath: string): Promise<void> {
     // Upload Chunks
     const fileSize = fs.statSync(archivePath).size;
     const resourceUrl = getCacheApiUrl() + "caches/" + cacheId.toString();
-    const uploads: Promise<IRestResponse<void>>[] = [];
+    const responses: IRestResponse<void>[] = [];
 
     const fd = fs.openSync(archivePath, "r"); // Use the same fd for serial reads? Will this work for parallel too?
 
+    const concurrency = 4; // # of HTTP requests in parallel
+    const threads = new Array(concurrency);
+    core.debug("Awaiting all uploads");
     let offset = 0;
+    Promise.all(threads.map(async () => { // This might not work cause something something closures
         while (offset < fileSize) {
             const chunkSize = offset + MAX_CHUNK_SIZE > fileSize ? fileSize - offset : MAX_CHUNK_SIZE;
+            const start = offset;
             const end = offset + chunkSize - 1;
-        const chunk = fs.createReadStream(archivePath, { fd, start: offset, end, autoClose: false });
-        uploads.push(uploadChunk(restClient, resourceUrl, chunk, offset, end));
-        offset += MAX_CHUNK_SIZE;
+            offset += MAX_CHUNK_SIZE; // Do this before losing thread during await?
+            const chunk = fs.createReadStream(archivePath, { fd, start, end, autoClose: false });
+            responses.push(await uploadChunk(restClient, resourceUrl, chunk, start, end));
         }
+    }));
 
-    core.debug("Awaiting all uploads");
-    const responses = await parallelAwait(uploads, 4);
     fs.closeSync(fd);
 
-    //const responses = await Promise.all(uploads);
-
     const failedResponse = responses.find(
         x => !isSuccessStatusCode(x.statusCode)
     );

@@ -232,6 +207,18 @@ export async function saveCache(
         );
     }
 
+    return;
+}
+
+export async function saveCache(
+    cacheId: number,
+    archivePath: string
+): Promise<void> {
+    const restClient = createRestClient();
+
+    core.debug("Upload cache");
+    await uploadFile(restClient, cacheId, archivePath);
+
     core.debug("Commiting cache");
     // Commit Cache
     const cacheSize = utils.getArchiveFileSize(archivePath);
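The "// This might not work cause something something closures" comment in the hunk above flags the real question in this design: can four async closures safely share one let offset? They can, provided each claim (read plus increment) happens before the loop body's first await, which is why the commit moves offset += MAX_CHUNK_SIZE ahead of the upload. A self-contained micro-demo of that run-to-completion guarantee (hypothetical, not from the repo):

// Four async tasks share one counter; each claims a value synchronously
// before awaiting, so every value 0..9 is claimed exactly once even
// though the tasks interleave at the awaits.
async function demo(): Promise<void> {
    let next = 0;
    const claimed: number[] = [];
    await Promise.all(
        new Array(4).fill(0).map(async () => {
            while (next < 10) {
                const mine = next;
                next += 1; // claim before yielding to the event loop
                await new Promise(resolve => setTimeout(resolve, Math.random() * 10));
                claimed.push(mine);
            }
        })
    );
    console.log(claimed.sort((a, b) => a - b)); // [0, 1, ..., 9]
}

demo();

If the increment instead followed the await, two tasks could read the same next before either bumped it, and the same byte range would be uploaded twice.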