
Make uploads serial

commit 83f86c103f
parent 64668e22dd
Josh Gross, 2019-12-17 15:43:50 -05:00

3 changed files with 22 additions and 17 deletions

dist/restore/index.js (vendored, 11 changes)

@@ -1623,18 +1623,19 @@ function saveCache(cacheId, archivePath) {
         const fileSize = fs.statSync(archivePath).size;
         const resourceUrl = getCacheApiUrl() + "caches/" + cacheId.toString();
         const uploads = [];
+        const fd = fs.openSync(archivePath, "r"); // Use the same fd for serial reads? Will this work for parallel too?
         let offset = 0;
         while (offset < fileSize) {
             const chunkSize = offset + MAX_CHUNK_SIZE > fileSize ? fileSize - offset : MAX_CHUNK_SIZE;
             const end = offset + chunkSize - 1;
-            core.debug(`Offset: ${offset}`);
-            const chunk = fs.createReadStream(archivePath, { start: offset, end });
-            uploads.push(uploadChunk(restClient, resourceUrl, chunk, offset, end));
+            const chunk = fs.createReadStream(archivePath, { fd, start: offset, end });
+            uploads.push(yield uploadChunk(restClient, resourceUrl, chunk, offset, end)); // Making this serial
             offset += MAX_CHUNK_SIZE;
         }
+        fs.closeSync(fd);
         core.debug("Awaiting all uploads");
-        const responses = yield Promise.all(uploads);
-        const failedResponse = responses.find(x => !isSuccessStatusCode(x.statusCode));
+        //const responses = await Promise.all(uploads);
+        const failedResponse = uploads.find(x => !isSuccessStatusCode(x.statusCode));
         if (failedResponse) {
             throw new Error(`Cache service responded with ${failedResponse.statusCode} during chunk upload.`);
         }

dist/save/index.js (vendored, 11 changes)

@@ -1623,18 +1623,19 @@ function saveCache(cacheId, archivePath) {
         const fileSize = fs.statSync(archivePath).size;
         const resourceUrl = getCacheApiUrl() + "caches/" + cacheId.toString();
         const uploads = [];
+        const fd = fs.openSync(archivePath, "r"); // Use the same fd for serial reads? Will this work for parallel too?
         let offset = 0;
         while (offset < fileSize) {
             const chunkSize = offset + MAX_CHUNK_SIZE > fileSize ? fileSize - offset : MAX_CHUNK_SIZE;
             const end = offset + chunkSize - 1;
-            core.debug(`Offset: ${offset}`);
-            const chunk = fs.createReadStream(archivePath, { start: offset, end });
-            uploads.push(uploadChunk(restClient, resourceUrl, chunk, offset, end));
+            const chunk = fs.createReadStream(archivePath, { fd, start: offset, end });
+            uploads.push(yield uploadChunk(restClient, resourceUrl, chunk, offset, end)); // Making this serial
            offset += MAX_CHUNK_SIZE;
         }
+        fs.closeSync(fd);
         core.debug("Awaiting all uploads");
-        const responses = yield Promise.all(uploads);
-        const failedResponse = responses.find(x => !isSuccessStatusCode(x.statusCode));
+        //const responses = await Promise.all(uploads);
+        const failedResponse = uploads.find(x => !isSuccessStatusCode(x.statusCode));
         if (failedResponse) {
             throw new Error(`Cache service responded with ${failedResponse.statusCode} during chunk upload.`);
         }

src/cacheHttpClient.ts

@@ -184,21 +184,24 @@ export async function saveCache(
     // Upload Chunks
     const fileSize = fs.statSync(archivePath).size;
     const resourceUrl = getCacheApiUrl() + "caches/" + cacheId.toString();
-    const uploads: Promise<IRestResponse<void>>[] = [];
+    const uploads: IRestResponse<void>[] = [];
+    const fd = fs.openSync(archivePath, "r"); // Use the same fd for serial reads? Will this work for parallel too?
     let offset = 0;
     while (offset < fileSize) {
         const chunkSize = offset + MAX_CHUNK_SIZE > fileSize ? fileSize - offset : MAX_CHUNK_SIZE;
         const end = offset + chunkSize - 1;
-        core.debug(`Offset: ${offset}`);
-        const chunk = fs.createReadStream(archivePath, { start: offset, end });
-        uploads.push(uploadChunk(restClient, resourceUrl, chunk, offset, end));
+        const chunk = fs.createReadStream(archivePath, { fd, start: offset, end });
+        uploads.push(await uploadChunk(restClient, resourceUrl, chunk, offset, end)); // Making this serial
         offset += MAX_CHUNK_SIZE;
     }
-    core.debug("Awaiting all uploads");
-    const responses = await Promise.all(uploads);
-    const failedResponse = responses.find(
+    fs.closeSync(fd);
+
+    core.debug("Awaiting all uploads");
+    //const responses = await Promise.all(uploads);
+    const failedResponse = uploads.find(
         x => !isSuccessStatusCode(x.statusCode)
     );
     if (failedResponse) {
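
Note: the sketch below is a minimal, standalone version of the pattern this commit adopts: upload chunks one at a time over a single shared file descriptor. The uploadChunk stub, the MAX_CHUNK_SIZE value, and the autoClose: false option are illustrative assumptions, not the action's actual implementation.

import * as fs from "fs";

// Illustrative chunk size; the real action defines its own MAX_CHUNK_SIZE.
const MAX_CHUNK_SIZE = 4 * 1024 * 1024; // 4 MB

// Hypothetical stand-in for the real uploadChunk helper: a real
// implementation would typically send the chunk stream to resourceUrl
// with a Content-Range of `bytes ${start}-${end}/*` and resolve with
// the HTTP response once the upload finishes.
async function uploadChunk(
    resourceUrl: string,
    chunk: NodeJS.ReadableStream,
    start: number,
    end: number
): Promise<{ statusCode: number }> {
    return { statusCode: 204 };
}

async function uploadFileSerially(
    archivePath: string,
    resourceUrl: string
): Promise<void> {
    const fileSize = fs.statSync(archivePath).size;
    // One fd shared by every chunk stream; this is only safe because each
    // upload is awaited before the next stream is created, so reads on the
    // shared descriptor never interleave.
    const fd = fs.openSync(archivePath, "r");
    try {
        let offset = 0;
        while (offset < fileSize) {
            const end = Math.min(offset + MAX_CHUNK_SIZE, fileSize) - 1;
            const chunk = fs.createReadStream(archivePath, {
                fd,
                start: offset,
                end,
                autoClose: false // keep the shared fd open after this stream ends
            });
            const response = await uploadChunk(resourceUrl, chunk, offset, end);
            if (response.statusCode < 200 || response.statusCode >= 300) {
                throw new Error(
                    `Cache service responded with ${response.statusCode} during chunk upload.`
                );
            }
            offset += MAX_CHUNK_SIZE;
        }
    } finally {
        fs.closeSync(fd);
    }
}

The earlier Promise.all version started every chunk upload at once; serializing them trades throughput for bounded memory use and simpler failure handling, since each response is checked before the next read stream is opened.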