mirror of https://code.forgejo.org/actions/cache.git

Test out 16 concurrent requests

Josh Gross 2019-12-17 17:35:30 -05:00
parent 4fcbc07edb
commit 8c77f01f0b
3 changed files with 10 additions and 13 deletions

dist/restore/index.js vendored

@@ -1622,18 +1622,17 @@ function uploadFile(restClient, cacheId, archivePath) {
     const resourceUrl = getCacheApiUrl() + "caches/" + cacheId.toString();
     const responses = [];
     const fd = fs.openSync(archivePath, "r"); // Use the same fd for serial reads? Will this work for parallel too?
-    const concurrency = 8; // # of HTTP requests in parallel
+    const concurrency = 16; // # of HTTP requests in parallel
     core.debug(`Concurrency: ${concurrency}`);
     const threads = [...new Array(concurrency).keys()];
     core.debug("Awaiting all uploads");
     let offset = 0;
     yield Promise.all(threads.map(() => __awaiter(this, void 0, void 0, function* () {
-        core.debug(`Offset: ${offset} FileSize: ${fileSize}`);
         while (offset < fileSize) {
             const chunkSize = offset + MAX_CHUNK_SIZE > fileSize ? fileSize - offset : MAX_CHUNK_SIZE;
             const start = offset;
             const end = offset + chunkSize - 1;
             core.debug(`Start: ${start} End: ${end}`);
-            offset += MAX_CHUNK_SIZE; // Do this before losing thread during await?
+            offset += MAX_CHUNK_SIZE;
             const chunk = fs.createReadStream(archivePath, { fd, start, end, autoClose: false });
             responses.push(yield uploadChunk(restClient, resourceUrl, chunk, start, end));
         }

dist/save/index.js vendored

@@ -1622,18 +1622,17 @@ function uploadFile(restClient, cacheId, archivePath) {
     const resourceUrl = getCacheApiUrl() + "caches/" + cacheId.toString();
     const responses = [];
     const fd = fs.openSync(archivePath, "r"); // Use the same fd for serial reads? Will this work for parallel too?
-    const concurrency = 8; // # of HTTP requests in parallel
+    const concurrency = 16; // # of HTTP requests in parallel
     core.debug(`Concurrency: ${concurrency}`);
     const threads = [...new Array(concurrency).keys()];
     core.debug("Awaiting all uploads");
     let offset = 0;
     yield Promise.all(threads.map(() => __awaiter(this, void 0, void 0, function* () {
-        core.debug(`Offset: ${offset} FileSize: ${fileSize}`);
         while (offset < fileSize) {
             const chunkSize = offset + MAX_CHUNK_SIZE > fileSize ? fileSize - offset : MAX_CHUNK_SIZE;
             const start = offset;
             const end = offset + chunkSize - 1;
             core.debug(`Start: ${start} End: ${end}`);
-            offset += MAX_CHUNK_SIZE; // Do this before losing thread during await?
+            offset += MAX_CHUNK_SIZE;
             const chunk = fs.createReadStream(archivePath, { fd, start, end, autoClose: false });
             responses.push(yield uploadChunk(restClient, resourceUrl, chunk, start, end));
         }

src/cacheHttpClient.ts

@@ -181,18 +181,17 @@ async function uploadFile(restClient: RestClient, cacheId: number, archivePath:
     const responses: IRestResponse<void>[] = [];
     const fd = fs.openSync(archivePath, "r"); // Use the same fd for serial reads? Will this work for parallel too?
-    const concurrency = 8; // # of HTTP requests in parallel
+    const concurrency = 16; // # of HTTP requests in parallel
     core.debug(`Concurrency: ${concurrency}`);
     const threads = [...new Array(concurrency).keys()];
     core.debug("Awaiting all uploads");
     let offset = 0;
-    await Promise.all(threads.map(async () => { // This might not work cause something something closures
-        core.debug(`Offset: ${offset} FileSize: ${fileSize}`);
+    await Promise.all(threads.map(async () => {
         while (offset < fileSize) {
             const chunkSize = offset + MAX_CHUNK_SIZE > fileSize ? fileSize - offset : MAX_CHUNK_SIZE;
             const start = offset;
             const end = offset + chunkSize - 1;
             core.debug(`Start: ${start} End: ${end}`);
-            offset += MAX_CHUNK_SIZE; // Do this before losing thread during await?
+            offset += MAX_CHUNK_SIZE;
             const chunk = fs.createReadStream(archivePath, { fd, start, end, autoClose: false });
             responses.push(await uploadChunk(restClient, resourceUrl, chunk, start, end));
         }
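
For readers puzzling over the removed comments ("This might not work cause something something closures", "Do this before losing thread during await?"): the pattern works because Node runs JavaScript on a single thread, so each worker can read and advance the shared offset before its first await without any other worker interleaving. Below is a minimal, self-contained TypeScript sketch of that shared-offset fan-out, under stated assumptions: uploadChunk here is a hypothetical stub (not the action's real client call), and the 4 MB MAX_CHUNK_SIZE is an assumed value.

import * as fs from "fs";

// Assumed chunk size; the real constant lives elsewhere in the action.
const MAX_CHUNK_SIZE = 4 * 1024 * 1024;

// Hypothetical stub standing in for the real uploadChunk(restClient, ...) call,
// which would send the byte range [start, end] to the cache service.
async function uploadChunk(
    _chunk: NodeJS.ReadableStream,
    start: number,
    end: number
): Promise<void> {
    // network I/O would happen here
}

async function uploadFile(archivePath: string, concurrency: number): Promise<void> {
    const fileSize = fs.statSync(archivePath).size;
    const fd = fs.openSync(archivePath, "r"); // one fd shared by all workers
    let offset = 0; // shared cursor; safe only because Node runs JS on one thread

    try {
        await Promise.all(
            [...new Array(concurrency).keys()].map(async () => {
                while (offset < fileSize) {
                    const start = offset;
                    const end = Math.min(offset + MAX_CHUNK_SIZE, fileSize) - 1;
                    // Claim the range *before* the first await: no other worker
                    // can run until then, so no two workers grab the same chunk.
                    offset += MAX_CHUNK_SIZE;
                    const chunk = fs.createReadStream(archivePath, {
                        fd,
                        start,
                        end,
                        autoClose: false
                    });
                    await uploadChunk(chunk, start, end);
                }
            })
        );
    } finally {
        fs.closeSync(fd);
    }
}

Because chunk claiming happens synchronously between awaits, it is effectively atomic, which is why 8 or 16 workers can share one cursor without locks or overlapping ranges.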