1
0
Fork 0
mirror of https://code.forgejo.org/actions/cache.git synced 2024-11-24 04:29:16 +01:00

Test disabling concurrency

This commit is contained in:
Dave Hadka 2020-05-08 12:05:32 -04:00
parent aced43a650
commit 6efe05572d
3 changed files with 36 additions and 32 deletions

30
dist/restore/index.js vendored
View file

@@ -2403,20 +2403,22 @@ function uploadFile(httpClient, cacheId, archivePath) {
core.debug("Awaiting all uploads"); core.debug("Awaiting all uploads");
let offset = 0; let offset = 0;
try { try {
yield Promise.all(parallelUploads.map(() => __awaiter(this, void 0, void 0, function* () { // await Promise.all(
while (offset < fileSize) { // parallelUploads.map(async () => {
const chunkSize = Math.min(fileSize - offset, MAX_CHUNK_SIZE); while (offset < fileSize) {
const start = offset; const chunkSize = Math.min(fileSize - offset, MAX_CHUNK_SIZE);
const end = offset + chunkSize - 1; const start = offset;
offset += MAX_CHUNK_SIZE; const end = offset + chunkSize - 1;
yield uploadChunk(httpClient, resourceUrl, () => fs.createReadStream(archivePath, { offset += MAX_CHUNK_SIZE;
fd, yield uploadChunk(httpClient, resourceUrl, () => fs.createReadStream(archivePath, {
start, fd,
end, start,
autoClose: false end,
}), start, end); autoClose: false
} }), start, end);
}))); }
// })
// );
} }
finally { finally {
fs.closeSync(fd); fs.closeSync(fd);

30
dist/save/index.js vendored
View file

@@ -2403,20 +2403,22 @@ function uploadFile(httpClient, cacheId, archivePath) {
core.debug("Awaiting all uploads"); core.debug("Awaiting all uploads");
let offset = 0; let offset = 0;
try { try {
yield Promise.all(parallelUploads.map(() => __awaiter(this, void 0, void 0, function* () { // await Promise.all(
while (offset < fileSize) { // parallelUploads.map(async () => {
const chunkSize = Math.min(fileSize - offset, MAX_CHUNK_SIZE); while (offset < fileSize) {
const start = offset; const chunkSize = Math.min(fileSize - offset, MAX_CHUNK_SIZE);
const end = offset + chunkSize - 1; const start = offset;
offset += MAX_CHUNK_SIZE; const end = offset + chunkSize - 1;
yield uploadChunk(httpClient, resourceUrl, () => fs.createReadStream(archivePath, { offset += MAX_CHUNK_SIZE;
fd, yield uploadChunk(httpClient, resourceUrl, () => fs.createReadStream(archivePath, {
start, fd,
end, start,
autoClose: false end,
}), start, end); autoClose: false
} }), start, end);
}))); }
// })
// );
} }
finally { finally {
fs.closeSync(fd); fs.closeSync(fd);

View file

@@ -342,8 +342,8 @@ async function uploadFile(
let offset = 0; let offset = 0;
try { try {
await Promise.all( // await Promise.all(
parallelUploads.map(async () => { // parallelUploads.map(async () => {
while (offset < fileSize) { while (offset < fileSize) {
const chunkSize = Math.min( const chunkSize = Math.min(
fileSize - offset, fileSize - offset,
@@ -367,8 +367,8 @@ async function uploadFile(
end end
); );
} }
}) // })
); // );
} finally { } finally {
fs.closeSync(fd); fs.closeSync(fd);
} }