
Test out 16 concurrency with 32mb chunks

Josh Gross 2019-12-17 17:52:59 -05:00
parent 8c77f01f0b
commit 2ce22df8c4
3 changed files with 15 additions and 16 deletions

dist/restore/index.js vendored (10 changes)

@@ -1497,7 +1497,6 @@ const Handlers_1 = __webpack_require__(941);
 const HttpClient_1 = __webpack_require__(874);
 const RestClient_1 = __webpack_require__(105);
 const utils = __importStar(__webpack_require__(443));
-const MAX_CHUNK_SIZE = 4000000; // 4 MB Chunks
 function isSuccessStatusCode(statusCode) {
     return statusCode >= 200 && statusCode < 300;
 }
@@ -1621,13 +1620,14 @@ function uploadFile(restClient, cacheId, archivePath) {
     const fileSize = fs.statSync(archivePath).size;
     const resourceUrl = getCacheApiUrl() + "caches/" + cacheId.toString();
     const responses = [];
-    const fd = fs.openSync(archivePath, "r"); // Use the same fd for serial reads? Will this work for parallel too?
+    const fd = fs.openSync(archivePath, "r");
     const concurrency = 16; // # of HTTP requests in parallel
-    core.debug(`Concurrency: ${concurrency}`);
-    const threads = [...new Array(concurrency).keys()];
+    const MAX_CHUNK_SIZE = 32000000; // 32 MB Chunks
+    core.debug(`Concurrency: ${concurrency} and Chunk Size: ${MAX_CHUNK_SIZE}`);
+    const parallelUploads = [...new Array(concurrency).keys()];
     core.debug("Awaiting all uploads");
     let offset = 0;
-    yield Promise.all(threads.map(() => __awaiter(this, void 0, void 0, function* () {
+    yield Promise.all(parallelUploads.map(() => __awaiter(this, void 0, void 0, function* () {
         while (offset < fileSize) {
             const chunkSize = offset + MAX_CHUNK_SIZE > fileSize ? fileSize - offset : MAX_CHUNK_SIZE;
             const start = offset;

dist/save/index.js vendored (10 changes)

@@ -1497,7 +1497,6 @@ const Handlers_1 = __webpack_require__(941);
 const HttpClient_1 = __webpack_require__(874);
 const RestClient_1 = __webpack_require__(105);
 const utils = __importStar(__webpack_require__(443));
-const MAX_CHUNK_SIZE = 4000000; // 4 MB Chunks
 function isSuccessStatusCode(statusCode) {
     return statusCode >= 200 && statusCode < 300;
 }
@@ -1621,13 +1620,14 @@ function uploadFile(restClient, cacheId, archivePath) {
     const fileSize = fs.statSync(archivePath).size;
     const resourceUrl = getCacheApiUrl() + "caches/" + cacheId.toString();
     const responses = [];
-    const fd = fs.openSync(archivePath, "r"); // Use the same fd for serial reads? Will this work for parallel too?
+    const fd = fs.openSync(archivePath, "r");
     const concurrency = 16; // # of HTTP requests in parallel
-    core.debug(`Concurrency: ${concurrency}`);
-    const threads = [...new Array(concurrency).keys()];
+    const MAX_CHUNK_SIZE = 32000000; // 32 MB Chunks
+    core.debug(`Concurrency: ${concurrency} and Chunk Size: ${MAX_CHUNK_SIZE}`);
+    const parallelUploads = [...new Array(concurrency).keys()];
     core.debug("Awaiting all uploads");
     let offset = 0;
-    yield Promise.all(threads.map(() => __awaiter(this, void 0, void 0, function* () {
+    yield Promise.all(parallelUploads.map(() => __awaiter(this, void 0, void 0, function* () {
         while (offset < fileSize) {
             const chunkSize = offset + MAX_CHUNK_SIZE > fileSize ? fileSize - offset : MAX_CHUNK_SIZE;
             const start = offset;

src/cacheHttpClient.ts (11 changes)

@@ -16,8 +16,6 @@ import {
 } from "./contracts";
 import * as utils from "./utils/actionUtils";

-const MAX_CHUNK_SIZE = 4000000; // 4 MB Chunks
-
 function isSuccessStatusCode(statusCode: number): boolean {
     return statusCode >= 200 && statusCode < 300;
 }
@@ -179,14 +177,15 @@ async function uploadFile(restClient: RestClient, cacheId: number, archivePath:
     const fileSize = fs.statSync(archivePath).size;
     const resourceUrl = getCacheApiUrl() + "caches/" + cacheId.toString();
     const responses: IRestResponse<void>[] = [];
-    const fd = fs.openSync(archivePath, "r"); // Use the same fd for serial reads? Will this work for parallel too?
+    const fd = fs.openSync(archivePath, "r");
     const concurrency = 16; // # of HTTP requests in parallel
-    core.debug(`Concurrency: ${concurrency}`);
-    const threads = [...new Array(concurrency).keys()];
+    const MAX_CHUNK_SIZE = 32000000; // 32 MB Chunks
+    core.debug(`Concurrency: ${concurrency} and Chunk Size: ${MAX_CHUNK_SIZE}`);
+    const parallelUploads = [...new Array(concurrency).keys()];
     core.debug("Awaiting all uploads");
     let offset = 0;
-    await Promise.all(threads.map(async () => {
+    await Promise.all(parallelUploads.map(async () => {
         while (offset < fileSize) {
             const chunkSize = offset + MAX_CHUNK_SIZE > fileSize ? fileSize - offset : MAX_CHUNK_SIZE;
             const start = offset;
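
The same change lands in all three files, since the dist/ bundles vendor the compiled src/cacheHttpClient.ts. The core of it is the upload loop: sixteen async workers share one mutable offset and claim 32 MB chunks from it until the file is exhausted. Below is a minimal sketch of that pattern in TypeScript, with a hypothetical uploadChunk helper standing in for the action's real ranged upload request; it illustrates the structure in the diff, not the action's actual API.

import * as fs from "fs";

// Hypothetical stand-in for the real upload: in the action this is a
// PATCH to the cache service with a Content-Range covering [start, end].
async function uploadChunk(
    fd: number,
    start: number,
    end: number
): Promise<void> {
    // read bytes from fd at an explicit position and send them
}

async function uploadFile(archivePath: string): Promise<void> {
    const fileSize = fs.statSync(archivePath).size;
    const fd = fs.openSync(archivePath, "r");
    const concurrency = 16; // # of HTTP requests in flight at once
    const MAX_CHUNK_SIZE = 32000000; // 32 MB chunks

    let offset = 0;
    const parallelUploads = [...new Array(concurrency).keys()];
    await Promise.all(
        parallelUploads.map(async () => {
            // Each worker claims the next chunk by reading and advancing
            // the shared offset. The read-then-advance happens with no
            // await in between, so on Node's single-threaded event loop
            // no two workers can claim the same byte range.
            while (offset < fileSize) {
                const chunkSize = Math.min(MAX_CHUNK_SIZE, fileSize - offset);
                const start = offset;
                offset += chunkSize;
                await uploadChunk(fd, start, start + chunkSize - 1);
            }
        })
    );
    fs.closeSync(fd);
}

This also answers the question the deleted comment was asking: one fd can serve parallel uploads, but only if each worker reads with an explicit position (e.g. fs.read with its position argument) instead of relying on the fd's shared file cursor, which would race.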