
Revert "Test disabling concurrency"

This reverts commit 6efe05572d.
Author: Dave Hadka
Date: 2020-05-08 15:59:00 -04:00
commit 25b1a139de
parent 6efe05572d

3 changed files with 32 additions and 36 deletions
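
For context, below is a minimal TypeScript sketch of the upload pattern this revert restores: Promise.all starts several workers that share a single offset counter and drain the archive chunk by chunk. The chunk size, worker count, and the uploadChunk placeholder are illustrative assumptions, not the action's actual values.

import * as fs from "fs";

const MAX_CHUNK_SIZE = 32 * 1024 * 1024; // assumed chunk size, not the action's real constant
const PARALLEL_UPLOADS = 4;              // assumed worker count

async function uploadChunk(
    fd: number,
    archivePath: string,
    start: number,
    end: number
): Promise<void> {
    // Placeholder: the real action streams bytes [start, end] to the cache
    // service; here we only drain the chunk to keep the sketch self-contained.
    const chunk = fs.createReadStream(archivePath, { fd, start, end, autoClose: false });
    chunk.resume();
    await new Promise<void>(resolve => chunk.on("end", resolve));
}

async function uploadFile(archivePath: string): Promise<void> {
    const fileSize = fs.statSync(archivePath).size;
    const fd = fs.openSync(archivePath, "r");
    let offset = 0;
    try {
        // Start PARALLEL_UPLOADS workers; each claims the next chunk by
        // reading and advancing the shared offset before its first await.
        await Promise.all(
            Array.from({ length: PARALLEL_UPLOADS }).map(async () => {
                while (offset < fileSize) {
                    const chunkSize = Math.min(fileSize - offset, MAX_CHUNK_SIZE);
                    const start = offset;
                    const end = offset + chunkSize - 1;
                    offset += MAX_CHUNK_SIZE;
                    await uploadChunk(fd, archivePath, start, end);
                }
            })
        );
    } finally {
        fs.closeSync(fd);
    }
}

The commit being reverted had flattened this into a single sequential loop by commenting out the Promise.all wrapper; the diffs below put the concurrent version back.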

dist/restore/index.js (vendored, 30 changes)

@@ -2403,22 +2403,20 @@ function uploadFile(httpClient, cacheId, archivePath) {
         core.debug("Awaiting all uploads");
         let offset = 0;
         try {
-            // await Promise.all(
-            //     parallelUploads.map(async () => {
-            while (offset < fileSize) {
-                const chunkSize = Math.min(fileSize - offset, MAX_CHUNK_SIZE);
-                const start = offset;
-                const end = offset + chunkSize - 1;
-                offset += MAX_CHUNK_SIZE;
-                yield uploadChunk(httpClient, resourceUrl, () => fs.createReadStream(archivePath, {
-                    fd,
-                    start,
-                    end,
-                    autoClose: false
-                }), start, end);
-            }
-            // })
-            // );
+            yield Promise.all(parallelUploads.map(() => __awaiter(this, void 0, void 0, function* () {
+                while (offset < fileSize) {
+                    const chunkSize = Math.min(fileSize - offset, MAX_CHUNK_SIZE);
+                    const start = offset;
+                    const end = offset + chunkSize - 1;
+                    offset += MAX_CHUNK_SIZE;
+                    yield uploadChunk(httpClient, resourceUrl, () => fs.createReadStream(archivePath, {
+                        fd,
+                        start,
+                        end,
+                        autoClose: false
+                    }), start, end);
+                }
+            })));
         }
         finally {
             fs.closeSync(fd);

dist/save/index.js (vendored, 30 changes)

@@ -2403,22 +2403,20 @@ function uploadFile(httpClient, cacheId, archivePath) {
         core.debug("Awaiting all uploads");
         let offset = 0;
         try {
-            // await Promise.all(
-            //     parallelUploads.map(async () => {
-            while (offset < fileSize) {
-                const chunkSize = Math.min(fileSize - offset, MAX_CHUNK_SIZE);
-                const start = offset;
-                const end = offset + chunkSize - 1;
-                offset += MAX_CHUNK_SIZE;
-                yield uploadChunk(httpClient, resourceUrl, () => fs.createReadStream(archivePath, {
-                    fd,
-                    start,
-                    end,
-                    autoClose: false
-                }), start, end);
-            }
-            // })
-            // );
+            yield Promise.all(parallelUploads.map(() => __awaiter(this, void 0, void 0, function* () {
+                while (offset < fileSize) {
+                    const chunkSize = Math.min(fileSize - offset, MAX_CHUNK_SIZE);
+                    const start = offset;
+                    const end = offset + chunkSize - 1;
+                    offset += MAX_CHUNK_SIZE;
+                    yield uploadChunk(httpClient, resourceUrl, () => fs.createReadStream(archivePath, {
+                        fd,
+                        start,
+                        end,
+                        autoClose: false
+                    }), start, end);
+                }
+            })));
         }
         finally {
             fs.closeSync(fd);

src/cacheHttpClient.ts (8 changes)

@@ -342,8 +342,8 @@ async function uploadFile(
     core.debug("Awaiting all uploads");
     let offset = 0;
     try {
-        // await Promise.all(
-        //     parallelUploads.map(async () => {
+        await Promise.all(
+            parallelUploads.map(async () => {
                 while (offset < fileSize) {
                     const chunkSize = Math.min(
                         fileSize - offset,
@@ -367,8 +367,8 @@ async function uploadFile(
                         end
                     );
                 }
-        // })
-        // );
+            })
+        );
     } finally {
         fs.closeSync(fd);
     }
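
A note on the restored pattern: the shared offset looks racy, but Node's single-threaded event loop makes it safe here. Each worker reads offset and advances it synchronously, before its first await, so no two workers can claim the same byte range; the workers simply interleave their uploadChunk calls.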