From 83f86c103ffbd262a1ec330792d2c2a77b4c1ba1 Mon Sep 17 00:00:00 2001
From: Josh Gross <jogros@microsoft.com>
Date: Tue, 17 Dec 2019 15:43:50 -0500
Subject: [PATCH] Make uploads serial
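
Upload the archive chunks one at a time instead of starting them all
in parallel: open the archive once, reuse the file descriptor for each
ranged read, and await every uploadChunk call before starting the
next. The resolved responses are collected as the loop runs, so the
Promise.all aggregation step goes away.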

---
 dist/restore/index.js  | 11 ++++++-----
 dist/save/index.js     | 11 ++++++-----
 src/cacheHttpClient.ts | 17 ++++++++++-------
 3 files changed, 22 insertions(+), 17 deletions(-)
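
Note for reviewers (not part of the commit): when an fd is passed to
fs.createReadStream, Node's default autoClose: true closes that fd as
soon as the first stream ends, so the second chunk's read would fail
with EBADF. The hunks below therefore pass autoClose: false and close
the fd manually after the loop. A minimal sketch of the pattern, meant
to sit inside the existing async saveCache where fs, archivePath,
fileSize, MAX_CHUNK_SIZE, restClient, resourceUrl, uploads, and
uploadChunk are already in scope; the try/finally is a suggestion to
avoid leaking the fd on error, not what the commit does:

    const fd = fs.openSync(archivePath, "r");
    try {
        for (let offset = 0; offset < fileSize; offset += MAX_CHUNK_SIZE) {
            // Last chunk may be short; end is inclusive.
            const end = Math.min(offset + MAX_CHUNK_SIZE, fileSize) - 1;
            // Positioned read over the shared fd; autoClose: false keeps
            // the fd open for the next iteration.
            const chunk = fs.createReadStream(archivePath, {
                fd,
                start: offset,
                end,
                autoClose: false
            });
            uploads.push(await uploadChunk(restClient, resourceUrl, chunk, offset, end));
        }
    } finally {
        fs.closeSync(fd); // close even if an upload throws
    }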

diff --git a/dist/restore/index.js b/dist/restore/index.js
index 40527d7..c76d3fd 100644
--- a/dist/restore/index.js
+++ b/dist/restore/index.js
@@ -1623,18 +1623,19 @@ function saveCache(cacheId, archivePath) {
         const fileSize = fs.statSync(archivePath).size;
         const resourceUrl = getCacheApiUrl() + "caches/" + cacheId.toString();
         const uploads = [];
+        const fd = fs.openSync(archivePath, "r"); // Open once and reuse this fd for every sequential chunk read
         let offset = 0;
         while (offset < fileSize) {
             const chunkSize = offset + MAX_CHUNK_SIZE > fileSize ? fileSize - offset : MAX_CHUNK_SIZE;
             const end = offset + chunkSize - 1;
-            core.debug(`Offset: ${offset}`);
-            const chunk = fs.createReadStream(archivePath, { start: offset, end });
-            uploads.push(uploadChunk(restClient, resourceUrl, chunk, offset, end));
+            const chunk = fs.createReadStream(archivePath, { fd, start: offset, end, autoClose: false }); // autoClose: false so the first stream does not close the shared fd
+            uploads.push(yield uploadChunk(restClient, resourceUrl, chunk, offset, end)); // Serial: wait for each chunk before starting the next
             offset += MAX_CHUNK_SIZE;
         }
+        fs.closeSync(fd);
         core.debug("Awaiting all uploads");
-        const responses = yield Promise.all(uploads);
-        const failedResponse = responses.find(x => !isSuccessStatusCode(x.statusCode));
+        // Each upload has already resolved in the loop above, so Promise.all is no longer needed
+        const failedResponse = uploads.find(x => !isSuccessStatusCode(x.statusCode));
         if (failedResponse) {
             throw new Error(`Cache service responded with ${failedResponse.statusCode} during chunk upload.`);
         }
diff --git a/dist/save/index.js b/dist/save/index.js
index 6d3d845..139362b 100644
--- a/dist/save/index.js
+++ b/dist/save/index.js
@@ -1623,18 +1623,19 @@ function saveCache(cacheId, archivePath) {
         const fileSize = fs.statSync(archivePath).size;
         const resourceUrl = getCacheApiUrl() + "caches/" + cacheId.toString();
         const uploads = [];
+        const fd = fs.openSync(archivePath, "r"); // Open once and reuse this fd for every sequential chunk read
         let offset = 0;
         while (offset < fileSize) {
             const chunkSize = offset + MAX_CHUNK_SIZE > fileSize ? fileSize - offset : MAX_CHUNK_SIZE;
             const end = offset + chunkSize - 1;
-            core.debug(`Offset: ${offset}`);
-            const chunk = fs.createReadStream(archivePath, { start: offset, end });
-            uploads.push(uploadChunk(restClient, resourceUrl, chunk, offset, end));
+            const chunk = fs.createReadStream(archivePath, { fd, start: offset, end, autoClose: false }); // autoClose: false so the first stream does not close the shared fd
+            uploads.push(yield uploadChunk(restClient, resourceUrl, chunk, offset, end)); // Serial: wait for each chunk before starting the next
             offset += MAX_CHUNK_SIZE;
         }
+        fs.closeSync(fd);
         core.debug("Awaiting all uploads");
-        const responses = yield Promise.all(uploads);
-        const failedResponse = responses.find(x => !isSuccessStatusCode(x.statusCode));
+        // Each upload has already resolved in the loop above, so Promise.all is no longer needed
+        const failedResponse = uploads.find(x => !isSuccessStatusCode(x.statusCode));
         if (failedResponse) {
             throw new Error(`Cache service responded with ${failedResponse.statusCode} during chunk upload.`);
         }
diff --git a/src/cacheHttpClient.ts b/src/cacheHttpClient.ts
index 01d8582..87b9b66 100644
--- a/src/cacheHttpClient.ts
+++ b/src/cacheHttpClient.ts
@@ -184,21 +184,24 @@ export async function saveCache(
     // Upload Chunks
     const fileSize = fs.statSync(archivePath).size;
     const resourceUrl = getCacheApiUrl() + "caches/" + cacheId.toString();
-    const uploads: Promise<IRestResponse<void>>[] = [];
+    const uploads: IRestResponse<void>[] = [];
+
+    const fd = fs.openSync(archivePath, "r"); // Open once and reuse this fd for every sequential chunk read
     let offset = 0;
     while (offset < fileSize) {
         const chunkSize = offset + MAX_CHUNK_SIZE > fileSize ? fileSize - offset : MAX_CHUNK_SIZE;
         const end = offset + chunkSize - 1;
-        core.debug(`Offset: ${offset}`);
-        const chunk = fs.createReadStream(archivePath, { start: offset, end });
-        uploads.push(uploadChunk(restClient, resourceUrl, chunk, offset, end));
+        const chunk = fs.createReadStream(archivePath, { fd, start: offset, end, autoClose: false }); // autoClose: false so the first stream does not close the shared fd
+        uploads.push(await uploadChunk(restClient, resourceUrl, chunk, offset, end)); // Serial: wait for each chunk before starting the next
         offset += MAX_CHUNK_SIZE;
     }
 
-    core.debug("Awaiting all uploads");
-    const responses = await Promise.all(uploads);
+    fs.closeSync(fd);
 
-    const failedResponse = responses.find(
+    core.debug("Awaiting all uploads");
+    // Each upload has already resolved in the loop above, so Promise.all is no longer needed
+
+    const failedResponse = uploads.find(
         x => !isSuccessStatusCode(x.statusCode)
     );
     if (failedResponse) {