Mirror of https://github.com/actions/cache.git (synced 2025-11-01 04:28:39 +08:00)
PR feedback

Author: Josh Gross
parent 61f374ae8c
commit 23daa0a638
@@ -208,7 +208,7 @@ test("save with large cache outputs warning", async () => {
 
     expect(logWarningMock).toHaveBeenCalledTimes(1);
     expect(logWarningMock).toHaveBeenCalledWith(
-        "Cache size of ~4 GB (4294967296 B) is over the 2GB limit, not saving cache."
+        "Cache size of ~4096 MB (4294967296 B) is over the 2GB limit, not saving cache."
     );
 
     expect(failedMock).toHaveBeenCalledTimes(0);

package-lock.json (generated) | 2 +-
@@ -1,6 +1,6 @@
 {
   "name": "cache",
-  "version": "1.0.3",
+  "version": "1.1.0",
   "lockfileVersion": 1,
   "requires": true,
   "dependencies": {

@@ -150,8 +150,8 @@ async function uploadChunk(
 ): Promise<IRestResponse<void>> {
     core.debug(
         `Uploading chunk of size ${end -
-            start +
-            1} bytes at offset ${start} with content range: ${getContentRange(
+        start +
+        1} bytes at offset ${start} with content range: ${getContentRange(
             start,
             end
         )}`
@@ -177,6 +177,7 @@ async function uploadChunk(
     }
 
     if (isRetryableStatusCode(response.statusCode)) {
+        core.debug(`Received ${response.statusCode}, retrying chunk at offset ${start}.`);
         const retryResponse = await uploadChunkRequest();
         if (isSuccessStatusCode(retryResponse.statusCode)) {
             return retryResponse;
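
The added debug line instruments the module's existing retry-once path. As a rough standalone sketch of that pattern (the helper bodies below are assumptions; only their names come from the diff):

// Retry a request exactly once when the first response has a retryable status.
function isSuccessStatusCode(status: number): boolean {
    return status >= 200 && status < 300;
}

function isRetryableStatusCode(status: number): boolean {
    // This status set is an assumption; the real module defines its own list.
    return [408, 429, 500, 502, 503, 504].includes(status);
}

async function requestWithOneRetry(
    doRequest: () => Promise<{ statusCode: number }>
): Promise<{ statusCode: number }> {
    const response = await doRequest();
    if (
        !isSuccessStatusCode(response.statusCode) &&
        isRetryableStatusCode(response.statusCode)
    ) {
        return doRequest(); // single retry, mirroring uploadChunk above
    }
    return response;
}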

@@ -199,53 +200,43 @@ async function uploadFile(
     const responses: IRestResponse<void>[] = [];
     const fd = fs.openSync(archivePath, "r");
 
-    const concurrency = 4; // # of HTTP requests in parallel
-    const MAX_CHUNK_SIZE = 32000000; // 32 MB Chunks
+    const concurrency = Number(process.env["CACHE_UPLOAD_CONCURRENCY"]) ?? 4; // # of HTTP requests in parallel
+    const MAX_CHUNK_SIZE = Number(process.env["CACHE_UPLOAD_CHUNK_SIZE"]) ?? 32000000; // 32 MB Chunks
     core.debug(`Concurrency: ${concurrency} and Chunk Size: ${MAX_CHUNK_SIZE}`);
 
     const parallelUploads = [...new Array(concurrency).keys()];
     core.debug("Awaiting all uploads");
     let offset = 0;
-    await Promise.all(
-        parallelUploads.map(async () => {
-            while (offset < fileSize) {
-                const chunkSize =
-                    offset + MAX_CHUNK_SIZE > fileSize
-                        ? fileSize - offset
-                        : MAX_CHUNK_SIZE;
-                const start = offset;
-                const end = offset + chunkSize - 1;
-                offset += MAX_CHUNK_SIZE;
-                const chunk = fs.createReadStream(archivePath, {
-                    fd,
-                    start,
-                    end,
-                    autoClose: false
-                });
-                responses.push(
-                    await uploadChunk(
-                        restClient,
-                        resourceUrl,
-                        chunk,
-                        start,
-                        end
-                    )
-                );
-            }
-        })
-    );
 
-    fs.closeSync(fd);
+    try {
+        await Promise.all(
+            parallelUploads.map(async () => {
+                while (offset < fileSize) {
+                    const chunkSize = Math.min(fileSize - offset, MAX_CHUNK_SIZE)
+                    const start = offset;
+                    const end = offset + chunkSize - 1;
+                    offset += MAX_CHUNK_SIZE;
+                    const chunk = fs.createReadStream(archivePath, {
+                        fd,
+                        start,
+                        end,
+                        autoClose: false
+                    });
+                    responses.push(
+                        await uploadChunk(
+                            restClient,
+                            resourceUrl,
+                            chunk,
+                            start,
+                            end
+                        )
+                    );
+                }
+            })
+        );
+    } finally {
+        fs.closeSync(fd);
+    }
 
     const failedResponse = responses.find(
         x => !isSuccessStatusCode(x.statusCode)
     );
     if (failedResponse) {
         throw new Error(
             `Cache service responded with ${failedResponse.statusCode} during chunk upload.`
         );
     }
 
     return;
 }
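
One subtlety in the new defaults: Number(undefined) is NaN, and NaN is not nullish, so Number(process.env[...]) ?? 4 will not fall back to 4 when the variable is unset. A NaN-safe variant of the same idea (a sketch, not what this commit ships):

// Read a numeric env var, treating "unset or not a number" as the default.
function envNumber(name: string, defaultValue: number): number {
    const parsed = Number(process.env[name]);
    return Number.isNaN(parsed) ? defaultValue : parsed;
}

const concurrency = envNumber("CACHE_UPLOAD_CONCURRENCY", 4); // parallel requests
const maxChunkSize = envNumber("CACHE_UPLOAD_CHUNK_SIZE", 32000000); // 32 MB chunks

The other two changes in this hunk: Math.min collapses the four-line ternary into a single clamped chunk-size expression, and the try/finally ensures fs.closeSync(fd) runs even when a chunk upload throws.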

@@ -60,7 +60,7 @@ async function run(): Promise<void> {
 
         try {
             const cacheEntry = await cacheHttpClient.getCacheEntry(keys);
-            if (!cacheEntry || !cacheEntry?.archiveLocation) {
+            if (!cacheEntry?.archiveLocation) {
                 core.info(
                     `Cache not found for input keys: ${keys.join(", ")}.`
                 );
@@ -78,7 +78,7 @@ async function run(): Promise<void> {
 
             // Download the cache from the cache entry
             await cacheHttpClient.downloadCache(
-                cacheEntry?.archiveLocation,
+                cacheEntry.archiveLocation,
                 archivePath
             );
 
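
The first hunk works because optional chaining already short-circuits: cacheEntry?.archiveLocation evaluates to undefined when cacheEntry itself is null or undefined, making the separate !cacheEntry test redundant. The second hunk goes the other way: after the early return, cacheEntry is known to exist, so the ?. can become a plain dot. A condensed illustration (the entry type here is an assumption for the sketch):

interface ArtifactCacheEntry {
    archiveLocation?: string;
}

function hasArchiveLocation(
    entry: ArtifactCacheEntry | null | undefined
): boolean {
    // entry?.archiveLocation is undefined whenever entry is nullish, so this
    // single test covers both "no entry" and "entry without a location".
    return !!entry?.archiveLocation;
}

console.assert(!hasArchiveLocation(null));
console.assert(!hasArchiveLocation({}));
console.assert(hasArchiveLocation({ archiveLocation: "https://example.com/cache.tgz" }));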

@@ -36,7 +36,7 @@ async function run(): Promise<void> {
 
         core.debug("Reserving Cache");
         const cacheId = await cacheHttpClient.reserveCache(primaryKey);
-        if (cacheId < 0) {
+        if (cacheId == -1) {
             core.info(
                 `Unable to reserve cache with key ${primaryKey}, another job may be creating this cache.`
             );
@@ -62,13 +62,13 @@ async function run(): Promise<void> {
         if (archiveFileSize > fileSizeLimit) {
             utils.logWarning(
                 `Cache size of ~${Math.round(
-                    archiveFileSize / (1024 * 1024 * 1024)
-                )} GB (${archiveFileSize} B) is over the 2GB limit, not saving cache.`
+                    archiveFileSize / (1024 * 1024)
+                )} MB (${archiveFileSize} B) is over the 2GB limit, not saving cache.`
             );
             return;
         }
 
-        core.debug("Saving Cache");
+        core.debug(`Saving Cache (ID: ${cacheId})`);
         await cacheHttpClient.saveCache(cacheId, archivePath);
     } catch (error) {
         utils.logWarning(error.message);
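
Read together with the test updated at the top of this commit, the size guard is a pure function of the archive size. A self-contained restatement (the 2 GB constant is inferred from the warning text, not shown in this diff):

// Sketch: returns the warning message for oversized archives, else null.
const fileSizeLimit = 2 * 1024 * 1024 * 1024; // 2 GB, per the warning text

function oversizeWarning(archiveFileSize: number): string | null {
    if (archiveFileSize <= fileSizeLimit) {
        return null;
    }
    const mb = Math.round(archiveFileSize / (1024 * 1024));
    return `Cache size of ~${mb} MB (${archiveFileSize} B) is over the 2GB limit, not saving cache.`;
}

// Reproduces the expectation from the updated test: 4294967296 B -> ~4096 MB.
console.assert(
    oversizeWarning(4294967296) ===
        "Cache size of ~4096 MB (4294967296 B) is over the 2GB limit, not saving cache."
);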