Mirror of https://github.com/actions/cache.git (synced 2025-10-31 07:16:24 +07:00)

Compare commits

9 commits (tiwarishub … tiwarishub)
| Author | SHA1 | Date |
|---|---|---|
| | 20817ef617 | |
| | 103570a2bf | |
| | aeb01573e6 | |
| | d351e68b9a | |
| | 3d236ac88e | |
| | b8ddf3df10 | |
| | 0c5d98e6bb | |
| | 7c59aeb02d | |
| | c75dca6de7 | |
							
								
								
									
.github/auto_assign.yml (vendored, 1 line changed)
@@ -6,7 +6,6 @@ addAssignees: false
 
 # A list of reviewers to be added to pull requests (GitHub user name)
 reviewers:
-  - phantsure
   - kotewar
   - aparna-ravindra
   - tiwarishub
							
								
								
									
.github/workflows/auto-assign-issues.yml (vendored, 2 lines changed)
@@ -11,5 +11,5 @@ jobs:
             - name: 'Auto-assign issue'
               uses: pozil/auto-assign-issue@v1.4.0
               with:
-                  assignees: phantsure,kotewar,tiwarishub,aparna-ravindra,vsvipul,bishal-pdmsft
+                  assignees: kotewar,tiwarishub,aparna-ravindra,vsvipul,bishal-pdmsft
                   numOfAssignee: 1
RELEASES.md

@@ -14,7 +14,4 @@
 - Fixed avoiding empty cache save when no files are available for caching. ([issue](https://github.com/actions/cache/issues/624))
 
-### 3.0.4
-- Fixed tar creation error while trying to create tar with path as `~/` home folder on `ubuntu-latest`. ([issue](https://github.com/actions/cache/issues/689))
-
 ### 3.0.5
-- Removed error handling by consuming actions/cache 3.0 toolkit, Now cache server error handling will be done by toolkit. ([PR](https://github.com/actions/cache/pull/834))
+- Fixed tar creation error while trying to create tar with path as `~/` home folder on `ubuntu-latest`. ([issue](https://github.com/actions/cache/issues/689))
Binary file not shown.
dist/restore/index.js (vendored, 40 lines changed)
@@ -1113,13 +1113,7 @@ function resolvePaths(patterns) {
                     .replace(new RegExp(`\\${path.sep}`, 'g'), '/');
                 core.debug(`Matched: ${relativeFile}`);
                 // Paths are made relative so the tar entries are all relative to the root of the workspace.
-                if (relativeFile === '') {
-                    // path.relative returns empty string if workspace and file are equal
-                    paths.push('.');
-                }
-                else {
-                    paths.push(`${relativeFile}`);
-                }
+                paths.push(`${relativeFile}`);
             }
         }
         catch (e_1_1) { e_1 = { error: e_1_1 }; }
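The hunk above reverts `resolvePaths` to pushing the relative path as-is, dropping the branch that turned an empty `path.relative` result (caching the workspace root itself) into `'.'`. A small standalone sketch of that removed behaviour, using a hypothetical `toTarEntry` helper rather than the bundled function:

```js
// Sketch of the behaviour removed by this revert (hypothetical helper name;
// the real code lives inside resolvePaths in the bundled index.js).
const path = require('path');

function toTarEntry(workspace, matchedFile) {
    const relativeFile = path
        .relative(workspace, matchedFile)
        .replace(new RegExp(`\\${path.sep}`, 'g'), '/'); // normalise separators for tar
    // path.relative returns '' when the matched path IS the workspace,
    // so the pre-revert code substituted '.' to keep a usable tar entry.
    return relativeFile === '' ? '.' : relativeFile;
}

console.log(toTarEntry('/home/runner/work', '/home/runner/work'));          // '.'
console.log(toTarEntry('/home/runner/work', '/home/runner/work/foo/bar'));  // 'foo/bar'
```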
@@ -5470,7 +5464,6 @@ const buffer = __importStar(__webpack_require__(293));
 const fs = __importStar(__webpack_require__(747));
 const stream = __importStar(__webpack_require__(794));
 const util = __importStar(__webpack_require__(669));
-const timer = __importStar(__webpack_require__(581));
 const utils = __importStar(__webpack_require__(15));
 const constants_1 = __webpack_require__(931);
 const requestUtils_1 = __webpack_require__(899);

@@ -5661,14 +5654,10 @@ function downloadCacheStorageSDK(archiveLocation, archivePath, options) {
                     const segmentStart = downloadProgress.segmentOffset + downloadProgress.segmentSize;
                     const segmentSize = Math.min(maxSegmentSize, contentLength - segmentStart);
                     downloadProgress.nextSegment(segmentSize);
-                    const result = yield Promise.race([client.downloadToBuffer(segmentStart, segmentSize, {
-                            concurrency: options.downloadConcurrency,
-                            onProgress: downloadProgress.onProgress()
-                        }),
-                        timer.setTimeout(60 * 60 * 1000, 'timeout')]);
-                    if (result === 'timeout') {
-                        throw new Error("Segment download timed out");
-                    }
+                    const result = yield client.downloadToBuffer(segmentStart, segmentSize, {
+                        concurrency: options.downloadConcurrency,
+                        onProgress: downloadProgress.onProgress()
+                    });
                     fs.writeFileSync(fd, result);
                 }
             }
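The second hunk drops a `Promise.race` between the Azure SDK's `downloadToBuffer` call and a one-hour timer from `timers/promises`, reverting to a plain awaited download. A self-contained sketch of that race-based timeout pattern, with hypothetical `downloadWithTimeout`/`downloadSegment` names standing in for the bundled code:

```js
// Sketch of the removed timeout pattern (hypothetical wrapper; the bundle raced
// client.downloadToBuffer against the bare timer and never cancelled the timer).
const timer = require('timers/promises');

async function downloadWithTimeout(downloadSegment, timeoutMs = 60 * 60 * 1000) {
    const ac = new AbortController();
    try {
        const result = await Promise.race([
            downloadSegment(),                                         // resolves with a Buffer
            timer.setTimeout(timeoutMs, 'timeout', { signal: ac.signal })
        ]);
        if (result === 'timeout') {
            throw new Error('Segment download timed out');
        }
        return result;
    } finally {
        ac.abort(); // cancel the pending timer so it cannot keep the event loop alive
    }
}

// Hypothetical usage:
// const buf = await downloadWithTimeout(() => client.downloadToBuffer(start, size, opts));
```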
@@ -37283,9 +37272,9 @@ function extractTar(archivePath, compressionMethod) {
         function getCompressionProgram() {
             switch (compressionMethod) {
                 case constants_1.CompressionMethod.Zstd:
-                    return ['--use-compress-program', 'unzstd --long=30'];
+                    return ['--use-compress-program', 'zstd -d --long=30'];
                 case constants_1.CompressionMethod.ZstdWithoutLong:
-                    return ['--use-compress-program', 'unzstd'];
+                    return ['--use-compress-program', 'zstd -d'];
                 default:
                     return ['-z'];
             }

@@ -37316,9 +37305,9 @@ function createTar(archiveFolder, sourceDirectories, compressionMethod) {
         function getCompressionProgram() {
             switch (compressionMethod) {
                 case constants_1.CompressionMethod.Zstd:
-                    return ['--use-compress-program', 'zstdmt --long=30'];
+                    return ['--use-compress-program', 'zstd -T0 --long=30'];
                 case constants_1.CompressionMethod.ZstdWithoutLong:
-                    return ['--use-compress-program', 'zstdmt'];
+                    return ['--use-compress-program', 'zstd -T0'];
                 default:
                     return ['-z'];
             }

@@ -37349,9 +37338,9 @@ function listTar(archivePath, compressionMethod) {
         function getCompressionProgram() {
             switch (compressionMethod) {
                 case constants_1.CompressionMethod.Zstd:
-                    return ['--use-compress-program', 'unzstd --long=30'];
+                    return ['--use-compress-program', 'zstd -d --long=30'];
                 case constants_1.CompressionMethod.ZstdWithoutLong:
-                    return ['--use-compress-program', 'unzstd'];
+                    return ['--use-compress-program', 'zstd -d'];
                 default:
                     return ['-z'];
             }
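These three hunks switch tar's `--use-compress-program` argument from the `unzstd`/`zstdmt` aliases back to the equivalent explicit `zstd -d` / `zstd -T0` invocations (`-d` decompresses, `-T0` uses all cores, `--long=30` enables the 2^30-byte window the archives are written with). A sketch of how such an argument list could be assembled and handed to tar, using hypothetical constants and an `extractArchive` wrapper:

```js
// Sketch only: hypothetical constants and wrapper mirroring the getCompressionProgram
// switch above; assumes GNU tar and zstd are available on PATH.
const { execFileSync } = require('child_process');

const CompressionMethod = { Gzip: 'gzip', ZstdWithoutLong: 'zstd-without-long', Zstd: 'zstd' };

function getDecompressionProgram(compressionMethod) {
    switch (compressionMethod) {
        case CompressionMethod.Zstd:
            return ['--use-compress-program', 'zstd -d --long=30'];
        case CompressionMethod.ZstdWithoutLong:
            return ['--use-compress-program', 'zstd -d'];
        default:
            return ['-z']; // gzip fallback
    }
}

function extractArchive(archivePath, dest, compressionMethod) {
    const args = ['-xf', archivePath, '-C', dest, ...getDecompressionProgram(compressionMethod)];
    execFileSync('tar', args, { stdio: 'inherit' });
}

// Hypothetical usage: extractArchive('cache.tzst', '/home/runner/work', CompressionMethod.Zstd);
```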
@@ -42354,12 +42343,7 @@ function clean(key)
 /* 578 */,
 /* 579 */,
 /* 580 */,
-/* 581 */
-/***/ (function(module) {
-
-module.exports = require("timers/promises");
-
-/***/ }),
+/* 581 */,
 /* 582 */
 /***/ (function(module) {
 
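The final hunk empties slot 581 of the bundle's numbered module table, which previously wrapped the `timers/promises` external used by the timeout code above. A toy model of how such a table is consumed (heavily simplified; not the actual webpack runtime in `dist/restore/index.js`):

```js
// Toy webpack-style module table (assumption: simplified model; the real runtime
// adds module caching, ESM interop helpers, etc.). Externals become numbered factories.
const modules = {
    581: function (module) {
        module.exports = require('timers/promises'); // the entry this revert removes
    },
    582: function (module) {
        module.exports = require('os');              // hypothetical neighbouring external
    }
};

const installedModules = {};

function __webpack_require__(id) {
    if (installedModules[id]) {
        return installedModules[id].exports;         // serve repeat requires from cache
    }
    const module = (installedModules[id] = { exports: {} });
    modules[id].call(module.exports, module, module.exports, __webpack_require__);
    return module.exports;
}

// This is roughly what `const timer = __importStar(__webpack_require__(581));` resolved through.
const timer = __webpack_require__(581);
console.log(typeof timer.setTimeout); // 'function'
```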
							
								
								
									
dist/save/index.js (vendored, 40 lines changed)
@@ -1113,13 +1113,7 @@ function resolvePaths(patterns) {
                     .replace(new RegExp(`\\${path.sep}`, 'g'), '/');
                 core.debug(`Matched: ${relativeFile}`);
                 // Paths are made relative so the tar entries are all relative to the root of the workspace.
-                if (relativeFile === '') {
-                    // path.relative returns empty string if workspace and file are equal
-                    paths.push('.');
-                }
-                else {
-                    paths.push(`${relativeFile}`);
-                }
+                paths.push(`${relativeFile}`);
             }
         }
         catch (e_1_1) { e_1 = { error: e_1_1 }; }

@@ -5470,7 +5464,6 @@ const buffer = __importStar(__webpack_require__(293));
 const fs = __importStar(__webpack_require__(747));
 const stream = __importStar(__webpack_require__(794));
 const util = __importStar(__webpack_require__(669));
-const timer = __importStar(__webpack_require__(581));
 const utils = __importStar(__webpack_require__(15));
 const constants_1 = __webpack_require__(931);
 const requestUtils_1 = __webpack_require__(899);

@@ -5661,14 +5654,10 @@ function downloadCacheStorageSDK(archiveLocation, archivePath, options) {
                     const segmentStart = downloadProgress.segmentOffset + downloadProgress.segmentSize;
                     const segmentSize = Math.min(maxSegmentSize, contentLength - segmentStart);
                     downloadProgress.nextSegment(segmentSize);
-                    const result = yield Promise.race([client.downloadToBuffer(segmentStart, segmentSize, {
-                            concurrency: options.downloadConcurrency,
-                            onProgress: downloadProgress.onProgress()
-                        }),
-                        timer.setTimeout(60 * 60 * 1000, 'timeout')]);
-                    if (result === 'timeout') {
-                        throw new Error("Segment download timed out");
-                    }
+                    const result = yield client.downloadToBuffer(segmentStart, segmentSize, {
+                        concurrency: options.downloadConcurrency,
+                        onProgress: downloadProgress.onProgress()
+                    });
                     fs.writeFileSync(fd, result);
                 }
             }

@@ -37283,9 +37272,9 @@ function extractTar(archivePath, compressionMethod) {
         function getCompressionProgram() {
             switch (compressionMethod) {
                 case constants_1.CompressionMethod.Zstd:
-                    return ['--use-compress-program', 'unzstd --long=30'];
+                    return ['--use-compress-program', 'zstd -d --long=30'];
                 case constants_1.CompressionMethod.ZstdWithoutLong:
-                    return ['--use-compress-program', 'unzstd'];
+                    return ['--use-compress-program', 'zstd -d'];
                 default:
                     return ['-z'];
             }

@@ -37316,9 +37305,9 @@ function createTar(archiveFolder, sourceDirectories, compressionMethod) {
         function getCompressionProgram() {
             switch (compressionMethod) {
                 case constants_1.CompressionMethod.Zstd:
-                    return ['--use-compress-program', 'zstdmt --long=30'];
+                    return ['--use-compress-program', 'zstd -T0 --long=30'];
                 case constants_1.CompressionMethod.ZstdWithoutLong:
-                    return ['--use-compress-program', 'zstdmt'];
+                    return ['--use-compress-program', 'zstd -T0'];
                 default:
                     return ['-z'];
             }

@@ -37349,9 +37338,9 @@ function listTar(archivePath, compressionMethod) {
         function getCompressionProgram() {
             switch (compressionMethod) {
                 case constants_1.CompressionMethod.Zstd:
-                    return ['--use-compress-program', 'unzstd --long=30'];
+                    return ['--use-compress-program', 'zstd -d --long=30'];
                 case constants_1.CompressionMethod.ZstdWithoutLong:
-                    return ['--use-compress-program', 'unzstd'];
+                    return ['--use-compress-program', 'zstd -d'];
                 default:
                     return ['-z'];
             }

@@ -42354,12 +42343,7 @@ function clean(key)
 /* 578 */,
 /* 579 */,
 /* 580 */,
-/* 581 */
-/***/ (function(module) {
-
-module.exports = require("timers/promises");
-
-/***/ }),
+/* 581 */,
 /* 582 */
 /***/ (function(module) {
 
package-lock.json (generated, 18 lines changed)
@@ -1,15 +1,15 @@
 {
   "name": "cache",
-  "version": "3.0.5",
+  "version": "3.0.4",
   "lockfileVersion": 2,
   "requires": true,
   "packages": {
     "": {
       "name": "cache",
-      "version": "3.0.5",
+      "version": "3.0.4",
       "license": "MIT",
       "dependencies": {
-        "@actions/cache": "file:actions-cache-3.0.1.tgz",
+        "@actions/cache": "^3.0.0",
         "@actions/core": "^1.7.0",
         "@actions/exec": "^1.1.1",
         "@actions/io": "^1.1.2"

@@ -36,10 +36,9 @@
       }
     },
     "node_modules/@actions/cache": {
-      "version": "3.0.1",
-      "resolved": "file:actions-cache-3.0.1.tgz",
-      "integrity": "sha512-ucvw0xvFpe0/vfNQ/rc11ste0nidCdBAJ5j5F01BxBqjxmGH2doVzfPlqSIGhcN7wKI074x2ATb9+7HSrTqGHg==",
-      "license": "MIT",
+      "version": "3.0.0",
+      "resolved": "https://registry.npmjs.org/@actions/cache/-/cache-3.0.0.tgz",
+      "integrity": "sha512-GL9CT1Fnu+pqs8TTB621q8Xa8Cilw2n9MwvbgMedetH7L1q2n6jY61gzbwGbKgtVbp3gVJ12aNMi4osSGXx3KQ==",
       "dependencies": {
         "@actions/core": "^1.2.6",
         "@actions/exec": "^1.0.1",

@@ -9534,8 +9533,9 @@
   },
   "dependencies": {
     "@actions/cache": {
-      "version": "file:actions-cache-3.0.1.tgz",
-      "integrity": "sha512-ucvw0xvFpe0/vfNQ/rc11ste0nidCdBAJ5j5F01BxBqjxmGH2doVzfPlqSIGhcN7wKI074x2ATb9+7HSrTqGHg==",
+      "version": "3.0.0",
+      "resolved": "https://registry.npmjs.org/@actions/cache/-/cache-3.0.0.tgz",
+      "integrity": "sha512-GL9CT1Fnu+pqs8TTB621q8Xa8Cilw2n9MwvbgMedetH7L1q2n6jY61gzbwGbKgtVbp3gVJ12aNMi4osSGXx3KQ==",
       "requires": {
         "@actions/core": "^1.2.6",
         "@actions/exec": "^1.0.1",
package.json

@@ -1,6 +1,6 @@
 {
   "name": "cache",
-  "version": "3.0.5",
+  "version": "3.0.4",
   "private": true,
   "description": "Cache dependencies and build outputs",
   "main": "dist/restore/index.js",

@@ -23,7 +23,7 @@
   "author": "GitHub",
   "license": "MIT",
   "dependencies": {
-    "@actions/cache": "file:actions-cache-3.0.1.tgz",
+    "@actions/cache": "^3.0.0",
     "@actions/core": "^1.7.0",
     "@actions/exec": "^1.1.1",
     "@actions/io": "^1.1.2"
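Both manifest hunks move the `@actions/cache` dependency from the vendored `actions-cache-3.0.1.tgz` tarball back to `^3.0.0` from the npm registry and roll the action's own version back to 3.0.4. For orientation, a minimal consumer sketch of the toolkit package (hypothetical paths and keys; assumes it runs inside a GitHub Actions job, which provides the cache service environment):

```js
// Minimal consumer sketch for @actions/cache ^3.0.0 (not the action's own
// restore/save entry points in dist/). Paths and keys below are made up.
const cache = require('@actions/cache');

async function main() {
    const paths = ['node_modules'];           // directories to cache (example)
    const key = 'npm-linux-abc123';           // primary key (example)
    const restoreKeys = ['npm-linux-'];       // fallback key prefixes (example)

    // restoreCache resolves with the matched key on a hit, or undefined on a miss.
    const hitKey = await cache.restoreCache(paths, key, restoreKeys);

    if (!hitKey) {
        // ...install dependencies here, then upload the result under the primary key.
        await cache.saveCache(paths, key);
    }
}

main().catch(err => {
    console.error(err);
    process.exit(1);
});
```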