Mirror of https://github.com/actions/cache.git (synced 2025-10-31 07:16:24 +07:00)
Compare commits: v3.2.1 ... releases/v (107 commits)
| SHA1 | Author | Date | |
|---|---|---|---|
| 64a59d5b55 | |||
| b388bcadaf | |||
| f9ae5e0028 | |||
| 34faf3ad7c | |||
| 1d114a8000 | |||
| 8955114d15 | |||
| 3f0eaf4956 | |||
| 770b3a8151 | |||
| ed5e94a5f5 | |||
| 6277f55919 | |||
| c30e6dcb11 | |||
| 05c9b49ea4 | |||
| da311f75a6 | |||
| 7a139a9cec | |||
| 930f080bad | |||
| 56e956426f | |||
| 766d8255cd | |||
| 686bf424a8 | |||
| fb5b333162 | |||
| c11ac6c2fe | |||
| d5c949690c | |||
| 44df5ab77e | |||
| 65057ce6fe | |||
| 8031e403b4 | |||
| c8d01facfc | |||
| 81aaae062b | |||
| 9d445b2565 | |||
| adecab4b4a | |||
| 075ad790b0 | |||
| b275c83bd6 | |||
| 1ddc49105d | |||
| 407044787b | |||
| 87a7d01109 | |||
| 29d6c7aa7f | |||
| df53d3c04b | |||
| dd740c87de | |||
| 0a6e5b052a | |||
| a2137c625c | |||
| 5a2b5e5714 | |||
| 9e9a19bf5f | |||
| 84ea3e177d | |||
| 00b72c7e02 | |||
| 0cc9c1d4e8 | |||
| ccf90c02ed | |||
| 4f42dc56c1 | |||
| b6604364ae | |||
| c0cc8dd60a | |||
| 91d7bd61be | |||
| 5e7f2c1182 | |||
| 3d4af52c52 | |||
| d91f5bd2fd | |||
| 61aa90bfc3 | |||
| 0c2d18e609 | |||
| 804322aab0 | |||
| f426a0deb2 | |||
| be72d0432d | |||
| 8ecd00a4be | |||
| 12a1a354bd | |||
| 782b0bd3df | |||
| f33ca902b8 | |||
| d48d03435b | |||
| 6f77edac15 | |||
| 6e12d27152 | |||
| c346bf01e0 | |||
| 593e91a38b | |||
| e4c2242eff | |||
| 66ef8a0951 | |||
| 657c52f11e | |||
| 34e917cb7d | |||
| ac8fc97c06 | |||
| 86712a0733 | |||
| d6e98d9302 | |||
| a76826ef46 | |||
| e02e5113ed | |||
| 85ae5bbcea | |||
| cce93fb2c7 | |||
| e3d8fb0b34 | |||
| 020a412c27 | |||
| d95c048983 | |||
| 706c369cf1 | |||
| 11ab7ccfa2 | |||
| 4b5f33df54 | |||
| 56a0133650 | |||
| 19446b165a | |||
| 8a88690a20 | |||
| 6e2c6a5916 | |||
| 2c9fb32186 | |||
| 01d96636a0 | |||
| 9c5a42a7c9 | |||
| a172494938 | |||
| f8717682fb | |||
| af1210e2a3 | |||
| ab0e7714ce | |||
| fb4a5dce60 | |||
| 71334c58b2 | |||
| 888d454557 | |||
| dddd7ce07c | |||
| abddc4dd44 | |||
| 921c58ee44 | |||
| 7f45813c72 | |||
| 0769f2e443 | |||
| 5fe0b944ef | |||
| 69b8227b27 | |||
| 515d10b4fd | |||
| 669e7536d9 | |||
| 29dbbce762 | |||
| ea5981db97 | |||

.github/workflows/codeql.yml (vendored, 40 changes)

| v3.2.1 | releases/v |
|---|---|
| @@ -8,39 +8,45 @@ on: |  |
|  |  | ||||||
| jobs: | jobs: | ||||||
|   CodeQL-Build: |   CodeQL-Build: | ||||||
|     # CodeQL runs on ubuntu-latest, windows-latest, and macos-latest |  | ||||||
|     runs-on: ubuntu-latest |  | ||||||
|  |  | ||||||
|     permissions: |     # CodeQL runs on ubuntu-latest and windows-latest | ||||||
|       # required for all workflows |     runs-on: ubuntu-latest | ||||||
|       security-events: write |  | ||||||
|  |  | ||||||
|     steps: |     steps: | ||||||
|     - name: Checkout repository |     - name: Checkout repository | ||||||
|       uses: actions/checkout@v3 |       uses: actions/checkout@v3 | ||||||
|  |       with: | ||||||
|  |         # We must fetch at least the immediate parents so that if this is | ||||||
|  |         # a pull request then we can checkout the head. | ||||||
|  |         fetch-depth: 2 | ||||||
|  |  | ||||||
|  |     # If this run was triggered by a pull request event, then checkout | ||||||
|  |     # the head of the pull request instead of the merge commit. | ||||||
|  |     - run: git checkout HEAD^2 | ||||||
|  |       if: ${{ github.event_name == 'pull_request' }} | ||||||
|  |  | ||||||
|     # Initializes the CodeQL tools for scanning. |     # Initializes the CodeQL tools for scanning. | ||||||
|     - name: Initialize CodeQL |     - name: Initialize CodeQL | ||||||
|       uses: github/codeql-action/init@v2 |       uses: github/codeql-action/init@v1 | ||||||
|       # Override language selection by uncommenting this and choosing your languages |       # Override language selection by uncommenting this and choosing your languages | ||||||
|       # with: |       # with: | ||||||
|       #   languages: go, javascript, csharp, python, cpp, java, ruby |       #   languages: go, javascript, csharp, python, cpp, java | ||||||
|  |  | ||||||
|     # Autobuild attempts to build any compiled languages (C/C++, C#, Go, or Java). |     # Autobuild attempts to build any compiled languages  (C/C++, C#, or Java). | ||||||
|     # If this step fails, then you should remove it and run the build manually (see below). |     # If this step fails, then you should remove it and run the build manually (see below) | ||||||
|     - name: Autobuild |     - name: Autobuild | ||||||
|       uses: github/codeql-action/autobuild@v2 |       uses: github/codeql-action/autobuild@v1 | ||||||
|  |  | ||||||
|     # ℹ️ Command-line programs to run using the OS shell. |     # ℹ️ Command-line programs to run using the OS shell. | ||||||
|     # 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun |     # 📚 https://git.io/JvXDl | ||||||
|  |  | ||||||
|     # ✏️ If the Autobuild fails above, remove it and uncomment the following |     # ✏️ If the Autobuild fails above, remove it and uncomment the following three lines | ||||||
|     #    three lines and modify them (or add more) to build your code if your |     #    and modify them (or add more) to build your code if your project | ||||||
|     #    project uses a compiled language |     #    uses a compiled language | ||||||
|  |  | ||||||
|     #- run: | |     #- run: | | ||||||
|     #     make bootstrap |     #   make bootstrap | ||||||
|     #     make release |     #   make release | ||||||
|  |  | ||||||
|     - name: Perform CodeQL Analysis |     - name: Perform CodeQL Analysis | ||||||
|       uses: github/codeql-action/analyze@v2 |       uses: github/codeql-action/analyze@v1 | ||||||
							
								
								
									
.licenses/npm/@actions/cache.dep.yml (generated, 2 changes)

| v3.2.1 | releases/v |
|---|---|
| @@ -1,6 +1,6 @@ |  |
| --- | --- | ||||||
| name: "@actions/cache" | name: "@actions/cache" | ||||||
| version: 3.1.0 | version: 3.0.5 | ||||||
| type: npm | type: npm | ||||||
| summary: | summary: | ||||||
| homepage: | homepage: | ||||||
| @ -27,8 +27,6 @@ See ["Caching dependencies to speed up workflows"](https://docs.github.com/en/ac | |||||||
| * Fixed the download stuck problem by introducing a timeout of 1 hour for cache downloads. | * Fixed the download stuck problem by introducing a timeout of 1 hour for cache downloads. | ||||||
| * Fix zstd not working for windows on gnu tar in issues. | * Fix zstd not working for windows on gnu tar in issues. | ||||||
| * Allowing users to provide a custom timeout as input for aborting download of a cache segment using an environment variable `SEGMENT_DOWNLOAD_TIMEOUT_MINS`. Default is 60 minutes. | * Allowing users to provide a custom timeout as input for aborting download of a cache segment using an environment variable `SEGMENT_DOWNLOAD_TIMEOUT_MINS`. Default is 60 minutes. | ||||||
| * Two new actions available for granular control over caches - [restore](restore/action.yml) and [save](save/action.yml) |  | ||||||
| * Add support for cross os caching. For example, a cache saved on windows can be restored on ubuntu and vice versa. |  | ||||||
|  |  | ||||||
| Refer [here](https://github.com/actions/cache/blob/v2/README.md) for previous versions | Refer [here](https://github.com/actions/cache/blob/v2/README.md) for previous versions | ||||||
|  |  | ||||||
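The `SEGMENT_DOWNLOAD_TIMEOUT_MINS` setting mentioned in the hunk above is read from the environment rather than passed as an action input. A minimal sketch of setting it on a cache step follows; the path and key are placeholders, not values taken from this repository:

```yaml
    - name: Cache dependencies
      uses: actions/cache@v3
      env:
        # Abort a stuck cache-segment download after 10 minutes instead of the 60-minute default.
        SEGMENT_DOWNLOAD_TIMEOUT_MINS: 10
      with:
        path: ~/.npm
        key: npm-${{ hashFiles('**/package-lock.json') }}
```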
| @ -52,11 +52,3 @@ | |||||||
|  |  | ||||||
| ### 3.2.0-beta.1 | ### 3.2.0-beta.1 | ||||||
| - Added two new actions - [restore](restore/action.yml) and [save](save/action.yml) for granular control on cache. | - Added two new actions - [restore](restore/action.yml) and [save](save/action.yml) for granular control on cache. | ||||||
|  |  | ||||||
| ### 3.2.0 |  | ||||||
| - Released the two new actions - [restore](restore/action.yml) and [save](save/action.yml) for granular control on cache |  | ||||||
|  |  | ||||||
| ### 3.2.1 |  | ||||||
| - Update `@actions/cache` on windows to use gnu tar and zstd by default and fallback to bsdtar and zstd if gnu tar is not available. ([issue](https://github.com/actions/cache/issues/984)) |  | ||||||
| - Added support for fallback to gzip to restore old caches on windows. |  | ||||||
| - Added logs for cache version in case of a cache miss. |  | ||||||
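The granular `restore` and `save` actions referenced in the release notes above are published as sub-actions of this repository. A rough sketch of splitting the two phases across steps, assuming they are referenced as `actions/cache/restore@v3` and `actions/cache/save@v3` with the usual `path`/`key` inputs; the concrete path and key are placeholders:

```yaml
    - name: Restore cache without saving
      uses: actions/cache/restore@v3
      with:
        path: ~/.npm
        key: npm-${{ hashFiles('**/package-lock.json') }}

    # ... build and test steps that may add to ~/.npm ...

    - name: Save cache explicitly at the end of the job
      uses: actions/cache/save@v3
      with:
        path: ~/.npm
        key: npm-${{ hashFiles('**/package-lock.json') }}
```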
							
								
								
									
dist/restore-only/index.js (vendored, 32 changes)

| v3.2.1 | releases/v |
|---|---|
| @@ -3431,12 +3431,8 @@ function getCacheEntry(keys, paths, options) { |  |
|         const version = getCacheVersion(paths, options === null || options === void 0 ? void 0 : options.compressionMethod); |         const version = getCacheVersion(paths, options === null || options === void 0 ? void 0 : options.compressionMethod); | ||||||
|         const resource = `cache?keys=${encodeURIComponent(keys.join(','))}&version=${version}`; |         const resource = `cache?keys=${encodeURIComponent(keys.join(','))}&version=${version}`; | ||||||
|         const response = yield requestUtils_1.retryTypedResponse('getCacheEntry', () => __awaiter(this, void 0, void 0, function* () { return httpClient.getJson(getCacheApiUrl(resource)); })); |         const response = yield requestUtils_1.retryTypedResponse('getCacheEntry', () => __awaiter(this, void 0, void 0, function* () { return httpClient.getJson(getCacheApiUrl(resource)); })); | ||||||
|         // Cache not found |  |
|         if (response.statusCode === 204) { |         if (response.statusCode === 204) { | ||||||
|             // List cache for primary key only if cache miss occurs |             // Cache not found |
|             if (core.isDebug()) { |  | ||||||
|                 yield printCachesListForDiagnostics(keys[0], httpClient, version); |  | ||||||
|             } |  | ||||||
|             return null; |             return null; | ||||||
|         } |         } | ||||||
|         if (!requestUtils_1.isSuccessStatusCode(response.statusCode)) { |         if (!requestUtils_1.isSuccessStatusCode(response.statusCode)) { | ||||||
| @ -3455,22 +3451,6 @@ function getCacheEntry(keys, paths, options) { | |||||||
|     }); |     }); | ||||||
| } | } | ||||||
| exports.getCacheEntry = getCacheEntry; | exports.getCacheEntry = getCacheEntry; | ||||||
| function printCachesListForDiagnostics(key, httpClient, version) { |  | ||||||
|     return __awaiter(this, void 0, void 0, function* () { |  | ||||||
|         const resource = `caches?key=${encodeURIComponent(key)}`; |  | ||||||
|         const response = yield requestUtils_1.retryTypedResponse('listCache', () => __awaiter(this, void 0, void 0, function* () { return httpClient.getJson(getCacheApiUrl(resource)); })); |  | ||||||
|         if (response.statusCode === 200) { |  | ||||||
|             const cacheListResult = response.result; |  | ||||||
|             const totalCount = cacheListResult === null || cacheListResult === void 0 ? void 0 : cacheListResult.totalCount; |  | ||||||
|             if (totalCount && totalCount > 0) { |  | ||||||
|                 core.debug(`No matching cache found for cache key '${key}', version '${version} and scope ${process.env['GITHUB_REF']}. There exist one or more cache(s) with similar key but they have different version or scope. See more info on cache matching here: https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#matching-a-cache-key \nOther caches with similar key:`); |  | ||||||
|                 for (const cacheEntry of (cacheListResult === null || cacheListResult === void 0 ? void 0 : cacheListResult.artifactCaches) || []) { |  | ||||||
|                     core.debug(`Cache Key: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheKey}, Cache Version: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheVersion}, Cache Scope: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.scope}, Cache Created: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.creationTime}`); |  | ||||||
|                 } |  | ||||||
|             } |  | ||||||
|         } |  | ||||||
|     }); |  | ||||||
| } |  | ||||||
| function downloadCache(archiveLocation, archivePath, options) { | function downloadCache(archiveLocation, archivePath, options) { | ||||||
|     return __awaiter(this, void 0, void 0, function* () { |     return __awaiter(this, void 0, void 0, function* () { | ||||||
|         const archiveUrl = new url_1.URL(archiveLocation); |         const archiveUrl = new url_1.URL(archiveLocation); | ||||||
| @ -38349,7 +38329,7 @@ function getDecompressionProgram(tarPath, compressionMethod, archivePath) { | |||||||
|             case constants_1.CompressionMethod.Zstd: |             case constants_1.CompressionMethod.Zstd: | ||||||
|                 return BSD_TAR_ZSTD |                 return BSD_TAR_ZSTD | ||||||
|                     ? [ |                     ? [ | ||||||
|                         'zstd -d --long=30 --force -o', |                         'zstd -d --long=30 -o', | ||||||
|                         constants_1.TarFilename, |                         constants_1.TarFilename, | ||||||
|                         archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/') |                         archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/') | ||||||
|                     ] |                     ] | ||||||
| @ -38360,7 +38340,7 @@ function getDecompressionProgram(tarPath, compressionMethod, archivePath) { | |||||||
|             case constants_1.CompressionMethod.ZstdWithoutLong: |             case constants_1.CompressionMethod.ZstdWithoutLong: | ||||||
|                 return BSD_TAR_ZSTD |                 return BSD_TAR_ZSTD | ||||||
|                     ? [ |                     ? [ | ||||||
|                         'zstd -d --force -o', |                         'zstd -d -o', | ||||||
|                         constants_1.TarFilename, |                         constants_1.TarFilename, | ||||||
|                         archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/') |                         archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/') | ||||||
|                     ] |                     ] | ||||||
| @ -38386,7 +38366,7 @@ function getCompressionProgram(tarPath, compressionMethod) { | |||||||
|             case constants_1.CompressionMethod.Zstd: |             case constants_1.CompressionMethod.Zstd: | ||||||
|                 return BSD_TAR_ZSTD |                 return BSD_TAR_ZSTD | ||||||
|                     ? [ |                     ? [ | ||||||
|                         'zstd -T0 --long=30 --force -o', |                         'zstd -T0 --long=30 -o', | ||||||
|                         cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), |                         cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), | ||||||
|                         constants_1.TarFilename |                         constants_1.TarFilename | ||||||
|                     ] |                     ] | ||||||
| @ -38397,7 +38377,7 @@ function getCompressionProgram(tarPath, compressionMethod) { | |||||||
|             case constants_1.CompressionMethod.ZstdWithoutLong: |             case constants_1.CompressionMethod.ZstdWithoutLong: | ||||||
|                 return BSD_TAR_ZSTD |                 return BSD_TAR_ZSTD | ||||||
|                     ? [ |                     ? [ | ||||||
|                         'zstd -T0 --force -o', |                         'zstd -T0 -o', | ||||||
|                         cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), |                         cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), | ||||||
|                         constants_1.TarFilename |                         constants_1.TarFilename | ||||||
|                     ] |                     ] | ||||||
| @ -47276,7 +47256,7 @@ function restoreCache(paths, primaryKey, restoreKeys, options) { | |||||||
|                     if (!(cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.archiveLocation)) { |                     if (!(cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.archiveLocation)) { | ||||||
|                         return undefined; |                         return undefined; | ||||||
|                     } |                     } | ||||||
|                     core.info("Couldn't find cache entry with zstd compression, falling back to gzip compression."); |                     core.debug("Couldn't find cache entry with zstd compression, falling back to gzip compression."); | ||||||
|                 } |                 } | ||||||
|                 else { |                 else { | ||||||
|                     // Cache not found
|                     // Cache not found |                     // Cache not found |
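For context on the `--force` flag added in the hunks above: on the BSD tar path, the action runs `zstd` as a separate command and writes an intermediate tar file instead of compressing inline. Roughly, the resulting commands look like the sketch below, shown here as a standalone workflow step; the archive file names are illustrative, not taken from the code:

```yaml
    - name: Illustrative zstd round trip
      shell: bash
      run: |
        # Compress: all threads (-T0), 2^30-byte long-distance window, overwrite existing output.
        zstd -T0 --long=30 --force -o cache.tzst cache.tar
        # Decompress: the same window size must be requested again.
        zstd -d --long=30 --force -o cache.tar cache.tzst
```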
							
								
								
									
dist/restore/index.js (vendored, 32 changes)

| v3.2.1 | releases/v |
|---|---|
| @@ -3431,12 +3431,8 @@ function getCacheEntry(keys, paths, options) { |  |
|         const version = getCacheVersion(paths, options === null || options === void 0 ? void 0 : options.compressionMethod); |         const version = getCacheVersion(paths, options === null || options === void 0 ? void 0 : options.compressionMethod); | ||||||
|         const resource = `cache?keys=${encodeURIComponent(keys.join(','))}&version=${version}`; |         const resource = `cache?keys=${encodeURIComponent(keys.join(','))}&version=${version}`; | ||||||
|         const response = yield requestUtils_1.retryTypedResponse('getCacheEntry', () => __awaiter(this, void 0, void 0, function* () { return httpClient.getJson(getCacheApiUrl(resource)); })); |         const response = yield requestUtils_1.retryTypedResponse('getCacheEntry', () => __awaiter(this, void 0, void 0, function* () { return httpClient.getJson(getCacheApiUrl(resource)); })); | ||||||
|         // Cache not found |  |
|         if (response.statusCode === 204) { |         if (response.statusCode === 204) { | ||||||
|             // List cache for primary key only if cache miss occurs |             // Cache not found |
|             if (core.isDebug()) { |  | ||||||
|                 yield printCachesListForDiagnostics(keys[0], httpClient, version); |  | ||||||
|             } |  | ||||||
|             return null; |             return null; | ||||||
|         } |         } | ||||||
|         if (!requestUtils_1.isSuccessStatusCode(response.statusCode)) { |         if (!requestUtils_1.isSuccessStatusCode(response.statusCode)) { | ||||||
| @ -3455,22 +3451,6 @@ function getCacheEntry(keys, paths, options) { | |||||||
|     }); |     }); | ||||||
| } | } | ||||||
| exports.getCacheEntry = getCacheEntry; | exports.getCacheEntry = getCacheEntry; | ||||||
| function printCachesListForDiagnostics(key, httpClient, version) { |  | ||||||
|     return __awaiter(this, void 0, void 0, function* () { |  | ||||||
|         const resource = `caches?key=${encodeURIComponent(key)}`; |  | ||||||
|         const response = yield requestUtils_1.retryTypedResponse('listCache', () => __awaiter(this, void 0, void 0, function* () { return httpClient.getJson(getCacheApiUrl(resource)); })); |  | ||||||
|         if (response.statusCode === 200) { |  | ||||||
|             const cacheListResult = response.result; |  | ||||||
|             const totalCount = cacheListResult === null || cacheListResult === void 0 ? void 0 : cacheListResult.totalCount; |  | ||||||
|             if (totalCount && totalCount > 0) { |  | ||||||
|                 core.debug(`No matching cache found for cache key '${key}', version '${version} and scope ${process.env['GITHUB_REF']}. There exist one or more cache(s) with similar key but they have different version or scope. See more info on cache matching here: https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#matching-a-cache-key \nOther caches with similar key:`); |  | ||||||
|                 for (const cacheEntry of (cacheListResult === null || cacheListResult === void 0 ? void 0 : cacheListResult.artifactCaches) || []) { |  | ||||||
|                     core.debug(`Cache Key: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheKey}, Cache Version: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheVersion}, Cache Scope: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.scope}, Cache Created: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.creationTime}`); |  | ||||||
|                 } |  | ||||||
|             } |  | ||||||
|         } |  | ||||||
|     }); |  | ||||||
| } |  | ||||||
| function downloadCache(archiveLocation, archivePath, options) { | function downloadCache(archiveLocation, archivePath, options) { | ||||||
|     return __awaiter(this, void 0, void 0, function* () { |     return __awaiter(this, void 0, void 0, function* () { | ||||||
|         const archiveUrl = new url_1.URL(archiveLocation); |         const archiveUrl = new url_1.URL(archiveLocation); | ||||||
| @ -38262,7 +38242,7 @@ function getDecompressionProgram(tarPath, compressionMethod, archivePath) { | |||||||
|             case constants_1.CompressionMethod.Zstd: |             case constants_1.CompressionMethod.Zstd: | ||||||
|                 return BSD_TAR_ZSTD |                 return BSD_TAR_ZSTD | ||||||
|                     ? [ |                     ? [ | ||||||
|                         'zstd -d --long=30 --force -o', |                         'zstd -d --long=30 -o', | ||||||
|                         constants_1.TarFilename, |                         constants_1.TarFilename, | ||||||
|                         archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/') |                         archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/') | ||||||
|                     ] |                     ] | ||||||
| @ -38273,7 +38253,7 @@ function getDecompressionProgram(tarPath, compressionMethod, archivePath) { | |||||||
|             case constants_1.CompressionMethod.ZstdWithoutLong: |             case constants_1.CompressionMethod.ZstdWithoutLong: | ||||||
|                 return BSD_TAR_ZSTD |                 return BSD_TAR_ZSTD | ||||||
|                     ? [ |                     ? [ | ||||||
|                         'zstd -d --force -o', |                         'zstd -d -o', | ||||||
|                         constants_1.TarFilename, |                         constants_1.TarFilename, | ||||||
|                         archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/') |                         archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/') | ||||||
|                     ] |                     ] | ||||||
| @ -38299,7 +38279,7 @@ function getCompressionProgram(tarPath, compressionMethod) { | |||||||
|             case constants_1.CompressionMethod.Zstd: |             case constants_1.CompressionMethod.Zstd: | ||||||
|                 return BSD_TAR_ZSTD |                 return BSD_TAR_ZSTD | ||||||
|                     ? [ |                     ? [ | ||||||
|                         'zstd -T0 --long=30 --force -o', |                         'zstd -T0 --long=30 -o', | ||||||
|                         cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), |                         cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), | ||||||
|                         constants_1.TarFilename |                         constants_1.TarFilename | ||||||
|                     ] |                     ] | ||||||
| @ -38310,7 +38290,7 @@ function getCompressionProgram(tarPath, compressionMethod) { | |||||||
|             case constants_1.CompressionMethod.ZstdWithoutLong: |             case constants_1.CompressionMethod.ZstdWithoutLong: | ||||||
|                 return BSD_TAR_ZSTD |                 return BSD_TAR_ZSTD | ||||||
|                     ? [ |                     ? [ | ||||||
|                         'zstd -T0 --force -o', |                         'zstd -T0 -o', | ||||||
|                         cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), |                         cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), | ||||||
|                         constants_1.TarFilename |                         constants_1.TarFilename | ||||||
|                     ] |                     ] | ||||||
| @ -47247,7 +47227,7 @@ function restoreCache(paths, primaryKey, restoreKeys, options) { | |||||||
|                     if (!(cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.archiveLocation)) { |                     if (!(cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.archiveLocation)) { | ||||||
|                         return undefined; |                         return undefined; | ||||||
|                     } |                     } | ||||||
|                     core.info("Couldn't find cache entry with zstd compression, falling back to gzip compression."); |                     core.debug("Couldn't find cache entry with zstd compression, falling back to gzip compression."); | ||||||
|                 } |                 } | ||||||
|                 else { |                 else { | ||||||
|                     // Cache not found |                     // Cache not found |
							
								
								
									
dist/save-only/index.js (vendored, 32 changes)

| v3.2.1 | releases/v |
|---|---|
| @@ -3487,12 +3487,8 @@ function getCacheEntry(keys, paths, options) { |  |
|         const version = getCacheVersion(paths, options === null || options === void 0 ? void 0 : options.compressionMethod); |         const version = getCacheVersion(paths, options === null || options === void 0 ? void 0 : options.compressionMethod); | ||||||
|         const resource = `cache?keys=${encodeURIComponent(keys.join(','))}&version=${version}`; |         const resource = `cache?keys=${encodeURIComponent(keys.join(','))}&version=${version}`; | ||||||
|         const response = yield requestUtils_1.retryTypedResponse('getCacheEntry', () => __awaiter(this, void 0, void 0, function* () { return httpClient.getJson(getCacheApiUrl(resource)); })); |         const response = yield requestUtils_1.retryTypedResponse('getCacheEntry', () => __awaiter(this, void 0, void 0, function* () { return httpClient.getJson(getCacheApiUrl(resource)); })); | ||||||
|         // Cache not found |  |
|         if (response.statusCode === 204) { |         if (response.statusCode === 204) { | ||||||
|             // List cache for primary key only if cache miss occurs |             // Cache not found |
|             if (core.isDebug()) { |  | ||||||
|                 yield printCachesListForDiagnostics(keys[0], httpClient, version); |  | ||||||
|             } |  | ||||||
|             return null; |             return null; | ||||||
|         } |         } | ||||||
|         if (!requestUtils_1.isSuccessStatusCode(response.statusCode)) { |         if (!requestUtils_1.isSuccessStatusCode(response.statusCode)) { | ||||||
| @ -3511,22 +3507,6 @@ function getCacheEntry(keys, paths, options) { | |||||||
|     }); |     }); | ||||||
| } | } | ||||||
| exports.getCacheEntry = getCacheEntry; | exports.getCacheEntry = getCacheEntry; | ||||||
| function printCachesListForDiagnostics(key, httpClient, version) { |  | ||||||
|     return __awaiter(this, void 0, void 0, function* () { |  | ||||||
|         const resource = `caches?key=${encodeURIComponent(key)}`; |  | ||||||
|         const response = yield requestUtils_1.retryTypedResponse('listCache', () => __awaiter(this, void 0, void 0, function* () { return httpClient.getJson(getCacheApiUrl(resource)); })); |  | ||||||
|         if (response.statusCode === 200) { |  | ||||||
|             const cacheListResult = response.result; |  | ||||||
|             const totalCount = cacheListResult === null || cacheListResult === void 0 ? void 0 : cacheListResult.totalCount; |  | ||||||
|             if (totalCount && totalCount > 0) { |  | ||||||
|                 core.debug(`No matching cache found for cache key '${key}', version '${version} and scope ${process.env['GITHUB_REF']}. There exist one or more cache(s) with similar key but they have different version or scope. See more info on cache matching here: https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#matching-a-cache-key \nOther caches with similar key:`); |  | ||||||
|                 for (const cacheEntry of (cacheListResult === null || cacheListResult === void 0 ? void 0 : cacheListResult.artifactCaches) || []) { |  | ||||||
|                     core.debug(`Cache Key: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheKey}, Cache Version: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheVersion}, Cache Scope: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.scope}, Cache Created: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.creationTime}`); |  | ||||||
|                 } |  | ||||||
|             } |  | ||||||
|         } |  | ||||||
|     }); |  | ||||||
| } |  | ||||||
| function downloadCache(archiveLocation, archivePath, options) { | function downloadCache(archiveLocation, archivePath, options) { | ||||||
|     return __awaiter(this, void 0, void 0, function* () { |     return __awaiter(this, void 0, void 0, function* () { | ||||||
|         const archiveUrl = new url_1.URL(archiveLocation); |         const archiveUrl = new url_1.URL(archiveLocation); | ||||||
| @ -38313,7 +38293,7 @@ function getDecompressionProgram(tarPath, compressionMethod, archivePath) { | |||||||
|             case constants_1.CompressionMethod.Zstd: |             case constants_1.CompressionMethod.Zstd: | ||||||
|                 return BSD_TAR_ZSTD |                 return BSD_TAR_ZSTD | ||||||
|                     ? [ |                     ? [ | ||||||
|                         'zstd -d --long=30 --force -o', |                         'zstd -d --long=30 -o', | ||||||
|                         constants_1.TarFilename, |                         constants_1.TarFilename, | ||||||
|                         archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/') |                         archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/') | ||||||
|                     ] |                     ] | ||||||
| @ -38324,7 +38304,7 @@ function getDecompressionProgram(tarPath, compressionMethod, archivePath) { | |||||||
|             case constants_1.CompressionMethod.ZstdWithoutLong: |             case constants_1.CompressionMethod.ZstdWithoutLong: | ||||||
|                 return BSD_TAR_ZSTD |                 return BSD_TAR_ZSTD | ||||||
|                     ? [ |                     ? [ | ||||||
|                         'zstd -d --force -o', |                         'zstd -d -o', | ||||||
|                         constants_1.TarFilename, |                         constants_1.TarFilename, | ||||||
|                         archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/') |                         archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/') | ||||||
|                     ] |                     ] | ||||||
| @ -38350,7 +38330,7 @@ function getCompressionProgram(tarPath, compressionMethod) { | |||||||
|             case constants_1.CompressionMethod.Zstd: |             case constants_1.CompressionMethod.Zstd: | ||||||
|                 return BSD_TAR_ZSTD |                 return BSD_TAR_ZSTD | ||||||
|                     ? [ |                     ? [ | ||||||
|                         'zstd -T0 --long=30 --force -o', |                         'zstd -T0 --long=30 -o', | ||||||
|                         cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), |                         cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), | ||||||
|                         constants_1.TarFilename |                         constants_1.TarFilename | ||||||
|                     ] |                     ] | ||||||
| @ -38361,7 +38341,7 @@ function getCompressionProgram(tarPath, compressionMethod) { | |||||||
|             case constants_1.CompressionMethod.ZstdWithoutLong: |             case constants_1.CompressionMethod.ZstdWithoutLong: | ||||||
|                 return BSD_TAR_ZSTD |                 return BSD_TAR_ZSTD | ||||||
|                     ? [ |                     ? [ | ||||||
|                         'zstd -T0 --force -o', |                         'zstd -T0 -o', | ||||||
|                         cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), |                         cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), | ||||||
|                         constants_1.TarFilename |                         constants_1.TarFilename | ||||||
|                     ] |                     ] | ||||||
| @ -47389,7 +47369,7 @@ function restoreCache(paths, primaryKey, restoreKeys, options) { | |||||||
|                     if (!(cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.archiveLocation)) { |                     if (!(cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.archiveLocation)) { | ||||||
|                         return undefined; |                         return undefined; | ||||||
|                     } |                     } | ||||||
|                     core.info("Couldn't find cache entry with zstd compression, falling back to gzip compression."); |                     core.debug("Couldn't find cache entry with zstd compression, falling back to gzip compression."); | ||||||
|                 } |                 } | ||||||
|                 else { |                 else { | ||||||
|                     // Cache not found |                     // Cache not found |
							
								
								
									
dist/save/index.js (vendored, 32 changes)

| v3.2.1 | releases/v |
|---|---|
| @@ -3431,12 +3431,8 @@ function getCacheEntry(keys, paths, options) { |  |
|         const version = getCacheVersion(paths, options === null || options === void 0 ? void 0 : options.compressionMethod); |         const version = getCacheVersion(paths, options === null || options === void 0 ? void 0 : options.compressionMethod); | ||||||
|         const resource = `cache?keys=${encodeURIComponent(keys.join(','))}&version=${version}`; |         const resource = `cache?keys=${encodeURIComponent(keys.join(','))}&version=${version}`; | ||||||
|         const response = yield requestUtils_1.retryTypedResponse('getCacheEntry', () => __awaiter(this, void 0, void 0, function* () { return httpClient.getJson(getCacheApiUrl(resource)); })); |         const response = yield requestUtils_1.retryTypedResponse('getCacheEntry', () => __awaiter(this, void 0, void 0, function* () { return httpClient.getJson(getCacheApiUrl(resource)); })); | ||||||
|         // Cache not found |  |
|         if (response.statusCode === 204) { |         if (response.statusCode === 204) { | ||||||
|             // List cache for primary key only if cache miss occurs |             // Cache not found |
|             if (core.isDebug()) { |  | ||||||
|                 yield printCachesListForDiagnostics(keys[0], httpClient, version); |  | ||||||
|             } |  | ||||||
|             return null; |             return null; | ||||||
|         } |         } | ||||||
|         if (!requestUtils_1.isSuccessStatusCode(response.statusCode)) { |         if (!requestUtils_1.isSuccessStatusCode(response.statusCode)) { | ||||||
| @ -3455,22 +3451,6 @@ function getCacheEntry(keys, paths, options) { | |||||||
|     }); |     }); | ||||||
| } | } | ||||||
| exports.getCacheEntry = getCacheEntry; | exports.getCacheEntry = getCacheEntry; | ||||||
| function printCachesListForDiagnostics(key, httpClient, version) { |  | ||||||
|     return __awaiter(this, void 0, void 0, function* () { |  | ||||||
|         const resource = `caches?key=${encodeURIComponent(key)}`; |  | ||||||
|         const response = yield requestUtils_1.retryTypedResponse('listCache', () => __awaiter(this, void 0, void 0, function* () { return httpClient.getJson(getCacheApiUrl(resource)); })); |  | ||||||
|         if (response.statusCode === 200) { |  | ||||||
|             const cacheListResult = response.result; |  | ||||||
|             const totalCount = cacheListResult === null || cacheListResult === void 0 ? void 0 : cacheListResult.totalCount; |  | ||||||
|             if (totalCount && totalCount > 0) { |  | ||||||
|                 core.debug(`No matching cache found for cache key '${key}', version '${version} and scope ${process.env['GITHUB_REF']}. There exist one or more cache(s) with similar key but they have different version or scope. See more info on cache matching here: https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#matching-a-cache-key \nOther caches with similar key:`); |  | ||||||
|                 for (const cacheEntry of (cacheListResult === null || cacheListResult === void 0 ? void 0 : cacheListResult.artifactCaches) || []) { |  | ||||||
|                     core.debug(`Cache Key: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheKey}, Cache Version: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheVersion}, Cache Scope: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.scope}, Cache Created: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.creationTime}`); |  | ||||||
|                 } |  | ||||||
|             } |  | ||||||
|         } |  | ||||||
|     }); |  | ||||||
| } |  | ||||||
| function downloadCache(archiveLocation, archivePath, options) { | function downloadCache(archiveLocation, archivePath, options) { | ||||||
|     return __awaiter(this, void 0, void 0, function* () { |     return __awaiter(this, void 0, void 0, function* () { | ||||||
|         const archiveUrl = new url_1.URL(archiveLocation); |         const archiveUrl = new url_1.URL(archiveLocation); | ||||||
| @ -38257,7 +38237,7 @@ function getDecompressionProgram(tarPath, compressionMethod, archivePath) { | |||||||
|             case constants_1.CompressionMethod.Zstd: |             case constants_1.CompressionMethod.Zstd: | ||||||
|                 return BSD_TAR_ZSTD |                 return BSD_TAR_ZSTD | ||||||
|                     ? [ |                     ? [ | ||||||
|                         'zstd -d --long=30 --force -o', |                         'zstd -d --long=30 -o', | ||||||
|                         constants_1.TarFilename, |                         constants_1.TarFilename, | ||||||
|                         archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/') |                         archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/') | ||||||
|                     ] |                     ] | ||||||
| @ -38268,7 +38248,7 @@ function getDecompressionProgram(tarPath, compressionMethod, archivePath) { | |||||||
|             case constants_1.CompressionMethod.ZstdWithoutLong: |             case constants_1.CompressionMethod.ZstdWithoutLong: | ||||||
|                 return BSD_TAR_ZSTD |                 return BSD_TAR_ZSTD | ||||||
|                     ? [ |                     ? [ | ||||||
|                         'zstd -d --force -o', |                         'zstd -d -o', | ||||||
|                         constants_1.TarFilename, |                         constants_1.TarFilename, | ||||||
|                         archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/') |                         archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/') | ||||||
|                     ] |                     ] | ||||||
| @ -38294,7 +38274,7 @@ function getCompressionProgram(tarPath, compressionMethod) { | |||||||
|             case constants_1.CompressionMethod.Zstd: |             case constants_1.CompressionMethod.Zstd: | ||||||
|                 return BSD_TAR_ZSTD |                 return BSD_TAR_ZSTD | ||||||
|                     ? [ |                     ? [ | ||||||
|                         'zstd -T0 --long=30 --force -o', |                         'zstd -T0 --long=30 -o', | ||||||
|                         cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), |                         cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), | ||||||
|                         constants_1.TarFilename |                         constants_1.TarFilename | ||||||
|                     ] |                     ] | ||||||
| @ -38305,7 +38285,7 @@ function getCompressionProgram(tarPath, compressionMethod) { | |||||||
|             case constants_1.CompressionMethod.ZstdWithoutLong: |             case constants_1.CompressionMethod.ZstdWithoutLong: | ||||||
|                 return BSD_TAR_ZSTD |                 return BSD_TAR_ZSTD | ||||||
|                     ? [ |                     ? [ | ||||||
|                         'zstd -T0 --force -o', |                         'zstd -T0 -o', | ||||||
|                         cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), |                         cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), | ||||||
|                         constants_1.TarFilename |                         constants_1.TarFilename | ||||||
|                     ] |                     ] | ||||||
| @ -47362,7 +47342,7 @@ function restoreCache(paths, primaryKey, restoreKeys, options) { | |||||||
|                     if (!(cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.archiveLocation)) { |                     if (!(cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.archiveLocation)) { | ||||||
|                         return undefined; |                         return undefined; | ||||||
|                     } |                     } | ||||||
|                     core.info("Couldn't find cache entry with zstd compression, falling back to gzip compression."); |                     core.debug("Couldn't find cache entry with zstd compression, falling back to gzip compression."); | ||||||
|                 } |                 } | ||||||
|                 else { |                 else { | ||||||
|                     // Cache not found |                     // Cache not found |
							
								
								
									
package-lock.json (generated, 18 changes)

| v3.2.1 | releases/v |
|---|---|
| @@ -1,15 +1,15 @@ |  |
| { | { | ||||||
|   "name": "cache", |   "name": "cache", | ||||||
|   "version": "3.2.1", |   "version": "3.2.0-beta.1", | ||||||
|   "lockfileVersion": 2, |   "lockfileVersion": 2, | ||||||
|   "requires": true, |   "requires": true, | ||||||
|   "packages": { |   "packages": { | ||||||
|     "": { |     "": { | ||||||
|       "name": "cache", |       "name": "cache", | ||||||
|       "version": "3.2.1", |       "version": "3.2.0-beta.1", | ||||||
|       "license": "MIT", |       "license": "MIT", | ||||||
|       "dependencies": { |       "dependencies": { | ||||||
|         "@actions/cache": "^3.1.0", |         "@actions/cache": "3.1.0-beta.3", | ||||||
|         "@actions/core": "^1.10.0", |         "@actions/core": "^1.10.0", | ||||||
|         "@actions/exec": "^1.1.1", |         "@actions/exec": "^1.1.1", | ||||||
|         "@actions/io": "^1.1.2" |         "@actions/io": "^1.1.2" | ||||||
| @ -36,9 +36,9 @@ | |||||||
|       } |       } | ||||||
|     }, |     }, | ||||||
|     "node_modules/@actions/cache": { |     "node_modules/@actions/cache": { | ||||||
|       "version": "3.1.0", |       "version": "3.1.0-beta.3", | ||||||
|       "resolved": "https://registry.npmjs.org/@actions/cache/-/cache-3.1.0.tgz", |       "resolved": "https://registry.npmjs.org/@actions/cache/-/cache-3.1.0-beta.3.tgz", | ||||||
|       "integrity": "sha512-wKGJkpK3uFTgwy+KA0fxz0H3/ZPymdi0IlyhMmyoMeWd+CIv8xVPWdGlrPDDdN9bFgve2yvEPZVaKRb43Uwtyg==", |       "integrity": "sha512-71S1vd0WKLbC2lAe04pCYqTLBjSa8gURtiqnVBCYAt8QVBjOfwa2D3ESf2m8K2xjUxman/Yimdp7CPJDyFnxZg==", | ||||||
|       "dependencies": { |       "dependencies": { | ||||||
|         "@actions/core": "^1.10.0", |         "@actions/core": "^1.10.0", | ||||||
|         "@actions/exec": "^1.0.1", |         "@actions/exec": "^1.0.1", | ||||||
| @ -9722,9 +9722,9 @@ | |||||||
|   }, |   }, | ||||||
|   "dependencies": { |   "dependencies": { | ||||||
|     "@actions/cache": { |     "@actions/cache": { | ||||||
|       "version": "3.1.0", |       "version": "3.1.0-beta.3", | ||||||
|       "resolved": "https://registry.npmjs.org/@actions/cache/-/cache-3.1.0.tgz", |       "resolved": "https://registry.npmjs.org/@actions/cache/-/cache-3.1.0-beta.3.tgz", | ||||||
|       "integrity": "sha512-wKGJkpK3uFTgwy+KA0fxz0H3/ZPymdi0IlyhMmyoMeWd+CIv8xVPWdGlrPDDdN9bFgve2yvEPZVaKRb43Uwtyg==", |       "integrity": "sha512-71S1vd0WKLbC2lAe04pCYqTLBjSa8gURtiqnVBCYAt8QVBjOfwa2D3ESf2m8K2xjUxman/Yimdp7CPJDyFnxZg==", | ||||||
|       "requires": { |       "requires": { | ||||||
|         "@actions/core": "^1.10.0", |         "@actions/core": "^1.10.0", | ||||||
|         "@actions/exec": "^1.0.1", |         "@actions/exec": "^1.0.1", | ||||||
| @ -1,6 +1,6 @@ | |||||||
| { | { | ||||||
|   "name": "cache", |   "name": "cache", | ||||||
|   "version": "3.2.1", |   "version": "3.2.0-beta.1", | ||||||
|   "private": true, |   "private": true, | ||||||
|   "description": "Cache dependencies and build outputs", |   "description": "Cache dependencies and build outputs", | ||||||
|   "main": "dist/restore/index.js", |   "main": "dist/restore/index.js", | ||||||
| @ -23,7 +23,7 @@ | |||||||
|   "author": "GitHub", |   "author": "GitHub", | ||||||
|   "license": "MIT", |   "license": "MIT", | ||||||
|   "dependencies": { |   "dependencies": { | ||||||
|     "@actions/cache": "^3.1.0", |     "@actions/cache": "3.1.0-beta.3", | ||||||
|     "@actions/core": "^1.10.0", |     "@actions/core": "^1.10.0", | ||||||
|     "@actions/exec": "^1.1.1", |     "@actions/exec": "^1.1.1", | ||||||
|     "@actions/io": "^1.1.2" |     "@actions/io": "^1.1.2" | ||||||
| @ -19,6 +19,24 @@ A cache today is immutable and cannot be updated. But some use cases require the | |||||||
| ## Use cache across feature branches | ## Use cache across feature branches | ||||||
| Reusing cache across feature branches is not allowed today to provide cache [isolation](https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#restrictions-for-accessing-a-cache). However if both feature branches are from the default branch, a good way to achieve this is to ensure that the default branch has a cache. This cache will then be consumable by both feature branches. | Reusing cache across feature branches is not allowed today to provide cache [isolation](https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#restrictions-for-accessing-a-cache). However if both feature branches are from the default branch, a good way to achieve this is to ensure that the default branch has a cache. This cache will then be consumable by both feature branches. | ||||||
|  |  | ||||||
|  | ## Improving cache restore performance on Windows/Using cross-os caching | ||||||
|  | Currently, cache restore is slow on Windows due to tar being inherently slow and the compression algorithm `gzip` in use. `zstd` is the default algorithm in use on linux and macos. It was disabled on Windows due to issues with bsd tar(libarchive), the tar implementation in use on Windows.  | ||||||
|  |  | ||||||
|  | To improve cache restore performance, we can re-enable `zstd` as the compression algorithm using the following workaround. Add the following step to your workflow before the cache step: | ||||||
|  |  | ||||||
|  | ```yaml | ||||||
|  |     - if: ${{ runner.os == 'Windows' }} | ||||||
|  |       name: Use GNU tar | ||||||
|  |       shell: cmd | ||||||
|  |       run: | | ||||||
|  |         echo "Adding GNU tar to PATH" | ||||||
|  |         echo C:\Program Files\Git\usr\bin>>"%GITHUB_PATH%" | ||||||
|  | ``` | ||||||
|  |  | ||||||
|  | The `cache` action will use GNU tar instead of bsd tar on Windows. This should work on all Github Hosted runners as it is. For self-hosted runners, please ensure you have GNU tar and `zstd` installed. | ||||||
|  |  | ||||||
|  | The above workaround is also needed if you wish to use cross-os caching since difference of compression algorithms will result in different cache versions for the same cache key. So the above workaround will ensure `zstd` is used for caching on all platforms thus resulting in the same cache version for the same cache key. | ||||||
|  |  | ||||||
| ## Force deletion of caches overriding default cache eviction policy | ## Force deletion of caches overriding default cache eviction policy | ||||||
| Caches have [branch scope restriction](https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#restrictions-for-accessing-a-cache) in place. This means that if caches for a specific branch are using a lot of storage quota, it may result into more frequently used caches from `default` branch getting thrashed. For example, if there are many pull requests happening on a repo and are creating caches, these cannot be used in default branch scope but will still occupy a lot of space till they get cleaned up by [eviction policy](https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#usage-limits-and-eviction-policy). But sometime we want to clean them up on a faster cadence so as to ensure default branch is not thrashing. In order to achieve this, [gh-actions-cache cli](https://github.com/actions/gh-actions-cache/) can be used to delete caches for specific branches. | Caches have [branch scope restriction](https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#restrictions-for-accessing-a-cache) in place. This means that if caches for a specific branch are using a lot of storage quota, it may result into more frequently used caches from `default` branch getting thrashed. For example, if there are many pull requests happening on a repo and are creating caches, these cannot be used in default branch scope but will still occupy a lot of space till they get cleaned up by [eviction policy](https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#usage-limits-and-eviction-policy). But sometime we want to clean them up on a faster cadence so as to ensure default branch is not thrashing. In order to achieve this, [gh-actions-cache cli](https://github.com/actions/gh-actions-cache/) can be used to delete caches for specific branches. | ||||||
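The `gh-actions-cache` CLI mentioned above can also be run from a workflow. A hedged sketch follows, assuming the extension's `list` and `delete` subcommands with `-R` (repository), `-B` (branch), and `--confirm` flags as documented for the extension, and a token that is allowed to delete caches; the branch and cache key names are placeholders:

```yaml
    - name: Purge caches created by a feature branch
      env:
        GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
      run: |
        gh extension install actions/gh-actions-cache
        # Inspect what is currently cached for the branch.
        gh actions-cache list -R ${{ github.repository }} -B refs/heads/my-feature
        # Delete a specific key without waiting for the eviction policy.
        gh actions-cache delete npm-abc123 -R ${{ github.repository }} -B refs/heads/my-feature --confirm
```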